From c5f00a8d7091b46b0ecd175f722d98ea8bea485e Mon Sep 17 00:00:00 2001
From: HaodongDuan
Date: Fri, 16 Oct 2020 17:35:47 +0800
Subject: [PATCH 001/414] resolve comments

---
 tools/data/hvu/generate_sub_file_list.py | 51 ++++++++++++++++++++++++
 1 file changed, 51 insertions(+)
 create mode 100644 tools/data/hvu/generate_sub_file_list.py

diff --git a/tools/data/hvu/generate_sub_file_list.py b/tools/data/hvu/generate_sub_file_list.py
new file mode 100644
index 0000000000..77c7bed651
--- /dev/null
+++ b/tools/data/hvu/generate_sub_file_list.py
@@ -0,0 +1,51 @@
+import argparse
+import os.path as osp
+
+import mmcv
+
+
+def main(annotation_file, category):
+    assert category in [
+        'action', 'attribute', 'concept', 'event', 'object', 'scene'
+    ]
+
+    data = mmcv.load(annotation_file)
+    basename = osp.basename(annotation_file)
+    dirname = osp.dirname(annotation_file)
+    basename = basename.replace('hvu', f'hvu_{category}')
+
+    target_file = osp.join(dirname, basename)
+
+    def parse_item(item, category):
+        # Keep the item only if it is annotated with tags of the given
+        # category, and replace its label dict with the tags of that category.
+        label = item['label']
+        if category in label:
+            item['label'] = label[category]
+            return item
+        else:
+            return None
+
+    result = []
+    for item in data:
+        item = parse_item(item, category)
+        if item is not None:
+            result.append(item)
+
+    # Dump the filtered list, not the original annotation list.
+    mmcv.dump(result, target_file)
+
+
+if __name__ == '__main__':
+    description = 'Helper script for generating HVU per-category file list.'
+    p = argparse.ArgumentParser(description=description)
+    p.add_argument(
+        'annotation_file',
+        type=str,
+        help=('The annotation file which contains tags of all categories.'))
+    p.add_argument(
+        'category',
+        type=str,
+        choices=['action', 'attribute', 'concept', 'event', 'object', 'scene'],
+        help='The tag category that you want to generate file list for.')
+    main(**vars(p.parse_args()))

From 05575c18bf7dbba7e6e9f9a0f1c4f973668d2c44 Mon Sep 17 00:00:00 2001
From: HaodongDuan
Date: Fri, 16 Oct 2020 17:37:19 +0800
Subject: [PATCH 002/414] update changelog

---
 docs/changelog.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/docs/changelog.md b/docs/changelog.md
index d0fe20a249..3cff48664d 100644
--- a/docs/changelog.md
+++ b/docs/changelog.md
@@ -4,6 +4,7 @@
 **Improvements**
 
 - Set default values of 'average_clips' in each config file so that there is no need to set it explicitly during testing in most cases ([#232](https://github.com/open-mmlab/mmaction2/pull/232))
+- Extend HVU datatools to generate individual file list for each tag category ([#258](https://github.com/open-mmlab/mmaction2/pull/258))
 
 **Bug Fixes**
 - Fix the potential bug for default value in dataset_setting ([#245](https://github.com/open-mmlab/mmaction2/pull/245))

From 24d1b7e121f4915c77503097c732559d2fac4513 Mon Sep 17 00:00:00 2001
From: Jintao Lin <528557675@qq.com>
Date: Fri, 26 Mar 2021 21:27:20 +0800
Subject: [PATCH 003/414] CN configs (#759)

* first tsm cn config

* template

* tin c3d

* csn

* i3d

* r31d

* slowonly slowfast

* not full tanet

* ssn bsn

* half bmn

* bmn bsn

* polish

---
 configs/localization/bmn/README_zh-CN.md     |  98 +++++++
 configs/localization/bsn/README_zh-CN.md     | 155 +++++++++++
 configs/localization/ssn/README.md           |   4 +-
 configs/localization/ssn/README_zh-CN.md     |  64 +++++
 configs/recognition/c3d/README_zh-CN.md      |  69 ++++++
 configs/recognition/csn/README.md            |   1 -
 configs/recognition/csn/README_zh-CN.md      |  83 +++++++
 configs/recognition/i3d/README_zh-CN.md      |  90 ++++++
 configs/recognition/r2plus1d/README_zh-CN.md |  72 ++++++
 configs/recognition/slowfast/README_zh-CN.md |  76 +++++++
configs/recognition/slowonly/README_zh-CN.md | 118 ++++++++++ configs/recognition/tanet/README_zh-CN.md | 69 ++++++ configs/recognition/tin/README_zh-CN.md | 84 +++++++ configs/recognition/tsm/README.md | 6 +- configs/recognition/tsm/README_zh-CN.md | 140 ++++++++++++ configs/recognition/tsn/README_zh-CN.md | 220 +++++++++++++++++++ configs/recognition/x3d/README.md | 2 +- configs/recognition/x3d/README_zh-CN.md | 51 +++++ 18 files changed, 1394 insertions(+), 7 deletions(-) create mode 100644 configs/localization/bmn/README_zh-CN.md create mode 100644 configs/localization/bsn/README_zh-CN.md create mode 100644 configs/localization/ssn/README_zh-CN.md create mode 100644 configs/recognition/c3d/README_zh-CN.md create mode 100644 configs/recognition/csn/README_zh-CN.md create mode 100644 configs/recognition/i3d/README_zh-CN.md create mode 100644 configs/recognition/r2plus1d/README_zh-CN.md create mode 100644 configs/recognition/slowfast/README_zh-CN.md create mode 100644 configs/recognition/slowonly/README_zh-CN.md create mode 100644 configs/recognition/tanet/README_zh-CN.md create mode 100644 configs/recognition/tin/README_zh-CN.md create mode 100644 configs/recognition/tsm/README_zh-CN.md create mode 100644 configs/recognition/tsn/README_zh-CN.md create mode 100644 configs/recognition/x3d/README_zh-CN.md diff --git a/configs/localization/bmn/README_zh-CN.md b/configs/localization/bmn/README_zh-CN.md new file mode 100644 index 0000000000..5d3a7ffc38 --- /dev/null +++ b/configs/localization/bmn/README_zh-CN.md @@ -0,0 +1,98 @@ +# BMN + +## 简介 + +[ALGORITHM] + +```BibTeX +@inproceedings{lin2019bmn, + title={Bmn: Boundary-matching network for temporal action proposal generation}, + author={Lin, Tianwei and Liu, Xiao and Li, Xin and Ding, Errui and Wen, Shilei}, + booktitle={Proceedings of the IEEE International Conference on Computer Vision}, + pages={3889--3898}, + year={2019} +} +``` + +[DATASET] + +```BibTeX +@article{zhao2017cuhk, + title={Cuhk \& ethz \& siat submission to activitynet challenge 2017}, + author={Zhao, Y and Zhang, B and Wu, Z and Yang, S and Zhou, L and Yan, S and Wang, L and Xiong, Y and Lin, D and Qiao, Y and others}, + journal={arXiv preprint arXiv:1710.08011}, + volume={8}, + year={2017} +} +``` + +## 模型库 + +### ActivityNet feature + +|配置文件 |特征 | GPU 数量 | AR@100| AUC | AP@0.5 | AP@0.75 | AP@0.95 | mAP | GPU 显存占用 (M) | 推理时间 (s) | ckpt | log| json| +|:-:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:-:|---|:-:|:-:|---| +|[bmn_400x100_9e_2x8_activitynet_feature](/configs/localization/bmn/bmn_400x100_2x8_9e_activitynet_feature.py) |cuhk_mean_100 |2|75.28|67.22|42.47|31.31|9.92|30.34|5420|3.27|[ckpt](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_9e_activitynet_feature/bmn_400x100_9e_activitynet_feature_20200619-42a3b111.pth)| [log](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_9e_activitynet_feature/bmn_400x100_9e_activitynet_feature.log)| [json](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_9e_activitynet_feature/bmn_400x100_9e_activitynet_feature.log.json)| +| |mmaction_video |2|75.43|67.22|42.62|31.56|10.86|30.77|5420|3.27|[ckpt](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_video/bmn_400x100_2x8_9e_mmaction_video_20200809-c9fd14d2.pth)| [log](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_video/bmn_400x100_2x8_9e_mmaction_video_20200809.log) | 
[json](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_video/bmn_400x100_2x8_9e_mmaction_video_20200809.json) |
+| |mmaction_clip |2|75.35|67.38|43.08|32.19|10.73|31.15|5420|3.27|[ckpt](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809-10d803ce.pth)| [log](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809.log) | [json](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809.json) |
+| [BMN-official](https://github.com/JJBOY/BMN-Boundary-Matching-Network) (for reference)* |cuhk_mean_100 |-|75.27|67.49|42.22|30.98|9.22|30.00|-|-|-| - | - |
+
+注意:
+
+1. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。
+   依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。
+   如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。
+2. 对于 **特征** 这一列,`cuhk_mean_100` 表示所使用的特征为利用 [anet2016-cuhk](https://github.com/yjxiong/anet2016-cuhk) 代码库抽取的,被广泛使用的 CUHK ActivityNet 特征,
+   `mmaction_video` 和 `mmaction_clip` 分别表示所使用的特征为利用 MMAction 抽取的视频级别 ActivityNet 预训练模型特征,以及视频片段级别 ActivityNet 预训练模型特征。
+3. MMAction2 使用 ActivityNet2017 未剪辑视频分类赛道上 [anet_cuhk_2017](https://download.openmmlab.com/mmaction/localization/cuhk_anet17_pred.json) 所提交的结果来为每个视频的时序动作候选指定标签,以用于 BMN 模型评估。
+
+*MMAction2 在 [原始代码库](https://github.com/JJBOY/BMN-Boundary-Matching-Network) 上训练 BMN,并且在 [anet_cuhk_2017](https://download.openmmlab.com/mmaction/localization/cuhk_anet17_pred.json) 的对应标签上评估时序动作候选生成和时序检测的结果。
+
+对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 ActivityNet 特征部分。
+
+## 如何训练
+
+用户可以使用以下指令进行模型训练。
+
+```shell
+python tools/train.py ${CONFIG_FILE} [optional arguments]
+```
+
+例如:在 ActivityNet 特征上训练 BMN。
+
+```shell
+python tools/train.py configs/localization/bmn/bmn_400x100_2x8_9e_activitynet_feature.py
+```
+
+更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。
+
+## 如何测试
+
+用户可以使用以下指令进行模型测试。
+
+```shell
+python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments]
+```
+
+例如:在 ActivityNet 特征上测试 BMN 模型。
+
+```shell
+# 注意:如果需要进行指标验证,需确保测试数据的标注文件包含真实标签
+python tools/test.py configs/localization/bmn/bmn_400x100_2x8_9e_activitynet_feature.py checkpoints/SOME_CHECKPOINT.pth --eval AR@AN --out results.json
+```
+
+用户也可以利用上一步生成的时序动作候选文件(即上述命令中的 `results.json`)和 [anet_cuhk_2017](https://download.openmmlab.com/mmaction/localization/cuhk_anet17_pred.json) 的类别预测结果,评估模型时序检测的性能。
+
+```shell
+python tools/analysis/report_map.py --proposal path/to/proposal_file
+```
+
+注意:
+
+1. 
(可选项) 用户可以使用以下指令生成格式化的时序动作候选文件,该文件可被送入动作识别器中(目前只支持 SSN 和 P-GCN,不包括 TSN, I3D 等),以获得时序动作候选的分类结果。 + + ```shell + python tools/data/activitynet/convert_proposal_format.py + ``` + +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/localization/bsn/README_zh-CN.md b/configs/localization/bsn/README_zh-CN.md new file mode 100644 index 0000000000..3072e2ed00 --- /dev/null +++ b/configs/localization/bsn/README_zh-CN.md @@ -0,0 +1,155 @@ +# BSN + +## 简介 + +[ALGORITHM] + +```BibTeX +@inproceedings{lin2018bsn, + title={Bsn: Boundary sensitive network for temporal action proposal generation}, + author={Lin, Tianwei and Zhao, Xu and Su, Haisheng and Wang, Chongjing and Yang, Ming}, + booktitle={Proceedings of the European Conference on Computer Vision (ECCV)}, + pages={3--19}, + year={2018} +} +``` + +## 模型库 + +### ActivityNet feature + +|配置文件 |特征 | GPU 数量| 预训练 | AR@100| AUC | GPU 显存占用 (M) | 迭代时间 (s) | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:-:| +|bsn_400x100_1x16_20e_activitynet_feature |cuhk_mean_100 |1| None |74.66|66.45|41(TEM)+25(PEM)|0.074(TEM)+0.036(PEM)|[ckpt_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature/bsn_tem_400x100_1x16_20e_activitynet_feature_20200619-cd6accc3.pth) [ckpt_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature/bsn_pem_400x100_1x16_20e_activitynet_feature_20210203-1c27763d.pth)| [log_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature/bsn_tem_400x100_1x16_20e_activitynet_feature.log) [log_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature/bsn_pem_400x100_1x16_20e_activitynet_feature.log)| [json_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature/bsn_tem_400x100_1x16_20e_activitynet_feature.log.json) [json_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature/bsn_pem_400x100_1x16_20e_activitynet_feature.log.json)| +| |mmaction_video |1| None |74.93|66.74|41(TEM)+25(PEM)|0.074(TEM)+0.036(PEM)|[ckpt_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_video/bsn_tem_400x100_1x16_20e_mmaction_video_20200809-ad6ec626.pth) [ckpt_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_video/bsn_pem_400x100_1x16_20e_mmaction_video_20200809-aa861b26.pth)| [log_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_video/bsn_tem_400x100_1x16_20e_mmaction_video_20200809.log) [log_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_video/bsn_pem_400x100_1x16_20e_mmaction_video_20200809.log) | [json_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_video/bsn_tem_400x100_1x16_20e_mmaction_video_20200809.json) [json_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_video/bsn_pem_400x100_1x16_20e_mmaction_video_20200809.json) | +| |mmaction_clip |1| None |75.19|66.81|41(TEM)+25(PEM)|0.074(TEM)+0.036(PEM)|[ckpt_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_clip/bsn_tem_400x100_1x16_20e_mmaction_clip_20200809-0a563554.pth) 
[ckpt_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_clip/bsn_pem_400x100_1x16_20e_mmaction_clip_20200809-e32f61e6.pth)| [log_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_clip/bsn_tem_400x100_1x16_20e_mmaction_clip_20200809.log) [log_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_clip/bsn_pem_400x100_1x16_20e_mmaction_clip_20200809.log) | [json_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_clip/bsn_tem_400x100_1x16_20e_mmaction_clip_20200809.json) [json_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_clip/bsn_pem_400x100_1x16_20e_mmaction_clip_20200809.json) |
+
+注意:
+
+1. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。
+   依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。
+   如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。
+2. 对于 **特征** 这一列,`cuhk_mean_100` 表示所使用的特征为利用 [anet2016-cuhk](https://github.com/yjxiong/anet2016-cuhk) 代码库抽取的,被广泛使用的 CUHK ActivityNet 特征,`mmaction_video` 和 `mmaction_clip` 分别表示所使用的特征为利用 MMAction 抽取的视频级别 ActivityNet 预训练模型特征,以及视频片段级别 ActivityNet 预训练模型特征。
+
+对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 ActivityNet 特征部分。
+
+## 如何训练
+
+用户可以使用以下指令进行模型训练。
+
+```shell
+python tools/train.py ${CONFIG_FILE} [optional arguments]
+```
+
+例如:
+
+1. 在 ActivityNet 特征上训练 BSN(TEM) 模型。
+
+    ```shell
+    python tools/train.py configs/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature.py
+    ```
+
+2. 基于 PGM 的结果训练 BSN(PEM) 模型。
+
+    ```shell
+    python tools/train.py configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py
+    ```
+
+更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。
+
+## 如何进行推理
+
+用户可以使用以下指令进行模型推理。
+
+1. 推理 TEM 模型。
+
+    ```shell
+    # 注意:该命令无法进行指标验证
+    python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments]
+    ```
+
+2. 推理 PGM 模型。
+
+    ```shell
+    python tools/bsn_proposal_generation.py ${CONFIG_FILE} [--mode ${MODE}]
+    ```
+
+3. 推理 PEM 模型。
+
+    ```shell
+    python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments]
+    ```
+
+例如:
+
+1. 利用预训练模型进行 BSN(TEM) 模型的推理。
+
+    ```shell
+    python tools/test.py configs/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature.py checkpoints/SOME_CHECKPOINT.pth
+    ```
+
+2. 利用预训练模型进行 BSN(PGM) 模型的推理。
+
+    ```shell
+    python tools/bsn_proposal_generation.py configs/localization/bsn/bsn_pgm_400x100_activitynet_feature.py --mode train
+    ```
+
+3. 推理 BSN(PEM) 模型,并计算 'AR@AN' 指标,输出结果文件。
+
+    ```shell
+    # 注意:如果需要进行指标验证,需确保测试数据的标注文件包含真实标签
+    python tools/test.py configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py checkpoints/SOME_CHECKPOINT.pth --eval AR@AN --out results.json
+    ```
+
+## 如何测试
+
+用户可以使用以下指令进行模型测试。(一个将三个阶段串联执行的示例脚本见下文)
+
+1. TEM
+
+    ```shell
+    # 注意:该命令无法进行指标验证
+    python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments]
+    ```
+
+2. PGM
+
+    ```shell
+    python tools/bsn_proposal_generation.py ${CONFIG_FILE} [--mode ${MODE}]
+    ```
+
+3. PEM
+
+    ```shell
+    python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments]
+    ```
+
+例如:
+
+1. 
在 ActivityNet 数据集上测试 TEM 模型。 + + ```shell + python tools/test.py configs/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature.py checkpoints/SOME_CHECKPOINT.pth + ``` + +2. 在 ActivityNet 数据集上测试 PGM 模型。 + + ```shell + python tools/bsn_proposal_generation.py configs/localization/bsn/bsn_pgm_400x100_activitynet_feature.py --mode test + ``` + +3. 测试 PEM 模型,并计算 'AR@AN' 指标,输出结果文件。 + + ```shell + python tools/test.py configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py checkpoints/SOME_CHECKPOINT.pth --eval AR@AN --out results.json + ``` + +注意: + +1. (可选项) 用户可以使用以下指令生成格式化的时序动作候选文件,该文件可被送入动作识别器中(目前只支持 SSN 和 P-GCN,不包括 TSN, I3D 等),以获得时序动作候选的分类结果。 + + ```shell + python tools/data/activitynet/convert_proposal_format.py + ``` + +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/localization/ssn/README.md b/configs/localization/ssn/README.md index 8ae272880b..6da1152842 100644 --- a/configs/localization/ssn/README.md +++ b/configs/localization/ssn/README.md @@ -43,7 +43,7 @@ Example: train SSN model on thumos14 dataset. python tools/train.py configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py ``` -For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting) . +For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). ## Test @@ -60,4 +60,4 @@ Example: test BMN on ActivityNet feature dataset. python tools/test.py configs/localization/ssn/ssn_r50_450e_thumos14_rgb_test.py checkpoints/SOME_CHECKPOINT.pth --eval mAP ``` -For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset) . +For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). 
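作为上文 BSN 文档中三阶段测试流程的补充,下面给出一个将 TEM、PGM、PEM 依次串联执行的最小示例脚本。仅为示意:各配置文件路径取自上文 BSN 文档,其中 `checkpoints/tem.pth` 与 `checkpoints/pem.pth` 为假设的权重路径,需替换为实际训练得到的模型权重。

```shell
# BSN 三阶段串联示例(检查点路径为假设值,仅供参考)

# 1. TEM:推理得到时序边界概率序列(该步骤无法直接进行指标验证)
python tools/test.py configs/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature.py \
    checkpoints/tem.pth

# 2. PGM:基于 TEM 的输出生成时序动作候选及其特征
python tools/bsn_proposal_generation.py \
    configs/localization/bsn/bsn_pgm_400x100_activitynet_feature.py --mode test

# 3. PEM:为候选打分,计算 AR@AN 指标并输出 results.json
python tools/test.py configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py \
    checkpoints/pem.pth --eval AR@AN --out results.json
```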
diff --git a/configs/localization/ssn/README_zh-CN.md b/configs/localization/ssn/README_zh-CN.md
new file mode 100644
index 0000000000..bc3a13f451
--- /dev/null
+++ b/configs/localization/ssn/README_zh-CN.md
@@ -0,0 +1,64 @@
+# SSN
+
+## 简介
+
+[ALGORITHM]
+
+```BibTeX
+@InProceedings{Zhao_2017_ICCV,
+author = {Zhao, Yue and Xiong, Yuanjun and Wang, Limin and Wu, Zhirong and Tang, Xiaoou and Lin, Dahua},
+title = {Temporal Action Detection With Structured Segment Networks},
+booktitle = {Proceedings of the IEEE International Conference on Computer Vision (ICCV)},
+month = {Oct},
+year = {2017}
+}
+```
+
+## 模型库
+
+| 配置文件 | GPU 数量 | 主干网络 | 预训练 | mAP@0.3 | mAP@0.4 | mAP@0.5 | 参考代码的 mAP@0.3 | 参考代码的 mAP@0.4 | 参考代码的 mAP@0.5 | GPU 显存占用 (M) | ckpt | log | json | 参考代码的 ckpt | 参考代码的 json |
+|:-:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:-:|:-:|:-:|:-:|---|:--:|:--:|
+|[ssn_r50_450e_thumos14_rgb](/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py) |8| ResNet50 | ImageNet |29.37|22.15|15.69|[27.61](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)|[21.28](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)|[14.57](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)|6352|[ckpt](https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/ssn_r50_450e_thumos14_rgb_20201012-1920ab16.pth)| [log](https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/20201005_144656.log)| [json](https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/20201005_144656.log.json)| [ckpt](https://download.openmmlab.com/mmaction/localization/ssn/mmaction_reference/ssn_r50_450e_thumos14_rgb_ref/ssn_r50_450e_thumos14_rgb_ref_20201014-b6f48f68.pth)| [json](https://download.openmmlab.com/mmaction/localization/ssn/mmaction_reference/ssn_r50_450e_thumos14_rgb_ref/20201008_103258.log.json)|
+
+注意:
+
+1. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。
+   依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。
+   如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。
+2. 由于 SSN 在训练和测试阶段使用不同的结构化时序金字塔池化方法(structured temporal pyramid pooling methods),请分别参考 [ssn_r50_450e_thumos14_rgb_train](/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py) 和 [ssn_r50_450e_thumos14_rgb_test](/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_test.py)。
+3. MMAction2 使用 TAG 的时序动作候选进行 SSN 模型的精度验证。关于数据准备的更多细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 准备 thumos14 的 TAG 时序动作候选。
+4. 参考代码的 SSN 模型是和 MMAction2 一样在 `ResNet50` 主干网络上验证的。注意,这里 SSN 的初始设置与原代码库的 `BNInception` 主干网络的设置相同。
+
+## 如何训练
+
+用户可以使用以下指令进行模型训练。
+
+```shell
+python tools/train.py ${CONFIG_FILE} [optional arguments]
+```
+
+例如:在 thumos14 数据集上训练 SSN 模型。
+
+```shell
+python tools/train.py configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py
+```
+
+更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。
+
+## 如何测试
+
+用户可以使用以下指令进行模型测试。
+
+```shell
+python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments]
+```
+
+例如:在 thumos14 数据集上测试 SSN 模型。
+
+```shell
+# 注意:如果需要进行指标验证,需确保测试数据的标注文件包含真实标签
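+# SOME_CHECKPOINT.pth 为待评估的 SSN 模型权重文件(占位路径,需替换为实际训练得到的权重)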
+python tools/test.py configs/localization/ssn/ssn_r50_450e_thumos14_rgb_test.py checkpoints/SOME_CHECKPOINT.pth --eval mAP +``` + +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/c3d/README_zh-CN.md b/configs/recognition/c3d/README_zh-CN.md new file mode 100644 index 0000000000..ef5bda2330 --- /dev/null +++ b/configs/recognition/c3d/README_zh-CN.md @@ -0,0 +1,69 @@ +# C3D + +## 简介 + +[ALGORITHM] + +```BibTeX +@ARTICLE{2014arXiv1412.0767T, +author = {Tran, Du and Bourdev, Lubomir and Fergus, Rob and Torresani, Lorenzo and Paluri, Manohar}, +title = {Learning Spatiotemporal Features with 3D Convolutional Networks}, +keywords = {Computer Science - Computer Vision and Pattern Recognition}, +year = 2014, +month = dec, +eid = {arXiv:1412.0767} +} +``` + +## 模型库 + +### UCF-101 + +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 测试方案| 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log | json | +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[c3d_sports1m_16x1x1_45e_ucf101_rgb.py](/configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb.py)|128x171|8| c3d | sports1m | 83.27 | 95.90 | 10 clips x 1 crop | x | 6053 | [ckpt](https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/c3d_sports1m_16x1x1_45e_ucf101_rgb_20201021-26655025.pth)|[log](https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/20201021_140429.log)|[json](https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/20201021_140429.log.json)| + +注意: + +1. C3D 的原论文使用 UCF-101 的数据均值进行数据正则化,并且使用 SVM 进行视频分类。MMAction2 使用 ImageNet 的 RGB 均值进行数据正则化,并且使用线性分类器。 +2. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 + 依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。 + 如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。 +3. 这里的 **推理时间** 是根据 [基准测试脚本](/tools/analysis/benchmark.py) 获得的,采用测试时的采帧策略,且只考虑模型的推理时间, + 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 + +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 UCF-101 部分。 + +## 如何训练 + +用户可以使用以下指令进行模型训练。 + +```shell +python tools/train.py ${CONFIG_FILE} [optional arguments] +``` + +例如:以一个确定性的训练方式,辅以定期的验证过程进行 C3D 模型在 UCF-101 数据集上的训练。 + +```shell +python tools/train.py configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb.py \ + --validate --seed 0 --deterministic +``` + +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 + +## 如何测试 + +用户可以使用以下指令进行模型测试。 + +```shell +python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] +``` + +例如:在 UCF-101 数据集上测试 C3D 模型,并将结果导出为一个 json 文件。 + +```shell +python tools/test.py configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb.py \ + checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy +``` + +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/csn/README.md b/configs/recognition/csn/README.md index 34e9c1189d..2e0e4f905c 100644 --- a/configs/recognition/csn/README.md +++ b/configs/recognition/csn/README.md @@ -43,7 +43,6 @@ Notes: e.g., lr=0.01 for 4 GPUs x 2 video/gpu and lr=0.08 for 16 GPUs x 4 video/gpu. 2. 
The **inference_time** is got by this [benchmark script](/tools/analysis/benchmark.py), where we use the sampling frames strategy of the test setting and only care about the model inference time, not including the IO time and pre-processing time. For each setting, we use 1 gpu and set batch size (videos per gpu) to 1 to calculate the inference time. -3. The values in columns named after "reference" are the results got by training on the original repo, using the same model settings. For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). diff --git a/configs/recognition/csn/README_zh-CN.md b/configs/recognition/csn/README_zh-CN.md new file mode 100644 index 0000000000..594427316f --- /dev/null +++ b/configs/recognition/csn/README_zh-CN.md @@ -0,0 +1,83 @@ +# CSN + +## 简介 + +[ALGORITHM] + +```BibTeX +@inproceedings{inproceedings, +author = {Wang, Heng and Feiszli, Matt and Torresani, Lorenzo}, +year = {2019}, +month = {10}, +pages = {5551-5560}, +title = {Video Classification With Channel-Separated Convolutional Networks}, +doi = {10.1109/ICCV.2019.00565} +} +``` + +[OTHERS] + +```BibTeX +@inproceedings{ghadiyaram2019large, + title={Large-scale weakly-supervised pre-training for video action recognition}, + author={Ghadiyaram, Deepti and Tran, Du and Mahajan, Dhruv}, + booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition}, + pages={12046--12055}, + year={2019} +} +``` + +## 模型库 + +### Kinetics-400 + +|配置文件 | 分辨率 | GPU 数量 | 主干网络 |预训练| top1 准确率| top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M)| ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py](/configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py)|短边 320|8x4| ResNet152 | IG65M|80.14|94.93|x|8517|[ckpt](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20200803-fc66ce8d.pth)|[log](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/20200728_031952.log)|[json](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/20200728_031952.log.json)| +|[ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py](/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py)|短边 320|8x4| ResNet152 | IG65M|82.76|95.68|x|8516|[ckpt](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb_20200812-9037a758.pth)|[log](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log)|[json](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log.json)| + +注意: + +1. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 + 依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。 + 如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。 +2. 
这里的 **推理时间** 是根据 [基准测试脚本](/tools/analysis/benchmark.py) 获得的,采用测试时的采帧策略,且只考虑模型的推理时间,
+   并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。
+
+对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。
+
+## 如何训练
+
+用户可以使用以下指令进行模型训练。
+
+```shell
+python tools/train.py ${CONFIG_FILE} [optional arguments]
+```
+
+例如:以一个确定性的训练方式,辅以定期的验证过程进行 CSN 模型在 Kinetics400 数据集上的训练。
+
+```shell
+python tools/train.py configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py \
+    --work-dir work_dirs/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb \
+    --validate --seed 0 --deterministic
+```
+
+更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。
+
+## 如何测试
+
+用户可以使用以下指令进行模型测试。
+
+```shell
+python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments]
+```
+
+例如:在 Kinetics400 数据集上测试 CSN 模型,并将结果导出为一个 json 文件。
+
+```shell
+python tools/test.py configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py \
+    checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy mean_class_accuracy \
+    --out result.json --average-clips prob
+```
+
+更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。
diff --git a/configs/recognition/i3d/README_zh-CN.md b/configs/recognition/i3d/README_zh-CN.md
new file mode 100644
index 0000000000..ab7dbee769
--- /dev/null
+++ b/configs/recognition/i3d/README_zh-CN.md
@@ -0,0 +1,90 @@
+# I3D
+
+## 简介
+
+[ALGORITHM]
+
+```BibTeX
+@inproceedings{inproceedings,
+  author = {Carreira, J. and Zisserman, Andrew},
+  year = {2017},
+  month = {07},
+  pages = {4724-4733},
+  title = {Quo Vadis, Action Recognition? A New Model and the Kinetics Dataset},
+  doi = {10.1109/CVPR.2017.502}
+}
+```
+
+[BACKBONE]
+
+```BibTeX
+@article{NonLocal2018,
+  author = {Xiaolong Wang and Ross Girshick and Abhinav Gupta and Kaiming He},
+  title = {Non-local Neural Networks},
+  journal = {CVPR},
+  year = {2018}
+}
+```
+
+## 模型库
+
+### Kinetics-400
+
+|配置文件 | 分辨率 | GPU 数量 | 主干网络 |预训练| top1 准确率| top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M)| ckpt | log| json|
+|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
+|[i3d_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py) |340x256|8| ResNet50|ImageNet |72.68|90.78|1.7 (320x3 frames)| 5170|[ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/i3d_r50_32x2x1_100e_kinetics400_rgb_20200614-c25ef9a4.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/20200614_060456.log)| [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/20200614_060456.log.json)|
+|[i3d_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py) |短边 256|8| ResNet50|ImageNet | 73.27|90.92|x|5170|[ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth)|[log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/20200725_031555.log)|[json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/20200725_031555.log.json)|
+|[i3d_r50_video_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb.py)|短边 256p|8| ResNet50 |ImageNet|72.85 |90.75
|x|5170|[ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/i3d_r50_video_32x2x1_100e_kinetics400_rgb_20200826-e31c6f52.pth)|[log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/20200706_143014.log)|[json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/20200706_143014.log.json)| +|[i3d_r50_dense_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb.py) |340x256|8x2| ResNet50| ImageNet|72.77|90.57|1.7 (320x3 frames)| 5170| [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/i3d_r50_dense_32x2x1_100e_kinetics400_rgb_20200616-2bbb4361.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/20200616_230011.log)| [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/20200616_230011.log.json)| +|[i3d_r50_dense_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb.py) |短边 256|8| ResNet50| ImageNet|73.48|91.00|x| 5170| [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb_20200725-24eb54cc.pth)|[log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/20200725_031604.log)|[json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/20200725_031604.log.json)| +|[i3d_r50_lazy_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_lazy_32x2x1_100e_kinetics400_rgb.py) |340x256|8| ResNet50 |ImageNet|72.32|90.72|1.8 (320x3 frames)| 5170| [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/i3d_r50_fast_32x2x1_100e_kinetics400_rgb_20200612-000e4d2a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/20200612_233836.log)| [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/20200612_233836.log.json)| +|[i3d_r50_lazy_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_lazy_32x2x1_100e_kinetics400_rgb.py) |短边 256|8| ResNet50| ImageNet|73.24|90.99|x| 5170| [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb_20200817-4e90d1d5.pth)| [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/20200725_031457.log) | [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/20200725_031457.log.json) | +|[i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb.py)|短边 256p|8x4| ResNet50 
|ImageNet|74.71|91.81|x|6438|[ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb_20200813-6e6aef1b.pth)|[log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034054.log)|[json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034054.log.json)| +|[i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb.py)|短边 256p|8x4| ResNet50 |ImageNet|73.37|91.26|x|4944|[ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb_20200815-17f84aa2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034909.log)|[json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034909.log.json)| +|[i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb.py)|短边 256p|8x4| ResNet50 |ImageNet|73.92|91.59|x|4832|[ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb_20200814-7c30d5bb.pth)|[log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/20200814_044208.log)|[json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/20200814_044208.log.json)| + +注意: + +1. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 + 依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。 + 如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。 +2. 
这里的 **推理时间** 是根据 [基准测试脚本](/tools/analysis/benchmark.py) 获得的,采用测试时的采帧策略,且只考虑模型的推理时间, + 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 + +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。 + +## 如何训练 + +用户可以使用以下指令进行模型训练。 + +```shell +python tools/train.py ${CONFIG_FILE} [optional arguments] +``` + +例如:以一个确定性的训练方式,辅以定期的验证过程进行 I3D 模型在 Kinetics400 数据集上的训练。 + +```shell +python tools/train.py configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py \ + --work-dir work_dirs/i3d_r50_32x2x1_100e_kinetics400_rgb \ + --validate --seed 0 --deterministic +``` + +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 + +## 如何测试 + +用户可以使用以下指令进行模型测试。 + +```shell +python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] +``` + +例如:在 Kinetics400 数据集上测试 I3D 模型,并将结果导出为一个 json 文件。 + +```shell +python tools/test.py configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py \ + checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy mean_class_accuracy \ + --out result.json --average-clips prob +``` + +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/r2plus1d/README_zh-CN.md b/configs/recognition/r2plus1d/README_zh-CN.md new file mode 100644 index 0000000000..c9f6ca6f01 --- /dev/null +++ b/configs/recognition/r2plus1d/README_zh-CN.md @@ -0,0 +1,72 @@ +# R2plus1D + +## 简介 + +[ALGORITHM] + +```BibTeX +@inproceedings{tran2018closer, + title={A closer look at spatiotemporal convolutions for action recognition}, + author={Tran, Du and Wang, Heng and Torresani, Lorenzo and Ray, Jamie and LeCun, Yann and Paluri, Manohar}, + booktitle={Proceedings of the IEEE conference on Computer Vision and Pattern Recognition}, + pages={6450--6459}, + year={2018} +} +``` + +## 模型库 + +### Kinetics-400 + +|配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练| top1 准确率| top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[r2plus1d_r34_8x8x1_180e_kinetics400_rgb](/configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py) | 短边 256|8x4| ResNet34|None |67.30|87.65|x|5019|[ckpt](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb_20200729-aa94765e.pth)|[log](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb/20200728_021421.log)|[json](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb/20200728_021421.log.json)| +|[r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb](/configs/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb.py) | 短边 256|8| ResNet34|None |67.3|87.8|x|5019|[ckpt](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb_20200826-ab35a529.pth)|[log](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb/20200724_201360.log.json)|[json](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb/20200724_201360.log)| +|[r2plus1d_r34_8x8x1_180e_kinetics400_rgb](/configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py) | 短边 320|8x2| ResNet34|None |68.68|88.36|1.6 (80x3 
frames)|5019|[ckpt](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_8x8x1_180e_kinetics400_rgb_20200618-3fce5629.pth)| [log](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r21d_8x8.log)| [json](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_8x8_69.58_88.36.log.json)| +|[r2plus1d_r34_32x2x1_180e_kinetics400_rgb](/configs/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb.py) |短边 320|8x2| ResNet34|None |74.60|91.59|0.5 (320x3 frames)|12975| [ckpt](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r2plus1d_r34_32x2x1_180e_kinetics400_rgb_20200618-63462eb3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r21d_32x2.log)| [json](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r2plus1d_r34_32x2_74.6_91.6.log.json)| + +注意: + +1. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 + 依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。 + 如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。 +2. 这里的 **推理时间** 是根据 [基准测试脚本](/tools/analysis/benchmark.py) 获得的,采用测试时的采帧策略,且只考虑模型的推理时间, + 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 + +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。 + +## 如何训练 + +用户可以使用以下指令进行模型训练。 + +```shell +python tools/train.py ${CONFIG_FILE} [optional arguments] +``` + +例如:以一个确定性的训练方式,辅以定期的验证过程进行 R(2+1)D 模型在 Kinetics400 数据集上的训练。 + +```shell +python tools/train.py configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py \ + --work-dir work_dirs/r2plus1d_r34_3d_8x8x1_180e_kinetics400_rgb \ + --validate --seed 0 --deterministic +``` + +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 + +## 如何测试 + +用户可以使用以下指令进行模型测试。 + +```shell +python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] +``` + +例如:在 Kinetics400 数据集上测试 R(2+1)D 模型,并将结果导出为一个 json 文件。 + +```shell +python tools/test.py configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py \ + checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy mean_class_accuracy \ + --out result.json --average-clips=prob +``` + +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/slowfast/README_zh-CN.md b/configs/recognition/slowfast/README_zh-CN.md new file mode 100644 index 0000000000..15e685c6a8 --- /dev/null +++ b/configs/recognition/slowfast/README_zh-CN.md @@ -0,0 +1,76 @@ +# SlowFast + +## 简介 + +[ALGORITHM] + +```BibTeX +@inproceedings{feichtenhofer2019slowfast, + title={Slowfast networks for video recognition}, + author={Feichtenhofer, Christoph and Fan, Haoqi and Malik, Jitendra and He, Kaiming}, + booktitle={Proceedings of the IEEE international conference on computer vision}, + pages={6202--6211}, + year={2019} +} +``` + +## 模型库 + +### Kinetics-400 + +|配置文件 | 分辨率 | GPU 数量 | 主干网络 |预训练| top1 准确率| top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M)| ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) |短边256|8x4| ResNet50|None 
|74.75|91.73|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb_20200728-145f1097.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log.json)| +|[slowfast_r50_video_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py) |短边256|8| ResNet50|None |74.34|91.58|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/slowfast_r50_video_4x16x1_256e_kinetics400_rgb_20200826-f85b90c5.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log.json)| +|[slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) |短边320|8x3| ResNet50|None |75.64|92.3|1.6 ((32+4)x10x3 frames)|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_256e_kinetics400_rgb_20200704-bcde7ed7.pth)| [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/20200704_232901.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/20200704_232901.log.json)| +|[slowfast_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |短边256|8x4| ResNet50 |None |75.61|92.34|x|9062|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb_20200810-863812c2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log.json)| +|[slowfast_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |短边320|8x3| ResNet50 |None|76.94|92.8|1.3 ((32+8)x10x3 frames)|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/slowfast_r50_8x8x1_256e_kinetics400_rgb_20200716-73547d2b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log.json)| +|[slowfast_r101_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py) |短边256|8x1| ResNet101 + ResNet50 |None|76.69|93.07||16628| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/slowfast_r101_4x16x1_256e_kinetics400_rgb_20210218-d8b58813.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log)| 
[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log.json)|
+|[slowfast_r152_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r152_r50_4x16x1_256e_kinetics400_rgb.py) |短边256|8x1| ResNet152 + ResNet50 |None|77.13|93.20||10077| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/slowfast_r152_4x16x1_256e_kinetics400_rgb_20210122-bdeb6b87.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log.json)|
+
+注意:
+
+1. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。
+   依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。
+   如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。
+2. 这里的 **推理时间** 是根据 [基准测试脚本](/tools/analysis/benchmark.py) 获得的,采用测试时的采帧策略,且只考虑模型的推理时间,
+   并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。
+
+对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。
+
+## 如何训练
+
+用户可以使用以下指令进行模型训练。
+
+```shell
+python tools/train.py ${CONFIG_FILE} [optional arguments]
+```
+
+例如:以一个确定性的训练方式,辅以定期的验证过程进行 SlowFast 模型在 Kinetics400 数据集上的训练。
+
+```shell
+python tools/train.py configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py \
+    --work-dir work_dirs/slowfast_r50_4x16x1_256e_kinetics400_rgb \
+    --validate --seed 0 --deterministic
+```
+
+更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。
+
+## 如何测试
+
+用户可以使用以下指令进行模型测试。
+
+```shell
+python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments]
+```
+
+例如:在 Kinetics400 数据集上测试 SlowFast 模型,并将结果导出为一个 json 文件。
+
+```shell
+python tools/test.py configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py \
+    checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy mean_class_accuracy \
+    --out result.json --average-clips=prob
+```
+
+更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。
diff --git a/configs/recognition/slowonly/README_zh-CN.md b/configs/recognition/slowonly/README_zh-CN.md
new file mode 100644
index 0000000000..1827dd6214
--- /dev/null
+++ b/configs/recognition/slowonly/README_zh-CN.md
@@ -0,0 +1,118 @@
+# SlowOnly
+
+## 简介
+
+[ALGORITHM]
+
+```BibTeX
+@inproceedings{feichtenhofer2019slowfast,
+  title={Slowfast networks for video recognition},
+  author={Feichtenhofer, Christoph and Fan, Haoqi and Malik, Jitendra and He, Kaiming},
+  booktitle={Proceedings of the IEEE international conference on computer vision},
+  pages={6202--6211},
+  year={2019}
+}
+```
+
+## 模型库
+
+### Kinetics-400
+
+|配置文件 | 分辨率 | GPU 数量 | 主干网络 |预训练| top1 准确率| top5 准确率 | 推理时间 (video/s) | 
GPU 显存占用 (M)| ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[slowonly_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py)|short-side 256|8x4| ResNet50 | None |72.76|90.51|x|3168|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb_20200820-bea7701f.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log.json)| +|[slowonly_r50_video_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py)|short-side 320|8x2| ResNet50 | None |72.90|90.82|x|8472|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014-c9cdc656.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014.json)| +|[slowonly_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py) |short-side 256|8x4| ResNet50 | None |74.42|91.49|x|5820|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb_20200820-75851a7d.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/20200817_003320.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/20200817_003320.log.json)| +|[slowonly_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py)|short-side 320|8x2| ResNet50 | None |73.02|90.77|4.0 (40x3 frames)|3168|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth)| [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/so_4x16.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16_73.02_90.77.log.json)| +|[slowonly_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py) |short-side 320|8x3| ResNet50 | None |74.93|91.92|2.3 (80x3 frames)|5820| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/slowonly_r50_8x8x1_256e_kinetics400_rgb_20200703-a79c555a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/so_8x8.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/slowonly_r50_8x8_74.93_91.92.log.json)| +|[slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb.py)|short-side 
320|8x2| ResNet50 | ImageNet |73.39|91.12|x|3168|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912-1e8fc736.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912.json)| +|[slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb.py) |short-side 320|8x4| ResNet50 | ImageNet |75.55|92.04|x|5820|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912-3f9ce182.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912.json)| +|[slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb.py) | short-side 320 | 8x2 | ResNet50 | ImageNet | 74.54 | 91.73 | x | 4435 |[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb_20210308-0d6e5a69.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/20210305_152630.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/20210305_152630.log.json)| +|[slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb.py) | short-side 320 | 8x4 | ResNet50 | ImageNet | 76.07 | 92.42 | x | 8895 |[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb_20210308-e8dd9e82.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/20210308_212250.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/20210308_212250.log.json)| +|[slowonly_r50_4x16x1_256e_kinetics400_flow](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow.py)|short-side 320|8x2| ResNet50 | ImageNet |61.79|83.62|x|8450| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_20200704-decb8568.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_61.8_83.6.log) | 
[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_61.8_83.6.log.json)|
+|[slowonly_r50_8x8x1_196e_kinetics400_flow](/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow.py) |short-side 320|8x4| ResNet50 | ImageNet |65.76|86.25|x|8455| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_256e_kinetics400_flow_20200704-6b384243.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_196e_kinetics400_flow_65.8_86.3.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_196e_kinetics400_flow_65.8_86.3.log.json)|
+
+### Kinetics-400 数据基准测试
+
+在数据基准测试中,比较三种不同的数据预处理方法:(1) 视频分辨率为 340x256, (2) 视频分辨率为短边 320px, (3) 视频分辨率为短边 256px。
+
+| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 输入 | 预训练 | top1 准确率 | top5 准确率 | 测试方案 | ckpt | log | json |
+| :----------------------------------------------------------- | :------------: | :--: | :------: | :---: | :------: | :------: | :------: | :----------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: |
+| [slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb.py) | 340x256 | 8x2 | ResNet50 | 4x16 | None | 71.61 | 90.05 | 10 clips x 3 crops | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb_20200803-dadca1a3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb_20200803.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb_20200803.json) |
+| [slowonly_r50_randomresizedcrop_320p_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_320p_4x16x1_256e_kinetics400_rgb.py) | short-side 320 | 8x2 | ResNet50 | 4x16 | None | 73.02 | 90.77 | 10 clips x 3 crops | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/so_4x16.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16_73.02_90.77.log.json) |
+| [slowonly_r50_randomresizedcrop_256p_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_256p_4x16x1_256e_kinetics400_rgb.py) | short-side 256 | 8x4 | ResNet50 | 4x16 | None | 72.76 | 90.51 | 10 clips x 3 crops |
[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb_20200820-bea7701f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log.json) | + +### Kinetics-400 OmniSource Experiments + +| 配置文件 | 分辨率 | 主干网络 | 预训练 | w. OmniSource | top1 准确率 | top5 准确率 | ckpt | log | json | +| :----------------------------------------------------------: | :------------: | :-------: | :------: | :----------------: | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [slowonly_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py) | short-side 320 | ResNet50 | None | :x: | 73.0 | 90.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/so_4x16.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16_73.02_90.77.log.json) | +| x | x | ResNet50 | None | :heavy_check_mark: | 76.8 | 92.5 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r50_omni_4x16x1_kinetics400_rgb_20200926-51b1f7ea.pth) | x | x | +| [slowonly_r101_8x8x1_196e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r101_8x8x1_196e_kinetics400_rgb.py) | x | ResNet101 | None | :x: | 76.5 | 92.7 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_without_omni_8x8x1_kinetics400_rgb_20200926-0c730aef.pth) | x | x | +| x | x | ResNet101 | None | :heavy_check_mark: | 80.4 | 94.4 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_omni_8x8x1_kinetics400_rgb_20200926-b5dbb701.pth) | x | x | + +### Kinetics-600 + +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | ckpt | log | json | +| :----------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [slowonly_r50_video_8x8x1_256e_kinetics600_rgb](/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb.py) | short-side 256 | 8x4 | ResNet50 | None | 77.5 | 93.7 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb/slowonly_r50_video_8x8x1_256e_kinetics600_rgb_20201015-81e5153e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb/slowonly_r50_video_8x8x1_256e_kinetics600_rgb_20201015.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb/slowonly_r50_video_8x8x1_256e_kinetics600_rgb_20201015.json) | + +### Kinetics-700 + +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | ckpt | log | json 
| +| :----------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [slowonly_r50_video_8x8x1_256e_kinetics700_rgb](/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb.py) | short-side 256 | 8x4 | ResNet50 | None | 65.0 | 86.1 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb/slowonly_r50_video_8x8x1_256e_kinetics700_rgb_20201015-9250f662.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb/slowonly_r50_video_8x8x1_256e_kinetics700_rgb_20201015.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb/slowonly_r50_video_8x8x1_256e_kinetics700_rgb_20201015.json) | + +### GYM99 + +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | 类别平均准确率 | ckpt | log | json | +| :----------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb.py) | short-side 256 | 8x2 | ResNet50 | ImageNet | 79.3 | 70.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111-a9c34b54.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111.json) | +| [slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow](/configs/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow.py) | short-side 256 | 8x2 | ResNet50 | Kinetics | 80.3 | 71.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111-66ecdb3c.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.json) | +| 1: 1 融合 | | | | | 83.7 | 74.8 | | | | + +注意: + +1. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 + 依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。 + 如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。 +2. 
这里的 **推理时间** 是根据 [基准测试脚本](/tools/analysis/benchmark.py) 获得的,采用测试时的采帧策略,且只考虑模型的推理时间,
+   并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。
+
+对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。
+
+## 如何训练
+
+用户可以使用以下指令进行模型训练。
+
+```shell
+python tools/train.py ${CONFIG_FILE} [optional arguments]
+```
+
+例如:以一个确定性的训练方式,辅以定期的验证过程进行 SlowOnly 模型在 Kinetics400 数据集上的训练。
+
+```shell
+python tools/train.py configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py \
+    --work-dir work_dirs/slowonly_r50_4x16x1_256e_kinetics400_rgb \
+    --validate --seed 0 --deterministic
+```
+
+更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。
+
+## 如何测试
+
+用户可以使用以下指令进行模型测试。
+
+```shell
+python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments]
+```
+
+例如:在 Kinetics400 数据集上测试 SlowOnly 模型,并将结果导出为一个 json 文件。
+
+```shell
+python tools/test.py configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py \
+    checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy mean_class_accuracy \
+    --out result.json --average-clips=prob
+```
+
+更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。
diff --git a/configs/recognition/tanet/README_zh-CN.md b/configs/recognition/tanet/README_zh-CN.md
new file mode 100644
index 0000000000..1a46617d0f
--- /dev/null
+++ b/configs/recognition/tanet/README_zh-CN.md
@@ -0,0 +1,69 @@
+# TANet
+
+## 简介
+
+[ALGORITHM]
+
+```latex
+@article{liu2020tam,
+  title={TAM: Temporal Adaptive Module for Video Recognition},
+  author={Liu, Zhaoyang and Wang, Limin and Wu, Wayne and Qian, Chen and Lu, Tong},
+  journal={arXiv preprint arXiv:2005.06803},
+  year={2020}
+}
+```
+
+## 模型库
+
+### Kinetics-400
+
+|配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M)| ckpt | log| json|
+|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
+|[tanet_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb.py) |短边 320|8| TANet | ImageNet |76.28 | 92.60 |[76.22](https://github.com/liu-zhy/temporal-adaptive-module/blob/master/scripts/test_tam_kinetics_rgb_8f.sh)|[92.53](https://github.com/liu-zhy/temporal-adaptive-module/blob/master/scripts/test_tam_kinetics_rgb_8f.sh) | x | 7124 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219-032c8e94.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.log)| [json](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.json)|
+
+注意:
+
+1. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。
+   依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。
+   如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu(计算方式可参考下方的示意代码)。
+2. 这里的 **推理时间** 是根据 [基准测试脚本](/tools/analysis/benchmark.py) 获得的,采用测试时的采帧策略,且只考虑模型的推理时间,
+   并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。
+3. 参考代码一栏中的结果,是使用作者提供的模型权重文件,在相同的模型设置下,于 MMAction2 所用的数据集上测试得到的。作者提供的模型权重文件可从 [这里](https://drive.google.com/drive/folders/1sFfmP3yrfc7IzRshEELOby7-aEoymIFL?usp=sharing) 下载。
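+
+上述线性缩放规则可以用一小段 Python 直观表示(仅为极简示意:`scale_lr` 函数及其参数命名为本文档的假设,并非 MMAction2 提供的接口;数值取自上文注意事项中的示例):
+
+```python
+def scale_lr(base_lr=0.01, base_videos=8, num_gpus=16, videos_per_gpu=4):
+    """按总批大小(GPU 数量 x 每块 GPU 处理的视频个数)等比例缩放学习率。"""
+    return base_lr * (num_gpus * videos_per_gpu) / base_videos
+
+# 基准:lr=0.01 对应 4 GPUs x 2 video/gpu,即总批大小为 8
+print(scale_lr(num_gpus=16, videos_per_gpu=4))  # 输出 0.08,与上文示例一致
+```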
+
+对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。
+
+## 如何训练
+
+用户可以使用以下指令进行模型训练。
+
+```shell
+python tools/train.py ${CONFIG_FILE} [optional arguments]
+```
+
+例如:以一个确定性的训练方式,辅以定期的验证过程进行 TANet 模型在 Kinetics400 数据集上的训练。
+
+```shell
+python tools/train.py configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb.py \
+    --work-dir work_dirs/tanet_r50_dense_1x1x8_100e_kinetics400_rgb \
+    --validate --seed 0 --deterministic
+```
+
+更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。
+
+## 如何测试
+
+用户可以使用以下指令进行模型测试。
+
+```shell
+python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments]
+```
+
+例如:在 Kinetics400 数据集上测试 TANet 模型,并将结果导出为一个 json 文件。
+
+```shell
+python tools/test.py configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb.py \
+    checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy mean_class_accuracy \
+    --out result.json
+```
+
+更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。
diff --git a/configs/recognition/tin/README_zh-CN.md b/configs/recognition/tin/README_zh-CN.md
new file mode 100644
index 0000000000..4aa1314df3
--- /dev/null
+++ b/configs/recognition/tin/README_zh-CN.md
@@ -0,0 +1,84 @@
+# TIN
+
+## 简介
+
+[ALGORITHM]
+
+```BibTeX
+@article{shao2020temporal,
+  title={Temporal Interlacing Network},
+  author={Hao Shao and Shengju Qian and Yu Liu},
+  year={2020},
+  journal={AAAI},
+}
+```
+
+## 模型库
+
+### Something-Something V1
+
+|配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | GPU 显存占用 (M)| ckpt | log| json|
+|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
+|[tin_r50_1x1x8_40e_sthv1_rgb](/configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py)|高 100|8x4| ResNet50 | ImageNet | 44.25 | 73.94 | 44.04 | 72.72 | 6181 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb/tin_r50_1x1x8_40e_sthv1_rgb_20200729-4a33db86.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb/20200729_034132.log) | [json](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb/20200729_034132.log.json) |
+
+### Something-Something V2
+
+|配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | GPU 显存占用 (M)| ckpt | log| json|
+|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
+|[tin_r50_1x1x8_40e_sthv2_rgb](/configs/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb.py)|高 240|8x4| ResNet50 | ImageNet | 56.70 | 83.62 | 56.48 | 83.45 | 6185 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb/tin_r50_1x1x8_40e_sthv2_rgb_20200912-b27a7337.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb/20200912_225451.log) | [json](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb/20200912_225451.log.json) |
+
+### Kinetics-400
+
+|配置文件 | 分辨率 | GPU 数量 | 主干网络| 预训练 | top1 准确率| top5 准确率 | GPU 显存占用 (M) | ckpt | log| json|
+|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
+|[tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb.py)|短边 256|8x4| ResNet50 | TSM-Kinetics400 | 70.89 | 89.89 | 6187 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb_20200810-4a146a70.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/20200809_142447.log) | [json](https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/20200809_142447.log.json) |
+
+这里,MMAction2 使用 `finetune` 一词表示:TIN 模型是以在 Kinetics-400 上训练的 [TSM 模型](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20200607-af7fb746.pth) 为初始化权重,在 Kinetics-400 上微调得到的。
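+
+在配置文件层面,这类微调一般通过 `load_from` 字段加载初始化权重实现。下面是一个极简示意(假设直接复用上文给出的 TSM 权重链接;`load_from` 为 MMCV 风格配置的标准字段,具体写法请以仓库中对应的配置文件为准):
+
+```python
+# 示意:以 Kinetics-400 上训练的 TSM 模型权重初始化,再微调 TIN 模型
+load_from = 'https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20200607-af7fb746.pth'  # noqa: E501
+```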
+
+注意:
+
+1. 参考代码的结果是通过 [原始 repo](https://github.com/deepcs233/TIN/tree/1aacd0c4c30d5e1d334bf023e55b855b59f158db) 解决 [AverageMeter 相关问题](https://github.com/deepcs233/TIN/issues/4) 后训练得到的,该问题会导致错误的精度计算。
+2. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。
+   依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。
+   如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。
+3. 这里的 **推理时间** 是根据 [基准测试脚本](/tools/analysis/benchmark.py) 获得的,采用测试时的采帧策略,且只考虑模型的推理时间,
+   并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。
+4. 参考代码的结果是通过使用相同的模型配置在原来的代码库上训练得到的。
+
+对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400、Something-Something V1 及 Something-Something V2 部分。
+
+## 如何训练
+
+用户可以使用以下指令进行模型训练。
+
+```shell
+python tools/train.py ${CONFIG_FILE} [optional arguments]
+```
+
+例如:以一个确定性的训练方式,辅以定期的验证过程进行 TIN 模型在 Something-Something V1 数据集上的训练。
+
+```shell
+python tools/train.py configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py \
+    --work-dir work_dirs/tin_r50_1x1x8_40e_sthv1_rgb \
+    --validate --seed 0 --deterministic
+```
+
+更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。
+
+## 如何测试
+
+用户可以使用以下指令进行模型测试。
+
+```shell
+python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments]
+```
+
+例如:在 Something-Something V1 数据集上测试 TIN 模型,并将结果导出为一个 json 文件。
+
+```shell
+python tools/test.py configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py \
+    checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy mean_class_accuracy \
+    --out result.json
+```
+
+更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。
diff --git a/configs/recognition/tsm/README.md b/configs/recognition/tsm/README.md
index aae497f3c6..076ee9e6ae 100644
--- a/configs/recognition/tsm/README.md
+++ b/configs/recognition/tsm/README.md
@@ -32,7 +32,7 @@
 |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
 |[tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |340x256|8| ResNet50| ImageNet |70.24|89.56|[70.36](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|[89.49](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|74.0 (8x1 frames)| 7079 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20200607-af7fb746.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log)|
[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log.json)| |[tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet |70.59|89.52|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/tsm_r50_256p_1x1x8_50e_kinetics400_rgb_20200726-020785e2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log.json)| -|[tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py](configs/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet |70.48|89.40|x|x|x|7076|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219-bf96e6cc.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.json)| +|[tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py](/configs/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet |70.48|89.40|x|x|x|7076|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219-bf96e6cc.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.json)| |[tsm_r50_video_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet |70.25|89.66|[70.36](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|[89.49](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|74.0 (8x1 frames)| 7077 | [ckpt]( https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_1x1x8_100e_kinetics400_rgb_20200702-a77f4328.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log.json)| |[tsm_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py) |340x256|8x4| ResNet50 | ImageNet|72.9|90.44|[72.22](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#dense-sample)|[90.37](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#dense-sample)|11.5 (8x10 frames)| 7079 | 
[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/tsm_r50_dense_1x1x8_100e_kinetics400_rgb_20200626-91a54551.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20200626_213415.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20200626_213415.log.json)| |[tsm_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py) |short-side 256|8| ResNet50 | ImageNet|73.38|91.02|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb_20200727-e1e0c785.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/20200725_032043.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/20200725_032043.log.json)| @@ -101,10 +101,10 @@ test_pipeline = [ ] ``` -For more details on data preparation, you can refer to Kinetics400, Something-Something V1 and Something-Something V2 in [Data Preparation](/docs/data_preparation.md). - 5. When applying Mixup and CutMix, we use the hyper parameter `alpha=0.2`. +For more details on data preparation, you can refer to Kinetics400, Something-Something V1 and Something-Something V2 in [Data Preparation](/docs/data_preparation.md). + ## Train You can use the following command to train a model. diff --git a/configs/recognition/tsm/README_zh-CN.md b/configs/recognition/tsm/README_zh-CN.md new file mode 100644 index 0000000000..5e73057d0a --- /dev/null +++ b/configs/recognition/tsm/README_zh-CN.md @@ -0,0 +1,140 @@ +# TSM + +## 简介 + +[ALGORITHM] + +```BibTeX +@inproceedings{lin2019tsm, + title={TSM: Temporal Shift Module for Efficient Video Understanding}, + author={Lin, Ji and Gan, Chuang and Han, Song}, + booktitle={Proceedings of the IEEE International Conference on Computer Vision}, + year={2019} +} +``` + +[BACKBONE] + +```BibTeX +@article{NonLocal2018, + author = {Xiaolong Wang and Ross Girshick and Abhinav Gupta and Kaiming He}, + title = {Non-local Neural Networks}, + journal = {CVPR}, + year = {2018} +} +``` + +## 模型库 + +### Kinetics-400 + +|配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M)| ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |340x256|8| ResNet50| ImageNet |70.24|89.56|[70.36](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|[89.49](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|74.0 (8x1 frames)| 7079 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20200607-af7fb746.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log.json)| +|[tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet 
|70.59|89.52|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/tsm_r50_256p_1x1x8_50e_kinetics400_rgb_20200726-020785e2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log.json)| +|[tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py](/configs/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet |70.48|89.40|x|x|x|7076|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219-bf96e6cc.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.json)| +|[tsm_r50_video_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet |70.25|89.66|[70.36](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|[89.49](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|74.0 (8x1 frames)| 7077 | [ckpt]( https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_1x1x8_100e_kinetics400_rgb_20200702-a77f4328.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log.json)| +|[tsm_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py) |340x256|8x4| ResNet50 | ImageNet|72.9|90.44|[72.22](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#dense-sample)|[90.37](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#dense-sample)|11.5 (8x10 frames)| 7079 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/tsm_r50_dense_1x1x8_100e_kinetics400_rgb_20200626-91a54551.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20200626_213415.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20200626_213415.log.json)| +|[tsm_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py) |short-side 256|8| ResNet50 | 
ImageNet|73.38|91.02|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb_20200727-e1e0c785.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/20200725_032043.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/20200725_032043.log.json)| +|[tsm_r50_1x1x16_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py) |340x256|8| ResNet50| ImageNet |72.09|90.37|[70.67](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_16f.sh)|[89.98](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_16f.sh)|47.0 (16x1 frames)| 10404 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/tsm_r50_340x256_1x1x16_50e_kinetics400_rgb_20201011-2f27f229.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log.json)| +|[tsm_r50_1x1x16_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py) |short-side 256|8x4| ResNet50| ImageNet |71.89|90.73|x|x|x|10398|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/tsm_r50_256p_1x1x16_50e_kinetics400_rgb_20201010-85645c2a.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log.json)| +|[tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb.py)|short-side 320|8x4| ResNet50| ImageNet |72.03|90.25|71.81|90.36|x|8931|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb_20200724-f00f1336.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log.json)| +|[tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb.py)|short-side 320|8x4| ResNet50| ImageNet |70.70|89.90|x|x|x|10125|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb_20200816-b93fd297.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log.json)| +|[tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb.py)|short-side 320|8x4|ResNet50| ImageNet 
|71.60|90.34|x|x|x|8358|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb_20200724-d8ad84d2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log.json)| +|[tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py)|short-side 320|8|MobileNetV2| ImageNet |68.46|88.64|x|x|x|3385|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/tsm_mobilenetv2_dense_320p_1x1x8_100e_kinetics400_rgb_20210202-61135809.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log.json)| + +### Something-Something V1 + +|配置文件 | 分辨率 | GPU 数量 | 主干网络| 预训练 | top1 准确率 (efficient/accurate)| top5 准确率 (efficient/accurate)| 参考代码的 top1 准确率 (efficient/accurate)| 参考代码的 top5 准确率 (efficient/accurate)| GPU 显存占用 (M)| ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[tsm_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb.py) |height 100|8| ResNet50 | ImageNet| 45.58 / 47.70|75.02 / 76.12|[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7077| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/tsm_r50_1x1x8_50e_sthv1_rgb_20210203-01dce462.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/20210203_150227.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/20210203_150227.log.json)| +|[tsm_r50_flip_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb.py) |height 100|8| ResNet50 | ImageNet| 47.10 / 48.51|76.02 / 77.56|[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7077| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/tsm_r50_flip_1x1x8_50e_sthv1_rgb_20210203-12596f16.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/20210203_145829.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/20210203_145829.log.json)| +|[tsm_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb.py)|height 100|8| ResNet50 | ImageNet|47.62 / 49.28|76.63 / 77.82|[47.05 / 48.61](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[76.40 / 
77.96](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|10390|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb_20201010-17fa49f6.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/20201010_221240.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/20201010_221240.log.json)| +|[tsm_r101_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb.py)|height 100|8| ResNet50 | ImageNet|45.72 / 48.43|74.67 / 76.72|[46.64 / 48.13](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[75.40 / 77.31](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|9800|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb_20201010-43fedf2e.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/20201010_224055.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/20201010_224055.log.json)| + +### Something-Something V2 + +|配置文件 | 分辨率 | GPU 数量 | 主干网络| 预训练 | top1 准确率 (efficient/accurate)| top5 准确率 (efficient/accurate)| 参考代码的 top1 准确率 (efficient/accurate)| 参考代码的 top5 准确率 (efficient/accurate)| GPU 显存占用 (M)| ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[tsm_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py) |height 240|8| ResNet50| ImageNet |57.86 / 61.12|84.67 / 86.26|[57.98 / 60.69](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[84.57 / 86.28](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7069 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/tsm_r50_1x1x8_50e_sthv2_rgb_20200912-033c4ac6.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20200912_140737.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20200912_140737.log.json)| +|[tsm_r50_1x1x16_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb.py) |height 240|8| ResNet50| ImageNet |59.93 / 62.04|86.10 / 87.35|[58.90 / 60.98](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[85.29 / 86.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 10400| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/tsm_r50_1x1x16_50e_sthv2_rgb_20201010-16469c6f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20201010_224215.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20201010_224215.log.json)| +|[tsm_r101_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb.py) |height 240|8| ResNet101 | ImageNet|58.59 / 61.51|85.07 / 86.90|[58.89 / 61.36](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[85.14 / 
87.00](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 9784 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/tsm_r101_1x1x8_50e_sthv2_rgb_20201010-98cdedb8.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20201010_224100.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20201010_224100.log.json)| + +### MixUp & CutMix on Something-Something V1 + +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 (efficient/accurate) | top5 准确率 (efficient/accurate) | top1 准确率变化 (efficient/accurate) | top5 准确率变化 (efficient/accurate) | ckpt | log | json | +| :----------------------------------------------------------- | :--------: | :--: | :------: | :------: | :---------------------------: | :---------------------------: | :---------------------------------: | :---------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [tsm_r50_mixup_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 46.35 / 48.49 | 75.07 / 76.88 | +0.77 / +0.79 | +0.05 / +0.70 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb-9eca48e5.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.json) | +| [tsm_r50_cutmix_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 45.92 / 47.46 | 75.23 / 76.71 | +0.34 / -0.24 | +0.21 / +0.59 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb-34934615.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.json) | + +注意: + +1. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 + 依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。 + 如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。 +2. 这里的 **推理时间** 是根据 [基准测试脚本](/tools/analysis/benchmark.py) 获得的,采用测试时的采帧策略,且只考虑模型的推理时间, + 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 +3. 参考代码的结果是通过使用相同的模型配置在原来的代码库上训练得到的。对应的模型权重文件可从 [这里](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_reference_ckpt.rar) 下载。 +4. 对于 Something-Something 数据集,有两种测试方案:efficient(对应 center crop x 1 clip)和 accurate(对应 Three crop x 2 clip)。两种方案参考自 [原始代码库](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd)。 + MMAction2 使用 efficient 方案作为配置文件中的默认选择,用户可以通过以下方式转变为 accurate 方案: + +```python +... 
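+# 注:以下即 accurate 方案对应的 test_pipeline(Three crop x 2 clip),
+# 用其替换配置文件中默认的 efficient 方案 test_pipeline 即可生效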
+test_pipeline = [ + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=16, # 当使用 8 个 视频段时,设置 `num_clips = 8` + twice_sample=True, # 设置 `twice_sample=True` 用于 accurate 方案中的 Twice Sample + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + # dict(type='CenterCrop', crop_size=224), 用于 efficient 方案 + dict(type='ThreeCrop', crop_size=256), # 用于 accurate 方案 + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +``` + +5. 当采用 Mixup 和 CutMix 的数据增强时,使用超参 `alpha=0.2`。 + +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400, Something-Something V1 and Something-Something V2 部分。 + +## 如何训练 + +用户可以使用以下指令进行模型训练。 + +```shell +python tools/train.py ${CONFIG_FILE} [optional arguments] +``` + +例如:以一个确定性的训练方式,辅以定期的验证过程进行 TSM 模型在 Kinetics-400 数据集上的训练。 + +```shell +python tools/train.py configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py \ + --work-dir work_dirs/tsm_r50_1x1x8_100e_kinetics400_rgb \ + --validate --seed 0 --deterministic +``` + +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 + +## 如何测试 + +用户可以使用以下指令进行模型测试。 + +```shell +python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] +``` + +例如:在 Kinetics-400 数据集上测试 TSM 模型,并将结果导出为一个 json 文件。 + +```shell +python tools/test.py configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py \ + checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy mean_class_accuracy \ + --out result.json +``` + +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/tsn/README_zh-CN.md b/configs/recognition/tsn/README_zh-CN.md new file mode 100644 index 0000000000..a10d2aac0e --- /dev/null +++ b/configs/recognition/tsn/README_zh-CN.md @@ -0,0 +1,220 @@ +# TSN + +## 简介 + +[ALGORITHM] + +```BibTeX +@inproceedings{wang2016temporal, + title={Temporal segment networks: Towards good practices for deep action recognition}, + author={Wang, Limin and Xiong, Yuanjun and Wang, Zhe and Qiao, Yu and Lin, Dahua and Tang, Xiaoou and Van Gool, Luc}, + booktitle={European conference on computer vision}, + pages={20--36}, + year={2016}, + organization={Springer} +} +``` + +## 模型库 + +### UCF-101 + +|配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率| top5 准确率 | GPU 显存占用 (M) | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[tsn_r50_1x1x3_75e_ucf101_rgb](/configs/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb.py) [1] |8| ResNet50 | ImageNet |83.03|96.78|8332| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb/tsn_r50_1x1x3_75e_ucf101_rgb_20201023-d85ab600.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb/tsn_r50_1x1x3_75e_ucf101_rgb_20201023.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb/tsn_r50_1x1x3_75e_ucf101_rgb_20201023.json) | + +[1] 这里汇报的是 UCF-101 的 split1 部分的结果。 + +### HMDB51 + +|配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率| top5 准确率 | GPU 显存占用 (M) | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb.py)|8| ResNet50 | ImageNet | 48.95| 80.19| 21535| 
[ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb_20201123-ce6c27ed.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb/20201025_231108.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb/20201025_231108.log.json) | +|[tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb.py) |8| ResNet50 | Kinetics400 | 56.08 | 84.31 | 21535| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb_20201123-7f84701b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb/20201108_190805.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb/20201108_190805.log.json) | +|[tsn_r50_1x1x8_50e_hmdb51_mit_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb.py) |8| ResNet50 | Moments | 54.25 | 83.86| 21535| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb/tsn_r50_1x1x8_50e_hmdb51_mit_rgb_20201123-01526d41.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb/20201112_170135.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb/20201112_170135.log.json) | + +### Kinetics-400 + +|配置文件 | 分辨率 | GPU 数量 | 主干网络|预训练 | top1 准确率| top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M)| ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[tsn_r50_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py) |340x256|8| ResNet50 | ImageNet|70.60|89.26|x|x|4.3 (25x10 frames)|8344| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log.json)| +|[tsn_r50_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py) |短边 256|8| ResNet50 | ImageNet|70.42|89.03|x|x|x|8343|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/tsn_r50_256p_1x1x3_100e_kinetics400_rgb_20200725-22592236.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log.json)| +|[tsn_r50_dense_1x1x5_50e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb.py) |340x256|8x3| ResNet50| ImageNet |70.18|89.10|[69.15](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[88.56](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|12.7 (8x10 frames)|7028| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb/tsn_r50_dense_1x1x5_100e_kinetics400_rgb_20200627-a063165f.pth) | 
[log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb/20200627_105310.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb/20200627_105310.log.json)| +|[tsn_r50_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb.py) |短边 320|8x2| ResNet50| ImageNet |70.91|89.51|x|x|10.7 (25x3 frames)| 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_320p_1x1x3_100e_kinetics400_rgb_20200702-cc665e2a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log.json)| +|[tsn_r50_320p_1x1x3_110e_kinetics400_flow](/configs/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow.py) |短边 320|8x2| ResNet50 | ImageNet|55.70|79.85|x|x|x| 8471 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow/tsn_r50_320p_1x1x3_110e_kinetics400_flow_20200705-3036bab6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow/tsn_r50_f3_kinetics400_flow_shortedge_55.7_79.9.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow/tsn_r50_f3_kinetics400_flow_shortedge_55.7_79.9.log.json)| +|tsn_r50_320p_1x1x3_kinetics400_twostream [1: 1]* |x|x| ResNet50 | ImageNet|72.76|90.52| x | x | x | x | x|x|x| +|[tsn_r50_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb.py)|短边 256|8| ResNet50| ImageNet |71.80|90.17|x|x|x|8343|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x8_100e_kinetics400_rgb/tsn_r50_256p_1x1x8_100e_kinetics400_rgb_20200817-883baf16.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x8_100e_kinetics400_rgb/20200815_173413.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x8_100e_kinetics400_rgb/20200815_173413.log.json)| +|[tsn_r50_320p_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb.py) |短边 320|8x3| ResNet50| ImageNet |72.41|90.55|x|x|11.1 (25x3 frames)| 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb/tsn_r50_320p_1x1x8_100e_kinetics400_rgb_20200702-ef80e3d7.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb/tsn_r50_f8_kinetics400_shortedge_72.4_90.6.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb/tsn_r50_f8_kinetics400_shortedge_72.4_90.6.log.json)| +|[tsn_r50_320p_1x1x8_110e_kinetics400_flow](/configs/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow.py) |短边 320|8x4| ResNet50 | ImageNet|57.76|80.99|x|x|x| 8473 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow/tsn_r50_320p_1x1x8_110e_kinetics400_flow_20200705-1f39486b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow/tsn_r50_f8_kinetics400_flow_shortedge_57.8_81.0.log) | 
[json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow/tsn_r50_f8_kinetics400_flow_shortedge_57.8_81.0.log.json)| +|tsn_r50_320p_1x1x8_kinetics400_twostream [1: 1]* |x|x| ResNet50| ImageNet |74.64|91.77| x | x | x | x | x|x|x| +|[tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py) |短边 320|8| ResNet50 | ImageNet |71.11|90.04| x | x | x | 8343 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014-5ae1ee79.pth) |[log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014.json)| +|[tsn_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb.py) |340x256|8| ResNet50 | ImageNet|70.77|89.3|[68.75](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[88.42](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|12.2 (8x10 frames)|8344| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_dense_1x1x8_100e_kinetics400_rgb_20200606-e925e6e3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/20200606_003901.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/20200606_003901.log.json)| +|[tsn_r50_video_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py) |短边 256|8| ResNet50| ImageNet | 71.79 | 90.25 |x|x|x|21558| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_1x1x8_100e_kinetics400_rgb_20200702-568cde33.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log.json)| +|[tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb.py) |短边 256|8| ResNet50| ImageNet | 70.40 | 89.12 |x|x|x|21553| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb_20200703-0f19175f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_dense_100e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_dense_100e_kinetics400_rgb.log.json)| + +这里,MMAction2 使用 [1: 1] 表示以 1: 1 的比例融合 RGB 和光流两分支的融合结果(融合前不经过 softmax) + +### 在 TSN 模型中使用第三方的主干网络 + +用户可在 MMAction2 的框架中使用第三方的主干网络训练 TSN,例如: + +- [x] MMClassification 中的主干网络 + +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | ckpt | log | json | +| :----------------------------------------------------------: | :------------: | :--: | 
+| [tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.py) | 短边 320 | 8x2 | ResNeXt101-32x4d [[MMCls](https://github.com/open-mmlab/mmclassification/tree/master/configs/resnext)] | ImageNet | 73.43 | 91.01 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb-16a8b561.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.json) |
+
+### Kinetics-400 数据基准测试 (8 块 GPU, ResNet50, ImageNet 预训练; 3 个视频段)
+
+在数据基准测试中,比较:
+
+1. 不同的数据预处理方法:(1) 视频分辨率为 340x256, (2) 视频分辨率为短边 320px, (3) 视频分辨率为短边 256px;
+2. 不同的数据增强方法:(1) MultiScaleCrop, (2) RandomResizedCrop;
+3. 不同的测试方法:(1) 25 帧 x 10 裁剪片段, (2) 25 帧 x 3 裁剪片段。
+
+| 配置文件 | 分辨率 | 训练时的数据增强 | 测试时的策略 | top1 准确率 | top5 准确率 | ckpt | log | json |
+| :----------------------------------------------------------: | :------------: | :-------------------: | :--------------: | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: |
+| [tsn_r50_multiscalecrop_340x256_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_340x256_1x1x3_100e_kinetics400_rgb.py) | 340x256 | MultiScaleCrop | 25x10 frames | 70.60 | 89.26 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log.json) |
+| x | 340x256 | MultiScaleCrop | 25x3 frames | 70.52 | 89.39 | x | x | x |
+| [tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb.py) | 340x256 | RandomResizedCrop | 25x10 frames | 70.11 | 89.01 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb_20200725-88cb325a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb_20200725.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb_20200725.json) |
+| x | 340x256 | RandomResizedCrop | 25x3 frames | 69.95 | 89.02 | x | x | x |
+| [tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb.py) | 短边 320 | MultiScaleCrop | 25x10
frames | 70.32 | 89.25 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb_20200725-9922802f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb_20200725.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb_20200725.json) | +| x | 短边 320 | MultiScaleCrop | 25x3 frames | 70.54 | 89.39 | x | x | x | +| [tsn_r50_randomresizedcrop_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_320p_1x1x3_100e_kinetics400_rgb.py) | 短边 320 | RandomResizedCrop | 25x10 frames | 70.44 | 89.23 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_320p_1x1x3_100e_kinetics400_rgb_20200702-cc665e2a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log.json) | +| x | 短边 320 | RandomResizedCrop | 25x3 frames | 70.91 | 89.51 | x | x | x | +| [tsn_r50_multiscalecrop_256p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_256p_1x1x3_100e_kinetics400_rgb.py) | 短边 256 | MultiScaleCrop | 25x10 frames | 70.42 | 89.03 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/tsn_r50_256p_1x1x3_100e_kinetics400_rgb_20200725-22592236.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log.json)| +| x | 短边 256 | MultiScaleCrop | 25x3 frames | 70.79 | 89.42 | x | x | x | +| [tsn_r50_randomresizedcrop_256p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_256p_1x1x3_100e_kinetics400_rgb.py) | 短边 256 | RandomResizedCrop | 25x10 frames | 69.80 | 89.06 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_randomresize_1x1x3_100e_kinetics400_rgb/tsn_r50_256p_randomresize_1x1x3_100e_kinetics400_rgb_20200817-ae7963ca.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_randomresize_1x1x3_100e_kinetics400_rgb/20200815_172601.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_randomresize_1x1x3_100e_kinetics400_rgb/20200815_172601.log.json)| +| x | 短边 256 | RandomResizedCrop | 25x3 frames | 70.48 | 89.89 | x | x | x | + +### Kinetics-400 OmniSource 实验 + +| 配置文件 | 分辨率 | 主干网络 | 预训练 | w. 
OmniSource | top1 准确率 | top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log | json |
+| :----------------------------------------------------------: | :------------: | :------: | :-------: | :----------------: | :------: | :------: | :---------------------: | :--------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: |
+| [tsn_r50_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py) | 340x256 | ResNet50 | ImageNet | :x: | 70.6 | 89.3 | 4.3 (25x10 frames) | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log.json) |
+| x | 340x256 | ResNet50 | ImageNet | :heavy_check_mark: | 73.6 | 91.0 | x | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_imagenet_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-54192355.pth) | x | x |
+| x | 短边 320 | ResNet50 | IG-1B [1] | :x: | 73.1 | 90.4 | x | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_without_omni_1x1x3_kinetics400_rgb_20200926-c133dd49.pth) | x | x |
+| x | 短边 320 | ResNet50 | IG-1B [1] | :heavy_check_mark: | 75.7 | 91.9 | x | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-2863fed0.pth) | x | x |
+
+[1] 这里的预训练模型是从 [torch-hub](https://pytorch.org/hub/facebookresearch_semi-supervised-ImageNet1K-models_resnext/) 获取的,所使用的预训练模型为 `resnet50_swsl`。
+
+### Kinetics-600
+
+| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log | json |
+| :----------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------: | :---------------------: | :--------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: |
+| [tsn_r50_video_1x1x8_100e_kinetics600_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb.py) | 短边 256 | 8x2 | ResNet50 | ImageNet | 74.8 | 92.3 | 11.1 (25x3 frames) | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb/tsn_r50_video_1x1x8_100e_kinetics600_rgb_20201015-4db3c461.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb/tsn_r50_video_1x1x8_100e_kinetics600_rgb_20201015.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb/tsn_r50_video_1x1x8_100e_kinetics600_rgb_20201015.json) |
+
+### Kinetics-700
+
+| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log | json |
+| :----------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------: | :---------------------: | :--------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: |
+| [tsn_r50_video_1x1x8_100e_kinetics700_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb.py) | 短边 256 | 8x2 | ResNet50 | ImageNet | 61.7 | 83.6 | 11.1 (25x3 frames) | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb/tsn_r50_video_1x1x8_100e_kinetics700_rgb_20201015-e381a6c7.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb/tsn_r50_video_1x1x8_100e_kinetics700_rgb_20201015.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb/tsn_r50_video_1x1x8_100e_kinetics700_rgb_20201015.json) |
+
+### Something-Something V1
+
+|配置文件|分辨率 | GPU 数量| 主干网络 |预训练| top1 准确率| top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | GPU 显存占用 (M) | ckpt | log| json|
+|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
+|[tsn_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb.py)|高 100 |8| ResNet50 | ImageNet|18.55 |44.80 |[17.53](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[44.29](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 10978 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_r50_1x1x8_50e_sthv1_rgb_20200618-061b9195.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_sthv1.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_r50_f8_sthv1_18.1_45.0.log.json)|
+|[tsn_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb.py)| 高 100 |8| ResNet50| ImageNet |15.77 |39.85 |[13.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[35.58](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 5691 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/tsn_r50_1x1x16_50e_sthv1_rgb_20200614-7e2fe4f1.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/20200614_211932.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/20200614_211932.log.json)|
+
+### Something-Something V2
+
+|配置文件 |分辨率| GPU 数量| 主干网络| 预训练 | top1 准确率| top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | GPU 显存占用 (M) | ckpt | log| json|
+|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
+|[tsn_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb.py)|高 240 |8| ResNet50| ImageNet |32.97 |63.62 |[30.56](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[58.49](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 10966 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/tsn_r50_1x1x8_50e_sthv2_rgb_20200915-f3b381a5.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20200915_114139.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20200915_114139.log.json)|
+|[tsn_r50_1x1x16_50e_sthv2_rgb](/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb.py)| 高 240 |8|ResNet50| ImageNet |27.21 |55.84 |[21.91](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[46.87](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|8337| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/tsn_r50_1x1x16_50e_sthv2_rgb_20200917-80bc3611.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20200917_105855.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20200917_105855.log.json)|
+
+### Moments in Time
+
+|配置文件 |分辨率| GPU 数量| 主干网络 | 预训练 | top1 准确率| top5 准确率 | GPU 显存占用 (M)| ckpt | log| json|
+|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
+|[tsn_r50_1x1x6_100e_mit_rgb](/configs/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb.py)|短边 256 |8x2| ResNet50| ImageNet |26.84|51.6| 8339| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb/tsn_r50_1x1x6_100e_mit_rgb_20200618-d512ab1b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb/tsn_mit.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb/tsn_r50_f6_mit_26.8_51.6.log.json)|
+
+### Multi-Moments in Time
+
+|配置文件 | 分辨率|GPU 数量| 主干网络 | 预训练 | mAP| GPU 显存占用 (M) | ckpt | log| json|
+|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
+|[tsn_r101_1x1x5_50e_mmit_rgb](/configs/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb.py)|短边 256 |8x2| ResNet101| ImageNet |61.09| 10467 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb/tsn_r101_1x1x5_50e_mmit_rgb_20200618-642f450d.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb/tsn_mmit.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb/tsn_r101_f6_mmit_61.1.log.json)|
+
+### ActivityNet v1.3
+
+| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | GPU 显存占用 (M) | ckpt | log | json |
+| :----------------------------------------------------------- | :--------: | :--: | :------: | :---------: | :------: | :------: | :--------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: |
+| [tsn_r50_320p_1x1x8_50e_activitynet_video_rgb](/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb.py) | 短边 320 | 8x1 | ResNet50 | Kinetics400 | 73.93 | 93.44 | 5692 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb_20210301-7f8da0c6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb/20210228_223327.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb/20210228_223327.log.json) |
+| [tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb](/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb.py) | 短边 320 | 8x1 | ResNet50 | Kinetics400 | 76.90 | 94.47 | 5692 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb_20210301-c0f04a7e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb/20210217_181313.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb/20210217_181313.log.json) |
+| [tsn_r50_320p_1x1x8_150e_activitynet_video_flow](/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow.py) | 340x256 | 8x2 | ResNet50 | Kinetics400 | 57.51 | 83.02 | 5780 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow/tsn_r50_320p_1x1x8_150e_activitynet_video_flow_20200804-13313f52.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow/tsn_r50_320p_1x1x8_150e_activitynet_video_flow_20200804.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow/tsn_r50_320p_1x1x8_150e_activitynet_video_flow_20200804.json) |
+| [tsn_r50_320p_1x1x8_150e_activitynet_clip_flow](/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow.py) | 340x256 | 8x2 | ResNet50 | Kinetics400 | 59.51 | 82.69 | 5780 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow_20200804-8622cf38.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow_20200804.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow_20200804.json) |
+
+### HVU
+
+| 配置文件[1] | tag 类别 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | mAP | HATNet[2] | HATNet-multi[2] | ckpt | log | json |
+| :----------------------------------------------------------: | :----------: | :------------: | :--: | :------: | :------: | :--: | :-------: | :-------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: |
+| [tsn_r18_1x1x8_100e_hvu_action_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_action_rgb.py) | action | 短边 256 | 8x2 | ResNet18 | ImageNet | 57.5 | 51.8 | 53.5 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/action/tsn_r18_1x1x8_100e_hvu_action_rgb_20201027-011b282b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/action/tsn_r18_1x1x8_100e_hvu_action_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/action/tsn_r18_1x1x8_100e_hvu_action_rgb_20201027.json) |
+| [tsn_r18_1x1x8_100e_hvu_scene_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_scene_rgb.py) | scene | 短边 256 | 8 | ResNet18 | ImageNet | 55.2 | 55.8 | 57.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/scene/tsn_r18_1x1x8_100e_hvu_scene_rgb_20201027-00e5748d.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/scene/tsn_r18_1x1x8_100e_hvu_scene_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/scene/tsn_r18_1x1x8_100e_hvu_scene_rgb_20201027.json) |
+| [tsn_r18_1x1x8_100e_hvu_object_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_object_rgb.py) | object | 短边 256 | 8 | ResNet18 | ImageNet | 45.7 | 34.2 | 35.1 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/object/tsn_r18_1x1x8_100e_hvu_object_rgb_20201102-24a22f30.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/object/tsn_r18_1x1x8_100e_hvu_object_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/object/tsn_r18_1x1x8_100e_hvu_object_rgb_20201027.json) |
+| [tsn_r18_1x1x8_100e_hvu_event_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_event_rgb.py) | event | 短边 256 | 8 | ResNet18 | ImageNet | 63.7 | 38.5 | 39.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/event/tsn_r18_1x1x8_100e_hvu_event_rgb_20201027-dea8cd71.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/event/tsn_r18_1x1x8_100e_hvu_event_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/event/tsn_r18_1x1x8_100e_hvu_event_rgb_20201027.json) |
+| [tsn_r18_1x1x8_100e_hvu_concept_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_concept_rgb.py) | concept | 短边 256 | 8 | ResNet18 | ImageNet | 47.5 | 26.1 | 27.3 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/concept/tsn_r18_1x1x8_100e_hvu_concept_rgb_20201027-fc1dd8e3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/concept/tsn_r18_1x1x8_100e_hvu_concept_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/concept/tsn_r18_1x1x8_100e_hvu_concept_rgb_20201027.json) |
+| [tsn_r18_1x1x8_100e_hvu_attribute_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_attribute_rgb.py) | attribute | 短边 256 | 8 | ResNet18 | ImageNet | 46.1 | 33.6 | 34.9 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/attribute/tsn_r18_1x1x8_100e_hvu_attribute_rgb_20201027-0b3b49d2.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/attribute/tsn_r18_1x1x8_100e_hvu_attribute_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/attribute/tsn_r18_1x1x8_100e_hvu_attribute_rgb_20201027.json) |
+| - | Overall | 短边 256 | - | ResNet18 | ImageNet | 52.6 | 40.0 | 41.3 | - | - | - |
+
+[1] 简单起见,MMAction2 对每个 tag 类别训练特定的模型,作为 HVU 的基准模型。
+
+[2] 这里 HATNet 和 HATNet-multi 的结果来自于 paper: [Large Scale Holistic Video Understanding](https://pages.iai.uni-bonn.de/gall_juergen/download/HVU_eccv20.pdf)。
+论文中提出的 HATNet 是一个双分支的卷积网络(一个 2D 分支,一个 3D 分支),并且和 MMAction2 有相同的主干网络(ResNet18)。HATNet 的输入是 16 帧或 32 帧的长视频片段(这样的片段比 MMAction2 使用的要长),同时输入分辨率更粗糙(112px 而非 224px)。
+HATNet 是在每个独立的任务(对应每个 tag 类别)上进行训练的,HATNet-multi 是在多个任务上进行训练的。由于目前没有 HATNet 的开源代码和模型,这里仅汇报了原 paper 的精度。
+
+注意:
+
+1. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。
+   依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。
+   如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。具体调整方式可参考下方的示例命令。
+2. 这里的 **推理时间** 是根据 [基准测试脚本](/tools/analysis/benchmark.py) 获得的,采用测试时的采帧策略,且只考虑模型的推理时间,
+   并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。
+3. 参考代码的结果是通过使用相同的模型配置在原来的代码库上训练得到的。
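+
+下面给出一个按线性缩放规则调整学习率的示意命令。仅为草图:这里假设所用版本的训练脚本支持通过 `--cfg-options` 在命令行覆盖配置项;若不支持,也可以直接修改配置文件中的 `optimizer.lr` 字段。
+
+```shell
+# 示意:假设原配置对应 8 块 GPU;若只使用 4 块 GPU(总批大小减半),
+# 则按线性缩放规则将学习率同比例减半(0.005 仅为示意数值,请以实际配置为准)
+bash tools/dist_train.sh configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py 4 \
+    --cfg-options optimizer.lr=0.005 --validate
+```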
+
+对于数据集准备的细节,用户可参考:
+
+- [准备 ucf101](/tools/data/ucf101/README_zh-CN.md)
+- [准备 kinetics](/tools/data/kinetics/README_zh-CN.md)
+- [准备 sthv1](/tools/data/sthv1/README_zh-CN.md)
+- [准备 sthv2](/tools/data/sthv2/README_zh-CN.md)
+- [准备 mit](/tools/data/mit/README_zh-CN.md)
+- [准备 mmit](/tools/data/mmit/README_zh-CN.md)
+- [准备 hvu](/tools/data/hvu/README_zh-CN.md)
+- [准备 hmdb51](/tools/data/hmdb51/README_zh-CN.md)
+
+## 如何训练
+
+用户可以使用以下指令进行模型训练。
+
+```shell
+python tools/train.py ${CONFIG_FILE} [optional arguments]
+```
+
+例如:以一个确定性的训练方式,辅以定期的验证过程进行 TSN 模型在 Kinetics-400 数据集上的训练。
+
+```shell
+python tools/train.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py \
+    --work-dir work_dirs/tsn_r50_1x1x3_100e_kinetics400_rgb \
+    --validate --seed 0 --deterministic
+```
+
+更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。
+
+## 如何测试
+
+用户可以使用以下指令进行模型测试。
+
+```shell
+python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments]
+```
+
+例如:在 Kinetics-400 数据集上测试 TSN 模型,并将结果导出为一个 json 文件。
+
+```shell
+python tools/test.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py \
+    checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy mean_class_accuracy \
+    --out result.json
+```
+
+更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。
diff --git a/configs/recognition/x3d/README.md b/configs/recognition/x3d/README.md
index b4579a72cb..0d9397564c 100644
--- a/configs/recognition/x3d/README.md
+++ b/configs/recognition/x3d/README.md
@@ -22,7 +22,7 @@
 |config | resolution | backbone | top1 10-view | top1 30-view | reference top1 10-view | reference top1 30-view | ckpt |
 |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
 |[x3d_s_13x6x1_facebook_kinetics400_rgb](/configs/recognition/x3d/x3d_s_13x6x1_facebook_kinetics400_rgb.py)|short-side 320| X3D_S | 72.7 | 73.2 | 73.1 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] | 73.5 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] | [ckpt](https://download.openmmlab.com/mmaction/recognition/x3d/facebook/x3d_s_facebook_13x6x1_kinetics400_rgb_20201027-623825a0.pth)[1] |
-|[x3d_m_16x5x1_facebook_kinetics400_rgb](/configs/recognition/x3d/x3d_m_16x5x1_facebook_kinetics400_rgb)|short-side 320| X3D_M | 75.0 | 75.6 | 75.1 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] | 76.2 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] | [ckpt](https://download.openmmlab.com/mmaction/recognition/x3d/facebook/x3d_m_facebook_16x5x1_kinetics400_rgb_20201027-3f42382a.pth)[1] |
+|[x3d_m_16x5x1_facebook_kinetics400_rgb](/configs/recognition/x3d/x3d_m_16x5x1_facebook_kinetics400_rgb.py)|short-side 320| X3D_M | 75.0 | 75.6 | 75.1 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] | 76.2 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] | [ckpt](https://download.openmmlab.com/mmaction/recognition/x3d/facebook/x3d_m_facebook_16x5x1_kinetics400_rgb_20201027-3f42382a.pth)[1] |
 
 [1] The models are ported from the repo [SlowFast](https://github.com/facebookresearch/SlowFast/) and tested on our data. Currently, we only support the testing of X3D models, training will be available soon.
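As a quick sanity check of the corrected config path above, the test script can be run against it directly. A minimal sketch, assuming the checkpoint from the table has been downloaded to `checkpoints/` (the file name is taken from the ckpt link above):

```shell
python tools/test.py configs/recognition/x3d/x3d_m_16x5x1_facebook_kinetics400_rgb.py \
    checkpoints/x3d_m_facebook_16x5x1_kinetics400_rgb_20201027-3f42382a.pth \
    --eval top_k_accuracy mean_class_accuracy --average-clips prob
```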
diff --git a/configs/recognition/x3d/README_zh-CN.md b/configs/recognition/x3d/README_zh-CN.md new file mode 100644 index 0000000000..2b5d100b8f --- /dev/null +++ b/configs/recognition/x3d/README_zh-CN.md @@ -0,0 +1,51 @@ +# X3D + +## 简介 + +[ALGORITHM] + +```BibTeX +@misc{feichtenhofer2020x3d, + title={X3D: Expanding Architectures for Efficient Video Recognition}, + author={Christoph Feichtenhofer}, + year={2020}, + eprint={2004.04730}, + archivePrefix={arXiv}, + primaryClass={cs.CV} +} +``` + +## 模型库 + +### Kinetics-400 + +|配置文件 | 分辨率 | 主干网络 | top1 10-view | top1 30-view | 参考代码的 top1 10-view | 参考代码的 top1 30-view | ckpt | +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[x3d_s_13x6x1_facebook_kinetics400_rgb](/configs/recognition/x3d/x3d_s_13x6x1_facebook_kinetics400_rgb.py)|短边 320| X3D_S | 72.7 | 73.2 | 73.1 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] | 73.5 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] | [ckpt](https://download.openmmlab.com/mmaction/recognition/x3d/facebook/x3d_s_facebook_13x6x1_kinetics400_rgb_20201027-623825a0.pth)[1] | +|[x3d_m_16x5x1_facebook_kinetics400_rgb](/configs/recognition/x3d/x3d_m_16x5x1_facebook_kinetics400_rgb.py)|短边 320| X3D_M | 75.0 | 75.6 | 75.1 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] | 76.2 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] | [ckpt](https://download.openmmlab.com/mmaction/recognition/x3d/facebook/x3d_m_facebook_16x5x1_kinetics400_rgb_20201027-3f42382a.pth)[1] | + +[1] 这里的模型是从 [SlowFast](https://github.com/facebookresearch/SlowFast/) 代码库中导入并在 MMAction2 使用的数据上进行测试的。目前仅支持 X3D 模型的测试,训练部分将会在近期提供。 + +注意: + +1. 参考代码的结果是通过使用相同的数据和原来的代码库所提供的模型进行测试得到的。 + +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分 + +## 如何测试 + +用户可以使用以下指令进行模型测试。 + +```shell +python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] +``` + +例如:在 Kinetics-400 数据集上测试 X3D 模型,并将结果导出为一个 json 文件。 + +```shell +python tools/test.py configs/recognition/x3d/x3d_s_13x6x1_facebook_kinetics400_rgb.py \ + checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy mean_class_accuracy \ + --out result.json --average-clips prob +``` + +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 From f3d3758e17f9729691cd7ae9ccfa4358790564f1 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Sun, 28 Mar 2021 17:16:02 +0800 Subject: [PATCH 004/414] Polish minor cn words (#762) * polish minor cn words * polish minor cn words --- configs/localization/bmn/README_zh-CN.md | 6 +-- configs/localization/bsn/README_zh-CN.md | 11 +++--- configs/localization/ssn/README_zh-CN.md | 4 +- configs/recognition/c3d/README_zh-CN.md | 2 +- configs/recognition/csn/README_zh-CN.md | 2 +- configs/recognition/i3d/README_zh-CN.md | 4 +- configs/recognition/r2plus1d/README_zh-CN.md | 2 +- configs/recognition/slowfast/README_zh-CN.md | 2 +- configs/recognition/slowonly/README_zh-CN.md | 38 +++++++++---------- configs/recognition/tanet/README_zh-CN.md | 4 +- configs/recognition/tin/README_zh-CN.md | 4 +- configs/recognition/tsm/README_zh-CN.md | 38 +++++++++---------- ...netv2_dense_1x1x8_100e_kinetics400_rgb.py} | 11 ++---- configs/recognition/tsn/README_zh-CN.md | 8 ++-- configs/recognition/x3d/README_zh-CN.md | 2 +- 15 files changed, 67 insertions(+), 71 deletions(-) rename configs/recognition/tsm/{tsm_mobilenetv2_1x1x8_50e_kinetics400_rgb.py => 
tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py} (93%)
diff --git a/configs/localization/bmn/README_zh-CN.md b/configs/localization/bmn/README_zh-CN.md
index 5d3a7ffc38..d735740287 100644
--- a/configs/localization/bmn/README_zh-CN.md
+++ b/configs/localization/bmn/README_zh-CN.md
@@ -37,7 +37,7 @@
 | |mmaction_clip |2|75.35|67.38|43.08|32.19|10.73|31.15|5420|3.27|[ckpt](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809-10d803ce.pth)| [log](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809.log) | [json](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809.json) |
 | [BMN-official](https://github.com/JJBOY/BMN-Boundary-Matching-Network) (for reference)* |cuhk_mean_100 |-|75.27|67.49|42.22|30.98|9.22|30.00|-|-|-| - | - |
 
-- Notes:
+- 注:
 
 1. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。
    依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。
    如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。
@@ -77,7 +77,7 @@ python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments]
 例如:在 ActivityNet 特征上测试 BMN 模型。
 
 ```shell
-# 注意:如果需要进行指标验证,需确测试数据的保标注文件包含真实标签
+# 注:如果需要进行指标验证,需确保测试数据的标注文件包含真实标签
 python tools/test.py configs/localization/bmn/bmn_400x100_2x8_9e_activitynet_feature.py checkpoints/SOME_CHECKPOINT.pth --eval AR@AN --out results.json
 ```
 
@@ -87,7 +87,7 @@ python tools/test.py configs/localization/bmn/bmn_400x100_2x8_9e_activitynet_fea
 python tools/analysis/report_map.py --proposal path/to/proposal_file
 ```
 
-注意:
+注:
 
 1. (可选项) 用户可以使用以下指令生成格式化的时序动作候选文件,该文件可被送入动作识别器中(目前只支持 SSN 和 P-GCN,不包括 TSN, I3D 等),以获得时序动作候选的分类结果。
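
上述 BMN 的测试与 mAP 汇报两个步骤可以串联为一个简单的工作流。以下仅为示意,其中假设 `tools/test.py` 通过 `--out` 保存的 results.json 即为 `tools/analysis/report_map.py` 的 `--proposal` 参数所需的时序动作候选文件:

```shell
# 第一步:推理 BMN 模型,计算 AR@AN 指标并保存时序动作候选
python tools/test.py configs/localization/bmn/bmn_400x100_2x8_9e_activitynet_feature.py \
    checkpoints/SOME_CHECKPOINT.pth --eval AR@AN --out results.json
# 第二步:基于保存的候选文件汇报 mAP
python tools/analysis/report_map.py --proposal results.json
```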
diff --git a/configs/localization/bsn/README_zh-CN.md b/configs/localization/bsn/README_zh-CN.md
index 3072e2ed00..5cd2709567 100644
--- a/configs/localization/bsn/README_zh-CN.md
+++ b/configs/localization/bsn/README_zh-CN.md
@@ -24,12 +24,13 @@
 | |mmaction_video |1| None |74.93|66.74|41(TEM)+25(PEM)|0.074(TEM)+0.036(PEM)|[ckpt_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_video/bsn_tem_400x100_1x16_20e_mmaction_video_20200809-ad6ec626.pth) [ckpt_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_video/bsn_pem_400x100_1x16_20e_mmaction_video_20200809-aa861b26.pth)| [log_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_video/bsn_tem_400x100_1x16_20e_mmaction_video_20200809.log) [log_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_video/bsn_pem_400x100_1x16_20e_mmaction_video_20200809.log) | [json_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_video/bsn_tem_400x100_1x16_20e_mmaction_video_20200809.json) [json_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_video/bsn_pem_400x100_1x16_20e_mmaction_video_20200809.json) |
 | |mmaction_clip |1| None |75.19|66.81|41(TEM)+25(PEM)|0.074(TEM)+0.036(PEM)|[ckpt_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_clip/bsn_tem_400x100_1x16_20e_mmaction_clip_20200809-0a563554.pth) [ckpt_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_clip/bsn_pem_400x100_1x16_20e_mmaction_clip_20200809-e32f61e6.pth)| [log_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_clip/bsn_tem_400x100_1x16_20e_mmaction_clip_20200809.log) [log_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_clip/bsn_pem_400x100_1x16_20e_mmaction_clip_20200809.log) | [json_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_clip/bsn_tem_400x100_1x16_20e_mmaction_clip_20200809.json) [json_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_clip/bsn_pem_400x100_1x16_20e_mmaction_clip_20200809.json) |
 
-Notes:
+注:
 
 1. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。
    依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。
    如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。
-2. For feature column, cuhk_mean_100 denotes the widely used cuhk activitynet feature extracted by [anet2016-cuhk](https://github.com/yjxiong/anet2016-cuhk), mmaction_video and mmaction_clip denote feature extracted by mmaction, with video-level activitynet finetuned model or clip-level activitynet finetuned model respectively.
+2. 对于 **特征** 这一列,`cuhk_mean_100` 表示所使用的特征为利用 [anet2016-cuhk](https://github.com/yjxiong/anet2016-cuhk) 代码库抽取的,被广泛利用的 CUHK ActivityNet 特征,
+   `mmaction_video` 和 `mmaction_clip` 分别表示所使用的特征为利用 MMAction 抽取的视频级别 ActivityNet 预训练模型特征,以及视频片段级别 ActivityNet 预训练模型特征。
 
 对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 ActivityNet 特征部分。
 
@@ -97,7 +98,7 @@ python tools/train.py ${CONFIG_FILE} [optional arguments]
 3. 推理 BSN(PEM) 模型,并计算 'AR@AN' 指标,输出结果文件。
 
    ```shell
-    # 注意:如果需要进行指标验证,需确测试数据的保标注文件包含真实标签
+    # 注:如果需要进行指标验证,需确保测试数据的标注文件包含真实标签
     python tools/test.py configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py checkpoints/SOME_CHECKPOINT.pth --eval AR@AN --out results.json
    ```
 
@@ -108,7 +109,7 @@
 
 1. TEM
 
    ```shell
-    # 注意:该命令无法进行指标验证
+    # 注:该命令无法进行指标验证
     python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments]
    ```
 
@@ -144,7 +145,7 @@ python tools/train.py ${CONFIG_FILE} [optional arguments]
     python tools/test.py configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py checkpoints/SOME_CHECKPOINT.pth --eval AR@AN --out results.json
 ```
 
-注意:
+注:
 
 1.
(可选项) 用户可以使用以下指令生成格式化的时序动作候选文件,该文件可被送入动作识别器中(目前只支持 SSN 和 P-GCN,不包括 TSN, I3D 等),以获得时序动作候选的分类结果。 diff --git a/configs/localization/ssn/README_zh-CN.md b/configs/localization/ssn/README_zh-CN.md index bc3a13f451..98da514984 100644 --- a/configs/localization/ssn/README_zh-CN.md +++ b/configs/localization/ssn/README_zh-CN.md @@ -20,7 +20,7 @@ year = {2017} |:-:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:-:|:-:|:-:|:-:|---|:--:|:--:| |[ssn_r50_450e_thumos14_rgb](/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py) |8| ResNet50 | ImageNet |29.37|22.15|15.69|[27.61](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)|[21.28](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)|[14.57](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)|6352|[ckpt](https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/ssn_r50_450e_thumos14_rgb_20201012-1920ab16.pth)| [log](https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/20201005_144656.log)| [json](https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/20201005_144656.log.json)| [ckpt](https://download.openmmlab.com/mmaction/localization/ssn/mmaction_reference/ssn_r50_450e_thumos14_rgb_ref/ssn_r50_450e_thumos14_rgb_ref_20201014-b6f48f68.pth)| [json](https://download.openmmlab.com/mmaction/localization/ssn/mmaction_reference/ssn_r50_450e_thumos14_rgb_ref/20201008_103258.log.json)| -注意: +注: 1. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。 @@ -56,7 +56,7 @@ python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] 例如:在 ActivityNet 特征上测试 BMN。 ```shell -# Note: If evaluated, then please make sure the annotation file for test data contains groundtruth. +# 注:如果需要进行指标验证,需确测试数据的保标注文件包含真实标签 python tools/test.py configs/localization/ssn/ssn_r50_450e_thumos14_rgb_test.py checkpoints/SOME_CHECKPOINT.pth --eval mAP ``` diff --git a/configs/recognition/c3d/README_zh-CN.md b/configs/recognition/c3d/README_zh-CN.md index ef5bda2330..62e5fb326f 100644 --- a/configs/recognition/c3d/README_zh-CN.md +++ b/configs/recognition/c3d/README_zh-CN.md @@ -23,7 +23,7 @@ eid = {arXiv:1412.0767} |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| |[c3d_sports1m_16x1x1_45e_ucf101_rgb.py](/configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb.py)|128x171|8| c3d | sports1m | 83.27 | 95.90 | 10 clips x 1 crop | x | 6053 | [ckpt](https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/c3d_sports1m_16x1x1_45e_ucf101_rgb_20201021-26655025.pth)|[log](https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/20201021_140429.log)|[json](https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/20201021_140429.log.json)| -注意: +注: 1. C3D 的原论文使用 UCF-101 的数据均值进行数据正则化,并且使用 SVM 进行视频分类。MMAction2 使用 ImageNet 的 RGB 均值进行数据正则化,并且使用线性分类器。 2. 
这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。
 依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。
@@ -56,7 +56,7 @@ python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments]
 例如:在 THUMOS14 数据集上测试 SSN 模型。
 
 ```shell
-# Note: If evaluated, then please make sure the annotation file for test data contains groundtruth.
+# 注:如果需要进行指标验证,需确保测试数据的标注文件包含真实标签
 python tools/test.py configs/localization/ssn/ssn_r50_450e_thumos14_rgb_test.py checkpoints/SOME_CHECKPOINT.pth --eval mAP
 ```
diff --git a/configs/recognition/c3d/README_zh-CN.md b/configs/recognition/c3d/README_zh-CN.md
index ef5bda2330..62e5fb326f 100644
--- a/configs/recognition/c3d/README_zh-CN.md
+++ b/configs/recognition/c3d/README_zh-CN.md
@@ -23,7 +23,7 @@ eid = {arXiv:1412.0767}
 |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
 |[c3d_sports1m_16x1x1_45e_ucf101_rgb.py](/configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb.py)|128x171|8| c3d | sports1m | 83.27 | 95.90 | 10 clips x 1 crop | x | 6053 | [ckpt](https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/c3d_sports1m_16x1x1_45e_ucf101_rgb_20201021-26655025.pth)|[log](https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/20201021_140429.log)|[json](https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/20201021_140429.log.json)|
 
-注意:
+注:
 
 1. C3D 的原论文使用 UCF-101 的数据均值进行数据正则化,并且使用 SVM 进行视频分类。MMAction2 使用 ImageNet 的 RGB 均值进行数据正则化,并且使用线性分类器。
 2.
这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。 diff --git a/configs/recognition/r2plus1d/README_zh-CN.md b/configs/recognition/r2plus1d/README_zh-CN.md index c9f6ca6f01..0882c30589 100644 --- a/configs/recognition/r2plus1d/README_zh-CN.md +++ b/configs/recognition/r2plus1d/README_zh-CN.md @@ -25,7 +25,7 @@ |[r2plus1d_r34_8x8x1_180e_kinetics400_rgb](/configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py) | 短边 320|8x2| ResNet34|None |68.68|88.36|1.6 (80x3 frames)|5019|[ckpt](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_8x8x1_180e_kinetics400_rgb_20200618-3fce5629.pth)| [log](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r21d_8x8.log)| [json](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_8x8_69.58_88.36.log.json)| |[r2plus1d_r34_32x2x1_180e_kinetics400_rgb](/configs/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb.py) |短边 320|8x2| ResNet34|None |74.60|91.59|0.5 (320x3 frames)|12975| [ckpt](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r2plus1d_r34_32x2x1_180e_kinetics400_rgb_20200618-63462eb3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r21d_32x2.log)| [json](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r2plus1d_r34_32x2_74.6_91.6.log.json)| -注意: +注: 1. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。 diff --git a/configs/recognition/slowfast/README_zh-CN.md b/configs/recognition/slowfast/README_zh-CN.md index 15e685c6a8..a215fa55b9 100644 --- a/configs/recognition/slowfast/README_zh-CN.md +++ b/configs/recognition/slowfast/README_zh-CN.md @@ -29,7 +29,7 @@ |[slowfast_r101_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb.py) |短边256|8x4| ResNet101 |None|77.90|93.51||25994| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/slowfast_r101_8x8x1_256e_kinetics400_rgb_20210218-0dd54025.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log.json)| |[slowfast_r152_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r152_r50_4x16x1_256e_kinetics400_rgb.py) |短边256|8x1| ResNet152 + ResNet50 |None|77.13|93.20||10077| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/slowfast_r152_4x16x1_256e_kinetics400_rgb_20210122-bdeb6b87.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log.json)| -注意: +注: 1. 
这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。 diff --git a/configs/recognition/slowonly/README_zh-CN.md b/configs/recognition/slowonly/README_zh-CN.md index 1827dd6214..ece5748ba8 100644 --- a/configs/recognition/slowonly/README_zh-CN.md +++ b/configs/recognition/slowonly/README_zh-CN.md @@ -20,17 +20,17 @@ |配置文件 | 分辨率 | GPU 数量 | 主干网络 |预训练| top1 准确率| top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M)| ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[slowonly_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py)|short-side 256|8x4| ResNet50 | None |72.76|90.51|x|3168|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb_20200820-bea7701f.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log.json)| -|[slowonly_r50_video_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py)|short-side 320|8x2| ResNet50 | None |72.90|90.82|x|8472|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014-c9cdc656.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014.json)| -|[slowonly_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py) |short-side 256|8x4| ResNet50 | None |74.42|91.49|x|5820|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb_20200820-75851a7d.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/20200817_003320.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/20200817_003320.log.json)| -|[slowonly_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py)|short-side 320|8x2| ResNet50 | None |73.02|90.77|4.0 (40x3 frames)|3168|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth)| [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/so_4x16.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16_73.02_90.77.log.json)| -|[slowonly_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py) |short-side 320|8x3| ResNet50 | None |74.93|91.92|2.3 (80x3 frames)|5820| 
[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/slowonly_r50_8x8x1_256e_kinetics400_rgb_20200703-a79c555a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/so_8x8.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/slowonly_r50_8x8_74.93_91.92.log.json)| -|[slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb.py)|short-side 320|8x2| ResNet50 | ImageNet |73.39|91.12|x|3168|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912-1e8fc736.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912.json)| -|[slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb.py) |short-side 320|8x4| ResNet50 | ImageNet |75.55|92.04|x|5820|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912-3f9ce182.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912.json)| -|[slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb.py) | short-side 320 | 8x2 | ResNet50 | ImageNet | 74.54 | 91.73 | x | 4435 |[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb_20210308-0d6e5a69.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/20210305_152630.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/20210305_152630.log.json)| -|[slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb.py) | short-side 320 | 8x4 | ResNet50 | ImageNet | 76.07 | 92.42 | x | 8895 
|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb_20210308-e8dd9e82.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/20210308_212250.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/20210308_212250.log.json)| -|[slowonly_r50_4x16x1_256e_kinetics400_flow](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow.py)|short-side 320|8x2| ResNet50 | ImageNet |61.79|83.62|x|8450| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_20200704-decb8568.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_61.8_83.6.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_61.8_83.6.log.json)| -|[slowonly_r50_8x8x1_196e_kinetics400_flow](/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow.py) |short-side 320|8x4| ResNet50 | ImageNet |65.76|86.25|x|8455| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_256e_kinetics400_flow_20200704-6b384243.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_196e_kinetics400_flow_65.8_86.3.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_196e_kinetics400_flow_65.8_86.3.log.json)| +|[slowonly_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py)|短边 256|8x4| ResNet50 | None |72.76|90.51|x|3168|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb_20200820-bea7701f.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log.json)| +|[slowonly_r50_video_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py)|短边 320|8x2| ResNet50 | None |72.90|90.82|x|8472|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014-c9cdc656.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014.json)| +|[slowonly_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py) |短边 256|8x4| ResNet50 | None 
|74.42|91.49|x|5820|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb_20200820-75851a7d.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/20200817_003320.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/20200817_003320.log.json)| +|[slowonly_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py)|短边 320|8x2| ResNet50 | None |73.02|90.77|4.0 (40x3 frames)|3168|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth)| [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/so_4x16.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16_73.02_90.77.log.json)| +|[slowonly_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py) |短边 320|8x3| ResNet50 | None |74.93|91.92|2.3 (80x3 frames)|5820| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/slowonly_r50_8x8x1_256e_kinetics400_rgb_20200703-a79c555a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/so_8x8.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/slowonly_r50_8x8_74.93_91.92.log.json)| +|[slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb.py)|短边 320|8x2| ResNet50 | ImageNet |73.39|91.12|x|3168|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912-1e8fc736.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912.json)| +|[slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb.py) |短边 320|8x4| ResNet50 | ImageNet |75.55|92.04|x|5820|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912-3f9ce182.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912.json)| +|[slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb.py) | 
短边 320 | 8x2 | ResNet50 | ImageNet | 74.54 | 91.73 | x | 4435 |[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb_20210308-0d6e5a69.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/20210305_152630.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/20210305_152630.log.json)| +|[slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb.py) | 短边 320 | 8x4 | ResNet50 | ImageNet | 76.07 | 92.42 | x | 8895 |[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb_20210308-e8dd9e82.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/20210308_212250.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/20210308_212250.log.json)| +|[slowonly_r50_4x16x1_256e_kinetics400_flow](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow.py)|短边 320|8x2| ResNet50 | ImageNet |61.79|83.62|x|8450| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_20200704-decb8568.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_61.8_83.6.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_61.8_83.6.log.json)| +|[slowonly_r50_8x8x1_196e_kinetics400_flow](/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow.py) |短边 320|8x4| ResNet50 | ImageNet |65.76|86.25|x|8455| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_256e_kinetics400_flow_20200704-6b384243.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_196e_kinetics400_flow_65.8_86.3.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_196e_kinetics400_flow_65.8_86.3.log.json)| ### Kinetics-400 数据基准测试 @@ -39,14 +39,14 @@ | 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 输入 | 预训练 | top1 准确率 | top5 准确率 | 测试方案 | ckpt | log | json | | :----------------------------------------------------------- | :------------: | :--: | :------: | :---: | :------: | :------: | :------: | :----------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | | [slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb.py) | 340x256 | 8x2 | ResNet50 | 4x16 | None | 71.61 | 90.05 | 10 clips x 3 crops | 
[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb_20200803-dadca1a3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb_20200803.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb_20200803.json) | -| [slowonly_r50_randomresizedcrop_320p_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_320p_4x16x1_256e_kinetics400_rgb.py) | short-side 320 | 8x2 | ResNet50 | 4x16 | None | 73.02 | 90.77 | 10 clips x 3 crops | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/so_4x16.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16_73.02_90.77.log.json) | -| [slowonly_r50_randomresizedcrop_256p_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_256p_4x16x1_256e_kinetics400_rgb.py) | short-side 256 | 8x4 | ResNet50 | 4x16 | None | 72.76 | 90.51 | 10 clips x 3 crops | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb_20200820-bea7701f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log.json) | +| [slowonly_r50_randomresizedcrop_320p_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_320p_4x16x1_256e_kinetics400_rgb.py) | 短边 320 | 8x2 | ResNet50 | 4x16 | None | 73.02 | 90.77 | 10 clips x 3 crops | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/so_4x16.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16_73.02_90.77.log.json) | +| [slowonly_r50_randomresizedcrop_256p_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_256p_4x16x1_256e_kinetics400_rgb.py) | 短边 256 | 8x4 | ResNet50 | 4x16 | None | 72.76 | 90.51 | 10 clips x 3 crops | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb_20200820-bea7701f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log) | 
[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log.json) | ### Kinetics-400 OmniSource Experiments | 配置文件 | 分辨率 | 主干网络 | 预训练 | w. OmniSource | top1 准确率 | top5 准确率 | ckpt | log | json | | :----------------------------------------------------------: | :------------: | :-------: | :------: | :----------------: | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [slowonly_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py) | short-side 320 | ResNet50 | None | :x: | 73.0 | 90.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/so_4x16.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16_73.02_90.77.log.json) | +| [slowonly_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py) | 短边 320 | ResNet50 | None | :x: | 73.0 | 90.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/so_4x16.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16_73.02_90.77.log.json) | | x | x | ResNet50 | None | :heavy_check_mark: | 76.8 | 92.5 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r50_omni_4x16x1_kinetics400_rgb_20200926-51b1f7ea.pth) | x | x | | [slowonly_r101_8x8x1_196e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r101_8x8x1_196e_kinetics400_rgb.py) | x | ResNet101 | None | :x: | 76.5 | 92.7 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_without_omni_8x8x1_kinetics400_rgb_20200926-0c730aef.pth) | x | x | | x | x | ResNet101 | None | :heavy_check_mark: | 80.4 | 94.4 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_omni_8x8x1_kinetics400_rgb_20200926-b5dbb701.pth) | x | x | @@ -55,23 +55,23 @@ | 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | ckpt | log | json | | :----------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [slowonly_r50_video_8x8x1_256e_kinetics600_rgb](/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb.py) | short-side 256 | 8x4 | ResNet50 | None | 77.5 | 93.7 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb/slowonly_r50_video_8x8x1_256e_kinetics600_rgb_20201015-81e5153e.pth) | 
[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb/slowonly_r50_video_8x8x1_256e_kinetics600_rgb_20201015.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb/slowonly_r50_video_8x8x1_256e_kinetics600_rgb_20201015.json) | +| [slowonly_r50_video_8x8x1_256e_kinetics600_rgb](/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb.py) | 短边 256 | 8x4 | ResNet50 | None | 77.5 | 93.7 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb/slowonly_r50_video_8x8x1_256e_kinetics600_rgb_20201015-81e5153e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb/slowonly_r50_video_8x8x1_256e_kinetics600_rgb_20201015.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb/slowonly_r50_video_8x8x1_256e_kinetics600_rgb_20201015.json) | ### Kinetics-700 | 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | ckpt | log | json | | :----------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [slowonly_r50_video_8x8x1_256e_kinetics700_rgb](/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb.py) | short-side 256 | 8x4 | ResNet50 | None | 65.0 | 86.1 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb/slowonly_r50_video_8x8x1_256e_kinetics700_rgb_20201015-9250f662.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb/slowonly_r50_video_8x8x1_256e_kinetics700_rgb_20201015.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb/slowonly_r50_video_8x8x1_256e_kinetics700_rgb_20201015.json) | +| [slowonly_r50_video_8x8x1_256e_kinetics700_rgb](/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb.py) | 短边 256 | 8x4 | ResNet50 | None | 65.0 | 86.1 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb/slowonly_r50_video_8x8x1_256e_kinetics700_rgb_20201015-9250f662.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb/slowonly_r50_video_8x8x1_256e_kinetics700_rgb_20201015.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb/slowonly_r50_video_8x8x1_256e_kinetics700_rgb_20201015.json) | ### GYM99 | 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | 类别平均准确率 | ckpt | log | json | | :----------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb.py) | short-side 256 | 8x2 | ResNet50 | ImageNet | 79.3 | 70.2 | 
[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111-a9c34b54.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111.json) | -| [slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow](/configs/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow.py) | short-side 256 | 8x2 | ResNet50 | Kinetics | 80.3 | 71.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111-66ecdb3c.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.json) | +| [slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb.py) | 短边 256 | 8x2 | ResNet50 | ImageNet | 79.3 | 70.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111-a9c34b54.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111.json) | +| [slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow](/configs/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow.py) | 短边 256 | 8x2 | ResNet50 | Kinetics | 80.3 | 71.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111-66ecdb3c.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.json) | | 1: 1 融合 | | | | | 83.7 | 74.8 | | | | -注意: +注: 1. 
这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。 diff --git a/configs/recognition/tanet/README_zh-CN.md b/configs/recognition/tanet/README_zh-CN.md index 1a46617d0f..2925069b12 100644 --- a/configs/recognition/tanet/README_zh-CN.md +++ b/configs/recognition/tanet/README_zh-CN.md @@ -21,14 +21,14 @@ |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| |[tanet_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb.py) |短边 320|8| TANet | ImageNet |76.28 | 92.60 |[76.22](https://github.com/liu-zhy/temporal-adaptive-module/blob/master/scripts/test_tam_kinetics_rgb_8f.sh)|[92.53](https://github.com/liu-zhy/temporal-adaptive-module/blob/master/scripts/test_tam_kinetics_rgb_8f.sh) | x | 7124 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219-032c8e94.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.log)| [json](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.json)| -Notes: +注: 1. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。 如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。 2. 这里的 **推理时间** 是根据 [基准测试脚本](/tools/analysis/benchmark.py) 获得的,采用测试时的采帧策略,且只考虑模型的推理时间, 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 -2. The values in columns named after "reference" are the results got by testing on our dataset, using the checkpoints provided by the author with same model settings. The checkpoints for reference repo can be downloaded [here](https://drive.google.com/drive/folders/1sFfmP3yrfc7IzRshEELOby7-aEoymIFL?usp=sharing). +3. 
参考代码的结果是使用作者提供的模型权重文件(模型配置相同)在 MMAction2 的数据上测试得到的。对应的模型权重文件可从 [这里](https://drive.google.com/drive/folders/1sFfmP3yrfc7IzRshEELOby7-aEoymIFL?usp=sharing) 下载。 对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。 diff --git a/configs/recognition/tin/README_zh-CN.md b/configs/recognition/tin/README_zh-CN.md index 4aa1314df3..0af09296fc 100644 --- a/configs/recognition/tin/README_zh-CN.md +++ b/configs/recognition/tin/README_zh-CN.md @@ -33,9 +33,9 @@ |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| |[tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb.py)|短边 256|8x4| ResNet50 | TSM-Kinetics400 | 70.89 | 89.89 | 6187 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb_20200810-4a146a70.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/20200809_142447.log) | [json](https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/20200809_142447.log.json) | -Here, we use `finetune` to indicate that we use [TSM model](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20200607-af7fb746.pth) trained on Kinetics-400 to finetune the TIN model on Kinetics-400. +这里,MMAction2 使用 `finetune` 一词表示 TIN 模型使用 Kinetics400 上的 [TSM 模型](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20200607-af7fb746.pth) 进行微调。 -Notes: +注: 1. 参考代码的结果是通过 [原始 repo](https://github.com/deepcs233/TIN/tree/1aacd0c4c30d5e1d334bf023e55b855b59f158db) 解决 [AverageMeter 相关问题](https://github.com/deepcs233/TIN/issues/4) 后训练得到的,该问题会导致错误的精度计算。 2. 
这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 diff --git a/configs/recognition/tsm/README_zh-CN.md b/configs/recognition/tsm/README_zh-CN.md index 5e73057d0a..62e5249e07 100644 --- a/configs/recognition/tsm/README_zh-CN.md +++ b/configs/recognition/tsm/README_zh-CN.md @@ -31,43 +31,43 @@ |配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M)| ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| |[tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |340x256|8| ResNet50| ImageNet |70.24|89.56|[70.36](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|[89.49](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|74.0 (8x1 frames)| 7079 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20200607-af7fb746.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log.json)| -|[tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet |70.59|89.52|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/tsm_r50_256p_1x1x8_50e_kinetics400_rgb_20200726-020785e2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log.json)| -|[tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py](/configs/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet |70.48|89.40|x|x|x|7076|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219-bf96e6cc.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.json)| -|[tsm_r50_video_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet |70.25|89.66|[70.36](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|[89.49](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|74.0 (8x1 frames)| 7077 | [ckpt]( https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_1x1x8_100e_kinetics400_rgb_20200702-a77f4328.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log)| 
[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log.json)| +|[tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |短边 256|8| ResNet50| ImageNet |70.59|89.52|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/tsm_r50_256p_1x1x8_50e_kinetics400_rgb_20200726-020785e2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log.json)| +|[tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py](/configs/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py) |短边 256|8| ResNet50| ImageNet |70.48|89.40|x|x|x|7076|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219-bf96e6cc.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.json)| +|[tsm_r50_video_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_kinetics400_rgb.py) |短边 256|8| ResNet50| ImageNet |70.25|89.66|[70.36](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|[89.49](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|74.0 (8x1 frames)| 7077 | [ckpt]( https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_1x1x8_100e_kinetics400_rgb_20200702-a77f4328.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log.json)| |[tsm_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py) |340x256|8x4| ResNet50 | ImageNet|72.9|90.44|[72.22](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#dense-sample)|[90.37](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#dense-sample)|11.5 (8x10 frames)| 7079 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/tsm_r50_dense_1x1x8_100e_kinetics400_rgb_20200626-91a54551.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20200626_213415.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20200626_213415.log.json)| -|[tsm_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py) |short-side 256|8| ResNet50 | 
ImageNet|73.38|91.02|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb_20200727-e1e0c785.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/20200725_032043.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/20200725_032043.log.json)| +|[tsm_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py) |短边 256|8| ResNet50 | ImageNet|73.38|91.02|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb_20200727-e1e0c785.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/20200725_032043.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/20200725_032043.log.json)| |[tsm_r50_1x1x16_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py) |340x256|8| ResNet50| ImageNet |72.09|90.37|[70.67](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_16f.sh)|[89.98](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_16f.sh)|47.0 (16x1 frames)| 10404 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/tsm_r50_340x256_1x1x16_50e_kinetics400_rgb_20201011-2f27f229.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log.json)| -|[tsm_r50_1x1x16_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py) |short-side 256|8x4| ResNet50| ImageNet |71.89|90.73|x|x|x|10398|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/tsm_r50_256p_1x1x16_50e_kinetics400_rgb_20201010-85645c2a.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log.json)| -|[tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb.py)|short-side 320|8x4| ResNet50| ImageNet |72.03|90.25|71.81|90.36|x|8931|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb_20200724-f00f1336.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log.json)| -|[tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb.py)|short-side 320|8x4| ResNet50| ImageNet 
|70.70|89.90|x|x|x|10125|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb_20200816-b93fd297.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log.json)| -|[tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb.py)|short-side 320|8x4|ResNet50| ImageNet |71.60|90.34|x|x|x|8358|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb_20200724-d8ad84d2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log.json)| -|[tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py)|short-side 320|8|MobileNetV2| ImageNet |68.46|88.64|x|x|x|3385|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/tsm_mobilenetv2_dense_320p_1x1x8_100e_kinetics400_rgb_20210202-61135809.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log.json)| +|[tsm_r50_1x1x16_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py) |短边 256|8x4| ResNet50| ImageNet |71.89|90.73|x|x|x|10398|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/tsm_r50_256p_1x1x16_50e_kinetics400_rgb_20201010-85645c2a.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log.json)| +|[tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb.py)|短边 320|8x4| ResNet50| ImageNet |72.03|90.25|71.81|90.36|x|8931|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb_20200724-f00f1336.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log.json)| +|[tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb.py)|短边 320|8x4| ResNet50| ImageNet 
|70.70|89.90|x|x|x|10125|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb_20200816-b93fd297.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log.json)| +|[tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb.py)|短边 320|8x4|ResNet50| ImageNet |71.60|90.34|x|x|x|8358|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb_20200724-d8ad84d2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log.json)| +|[tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py)|短边 320|8|MobileNetV2| ImageNet |68.46|88.64|x|x|x|3385|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/tsm_mobilenetv2_dense_320p_1x1x8_100e_kinetics400_rgb_20210202-61135809.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log.json)| ### Something-Something V1 |配置文件 | 分辨率 | GPU 数量 | 主干网络| 预训练 | top1 准确率 (efficient/accurate)| top5 准确率 (efficient/accurate)| 参考代码的 top1 准确率 (efficient/accurate)| 参考代码的 top5 准确率 (efficient/accurate)| GPU 显存占用 (M)| ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tsm_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb.py) |height 100|8| ResNet50 | ImageNet| 45.58 / 47.70|75.02 / 76.12|[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7077| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/tsm_r50_1x1x8_50e_sthv1_rgb_20210203-01dce462.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/20210203_150227.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/20210203_150227.log.json)| -|[tsm_r50_flip_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb.py) |height 100|8| ResNet50 | ImageNet| 47.10 / 48.51|76.02 / 77.56|[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7077| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/tsm_r50_flip_1x1x8_50e_sthv1_rgb_20210203-12596f16.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/20210203_145829.log)| 
[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/20210203_145829.log.json)| -|[tsm_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb.py)|height 100|8| ResNet50 | ImageNet|47.62 / 49.28|76.63 / 77.82|[47.05 / 48.61](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[76.40 / 77.96](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|10390|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb_20201010-17fa49f6.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/20201010_221240.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/20201010_221240.log.json)| -|[tsm_r101_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb.py)|height 100|8| ResNet50 | ImageNet|45.72 / 48.43|74.67 / 76.72|[46.64 / 48.13](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[75.40 / 77.31](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|9800|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb_20201010-43fedf2e.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/20201010_224055.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/20201010_224055.log.json)| +|[tsm_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb.py) |高 100|8| ResNet50 | ImageNet| 45.58 / 47.70|75.02 / 76.12|[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7077| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/tsm_r50_1x1x8_50e_sthv1_rgb_20210203-01dce462.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/20210203_150227.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/20210203_150227.log.json)| +|[tsm_r50_flip_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb.py) |高 100|8| ResNet50 | ImageNet| 47.10 / 48.51|76.02 / 77.56|[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7077| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/tsm_r50_flip_1x1x8_50e_sthv1_rgb_20210203-12596f16.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/20210203_145829.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/20210203_145829.log.json)| +|[tsm_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb.py)|高 100|8| ResNet50 | ImageNet|47.62 / 49.28|76.63 / 77.82|[47.05 / 
48.61](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[76.40 / 77.96](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|10390|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb_20201010-17fa49f6.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/20201010_221240.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/20201010_221240.log.json)| +|[tsm_r101_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb.py)|高 100|8| ResNet50 | ImageNet|45.72 / 48.43|74.67 / 76.72|[46.64 / 48.13](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[75.40 / 77.31](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|9800|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb_20201010-43fedf2e.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/20201010_224055.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/20201010_224055.log.json)| ### Something-Something V2 |配置文件 | 分辨率 | GPU 数量 | 主干网络| 预训练 | top1 准确率 (efficient/accurate)| top5 准确率 (efficient/accurate)| 参考代码的 top1 准确率 (efficient/accurate)| 参考代码的 top5 准确率 (efficient/accurate)| GPU 显存占用 (M)| ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tsm_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py) |height 240|8| ResNet50| ImageNet |57.86 / 61.12|84.67 / 86.26|[57.98 / 60.69](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[84.57 / 86.28](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7069 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/tsm_r50_1x1x8_50e_sthv2_rgb_20200912-033c4ac6.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20200912_140737.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20200912_140737.log.json)| -|[tsm_r50_1x1x16_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb.py) |height 240|8| ResNet50| ImageNet |59.93 / 62.04|86.10 / 87.35|[58.90 / 60.98](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[85.29 / 86.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 10400| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/tsm_r50_1x1x16_50e_sthv2_rgb_20201010-16469c6f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20201010_224215.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20201010_224215.log.json)| -|[tsm_r101_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb.py) |height 240|8| ResNet101 | ImageNet|58.59 / 61.51|85.07 / 86.90|[58.89 / 61.36](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[85.14 
/ 87.00](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 9784 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/tsm_r101_1x1x8_50e_sthv2_rgb_20201010-98cdedb8.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20201010_224100.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20201010_224100.log.json)| +|[tsm_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py) |高 240|8| ResNet50| ImageNet |57.86 / 61.12|84.67 / 86.26|[57.98 / 60.69](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[84.57 / 86.28](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7069 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/tsm_r50_1x1x8_50e_sthv2_rgb_20200912-033c4ac6.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20200912_140737.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20200912_140737.log.json)| +|[tsm_r50_1x1x16_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb.py) |高 240|8| ResNet50| ImageNet |59.93 / 62.04|86.10 / 87.35|[58.90 / 60.98](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[85.29 / 86.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 10400| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/tsm_r50_1x1x16_50e_sthv2_rgb_20201010-16469c6f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20201010_224215.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20201010_224215.log.json)| +|[tsm_r101_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb.py) |高 240|8| ResNet101 | ImageNet|58.59 / 61.51|85.07 / 86.90|[58.89 / 61.36](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[85.14 / 87.00](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 9784 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/tsm_r101_1x1x8_50e_sthv2_rgb_20201010-98cdedb8.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20201010_224100.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20201010_224100.log.json)| ### MixUp & CutMix on Something-Something V1 | 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 (efficient/accurate) | top5 准确率 (efficient/accurate) | top1 准确率变化 (efficient/accurate) | top5 准确率变化 (efficient/accurate) | ckpt | log | json | | :----------------------------------------------------------- | :--------: | :--: | :------: | :------: | :---------------------------: | :---------------------------: | :---------------------------------: | :---------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| 
[tsm_r50_mixup_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 46.35 / 48.49 | 75.07 / 76.88 | +0.77 / +0.79 | +0.05 / +0.70 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb-9eca48e5.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.json) | -| [tsm_r50_cutmix_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 45.92 / 47.46 | 75.23 / 76.71 | +0.34 / -0.24 | +0.21 / +0.59 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb-34934615.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.json) | +| [tsm_r50_mixup_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 46.35 / 48.49 | 75.07 / 76.88 | +0.77 / +0.79 | +0.05 / +0.70 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb-9eca48e5.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.json) | +| [tsm_r50_cutmix_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 45.92 / 47.46 | 75.23 / 76.71 | +0.34 / -0.24 | +0.21 / +0.59 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb-34934615.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.json) | -注意: +注: 1. 
这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。 diff --git a/configs/recognition/tsm/tsm_mobilenetv2_1x1x8_50e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py similarity index 93% rename from configs/recognition/tsm/tsm_mobilenetv2_1x1x8_50e_kinetics400_rgb.py rename to configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py index 647cb609ac..57628cfc23 100644 --- a/configs/recognition/tsm/tsm_mobilenetv2_1x1x8_50e_kinetics400_rgb.py +++ b/configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py @@ -1,6 +1,6 @@ _base_ = [ '../../_base_/models/tsm_mobilenet_v2.py', - '../../_base_/schedules/sgd_tsm_mobilenet_v2_50e.py', + '../../_base_/schedules/sgd_tsm_mobilenet_v2_100e.py', '../../_base_/default_runtime.py' ] @@ -33,7 +33,7 @@ ] val_pipeline = [ dict( - type='SampleFrames', + type='DenseSampleFrames', clip_len=1, frame_interval=1, num_clips=8, @@ -48,7 +48,7 @@ ] test_pipeline = [ dict( - type='SampleFrames', + type='DenseSampleFrames', clip_len=1, frame_interval=1, num_clips=8, @@ -82,11 +82,6 @@ evaluation = dict( interval=5, metrics=['top_k_accuracy', 'mean_class_accuracy']) -# optimizer -optimizer = dict( - lr=0.01, # this lr is used for 8 gpus -) - # runtime settings checkpoint_config = dict(interval=1) work_dir = './work_dirs/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/' diff --git a/configs/recognition/tsn/README_zh-CN.md b/configs/recognition/tsn/README_zh-CN.md index a10d2aac0e..ffaad3b368 100644 --- a/configs/recognition/tsn/README_zh-CN.md +++ b/configs/recognition/tsn/README_zh-CN.md @@ -96,7 +96,7 @@ | x | 短边 320 | ResNet50 | IG-1B [1] | :x: | 73.1 | 90.4 | x | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_without_omni_1x1x3_kinetics400_rgb_20200926-c133dd49.pth) | x | x | | x | 短边 320 | ResNet50 | IG-1B [1] | :heavy_check_mark: | 75.7 | 91.9 | x | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-2863fed0.pth) | x | x | -[1] We obtain the pre-trained model from [torch-hub](https://pytorch.org/hub/facebookresearch_semi-supervised-ImageNet1K-models_resnext/), the 预训练 model we used is `resnet50_swsl` +[1] MMAction2 使用 [torch-hub](https://pytorch.org/hub/facebookresearch_semi-supervised-ImageNet1K-models_resnext/) 提供的 `resnet50_swsl` 预训练模型。 ### Kinetics-600 @@ -147,7 +147,7 @@ ### HVU -| 配置文件[1] | tag category | 分辨率 | GPU 数量 | 主干网络 | 预训练 | mAP | HATNet[2] | HATNet-multi[2] | ckpt | log | json | +| 配置文件[1] | tag 类别 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | mAP | HATNet[2] | HATNet-multi[2] | ckpt | log | json | | :----------------------------------------------------------: | :----------: | :------------: | :--: | :------: | :------: | :--: | :-------: | :-------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | | [tsn_r18_1x1x8_100e_hvu_action_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_action_rgb.py) | action | 短边 256 | 8x2 | ResNet18 | ImageNet | 57.5 | 51.8 | 53.5 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/action/tsn_r18_1x1x8_100e_hvu_action_rgb_20201027-011b282b.pth) | 
[log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/action/tsn_r18_1x1x8_100e_hvu_action_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/action/tsn_r18_1x1x8_100e_hvu_action_rgb_20201027.json) | | [tsn_r18_1x1x8_100e_hvu_scene_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_scene_rgb.py) | scene | 短边 256 | 8 | ResNet18 | ImageNet | 55.2 | 55.8 | 57.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/scene/tsn_r18_1x1x8_100e_hvu_scene_rgb_20201027-00e5748d.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/scene/tsn_r18_1x1x8_100e_hvu_scene_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/scene/tsn_r18_1x1x8_100e_hvu_scene_rgb_20201027.json) | @@ -155,7 +155,7 @@ | [tsn_r18_1x1x8_100e_hvu_event_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_event_rgb.py) | event | 短边 256 | 8 | ResNet18 | ImageNet | 63.7 | 38.5 | 39.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/event/tsn_r18_1x1x8_100e_hvu_event_rgb_20201027-dea8cd71.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/event/tsn_r18_1x1x8_100e_hvu_event_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/event/tsn_r18_1x1x8_100e_hvu_event_rgb_20201027.json) | | [tsn_r18_1x1x8_100e_hvu_concept_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_concept_rgb.py) | concept | 短边 256 | 8 | ResNet18 | ImageNet | 47.5 | 26.1 | 27.3 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/concept/tsn_r18_1x1x8_100e_hvu_concept_rgb_20201027-fc1dd8e3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/concept/tsn_r18_1x1x8_100e_hvu_concept_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/concept/tsn_r18_1x1x8_100e_hvu_concept_rgb_20201027.json) | | [tsn_r18_1x1x8_100e_hvu_attribute_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_attribute_rgb.py) | attribute | 短边 256 | 8 | ResNet18 | ImageNet | 46.1 | 33.6 | 34.9 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/attribute/tsn_r18_1x1x8_100e_hvu_attribute_rgb_20201027-0b3b49d2.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/attribute/tsn_r18_1x1x8_100e_hvu_attribute_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/attribute/tsn_r18_1x1x8_100e_hvu_attribute_rgb_20201027.json) | -| - | Overall | 短边 256 | - | ResNet18 | ImageNet | 52.6 | 40.0 | 41.3 | - | - | - | +| - | 所有 tag | 短边 256 | - | ResNet18 | ImageNet | 52.6 | 40.0 | 41.3 | - | - | - | [1] 简单起见,MMAction2 对每个 tag 类别训练特定的模型,作为 HVU 的基准模型。 @@ -163,7 +163,7 @@ HATNet 的时序动作候选是一个双分支的卷积网络(一个 2D 分支,一个 3D 分支),并且和 MMAction2 有相同的主干网络(ResNet18)。HATNet 的输入是 16 帧或 32 帧的长视频片段(这样的片段比 MMAction2 使用的要长),同时输入分辨率更粗糙(112px 而非 224px)。 HATNet 是在每个独立的任务(对应每个 tag 类别)上进行训练的,HATNet-multi 是在多个任务上进行训练的。由于目前没有 HATNet 的开源代码和模型,这里仅汇报了原 paper 的精度。 -注意: +注: 1. 
这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。 diff --git a/configs/recognition/x3d/README_zh-CN.md b/configs/recognition/x3d/README_zh-CN.md index 2b5d100b8f..c36b8fe80a 100644 --- a/configs/recognition/x3d/README_zh-CN.md +++ b/configs/recognition/x3d/README_zh-CN.md @@ -26,7 +26,7 @@ [1] 这里的模型是从 [SlowFast](https://github.com/facebookresearch/SlowFast/) 代码库中导入并在 MMAction2 使用的数据上进行测试的。目前仅支持 X3D 模型的测试,训练部分将会在近期提供。 -注意: +注: 1. 参考代码的结果是通过使用相同的数据和原来的代码库所提供的模型进行测试得到的。 From 101ea64f287e20ee818f8de855fe1394ec87fd07 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Mon, 29 Mar 2021 10:21:06 +0800 Subject: [PATCH 005/414] Dataset note (#763) * mark * preview * full --- README.md | 78 +++++++++++++++++++++++++------------------------ README_zh-CN.md | 74 +++++++++++++++++++++++----------------------- 2 files changed, 78 insertions(+), 74 deletions(-) diff --git a/README.md b/README.md index 13812ea3af..ebe9d493b2 100644 --- a/README.md +++ b/README.md @@ -79,22 +79,22 @@ Supported methods for Action Recognition:
(click to collapse)

- ✅ [TSN](configs/recognition/tsn/README.md) (ECCV'2016)
- ✅ [TSM](configs/recognition/tsm/README.md) (ICCV'2019)
- ✅ [TSM Non-Local](configs/recognition/tsm/README.md) (ICCV'2019)
- ✅ [R(2+1)D](configs/recognition/r2plus1d/README.md) (CVPR'2018)
- ✅ [I3D](configs/recognition/i3d/README.md) (CVPR'2017)
- ✅ [I3D Non-Local](configs/recognition/i3d/README.md) (CVPR'2018)
- ✅ [SlowOnly](configs/recognition/slowonly/README.md) (ICCV'2019)
- ✅ [SlowFast](configs/recognition/slowfast/README.md) (ICCV'2019)
- ✅ [CSN](configs/recognition/csn/README.md) (ICCV'2019)
- ✅ [TIN](configs/recognition/tin/README.md) (AAAI'2020)
- ✅ [TPN](configs/recognition/tpn/README.md) (CVPR'2020)
- ✅ [C3D](configs/recognition/c3d/README.md) (CVPR'2014)
- ✅ [X3D](configs/recognition/x3d/README.md) (CVPR'2020)
- ✅ [OmniSource](configs/recognition/omnisource/README.md) (ECCV'2020)
- ✅ [MultiModality: Audio](configs/recognition_audio/resnet/README.md) (ArXiv'2020)
- ✅ [TANet](configs/recognition/tanet/README.md) (ArXiv'2020)
@@ -103,9 +103,9 @@ Supported methods for Temporal Action Detection:
(click to collapse) -- [x] [BSN](configs/localization/bsn/README.md) (ECCV'2018) -- [x] [BMN](configs/localization/bmn/README.md) (ICCV'2019) -- [x] [SSN](configs/localization/ssn/README.md) (ICCV'2017) +- ✅ [BSN](configs/localization/bsn/README.md) (ECCV'2018) +- ✅ [BMN](configs/localization/bmn/README.md) (ICCV'2019) +- ✅ [SSN](configs/localization/ssn/README.md) (ICCV'2017)
@@ -114,9 +114,9 @@ Supported methods for Spatial Temporal Action Detection:
(click to collapse) -- [x] [SlowOnly+Fast R-CNN](configs/detection/ava/README.md) (ICCV'2019) -- [x] [SlowFast+Fast R-CNN](configs/detection/ava/README.md) (ICCV'2019) -- [x] [Long-Term Feature Bank](configs/detection/lfb/README.md) (CVPR'2019) +- ✅ [SlowOnly+Fast R-CNN](configs/detection/ava/README.md) (ICCV'2019) +- ✅ [SlowFast+Fast R-CNN](configs/detection/ava/README.md) (ICCV'2019) +- ✅ [Long-Term Feature Bank](configs/detection/lfb/README.md) (CVPR'2019)
@@ -135,17 +135,17 @@ Supported datasets for Action Recognition:
(click to collapse) -- [x] [UCF101](/tools/data/ucf101/README.md) \[ [Homepage](https://www.crcv.ucf.edu/research/data-sets/ucf101/) \] (CRCV-IR-12-01) -- [x] [HMDB51](/tools/data/hmdb51/README.md) \[ [Homepage](https://serre-lab.clps.brown.edu/resource/hmdb-a-large-human-motion-database/) \] (ICCV'2011) -- [x] [Kinetics-[400/600/700]](/tools/data/kinetics/README.md) \[ [Homepage](https://deepmind.com/research/open-source/kinetics) \] (CVPR'2017) -- [x] [Something-Something V1](/tools/data/sthv1/README.md) \[ [Homepage](https://20bn.com/datasets/something-something/v1) \] (ICCV'2017) -- [x] [Something-Something V2](/tools/data/sthv2/README.md) \[ [Homepage](https://20bn.com/datasets/something-something) \] (ICCV'2017) -- [x] [Moments in Time](/tools/data/mit/README.md) \[ [Homepage](http://moments.csail.mit.edu/) \] (TPAMI'2019) -- [x] [Multi-Moments in Time](/tools/data/mmit/README.md) \[ [Homepage](http://moments.csail.mit.edu/challenge_iccv_2019.html) \] (ArXiv'2019) -- [x] [HVU](/tools/data/hvu/README.md) \[ [Homepage](https://github.com/holistic-video-understanding/HVU-Dataset) \] (ECCV'2020) -- [x] [Jester](/tools/data/jester/README.md) \[ [Homepage](https://20bn.com/datasets/jester/v1) \] (ICCV'2019) -- [x] [GYM](/tools/data/gym/README.md) \[ [Homepage](https://sdolivia.github.io/FineGym/) \] (CVPR'2020) -- [x] [ActivityNet](/tools/data/activitynet/README.md) \[ [Homepage](http://activity-net.org/) \] (CVPR'2015) +- ✅ [UCF101](/tools/data/ucf101/README.md) \[ [Homepage](https://www.crcv.ucf.edu/research/data-sets/ucf101/) \] (CRCV-IR-12-01) +- ✅ [HMDB51](/tools/data/hmdb51/README.md) \[ [Homepage](https://serre-lab.clps.brown.edu/resource/hmdb-a-large-human-motion-database/) \] (ICCV'2011) +- ✅ [Kinetics-[400/600/700]](/tools/data/kinetics/README.md) \[ [Homepage](https://deepmind.com/research/open-source/kinetics) \] (CVPR'2017) +- ✅ [Something-Something V1](/tools/data/sthv1/README.md) \[ [Homepage](https://20bn.com/datasets/something-something/v1) \] (ICCV'2017) +- ✅ [Something-Something V2](/tools/data/sthv2/README.md) \[ [Homepage](https://20bn.com/datasets/something-something) \] (ICCV'2017) +- ✅ [Moments in Time](/tools/data/mit/README.md) \[ [Homepage](http://moments.csail.mit.edu/) \] (TPAMI'2019) +- ✅ [Multi-Moments in Time](/tools/data/mmit/README.md) \[ [Homepage](http://moments.csail.mit.edu/challenge_iccv_2019.html) \] (ArXiv'2019) +- ✅ [HVU](/tools/data/hvu/README.md) \[ [Homepage](https://github.com/holistic-video-understanding/HVU-Dataset) \] (ECCV'2020) +- ✅ [Jester](/tools/data/jester/README.md) \[ [Homepage](https://20bn.com/datasets/jester/v1) \] (ICCV'2019) +- ✅ [GYM](/tools/data/gym/README.md) \[ [Homepage](https://sdolivia.github.io/FineGym/) \] (CVPR'2020) +- ✅ [ActivityNet](/tools/data/activitynet/README.md) \[ [Homepage](http://activity-net.org/) \] (CVPR'2015)
@@ -154,8 +154,8 @@ Supported datasets for Temporal Action Detection
(click to collapse) -- [x] [ActivityNet](/tools/data/activitynet/README.md) \[ [Homepage](http://activity-net.org/) \] (CVPR'2015) -- [x] [THUMOS14](/tools/data/thumos14/README.md) \[ [Homepage](https://www.crcv.ucf.edu/THUMOS14/download.html) \] (THUMOS Challenge 2014) +- ✅ [ActivityNet](/tools/data/activitynet/README.md) \[ [Homepage](http://activity-net.org/) \] (CVPR'2015) +- ✅ [THUMOS14](/tools/data/thumos14/README.md) \[ [Homepage](https://www.crcv.ucf.edu/THUMOS14/download.html) \] (THUMOS Challenge 2014)
@@ -164,12 +164,14 @@ Supported datasets for Spatial Temporal Action Detection
(click to collapse) -- [x] [AVA](/tools/data/ava/README.md) \[ [Homepage](https://research.google.com/ava/index.html) \] (CVPR'2018) -- [x] [UCF101-24](/tools/data/ucf101_24/README.md) \[ [Homepage](http://www.thumos.info/download.html) \] (CRCV-IR-12-01) -- [x] [JHMDB](/tools/data/jhmdb/README.md) \[ [Homepage](http://jhmdb.is.tue.mpg.de/) \] (ICCV'2013) +- ✅ [AVA](/tools/data/ava/README.md) \[ [Homepage](https://research.google.com/ava/index.html) \] (CVPR'2018) +- 🔲 [UCF101-24](/tools/data/ucf101_24/README.md) \[ [Homepage](http://www.thumos.info/download.html) \] (CRCV-IR-12-01) +- 🔲 [JHMDB](/tools/data/jhmdb/README.md) \[ [Homepage](http://jhmdb.is.tue.mpg.de/) \] (ICCV'2013)
+Datasets marked with 🔲 are not fully supported yet, but related dataset preparation steps are provided. + ## Installation Please refer to [install.md](docs/install.md) for installation. diff --git a/README_zh-CN.md b/README_zh-CN.md index 91c3f7ba81..e3b78bc4dc 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -73,21 +73,21 @@ v0.12.0 版本已于 2021 年 2 月 28 日发布,可通过查阅 [更新日志
(点击收起)

- ✅ [TSN](/configs/recognition/tsn/README.md) (ECCV'2016)
- ✅ [TSM](/configs/recognition/tsm/README.md) (ICCV'2019)
- ✅ [TSM Non-Local](/configs/recognition/tsm/README.md) (ICCV'2019)
- ✅ [R(2+1)D](/configs/recognition/r2plus1d/README.md) (CVPR'2018)
- ✅ [I3D](/configs/recognition/i3d/README.md) (CVPR'2017)
- ✅ [I3D Non-Local](/configs/recognition/i3d/README.md) (CVPR'2018)
- ✅ [SlowOnly](/configs/recognition/slowonly/README.md) (ICCV'2019)
- ✅ [SlowFast](/configs/recognition/slowfast/README.md) (ICCV'2019)
- ✅ [CSN](/configs/recognition/csn/README.md) (ICCV'2019)
- ✅ [TIN](/configs/recognition/tin/README.md) (AAAI'2020)
- ✅ [TPN](/configs/recognition/tpn/README.md) (CVPR'2020)
- ✅ [C3D](/configs/recognition/c3d/README.md) (CVPR'2014)
- ✅ [X3D](/configs/recognition/x3d/README.md) (CVPR'2020)
- ✅ [OmniSource](/configs/recognition/omnisource/README.md) (ECCV'2020)
- ✅ [MultiModality: Audio](/configs/recognition_audio/resnet/README.md) (ArXiv'2020)
@@ -96,9 +96,9 @@ v0.12.0 版本已于 2021 年 2 月 28 日发布,可通过查阅 [更新日志
(点击收起) -- [x] [BSN](/configs/localization/bsn/README.md) (ECCV'2018) -- [x] [BMN](/configs/localization/bmn/README.md) (ICCV'2019) -- [x] [SSN](/configs/localization/ssn/README.md) (ICCV'2017) +- ✅ [BSN](/configs/localization/bsn/README.md) (ECCV'2018) +- ✅ [BMN](/configs/localization/bmn/README.md) (ICCV'2019) +- ✅ [SSN](/configs/localization/ssn/README.md) (ICCV'2017)
@@ -107,8 +107,8 @@ v0.12.0 版本已于 2021 年 2 月 28 日发布,可通过查阅 [更新日志
(点击收起) -- [x] [SlowOnly+Fast R-CNN](/configs/detection/ava/README.md) (ICCV'2019) -- [x] [SlowFast+Fast R-CNN](/configs/detection/ava/README.md) (ICCV'2019) +- ✅ [SlowOnly+Fast R-CNN](/configs/detection/ava/README.md) (ICCV'2019) +- ✅ [SlowFast+Fast R-CNN](/configs/detection/ava/README.md) (ICCV'2019)
@@ -125,17 +125,17 @@ v0.12.0 版本已于 2021 年 2 月 28 日发布,可通过查阅 [更新日志
(点击收起) -- [x] [UCF101](/tools/data/ucf101/README.md) \[ [主页](https://www.crcv.ucf.edu/research/data-sets/ucf101/) \] (CRCV-IR-12-01) -- [x] [HMDB51](/tools/data/hmdb51/README.md) \[ [主页](https://serre-lab.clps.brown.edu/resource/hmdb-a-large-human-motion-database/) \] (ICCV'2011) -- [x] [Kinetics-[400/600/700]](/tools/data/kinetics/README.md) \[ [主页](https://deepmind.com/research/open-source/kinetics) \] (CVPR'2017) -- [x] [Something-Something V1](/tools/data/sthv1/README.md) \[ [主页](https://20bn.com/datasets/something-something/v1) \] (ICCV'2017) -- [x] [Something-Something V2](/tools/data/sthv2/README.md) \[ [主页](https://20bn.com/datasets/something-something) \] (ICCV'2017) -- [x] [Moments in Time](/tools/data/mit/README.md) \[ [主页](http://moments.csail.mit.edu/) \] (TPAMI'2019) -- [x] [Multi-Moments in Time](/tools/data/mmit/README.md) \[ [主页](http://moments.csail.mit.edu/challenge_iccv_2019.html) \] (ArXiv'2019) -- [x] [HVU](/tools/data/hvu/README.md) \[ [主页](https://github.com/holistic-video-understanding/HVU-Dataset) \] (ECCV'2020) -- [x] [Jester](/tools/data/jester/README.md) \[ [主页](https://20bn.com/datasets/jester/v1) \] (ICCV'2019) -- [x] [GYM](/tools/data/gym/README.md) \[ [主页](https://sdolivia.github.io/FineGym/) \] (CVPR'2020) -- [x] [ActivityNet](/tools/data/activitynet/README.md) \[ [主页](http://activity-net.org/) \] (CVPR'2015) +- ✅ [UCF101](/tools/data/ucf101/README.md) \[ [主页](https://www.crcv.ucf.edu/research/data-sets/ucf101/) \] (CRCV-IR-12-01) +- ✅ [HMDB51](/tools/data/hmdb51/README.md) \[ [主页](https://serre-lab.clps.brown.edu/resource/hmdb-a-large-human-motion-database/) \] (ICCV'2011) +- ✅ [Kinetics-[400/600/700]](/tools/data/kinetics/README.md) \[ [主页](https://deepmind.com/research/open-source/kinetics) \] (CVPR'2017) +- ✅ [Something-Something V1](/tools/data/sthv1/README.md) \[ [主页](https://20bn.com/datasets/something-something/v1) \] (ICCV'2017) +- ✅ [Something-Something V2](/tools/data/sthv2/README.md) \[ [主页](https://20bn.com/datasets/something-something) \] (ICCV'2017) +- ✅ [Moments in Time](/tools/data/mit/README.md) \[ [主页](http://moments.csail.mit.edu/) \] (TPAMI'2019) +- ✅ [Multi-Moments in Time](/tools/data/mmit/README.md) \[ [主页](http://moments.csail.mit.edu/challenge_iccv_2019.html) \] (ArXiv'2019) +- ✅ [HVU](/tools/data/hvu/README.md) \[ [主页](https://github.com/holistic-video-understanding/HVU-Dataset) \] (ECCV'2020) +- ✅ [Jester](/tools/data/jester/README.md) \[ [主页](https://20bn.com/datasets/jester/v1) \] (ICCV'2019) +- ✅ [GYM](/tools/data/gym/README.md) \[ [主页](https://sdolivia.github.io/FineGym/) \] (CVPR'2020) +- ✅ [ActivityNet](/tools/data/activitynet/README.md) \[ [主页](http://activity-net.org/) \] (CVPR'2015)
@@ -144,8 +144,8 @@ v0.12.0 版本已于 2021 年 2 月 28 日发布,可通过查阅 [更新日志
(点击收起) -- [x] [ActivityNet](/tools/data/activitynet/README.md) \[ [主页](http://activity-net.org/) \] (CVPR'2015) -- [x] [THUMOS14](/tools/data/thumos14/README.md) \[ [主页](https://www.crcv.ucf.edu/THUMOS14/download.html) \] (THUMOS Challenge 2014) +- ✅ [ActivityNet](/tools/data/activitynet/README.md) \[ [主页](http://activity-net.org/) \] (CVPR'2015) +- ✅ [THUMOS14](/tools/data/thumos14/README.md) \[ [主页](https://www.crcv.ucf.edu/THUMOS14/download.html) \] (THUMOS Challenge 2014)
@@ -154,12 +154,14 @@ v0.12.0 版本已于 2021 年 2 月 28 日发布,可通过查阅 [更新日志
(点击收起) -- [x] [AVA](/tools/data/ava/README.md) \[ [主页](https://research.google.com/ava/index.html) \] (CVPR'2018) -- [x] [UCF101-24](/tools/data/ucf101_24/README.md) \[ [主页](http://www.thumos.info/download.html) \] (CRCV-IR-12-01) -- [x] [JHMDB](/tools/data/jhmdb/README.md) \[ [主页](http://jhmdb.is.tue.mpg.de/) \] (ICCV'2013) +- ✅ [AVA](/tools/data/ava/README.md) \[ [主页](https://research.google.com/ava/index.html) \] (CVPR'2018) +- 🔲 [UCF101-24](/tools/data/ucf101_24/README.md) \[ [主页](http://www.thumos.info/download.html) \] (CRCV-IR-12-01) +- 🔲 [JHMDB](/tools/data/jhmdb/README.md) \[ [主页](http://jhmdb.is.tue.mpg.de/) \] (ICCV'2013)
+标记 🔲 代表对应数据集并未被完全支持,但提供相应的数据准备步骤。 + ## 安装 请参考 [安装指南](/docs_zh_CN/install.md) 进行安装 From c3289ff7221ae0fe41bb41ba5edc20be860afb8b Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Tue, 30 Mar 2021 15:15:22 +0800 Subject: [PATCH 006/414] update maximum version of MMCV (#769) --- mmaction/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmaction/__init__.py b/mmaction/__init__.py index f1e036546c..7a516d40fc 100644 --- a/mmaction/__init__.py +++ b/mmaction/__init__.py @@ -4,7 +4,7 @@ from .version import __version__ mmcv_minimum_version = '1.2.6' -mmcv_maximum_version = '1.3' +mmcv_maximum_version = '1.4.0' mmcv_version = digit_version(mmcv.__version__) assert (digit_version(mmcv_minimum_version) <= mmcv_version From e05a750fc3fd965d538d780a2ebb473488aafad6 Mon Sep 17 00:00:00 2001 From: congee <35596075+congee524@users.noreply.github.com> Date: Tue, 30 Mar 2021 16:36:30 +0800 Subject: [PATCH 007/414] [docs] LFB README_zh-CN.md (#761) * draft * polish * refine * fix * fix --- configs/detection/lfb/README.md | 2 +- configs/detection/lfb/README_zh-CN.md | 103 ++++++++++++++++++++++++++ 2 files changed, 104 insertions(+), 1 deletion(-) create mode 100644 configs/detection/lfb/README_zh-CN.md diff --git a/configs/detection/lfb/README.md b/configs/detection/lfb/README.md index ba04da7a69..1d90b4b0af 100644 --- a/configs/detection/lfb/README.md +++ b/configs/detection/lfb/README.md @@ -81,7 +81,7 @@ For more details and optional arguments infos, you can refer to **Training setti Before train or test lfb, you also need to infer long-term feature bank first. If you have generated the feature bank file, you can skip it. -The step is the same with **Infer long-term feature bank for training** part in [Train](#Trian). +The step is the same with **Infer long-term feature bank for training** part in [Train](#Train). ### b. 
Test LFB diff --git a/configs/detection/lfb/README_zh-CN.md b/configs/detection/lfb/README_zh-CN.md new file mode 100644 index 0000000000..6df55b0d6b --- /dev/null +++ b/configs/detection/lfb/README_zh-CN.md @@ -0,0 +1,103 @@ +# LFB + +## 简介 + +[ALGORITHM] + +```BibTeX +@inproceedings{wu2019long, + title={Long-term feature banks for detailed video understanding}, + author={Wu, Chao-Yuan and Feichtenhofer, Christoph and Fan, Haoqi and He, Kaiming and Krahenbuhl, Philipp and Girshick, Ross}, + booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition}, + pages={284--293}, + year={2019} +} +``` + +## 模型库 + +### AVA2.1 + +| 配置文件 | 模态 | 预训练 | 主干网络 | 输入 | GPU 数量 | 分辨率 | 平均精度 | log | json | ckpt | +| :----------------------------------------------------------: | :------: | :----------: | :-------: | :---: | :--: | :------------: | :--: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py](/configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | [slowonly_r50_4x16x1](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | 4x16 | 8 | 短边 256 | 24.11 | [log](https://download.openmmlab.com/mmaction/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210224_125052.log) | [json](https://download.openmmlab.com/mmaction/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210224_125052.log.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb_20210224-2ae136d9.pth) | +| [lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py](/configs/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | [slowonly_r50_4x16x1](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | 4x16 | 8 | 短边 256 | 20.17 | [log](https://download.openmmlab.com/mmaction/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log) | [json](https://download.openmmlab.com/mmaction/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb_20210301-19c330b7.pth) | +| [lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py](/configs/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | [slowonly_r50_4x16x1](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | 4x16 | 8 | 短边 256 | 22.15 | [log](https://download.openmmlab.com/mmaction/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log) | [json](https://download.openmmlab.com/mmaction/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb_20210301-37efcd15.pth) | + +- 注: + +1. 
这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。
+   依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。
+   如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。
+2. 本 LFB 模型暂没有使用原论文中的 `I3D-R50-NL` 作为主干网络,而是用 `slowonly_r50_4x16x1` 替代,但取得了同样的提升效果(本模型:20.1 -> 24.11,而原论文模型:22.1 -> 25.8)。
+3. 因为测试时,长时特征是被随机采样的,所以测试精度可能有一些偏差。
+4. 在训练或测试 LFB 之前,用户需要使用配置文件 [lfb_slowonly_r50_ava_infer.py](/configs/detection/lfb/lfb_slowonly_r50_ava_infer.py) 来推导长时特征库。有关推导长时特征库的更多细节,请参照[训练部分](#训练)。
+5. 用户也可以直接从 [AVA_train_val_float32_lfb](https://download.openmmlab.com/mmaction/detection/lfb/AVA_train_val_float32_lfb.rar) 或者 [AVA_train_val_float16_lfb](https://download.openmmlab.com/mmaction/detection/lfb/AVA_train_val_float16_lfb.rar) 下载 float32 或 float16 的长时特征库,并把它们放在 `lfb_prefix_path` 上。
+
+## 训练
+
+### a. 为训练 LFB 推导长时特征库
+
+在训练或测试 LFB 之前,用户首先需要推导长时特征库。
+
+具体来说,使用配置文件 [lfb_slowonly_r50_ava_infer](/configs/detection/lfb/lfb_slowonly_r50_ava_infer.py),在训练集、验证集、测试集上都运行一次模型测试。
+
+配置文件的默认设置是推导训练集的长时特征库,用户需要将 `dataset_mode` 设置成 `'val'` 来推导验证集的长时特征库。在推导过程中,共享头 [LFBInferHead](/mmaction/models/heads/lfb_infer_head.py) 会生成长时特征库。
+
+AVA 训练集和验证集的 float32 精度的长时特征库文件大约占 3.3 GB。如果以半精度来存储长时特征,文件大约占 1.65 GB。
+
+用户可以使用以下命令来推导 AVA 训练集和验证集的长时特征库,特征库会被存储为 `lfb_prefix_path/lfb_train.pkl` 和 `lfb_prefix_path/lfb_val.pkl`。
+
+```shell
+# 在 lfb_slowonly_r50_ava_infer.py 中设置 `dataset_mode = 'train'`
+python tools/test.py configs/detection/lfb/lfb_slowonly_r50_ava_infer.py \
+    checkpoints/YOUR_BASELINE_CHECKPOINT.pth --eval mAP
+
+# 在 lfb_slowonly_r50_ava_infer.py 中设置 `dataset_mode = 'val'`
+python tools/test.py configs/detection/lfb/lfb_slowonly_r50_ava_infer.py \
+    checkpoints/YOUR_BASELINE_CHECKPOINT.pth --eval mAP
+```
+
+MMAction2 使用配置文件 [slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) 对应的模型权重文件 [slowonly_r50_4x16x1 checkpoint](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217-40061d5f.pth) 作为 LFB 模型主干网络的预训练模型,用于推导长时特征库。
+
+### b. 训练 LFB
+
+用户可以使用以下指令进行模型训练。
+
+```shell
+python tools/train.py ${CONFIG_FILE} [optional arguments]
+```
+
+例如:使用半精度的长时特征库在 AVA 数据集上训练 LFB 模型。
+
+```shell
+python tools/train.py configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py \
+  --validate --seed 0 --deterministic
+```
+
+更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。
+
+## 测试
+
+### a. 为测试 LFB 推导长时特征库
+
+在训练或测试 LFB 之前,用户首先需要推导长时特征库。如果用户之前已经生成了特征库文件,可以跳过这一步。
+
+这一步做法与[训练部分](#训练)中的 **为训练 LFB 推导长时特征库** 相同。
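+
+在 b 小节中将会用到半精度特征库。以下是一个把 float32 特征库转为 float16 的示意脚本(仅为示意:这里假设特征库是 `mmcv.load` 可直接读取、以 `torch.Tensor` 为叶子节点的嵌套字典,输出文件名 `half_lfb_train.pkl` 等亦为举例;实际数据结构请以 `LFBInferHead` 的实现为准):
+
+```python
+# 示意脚本:递归地将特征库中的张量转为 float16,存储空间约减半
+import mmcv
+import torch
+
+
+def to_half(obj):
+    # 仅转换张量,其余对象(数字、字符串等)原样保留
+    if isinstance(obj, torch.Tensor):
+        return obj.half()
+    if isinstance(obj, dict):
+        return {k: to_half(v) for k, v in obj.items()}
+    if isinstance(obj, (list, tuple)):
+        return type(obj)(to_half(v) for v in obj)
+    return obj
+
+
+for name in ['lfb_train.pkl', 'lfb_val.pkl']:
+    lfb = mmcv.load(f'lfb_prefix_path/{name}')
+    mmcv.dump(to_half(lfb), f'lfb_prefix_path/half_{name}')
+```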
+
+### b. 测试 LFB
+
+用户可以使用以下指令进行模型测试。
+
+```shell
+python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments]
+```
+
+例如:使用半精度的长时特征库在 AVA 数据集上测试 LFB 模型,并将结果导出为一个 csv 文件。
+
+```shell
+python tools/test.py configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py \
+    checkpoints/SOME_CHECKPOINT.pth --eval mAP --out results.csv
+```
+
+更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。
From a0eb70aebc74a3b6834659a4425ec6b8e210f752 Mon Sep 17 00:00:00 2001
From: congee <35596075+congee524@users.noreply.github.com>
Date: Tue, 30 Mar 2021 16:38:41 +0800
Subject: [PATCH 008/414] add unittest for average_clips=None (#765)

---
 .../test_common_modules/test_base_recognizers.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/tests/test_models/test_common_modules/test_base_recognizers.py b/tests/test_models/test_common_modules/test_base_recognizers.py
index e8d6f22505..9d0a72bf19 100644
--- a/tests/test_models/test_common_modules/test_base_recognizers.py
+++ b/tests/test_models/test_common_modules/test_base_recognizers.py
@@ -45,6 +45,12 @@ def test_base_recognizer():
     recognizer = ExampleRecognizer(None, None)
     recognizer(torch.tensor(0))

+    # average_clips=None
+    test_cfg = dict(average_clips=None)
+    recognizer = ExampleRecognizer(None, test_cfg)
+    score = recognizer.average_clip(cls_score, num_segs=5)
+    assert torch.equal(score, cls_score)
+
     # average_clips='score'
     test_cfg = dict(average_clips='score')
     recognizer = ExampleRecognizer(None, test_cfg)
From 5d378e544eeb3a7f716a82af2d295ea23bf5c4cc Mon Sep 17 00:00:00 2001
From: congee <35596075+congee524@users.noreply.github.com>
Date: Wed, 31 Mar 2021 15:44:25 +0800
Subject: [PATCH 009/414] [Fix] Bump mmcv version and fix CI (#774)

* Bump mmcv version and fix CI

* fix bugs

* fix
---
 .github/workflows/build.yml                       | 42 +++++++++----------
 .../test_recognizers/test_recognizer3d.py         |  2 +-
 2 files changed, 22 insertions(+), 22 deletions(-)

diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index bb8905af61..8cf2e2e0a5 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -46,7 +46,7 @@ jobs:
       run: pip install pip --upgrade
     - name: Install Pillow
       run: pip install Pillow==6.2.2
-      if: ${{matrix.torchvision == '0.4.2'}}
+      if: ${{matrix.torchvision < 0.5}}
     - name: Install soundfile lib
       run: sudo apt-get install -y libsndfile1
     - name: Install onnx
@@ -60,7 +60,7 @@
     - name: Install PyTorch
       run: pip install torch==${{matrix.torch}}+cpu torchvision==${{matrix.torchvision}}+cpu -f https://download.pytorch.org/whl/torch_stable.html
     - name: Install MMCV
-      run: pip install mmcv-full==1.2.6 -f https://download.openmmlab.com/mmcv/dist/cpu/torch${{matrix.torch}}/index.html
+      run: pip install mmcv-full==1.3.0 -f https://download.openmmlab.com/mmcv/dist/cpu/torch${{matrix.torch}}/index.html
     - name: Install MMDet
       run: pip install git+https://github.com/open-mmlab/mmdetection/
     - name: Install MMCls
@@ -84,27 +84,21 @@ jobs:
   strategy:
     matrix:
       python-version: [3.7]
-      torch: [1.3.0, 1.5.0+cu101, 1.6.0+cu101, 1.7.0+cu101]
+      torch: [1.3.0, 1.5.0, 1.6.0, 1.7.0]
       include:
        - torch: 1.3.0
          torchvision: 0.4.1
-          mmcv: "cu101/torch1.3.0"
-        - torch: 1.5.0+cu101
-          torchvision: 0.6.0+cu101
-          mmcv: "cu101/torch1.5.0"
-        - torch: 1.6.0+cu101
-          torchvision: 0.7.0+cu101
-          mmcv: "cu101/torch1.6.0"
-        - torch: 1.7.0+cu101
-          torchvision: 0.8.1+cu101
-          mmcv: "cu101/torch1.7.0"
-        - torch: 1.7.0+cu101
-          torchvision: 0.8.1+cu101
-          mmcv: "cu101/torch1.7.0"
+        - torch: 1.5.0
+          torchvision: 0.6.0
- torch: 1.6.0 + torchvision: 0.7.0 + - torch: 1.7.0 + torchvision: 0.8.1 + - torch: 1.7.0 + torchvision: 0.8.1 python-version: 3.6 - - torch: 1.7.0+cu101 - torchvision: 0.8.1+cu101 - mmcv: "cu101/torch1.7.0" + - torch: 1.7.0 + torchvision: 0.8.1 python-version: 3.8 steps: @@ -141,10 +135,16 @@ jobs: - name: Install lmdb run: pip install lmdb - name: Install PyTorch - run: pip install torch==${{matrix.torch}} torchvision==${{matrix.torchvision}} -f https://download.pytorch.org/whl/torch_stable.html + run: | + if [ ${{matrix.torch}} == '1.3.0' ] + then + pip install torch==${{matrix.torch}} torchvision==${{matrix.torchvision}} -f https://download.pytorch.org/whl/torch_stable.html + else + pip install torch==${{matrix.torch}}+cu101 torchvision==${{matrix.torchvision}}+cu101 -f https://download.pytorch.org/whl/torch_stable.html + fi - name: Install mmaction dependencies run: | - pip install mmcv-full==1.2.6 -f https://download.openmmlab.com/mmcv/dist/${{matrix.mmcv}}/index.html + pip install mmcv-full==1.3.0 -f https://download.openmmlab.com/mmcv/dist/cu101/torch${{matrix.torch}}/index.html pip install -q git+https://github.com/open-mmlab/mmdetection/ pip install -q git+https://github.com/open-mmlab/mmclassification/ pip install -r requirements.txt diff --git a/tests/test_models/test_recognizers/test_recognizer3d.py b/tests/test_models/test_recognizers/test_recognizer3d.py index e46543c5ed..9fab8ee627 100644 --- a/tests/test_models/test_recognizers/test_recognizer3d.py +++ b/tests/test_models/test_recognizers/test_recognizer3d.py @@ -214,7 +214,7 @@ def test_tpn(): recognizer = build_recognizer(config.model) - input_shape = (1, 8, 3, 1, 224, 224) + input_shape = (1, 8, 3, 1, 32, 32) demo_inputs = generate_recognizer_demo_inputs(input_shape, '3D') imgs = demo_inputs['imgs'] From a6356a0c3b26be54fb199106d38c37d098cafa80 Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Wed, 31 Mar 2021 15:46:07 +0800 Subject: [PATCH 010/414] Cn readme (#775) * resolve comments * update changelog * CN_README --- configs/detection/ava/README_zh-CN.md | 121 ++++++++++++++++++ configs/recognition/omnisource/README.md | 2 +- .../recognition/omnisource/README_zh-CN.md | 70 ++++++++++ 3 files changed, 192 insertions(+), 1 deletion(-) create mode 100644 configs/detection/ava/README_zh-CN.md create mode 100644 configs/recognition/omnisource/README_zh-CN.md diff --git a/configs/detection/ava/README_zh-CN.md b/configs/detection/ava/README_zh-CN.md new file mode 100644 index 0000000000..b3f049226b --- /dev/null +++ b/configs/detection/ava/README_zh-CN.md @@ -0,0 +1,121 @@ +# AVA + +
+ +
+ +## 简介 + +[DATASET] + +```BibTeX +@inproceedings{gu2018ava, + title={Ava: A video dataset of spatio-temporally localized atomic visual actions}, + author={Gu, Chunhui and Sun, Chen and Ross, David A and Vondrick, Carl and Pantofaru, Caroline and Li, Yeqing and Vijayanarasimhan, Sudheendra and Toderici, George and Ricco, Susanna and Sukthankar, Rahul and others}, + booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition}, + pages={6047--6056}, + year={2018} +} +``` + +[ALGORITHM] + +```BibTeX +@article{duan2020omni, + title={Omni-sourced Webly-supervised Learning for Video Recognition}, + author={Duan, Haodong and Zhao, Yue and Xiong, Yuanjun and Liu, Wentao and Lin, Dahua}, + journal={arXiv preprint arXiv:2003.13042}, + year={2020} +} +``` + +[ALGORITHM] + +```BibTeX +@inproceedings{feichtenhofer2019slowfast, + title={Slowfast networks for video recognition}, + author={Feichtenhofer, Christoph and Fan, Haoqi and Malik, Jitendra and He, Kaiming}, + booktitle={Proceedings of the IEEE international conference on computer vision}, + pages={6202--6211}, + year={2019} +} +``` + +## 模型库 + +### AVA2.1 + +| 配置文件 | 模态 | 预训练 | 主干网络 | 输入 | GPU 数量 | 分辨率 | mAP | log | json | ckpt | +| :----------------------------------------------------------: | :------: | :----------: | :-------: | :---: | :--: | :------------: | :--: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 4x16 | 8 | 短边 256 | 20.1 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201127.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201127.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217-40061d5f.pth) | +| [slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb.py) | RGB | OmniSource | ResNet50 | 4x16 | 8 | 短边 256 | 21.8 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb_20201127.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb_20201127.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb_20201217-0c6d2e98.pth) | +| [slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb](/configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 4x16 | 8 | 短边 256 | 21.75 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb/20210316_122517.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb/20210316_122517.log.json) | 
[ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb_20210316-959829ec.pth) | +| [slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb](/configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 8x8 | 8x2 | 短边 256 | 23.79 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb/20210316_122517.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb/20210316_122517.log.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb_20210316-5742e4dd.pth) | +| [slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb](/configs/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet101 | 8x8 | 8x2 | 短边 256 | 24.6 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb_20201127.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb_20201127.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb_20201217-1c9b4117.pth) | +| [slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb](/configs/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb.py) | RGB | OmniSource | ResNet101 | 8x8 | 8x2 | 短边 256 | 25.9 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201127.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201127.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201217-16378594.pth) | +| [slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8x2 | 短边 256 | 24.4 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217-6e7c704d.pth) | +| [slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8x2 | 短边 256 | 25.4 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201222.log) | 
[json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201222.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201222-f4d209c9.pth) |
+| [slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb](/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8x2 | 短边 256 | 25.5 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217-ae225e97.pth) |
+
+注:
+
+1. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。
+   依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。
+   如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。
+2. **Context** 表示同时使用 RoI 特征与全局特征进行分类,可带来约 1% mAP 的提升。
+
+对于数据集准备的细节,用户可参考 [数据准备](/docs_zh_CN/data_preparation.md)。
+
+## 如何训练
+
+用户可以使用以下指令进行模型训练。
+
+```shell
+python tools/train.py ${CONFIG_FILE} [optional arguments]
+```
+
+例如:在 AVA 数据集上训练 SlowOnly,并定期验证。
+
+```shell
+python tools/train.py configs/detection/ava/slowonly_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py --validate
+```
+
+更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。
+
+### 训练 AVA 数据集中的自定义类别
+
+用户可以训练 AVA 数据集中的自定义类别。AVA 中不同类别的样本量很不平衡:其中有超过 100000 样本的类别:`stand`/`listen to (a person)`/`talk to (e.g., self, a person, a group)`/`watch (a person)`,也有样本较少的类别(半数类别不足 500 样本)。大多数情况下,仅使用样本较少的类别进行训练将在这些类别上得到更好的精度。
+
+训练 AVA 数据集中的自定义类别包含 3 个步骤(步骤 1、2 涉及的配置写法可参考列表后的示意片段):
+
+1. 从原先的类别中选择希望训练的类别,将其填写至配置文件的 `custom_classes` 域中。其中 `0` 不表示具体的动作类别,不应被选择。
+2. 将 `num_classes` 设置为 `num_classes = len(custom_classes) + 1`。
+   - 在新的类别到编号的对应中,编号 `0` 仍对应原类别 `0`,编号 `i` (i > 0) 对应原类别 `custom_classes[i-1]`。
+   - 配置文件中 3 处涉及 `num_classes` 需要修改:`model -> roi_head -> bbox_head -> num_classes`、`data -> train -> num_classes`、`data -> val -> num_classes`。
+   - 若 `num_classes <= 5`,配置文件 `BBoxHeadAVA` 中的 `topk` 参数应被修改。`topk` 的默认值为 `(3, 5)`,`topk` 中的所有元素应小于 `num_classes`。
+3. 确认所有自定义类别在 `label_file` 中。
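+
+以下是一个体现步骤 1、2 的示意配置片段(`custom_classes` 中的类别编号与 `topk` 取值仅为举例;将 `custom_classes` 写入 `data` 各字段的方式也是基于上文描述的假设,具体字段请以仓库内实际的 `*_custom_classes.py` 配置文件为准):
+
+```python
+# 示意片段:假设仅训练 3 个自定义类别(类别编号为随意举例)
+custom_classes = [11, 12, 17]
+num_classes = len(custom_classes) + 1  # 即 4:编号 0 保留,不对应具体动作
+
+model = dict(
+    roi_head=dict(bbox_head=dict(
+        num_classes=num_classes,
+        # num_classes <= 5 时需调整 topk,使其中所有元素均小于 num_classes
+        topk=(2, 3))))
+data = dict(
+    train=dict(custom_classes=custom_classes, num_classes=num_classes),
+    val=dict(custom_classes=custom_classes, num_classes=num_classes))
+```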
+
+以 `slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb` 为例,这一配置文件训练所有 AP 在 `(0.1, 0.3)` 间的类别(这里的 AP 为 AVA 80 类训出模型的表现),即 `[3, 6, 10, 27, 29, 38, 41, 48, 51, 53, 54, 59, 61, 64, 70, 72]`。下表列出了自定义类别训练的模型精度:
+
+|训练类别|mAP (自定义类别)|配置文件|log|json|ckpt|
+|:-:|:-:|:-:|:-:|:-:|:-:|
+|全部 80 类|0.1948|[slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py)|[log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201127.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201127.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217-40061d5f.pth) |
+|自定义类别|0.3311|[slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py)| [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes-4ab80419.pth) |
+|全部 80 类|0.1864|[slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py)| [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217-6e7c704d.pth) |
+|自定义类别|0.3785|[slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes](/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py)| [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes_20210305.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes_20210305.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes_20210305-c6225546.pth) |
+
+## 如何测试
+
+用户可以使用以下指令进行模型测试。
+
+```shell
+python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments]
+```
+
+例如:在 AVA 上测试 SlowOnly 模型,并将结果存为 csv 文件。
+
+```shell
+python tools/test.py configs/detection/ava/slowonly_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py checkpoints/SOME_CHECKPOINT.pth --eval mAP --out results.csv
+```
+
+更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。
diff --git a/configs/recognition/omnisource/README.md b/configs/recognition/omnisource/README.md
index d978c64da2..39197b96d9 100644
--- a/configs/recognition/omnisource/README.md
+++ b/configs/recognition/omnisource/README.md
@@ -23,7 +23,7 @@ We currently released 4 models trained with OmniSource framework, including bot

 We release a subset of web dataset used in the OmniSource paper. Specifically, we release the web data in the 200 classes of [Mini-Kinetics](https://arxiv.org/pdf/1712.04851.pdf). The statistics of those datasets are detailed in [preparing_omnisource](/tools/data/omnisource/README.md). To obtain those data, you need to fill in a [data request form](https://docs.google.com/forms/d/e/1FAIpQLSd8_GlmHzG8FcDbW-OEu__G7qLgOSYZpH-i5vYVJcu7wcb_TQ/viewform?usp=sf_link). After we received your request, the download link of these data will be sent to you. For more details on the released OmniSource web dataset, please refer to [preparing_omnisource](/tools/data/omnisource/README.md).

-We benchmark the OmniSource framework on the released subset, results are listed in the following table (we report the Top-1 and Top-5 accuracy on Mini-Kinetics validation). The cbenchmark can be used as a baseline for video recognition with web data.
+We benchmark the OmniSource framework on the released subset, results are listed in the following table (we report the Top-1 and Top-5 accuracy on Mini-Kinetics validation). The benchmark can be used as a baseline for video recognition with web data.

 ### TSN-8seg-ResNet50

diff --git a/configs/recognition/omnisource/README_zh-CN.md b/configs/recognition/omnisource/README_zh-CN.md
new file mode 100644
index 0000000000..2fb8b0e1c6
--- /dev/null
+++ b/configs/recognition/omnisource/README_zh-CN.md
@@ -0,0 +1,70 @@
+# Omni-sourced Webly-supervised Learning for Video Recognition
+
+[Haodong Duan](https://github.com/kennymckormick), [Yue Zhao](https://github.com/zhaoyue-zephyrus), [Yuanjun Xiong](https://github.com/yjxiong), Wentao Liu, [Dahua Lin](https://github.com/lindahua)
+
+In ECCV, 2020. [Paper](https://arxiv.org/abs/2003.13042)
+
+![pipeline](https://github.com/open-mmlab/mmaction2/blob/master/configs/recognition/omnisource/pipeline.png?raw=true)
+
+## 模型库
+
+### Kinetics-400
+
+MMAction2 当前公开了 4 个 OmniSource 框架训练的模型,包含 2D 架构与 3D 架构。下表比较了使用或不使用 OmniSource 框架训练得到的模型在 Kinetics-400 上的精度:
+
+| 模型 | 模态 | 预训练 | 主干网络 | 输入 | 分辨率 | Top-1 准确率(Baseline / OmniSource (Delta)) | Top-5 准确率(Baseline / OmniSource (Delta)) | 模型下载链接 |
+| :------: | :--: | :------: | :-------: | :--: | :------------: | :-----------------------------------------: | :------------------------------------------: | :----------------------------------------------------------: |
+| TSN | RGB | ImageNet | ResNet50 | 3seg | 340x256 | 70.6 / 73.6 (+ 3.0) | 89.4 / 91.0 (+ 1.6) | [Baseline](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_imagenet_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-54192355.pth) |
+| TSN | RGB | IG-1B | ResNet50 | 3seg | 短边 320 | 73.1 / 75.7 (+ 2.6) | 90.4 / 91.9 (+ 1.5) | [Baseline](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_without_omni_1x1x3_kinetics400_rgb_20200926-c133dd49.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-2863fed0.pth) |
+| SlowOnly | RGB | Scratch | ResNet50 | 4x16 | 短边 320 | 72.9 / 76.8 (+ 3.9) | 90.9 / 92.5 (+ 1.6) | [Baseline](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r50_omni_4x16x1_kinetics400_rgb_20200926-51b1f7ea.pth) |
+| SlowOnly | RGB | Scratch | ResNet101 | 8x8 | 短边 320 | 76.5 / 80.4 (+ 3.9) | 92.7 / 94.4 (+ 1.7) | [Baseline](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_without_omni_8x8x1_kinetics400_rgb_20200926-0c730aef.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_omni_8x8x1_kinetics400_rgb_20200926-b5dbb701.pth) |
+
+## Mini-Kinetics 上的基准测试
+
+OmniSource 项目当前公开了所采集网络数据的一个子集,涉及 [Mini-Kinetics](https://arxiv.org/pdf/1712.04851.pdf) 中的 200 个动作类别。[OmniSource 数据集准备](/tools/data/omnisource/README_zh-CN.md) 中记录了这些数据集的详细统计信息。用户可以通过填写 [申请表](https://docs.google.com/forms/d/e/1FAIpQLSd8_GlmHzG8FcDbW-OEu__G7qLgOSYZpH-i5vYVJcu7wcb_TQ/viewform?usp=sf_link) 获取这些数据,在完成填写后,数据下载链接会被发送至用户邮箱。更多关于 OmniSource 网络数据集的信息请参照 [OmniSource 数据集准备](/tools/data/omnisource/README_zh-CN.md)。
+
+MMAction2 在公开的数据子集上进行了 OmniSource 框架的基准测试,下表记录了详细的结果(在 Mini-Kinetics 验证集上的精度),这些结果可以作为使用网络数据训练视频识别任务的基线。
+
+### TSN-8seg-ResNet50
+
+| Setting | Top-1 | Top-5 | ckpt | json | log |
+| :----------: | :---: | :---: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: |
+| Baseline | 77.4 | 93.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030-b4eaf92b.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030.json) | 
[log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030.log) | +| +GG-img | 78.0 | 93.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030-23966b4b.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030.log) | +| +[GG-IG]-img | 78.6 | 93.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030-66f5e046.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030.log) | +| +IG-vid | 80.6 | 95.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030-011f984d.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030.log) | +| +KRaw | 78.6 | 93.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030-59f5d064.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030.log) | +| OmniSource | 81.3 | 94.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030-0f56ef51.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030.log) | + +### SlowOnly-8x8-ResNet50 + +| Setting | Top-1 | Top-5 | ckpt | json | log | +| :----------: | :---: | :---: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| Baseline | 78.6 | 93.9 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030-168eb098.pth) 
| [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030.log) | +| +GG-img | 80.8 | 95.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030-7da6dfc3.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030.log) | +| +[GG-IG]-img | 81.3 | 95.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030-c36616e9.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030.log) | +| +IG-vid | 82.4 | 95.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030-e2890e8d.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030.log) | +| +KRaw | 80.3 | 94.5 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030-62974bac.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030.log) | +| OmniSource | 82.9 | 95.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030-284cfd3b.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030.log) | + +下表列出了原论文中在 Kinetics-400 上进行基准测试的结果供参考: + +| Model | Baseline | +GG-img | +[GG-IG]-img | +IG-vid | +KRaw | OmniSource | +| :--------------------: | :---------: | 
:---------: | :----------: | :---------: | :---------: | :---------: | +| TSN-3seg-ResNet50 | 70.6 / 89.4 | 71.5 / 89.5 | 72.0 / 90.0 | 72.0 / 90.3 | 71.7 / 89.6 | 73.6 / 91.0 | +| SlowOnly-4x16-ResNet50 | 73.8 / 90.9 | 74.5 / 91.4 | 75.2 / 91.6 | 75.2 / 91.7 | 74.5 / 91.1 | 76.6 / 92.5 | + +## 注: + +如果 OmniSource 项目对您的研究有所帮助,请使用以下 BibTex 项进行引用: + +[ALGORITHM] + +```BibTeX +@article{duan2020omni, + title={Omni-sourced Webly-supervised Learning for Video Recognition}, + author={Duan, Haodong and Zhao, Yue and Xiong, Yuanjun and Liu, Wentao and Lin, Dahua}, + journal={arXiv preprint arXiv:2003.13042}, + year={2020} +} +``` From 78eb4468f5bfbccca9dc94b0ff3c5b5bc975534e Mon Sep 17 00:00:00 2001 From: congee <35596075+congee524@users.noreply.github.com> Date: Wed, 31 Mar 2021 15:57:59 +0800 Subject: [PATCH 011/414] [Docs] Update indexing of config readme (#772) * modify stat.py merge_docs * unify merge_docs style * fix bugs * fix bugs --- docs/merge_docs.sh | 45 +++++++++++++--------------------------- docs_zh_CN/merge_docs.sh | 45 ++++++++++++---------------------------- 2 files changed, 27 insertions(+), 63 deletions(-) diff --git a/docs/merge_docs.sh b/docs/merge_docs.sh index 9749f367f1..884c95e231 100755 --- a/docs/merge_docs.sh +++ b/docs/merge_docs.sh @@ -2,6 +2,18 @@ sed -i '$a\\n' ../demo/README.md +# gather models +cat ../configs/localization/*/README.md | sed "s/md###t/html#t/g" | sed "s/#/#&/" | sed '1i\# Action Localization Models' | sed 's/](\/docs\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' > localization_models.md +cat ../configs/recognition/*/README.md | sed "s/md###t/html#t/g" | sed "s/#/#&/" | sed '1i\# Action Recognition Models' | sed 's/](\/docs\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' > recognition_models.md +cat ../configs/recognition_audio/*/README.md | sed "s/md###t/html#t/g" | sed "s/#/#&/" | sed 's/](\/docs\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' >> recognition_models.md +cat ../configs/detection/*/README.md | sed "s/md###t/html#t/g" | sed "s/#/#&/" | sed '1i\# Spatio Temporal Action Detection Models' | sed 's/](\/docs\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' > detection_models.md + +# demo +cat ../demo/README.md | sed "s/md###t/html#t/g" | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' > demo.md + +# gather datasets +cat ../tools/data/*/README.md | sed 's/# Preparing/# /g' | sed 's/#/#&/' > prepare_data.md + sed -i 's/(\/tools\/data\/activitynet\/README.md/(#activitynet/g' supported_datasets.md sed -i 's/(\/tools\/data\/kinetics\/README.md/(#kinetics-400-600-700/g' supported_datasets.md sed -i 's/(\/tools\/data\/mit\/README.md/(#moments-in-time/g' supported_datasets.md @@ -18,42 +30,13 @@ sed -i 's/(\/tools\/data\/jester\/README.md/(#jester/g' supported_datasets.md sed -i 's/(\/tools\/data\/ava\/README.md/(#ava/g' supported_datasets.md sed -i 's/(\/tools\/data\/gym\/README.md/(#gym/g' supported_datasets.md -cat ../configs/localization/*/*.md > localization_models.md -cat ../configs/recognition/*/*.md > recognition_models.md -cat ../configs/recognition_audio/*/*.md >> recognition_models.md -cat ../configs/detection/*/*.md > detection_models.md -cat ../tools/data/*/README.md > prepare_data.md -cat ../demo/README.md > demo.md - -sed -i 's/#/#&/' localization_models.md -sed -i 's/#/#&/' recognition_models.md -sed -i 's/#/#&/' detection_models.md -sed -i 's/md###t/html#t/g' localization_models.md -sed 
-i 's/md###t/html#t/g' recognition_models.md -sed -i 's/md###t/html#t/g' detection_models.md -sed -i "s/md###t/html#t/g" demo.md - -sed -i 's/# Preparing/# /g' prepare_data.md -sed -i 's/#/#&/' prepare_data.md - -sed -i '1i\# Action Localization Models' localization_models.md -sed -i '1i\# Action Recognition Models' recognition_models.md -sed -i '1i\# Spatio Temporal Action Detection Models' detection_models.md - cat prepare_data.md >> supported_datasets.md +sed -i 's/](\/docs\//](/g' supported_datasets.md +sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' supported_datasets.md -sed -i 's/](\/docs\//](/g' recognition_models.md # remove /docs/ for link used in doc site -sed -i 's/](\/docs\//](/g' localization_models.md -sed -i 's/](\/docs\//](/g' detection_models.md -sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' recognition_models.md -sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' localization_models.md -sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' detection_models.md sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' benchmark.md sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' getting_started.md sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' install.md sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' changelog.md sed -i 's/](\/docs\//](/g' ./tutorials/*.md sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' ./tutorials/*.md -sed -i 's/](\/docs\//](/g' supported_datasets.md -sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' supported_datasets.md -sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' demo.md diff --git a/docs_zh_CN/merge_docs.sh b/docs_zh_CN/merge_docs.sh index bc7eff788b..07252f8bc7 100755 --- a/docs_zh_CN/merge_docs.sh +++ b/docs_zh_CN/merge_docs.sh @@ -1,4 +1,12 @@ #!/usr/bin/env bash +# gather models +cat ../configs/localization/*/README_zh-CN.md | sed "s/md###t/html#t/g" | sed "s/#/#&/" | sed '1i\# 时序动作检测模型' | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' > localization_models.md +cat ../configs/recognition/*/README_zh-CN.md | sed "s/md###t/html#t/g" | sed "s/#/#&/" | sed '1i\# 动作识别模型' | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' > recognition_models.md +cat ../configs/recognition_audio/*/README_zh-CN.md | sed "s/md###t/html#t/g" | sed "s/#/#&/" | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' >> recognition_models.md +cat ../configs/detection/*/README_zh-CN.md | sed "s/md###t/html#t/g" | sed "s/#/#&/" | sed '1i\# 时空动作检测模型' | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' > detection_models.md + +# gather datasets +cat ../tools/data/*/README_zh-CN.md | sed 's/# 准备/# /g' | sed 's/#/#&/' > prepare_data.md sed -i 's/(\/tools\/data\/activitynet\/README_zh-CN.md/(#activitynet/g' supported_datasets.md sed -i 's/(\/tools\/data\/kinetics\/README_zh-CN.md/(#kinetics-400-600-700/g' supported_datasets.md @@ -16,41 +24,14 @@ sed -i 's/(\/tools\/data\/jester\/README_zh-CN.md/(#jester/g' supported_datasets sed -i 's/(\/tools\/data\/ava\/README_zh-CN.md/(#ava/g' supported_datasets.md sed -i 's/(\/tools\/data\/gym\/README_zh-CN.md/(#gym/g' supported_datasets.md -cat ../configs/localization/*/*.md > localization_models.md 
-cat ../configs/recognition/*/*.md > recognition_models.md -cat ../configs/recognition_audio/*/*.md >> recognition_models.md -cat ../configs/detection/*/*.md > detection_models.md -cat ../tools/data/*/README_zh-CN.md > prepare_data.md - -sed -i 's/#/#&/' localization_models.md -sed -i 's/#/#&/' recognition_models.md -sed -i 's/#/#&/' detection_models.md -sed -i 's/md###t/html#t/g' localization_models.md -sed -i 's/md###t/html#t/g' recognition_models.md -sed -i 's/md###t/html#t/g' detection_models.md -sed -i "s/md###t/html#t/g" demo.md - -sed -i 's/# 准备/# /g' prepare_data.md -sed -i 's/#/#&/' prepare_data.md - -sed -i '1i\# 时序动作检测模型' localization_models.md -sed -i '1i\# 动作识别模型' recognition_models.md -sed -i '1i\# 时空动作检测模型' detection_models.md - cat prepare_data.md >> supported_datasets.md +sed -i 's/](\/docs_zh_CN\//](/g' supported_datasets.md +sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' supported_datasets.md -sed -i 's/](\/docs\//](/g' recognition_models.md # remove /docs/ for link used in doc site -sed -i 's/](\/docs\//](/g' localization_models.md -sed -i 's/](\/docs\//](/g' detection_models.md -sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' recognition_models.md -sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' localization_models.md -sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' detection_models.md +sed -i "s/md###t/html#t/g" demo.md +sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' demo.md sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' benchmark.md sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' getting_started.md sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' install.md -sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' changelog.md -sed -i 's/](\/docs\//](/g' ./tutorials/*.md +sed -i 's/](\/docs_zh_CN\//](/g' ./tutorials/*.md sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' ./tutorials/*.md -sed -i 's/](\/docs\//](/g' supported_datasets.md -sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' supported_datasets.md -sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' demo.md From a2c814d4bb2a8479a8980debdf7616b8d270dd94 Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Wed, 31 Mar 2021 16:57:39 +0800 Subject: [PATCH 012/414] [Feature] Add `--test-last` & `--test-best` for `tools/train.py` to test checkpoints after training (#608) * resolve comments * update changelog * Test the final ckpt when training is over. * update * jintao revise * test_last or test_best * update * Test the final ckpt when training is over. * update * jintao revise * test_last or test_best * update * update docs * additional info * fix test_option * testing passed * Update train.py * Update train.py * fix lint * Test the final ckpt when training is over. 
* update * jintao revise * test_last or test_best * update * test_last or test_best * update * update docs * additional info * fix test_option * testing passed * Update train.py * Update train.py * fix lint Co-authored-by: Jintao Lin <528557675@qq.com> --- docs/getting_started.md | 2 + docs_zh_CN/getting_started.md | 2 + mmaction/apis/train.py | 75 ++++++++++++++++++++++++++++++++++- tools/train.py | 11 +++++ 4 files changed, 89 insertions(+), 1 deletion(-) diff --git a/docs/getting_started.md b/docs/getting_started.md index 067ce181e6..a8f8559a22 100644 --- a/docs/getting_started.md +++ b/docs/getting_started.md @@ -343,6 +343,8 @@ If you want to specify the working directory in the command, you can add an argu Optional arguments are: - `--validate` (**strongly recommended**): Perform evaluation at every k (default value is 5, which can be modified by changing the `interval` value in `evaluation` dict in each config file) epochs during the training. +- `--test-last`: Test the final checkpoint when training is over, save the prediction to `${WORK_DIR}/last_pred.pkl`. +- `--test-best`: Test the best checkpoint when training is over, save the prediction to `${WORK_DIR}/best_pred.pkl`. - `--work-dir ${WORK_DIR}`: Override the working directory specified in the config file. - `--resume-from ${CHECKPOINT_FILE}`: Resume from a previous checkpoint file. - `--gpus ${GPU_NUM}`: Number of gpus to use, which is only applicable to non-distributed training. diff --git a/docs_zh_CN/getting_started.md b/docs_zh_CN/getting_started.md index 0702611b34..201e072614 100644 --- a/docs_zh_CN/getting_started.md +++ b/docs_zh_CN/getting_started.md @@ -330,6 +330,8 @@ python tools/train.py ${CONFIG_FILE} [optional arguments] 可选参数为: - `--validate` (**强烈建议**):在训练期间每 k 个周期进行一次验证(默认值为 5,可通过修改每个配置文件中的 `evaluation` 字典变量的 `interval` 值进行改变)。 +- `--test-last`:在训练结束后使用最后一个检查点的参数进行测试,将测试结果存储在 `${WORK_DIR}/last_pred.pkl` 中。 +- `--test-best`:在训练结束后使用效果最好的检查点的参数进行测试,将测试结果存储在 `${WORK_DIR}/best_pred.pkl` 中。 - `--work-dir ${WORK_DIR}`:覆盖配置文件中指定的工作目录。 - `--resume-from ${CHECKPOINT_FILE}`:从以前的模型权重文件恢复训练。 - `--gpus ${GPU_NUM}`:使用的 GPU 数量,仅适用于非分布式训练。 diff --git a/mmaction/apis/train.py b/mmaction/apis/train.py index 2e88eab40f..a8a9a03363 100644 --- a/mmaction/apis/train.py +++ b/mmaction/apis/train.py @@ -1,15 +1,17 @@ import copy as cp +import os.path as osp import torch from mmcv.parallel import MMDataParallel, MMDistributedDataParallel from mmcv.runner import (DistSamplerSeedHook, EpochBasedRunner, OptimizerHook, - build_optimizer) + build_optimizer, get_dist_info) from mmcv.runner.hooks import Fp16OptimizerHook from ..core import (DistEvalHook, EvalHook, OmniSourceDistSamplerSeedHook, OmniSourceRunner) from ..datasets import build_dataloader, build_dataset from ..utils import PreciseBNHook, get_root_logger +from .test import multi_gpu_test def train_model(model, @@ -17,6 +19,7 @@ def train_model(model, cfg, distributed=False, validate=False, + test=dict(test_best=False, test_last=False), timestamp=None, meta=None): """Train model entry function. @@ -28,6 +31,10 @@ def train_model(model, distributed (bool): Whether to use distributed training. Default: False. validate (bool): Whether to do evaluation. Default: False. + test (dict): The testing option, with two keys: test_last & test_best. + The value is True or False, indicating whether to test the + corresponding checkpoint. + Default: dict(test_best=False, test_last=False). timestamp (str | None): Local time for runner. Default: None. 
meta (dict | None): Meta dict to record some important information. Default: None @@ -154,3 +161,69 @@ def train_model(model, if cfg.omnisource: runner_kwargs = dict(train_ratio=train_ratio) runner.run(data_loaders, cfg.workflow, cfg.total_epochs, **runner_kwargs) + + if test['test_last'] or test['test_best']: + best_ckpt_path = None + if test['test_best']: + if hasattr(eval_hook, 'best_ckpt_path'): + best_ckpt_path = eval_hook.best_ckpt_path + + if best_ckpt_path is None or not osp.exists(best_ckpt_path): + test['test_best'] = False + if best_ckpt_path is None: + runner.logger.info('Warning: test_best set as True, but ' + 'is not applicable ' + '(eval_hook.best_ckpt_path is None)') + else: + runner.logger.info('Warning: test_best set as True, but ' + 'is not applicable (best_ckpt ' + f'{best_ckpt_path} not found)') + if not test['test_last']: + return + + test_dataset = build_dataset(cfg.data.test, dict(test_mode=True)) + gpu_collect = cfg.get('evaluation', {}).get('gpu_collect', False) + tmpdir = cfg.get('evaluation', {}).get('tmpdir', + osp.join(cfg.work_dir, 'tmp')) + dataloader_setting = dict( + videos_per_gpu=cfg.data.get('videos_per_gpu', 1), + workers_per_gpu=cfg.data.get('workers_per_gpu', 1), + num_gpus=len(cfg.gpu_ids), + dist=distributed, + shuffle=False) + dataloader_setting = dict(dataloader_setting, + **cfg.data.get('test_dataloader', {})) + + test_dataloader = build_dataloader(test_dataset, **dataloader_setting) + + names, ckpts = [], [] + + if test['test_last']: + names.append('last') + ckpts.append(None) + if test['test_best']: + names.append('best') + ckpts.append(best_ckpt_path) + + for name, ckpt in zip(names, ckpts): + if ckpt is not None: + runner.load_checkpoint(ckpt) + + outputs = multi_gpu_test(runner.model, test_dataloader, tmpdir, + gpu_collect) + rank, _ = get_dist_info() + if rank == 0: + out = osp.join(cfg.work_dir, f'{name}_pred.pkl') + test_dataset.dump_results(outputs, out) + + eval_cfg = cfg.get('evaluation', {}) + for key in [ + 'interval', 'tmpdir', 'start', 'gpu_collect', + 'save_best', 'rule', 'by_epoch', 'broadcast_bn_buffers' + ]: + eval_cfg.pop(key, None) + + eval_res = test_dataset.evaluate(outputs, **eval_cfg) + runner.logger.info(f'Testing results of the {name} checkpoint') + for name, val in eval_res.items(): + runner.logger.info(f'{name}: {val:.04f}') diff --git a/tools/train.py b/tools/train.py index 8cb41397ab..9b0e67416a 100644 --- a/tools/train.py +++ b/tools/train.py @@ -28,6 +28,15 @@ def parse_args(): '--validate', action='store_true', help='whether to evaluate the checkpoint during training') + parser.add_argument( + '--test-last', + action='store_true', + help='whether to test the checkpoint after training') + parser.add_argument( + '--test-best', + action='store_true', + help=('whether to test the best checkpoint (if applicable) after ' + 'training')) group_gpus = parser.add_mutually_exclusive_group() group_gpus.add_argument( '--gpus', @@ -172,12 +181,14 @@ def main(): mmaction_version=__version__ + get_git_hash(digits=7), config=cfg.text) + test_option = dict(test_last=args.test_last, test_best=args.test_best) train_model( model, datasets, cfg, distributed=distributed, validate=args.validate, + test=test_option, timestamp=timestamp, meta=meta) From 1a48ce60dfa5481917537bf926dfb8ab84f2d8db Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Wed, 31 Mar 2021 20:48:38 +0800 Subject: [PATCH 013/414] add changelog (#776) --- docs/changelog.md | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git 
a/docs/changelog.md b/docs/changelog.md index 1dd4b135ed..11ba2da508 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -1,24 +1,36 @@ ## Changelog +### 0.13.0 (31/03/2021) + **Highlights** - Support LFB +- Support using backbone from MMCls/TorchVision +- Support Chinese documentation **New Features** - Support LFB ([#553](https://github.com/open-mmlab/mmaction2/pull/553)) - Support using backbones from MMCls for TSN ([#679](https://github.com/open-mmlab/mmaction2/pull/679)) - Support using backbones from TorchVision for TSN ([#720](https://github.com/open-mmlab/mmaction2/pull/720)) -- Support Mixup and Cutmix for recognizers [#681](https://github.com/open-mmlab/mmaction2/pull/681) +- Support Mixup and Cutmix for recognizers ([#681](https://github.com/open-mmlab/mmaction2/pull/681)) +- Support Chinese documentation **Improvements** - Add slowfast config/json/log/ckpt for training custom classes of AVA ([#678](https://github.com/open-mmlab/mmaction2/pull/678)) - Set RandAugment as Imgaug default transforms ([#585](https://github.com/open-mmlab/mmaction2/pull/585)) +- Add `--test-last` & `--test-best` for `tools/train.py` to test checkpoints after training ([#608](https://github.com/open-mmlab/mmaction2/pull/608) +- Add fcn_testing in TPN ([#684](https://github.com/open-mmlab/mmaction2/pull/684)) +- Remove redundant recall functions ([#741](https://github.com/open-mmlab/mmaction2/pull/741)) +- Recursively remove pretrained step for testing ([#695](https://github.com/open-mmlab/mmaction2/pull/695)) +- Add limiter for demo ([#668](https://github.com/open-mmlab/mmaction2/pull/668)) **Bug and Typo Fixes** - Fix a bug about multi-class in VideoDataset ([#723](https://github.com/open-mmlab/mmaction2/pull/678)) +- Reverse key-value in anet filelist generation ([#686](https://github.com/open-mmlab/mmaction2/pull/686)) +- Fix flow norm cfg typo ([#693](https://github.com/open-mmlab/mmaction2/pull/693)) **ModelZoo** From ddf5cd6a3df7e4d7b3d1729e77d003b0dcc2bb31 Mon Sep 17 00:00:00 2001 From: congee <35596075+congee524@users.noreply.github.com> Date: Thu, 1 Apr 2021 14:11:37 +0800 Subject: [PATCH 014/414] add CN PR No (#778) * add cn pr no * add comma --- docs/changelog.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/changelog.md b/docs/changelog.md index 11ba2da508..da62eb1c9e 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -14,7 +14,7 @@ - Support using backbones from MMCls for TSN ([#679](https://github.com/open-mmlab/mmaction2/pull/679)) - Support using backbones from TorchVision for TSN ([#720](https://github.com/open-mmlab/mmaction2/pull/720)) - Support Mixup and Cutmix for recognizers ([#681](https://github.com/open-mmlab/mmaction2/pull/681)) -- Support Chinese documentation +- Support Chinese documentation ([#665](https://github.com/open-mmlab/mmaction2/pull/665), [#680](https://github.com/open-mmlab/mmaction2/pull/680), [#689](https://github.com/open-mmlab/mmaction2/pull/689), [#701](https://github.com/open-mmlab/mmaction2/pull/701)[#702](https://github.com/open-mmlab/mmaction2/pull/702), [#703](https://github.com/open-mmlab/mmaction2/pull/703), [#706](https://github.com/open-mmlab/mmaction2/pull/706)[#716](https://github.com/open-mmlab/mmaction2/pull/716), [#717](https://github.com/open-mmlab/mmaction2/pull/717), [#731](https://github.com/open-mmlab/mmaction2/pull/731), [#733](https://github.com/open-mmlab/mmaction2/pull/733), [#735](https://github.com/open-mmlab/mmaction2/pull/735), [#736](https://github.com/open-mmlab/mmaction2/pull/736), 
[#737](https://github.com/open-mmlab/mmaction2/pull/737), [#738](https://github.com/open-mmlab/mmaction2/pull/738) , [#739](https://github.com/open-mmlab/mmaction2/pull/739), [#740](https://github.com/open-mmlab/mmaction2/pull/740), [#742](https://github.com/open-mmlab/mmaction2/pull/742), [#752](https://github.com/open-mmlab/mmaction2/pull/752), [#759](https://github.com/open-mmlab/mmaction2/pull/759), [#761](https://github.com/open-mmlab/mmaction2/pull/761), [#772](https://github.com/open-mmlab/mmaction2/pull/772), [#775](https://github.com/open-mmlab/mmaction2/pull/775)) **Improvements** From 18824abfc36ec44b4d51915262af1be3dbf8c74b Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Thu, 1 Apr 2021 16:09:38 +0800 Subject: [PATCH 015/414] Bump version to v0.13.0 (#777) * Bump version to v0.13.0 * polish changelog * Update changelog.md * Update changelog.md Co-authored-by: lizz --- README.md | 2 +- README_zh-CN.md | 4 +++- docs/changelog.md | 8 ++++---- mmaction/version.py | 2 +- 4 files changed, 9 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index ebe9d493b2..617ed4ef8f 100644 --- a/README.md +++ b/README.md @@ -55,7 +55,7 @@ The master branch works with **PyTorch 1.3+**. ## Changelog -v0.12.0 was released in 28/02/2021. Please refer to [changelog.md](docs/changelog.md) for details and release history. +v0.13.0 was released in 31/03/2021. Please refer to [changelog.md](docs/changelog.md) for details and release history. ## Benchmark diff --git a/README_zh-CN.md b/README_zh-CN.md index e3b78bc4dc..482e24c52f 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -49,7 +49,7 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLa ## 更新记录 -v0.12.0 版本已于 2021 年 2 月 28 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史 +v0.13.0 版本已于 2021 年 3 月 31 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史 ## 基准测试 @@ -88,6 +88,7 @@ v0.12.0 版本已于 2021 年 2 月 28 日发布,可通过查阅 [更新日志 - ✅ [X3D](/configs/recognition/x3d/README.md) (CVPR'2020) - ✅ [OmniSource](/configs/recognition/omnisource/README.md) (ECCV'2020) - ✅ [MultiModality: Audio](/configs/recognition_audio/resnet/README.md) (ArXiv'2020) +- ✅ [TANet](configs/recognition/tanet/README.md) (ArXiv'2020) @@ -109,6 +110,7 @@ v0.12.0 版本已于 2021 年 2 月 28 日发布,可通过查阅 [更新日志 - ✅ [SlowOnly+Fast R-CNN](/configs/detection/ava/README.md) (ICCV'2019) - ✅ [SlowFast+Fast R-CNN](/configs/detection/ava/README.md) (ICCV'2019) +- ✅ [Long-Term Feature Bank](configs/detection/lfb/README.md) (CVPR'2019) diff --git a/docs/changelog.md b/docs/changelog.md index da62eb1c9e..3e2559d543 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -6,7 +6,7 @@ - Support LFB - Support using backbone from MMCls/TorchVision -- Support Chinese documentation +- Add Chinese documentation **New Features** @@ -20,11 +20,11 @@ - Add slowfast config/json/log/ckpt for training custom classes of AVA ([#678](https://github.com/open-mmlab/mmaction2/pull/678)) - Set RandAugment as Imgaug default transforms ([#585](https://github.com/open-mmlab/mmaction2/pull/585)) -- Add `--test-last` & `--test-best` for `tools/train.py` to test checkpoints after training ([#608](https://github.com/open-mmlab/mmaction2/pull/608) +- Add `--test-last` & `--test-best` for `tools/train.py` to test checkpoints after training ([#608](https://github.com/open-mmlab/mmaction2/pull/608)) - Add fcn_testing in TPN ([#684](https://github.com/open-mmlab/mmaction2/pull/684)) - Remove redundant recall functions ([#741](https://github.com/open-mmlab/mmaction2/pull/741)) - Recursively remove pretrained step for 
testing ([#695](https://github.com/open-mmlab/mmaction2/pull/695))
-- Add limiter for demo ([#668](https://github.com/open-mmlab/mmaction2/pull/668))
+- Improve demo by limiting inference fps ([#668](https://github.com/open-mmlab/mmaction2/pull/668))

 **Bug and Typo Fixes**

@@ -35,9 +35,9 @@
 **ModelZoo**

 - Add LFB for AVA2.1 ([#553](https://github.com/open-mmlab/mmaction2/pull/553))
-- Add slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb ([#690](https://github.com/open-mmlab/mmaction2/pull/690))
 - Add TSN with ResNeXt-101-32x4d backbone as an example for using MMCls backbones ([#679](https://github.com/open-mmlab/mmaction2/pull/679))
 - Add TSN with Densenet161 backbone as an example for using TorchVision backbones ([#720](https://github.com/open-mmlab/mmaction2/pull/720))
+- Add slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb ([#690](https://github.com/open-mmlab/mmaction2/pull/690))
 - Add slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb ([#704](https://github.com/open-mmlab/mmaction2/pull/704))
 - Add slowonly_nl_kinetics_pretrained_r50_4x16x1(8x8x1)_20e_ava_rgb ([#730](https://github.com/open-mmlab/mmaction2/pull/730))
diff --git a/mmaction/version.py b/mmaction/version.py
index 6a9a6dddab..e090d9f31a 100644
--- a/mmaction/version.py
+++ b/mmaction/version.py
@@ -1,6 +1,6 @@
 # Copyright (c) Open-MMLab. All rights reserved.

-__version__ = '0.12.0'
+__version__ = '0.13.0'


 def parse_version_info(version_str):

From 8e2b3385dbef01dcf1e7438d4cd8a18ca874d8ba Mon Sep 17 00:00:00 2001
From: Kenny
Date: Mon, 5 Apr 2021 17:37:43 +0800
Subject: [PATCH 016/414] add pose_dataset

---
 mmaction/datasets/pose_dataset.py | 185 ++++++++++++++++++++++++++++++
 1 file changed, 185 insertions(+)
 create mode 100644 mmaction/datasets/pose_dataset.py

diff --git a/mmaction/datasets/pose_dataset.py b/mmaction/datasets/pose_dataset.py
new file mode 100644
index 0000000000..6f04405eae
--- /dev/null
+++ b/mmaction/datasets/pose_dataset.py
@@ -0,0 +1,185 @@
+import copy
+import os.path as osp
+
+import mmcv
+import numpy as np
+from mmcv.utils import print_log
+
+from ..core import mean_class_accuracy, top_k_accuracy
+from ..utils import get_root_logger
+from .base import BaseDataset
+from .registry import DATASETS
+
+
+@DATASETS.register_module()
+class PoseDataset(BaseDataset):
+    """Pose dataset for action recognition.
+
+    The dataset loads pose data and applies specified transforms to return
+    a dict containing pose information.
+
+    The ann_file is a pickle file that contains a list of annotations; the
+    fields of an annotation include frame_dir (video_id), total_frames,
+    label, kp, kpscore.
+
+    Args:
+        ann_file (str): Path to the annotation file.
+        pipeline (list[dict | callable]): A sequence of data transforms.
+        resample (float | None): The sampling probability for classes 61-120
+            in the NTU-120 XSub split. If set as 1.5, the probability of
+            sampling a video in classes 61-120 is 1.5 times the probability
+            of sampling a video in classes 1-60. None means not applicable
+            (only applicable to NTU-120 XSub). Default: None.
+        valid_ratio (float | None): The valid_ratio for videos in
+            KineticsPose. For a video with n frames, it is a valid training
+            sample only if n * valid_ratio frames have human pose. None means
+            not applicable (only applicable to Kinetics Pose). Default: None.
+        box_thre (str | None): The threshold for human proposals. Only boxes
+            with confidence scores larger than `box_thre` are kept. None means
+            not applicable (only applicable to Kinetics Pose [ours]). Allowed
+            choices are '0.5', '0.6', '0.7', '0.8', '0.9'. Default: None.
+        **kwargs: Keyword arguments for ``BaseDataset``.
+    """
+
+    def __init__(self,
+                 ann_file,
+                 pipeline,
+                 resample=None,
+                 valid_ratio=None,
+                 box_thre=None,
+                 **kwargs):
+        # For NTU-120 X-Sub
+        self.resample = resample
+        modality = 'Pose'
+
+        super().__init__(
+            ann_file, pipeline, start_index=0, modality=modality, **kwargs)
+
+        # box_thre, which should be a string
+        self.box_thre = box_thre
+        if self.box_thre is not None:
+            assert box_thre in ['0.5', '0.6', '0.7', '0.8', '0.9']
+
+        # Thresholding Training Examples
+        self.valid_ratio = valid_ratio
+        if self.valid_ratio is not None:
+            assert isinstance(self.valid_ratio, float)
+            if self.box_thre is None:
+                self.video_infos = [
+                    x for x in self.video_infos
+                    if x['valid_frames'] / x['total_frames'] >= valid_ratio
+                ]
+            else:
+                key = f'valid@{self.box_thre}'
+                self.video_infos = [
+                    x for x in self.video_infos
+                    if x[key] / x['total_frames'] >= valid_ratio
+                ]
+                if self.box_thre != '0.5':
+                    box_thre = float(self.box_thre)
+                    for item in self.video_infos:
+                        inds = [
+                            i for i, score in enumerate(item['box_score'])
+                            if score >= box_thre
+                        ]
+                        item['anno_inds'] = np.array(inds)
+
+        logger = get_root_logger()
+        logger.info(f'{len(self)} videos remain after valid thresholding')
+
+    def load_annotations(self):
+        """Load annotation file to get video information."""
+        assert self.ann_file.endswith('.pkl')
+        return self.load_pkl_annotations()
+
+    def load_pkl_annotations(self):
+        data = mmcv.load(self.ann_file)
+
+        for i, item in enumerate(data):
+            # Sometimes we may need to load anno from the file
+            if 'filename' in item:
+                item['filename'] = osp.join(self.data_prefix, item['filename'])
+            if 'frame_dir' in item:
+                item['frame_dir'] = osp.join(self.data_prefix,
+                                             item['frame_dir'])
+        return data
+
+    def prepare_train_frames(self, idx):
+        """Prepare the frames for training given the index."""
+        results = copy.deepcopy(self.video_infos[idx])
+
+        results['modality'] = self.modality
+        results['start_index'] = self.start_index
+        return self.pipeline(results)
+
+    def prepare_test_frames(self, idx):
+        """Prepare the frames for testing given the index."""
+        results = copy.deepcopy(self.video_infos[idx])
+
+        results['modality'] = self.modality
+        results['start_index'] = self.start_index
+        return self.pipeline(results)
+
+    def evaluate(self,
+                 results,
+                 metrics='top_k_accuracy',
+                 topk=(1, 5),
+                 logger=None,
+                 **kwargs):
+        """Evaluation in pose dataset.
+
+        Args:
+            results (list): Output results.
+            metrics (str | sequence[str]): Metrics to be performed.
+                Default: 'top_k_accuracy'.
+            topk (tuple[int]): K value for top_k_accuracy metric.
+                Default: (1, 5).
+            logger (logging.Logger | None): Logger for recording.
+                Default: None.
+
+        Returns:
+            dict: Evaluation results dict.
+ """ + if not isinstance(results, list): + raise TypeError(f'results must be a list, but got {type(results)}') + assert len(results) == len(self), ( + f'The length of results is not equal to the dataset len: ' + f'{len(results)} != {len(self)}') + + if not isinstance(topk, (int, tuple)): + raise TypeError( + f'topk must be int or tuple of int, but got {type(topk)}') + + metrics = metrics if isinstance(metrics, (list, tuple)) else [metrics] + allowed_metrics = ['top_k_accuracy', 'mean_class_accuracy'] + + for metric in metrics: + if metric not in allowed_metrics: + raise KeyError(f'metric {metric} is not supported') + + eval_results = {} + gt_labels = [ann['label'] for ann in self.video_infos] + + for metric in metrics: + msg = f'Evaluating {metric}...' + if logger is None: + msg = '\n' + msg + print_log(msg, logger=logger) + + if metric == 'top_k_accuracy': + top_k_acc = top_k_accuracy(results, gt_labels, topk) + log_msg = [] + for k, acc in zip(topk, top_k_acc): + eval_results[f'top{k}_acc'] = acc + log_msg.append(f'\ntop{k}_acc\t{acc:.4f}') + log_msg = ''.join(log_msg) + print_log(log_msg, logger=logger) + + if metric == 'mean_class_accuracy': + mean_acc = mean_class_accuracy(results, gt_labels) + eval_results['mean_class_accuracy'] = mean_acc + log_msg = f'\nmean_acc\t{mean_acc:.4f}' + print_log(log_msg, logger=logger) + + return eval_results From 9eac26be4050a52f722647e98c08207ba8f6eda9 Mon Sep 17 00:00:00 2001 From: Kenny Date: Tue, 6 Apr 2021 18:02:46 +0800 Subject: [PATCH 017/414] add pose_loading --- mmaction/datasets/pipelines/__init__.py | 5 +- mmaction/datasets/pipelines/loading.py | 4 +- mmaction/datasets/pipelines/pose_loading.py | 593 ++++++++++++++++++++ mmaction/datasets/pose_dataset.py | 3 - 4 files changed, 599 insertions(+), 6 deletions(-) create mode 100644 mmaction/datasets/pipelines/pose_loading.py diff --git a/mmaction/datasets/pipelines/__init__.py b/mmaction/datasets/pipelines/__init__.py index 8c342187ff..8a15583ce9 100644 --- a/mmaction/datasets/pipelines/__init__.py +++ b/mmaction/datasets/pipelines/__init__.py @@ -16,6 +16,8 @@ PyAVDecodeMotionVector, PyAVInit, RawFrameDecode, SampleAVAFrames, SampleFrames, SampleProposalFrames, UntrimmedSampleFrames) +from .pose_loading import (GeneratePoseTarget, LoadKineticsPose, PoseDecode, + UniformSampleFrames) __all__ = [ 'SampleFrames', 'PyAVDecode', 'DecordDecode', 'DenseSampleFrames', @@ -31,5 +33,6 @@ 'FormatAudioShape', 'LoadAudioFeature', 'AudioFeatureSelector', 'AudioDecodeInit', 'EntityBoxFlip', 'EntityBoxCrop', 'EntityBoxRescale', 'RandomScale', 'ImageDecode', 'BuildPseudoClip', 'RandomRescale', - 'PyAVDecodeMotionVector', 'Rename', 'Imgaug' + 'PyAVDecodeMotionVector', 'Rename', 'Imgaug', 'UniformSampleFrames', + 'PoseDecode', 'LoadKineticsPose', 'GeneratePoseTarget' ] diff --git a/mmaction/datasets/pipelines/loading.py b/mmaction/datasets/pipelines/loading.py index 2714ea36d0..77841aa67b 100644 --- a/mmaction/datasets/pipelines/loading.py +++ b/mmaction/datasets/pipelines/loading.py @@ -78,8 +78,8 @@ def __repr__(self): class SampleFrames: """Sample frames from the video. - Required keys are "filename", "total_frames", "start_index" , added or - modified keys are "frame_inds", "frame_interval" and "num_clips". + Required keys are "total_frames", "start_index" , added or modified keys + are "frame_inds", "frame_interval" and "num_clips". Args: clip_len (int): Frames of each sampled output clip. 
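The core idea of the `UniformSampleFrames` transform added in the new file below can be sketched in a few lines: when `num_frames >= 2 * clip_len`, the video is split into `clip_len` near-equal bins and one frame index is drawn at random from each bin. The following standalone sketch is illustrative only and not part of the patch; the helper name `uniform_sample` is invented here:

```python
import numpy as np

def uniform_sample(num_frames, clip_len):
    # Bin i covers frame indices [bids[i], bids[i + 1]); bin sizes differ
    # by at most one frame, so the clip stays temporally uniform.
    bids = np.array([i * num_frames // clip_len for i in range(clip_len + 1)])
    bsize = np.diff(bids)              # number of frames in each bin
    bst = bids[:clip_len]              # first frame index of each bin
    offset = np.random.randint(bsize)  # one random offset per bin
    return bst + offset

np.random.seed(0)
print(uniform_sample(30, 8))  # 8 strictly increasing indices, one per bin
```

Drawing one index per bin keeps the sampling uniform across the clip while still injecting randomness; the test-time branch reproduces the same scheme with a fixed seed so that evaluation stays deterministic, and the two fallback branches in `_get_train_clips` handle videos shorter than `clip_len` and shorter than `2 * clip_len`.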
diff --git a/mmaction/datasets/pipelines/pose_loading.py b/mmaction/datasets/pipelines/pose_loading.py new file mode 100644 index 0000000000..c0aca52282 --- /dev/null +++ b/mmaction/datasets/pipelines/pose_loading.py @@ -0,0 +1,593 @@ +import copy as cp +import pickle + +import numpy as np +from mmcv.fileio import FileClient +from scipy.stats import mode + +from ..registry import PIPELINES +from .augmentations import Flip + + +@PIPELINES.register_module() +class UniformSampleFrames: + """Uniformly sample frames from the video. + + Required keys are "total_frames", "start_index" , added or modified keys + are "frame_inds", "clip_len", "frame_interval" and "num_clips". + + Args: + clip_len (int): Frames of each sampled output clip. + num_clips (int): Number of clips to be sampled. Default: 1. + test_mode (bool): Store True when building test or validation dataset. + Default: False. + seed (int): The random seed used during test time. + """ + + def __init__(self, clip_len, num_clips=1, test_mode=False, seed=255): + + self.clip_len = clip_len + self.num_clips = num_clips + self.test_mode = test_mode + self.seed = seed + + def _get_train_clips(self, num_frames, clip_len): + assert self.num_clips == 1 + if num_frames < clip_len: + start = np.random.randint(0, num_frames) + inds = np.arange(start, start + clip_len) + elif clip_len <= num_frames < 2 * clip_len: + basic = np.arange(clip_len) + inds = np.random.choice( + clip_len + 1, num_frames - clip_len, replace=False) + offset = np.zeros(clip_len + 1, dtype=np.int64) + offset[inds] = 1 + offset = np.cumsum(offset) + inds = basic + offset[:-1] + else: + bids = np.array( + [i * num_frames // clip_len for i in range(clip_len + 1)]) + bsize = np.diff(bids) + bst = bids[:clip_len] + offset = np.random.randint(bsize) + inds = bst + offset + return inds + + def _get_test_clips(self, num_frames, clip_len): + np.random.seed(self.seed) + if num_frames < clip_len: + # Then we use a simple strategy + if num_frames < self.num_clips: + start_inds = list(range(self.num_clips)) + else: + start_inds = [ + i * num_frames // self.num_clips + for i in range(self.num_clips) + ] + inds = np.concatenate( + [np.arange(i, i + clip_len) for i in start_inds]) + elif clip_len <= num_frames < clip_len * 2: + all_inds = [] + for i in range(self.num_clips): + basic = np.arange(clip_len) + inds = np.random.choice( + clip_len + 1, num_frames - clip_len, replace=False) + offset = np.zeros(clip_len + 1, dtype=np.int64) + offset[inds] = 1 + offset = np.cumsum(offset) + inds = basic + offset[:-1] + all_inds.append(inds) + inds = np.concatenate(all_inds) + else: + bids = np.array( + [i * num_frames // clip_len for i in range(clip_len + 1)]) + bsize = np.diff(bids) + bst = bids[:clip_len] + all_inds = [] + for i in range(self.num_clips): + offset = np.random.randint(bsize) + all_inds.append(bst + offset) + inds = np.concatenate(all_inds) + return inds + + def __call__(self, results): + num_frames = results['total_frames'] + + if self.test_mode: + inds = self._get_test_clips(num_frames, self.clip_len) + else: + inds = self._get_train_clips(num_frames, self.clip_len) + + inds = np.mod(inds, num_frames) + start_index = results['start_index'] + inds = inds + start_index + + results['frame_inds'] = inds.astype(np.int) + results['clip_len'] = self.clip_len + results['frame_interval'] = None + results['num_clips'] = self.num_clips + return results + + +@PIPELINES.register_module() +class PoseDecode(object): + """Load and decode pose with given indices. 
+
+    Required keys are "kp", "frame_inds" (optional), "kpscore" (optional),
+    added or modified keys are "kp", "kpscore" (if applicable).
+
+    Args:
+        random_drop (bool): Whether to randomly drop keypoints. The following
+            args are applicable only when `random_drop == True`. When set as
+            True, "kpscore" is a mandatory key. Default: False.
+        random_seed (int): Random seed used for randomly dropping keypoints.
+            Default: 1.
+        drop_prob (float): The probability of dropping one keypoint for each
+            frame. Default: 1 / 16.
+        manipulate_joints (list[int]): The joint indexes that may be dropped.
+            Default: [7, 8, 9, 10, 13, 14, 15, 16]. (limb joints)
+    """
+
+    def __init__(self,
+                 random_drop=False,
+                 random_seed=1,
+                 drop_prob=1. / 16.,
+                 manipulate_joints=[7, 8, 9, 10, 13, 14, 15, 16]):
+        self.random_drop = random_drop
+        self.random_seed = random_seed
+        self.drop_prob = drop_prob
+        self.manipulate_joints = manipulate_joints
+
+    # inplace
+    def _drop_kpscore(self, kpscores):
+        for kpscore in kpscores:
+            lt = kpscore.shape[0]
+            for tidx in range(lt):
+                if np.random.random() < self.drop_prob:
+                    jidx = np.random.choice(self.manipulate_joints)
+                    kpscore[tidx, jidx] = 0.
+
+    def _load_kp(self, kp, frame_inds):
+        return [x[frame_inds].astype(np.float32) for x in kp]
+
+    def _load_kpscore(self, kpscore, frame_inds):
+        return [x[frame_inds].astype(np.float32) for x in kpscore]
+
+    def __call__(self, results):
+        if self.random_drop:
+            np.random.seed(self.random_seed)
+            assert 'kpscore' in results, 'for simplicity'
+
+        if 'frame_inds' not in results:
+            results['frame_inds'] = np.arange(results['total_frames'])
+
+        if results['frame_inds'].ndim != 1:
+            results['frame_inds'] = np.squeeze(results['frame_inds'])
+
+        offset = results.get('offset', 0)
+        frame_inds = results['frame_inds'] + offset
+
+        if 'kpscore' in results:
+            kpscore = results['kpscore']
+            assert results['num_person'] == kpscore.shape[0]
+            if self.random_drop:
+                self._drop_kpscore(kpscore)
+
+            results['kpscore'] = kpscore[:, frame_inds].astype(np.float32)
+
+        if 'kp' in results:
+            assert results['num_person'] == len(results['kp'])
+            results['kp'] = results['kp'][:, frame_inds].astype(np.float32)
+
+        return results
+
+
+@PIPELINES.register_module()
+class LoadKineticsPose:
+    """Load Kinetics Pose given filename (the format should be pickle).
+
+    Required keys are "filename", "total_frames", "img_shape", "frame_inds",
+    "anno_inds" (for mmpose source, optional), added or modified keys are
+    "kp", "kpscore".
+
+    Args:
+        io_backend (str): IO backend where frames are stored. Default: 'disk'.
+        squeeze (bool): Whether to remove frames with no human pose.
+            Default: True.
+        max_person (int): The max number of persons in a frame. Default: 100.
+        keypoint_weight (dict): The weight of keypoints. We set the confidence
+            score of a person as the weighted sum of confidence scores of each
+            joint. Persons with low confidence scores are dropped (if the
+            number of persons exceeds max_person).
+            Default: dict(face=1, torso=2, limb=3).
+        source (str): The source of the keypoints. Choices are 'mmpose' and
+            'openpose'. Default: 'mmpose'.
+        kwargs (dict, optional): Arguments for FileClient.
+    """
+
+    # squeeze (remove those frames without keypoints)
+    # kp2keep (The list of keypoint ids to keep)
+    def __init__(self,
+                 io_backend='disk',
+                 squeeze=True,
+                 max_person=100,
+                 keypoint_weight=dict(face=1, torso=2, limb=3),
+                 source='mmpose',
+                 **kwargs):
+
+        self.io_backend = io_backend
+        self.squeeze = squeeze
+        self.max_person = max_person
+        self.keypoint_weight = keypoint_weight
+        self.source = source
+
+        if source == 'openpose':
+            self.kpsubset = dict(
+                face=[0, 14, 15, 16, 17],
+                torso=[1, 2, 8, 5, 11],
+                limb=[3, 4, 6, 7, 9, 10, 12, 13])
+        elif source == 'mmpose':
+            self.kpsubset = dict(
+                face=[0, 1, 2, 3, 4],
+                torso=[5, 6, 11, 12],
+                limb=[7, 8, 9, 10, 13, 14, 15, 16])
+        else:
+            raise NotImplementedError('Unknown source of Kinetics Pose')
+
+        self.kwargs = kwargs
+        self.file_client = None
+
+    def __call__(self, results):
+
+        assert 'filename' in results
+        filename = results.pop('filename')
+
+        # only applicable to source == 'mmpose'
+        anno_inds = None
+        if 'anno_inds' in results:
+            assert self.source == 'mmpose'
+            anno_inds = results.pop('anno_inds')
+        results.pop('box_score', None)
+
+        if self.file_client is None:
+            self.file_client = FileClient(self.io_backend, **self.kwargs)
+
+        bytes = self.file_client.get(filename)
+
+        # only the kp array is in the pickle file, each kp includes x, y, score
+        kps = pickle.loads(bytes)
+
+        total_frames = results['total_frames']
+
+        # keep frame_inds as an array so that it can be indexed by anno_inds
+        frame_inds = np.array(results.pop('frame_inds'))
+
+        if anno_inds is not None:
+            kps = kps[anno_inds]
+            frame_inds = frame_inds[anno_inds]
+
+        def mapinds(inds):
+            uni = np.unique(inds)
+            mapp = {x: i for i, x in enumerate(uni)}
+            inds = [mapp[x] for x in inds]
+            return np.array(inds, dtype=np.int16)
+
+        if self.squeeze:
+            frame_inds = mapinds(frame_inds)
+            total_frames = np.max(frame_inds) + 1
+
+        # write it back
+        results['total_frames'] = total_frames
+
+        h, w = results['img_shape']
+        if self.source == 'openpose':
+            kps[:, :, 0] *= w
+            kps[:, :, 1] *= h
+
+        num_kp = kps.shape[1]
+        # the maximum number of persons appearing in a single frame
+        num_person = mode(frame_inds)[-1][0]
+
+        new_kp = np.zeros([num_person, total_frames, num_kp, 2],
+                          dtype=np.float16)
+        new_kpscore = np.zeros([num_person, total_frames, num_kp],
+                               dtype=np.float16)
+        # 32768 is enough
+        num_person_frame = np.zeros([total_frames], dtype=np.int16)
+
+        for frame_ind, kp in zip(frame_inds, kps):
+            person_ind = num_person_frame[frame_ind]
+            new_kp[person_ind, frame_ind] = kp[:, :2]
+            new_kpscore[person_ind, frame_ind] = kp[:, 2]
+            num_person_frame[frame_ind] += 1
+
+        kpgrp = self.kpsubset
+        weight = self.keypoint_weight
+        results['num_person'] = num_person
+
+        if num_person > self.max_person:
+            for i in range(total_frames):
+                np_frame = num_person_frame[i]
+                val = new_kpscore[:np_frame, i]
+
+                val = np.sum(val[:, kpgrp['face']], 1) * weight['face'] + \
+                    np.sum(val[:, kpgrp['torso']], 1) * weight['torso'] + \
+                    np.sum(val[:, kpgrp['limb']], 1) * weight['limb']
+                inds = sorted(range(np_frame), key=lambda x: -val[x])
+                new_kpscore[:np_frame, i] = new_kpscore[inds, i]
+                new_kp[:np_frame, i] = new_kp[inds, i]
+            results['num_person'] = self.max_person
+
+        results['kp'] = new_kp[:self.max_person]
+        results['kpscore'] = new_kpscore[:self.max_person]
+        return results
+
+
+@PIPELINES.register_module()
+class GeneratePoseTarget:
+    """Generate pseudo heatmaps based on joint coordinates and confidence.
+
+    Required keys are "kp", "img_shape", "kpscore" (optional), added or
+    modified keys are "imgs".
+
+    Args:
+        sigma (float): The sigma of the generated gaussian map. Default: 0.6.
+ use_score (bool): Use the confidence score of keypoints as the maximum + of the gaussian maps. Default: True. + with_kp (bool): Generate pseudo heatmaps for keypoints. Default: True. + with_limb (bool): Generate pseudo heatmaps for limbs. At least one of + 'with_kp' and 'with_limb' should be True. Default: False. + skeletons (list[tuple]): The definition of human skeletons. + Default: [(0, 1), (0, 2), (1, 3), (2, 4), (0, 5), (5, 7), (7, 9), + (0, 6), (6, 8), (8, 10), (5, 11), (11, 13), (13, 15), + (6, 12), (12, 14), (14, 16), (11, 12)], + which is the definition of COCO-17p skeletons. + double (bool): Output both original heatmaps and flipped heatmaps. + Default: False. + left (list[int]): Indexes of left keypoints, which is used when + flipping heatmaps. Default: [1, 3, 5, 7, 9, 11, 13, 15], + which is left keypoints in COCO-17p. + right (list[int]): Indexes of right keypoints, which is used when + flipping heatmaps. Default: [2, 4, 6, 8, 10, 12, 14, 16], + which is right keypoints in COCO-17p. + """ + + def __init__(self, + sigma=0.6, + use_score=True, + with_kp=True, + with_limb=False, + skeletons=[(0, 1), (0, 2), (1, 3), (2, 4), (0, 5), (5, 7), + (7, 9), (0, 6), (6, 8), (8, 10), (5, 11), (11, 13), + (13, 15), (6, 12), (12, 14), (14, 16), (11, 12)], + double=False, + left=[1, 3, 5, 7, 9, 11, 13, 15], + right=[2, 4, 6, 8, 10, 12, 14, 16]): + + self.sigma = sigma + self.use_score = use_score + self.with_kp = with_kp + self.with_limb = with_limb + self.double = double + + # an auxiliary const + self.eps = 1e-4 + + assert self.with_kp or self.with_limb, ( + 'At least one of "with_limb" ' + 'and "with_kp" should be set as True.') + self.left = left + self.right = right + self.skeletons = skeletons + + def generate_a_heatmap(self, img_h, img_w, centers, sigma, max_values): + """Generate pseudo heatmap for one keypoint in one frame. + + Args: + img_h (int): The height of the heatmap. + img_w (int): The width of the heatmap. + centers (np.ndarray): The coordinates of corresponding keypoints + (of multiple persons). + sigma (float): The sigma of generated gaussian. + max_values (np.ndarray): The max values of each keypoint. + + Returns: + np.ndarray: The generated pseudo heatmap. + """ + + heatmap = np.zeros([img_h, img_w], dtype=np.float32) + + for center, max_value in zip(centers, max_values): + mu_x, mu_y = center[0], center[1] + if max_value < self.eps: + continue + + st_x = max(int(mu_x - 3 * sigma), 0) + ed_x = min(int(mu_x + 3 * sigma) + 1, img_w) + st_y = max(int(mu_y - 3 * sigma), 0) + ed_y = min(int(mu_y + 3 * sigma) + 1, img_h) + x = np.arange(st_x, ed_x, 1, np.float32) + y = np.arange(st_y, ed_y, 1, np.float32) + + # if the keypoint not in the heatmap coordinate system + if not (len(x) and len(y)): + continue + y = y[:, None] + + patch = np.exp(-((x - mu_x)**2 + (y - mu_y)**2) / 2 / sigma**2) + patch = patch * max_value + heatmap[st_y:ed_y, + st_x:ed_x] = np.maximum(heatmap[st_y:ed_y, st_x:ed_x], + patch) + + return heatmap + + def generate_a_limb_heatmap(self, img_h, img_w, starts, ends, sigma, + start_values, end_values): + """Generate pseudo heatmap for one limb in one frame. + + Args: + img_h (int): The height of the heatmap. + img_w (int): The width of the heatmap. + starts (np.ndarray): The coordinates of one keypoint in the + corresponding limbs (of multiple persons). + ends (np.ndarray): The coordinates of the other keypoint in the + corresponding limbs (of multiple persons). + sigma (float): The sigma of generated gaussian. 
+ start_values (np.ndarray): The max values of one keypoint in the + corresponding limbs. + end_values (np.ndarray): The max values of the other keypoint in + the corresponding limbs. + + Returns: + np.ndarray: The generated pseudo heatmap. + """ + + heatmap = np.zeros([img_h, img_w], dtype=np.float32) + + for start, end, start_value, end_value in zip(starts, ends, + start_values, + end_values): + value_coeff = min(start_value, end_value) + if value_coeff < self.eps: + continue + + min_x, max_x = min(start[0], end[0]), max(start[0], end[0]) + min_y, max_y = min(start[1], end[1]), max(start[1], end[1]) + + min_x = max(int(min_x - 3 * sigma), 0) + max_x = min(int(max_x + 3 * sigma) + 1, img_w) + min_y = max(int(min_y - 3 * sigma), 0) + max_y = min(int(max_y + 3 * sigma) + 1, img_h) + + x = np.arange(min_x, max_x, 1, np.float32) + y = np.arange(min_y, max_y, 1, np.float32) + + if not (len(x) and len(y)): + continue + + y = y[:, None] + x_0 = np.zeros_like(x) + y_0 = np.zeros_like(y) + + # distance to start keypoints + d2_start = ((x - start[0])**2 + (y - start[1])**2) + + # distance to end keypoints + d2_end = ((x - end[0])**2 + (y - end[1])**2) + + # the distance between start and end keypoints. + d2_ab = ((start[0] - end[0])**2 + (start[1] - end[1])**2) + + if d2_ab < 1: + full_map = self.generate_a_heatmap(img_h, img_w, [start], + sigma, [start_value]) + heatmap = np.maximum(heatmap, full_map) + continue + + coeff = (d2_start - d2_end + d2_ab) / 2. / d2_ab + + a_dominate = coeff <= 0 + b_dominate = coeff >= 1 + seg_dominate = 1 - a_dominate - b_dominate + + position = np.stack([x + y_0, y + x_0], axis=-1) + projection = start + np.stack([coeff, coeff], axis=-1) * ( + end - start) + d2_line = position - projection + d2_line = d2_line[:, :, 0]**2 + d2_line[:, :, 1]**2 + d2_seg = ( + a_dominate * d2_start + b_dominate * d2_end + + seg_dominate * d2_line) + + patch = np.exp(-d2_seg / 2. / sigma**2) + patch = patch * value_coeff + + heatmap[min_y:max_y, min_x:max_x] = np.maximum( + heatmap[min_y:max_y, min_x:max_x], patch) + + return heatmap + + def generate_heatmap(self, img_h, img_w, kps, sigma, max_values): + """Generate pseudo heatmap for all keypoints and limbs in one frame (if + needed). + + Args: + img_h (int): The height of the heatmap. + img_w (int): The width of the heatmap. + kps (np.ndarray): The coordinates of keypoints in this frame. + sigma (float): The sigma of generated gaussian. + max_values (np.ndarray): The confidence score of each keypoint. + + Returns: + np.ndarray: The generated pseudo heatmap. + """ + + heatmaps = [] + if self.with_kp: + num_kp = kps.shape[1] + for i in range(num_kp): + heatmap = self.generate_a_heatmap(img_h, img_w, kps[:, i], + sigma, max_values[:, i]) + heatmaps.append(heatmap) + + if self.with_limb: + for limb in self.skeletons: + start_idx, end_idx = limb + starts = kps[:, start_idx] + ends = kps[:, end_idx] + + start_values = max_values[:, start_idx] + end_values = max_values[:, end_idx] + heatmap = self.generate_a_limb_heatmap(img_h, img_w, starts, + ends, sigma, + start_values, + end_values) + heatmaps.append(heatmap) + + return np.stack(heatmaps, axis=-1) + + def gen_an_aug(self, results): + """Generate pseudo heatmaps for all frames. + + Args: + results (dict): The dictionary that contains all info of a sample. + + Returns: + list[np.ndarray]: The generated pseudo heatmaps. 
+ """ + + all_kps = results['kp'] + kp_shape = all_kps.shape + + if 'kpscore' in results: + all_kpscores = results['kpscore'] + else: + all_kpscores = np.ones(kp_shape[:-1], dtype=np.float32) + + img_h, img_w = results['img_shape'] + num_frame = kp_shape[1] + + imgs = [] + for i in range(num_frame): + sigma = self.sigma + kps = all_kps[:, i] + kpscores = all_kpscores[:, i] + + max_values = np.ones(kpscores.shape, dtype=np.float32) + if self.use_score: + max_values = kpscores + + hmap = self.generate_heatmap(img_h, img_w, kps, sigma, max_values) + imgs.append(hmap) + + return imgs + + def __call__(self, results): + if not self.double: + results['imgs'] = np.stack(self.gen_an_aug(results)) + else: + results_ = cp.deepcopy(results) + flip = Flip(flip_ratio=1, left=self.left, right=self.right) + results_ = flip(results_) + results['imgs'] = np.concatenate( + [self.gen_an_aug(results), + self.gen_an_aug(results_)]) + return results diff --git a/mmaction/datasets/pose_dataset.py b/mmaction/datasets/pose_dataset.py index 6f04405eae..a6958b697c 100644 --- a/mmaction/datasets/pose_dataset.py +++ b/mmaction/datasets/pose_dataset.py @@ -99,9 +99,6 @@ def load_pkl_annotations(self): # Sometimes we may need to load anno from the file if 'filename' in item: item['filename'] = osp.join(self.data_prefix, item['filename']) - if 'frame_dir' in item: - item['frame_dir'] = osp.join(self.data_prefix, - item['frame_dir']) return data def prepare_train_frames(self, idx): From 32804508c9ee007b7597ef3d61d7a682b86e99b8 Mon Sep 17 00:00:00 2001 From: Kenny Date: Tue, 6 Apr 2021 22:51:48 +0800 Subject: [PATCH 018/414] add pose augmentations --- mmaction/datasets/pipelines/augmentations.py | 303 +++++++++++++++---- 1 file changed, 244 insertions(+), 59 deletions(-) diff --git a/mmaction/datasets/pipelines/augmentations.py b/mmaction/datasets/pipelines/augmentations.py index 2b9e521905..0c1f94c80c 100644 --- a/mmaction/datasets/pipelines/augmentations.py +++ b/mmaction/datasets/pipelines/augmentations.py @@ -8,6 +8,14 @@ from ..registry import PIPELINES +def combine_quadruple(a, b): + return (a[0] + a[2] * b[0], a[1] + a[3] * b[1], a[2] * b[2], a[3] * b[3]) + + +def flip_quadruple(a): + return (1 - a[0] - a[2], a[1], a[2], a[3]) + + def _init_lazy_if_proper(results, lazy): """Initialize lazy operation properly. @@ -42,6 +50,102 @@ def _init_lazy_if_proper(results, lazy): assert 'lazy' not in results, 'Use Fuse after lazy operations' +@PIPELINES.register_module() +class PoseCompact: + """Convert the coordinates of keypoints to make it more compact. + Specifically, it first find a tight bounding box that surrounds all joints + in each frame, then we expand the tight box by a given padding ratio. For + example, if 'padding == 0.25', then the expanded box has unchanged center, + and 1.25x width and height. + + Args: + padding (float): The padding size. Default: 0.25. + threshold (int): The threshold for the tight bounding box. If the width + or height of the tight bounding box is smaller than the threshold, + we do not perform the compact operation. Default: 10. + hw_ratio (float | tuple[float] | None): The hw_ratio of the expanded + box. Float indicates the specific ratio and tuple indicates a + ratio range. If set as None, it means there is no requirement on + hw_ratio. Default: None. + allow_imgpad (bool): Whether to allow expanding the box outside the + image to meet the hw_ratio requirement. Default: True. + + Returns: + type: Description of returned object. 
+ """ + + def __init__(self, + padding=0.25, + threshold=10, + hw_ratio=None, + allow_imgpad=True): + + self.padding = padding + self.threshold = threshold + if isinstance(hw_ratio, float): + hw_ratio = (hw_ratio, hw_ratio) + + self.hw_ratio = hw_ratio + + self.allow_imgpad = allow_imgpad + assert self.padding >= 0 + + def __call__(self, results): + img_shape = results['img_shape'] + h, w = img_shape + kp = results['kp'] + min_x, min_y, max_x, max_y = np.Inf, np.Inf, -np.Inf, -np.Inf + + # Make NaN zero + kp[np.isnan(kp)] = 0. + kp_x = kp[..., 0] + kp_y = kp[..., 1] + + # There is at least one legal keypoint + if np.sum(kp_x != 0) or np.sum(kp_y != 0): + min_x = min(min(kp_x[kp_x != 0]), min_x) + min_y = min(min(kp_y[kp_y != 0]), min_y) + max_x = max(max(kp_x[kp_x != 0]), max_x) + max_y = max(max(kp_y[kp_y != 0]), max_y) + + # The compact area is too small + if max_x - min_x < self.threshold or max_y - min_y < self.threshold: + return results + + center = ((max_x + min_x) / 2, (max_y + min_y) / 2) + half_width = (max_x - min_x) / 2 * (1 + self.padding) + half_height = (max_y - min_y) / 2 * (1 + self.padding) + + if self.hw_ratio is not None: + half_height = max(self.hw_ratio[0] * half_width, half_height) + half_width = max(1 / self.hw_ratio[1] * half_height, half_width) + + min_x, max_x = center[0] - half_width, center[0] + half_width + min_y, max_y = center[1] - half_height, center[1] + half_height + + # hot update + if not self.allow_imgpad: + min_x, min_y = int(max(0, min_x)), int(max(0, min_y)) + max_x, max_y = int(min(w, max_x)), int(min(h, max_y)) + else: + min_x, min_y = int(min_x), int(min_y) + max_x, max_y = int(max_x), int(max_y) + + kp_x[kp_x != 0] -= min_x + kp_y[kp_y != 0] -= min_y + + new_shape = (max_y - min_y, max_x - min_x) + results['img_shape'] = new_shape + + # the order is x, y, w, h (in [0, 1]), a tuple + crop_quadruple = results.get('crop_quadruple', (0., 0., 1., 1.)) + new_crop_quadruple = (min_x / w, min_y / h, (max_x - min_x) / w, + (max_y - min_y) / h) + crop_quadruple = combine_quadruple(crop_quadruple, new_crop_quadruple) + results['crop_quadruple'] = crop_quadruple + return results + + @PIPELINES.register_module() class Imgaug: """Imgaug augmentation. @@ -523,9 +627,9 @@ def __repr__(self): class RandomCrop: """Vanilla square random crop that specifics the output size. - Required keys in results are "imgs" and "img_shape", added or - modified keys are "imgs", "lazy"; Required keys in "lazy" are "flip", - "crop_bbox", added or modified key is "crop_bbox". + Required keys in results are "img_shape", "kp" (optional), "imgs" + (optional), added or modified keys are "kp", "imgs", "lazy"; Required keys + in "lazy" are "flip", "crop_bbox", added or modified key is "crop_bbox". Args: size (int): The output size of the images. @@ -538,6 +642,13 @@ def __init__(self, size, lazy=False): self.size = size self.lazy = lazy + def _crop_kps(self, kps, crop_bbox): + return kps - crop_bbox[:2] + + def _crop_imgs(self, imgs, crop_bbox): + x1, y1, x2, y2 = crop_bbox + return [img[y1:y2, x1:x2] for img in imgs] + def __call__(self, results): """Performs the RandomCrop augmentation. @@ -546,6 +657,9 @@ def __call__(self, results): to the next transform in pipeline. 
""" _init_lazy_if_proper(results, self.lazy) + if 'kp' in results: + assert not self.lazy, ('Keypoint Augmentations are not compatible ' + 'with lazy == True') img_h, img_w = results['img_shape'] assert self.size <= img_h and self.size <= img_w @@ -578,16 +692,17 @@ def __call__(self, results): new_h, new_w = self.size, self.size - results['crop_bbox'] = np.array( + crop_bbox = np.array( [x_offset, y_offset, x_offset + new_w, y_offset + new_h]) + results['crop_bbox'] = crop_bbox results['img_shape'] = (new_h, new_w) if not self.lazy: - results['imgs'] = [ - img[y_offset:y_offset + new_h, x_offset:x_offset + new_w] - for img in results['imgs'] - ] + if 'kp' in results: + results['kp'] = self._crop_kps(results['kp'], crop_bbox) + if 'imgs' in results: + results['imgs'] = self._crop_imgs(results['imgs'], crop_bbox) else: lazyop = results['lazy'] if lazyop['flip']: @@ -620,12 +735,13 @@ def __repr__(self): @PIPELINES.register_module() -class RandomResizedCrop: +class RandomResizedCrop(RandomCrop): """Random crop that specifics the area and height-weight ratio range. - Required keys in results are "imgs", "img_shape", "crop_bbox" and "lazy", - added or modified keys are "imgs", "crop_bbox" and "lazy"; Required keys - in "lazy" are "flip", "crop_bbox", added or modified key is "crop_bbox". + Required keys in results are "img_shape", "crop_bbox", "imgs" (optional), + "kp" (optional), added or modified keys are "imgs", "kp", "crop_bbox" and + "lazy"; Required keys in "lazy" are "flip", "crop_bbox", added or modified + key is "crop_bbox". Args: area_range (Tuple[float]): The candidate area scales range of @@ -708,6 +824,9 @@ def __call__(self, results): to the next transform in pipeline. """ _init_lazy_if_proper(results, self.lazy) + if 'kp' in results: + assert not self.lazy, ('Keypoint Augmentations are not compatible ' + 'with lazy == True') img_h, img_w = results['img_shape'] @@ -734,13 +853,15 @@ def __call__(self, results): results['crop_quadruple'] = np.array( new_crop_quadruple, dtype=np.float32) - results['crop_bbox'] = np.array([left, top, right, bottom]) + crop_bbox = np.array([left, top, right, bottom]) + results['crop_bbox'] = crop_bbox results['img_shape'] = (new_h, new_w) if not self.lazy: - results['imgs'] = [ - img[top:bottom, left:right] for img in results['imgs'] - ] + if 'kp' in results: + results['kp'] = self._crop_kps(results['kp'], crop_bbox) + if 'imgs' in results: + results['imgs'] = self._crop_imgs(results['imgs'], crop_bbox) else: lazyop = results['lazy'] if lazyop['flip']: @@ -773,16 +894,18 @@ def __repr__(self): @PIPELINES.register_module() -class MultiScaleCrop: +class MultiScaleCrop(RandomCrop): """Crop images with a list of randomly selected scales. Randomly select the w and h scales from a list of scales. Scale of 1 means the base size, which is the minimal of image width and height. The scale level of w and h is controlled to be smaller than a certain value to prevent too large or small aspect ratio. - Required keys are "imgs", "img_shape", added or modified keys are "imgs", - "crop_bbox", "img_shape", "lazy" and "scales". Required keys in "lazy" are - "crop_bbox", added or modified key is "crop_bbox". + + Required keys are "img_shape", "imgs" (optional), "kp" (optional), added or + modified keys are "imgs", "crop_bbox", "img_shape", "lazy" and "scales". + Required keys in "lazy" are "crop_bbox", added or modified key is + "crop_bbox". Args: input_size (int | tuple[int]): (w, h) of network input. 
@@ -835,6 +958,9 @@ def __call__(self, results): to the next transform in pipeline. """ _init_lazy_if_proper(results, self.lazy) + if 'kp' in results: + assert not self.lazy, ('Keypoint Augmentations are not compatible ' + 'with lazy == True') img_h, img_w = results['img_shape'] base_size = min(img_h, img_w) @@ -882,8 +1008,9 @@ def __call__(self, results): new_h, new_w = crop_h, crop_w - results['crop_bbox'] = np.array( + crop_bbox = np.array( [x_offset, y_offset, x_offset + new_w, y_offset + new_h]) + results['crop_bbox'] = crop_bbox results['img_shape'] = (new_h, new_w) results['scales'] = self.scales @@ -907,10 +1034,10 @@ def __call__(self, results): new_crop_quadruple, dtype=np.float32) if not self.lazy: - results['imgs'] = [ - img[y_offset:y_offset + new_h, x_offset:x_offset + new_w] - for img in results['imgs'] - ] + if 'kp' in results: + results['kp'] = self._crop_kps(results['kp'], crop_bbox) + if 'imgs' in results: + results['imgs'] = self._crop_imgs(results['imgs'], crop_bbox) else: lazyop = results['lazy'] if lazyop['flip']: @@ -949,10 +1076,10 @@ def __repr__(self): class Resize: """Resize images to a specific size. - Required keys are "imgs", "img_shape", "modality", added or modified - keys are "imgs", "img_shape", "keep_ratio", "scale_factor", "lazy", - "resize_size". Required keys in "lazy" is None, added or modified key is - "interpolation". + Required keys are "img_shape", "modality", "imgs" (optional), "kp" + (optional), added or modified keys are "imgs", "img_shape", "keep_ratio", + "scale_factor", "lazy", "resize_size". Required keys in "lazy" is None, + added or modified key is "interpolation". Args: scale (float | Tuple[int]): If keep_ratio is True, it serves as scaling @@ -991,6 +1118,16 @@ def __init__(self, self.interpolation = interpolation self.lazy = lazy + def _resize_imgs(self, imgs, new_w, new_h): + return [ + mmcv.imresize( + img, (new_w, new_h), interpolation=self.interpolation) + for img in imgs + ] + + def _resize_kps(self, kps, scale_factor): + return kps * scale_factor + def __call__(self, results): """Performs the Resize augmentation. @@ -1000,6 +1137,9 @@ def __call__(self, results): """ _init_lazy_if_proper(results, self.lazy) + if 'kp' in results: + assert not self.lazy, ('Keypoint Augmentations are not compatible ' + 'with lazy == True') if 'scale_factor' not in results: results['scale_factor'] = np.array([1, 1], dtype=np.float32) @@ -1018,11 +1158,12 @@ def __call__(self, results): results['scale_factor'] = results['scale_factor'] * self.scale_factor if not self.lazy: - results['imgs'] = [ - mmcv.imresize( - img, (new_w, new_h), interpolation=self.interpolation) - for img in results['imgs'] - ] + if 'imgs' in results: + results['imgs'] = self._resize_imgs(results['imgs'], new_w, + new_h) + if 'kp' in results: + results['kp'] = self._resize_kps(results['kp'], + self.scale_factor) else: lazyop = results['lazy'] if lazyop['flip']: @@ -1103,11 +1244,13 @@ class Flip: Reverse the order of elements in the given imgs with a specific direction. The shape of the imgs is preserved, but the elements are reordered. - Required keys are "imgs", "img_shape", "modality", added or modified - keys are "imgs", "lazy" and "flip_direction". Required keys in "lazy" is - None, added or modified key are "flip" and "flip_direction". The Flip - augmentation should be placed after any cropping / reshaping augmentations, - to make sure crop_quadruple is calculated properly. 
+
+    Required keys are "img_shape", "modality", "imgs" (optional), "kp"
+    (optional), added or modified keys are "imgs", "kp", "lazy" and
+    "flip_direction". Required keys in "lazy" is None, added or modified keys
+    are "flip" and "flip_direction". The Flip augmentation should be placed
+    after any cropping / reshaping augmentations, to make sure crop_quadruple
+    is calculated properly.
 
     Args:
         flip_ratio (float): Probability of implementing flip. Default: 0.5.
@@ -1115,14 +1258,21 @@ class Flip:
             "horizontal" | "vertical". Default: "horizontal".
         flip_label_map (Dict[int, int] | None): Transform the label of the
             flipped image with the specific label. Default: None.
+        left (list[int]): Indexes of left keypoints, used to flip keypoints.
+            Default: [1, 3, 5, 7, 9, 11, 13, 15]. (COCO-17P keypoints)
+        right (list[int]): Indexes of right keypoints, used to flip keypoints.
+            Default: [2, 4, 6, 8, 10, 12, 14, 16]. (COCO-17P keypoints)
         lazy (bool): Determine whether to apply lazy operation. Default: False.
     """
-    _directions = ['horizontal', 'vertical']
+    # Only horizontal flip is useful
+    _directions = ['horizontal']
 
     def __init__(self,
                  flip_ratio=0.5,
                  direction='horizontal',
                  flip_label_map=None,
+                 left=[1, 3, 5, 7, 9, 11, 13, 15],
+                 right=[2, 4, 6, 8, 10, 12, 14, 16],
                  lazy=False):
         if direction not in self._directions:
             raise ValueError(f'Direction {direction} is not supported. '
@@ -1130,8 +1280,31 @@ def __init__(self,
         self.flip_ratio = flip_ratio
         self.direction = direction
         self.flip_label_map = flip_label_map
+        self.left = left
+        self.right = right
         self.lazy = lazy
 
+    def _flip_imgs(self, imgs, modality):
+        _ = [mmcv.imflip_(img, self.direction) for img in imgs]
+        lt = len(imgs)
+        if modality == 'Flow':
+            # The 1st frame of each 2 frames is flow-x
+            for i in range(0, lt, 2):
+                imgs[i] = mmcv.iminvert(imgs[i])
+        return imgs
+
+    def _flip_kps(self, kps, kpscores, img_width):
+        kp_x = kps[..., 0]
+        kp_x[kp_x != 0] = img_width - kp_x[kp_x != 0]
+        new_order = list(range(kps.shape[2]))
+        for left, right in zip(self.left, self.right):
+            new_order[left] = right
+            new_order[right] = left
+        kps = kps[:, :, new_order]
+        if kpscores is not None:
+            kpscores = kpscores[:, :, new_order]
+        return kps, kpscores
+
     def __call__(self, results):
        """Performs the Flip augmentation.
 
@@ -1140,6 +1313,10 @@ def __call__(self, results):
         Returns:
             dict: Returns the result dict that is updated with new keys
                 to the next transform in pipeline.
""" _init_lazy_if_proper(results, self.lazy) + if 'kp' in results: + assert not self.lazy, ('Keypoint Augmentations are not compatible ' + 'with lazy == True') + modality = results['modality'] if modality == 'Flow': assert self.direction == 'horizontal' @@ -1148,6 +1325,7 @@ def __call__(self, results): results['flip'] = flip results['flip_direction'] = self.direction + img_width = results['img_shape'][1] if self.flip_label_map is not None and flip: results['label'] = self.flip_label_map.get(results['label'], @@ -1155,17 +1333,18 @@ def __call__(self, results): if not self.lazy: if flip: - for i, img in enumerate(results['imgs']): - mmcv.imflip_(img, self.direction) - lt = len(results['imgs']) - for i in range(0, lt, 2): - # flow with even indexes are x_flow, which need to be - # inverted when doing horizontal flip - if modality == 'Flow': - results['imgs'][i] = mmcv.iminvert(results['imgs'][i]) - - else: - results['imgs'] = list(results['imgs']) + if 'imgs' in results: + results['imgs'] = self._flip_imgs(results['imgs'], + modality) + if 'kp' in results: + kp = results['kp'] + kpscore = None + if 'kpscore' in results: + kpscore = results['kpscore'] + kp, kpscore = self._flip_kps(kp, kpscore, img_width) + results['kp'] = kp + if 'kpscore' in results: + results['kpscore'] = kpscore else: lazyop = results['lazy'] if lazyop['flip']: @@ -1457,12 +1636,13 @@ def __repr__(self): @PIPELINES.register_module() -class CenterCrop: +class CenterCrop(RandomCrop): """Crop the center area from images. - Required keys are "imgs", "img_shape", added or modified keys are "imgs", - "crop_bbox", "lazy" and "img_shape". Required keys in "lazy" is - "crop_bbox", added or modified key is "crop_bbox". + Required keys are "img_shape", "imgs" (optional), "kp" (optional), added or + modified keys are "imgs", "kp", "crop_bbox", "lazy" and "img_shape". + Required keys in "lazy" is "crop_bbox", added or modified key is + "crop_bbox". Args: crop_size (int | tuple[int]): (w, h) of crop size. @@ -1484,6 +1664,9 @@ def __call__(self, results): to the next transform in pipeline. 
""" _init_lazy_if_proper(results, self.lazy) + if 'kp' in results: + assert not self.lazy, ('Keypoint Augmentations are not compatible ' + 'with lazy == True') img_h, img_w = results['img_shape'] crop_w, crop_h = self.crop_size @@ -1494,7 +1677,8 @@ def __call__(self, results): bottom = top + crop_h new_h, new_w = bottom - top, right - left - results['crop_bbox'] = np.array([left, top, right, bottom]) + crop_bbox = np.array([left, top, right, bottom]) + results['crop_bbox'] = crop_bbox results['img_shape'] = (new_h, new_w) if 'crop_quadruple' not in results: @@ -1517,9 +1701,10 @@ def __call__(self, results): new_crop_quadruple, dtype=np.float32) if not self.lazy: - results['imgs'] = [ - img[top:bottom, left:right] for img in results['imgs'] - ] + if 'kp' in results: + results['kp'] = self._crop_kps(results['kp'], crop_bbox) + if 'imgs' in results: + results['imgs'] = self._crop_imgs(results['imgs'], crop_bbox) else: lazyop = results['lazy'] if lazyop['flip']: From 41ebf8a238db9ee888565cd65c6d1dff8bd89c5f Mon Sep 17 00:00:00 2001 From: Kenny Date: Wed, 7 Apr 2021 11:52:45 +0800 Subject: [PATCH 019/414] fix Flip --- mmaction/datasets/pipelines/augmentations.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/mmaction/datasets/pipelines/augmentations.py b/mmaction/datasets/pipelines/augmentations.py index 0c1f94c80c..1a84423c7f 100644 --- a/mmaction/datasets/pipelines/augmentations.py +++ b/mmaction/datasets/pipelines/augmentations.py @@ -1265,7 +1265,7 @@ class Flip: lazy (bool): Determine whether to apply lazy operation. Default: False. """ # Only horizontal flip is useful - _directions = ['horizontal'] + _directions = ['horizontal', 'vertical'] def __init__(self, flip_ratio=0.5, @@ -1316,6 +1316,9 @@ def __call__(self, results): if 'kp' in results: assert not self.lazy, ('Keypoint Augmentations are not compatible ' 'with lazy == True') + assert self.direction == 'horizontal', ( + 'Only horizontal flips are' + 'supported for human keypoints') modality = results['modality'] if modality == 'Flow': From 16c2c4f17edfc153ca39dc73491c221681408193 Mon Sep 17 00:00:00 2001 From: irvingzhang0512 Date: Wed, 7 Apr 2021 13:11:32 +0800 Subject: [PATCH 020/414] [Improvement] Add softmax option for pytorch2onnx tool. (#781) * first commit * update docs * add unittest * update changelog --- docs/changelog.md | 14 ++++++++++++++ docs/tutorials/6_export_model.md | 1 + docs/useful_tools.md | 1 + docs_zh_CN/tutorials/6_export_model.md | 1 + docs_zh_CN/useful_tools.md | 1 + mmaction/models/recognizers/recognizer2d.py | 8 +++++--- mmaction/models/recognizers/recognizer3d.py | 9 ++++++--- .../test_recognizers/test_recognizer2d.py | 6 ++++++ .../test_recognizers/test_recognizer3d.py | 12 ++++++++++++ tools/pytorch2onnx.py | 7 ++++++- 10 files changed, 53 insertions(+), 7 deletions(-) diff --git a/docs/changelog.md b/docs/changelog.md index 3e2559d543..f747898662 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -1,5 +1,19 @@ ## Changelog +### Master + +**Highlights** + +**New Features** + +**Improvements** + +- Add softmax option for pytorch2onnx tool ([#781](https://github.com/open-mmlab/mmaction2/pull/781)) + +**Bug and Typo Fixes** + +**ModelZoo** + ### 0.13.0 (31/03/2021) **Highlights** diff --git a/docs/tutorials/6_export_model.md b/docs/tutorials/6_export_model.md index a3a3f41514..8db4804bb1 100644 --- a/docs/tutorials/6_export_model.md +++ b/docs/tutorials/6_export_model.md @@ -53,6 +53,7 @@ Optional arguments: - `--output-file`: The output onnx model name. 
If not specified, it will be set to `tmp.onnx`.
 - `--is-localizer`: Determines whether the model to be exported is a localizer. If not specified, it will be set to `False`.
 - `--opset-version`: Determines the operation set version of onnx, we recommend you to use a higher version such as 11 for compatibility. If not specified, it will be set to `11`.
+- `--softmax`: Determines whether to add a softmax layer at the end of recognizers. If not specified, it will be set to `False`. For now, localizers are not supported.
 
 ### Recognizers
diff --git a/docs/useful_tools.md b/docs/useful_tools.md
index 5de6fa9a19..0e83116dc6 100644
--- a/docs/useful_tools.md
+++ b/docs/useful_tools.md
@@ -98,6 +98,7 @@ You may use the result for simple comparisons, but double check it before you ad
 
 `/tools/pytorch2onnx.py` is a script to convert model to [ONNX](https://github.com/onnx/onnx) format. It also supports comparing the output results between Pytorch and ONNX model for verification. Run `pip install onnx onnxruntime` first to install the dependency.
+Please note that a softmax layer could be added for recognizers by the `--softmax` option, in order to get predictions in the range `[0, 1]`.
 
 - For recognizers, please run:
diff --git a/docs_zh_CN/tutorials/6_export_model.md b/docs_zh_CN/tutorials/6_export_model.md
index 8e73caaccf..9adb1b1608 100644
--- a/docs_zh_CN/tutorials/6_export_model.md
+++ b/docs_zh_CN/tutorials/6_export_model.md
@@ -54,6 +54,7 @@ python tools/pytorch2onnx.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--shape ${SHAPE}
 - `--output-file`: 导出的 onnx 模型名。如果没有被指定,它将被置为 `tmp.onnx`。
 - `--is-localizer`:决定导出的模型是否为时序检测器。如果没有被指定,它将被置为 `False`。
 - `--opset-version`:决定 onnx 的执行版本,MMAction2 推荐用户使用高版本(例如 11 版本)的 onnx 以确保稳定性。如果没有被指定,它将被置为 `11`。
+- `--softmax`: 是否在行为识别器末尾添加 Softmax。如果没有指定,将被置为 `False`。目前仅支持行为识别器,不支持时序动作检测器。
 
 ### 行为识别器
diff --git a/docs_zh_CN/useful_tools.md b/docs_zh_CN/useful_tools.md
index e45796cfb9..f364557222 100644
--- a/docs_zh_CN/useful_tools.md
+++ b/docs_zh_CN/useful_tools.md
@@ -98,6 +98,7 @@ Params: 28.04 M
 
 `/tools/pytorch2onnx.py` 脚本用于将模型转换为 [ONNX](https://github.com/onnx/onnx) 格式。
 同时,该脚本支持比较 PyTorch 模型和 ONNX 模型的输出结果,验证输出结果是否相同。
 本功能依赖于 `onnx` 以及 `onnxruntime`,使用前请先通过 `pip install onnx onnxruntime` 安装依赖包。
+请注意,可通过 `--softmax` 选项在行为识别器末尾添加 Softmax 层,从而获取 `[0, 1]` 范围内的预测结果。
 
 - 对于行为识别模型,请运行:
diff --git a/mmaction/models/recognizers/recognizer2d.py b/mmaction/models/recognizers/recognizer2d.py
index 0847fb6ae8..17690ecbf0 100644
--- a/mmaction/models/recognizers/recognizer2d.py
+++ b/mmaction/models/recognizers/recognizer2d.py
@@ -131,7 +131,7 @@ def forward_test(self, imgs):
             return self._do_fcn_test(imgs).cpu().numpy()
         return self._do_test(imgs).cpu().numpy()
 
-    def forward_dummy(self, imgs):
+    def forward_dummy(self, imgs, softmax=False):
         """Used for computing network FLOPs.
 
         See ``tools/analysis/get_flops.py``.
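The `softmax` switch threaded through `forward_dummy` here (and through the 3D recognizer below) only post-processes the head output. As a standalone illustration, with an arbitrary batch size and class count, this is the normalization it enables; note that passing an explicit `dim`, as in the sketch, also silences PyTorch's implicit-dimension warning:

```python
import torch
import torch.nn.functional as F

logits = torch.randn(2, 400)      # (batch, num_classes) raw head output
probs = F.softmax(logits, dim=1)  # each row now lies in [0, 1] and sums to 1
assert torch.allclose(probs.sum(dim=1), torch.ones(2))
```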
@@ -157,8 +157,10 @@ def forward_dummy(self, imgs):
             x = x.squeeze(2)
             num_segs = 1
 
-        outs = (self.cls_head(x, num_segs), )
-        return outs
+        outs = self.cls_head(x, num_segs)
+        if softmax:
+            outs = nn.functional.softmax(outs)
+        return (outs, )
 
     def forward_gradcam(self, imgs):
         """Defines the computation performed at every call when using gradcam
diff --git a/mmaction/models/recognizers/recognizer3d.py b/mmaction/models/recognizers/recognizer3d.py
index 20d60e3255..0589ee73b2 100644
--- a/mmaction/models/recognizers/recognizer3d.py
+++ b/mmaction/models/recognizers/recognizer3d.py
@@ -1,4 +1,5 @@
 import torch
+from torch import nn
 
 from ..registry import RECOGNIZERS
 from .base import BaseRecognizer
@@ -61,7 +62,7 @@ def forward_test(self, imgs):
         testing."""
         return self._do_test(imgs).cpu().numpy()
 
-    def forward_dummy(self, imgs):
+    def forward_dummy(self, imgs, softmax=False):
         """Used for computing network FLOPs.
 
         See ``tools/analysis/get_flops.py``.
@@ -78,8 +79,10 @@ def forward_dummy(self, imgs):
         if hasattr(self, 'neck'):
             x, _ = self.neck(x)
 
-        outs = (self.cls_head(x), )
-        return outs
+        outs = self.cls_head(x)
+        if softmax:
+            outs = nn.functional.softmax(outs)
+        return (outs, )
 
     def forward_gradcam(self, imgs):
         """Defines the computation performed at every call when using gradcam
diff --git a/tests/test_models/test_recognizers/test_recognizer2d.py b/tests/test_models/test_recognizers/test_recognizer2d.py
index 781576a83e..b021832a8f 100644
--- a/tests/test_models/test_recognizers/test_recognizer2d.py
+++ b/tests/test_models/test_recognizers/test_recognizer2d.py
@@ -30,6 +30,12 @@ def test_tsn():
     for one_img in img_list:
         recognizer(one_img, gradcam=True)
 
+    # test forward dummy
+    recognizer.forward_dummy(imgs, softmax=False)
+    res = recognizer.forward_dummy(imgs, softmax=True)[0]
+    assert torch.min(res) >= 0
+    assert torch.max(res) <= 1
+
     mmcls_backbone = dict(
         type='mmcls.ResNeXt',
         depth=101,
diff --git a/tests/test_models/test_recognizers/test_recognizer3d.py b/tests/test_models/test_recognizers/test_recognizer3d.py
index 9fab8ee627..31064c7f33 100644
--- a/tests/test_models/test_recognizers/test_recognizer3d.py
+++ b/tests/test_models/test_recognizers/test_recognizer3d.py
@@ -37,6 +37,12 @@ def test_i3d():
         for one_img in img_list:
             recognizer(one_img, gradcam=True)
 
+        # Test forward dummy
+        recognizer.forward_dummy(imgs, softmax=False)
+        res = recognizer.forward_dummy(imgs, softmax=True)[0]
+        assert torch.min(res) >= 0
+        assert torch.max(res) <= 1
+
     else:
         losses = recognizer(imgs, gt_labels)
         assert isinstance(losses, dict)
@@ -52,6 +58,12 @@ def test_i3d():
         for one_img in img_list:
             recognizer(one_img, gradcam=True)
 
+        # Test forward dummy
+        recognizer.forward_dummy(imgs, softmax=False)
+        res = recognizer.forward_dummy(imgs, softmax=True)[0]
+        assert torch.min(res) >= 0
+        assert torch.max(res) <= 1
+
 
 def test_r2plus1d():
     config = get_recognizer_cfg(
diff --git a/tools/pytorch2onnx.py b/tools/pytorch2onnx.py
index 1358e069b3..e421eaabb1 100644
--- a/tools/pytorch2onnx.py
+++ b/tools/pytorch2onnx.py
@@ -122,6 +122,10 @@ def parse_args():
         nargs='+',
         default=[1, 3, 8, 224, 224],
         help='input video size')
+    parser.add_argument(
+        '--softmax',
+        action='store_true',
+        help='whether to add a softmax layer at the end of recognizers')
     args = parser.parse_args()
     return args
 
@@ -144,7 +148,8 @@
 
     # onnx.export does not support kwargs
     if hasattr(model, 'forward_dummy'):
-        model.forward = model.forward_dummy
+        from functools import partial
+        model.forward = partial(model.forward_dummy, 
softmax=args.softmax) elif hasattr(model, '_forward') and args.is_localizer: model.forward = model._forward else: From a2cbd11346aff1adf2a1a46cd443d494045952d8 Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Wed, 7 Apr 2021 13:31:21 +0800 Subject: [PATCH 021/414] [Improvement] Speedup AVATest (#784) * resolve comments * update changelog * speed up readcsv * remove difficult & group flags * update action_thr * remove arg capacity --- .../detection/_base_/models/slowonly_r50.py | 2 +- .../_base_/models/slowonly_r50_nl.py | 2 +- ...etics_pretrained_r50_4x16x1_20e_ava_rgb.py | 2 +- ...etics_pretrained_r50_4x16x1_20e_ava_rgb.py | 2 +- ...d_r50_4x16x1_20e_ava_rgb_custom_classes.py | 2 +- ...netics_pretrained_r50_8x8x1_20e_ava_rgb.py | 2 +- ...etics_pretrained_r101_8x8x1_20e_ava_rgb.py | 2 +- ...etics_pretrained_r50_4x16x1_20e_ava_rgb.py | 2 +- ...d_r50_4x16x1_20e_ava_rgb_custom_classes.py | 2 +- ...ource_pretrained_r101_8x8x1_20e_ava_rgb.py | 2 +- ...ource_pretrained_r50_4x16x1_20e_ava_rgb.py | 2 +- docs/tutorials/1_config.md | 2 +- docs_zh_CN/tutorials/1_config.md | 2 +- .../object_detection_evaluation.py | 90 +++---------------- .../ava_evaluation/per_image_evaluation.py | 86 +++--------------- mmaction/core/evaluation/ava_utils.py | 33 +++---- 16 files changed, 47 insertions(+), 188 deletions(-) diff --git a/configs/detection/_base_/models/slowonly_r50.py b/configs/detection/_base_/models/slowonly_r50.py index 9938ce05da..965338ea44 100644 --- a/configs/detection/_base_/models/slowonly_r50.py +++ b/configs/detection/_base_/models/slowonly_r50.py @@ -40,4 +40,4 @@ add_gt_as_proposals=True), pos_weight=1.0, debug=False)), - test_cfg=dict(rcnn=dict(action_thr=0.00))) + test_cfg=dict(rcnn=dict(action_thr=0.002))) diff --git a/configs/detection/_base_/models/slowonly_r50_nl.py b/configs/detection/_base_/models/slowonly_r50_nl.py index 1c33f9b24d..fd2f739da7 100644 --- a/configs/detection/_base_/models/slowonly_r50_nl.py +++ b/configs/detection/_base_/models/slowonly_r50_nl.py @@ -47,4 +47,4 @@ add_gt_as_proposals=True), pos_weight=1.0, debug=False)), - test_cfg=dict(rcnn=dict(action_thr=0.00))) + test_cfg=dict(rcnn=dict(action_thr=0.002))) diff --git a/configs/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py b/configs/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py index ec208c27db..6b5796425c 100644 --- a/configs/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py +++ b/configs/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py @@ -57,7 +57,7 @@ add_gt_as_proposals=True), pos_weight=1.0, debug=False)), - test_cfg=dict(rcnn=dict(action_thr=0.00))) + test_cfg=dict(rcnn=dict(action_thr=0.002))) dataset_type = 'AVADataset' data_root = 'data/ava/rawframes' diff --git a/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py b/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py index 58a7c40d4d..22020db977 100644 --- a/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py +++ b/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py @@ -56,7 +56,7 @@ add_gt_as_proposals=True), pos_weight=1.0, debug=False)), - test_cfg=dict(rcnn=dict(action_thr=0.00))) + test_cfg=dict(rcnn=dict(action_thr=0.002))) dataset_type = 'AVADataset' data_root = 'data/ava/rawframes' diff --git 
a/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py b/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py index 5bbdfa3d61..3b14fabd04 100644 --- a/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py +++ b/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py @@ -62,7 +62,7 @@ add_gt_as_proposals=True), pos_weight=1.0, debug=False)), - test_cfg=dict(rcnn=dict(action_thr=0.00))) + test_cfg=dict(rcnn=dict(action_thr=0.002))) dataset_type = 'AVADataset' data_root = 'data/ava/rawframes' diff --git a/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py b/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py index 7044d3e686..9106fa8d29 100644 --- a/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py +++ b/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py @@ -56,7 +56,7 @@ add_gt_as_proposals=True), pos_weight=1.0, debug=False)), - test_cfg=dict(rcnn=dict(action_thr=0.00))) + test_cfg=dict(rcnn=dict(action_thr=0.002))) dataset_type = 'AVADataset' data_root = 'data/ava/rawframes' diff --git a/configs/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb.py b/configs/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb.py index 374bef8a36..ce12865cd0 100644 --- a/configs/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb.py +++ b/configs/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb.py @@ -40,7 +40,7 @@ add_gt_as_proposals=True), pos_weight=1.0, debug=False)), - test_cfg=dict(rcnn=dict(action_thr=0.00))) + test_cfg=dict(rcnn=dict(action_thr=0.002))) dataset_type = 'AVADataset' data_root = 'data/ava/rawframes' diff --git a/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py b/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py index 3080c5e709..7ff769e7a8 100644 --- a/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py +++ b/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py @@ -40,7 +40,7 @@ add_gt_as_proposals=True), pos_weight=1.0, debug=False)), - test_cfg=dict(rcnn=dict(action_thr=0.00))) + test_cfg=dict(rcnn=dict(action_thr=0.002))) dataset_type = 'AVADataset' data_root = 'data/ava/rawframes' diff --git a/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py b/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py index e3608b7acb..1f81b01afa 100644 --- a/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py +++ b/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py @@ -47,7 +47,7 @@ add_gt_as_proposals=True), pos_weight=1.0, debug=False)), - test_cfg=dict(rcnn=dict(action_thr=0.00))) + test_cfg=dict(rcnn=dict(action_thr=0.002))) dataset_type = 'AVADataset' data_root = 'data/ava/rawframes' diff --git a/configs/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb.py b/configs/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb.py index 11b3737341..0113a42751 100644 --- a/configs/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb.py +++ b/configs/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb.py @@ -40,7 +40,7 @@ add_gt_as_proposals=True), pos_weight=1.0, debug=False)), 
- test_cfg=dict(rcnn=dict(action_thr=0.00))) + test_cfg=dict(rcnn=dict(action_thr=0.002))) dataset_type = 'AVADataset' data_root = 'data/ava/rawframes' diff --git a/configs/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb.py b/configs/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb.py index bd320c19fa..37af19e945 100644 --- a/configs/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb.py +++ b/configs/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb.py @@ -40,7 +40,7 @@ add_gt_as_proposals=True), pos_weight=1.0, debug=False)), - test_cfg=dict(rcnn=dict(action_thr=0.00))) + test_cfg=dict(rcnn=dict(action_thr=0.002))) dataset_type = 'AVADataset' data_root = 'data/ava/rawframes' diff --git a/docs/tutorials/1_config.md b/docs/tutorials/1_config.md index 71802de61e..569e55176c 100644 --- a/docs/tutorials/1_config.md +++ b/docs/tutorials/1_config.md @@ -488,7 +488,7 @@ We incorporate modular design into our config system, which is convenient to con debug=False)), # Debug mode test_cfg=dict( # Testing config of FastRCNN rcnn=dict( # Dict for rcnn testing config - action_thr=0.00))) # The threshold of an action + action_thr=0.002))) # The threshold of an action # dataset settings dataset_type = 'AVADataset' # Type of dataset for training, validation and testing diff --git a/docs_zh_CN/tutorials/1_config.md b/docs_zh_CN/tutorials/1_config.md index 27760e5bb3..22c1489687 100644 --- a/docs_zh_CN/tutorials/1_config.md +++ b/docs_zh_CN/tutorials/1_config.md @@ -485,7 +485,7 @@ MMAction2 将模块化设计整合到配置文件系统中,以便于执行各 debug=False)), # 是否为 debug 模式 test_cfg=dict( # 测试 FastRCNN 的超参设置 rcnn=dict( # rcnn 测试字典设置 - action_thr=0.00))) # 某行为的阈值 + action_thr=0.002))) # 某行为的阈值 # 数据集设置 dataset_type = 'AVADataset' # 训练,验证,测试的数据集类型 diff --git a/mmaction/core/evaluation/ava_evaluation/object_detection_evaluation.py b/mmaction/core/evaluation/ava_evaluation/object_detection_evaluation.py index a0539eacf4..95f0cc501c 100644 --- a/mmaction/core/evaluation/ava_evaluation/object_detection_evaluation.py +++ b/mmaction/core/evaluation/ava_evaluation/object_detection_evaluation.py @@ -30,6 +30,7 @@ import collections import logging from abc import ABCMeta, abstractmethod +from collections import defaultdict import numpy as np @@ -163,10 +164,6 @@ def add_single_ground_truth_image_info(self, image_id, groundtruth_dict): standard_fields.InputDataFields.groundtruth_classes: integer numpy array of shape [num_boxes] containing 1-indexed groundtruth classes for the boxes. - standard_fields.InputDataFields.groundtruth_difficult: Optional - length M numpy boolean array denoting whether a ground - truth box is a difficult instance or not. This field is - optional to support the case that no boxes are difficult. standard_fields.InputDataFields.groundtruth_instance_masks: Optional numpy array of shape [num_boxes, height, width] with values in {0, 1}. @@ -184,21 +181,7 @@ def add_single_ground_truth_image_info(self, image_id, groundtruth_dict): groundtruth_dict[ standard_fields.InputDataFields.groundtruth_classes] - self._label_id_offset) - # If the key is not present in the groundtruth_dict or the array is - # empty (unless there are no annotations for the groundtruth on this - # image) use values from the dictionary or insert None otherwise. 
- if (standard_fields.InputDataFields.groundtruth_difficult - in groundtruth_dict.keys()) and (groundtruth_dict[ - standard_fields.InputDataFields.groundtruth_difficult].size - or - not groundtruth_classes.size): - groundtruth_difficult = groundtruth_dict[ - standard_fields.InputDataFields.groundtruth_difficult] - else: - groundtruth_difficult = None - if not len(self._image_ids) % 1000: - logging.warn(('image %s does not have groundtruth difficult ' - 'flag specified'), image_id) + groundtruth_masks = None if self._evaluate_masks: if (standard_fields.InputDataFields.groundtruth_instance_masks @@ -212,7 +195,6 @@ def add_single_ground_truth_image_info(self, image_id, groundtruth_dict): groundtruth_boxes=groundtruth_dict[ standard_fields.InputDataFields.groundtruth_boxes], groundtruth_class_labels=groundtruth_classes, - groundtruth_is_difficult_list=groundtruth_difficult, groundtruth_masks=groundtruth_masks, ) self._image_ids.update([image_id]) @@ -397,8 +379,6 @@ def __init__( self.groundtruth_boxes = {} self.groundtruth_class_labels = {} self.groundtruth_masks = {} - self.groundtruth_is_difficult_list = {} - self.groundtruth_is_group_of_list = {} self.num_gt_instances_per_class = np.zeros(self.num_class, dtype=int) self.num_gt_imgs_per_class = np.zeros(self.num_class, dtype=int) @@ -424,8 +404,6 @@ def add_single_ground_truth_image_info( image_key, groundtruth_boxes, groundtruth_class_labels, - groundtruth_is_difficult_list=None, - groundtruth_is_group_of_list=None, groundtruth_masks=None, ): """Adds groundtruth for a single image to be used for evaluation. @@ -437,14 +415,6 @@ def add_single_ground_truth_image_info( [ymin, xmin, ymax, xmax] in absolute image coordinates. groundtruth_class_labels: integer numpy array of shape [num_boxes] containing 0-indexed groundtruth classes for the boxes. - groundtruth_is_difficult_list: A length M numpy boolean array - denoting whether a ground truth box is a difficult instance or - not. To support the case that no boxes are difficult, it is by - default set as None. - groundtruth_is_group_of_list: A length M numpy boolean array - denoting whether a ground truth box is a group-of box or not. - To support the case that no boxes are groups-of, it is by - default set as None. groundtruth_masks: uint8 numpy array of shape [num_boxes, height, width] containing `num_boxes` groundtruth masks. The mask values range from 0 to 1. @@ -457,22 +427,8 @@ def add_single_ground_truth_image_info( self.groundtruth_boxes[image_key] = groundtruth_boxes self.groundtruth_class_labels[image_key] = groundtruth_class_labels self.groundtruth_masks[image_key] = groundtruth_masks - if groundtruth_is_difficult_list is None: - num_boxes = groundtruth_boxes.shape[0] - groundtruth_is_difficult_list = np.zeros(num_boxes, dtype=bool) - self.groundtruth_is_difficult_list[ - image_key] = groundtruth_is_difficult_list.astype(dtype=bool) - if groundtruth_is_group_of_list is None: - num_boxes = groundtruth_boxes.shape[0] - groundtruth_is_group_of_list = np.zeros(num_boxes, dtype=bool) - self.groundtruth_is_group_of_list[ - image_key] = groundtruth_is_group_of_list.astype(dtype=bool) - - self._update_ground_truth_statistics( - groundtruth_class_labels, - groundtruth_is_difficult_list.astype(dtype=bool), - groundtruth_is_group_of_list.astype(dtype=bool), - ) + + self._update_ground_truth_statistics(groundtruth_class_labels) def add_single_detected_image_info( self, @@ -523,10 +479,6 @@ def add_single_detected_image_info( # Masks are popped instead of look up. 
The reason is that we do not # want to keep all masks in memory which can cause memory overflow. groundtruth_masks = self.groundtruth_masks.pop(image_key) - groundtruth_is_difficult_list = self.groundtruth_is_difficult_list[ - image_key] - groundtruth_is_group_of_list = self.groundtruth_is_group_of_list[ - image_key] else: groundtruth_boxes = np.empty(shape=[0, 4], dtype=float) groundtruth_class_labels = np.array([], dtype=int) @@ -534,8 +486,6 @@ def add_single_detected_image_info( groundtruth_masks = None else: groundtruth_masks = np.empty(shape=[0, 1, 1], dtype=float) - groundtruth_is_difficult_list = np.array([], dtype=bool) - groundtruth_is_group_of_list = np.array([], dtype=bool) ( scores, tp_fp_labels, @@ -545,8 +495,6 @@ def add_single_detected_image_info( detected_class_labels=detected_class_labels, groundtruth_boxes=groundtruth_boxes, groundtruth_class_labels=groundtruth_class_labels, - groundtruth_is_difficult_list=groundtruth_is_difficult_list, - groundtruth_is_group_of_list=groundtruth_is_group_of_list, detected_masks=detected_masks, groundtruth_masks=groundtruth_masks, ) @@ -556,35 +504,19 @@ def add_single_detected_image_info( self.scores_per_class[i].append(scores[i]) self.tp_fp_labels_per_class[i].append(tp_fp_labels[i]) - def _update_ground_truth_statistics( - self, - groundtruth_class_labels, - groundtruth_is_difficult_list, - groundtruth_is_group_of_list, - ): + def _update_ground_truth_statistics(self, groundtruth_class_labels): """Update grouth truth statitistics. - 1. Difficult boxes are ignored when counting the number of ground truth - instances as done in Pascal VOC devkit. - 2. Difficult boxes are treated as normal boxes when computing CorLoc - related statitistics. - Args: groundtruth_class_labels: An integer numpy array of length M, representing M class labels of object instances in ground truth - groundtruth_is_difficult_list: A boolean numpy array of length M - denoting whether a ground truth box is a difficult instance or - not - groundtruth_is_group_of_list: A boolean numpy array of length M - denoting whether a ground truth box is a group-of box or not """ - for class_index in range(self.num_class): - num_gt_instances = np.sum(groundtruth_class_labels[ - ~groundtruth_is_difficult_list - & ~groundtruth_is_group_of_list] == class_index) - self.num_gt_instances_per_class[class_index] += num_gt_instances - if np.any(groundtruth_class_labels == class_index): - self.num_gt_imgs_per_class[class_index] += 1 + count = defaultdict(lambda: 0) + for label in groundtruth_class_labels: + count[label] += 1 + for k in count: + self.num_gt_instances_per_class[k] += count[k] + self.num_gt_imgs_per_class[k] += 1 def evaluate(self): """Compute evaluation result. diff --git a/mmaction/core/evaluation/ava_evaluation/per_image_evaluation.py b/mmaction/core/evaluation/ava_evaluation/per_image_evaluation.py index 3013ae7ce2..6265c17d7a 100644 --- a/mmaction/core/evaluation/ava_evaluation/per_image_evaluation.py +++ b/mmaction/core/evaluation/ava_evaluation/per_image_evaluation.py @@ -47,19 +47,13 @@ def compute_object_detection_metrics( detected_class_labels, groundtruth_boxes, groundtruth_class_labels, - groundtruth_is_difficult_list, - groundtruth_is_group_of_list, detected_masks=None, groundtruth_masks=None, ): """Evaluates detections as being tp, fp or ignored from a single image. The evaluation is done in two stages: - 1. All detections are matched to non group-of boxes; true positives - are determined and detections matched to difficult boxes are - ignored. - 2. 
Detections that are determined as false positives are matched - against group-of boxes and ignored if matched. + 1. All detections are matched to non group-of boxes. Args: detected_boxes: A float numpy array of shape [N, 4], representing N @@ -74,11 +68,6 @@ def compute_object_detection_metrics( representing M regions of object instances in ground truth groundtruth_class_labels: An integer numpy array of shape [M, 1], representing M class labels of object instances in ground truth - groundtruth_is_difficult_list: A boolean numpy array of length M - denoting whether a ground truth box is a difficult instance or - not - groundtruth_is_group_of_list: A boolean numpy array of length M - denoting whether a ground truth box has group-of tag detected_masks: (optional) A uint8 numpy array of shape [N, height, width]. If not None, the metrics will be computed based on masks. @@ -110,8 +99,6 @@ def compute_object_detection_metrics( detected_class_labels=detected_class_labels, groundtruth_boxes=groundtruth_boxes, groundtruth_class_labels=groundtruth_class_labels, - groundtruth_is_difficult_list=groundtruth_is_difficult_list, - groundtruth_is_group_of_list=groundtruth_is_group_of_list, detected_masks=detected_masks, groundtruth_masks=groundtruth_masks, ) @@ -125,8 +112,6 @@ def _compute_tp_fp( detected_class_labels, groundtruth_boxes, groundtruth_class_labels, - groundtruth_is_difficult_list, - groundtruth_is_group_of_list, detected_masks=None, groundtruth_masks=None, ): @@ -146,11 +131,6 @@ def _compute_tp_fp( representing M regions of object instances in ground truth groundtruth_class_labels: An integer numpy array of shape [M, 1], representing M class labels of object instances in ground truth - groundtruth_is_difficult_list: A boolean numpy array of length M - denoting whether a ground truth box is a difficult instance or - not - groundtruth_is_group_of_list: A boolean numpy array of length M - denoting whether a ground truth box has group-of tag detected_masks: (optional) A np.uint8 numpy array of shape [N, height, width]. If not None, the scores will be computed based on masks. @@ -179,10 +159,6 @@ def _compute_tp_fp( result_scores = [] result_tp_fp_labels = [] for i in range(self.num_groundtruth_classes): - groundtruth_is_difficult_list_at_ith_class = ( - groundtruth_is_difficult_list[groundtruth_class_labels == i]) - groundtruth_is_group_of_list_at_ith_class = ( - groundtruth_is_group_of_list[groundtruth_class_labels == i]) ( gt_boxes_at_ith_class, gt_masks_at_ith_class, @@ -199,10 +175,6 @@ def _compute_tp_fp( detected_boxes=detected_boxes_at_ith_class, detected_scores=detected_scores_at_ith_class, groundtruth_boxes=gt_boxes_at_ith_class, - groundtruth_is_difficult_list=( - groundtruth_is_difficult_list_at_ith_class), - groundtruth_is_group_of_list=( - groundtruth_is_group_of_list_at_ith_class), detected_masks=detected_masks_at_ith_class, groundtruth_masks=gt_masks_at_ith_class, ) @@ -210,13 +182,8 @@ def _compute_tp_fp( result_tp_fp_labels.append(tp_fp_labels) return result_scores, result_tp_fp_labels - def _get_overlaps_and_scores_box_mode( - self, - detected_boxes, - detected_scores, - groundtruth_boxes, - groundtruth_is_group_of_list, - ): + def _get_overlaps_and_scores_box_mode(self, detected_boxes, + detected_scores, groundtruth_boxes): """Computes overlaps and scores between detected and groudntruth boxes. 
Args: @@ -226,10 +193,6 @@ def _get_overlaps_and_scores_box_mode( classification score groundtruth_boxes: A numpy array of shape [M, 4] representing ground truth box coordinates - groundtruth_is_group_of_list: A boolean numpy array of length M - denoting whether a ground truth box has group-of tag. If a - groundtruth box is group-of box, every detection matching this - box is ignored. Returns: iou: A float numpy array of size [num_detected_boxes, @@ -243,8 +206,7 @@ def _get_overlaps_and_scores_box_mode( """ detected_boxlist = np_box_list.BoxList(detected_boxes) detected_boxlist.add_field('scores', detected_scores) - gt_non_group_of_boxlist = np_box_list.BoxList( - groundtruth_boxes[~groundtruth_is_group_of_list]) + gt_non_group_of_boxlist = np_box_list.BoxList(groundtruth_boxes) iou = np_box_ops.iou(detected_boxlist.get(), gt_non_group_of_boxlist.get()) @@ -257,8 +219,6 @@ def _compute_tp_fp_for_single_class( detected_boxes, detected_scores, groundtruth_boxes, - groundtruth_is_difficult_list, - groundtruth_is_group_of_list, detected_masks=None, groundtruth_masks=None, ): @@ -272,14 +232,6 @@ def _compute_tp_fp_for_single_class( classification score groundtruth_boxes: A numpy array of shape [M, 4] representing groundtruth box coordinates - groundtruth_is_difficult_list: A boolean numpy array of length M - denoting whether a ground truth box is a difficult instance or - not. If a groundtruth box is difficult, every detection - matching this box is ignored. - groundtruth_is_group_of_list: A boolean numpy array of length M - denoting whether a ground truth box has group-of tag. If a - groundtruth box is group-of box, every detection matching this - box is ignored. detected_masks: (optional) A uint8 numpy array of shape [N, height, width]. If not None, the scores will be computed based on masks. @@ -288,8 +240,7 @@ def _compute_tp_fp_for_single_class( Returns: Two arrays of the same size, containing all boxes that were - evaluated as being true positives or false positives; if a box - matched to a difficult box or to a group-of box, it is ignored. + evaluated as being true positives or false positives. scores: A numpy array representing the detection scores. tp_fp_labels: a boolean numpy array indicating whether a detection @@ -306,45 +257,30 @@ def _compute_tp_fp_for_single_class( ) = self._get_overlaps_and_scores_box_mode( detected_boxes=detected_boxes, detected_scores=detected_scores, - groundtruth_boxes=groundtruth_boxes, - groundtruth_is_group_of_list=groundtruth_is_group_of_list, - ) + groundtruth_boxes=groundtruth_boxes) if groundtruth_boxes.size == 0: return scores, np.zeros(num_detected_boxes, dtype=bool) tp_fp_labels = np.zeros(num_detected_boxes, dtype=bool) - is_matched_to_difficult_box = np.zeros(num_detected_boxes, dtype=bool) - is_matched_to_group_of_box = np.zeros(num_detected_boxes, dtype=bool) # The evaluation is done in two stages: - # 1. All detections are matched to non group-of boxes; true positives - # are determined and detections matched to difficult boxes are - # ignored. + # 1. All detections are matched to non group-of boxes. # 2. Detections that are determined as false positives are matched # against group-of boxes and ignored if matched. # Tp-fp evaluation for non-group of boxes (if any). 
if iou.shape[1] > 0: - groundtruth_nongroup_of_is_difficult_list = ( - groundtruth_is_difficult_list[~groundtruth_is_group_of_list]) max_overlap_gt_ids = np.argmax(iou, axis=1) is_gt_box_detected = np.zeros(iou.shape[1], dtype=bool) for i in range(num_detected_boxes): gt_id = max_overlap_gt_ids[i] if iou[i, gt_id] >= self.matching_iou_threshold: - if not groundtruth_nongroup_of_is_difficult_list[gt_id]: - if not is_gt_box_detected[gt_id]: - tp_fp_labels[i] = True - is_gt_box_detected[gt_id] = True - else: - is_matched_to_difficult_box[i] = True + if not is_gt_box_detected[gt_id]: + tp_fp_labels[i] = True + is_gt_box_detected[gt_id] = True - return ( - scores[~is_matched_to_difficult_box & ~is_matched_to_group_of_box], - tp_fp_labels[~is_matched_to_difficult_box - & ~is_matched_to_group_of_box], - ) + return scores, tp_fp_labels def _get_ith_class_arrays( self, diff --git a/mmaction/core/evaluation/ava_utils.py b/mmaction/core/evaluation/ava_utils.py index 89dc04ff50..01036b85f9 100644 --- a/mmaction/core/evaluation/ava_utils.py +++ b/mmaction/core/evaluation/ava_utils.py @@ -1,5 +1,4 @@ import csv -import heapq import logging import time from collections import defaultdict @@ -48,7 +47,7 @@ def tostr(item): def print_time(message, start): - print('==> %g seconds to %s' % (time.time() - start, message)) + print('==> %g seconds to %s' % (time.time() - start, message), flush=True) def make_image_key(video_id, timestamp): @@ -56,7 +55,7 @@ def make_image_key(video_id, timestamp): return f'{video_id},{int(timestamp):04d}' -def read_csv(csv_file, class_whitelist=None, capacity=0): +def read_csv(csv_file, class_whitelist=None): """Loads boxes and class labels from a CSV file in the AVA format. CSV file format described at https://research.google.com/ava/download.html. @@ -65,8 +64,6 @@ def read_csv(csv_file, class_whitelist=None, capacity=0): csv_file: A file object. class_whitelist: If provided, boxes corresponding to (integer) class labels not in this set are skipped. - capacity: Maximum number of labeled boxes allowed for each example. - Default is 0 where there is no limit. 
Returns: boxes: A dictionary mapping each unique image key (string) to a list of @@ -94,20 +91,16 @@ def read_csv(csv_file, class_whitelist=None, capacity=0): score = 1.0 if len(row) == 8: score = float(row[7]) - if capacity < 1 or len(entries[image_key]) < capacity: - heapq.heappush(entries[image_key], - (score, action_id, y1, x1, y2, x2)) - elif score > entries[image_key][0][0]: - heapq.heapreplace(entries[image_key], - (score, action_id, y1, x1, y2, x2)) + + entries[image_key].append((score, action_id, y1, x1, y2, x2)) + for image_key in entries: # Evaluation API assumes boxes with descending scores entry = sorted(entries[image_key], key=lambda tup: -tup[0]) - for item in entry: - score, action_id, y1, x1, y2, x2 = item - boxes[image_key].append([y1, x1, y2, x2]) - labels[image_key].append(action_id) - scores[image_key].append(score) + boxes[image_key] = [x[2:] for x in entry] + labels[image_key] = [x[1] for x in entry] + scores[image_key] = [x[0] for x in entry] + print_time('read file ' + csv_file.name, start) return boxes, labels, scores @@ -179,7 +172,7 @@ def ava_eval(result_file, categories = [cat for cat in categories if cat['id'] in custom_classes] # loading gt, do not need gt score - gt_boxes, gt_labels, _ = read_csv(open(ann_file), class_whitelist, 0) + gt_boxes, gt_labels, _ = read_csv(open(ann_file), class_whitelist) if verbose: print_time('Reading detection results', start) @@ -189,7 +182,7 @@ def ava_eval(result_file, excluded_keys = list() start = time.time() - boxes, labels, scores = read_csv(open(result_file), class_whitelist, 0) + boxes, labels, scores = read_csv(open(result_file), class_whitelist) if verbose: print_time('Reading detection results', start) @@ -208,9 +201,7 @@ def ava_eval(result_file, standard_fields.InputDataFields.groundtruth_boxes: np.array(gt_boxes[image_key], dtype=float), standard_fields.InputDataFields.groundtruth_classes: - np.array(gt_labels[image_key], dtype=int), - standard_fields.InputDataFields.groundtruth_difficult: - np.zeros(len(gt_boxes[image_key]), dtype=bool) + np.array(gt_labels[image_key], dtype=int) }) if verbose: print_time('Convert groundtruth', start) From 6a252b82db361067fb4a6ac6e1c49fc6198c93ad Mon Sep 17 00:00:00 2001 From: irvingzhang0512 Date: Wed, 7 Apr 2021 16:23:27 +0800 Subject: [PATCH 022/414] [Improvement] Test with onnx models and TensorRT engines. (#758) * tensorrt inference first commit * support test with onnx model * update docs * update changelog * update docs * update changelog * update * update changelog --- docs/changelog.md | 1 + docs/getting_started.md | 12 ++- docs_zh_CN/getting_started.md | 14 ++- tools/test.py | 197 +++++++++++++++++++++++++++------- 4 files changed, 182 insertions(+), 42 deletions(-) diff --git a/docs/changelog.md b/docs/changelog.md index f747898662..72a018a210 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -9,6 +9,7 @@ **Improvements** - Add softmax option for pytorch2onnx tool ([#781](https://github.com/open-mmlab/mmaction2/pull/781)) +- Test with onnx models and TensorRT engines ([#758](https://github.com/open-mmlab/mmaction2/pull/758)) **Bug and Typo Fixes** diff --git a/docs/getting_started.md b/docs/getting_started.md index a8f8559a22..1c7f204c95 100644 --- a/docs/getting_started.md +++ b/docs/getting_started.md @@ -68,7 +68,7 @@ You can use the following commands to test a dataset. 
 # single-gpu testing
 python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] \
     [--gpu-collect] [--tmpdir ${TMPDIR}] [--options ${OPTIONS}] [--average-clips ${AVG_TYPE}] \
-    [--launcher ${JOB_LAUNCHER}] [--local_rank ${LOCAL_RANK}]
+    [--launcher ${JOB_LAUNCHER}] [--local_rank ${LOCAL_RANK}] [--onnx] [--tensorrt]
 
 # multi-gpu testing
 ./tools/dist_test.sh ${CONFIG_FILE} ${CHECKPOINT_FILE} ${GPU_NUM} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] \
@@ -86,6 +86,8 @@ Optional arguments:
 - `AVG_TYPE`: Items to average the test clips. If set to `prob`, it will apply softmax before averaging the clip scores. Otherwise, it will directly average the clip scores.
 - `JOB_LAUNCHER`: Items for distributed job initialization launcher. Allowed choices are `none`, `pytorch`, `slurm`, `mpi`. Especially, if set to none, it will test in a non-distributed mode.
 - `LOCAL_RANK`: ID for local rank. If not specified, it will be set to 0.
+- `--onnx`: If specified, recognition results will be generated by the onnx model and `CHECKPOINT_FILE` should be the onnx model file path. Onnx model files are generated by `/tools/pytorch2onnx.py`. For now, multi-gpu mode and dynamic input shape mode are not supported. Please note that the output tensors of the dataset and the input tensors of the onnx model should share the same shape. It is also recommended to remove all test-time augmentation methods in `test_pipeline` (`ThreeCrop`, `TenCrop`, `twice_sample`, etc.).
+- `--tensorrt`: If specified, recognition results will be generated by the TensorRT engine and `CHECKPOINT_FILE` should be the TensorRT engine file path. TensorRT engines are generated from exported onnx models with the official TensorRT conversion tools. For now, multi-gpu mode and dynamic input shape mode are not supported. Please note that the output tensors of the dataset and the input tensors of the TensorRT engine should share the same shape. It is also recommended to remove all test-time augmentation methods in `test_pipeline` (`ThreeCrop`, `TenCrop`, `twice_sample`, etc.).
 
 Examples:
 
@@ -115,6 +117,14 @@ Assume that you have already downloaded the checkpoints to the directory `checkp
        --launcher slurm --eval top_k_accuracy
    ```
 
+4. Test TSN on Something-Something V1 with an onnx model and evaluate the top-k accuracy
+
+   ```shell
+   python tools/test.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py \
+       checkpoints/SOME_CHECKPOINT.onnx \
+       --eval top_k_accuracy --onnx
+   ```
+
 ### High-level APIs for testing a video and rawframes
 
 Here is an example of building the model and testing a given video.
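As a complement to the new `--onnx` flag documented above, an exported checkpoint can also be sanity-checked directly with onnxruntime, outside of `tools/test.py`. A minimal sketch follows; the checkpoint path and input shape are placeholders and should match your own export:

```python
import numpy as np
import onnxruntime as rt

sess = rt.InferenceSession('checkpoints/SOME_CHECKPOINT.onnx')
input_name = sess.get_inputs()[0].name

# Dummy clip shaped like the export shape, e.g. (1, 3, 8, 224, 224)
imgs = np.random.rand(1, 3, 8, 224, 224).astype(np.float32)
scores = sess.run(None, {input_name: imgs})[0]
print(scores.shape)  # (1, num_classes)
```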
diff --git a/docs_zh_CN/getting_started.md b/docs_zh_CN/getting_started.md index 201e072614..7e3de13fc3 100644 --- a/docs_zh_CN/getting_started.md +++ b/docs_zh_CN/getting_started.md @@ -17,7 +17,7 @@ - [使用单个 GPU 进行训练](#使用单个-GPU-进行训练) - [使用多个 GPU 进行训练](#使用多个-GPU-进行训练) - [使用多台机器进行训练](#使用多台机器进行训练) - - [使用单台机器创建多个任务](#使用单台机器启动多个任务) + - [使用单台机器启动多个任务](#使用单台机器启动多个任务) - [详细教程](#详细教程) @@ -67,7 +67,7 @@ MMAction2 提供了一些脚本用于测试数据集(如 Kinetics-400,Someth # 单 GPU 测试 python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] \ [--gpu-collect] [--tmpdir ${TMPDIR}] [--options ${OPTIONS}] [--average-clips ${AVG_TYPE}] \ - [--launcher ${JOB_LAUNCHER}] [--local_rank ${LOCAL_RANK}] + [--launcher ${JOB_LAUNCHER}] [--local_rank ${LOCAL_RANK}] [--onnx] [--tensorrt] # 多 GPU 测试 ./tools/dist_test.sh ${CONFIG_FILE} ${CHECKPOINT_FILE} ${GPU_NUM} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] \ @@ -85,6 +85,8 @@ python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--out ${RESULT_FILE}] [- - `AVG_TYPE`:用于平均测试片段结果的选项。如果被设置为 `prob`,则会在平均测试片段结果之前施加 softmax 函数。否则,会直接进行平均。 - `JOB_LAUNCHER`:分布式任务初始化启动器选项。可选值有 `none`,`pytorch`,`slurm`,`mpi`。特别地,如果被设置为 `none`, 则会以非分布式模式进行测试。 - `LOCAL_RANK`:本地 rank 的 ID。如果没有被指定,则会被设置为 0。 +- `--onnx`: 如果指定,将通过 onnx 模型推理获取预测结果,输入参数 `CHECKPOINT_FILE` 应为 onnx 模型文件。Onnx 模型文件由 `/tools/pytorch2onnx.py` 脚本导出。目前,不支持多 GPU 测试以及动态张量形状(Dynamic shape)。请注意,数据集输出与模型输入张量的形状应保持一致。同时,不建议使用测试时数据增强,如 `ThreeCrop`,`TenCrop`,`twice_sample` 等。 +- `--tensorrt`: 如果指定,将通过 TensorRT 模型推理获取预测结果,输入参数 `CHECKPOINT_FILE` 应为 TensorRT 模型文件。TensorRT 模型文件由导出的 onnx 模型以及 TensorRT 官方模型转换工具生成。目前,不支持多 GPU 测试以及动态张量形状(Dynamic shape)。请注意,数据集输出与模型输入张量的形状应保持一致。同时,不建议使用测试时数据增强,如 `ThreeCrop`,`TenCrop`,`twice_sample` 等。 例子: @@ -114,6 +116,14 @@ python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--out ${RESULT_FILE}] [- --launcher slurm --eval top_k_accuracy ``` +4. 
在 Something-Something V1 下测试 onnx 格式的 TSN 模型,并验证 `top-k accuracy` 指标 + + ```shell + python tools/test.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py \ + checkpoints/SOME_CHECKPOINT.onnx \ + --eval top_k_accuracy --onnx + ``` + ### 使用高级 API 对视频和帧文件夹进行测试 这里举例说明如何构建模型并测试给定视频 diff --git a/tools/test.py b/tools/test.py index efc88e8aae..ab037abaa8 100644 --- a/tools/test.py +++ b/tools/test.py @@ -80,6 +80,14 @@ def parse_args(): default='none', help='job launcher') parser.add_argument('--local_rank', type=int, default=0) + parser.add_argument( + '--onnx', + action='store_true', + help='Whether to test with onnx model or not') + parser.add_argument( + '--tensorrt', + action='store_true', + help='Whether to test with TensorRT engine or not') args = parser.parse_args() if 'LOCAL_RANK' not in os.environ: os.environ['LOCAL_RANK'] = str(args.local_rank) @@ -106,9 +114,152 @@ def turn_off_pretrained(cfg): turn_off_pretrained(sub_cfg) +def inference_pytorch(args, cfg, distributed, data_loader): + """Get predictions by pytorch models.""" + if args.average_clips is not None: + # You can set average_clips during testing, it will override the + # original setting + if cfg.model.get('test_cfg') is None and cfg.get('test_cfg') is None: + cfg.model.setdefault('test_cfg', + dict(average_clips=args.average_clips)) + else: + if cfg.model.get('test_cfg') is not None: + cfg.model.test_cfg.average_clips = args.average_clips + else: + cfg.test_cfg.average_clips = args.average_clips + + # remove redundant pretrain steps for testing + turn_off_pretrained(cfg.model) + + # build the model and load checkpoint + model = build_model( + cfg.model, train_cfg=None, test_cfg=cfg.get('test_cfg')) + + if len(cfg.module_hooks) > 0: + register_module_hooks(model, cfg.module_hooks) + + fp16_cfg = cfg.get('fp16', None) + if fp16_cfg is not None: + wrap_fp16_model(model) + load_checkpoint(model, args.checkpoint, map_location='cpu') + + if args.fuse_conv_bn: + model = fuse_conv_bn(model) + + if not distributed: + model = MMDataParallel(model, device_ids=[0]) + outputs = single_gpu_test(model, data_loader) + else: + model = MMDistributedDataParallel( + model.cuda(), + device_ids=[torch.cuda.current_device()], + broadcast_buffers=False) + outputs = multi_gpu_test(model, data_loader, args.tmpdir, + args.gpu_collect) + + return outputs + + +def inference_tensorrt(ckpt_path, distributed, data_loader, batch_size): + """Get predictions by TensorRT engine. + + For now, multi-gpu mode and dynamic tensor shape are not supported. + """ + assert not distributed, \ + 'TensorRT engine inference only supports single gpu mode.' 
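+    # Overall flow: deserialize the engine once, pre-allocate an output
+    # tensor that matches output binding 1 (dtype, shape and device), then
+    # drive execute_async_v2 with raw data pointers for every batch.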
+ import tensorrt as trt + from mmcv.tensorrt.tensorrt_utils import (torch_dtype_from_trt, + torch_device_from_trt) + + # load engine + with trt.Logger() as logger, trt.Runtime(logger) as runtime: + with open(ckpt_path, mode='rb') as f: + engine_bytes = f.read() + engine = runtime.deserialize_cuda_engine(engine_bytes) + + # For now, only support fixed input tensor + cur_batch_size = engine.get_binding_shape(0)[0] + assert batch_size == cur_batch_size, \ + ('Dataset and TensorRT model should share the same batch size, ' + f'but get {batch_size} and {cur_batch_size}') + + context = engine.create_execution_context() + + # get output tensor + dtype = torch_dtype_from_trt(engine.get_binding_dtype(1)) + shape = tuple(context.get_binding_shape(1)) + device = torch_device_from_trt(engine.get_location(1)) + output = torch.empty( + size=shape, dtype=dtype, device=device, requires_grad=False) + + # get predictions + results = [] + dataset = data_loader.dataset + prog_bar = mmcv.ProgressBar(len(dataset)) + for data in data_loader: + bindings = [ + data['imgs'].contiguous().data_ptr(), + output.contiguous().data_ptr() + ] + context.execute_async_v2(bindings, + torch.cuda.current_stream().cuda_stream) + results.extend(output.cpu().numpy()) + batch_size = len(next(iter(data.values()))) + for _ in range(batch_size): + prog_bar.update() + return results + + +def inference_onnx(ckpt_path, distributed, data_loader, batch_size): + """Get predictions by ONNX. + + For now, multi-gpu mode and dynamic tensor shape are not supported. + """ + assert not distributed, 'ONNX inference only supports single gpu mode.' + + import onnx + import onnxruntime as rt + + # get input tensor name + onnx_model = onnx.load(ckpt_path) + input_all = [node.name for node in onnx_model.graph.input] + input_initializer = [node.name for node in onnx_model.graph.initializer] + net_feed_input = list(set(input_all) - set(input_initializer)) + assert len(net_feed_input) == 1 + + # For now, only support fixed tensor shape + input_tensor = None + for tensor in onnx_model.graph.input: + if tensor.name == net_feed_input[0]: + input_tensor = tensor + break + cur_batch_size = input_tensor.type.tensor_type.shape.dim[0].dim_value + assert batch_size == cur_batch_size, \ + ('Dataset and ONNX model should share the same batch size, ' + f'but get {batch_size} and {cur_batch_size}') + + # get predictions + sess = rt.InferenceSession(ckpt_path) + results = [] + dataset = data_loader.dataset + prog_bar = mmcv.ProgressBar(len(dataset)) + for data in data_loader: + imgs = data['imgs'].cpu().numpy() + onnx_result = sess.run(None, {net_feed_input[0]: imgs})[0] + results.extend(onnx_result) + batch_size = len(next(iter(data.values()))) + for _ in range(batch_size): + prog_bar.update() + return results + + def main(): args = parse_args() + if args.tensorrt and args.onnx: + raise ValueError( + 'Cannot set onnx mode and tensorrt mode at the same time.') + cfg = Config.fromfile(args.config) cfg.merge_from_dict(args.cfg_options) @@ -158,18 +309,6 @@ def main(): torch.backends.cudnn.benchmark = True cfg.data.test.test_mode = True - if args.average_clips is not None: - # You can set average_clips during testing, it will override the - # original setting - if cfg.model.get('test_cfg') is None and cfg.get('test_cfg') is None: - cfg.model.setdefault('test_cfg', - dict(average_clips=args.average_clips)) - else: - if cfg.model.get('test_cfg') is not None: - cfg.model.test_cfg.average_clips = args.average_clips - else: - cfg.test_cfg.average_clips = args.average_clips - # 
init distributed env first, since logger depends on the dist info. if args.launcher == 'none': distributed = False @@ -191,34 +330,14 @@ def main(): **cfg.data.get('test_dataloader', {})) data_loader = build_dataloader(dataset, **dataloader_setting) - # remove redundant pretrain steps for testing - turn_off_pretrained(cfg.model) - - # build the model and load checkpoint - model = build_model( - cfg.model, train_cfg=None, test_cfg=cfg.get('test_cfg')) - - if len(cfg.module_hooks) > 0: - register_module_hooks(model, cfg.module_hooks) - - fp16_cfg = cfg.get('fp16', None) - if fp16_cfg is not None: - wrap_fp16_model(model) - load_checkpoint(model, args.checkpoint, map_location='cpu') - - if args.fuse_conv_bn: - model = fuse_conv_bn(model) - - if not distributed: - model = MMDataParallel(model, device_ids=[0]) - outputs = single_gpu_test(model, data_loader) + if args.tensorrt: + outputs = inference_tensorrt(args.checkpoint, distributed, data_loader, + dataloader_setting['videos_per_gpu']) + elif args.onnx: + outputs = inference_onnx(args.checkpoint, distributed, data_loader, + dataloader_setting['videos_per_gpu']) else: - model = MMDistributedDataParallel( - model.cuda(), - device_ids=[torch.cuda.current_device()], - broadcast_buffers=False) - outputs = multi_gpu_test(model, data_loader, args.tmpdir, - args.gpu_collect) + outputs = inference_pytorch(args, cfg, distributed, data_loader) rank, _ = get_dist_info() if rank == 0: From 723e724f68f5d70a999d3c7f4bc0be1e8e48b8bf Mon Sep 17 00:00:00 2001 From: congee <35596075+congee524@users.noreply.github.com> Date: Thu, 8 Apr 2021 18:36:19 +0800 Subject: [PATCH 023/414] [Feature] Support TRN (#755) * TRN draft * add config * add comments * add unittest * use partial bn * add README * update readme and changelog * add recognizer unit test * Update README_zh-CN.md * Update trn_head.py Co-authored-by: Jintao Lin <528557675@qq.com> --- README.md | 3 +- README_zh-CN.md | 83 +++---- configs/_base_/models/trn_r50.py | 22 ++ configs/recognition/trn/README.md | 78 +++++++ configs/recognition/trn/README_zh-CN.md | 78 +++++++ .../trn/trn_r50_1x1x8_50e_sthv1_rgb.py | 102 +++++++++ .../trn/trn_r50_1x1x8_50e_sthv2_rgb.py | 102 +++++++++ docs/changelog.md | 5 +- mmaction/models/__init__.py | 6 +- mmaction/models/heads/__init__.py | 3 +- mmaction/models/heads/trn_head.py | 210 ++++++++++++++++++ tests/test_models/test_head.py | 61 ++++- .../test_recognizers/test_recognizer2d.py | 41 ++++ 13 files changed, 745 insertions(+), 49 deletions(-) create mode 100644 configs/_base_/models/trn_r50.py create mode 100644 configs/recognition/trn/README.md create mode 100644 configs/recognition/trn/README_zh-CN.md create mode 100644 configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py create mode 100644 configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py create mode 100644 mmaction/models/heads/trn_head.py diff --git a/README.md b/README.md index 617ed4ef8f..7b39a4f3e4 100644 --- a/README.md +++ b/README.md @@ -81,7 +81,7 @@ Supported methods for Action Recognition: - ✅ [TSN](configs/recognition/tsn/README.md) (ECCV'2016) - ✅ [TSM](configs/recognition/tsm/README.md) (ICCV'2019) -- ✅ [TSM Non-Local](configs/recognition/i3d) (ICCV'2019) +- ✅ [TSM Non-Local](configs/recognition/tsm/README.md) (ICCV'2019) - ✅ [R(2+1)D](configs/recognition/r2plus1d/README.md) (CVPR'2018) - ✅ [I3D](configs/recognition/i3d/README.md) (CVPR'2017) - ✅ [I3D Non-Local](configs/recognition/i3d/README.md) (CVPR'2018) @@ -95,6 +95,7 @@ Supported methods for Action Recognition: - ✅ 
[OmniSource](configs/recognition/omnisource/README.md) (ECCV'2020)
- ✅ [MultiModality: Audio](configs/recognition_audio/resnet/README.md) (ArXiv'2020)
- ✅ [TANet](configs/recognition/tanet/README.md) (ArXiv'2020)
+- ✅ [TRN](configs/recognition/trn/README.md) (ECCV'2018)


diff --git a/README_zh-CN.md b/README_zh-CN.md
index 482e24c52f..2d055bc52f 100644
--- a/README_zh-CN.md
+++ b/README_zh-CN.md
@@ -6,7 +6,7 @@

[English](/README.md) | 简体中文

-[![Documentation](https://readthedocs.org/projects/mmaction2/badge/?version=latest)](https://mmaction2.readthedocs.io/en/latest/)
+[![Documentation](https://readthedocs.org/projects/mmaction2/badge/?version=latest)](https://mmaction2.readthedocs.io/zh_CN/latest/)
[![actions](https://github.com/open-mmlab/mmaction2/workflows/build/badge.svg)](https://github.com/open-mmlab/mmaction2/actions)
[![codecov](https://codecov.io/gh/open-mmlab/mmaction2/branch/master/graph/badge.svg)](https://codecov.io/gh/open-mmlab/mmaction2)
[![PyPI](https://img.shields.io/pypi/v/mmaction2)](https://pypi.org/project/mmaction2/)
@@ -73,22 +73,23 @@ v0.13.0 版本已于 2021 年 3 月 31 日发布,可通过查阅 [更新日志
(点击收起)

-- ✅ [TSN](/configs/recognition/tsn/README.md) (ECCV'2016)
-- ✅ [TSM](/configs/recognition/tsm/README.md) (ICCV'2019)
-- ✅ [TSM Non-Local](/configs/recognition/i3d) (ICCV'2019)
-- ✅ [R(2+1)D](/configs/recognition/r2plus1d/README.md) (CVPR'2018)
-- ✅ [I3D](/configs/recognition/i3d/README.md) (CVPR'2017)
-- ✅ [I3D Non-Local](/configs/recognition/i3d/README.md) (CVPR'2018)
-- ✅ [SlowOnly](/configs/recognition/slowonly/README.md) (ICCV'2019)
-- ✅ [SlowFast](/configs/recognition/slowfast/README.md) (ICCV'2019)
-- ✅ [CSN](/configs/recognition/csn/README.md) (ICCV'2019)
-- ✅ [TIN](/configs/recognition/tin/README.md) (AAAI'2020)
-- ✅ [TPN](/configs/recognition/tpn/README.md) (CVPR'2020)
-- ✅ [C3D](/configs/recognition/c3d/README.md) (CVPR'2014)
-- ✅ [X3D](/configs/recognition/x3d/README.md) (CVPR'2020)
-- ✅ [OmniSource](/configs/recognition/omnisource/README.md) (ECCV'2020)
-- ✅ [MultiModality: Audio](/configs/recognition_audio/resnet/README.md) (ArXiv'2020)
-- ✅ [TANet](configs/recognition/tanet/README.md) (ArXiv'2020)
+- ✅ [TSN](/configs/recognition/tsn/README_zh-CN.md) (ECCV'2016)
+- ✅ [TSM](/configs/recognition/tsm/README_zh-CN.md) (ICCV'2019)
+- ✅ [TSM Non-Local](/configs/recognition/tsm/README_zh-CN.md) (ICCV'2019)
+- ✅ [R(2+1)D](/configs/recognition/r2plus1d/README_zh-CN.md) (CVPR'2018)
+- ✅ [I3D](/configs/recognition/i3d/README_zh-CN.md) (CVPR'2017)
+- ✅ [I3D Non-Local](/configs/recognition/i3d/README_zh-CN.md) (CVPR'2018)
+- ✅ [SlowOnly](/configs/recognition/slowonly/README_zh-CN.md) (ICCV'2019)
+- ✅ [SlowFast](/configs/recognition/slowfast/README_zh-CN.md) (ICCV'2019)
+- ✅ [CSN](/configs/recognition/csn/README_zh-CN.md) (ICCV'2019)
+- ✅ [TIN](/configs/recognition/tin/README_zh-CN.md) (AAAI'2020)
+- ✅ [TPN](/configs/recognition/tpn/README_zh-CN.md) (CVPR'2020)
+- ✅ [C3D](/configs/recognition/c3d/README_zh-CN.md) (CVPR'2014)
+- ✅ [X3D](/configs/recognition/x3d/README_zh-CN.md) (CVPR'2020)
+- ✅ [OmniSource](/configs/recognition/omnisource/README_zh-CN.md) (ECCV'2020)
+- ✅ [MultiModality: Audio](/configs/recognition_audio/resnet/README_zh-CN.md) (ArXiv'2020)
+- ✅ [TANet](/configs/recognition/tanet/README_zh-CN.md) (ArXiv'2020)
+- ✅ [TRN](/configs/recognition/trn/README_zh-CN.md) (ECCV'2018)
@@ -97,9 +98,9 @@ v0.13.0 版本已于 2021 年 3 月 31 日发布,可通过查阅 [更新日志
(点击收起) -- ✅ [BSN](/configs/localization/bsn/README.md) (ECCV'2018) -- ✅ [BMN](/configs/localization/bmn/README.md) (ICCV'2019) -- ✅ [SSN](/configs/localization/ssn/README.md) (ICCV'2017) +- ✅ [BSN](/configs/localization/bsn/README_zh-CN.md) (ECCV'2018) +- ✅ [BMN](/configs/localization/bmn/README_zh-CN.md) (ICCV'2019) +- ✅ [SSN](/configs/localization/ssn/README_zh-CN.md) (ICCV'2017)
@@ -108,36 +109,36 @@ v0.13.0 版本已于 2021 年 3 月 31 日发布,可通过查阅 [更新日志
(点击收起) -- ✅ [SlowOnly+Fast R-CNN](/configs/detection/ava/README.md) (ICCV'2019) -- ✅ [SlowFast+Fast R-CNN](/configs/detection/ava/README.md) (ICCV'2019) -- ✅ [Long-Term Feature Bank](configs/detection/lfb/README.md) (CVPR'2019) +- ✅ [SlowOnly+Fast R-CNN](/configs/detection/ava/README_zh-CN.md) (ICCV'2019) +- ✅ [SlowFast+Fast R-CNN](/configs/detection/ava/README_zh-CN.md) (ICCV'2019) +- ✅ [Long-Term Feature Bank](/configs/detection/lfb/README_zh-CN.md) (CVPR'2019)
-各个模型的结果和设置都可以在对应的 config 目录下的 *README.md* 中查看。整体的概况也可也在 [**模型库**](https://mmaction2.readthedocs.io/en/latest/recognition_models.html) 页面中查看
+各个模型的结果和设置都可以在对应的 config 目录下的 *README_zh-CN.md* 中查看。整体的概况也可以在 [**模型库**](https://mmaction2.readthedocs.io/zh_CN/latest/recognition_models.html) 页面中查看

 我们将跟进学界的最新进展,并支持更多算法和框架。如果您对 MMAction2 有任何功能需求,请随时在 [问题](https://github.com/open-mmlab/mmaction2/issues/19) 中留言。

 ## 数据集

-支持的 [数据集](https://mmaction2.readthedocs.io/en/latest/supported_datasets.html):
+支持的 [数据集](https://mmaction2.readthedocs.io/zh_CN/latest/supported_datasets.html):

 支持的动作识别数据集:
(点击收起) -- ✅ [UCF101](/tools/data/ucf101/README.md) \[ [主页](https://www.crcv.ucf.edu/research/data-sets/ucf101/) \] (CRCV-IR-12-01) -- ✅ [HMDB51](/tools/data/hmdb51/README.md) \[ [主页](https://serre-lab.clps.brown.edu/resource/hmdb-a-large-human-motion-database/) \] (ICCV'2011) -- ✅ [Kinetics-[400/600/700]](/tools/data/kinetics/README.md) \[ [主页](https://deepmind.com/research/open-source/kinetics) \] (CVPR'2017) -- ✅ [Something-Something V1](/tools/data/sthv1/README.md) \[ [主页](https://20bn.com/datasets/something-something/v1) \] (ICCV'2017) -- ✅ [Something-Something V2](/tools/data/sthv2/README.md) \[ [主页](https://20bn.com/datasets/something-something) \] (ICCV'2017) -- ✅ [Moments in Time](/tools/data/mit/README.md) \[ [主页](http://moments.csail.mit.edu/) \] (TPAMI'2019) -- ✅ [Multi-Moments in Time](/tools/data/mmit/README.md) \[ [主页](http://moments.csail.mit.edu/challenge_iccv_2019.html) \] (ArXiv'2019) -- ✅ [HVU](/tools/data/hvu/README.md) \[ [主页](https://github.com/holistic-video-understanding/HVU-Dataset) \] (ECCV'2020) -- ✅ [Jester](/tools/data/jester/README.md) \[ [主页](https://20bn.com/datasets/jester/v1) \] (ICCV'2019) -- ✅ [GYM](/tools/data/gym/README.md) \[ [主页](https://sdolivia.github.io/FineGym/) \] (CVPR'2020) -- ✅ [ActivityNet](/tools/data/activitynet/README.md) \[ [主页](http://activity-net.org/) \] (CVPR'2015) +- ✅ [UCF101](/tools/data/ucf101/README_zh-CN.md) \[ [主页](https://www.crcv.ucf.edu/research/data-sets/ucf101/) \] (CRCV-IR-12-01) +- ✅ [HMDB51](/tools/data/hmdb51/README_zh-CN.md) \[ [主页](https://serre-lab.clps.brown.edu/resource/hmdb-a-large-human-motion-database/) \] (ICCV'2011) +- ✅ [Kinetics-[400/600/700]](/tools/data/kinetics/README_zh-CN.md) \[ [主页](https://deepmind.com/research/open-source/kinetics) \] (CVPR'2017) +- ✅ [Something-Something V1](/tools/data/sthv1/README_zh-CN.md) \[ [主页](https://20bn.com/datasets/something-something/v1) \] (ICCV'2017) +- ✅ [Something-Something V2](/tools/data/sthv2/README_zh-CN.md) \[ [主页](https://20bn.com/datasets/something-something) \] (ICCV'2017) +- ✅ [Moments in Time](/tools/data/mit/README_zh-CN.md) \[ [主页](http://moments.csail.mit.edu/) \] (TPAMI'2019) +- ✅ [Multi-Moments in Time](/tools/data/mmit/README_zh-CN.md) \[ [主页](http://moments.csail.mit.edu/challenge_iccv_2019.html) \] (ArXiv'2019) +- ✅ [HVU](/tools/data/hvu/README_zh-CN.md) \[ [主页](https://github.com/holistic-video-understanding/HVU-Dataset) \] (ECCV'2020) +- ✅ [Jester](/tools/data/jester/README_zh-CN.md) \[ [主页](https://20bn.com/datasets/jester/v1) \] (ICCV'2019) +- ✅ [GYM](/tools/data/gym/README_zh-CN.md) \[ [主页](https://sdolivia.github.io/FineGym/) \] (CVPR'2020) +- ✅ [ActivityNet](/tools/data/activitynet/README_zh-CN.md) \[ [主页](http://activity-net.org/) \] (CVPR'2015)
@@ -146,8 +147,8 @@ v0.13.0 版本已于 2021 年 3 月 31 日发布,可通过查阅 [更新日志
(点击收起) -- ✅ [ActivityNet](/tools/data/activitynet/README.md) \[ [主页](http://activity-net.org/) \] (CVPR'2015) -- ✅ [THUMOS14](/tools/data/thumos14/README.md) \[ [主页](https://www.crcv.ucf.edu/THUMOS14/download.html) \] (THUMOS Challenge 2014) +- ✅ [ActivityNet](/tools/data/activitynet/README_zh-CN.md) \[ [主页](http://activity-net.org/) \] (CVPR'2015) +- ✅ [THUMOS14](/tools/data/thumos14/README_zh-CN.md) \[ [主页](https://www.crcv.ucf.edu/THUMOS14/download.html) \] (THUMOS Challenge 2014)
@@ -156,9 +157,9 @@ v0.13.0 版本已于 2021 年 3 月 31 日发布,可通过查阅 [更新日志
(点击收起) -- ✅ [AVA](/tools/data/ava/README.md) \[ [主页](https://research.google.com/ava/index.html) \] (CVPR'2018) -- 🔲 [UCF101-24](/tools/data/ucf101_24/README.md) \[ [主页](http://www.thumos.info/download.html) \] (CRCV-IR-12-01) -- 🔲 [JHMDB](/tools/data/jhmdb/README.md) \[ [主页](http://jhmdb.is.tue.mpg.de/) \] (ICCV'2013) +- ✅ [AVA](/tools/data/ava/README_zh-CN.md) \[ [主页](https://research.google.com/ava/index.html) \] (CVPR'2018) +- 🔲 [UCF101-24](/tools/data/ucf101_24/README_zh-CN.md) \[ [主页](http://www.thumos.info/download.html) \] (CRCV-IR-12-01) +- 🔲 [JHMDB](/tools/data/jhmdb/README_zh-CN.md) \[ [主页](http://jhmdb.is.tue.mpg.de/) \] (ICCV'2013)
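The following files in this patch add the TRN base model config and its dataset-specific configs. As a reading aid only (this paragraph and the snippet are not part of the patch), the sketch below shows how the sthv1 config added later in this series inherits the `trn_r50.py` base file through mmcv's `_base_` mechanism; it assumes mmcv is installed and the patch has been applied.

```python
# Minimal sketch (illustrative only) of mmcv config inheritance for the
# TRN configs introduced in this patch.
from mmcv import Config

cfg = Config.fromfile('configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py')

# The child config keeps every field of the base `trn_r50.py` model and only
# overrides `num_classes` (400 in the base file -> 174 for Something-Something).
assert cfg.model.cls_head.type == 'TRNHead'
assert cfg.model.cls_head.relation_type == 'TRNMultiScale'
assert cfg.model.cls_head.num_classes == 174
```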
diff --git a/configs/_base_/models/trn_r50.py b/configs/_base_/models/trn_r50.py new file mode 100644 index 0000000000..ff84e78cb1 --- /dev/null +++ b/configs/_base_/models/trn_r50.py @@ -0,0 +1,22 @@ +# model settings +model = dict( + type='Recognizer2D', + backbone=dict( + type='ResNet', + pretrained='torchvision://resnet50', + depth=50, + norm_eval=False, + partial_bn=True), + cls_head=dict( + type='TRNHead', + num_classes=400, + in_channels=2048, + num_segments=8, + spatial_type='avg', + relation_type='TRNMultiScale', + hidden_dim=256, + dropout_ratio=0.8, + init_std=0.001), + # model training and testing settings + train_cfg=None, + test_cfg=dict(average_clips='prob')) diff --git a/configs/recognition/trn/README.md b/configs/recognition/trn/README.md new file mode 100644 index 0000000000..4098a36fdb --- /dev/null +++ b/configs/recognition/trn/README.md @@ -0,0 +1,78 @@ +# TRN + +## Introduction + +[ALGORITHM] + +```BibTeX +@article{zhou2017temporalrelation, + title = {Temporal Relational Reasoning in Videos}, + author = {Zhou, Bolei and Andonian, Alex and Oliva, Aude and Torralba, Antonio}, + journal={European Conference on Computer Vision}, + year={2018} +} +``` + +## Model Zoo + +### Something-Something V1 + +|config | resolution | gpus | backbone| pretrain | top1 acc (efficient/accurate)| top5 acc (efficient/accurate)| gpu_mem(M) | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[trn_r50_1x1x8_50e_sthv1_rgb](configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 31.62 / 33.88 |60.01 / 62.12| 11010 | [ckpt](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/trn_r50_1x1x8_50e_sthv1_rgb_20210401-163704a8.pth) | [log](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log)| [json](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log.json)| + +### Something-Something V2 + +|config | resolution | gpus | backbone| pretrain | top1 acc (efficient/accurate)| top5 acc (efficient/accurate)| gpu_mem(M) | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[trn_r50_1x1x8_50e_sthv2_rgb](configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 45.14 / 47.96 |73.21 / 75.97 | 11010 | [ckpt](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/trn_r50_1x1x8_50e_sthv2_rgb_20210401-773eca7b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210326_103951.log)| [json](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210326_103951.log.json)| + +Notes: + +1. The **gpus** indicates the number of gpu we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. + According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, + e.g., lr=0.01 for 4 GPUs x 2 video/gpu and lr=0.08 for 16 GPUs x 4 video/gpu. +2. There are two kinds of test settings for Something-Something dataset, efficient setting (center crop x 1 clip) and accurate setting (Three crop x 2 clip). +3. 
In the original [repository](https://github.com/zhoubolei/TRN-pytorch), the author augments data with random flipping on the something-something dataset, but the augmentation method may be wrong for direction-sensitive actions, such as `push left to right`. So, we replaced `flip` with `flip with label mapping`, and changed the testing method `TenCrop`, which has five flipped crops, to `Twice Sample & ThreeCrop`.
+4. We use `ResNet50` instead of `BNInception` as the backbone of TRN. When training `TRN-ResNet50` on sthv1 dataset in the original repository, we get top1 (top5) accuracy 30.542 (58.627) vs. ours 31.62 (60.01).
+
+For more details on data preparation, you can refer to
+
+- [preparing_sthv1](/tools/data/sthv1/README.md)
+- [preparing_sthv2](/tools/data/sthv2/README.md)
+
+## Train
+
+You can use the following command to train a model.
+
+```shell
+python tools/train.py ${CONFIG_FILE} [optional arguments]
+```
+
+Example: train TRN model on sthv1 dataset in a deterministic option with periodic validation.
+
+```shell
+python tools/train.py configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py \
+    --work-dir work_dirs/trn_r50_1x1x8_50e_sthv1_rgb \
+    --validate --seed 0 --deterministic
+```
+
+For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting).
+
+## Test
+
+You can use the following command to test a model.
+
+```shell
+python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments]
+```
+
+Example: test TRN model on sthv1 dataset and dump the result to a json file.
+
+```shell
+python tools/test.py configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py \
+    checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy mean_class_accuracy \
+    --out result.json
+```
+
+For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset).
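Note 3 above replaces plain flipping with `flip with label mapping`; the behaviour is easiest to see in isolation. The sketch below is a reading aid and not part of the patch: the `sthv1_flip_label_map` literal is copied from the sthv1 config added later in this series, while `flip_label` is a hypothetical helper that mimics what the `Flip` pipeline step does to the label when a clip is actually flipped.

```python
# Minimal sketch (illustrative only) of label mapping under horizontal flip.
sthv1_flip_label_map = {2: 4, 4: 2, 30: 41, 41: 30, 52: 66, 66: 52}


def flip_label(label, flip_label_map):
    # A direction-sensitive class swaps to its mirrored counterpart when the
    # clip is flipped; classes absent from the map keep their original label.
    return flip_label_map.get(label, label)


assert flip_label(2, sthv1_flip_label_map) == 4  # mapped pair is swapped
assert flip_label(0, sthv1_flip_label_map) == 0  # unmapped label is unchanged
```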
diff --git a/configs/recognition/trn/README_zh-CN.md b/configs/recognition/trn/README_zh-CN.md new file mode 100644 index 0000000000..a38120f9c7 --- /dev/null +++ b/configs/recognition/trn/README_zh-CN.md @@ -0,0 +1,78 @@ +# TRN + +## 简介 + +[ALGORITHM] + +```BibTeX +@article{zhou2017temporalrelation, + title = {Temporal Relational Reasoning in Videos}, + author = {Zhou, Bolei and Andonian, Alex and Oliva, Aude and Torralba, Antonio}, + journal={European Conference on Computer Vision}, + year={2018} +} +``` + +## 模型库 + +### Something-Something V1 + +|配置文件 | 分辨率 | GPU 数量 | 主干网络| 预训练 | top1 准确率 (efficient/accurate)| top5 准确率 (efficient/accurate)| GPU 显存占用 (M)| ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[trn_r50_1x1x8_50e_sthv1_rgb](configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 31.62 / 33.88 |60.01 / 62.12| 11010 | [ckpt](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/trn_r50_1x1x8_50e_sthv1_rgb_20210401-163704a8.pth) | [log](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log)| [json](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log.json)| + +### Something-Something V2 + +|配置文件 | 分辨率 | GPU 数量 | 主干网络| 预训练 | top1 准确率 (efficient/accurate)| top5 准确率 (efficient/accurate)| GPU 显存占用 (M)| ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[trn_r50_1x1x8_50e_sthv2_rgb](configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 45.14 / 47.96 |73.21 / 75.97 | 11010 | [ckpt](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/trn_r50_1x1x8_50e_sthv2_rgb_20210401-773eca7b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210326_103951.log)| [json](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210326_103951.log.json)| + +注: + +1. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 + 依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。 + 如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。 +2. 对于 Something-Something 数据集,有两种测试方案:efficient(对应 center crop x 1 clip)和 accurate(对应 Three crop x 2 clip)。 +3. 在原代码库中,作者在 Something-Something 数据集上使用了随机水平翻转,但这种数据增强方法有一些问题,因为 Something-Something 数据集有一些方向性的动作,比如`从左往右推`。所以 MMAction2 把`随机水平翻转`改为`带标签映射的水平翻转`,同时修改了测试模型的数据处理方法,即把`裁剪 10 个图像块`(这里面包括 5 个翻转后的图像块)修改成`采帧两次 & 裁剪 3 个图像块`。 +4. 
MMAction2 使用 `ResNet50` 代替 `BNInception` 作为 TRN 的主干网络。使用原代码,在 sthv1 数据集上训练 `TRN-ResNet50` 时,实验得到的 top1 (top5) 的准确度为 30.542 (58.627),而 MMAction2 的精度为 31.62 (60.01)。 + +关于数据处理的更多细节,用户可以参照 + +- [准备 sthv1](/tools/data/sthv1/README_zh-CN.md) +- [准备 sthv2](/tools/data/sthv2/README_zh-CN.md) + +## 如何训练 + +用户可以使用以下指令进行模型训练。 + +```shell +python tools/train.py ${CONFIG_FILE} [optional arguments] +``` + +例如:以一个确定性的训练方式,辅以定期的验证过程进行 TRN 模型在 sthv1 数据集上的训练。 + +```shell +python tools/train.py configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py \ + --work-dir work_dirs/trn_r50_1x1x8_50e_sthv1_rgb \ + --validate --seed 0 --deterministic +``` + +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 + +## 如何测试 + +用户可以使用以下指令进行模型测试。 + +```shell +python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] +``` + +例如:在 sthv1 数据集上测试 TRN 模型,并将结果导出为一个 json 文件。 + +```shell +python tools/test.py configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py \ + checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy mean_class_accuracy \ + --out result.json +``` + +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py b/configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py new file mode 100644 index 0000000000..0578748296 --- /dev/null +++ b/configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py @@ -0,0 +1,102 @@ +_base_ = [ + '../../_base_/models/trn_r50.py', '../../_base_/schedules/sgd_tsm_50e.py', + '../../_base_/default_runtime.py' +] + +# model settings +model = dict(cls_head=dict(num_classes=174)) + +# dataset settings +dataset_type = 'RawframeDataset' +data_root = 'data/sthv1/rawframes' +data_root_val = 'data/sthv1/rawframes' +ann_file_train = 'data/sthv1/sthv1_train_list_rawframes.txt' +ann_file_val = 'data/sthv1/sthv1_val_list_rawframes.txt' +ann_file_test = 'data/sthv1/sthv1_val_list_rawframes.txt' + +sthv1_flip_label_map = {2: 4, 4: 2, 30: 41, 41: 30, 52: 66, 66: 52} +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) +train_pipeline = [ + dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict( + type='MultiScaleCrop', + input_size=224, + scales=(1, 0.875, 0.75, 0.66), + random_crop=False, + max_wh_scale_gap=1, + num_fixed_crops=13), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5, flip_label_map=sthv1_flip_label_map), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=8, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=8, + twice_sample=True, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='ThreeCrop', crop_size=256), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 
'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=16, + workers_per_gpu=4, + test_dataloader=dict(videos_per_gpu=1), + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + filename_tmpl='{:05}.jpg', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + filename_tmpl='{:05}.jpg', + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + filename_tmpl='{:05}.jpg', + pipeline=test_pipeline)) +evaluation = dict( + interval=1, metrics=['top_k_accuracy', 'mean_class_accuracy']) + +# optimizer +optimizer = dict(lr=0.002, paramwise_cfg=dict(fc_lr5=False), weight_decay=5e-4) +# learning policy +lr_config = dict(policy='step', step=[30, 45]) +total_epochs = 50 + +# runtime settings +find_unused_parameters = True +work_dir = './work_dirs/trn_r50_1x1x8_50e_sthv1_rgb/' diff --git a/configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py b/configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py new file mode 100644 index 0000000000..a3e2615db2 --- /dev/null +++ b/configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py @@ -0,0 +1,102 @@ +_base_ = [ + '../../_base_/models/trn_r50.py', '../../_base_/schedules/sgd_tsm_50e.py', + '../../_base_/default_runtime.py' +] + +# model settings +model = dict(cls_head=dict(num_classes=174)) + +# dataset settings +dataset_type = 'RawframeDataset' +data_root = 'data/sthv2/rawframes' +data_root_val = 'data/sthv2/rawframes' +ann_file_train = 'data/sthv2/sthv2_train_list_rawframes.txt' +ann_file_val = 'data/sthv2/sthv2_val_list_rawframes.txt' +ann_file_test = 'data/sthv2/sthv2_val_list_rawframes.txt' + +sthv2_flip_label_map = {86: 87, 87: 86, 93: 94, 94: 93, 166: 167, 167: 166} +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) +train_pipeline = [ + dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict( + type='MultiScaleCrop', + input_size=224, + scales=(1, 0.875, 0.75, 0.66), + random_crop=False, + max_wh_scale_gap=1, + num_fixed_crops=13), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5, flip_label_map=sthv2_flip_label_map), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=8, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=8, + twice_sample=True, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='ThreeCrop', crop_size=256), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=16, + workers_per_gpu=4, + test_dataloader=dict(videos_per_gpu=1), + train=dict( 
+ type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + filename_tmpl='{:05}.jpg', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + filename_tmpl='{:05}.jpg', + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + filename_tmpl='{:05}.jpg', + pipeline=test_pipeline)) +evaluation = dict( + interval=1, metrics=['top_k_accuracy', 'mean_class_accuracy']) + +# optimizer +optimizer = dict(lr=0.002, paramwise_cfg=dict(fc_lr5=False), weight_decay=5e-4) +# learning policy +lr_config = dict(policy='step', step=[30, 45]) +total_epochs = 50 + +# runtime settings +find_unused_parameters = True +work_dir = './work_dirs/trn_r50_1x1x8_50e_sthv2_rgb/' diff --git a/docs/changelog.md b/docs/changelog.md index 72a018a210..cdfdb0ad9d 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -9,8 +9,11 @@ **Improvements** - Add softmax option for pytorch2onnx tool ([#781](https://github.com/open-mmlab/mmaction2/pull/781)) +- Support TRN ([#755](https://github.com/open-mmlab/mmaction2/pull/755)) - Test with onnx models and TensorRT engines ([#758](https://github.com/open-mmlab/mmaction2/pull/758)) +**Improvements** + **Bug and Typo Fixes** **ModelZoo** @@ -29,7 +32,7 @@ - Support using backbones from MMCls for TSN ([#679](https://github.com/open-mmlab/mmaction2/pull/679)) - Support using backbones from TorchVision for TSN ([#720](https://github.com/open-mmlab/mmaction2/pull/720)) - Support Mixup and Cutmix for recognizers ([#681](https://github.com/open-mmlab/mmaction2/pull/681)) -- Support Chinese documentation ([#665](https://github.com/open-mmlab/mmaction2/pull/665), [#680](https://github.com/open-mmlab/mmaction2/pull/680), [#689](https://github.com/open-mmlab/mmaction2/pull/689), [#701](https://github.com/open-mmlab/mmaction2/pull/701)[#702](https://github.com/open-mmlab/mmaction2/pull/702), [#703](https://github.com/open-mmlab/mmaction2/pull/703), [#706](https://github.com/open-mmlab/mmaction2/pull/706)[#716](https://github.com/open-mmlab/mmaction2/pull/716), [#717](https://github.com/open-mmlab/mmaction2/pull/717), [#731](https://github.com/open-mmlab/mmaction2/pull/731), [#733](https://github.com/open-mmlab/mmaction2/pull/733), [#735](https://github.com/open-mmlab/mmaction2/pull/735), [#736](https://github.com/open-mmlab/mmaction2/pull/736), [#737](https://github.com/open-mmlab/mmaction2/pull/737), [#738](https://github.com/open-mmlab/mmaction2/pull/738) , [#739](https://github.com/open-mmlab/mmaction2/pull/739), [#740](https://github.com/open-mmlab/mmaction2/pull/740), [#742](https://github.com/open-mmlab/mmaction2/pull/742), [#752](https://github.com/open-mmlab/mmaction2/pull/752), [#759](https://github.com/open-mmlab/mmaction2/pull/759), [#761](https://github.com/open-mmlab/mmaction2/pull/761), [#772](https://github.com/open-mmlab/mmaction2/pull/772), [#775](https://github.com/open-mmlab/mmaction2/pull/775)) +- Support Chinese documentation ([#665](https://github.com/open-mmlab/mmaction2/pull/665), [#680](https://github.com/open-mmlab/mmaction2/pull/680), [#689](https://github.com/open-mmlab/mmaction2/pull/689), [#701](https://github.com/open-mmlab/mmaction2/pull/701), [#702](https://github.com/open-mmlab/mmaction2/pull/702), [#703](https://github.com/open-mmlab/mmaction2/pull/703), [#706](https://github.com/open-mmlab/mmaction2/pull/706), [#716](https://github.com/open-mmlab/mmaction2/pull/716), [#717](https://github.com/open-mmlab/mmaction2/pull/717), 
[#731](https://github.com/open-mmlab/mmaction2/pull/731), [#733](https://github.com/open-mmlab/mmaction2/pull/733), [#735](https://github.com/open-mmlab/mmaction2/pull/735), [#736](https://github.com/open-mmlab/mmaction2/pull/736), [#737](https://github.com/open-mmlab/mmaction2/pull/737), [#738](https://github.com/open-mmlab/mmaction2/pull/738), [#739](https://github.com/open-mmlab/mmaction2/pull/739), [#740](https://github.com/open-mmlab/mmaction2/pull/740), [#742](https://github.com/open-mmlab/mmaction2/pull/742), [#752](https://github.com/open-mmlab/mmaction2/pull/752), [#759](https://github.com/open-mmlab/mmaction2/pull/759), [#761](https://github.com/open-mmlab/mmaction2/pull/761), [#772](https://github.com/open-mmlab/mmaction2/pull/772), [#775](https://github.com/open-mmlab/mmaction2/pull/775)) **Improvements** diff --git a/mmaction/models/__init__.py b/mmaction/models/__init__.py index 0fb79ab759..f73a26d83a 100644 --- a/mmaction/models/__init__.py +++ b/mmaction/models/__init__.py @@ -7,8 +7,8 @@ build_recognizer) from .common import LFB, TAM, Conv2plus1d, ConvAudio from .heads import (AudioTSNHead, AVARoIHead, BaseHead, BBoxHeadAVA, FBOHead, - I3DHead, LFBInferHead, SlowFastHead, TPNHead, TSMHead, - TSNHead, X3DHead) + I3DHead, LFBInferHead, SlowFastHead, TPNHead, TRNHead, + TSMHead, TSNHead, X3DHead) from .localizers import BMN, PEM, TEM from .losses import (BCELossWithLogits, BinaryLogisticRegressionLoss, BMNLoss, CrossEntropyLoss, HVULoss, NLLLoss, OHEMHingeLoss, @@ -32,5 +32,5 @@ 'AudioTSNHead', 'X3D', 'X3DHead', 'ResNet3dLayer', 'DETECTORS', 'SingleRoIExtractor3D', 'BBoxHeadAVA', 'ResNetAudio', 'build_detector', 'ConvAudio', 'AVARoIHead', 'MobileNetV2', 'MobileNetV2TSM', 'TANet', 'LFB', - 'FBOHead', 'LFBInferHead' + 'FBOHead', 'LFBInferHead', 'TRNHead' ] diff --git a/mmaction/models/heads/__init__.py b/mmaction/models/heads/__init__.py index f21c89d44c..ada62589a2 100644 --- a/mmaction/models/heads/__init__.py +++ b/mmaction/models/heads/__init__.py @@ -8,6 +8,7 @@ from .slowfast_head import SlowFastHead from .ssn_head import SSNHead from .tpn_head import TPNHead +from .trn_head import TRNHead from .tsm_head import TSMHead from .tsn_head import TSNHead from .x3d_head import X3DHead @@ -15,5 +16,5 @@ __all__ = [ 'TSNHead', 'I3DHead', 'BaseHead', 'TSMHead', 'SlowFastHead', 'SSNHead', 'TPNHead', 'AudioTSNHead', 'X3DHead', 'BBoxHeadAVA', 'AVARoIHead', - 'FBOHead', 'LFBInferHead' + 'FBOHead', 'LFBInferHead', 'TRNHead' ] diff --git a/mmaction/models/heads/trn_head.py b/mmaction/models/heads/trn_head.py new file mode 100644 index 0000000000..f93818a4bc --- /dev/null +++ b/mmaction/models/heads/trn_head.py @@ -0,0 +1,210 @@ +import itertools + +import numpy as np +import torch +import torch.nn as nn +from mmcv.cnn import normal_init + +from ..registry import HEADS +from .base import BaseHead + + +class RelationModule(nn.Module): + """Relation Module of TRN. + + Args: + hidden_dim (int): The dimension of hidden layer of MLP in relation + module. + num_segments (int): Number of frame segments. + num_classes (int): Number of classes to be classified. 
+ """ + + def __init__(self, hidden_dim, num_segments, num_classes): + super().__init__() + self.hidden_dim = hidden_dim + self.num_segments = num_segments + self.num_classes = num_classes + bottleneck_dim = 512 + self.classifier = nn.Sequential( + nn.ReLU(), + nn.Linear(self.num_segments * self.hidden_dim, bottleneck_dim), + nn.ReLU(), nn.Linear(bottleneck_dim, self.num_classes)) + + def init_weights(self): + # Use the default kaiming_uniform for all nn.linear layers. + pass + + def forward(self, x): + # [N, num_segs * hidden_dim] + x = x.view(x.size(0), -1) + x = self.classifier(x) + return x + + +class RelationModuleMultiScale(nn.Module): + """Relation Module with Multi Scale of TRN. + + Args: + hidden_dim (int): The dimension of hidden layer of MLP in relation + module. + num_segments (int): Number of frame segments. + num_classes (int): Number of classes to be classified. + """ + + def __init__(self, hidden_dim, num_segments, num_classes): + super().__init__() + self.hidden_dim = hidden_dim + self.num_segments = num_segments + self.num_classes = num_classes + + # generate the multiple frame relations + self.scales = range(num_segments, 1, -1) + + self.relations_scales = [] + self.subsample_scales = [] + max_subsample = 3 + for scale in self.scales: + # select the different frame features for different scales + relations_scale = list( + itertools.combinations(range(self.num_segments), scale)) + self.relations_scales.append(relations_scale) + # sample `max_subsample` relation_scale at most + self.subsample_scales.append( + min(max_subsample, len(relations_scale))) + assert len(self.relations_scales[0]) == 1 + + bottleneck_dim = 256 + self.fc_fusion_scales = nn.ModuleList() + for scale in self.scales: + fc_fusion = nn.Sequential( + nn.ReLU(), nn.Linear(scale * self.hidden_dim, bottleneck_dim), + nn.ReLU(), nn.Linear(bottleneck_dim, self.num_classes)) + self.fc_fusion_scales.append(fc_fusion) + + def init_weights(self): + # Use the default kaiming_uniform for all nn.linear layers. + pass + + def forward(self, x): + # the first one is the largest scale + act_all = x[:, self.relations_scales[0][0], :] + act_all = act_all.view( + act_all.size(0), self.scales[0] * self.hidden_dim) + act_all = self.fc_fusion_scales[0](act_all) + + for scaleID in range(1, len(self.scales)): + # iterate over the scales + idx_relations_randomsample = np.random.choice( + len(self.relations_scales[scaleID]), + self.subsample_scales[scaleID], + replace=False) + for idx in idx_relations_randomsample: + act_relation = x[:, self.relations_scales[scaleID][idx], :] + act_relation = act_relation.view( + act_relation.size(0), + self.scales[scaleID] * self.hidden_dim) + act_relation = self.fc_fusion_scales[scaleID](act_relation) + act_all += act_relation + return act_all + + +@HEADS.register_module() +class TRNHead(BaseHead): + """Class head for TRN. + + Args: + num_classes (int): Number of classes to be classified. + in_channels (int): Number of channels in input feature. + num_segments (int): Number of frame segments. Default: 8. + loss_cls (dict): Config for building loss. Default: + dict(type='CrossEntropyLoss') + spatial_type (str): Pooling type in spatial dimension. Default: 'avg'. + relation_type (str): The relation module type. Choices are 'TRN' or + 'TRNMultiScale'. Default: 'TRNMultiScale'. + hidden_dim (int): The dimension of hidden layer of MLP in relation + module. Default: 256. + dropout_ratio (float): Probability of dropout layer. Default: 0.8. + init_std (float): Std value for Initiation. Default: 0.001. 
+ kwargs (dict, optional): Any keyword argument to be used to initialize + the head. + """ + + def __init__(self, + num_classes, + in_channels, + num_segments=8, + loss_cls=dict(type='CrossEntropyLoss'), + spatial_type='avg', + relation_type='TRNMultiScale', + hidden_dim=256, + dropout_ratio=0.8, + init_std=0.001, + **kwargs): + super().__init__(num_classes, in_channels, loss_cls, **kwargs) + + self.num_classes = num_classes + self.in_channels = in_channels + self.num_segments = num_segments + self.spatial_type = spatial_type + self.relation_type = relation_type + self.hidden_dim = hidden_dim + self.dropout_ratio = dropout_ratio + self.init_std = init_std + + if self.relation_type == 'TRN': + self.consensus = RelationModule(self.hidden_dim, self.num_segments, + self.num_classes) + elif self.relation_type == 'TRNMultiScale': + self.consensus = RelationModuleMultiScale(self.hidden_dim, + self.num_segments, + self.num_classes) + else: + raise ValueError(f'Unknown Relation Type {self.relation_type}!') + + if self.dropout_ratio != 0: + self.dropout = nn.Dropout(p=self.dropout_ratio) + else: + self.dropout = None + self.fc_cls = nn.Linear(self.in_channels, self.hidden_dim) + + if self.spatial_type == 'avg': + # use `nn.AdaptiveAvgPool2d` to adaptively match the in_channels. + self.avg_pool = nn.AdaptiveAvgPool2d(1) + else: + self.avg_pool = None + + def init_weights(self): + """Initiate the parameters from scratch.""" + normal_init(self.fc_cls, std=self.init_std) + self.consensus.init_weights() + + def forward(self, x, num_segs): + """Defines the computation performed at every call. + + Args: + x (torch.Tensor): The input data. + num_segs (int): Useless in TRNHead. By default, `num_segs` + is equal to `clip_len * num_clips * num_crops`, which is + automatically generated in Recognizer forward phase and + useless in TRN models. The `self.num_segments` we need is a + hyper parameter to build TRN models. + Returns: + torch.Tensor: The classification scores for input samples. 
+ """ + # [N * num_segs, in_channels, 7, 7] + if self.avg_pool is not None: + x = self.avg_pool(x) + # [N * num_segs, in_channels, 1, 1] + x = torch.flatten(x, 1) + # [N * num_segs, in_channels] + if self.dropout is not None: + x = self.dropout(x) + + # [N, num_segs, hidden_dim] + cls_score = self.fc_cls(x) + cls_score = cls_score.view((-1, self.num_segments) + + cls_score.size()[1:]) + + # [N, num_classes] + cls_score = self.consensus(cls_score) + return cls_score diff --git a/tests/test_models/test_head.py b/tests/test_models/test_head.py index 3ad7cbf013..1856265266 100644 --- a/tests/test_models/test_head.py +++ b/tests/test_models/test_head.py @@ -9,8 +9,8 @@ import mmaction from mmaction.models import (AudioTSNHead, BBoxHeadAVA, FBOHead, I3DHead, - LFBInferHead, SlowFastHead, TPNHead, TSMHead, - TSNHead, X3DHead) + LFBInferHead, SlowFastHead, TPNHead, TRNHead, + TSMHead, TSNHead, X3DHead) from .base import generate_backbone_demo_inputs @@ -309,6 +309,63 @@ def test_tsm_head(): assert cls_scores.shape == torch.Size([2, 4]) +def test_trn_head(): + """Test loss method, layer construction, attributes and forward function in + trn head.""" + from mmaction.models.heads.trn_head import (RelationModule, + RelationModuleMultiScale) + trn_head = TRNHead(num_classes=4, in_channels=2048, relation_type='TRN') + trn_head.init_weights() + + assert trn_head.num_classes == 4 + assert trn_head.dropout_ratio == 0.8 + assert trn_head.in_channels == 2048 + assert trn_head.init_std == 0.001 + assert trn_head.spatial_type == 'avg' + + relation_module = trn_head.consensus + assert isinstance(relation_module, RelationModule) + assert relation_module.hidden_dim == 256 + assert isinstance(relation_module.classifier[3], nn.Linear) + assert relation_module.classifier[3].out_features == trn_head.num_classes + + assert trn_head.dropout.p == trn_head.dropout_ratio + assert isinstance(trn_head.dropout, nn.Dropout) + assert isinstance(trn_head.fc_cls, nn.Linear) + assert trn_head.fc_cls.in_features == trn_head.in_channels + assert trn_head.fc_cls.out_features == trn_head.hidden_dim + + assert isinstance(trn_head.avg_pool, nn.AdaptiveAvgPool2d) + assert trn_head.avg_pool.output_size == 1 + + input_shape = (8, 2048, 7, 7) + feat = torch.rand(input_shape) + + # tsm head inference with no init + num_segs = input_shape[0] + cls_scores = trn_head(feat, num_segs) + assert cls_scores.shape == torch.Size([1, 4]) + + # tsm head inference with init + trn_head = TRNHead( + num_classes=4, + in_channels=2048, + num_segments=8, + relation_type='TRNMultiScale') + trn_head.init_weights() + assert isinstance(trn_head.consensus, RelationModuleMultiScale) + assert trn_head.consensus.scales == range(8, 1, -1) + cls_scores = trn_head(feat, num_segs) + assert cls_scores.shape == torch.Size([1, 4]) + + with pytest.raises(ValueError): + trn_head = TRNHead( + num_classes=4, + in_channels=2048, + num_segments=8, + relation_type='RelationModlue') + + @patch.object(mmaction.models.LFBInferHead, '__del__', Mock) def test_lfb_infer_head(): """Test layer construction, attributes and forward function in lfb infer diff --git a/tests/test_models/test_recognizers/test_recognizer2d.py b/tests/test_models/test_recognizers/test_recognizer2d.py index b021832a8f..927f046273 100644 --- a/tests/test_models/test_recognizers/test_recognizer2d.py +++ b/tests/test_models/test_recognizers/test_recognizer2d.py @@ -139,6 +139,47 @@ def test_tsm(): recognizer(one_img, gradcam=True) +def test_trn(): + config = get_recognizer_cfg('trn/trn_r50_1x1x8_50e_sthv1_rgb.py') + 
config.model['backbone']['pretrained'] = None + + recognizer = build_recognizer(config.model) + + input_shape = (1, 8, 3, 32, 32) + demo_inputs = generate_recognizer_demo_inputs(input_shape) + + imgs = demo_inputs['imgs'] + gt_labels = demo_inputs['gt_labels'] + + losses = recognizer(imgs, gt_labels) + assert isinstance(losses, dict) + + # Test forward test + with torch.no_grad(): + img_list = [img[None, :] for img in imgs] + for one_img in img_list: + recognizer(one_img, None, return_loss=False) + + # test twice sample + 3 crops + input_shape = (2, 48, 3, 32, 32) + demo_inputs = generate_recognizer_demo_inputs(input_shape) + imgs = demo_inputs['imgs'] + + config.model.test_cfg = dict(average_clips='prob') + recognizer = build_recognizer(config.model) + + # Test forward test + with torch.no_grad(): + img_list = [img[None, :] for img in imgs] + for one_img in img_list: + recognizer(one_img, None, return_loss=False) + + # Test forward gradcam + recognizer(imgs, gradcam=True) + for one_img in img_list: + recognizer(one_img, gradcam=True) + + def test_tpn(): config = get_recognizer_cfg('tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py') config.model['backbone']['pretrained'] = None From af4cc63d1c416665bb747e25e40a47a66c86ab5a Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Thu, 8 Apr 2021 23:54:02 +0800 Subject: [PATCH 024/414] Support pytorch 1.8 CI (#791) * supoort pytorch 1.8 CI * mmcv latest * torchvision issue * fix * fix * pt1.3 * pt1.3 --- .github/workflows/build.yml | 48 +++++++++++++++++-------------------- 1 file changed, 22 insertions(+), 26 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 8cf2e2e0a5..83763ac1c7 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -26,16 +26,16 @@ jobs: strategy: matrix: python-version: [3.7] - torch: [1.3.0, 1.5.0, 1.6.0, 1.7.0] + torch: [1.5.0, 1.6.0, 1.7.0, 1.8.0] include: - - torch: 1.3.0 - torchvision: 0.4.1 - torch: 1.5.0 torchvision: 0.6.0 - torch: 1.6.0 torchvision: 0.7.0 - torch: 1.7.0 torchvision: 0.8.1 + - torch: 1.8.0 + torchvision: 0.9.0 steps: - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} @@ -60,7 +60,7 @@ jobs: - name: Install PyTorch run: pip install torch==${{matrix.torch}}+cpu torchvision==${{matrix.torchvision}}+cpu -f https://download.pytorch.org/whl/torch_stable.html - name: Install MMCV - run: pip install mmcv-full==1.3.0 -f https://download.openmmlab.com/mmcv/dist/cpu/torch${{matrix.torch}}/index.html + run: pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cpu/torch${{matrix.torch}}/index.html - name: Install MMDet run: pip install git+https://github.com/open-mmlab/mmdetection/ - name: Install MMCls @@ -84,23 +84,23 @@ jobs: strategy: matrix: python-version: [3.7] - torch: [1.3.0, 1.5.0, 1.6.0, 1.7.0] + torch: [1.3.0, 1.5.0+cu101, 1.6.0+cu101, 1.7.0+cu101, 1.8.0+cu101] include: - torch: 1.3.0 torchvision: 0.4.1 - - torch: 1.5.0 - torchvision: 0.6.0 - - torch: 1.6.0 - torchvision: 0.7.0 - - torch: 1.7.0 - torchvision: 0.8.1 - - torch: 1.7.0 - torchvision: 0.8.1 - python-version: 3.6 - - torch: 1.7.0 - torchvision: 0.8.1 - python-version: 3.8 - + mmcv: 1.3.0+cu101 + - torch: 1.5.0+cu101 + torchvision: 0.6.0+cu101 + mmcv: 1.5.0+cu101 + - torch: 1.6.0+cu101 + torchvision: 0.7.0+cu101 + mmcv: 1.6.0+cu101 + - torch: 1.7.0+cu101 + torchvision: 0.8.1+cu101 + mmcv: 1.7.0+cu101 + - torch: 1.8.0+cu101 + torchvision: 0.9.0+cu101 + mmcv: 1.8.0+cu101 steps: - uses: actions/checkout@v2 - name: Set up Python ${{ 
matrix.python-version }} @@ -135,16 +135,10 @@ jobs: - name: Install lmdb run: pip install lmdb - name: Install PyTorch - run: | - if [ ${{matrix.torch}} == '1.3.0' ] - then - pip install torch==${{matrix.torch}} torchvision==${{matrix.torchvision}} -f https://download.pytorch.org/whl/torch_stable.html - else - pip install torch==${{matrix.torch}}+cu101 torchvision==${{matrix.torchvision}}+cu101 -f https://download.pytorch.org/whl/torch_stable.html - fi + run: pip install torch==${{matrix.torch}} torchvision==${{matrix.torchvision}} -f https://download.pytorch.org/whl/torch_stable.html - name: Install mmaction dependencies run: | - pip install mmcv-full==1.3.0 -f https://download.openmmlab.com/mmcv/dist/cu101/torch${{matrix.torch}}/index.html + pip install mmcv-full==latest+torch${{matrix.mmcv}} -f https://download.openmmlab.com/mmcv/dist/index.html --use-deprecated=legacy-resolver pip install -q git+https://github.com/open-mmlab/mmdetection/ pip install -q git+https://github.com/open-mmlab/mmclassification/ pip install -r requirements.txt @@ -158,7 +152,9 @@ jobs: coverage run --branch --source mmaction -m pytest tests/ coverage xml coverage report -m + # Only upload coverage report for python3.7 && pytorch1.5 - name: Upload coverage to Codecov + if: ${{matrix.torch == '1.5.0+cu101' && matrix.python-version == '3.7'}} uses: codecov/codecov-action@v1.0.14 with: file: ./coverage.xml From 905f07a7128c4d996af13d47d25546ad248ee187 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Sat, 10 Apr 2021 21:03:52 +0800 Subject: [PATCH 025/414] add self.with_neck (#796) --- mmaction/models/recognizers/base.py | 7 ++++++- mmaction/models/recognizers/recognizer2d.py | 8 ++++---- mmaction/models/recognizers/recognizer3d.py | 8 ++++---- 3 files changed, 14 insertions(+), 9 deletions(-) diff --git a/mmaction/models/recognizers/base.py b/mmaction/models/recognizers/base.py index 4a89f195ab..7732ed0766 100644 --- a/mmaction/models/recognizers/base.py +++ b/mmaction/models/recognizers/base.py @@ -91,6 +91,11 @@ def __init__(self, self.fp16_enabled = False + @property + def with_neck(self): + """bool: whether the detector has a neck""" + return hasattr(self, 'neck') and self.neck is not None + def init_weights(self): """Initialize the model network weights.""" if self.backbone_from in ['mmcls', 'mmaction2']: @@ -105,7 +110,7 @@ def init_weights(self): f'{self.backbone_from}!') self.cls_head.init_weights() - if hasattr(self, 'neck'): + if self.with_neck: self.neck.init_weights() @auto_fp16() diff --git a/mmaction/models/recognizers/recognizer2d.py b/mmaction/models/recognizers/recognizer2d.py index 17690ecbf0..bda7db2312 100644 --- a/mmaction/models/recognizers/recognizer2d.py +++ b/mmaction/models/recognizers/recognizer2d.py @@ -26,7 +26,7 @@ def forward_train(self, imgs, labels, **kwargs): x = x.reshape((x.shape[0], -1)) x = x.reshape(x.shape + (1, 1)) - if hasattr(self, 'neck'): + if self.with_neck: x = [ each.reshape((-1, num_segs) + each.shape[1:]).transpose(1, 2).contiguous() @@ -60,7 +60,7 @@ def _do_test(self, imgs): x = x.reshape((x.shape[0], -1)) x = x.reshape(x.shape + (1, 1)) - if hasattr(self, 'neck'): + if self.with_neck: x = [ each.reshape((-1, num_segs) + each.shape[1:]).transpose(1, 2).contiguous() @@ -97,7 +97,7 @@ def _do_fcn_test(self, imgs): imgs = torch.flip(imgs, [-1]) x = self.extract_feat(imgs) - if hasattr(self, 'neck'): + if self.with_neck: x = [ each.reshape((-1, num_segs) + each.shape[1:]).transpose(1, 2).contiguous() @@ -147,7 +147,7 @@ def forward_dummy(self, imgs, 
softmax=False): num_segs = imgs.shape[0] // batches x = self.extract_feat(imgs) - if hasattr(self, 'neck'): + if self.with_neck: x = [ each.reshape((-1, num_segs) + each.shape[1:]).transpose(1, 2).contiguous() diff --git a/mmaction/models/recognizers/recognizer3d.py b/mmaction/models/recognizers/recognizer3d.py index 0589ee73b2..a4b420eee6 100644 --- a/mmaction/models/recognizers/recognizer3d.py +++ b/mmaction/models/recognizers/recognizer3d.py @@ -15,7 +15,7 @@ def forward_train(self, imgs, labels, **kwargs): losses = dict() x = self.extract_feat(imgs) - if hasattr(self, 'neck'): + if self.with_neck: x, loss_aux = self.neck(x, labels.squeeze()) losses.update(loss_aux) @@ -42,7 +42,7 @@ def _do_test(self, imgs): while view_ptr < total_views: batch_imgs = imgs[view_ptr:view_ptr + self.max_testing_views] x = self.extract_feat(batch_imgs) - if hasattr(self, 'neck'): + if self.with_neck: x, _ = self.neck(x) cls_score = self.cls_head(x) cls_scores.append(cls_score) @@ -50,7 +50,7 @@ def _do_test(self, imgs): cls_score = torch.cat(cls_scores) else: x = self.extract_feat(imgs) - if hasattr(self, 'neck'): + if self.with_neck: x, _ = self.neck(x) cls_score = self.cls_head(x) @@ -76,7 +76,7 @@ def forward_dummy(self, imgs, softmax=False): imgs = imgs.reshape((-1, ) + imgs.shape[2:]) x = self.extract_feat(imgs) - if hasattr(self, 'neck'): + if self.with_neck: x, _ = self.neck(x) outs = self.cls_head(x) From 70aba4c355a2e159404238f6f41ff22c51ae7ed1 Mon Sep 17 00:00:00 2001 From: yrqUni <35153598+yrqUni@users.noreply.github.com> Date: Sun, 11 Apr 2021 21:09:56 +0800 Subject: [PATCH 026/414] Update install.md (#797) --- docs_zh_CN/install.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs_zh_CN/install.md b/docs_zh_CN/install.md index 311d40e194..71f18ca620 100644 --- a/docs_zh_CN/install.md +++ b/docs_zh_CN/install.md @@ -127,7 +127,7 @@ pip install -r requirements/build.txt pip install -v -e . # or "python setup.py develop" ``` -如果实在 macOS 环境安装 MMAction2,则需使用如下命令: +如果是在 macOS 环境安装 MMAction2,则需使用如下命令: ```shell CC=clang CXX=clang++ CFLAGS='-stdlib=libc++' pip install -e . From 8e0b49da97af5903d191da29183a0fbf43513860 Mon Sep 17 00:00:00 2001 From: yrqUni <35153598+yrqUni@users.noreply.github.com> Date: Sun, 11 Apr 2021 21:25:32 +0800 Subject: [PATCH 027/414] Update install.md (#799) --- docs_zh_CN/install.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs_zh_CN/install.md b/docs_zh_CN/install.md index 71f18ca620..17992071ad 100644 --- a/docs_zh_CN/install.md +++ b/docs_zh_CN/install.md @@ -135,7 +135,7 @@ CC=clang CXX=clang++ CFLAGS='-stdlib=libc++' pip install -e . f. 安装 mmdetection 以支持时空检测任务。 -如果用户不像做时空检测相关任务,这部分步骤可以选择跳过。 +如果用户不想做时空检测相关任务,这部分步骤可以选择跳过。 可参考 [这里](https://github.com/open-mmlab/mmdetection#installation) 进行 mmdetection 的安装。 From c4978a54d0bfdd5a19cb7c64981fcba7350cf78e Mon Sep 17 00:00:00 2001 From: lizz Date: Sun, 11 Apr 2021 21:28:27 +0800 Subject: [PATCH 028/414] Add ref to mmocr (#798) * Add ref to mmocr * Add chinese version --- README.md | 1 + README_zh-CN.md | 5 +++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 7b39a4f3e4..3ea214b8e0 100644 --- a/README.md +++ b/README.md @@ -239,3 +239,4 @@ We wish that the toolbox and benchmark could serve the growing research communit - [MMTracking](https://github.com/open-mmlab/mmtracking): OpenMMLab video perception toolbox and benchmark. - [MMPose](https://github.com/open-mmlab/mmpose): OpenMMLab pose estimation toolbox and benchmark. 
- [MMEditing](https://github.com/open-mmlab/mmediting): OpenMMLab image and video editing toolbox. +- [MMOCR](https://github.com/open-mmlab/mmocr): A Comprehensive Toolbox for Text Detection, Recognition and Understanding. diff --git a/README_zh-CN.md b/README_zh-CN.md index 2d055bc52f..cba594f878 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -220,9 +220,10 @@ MMAction2 是一款由不同学校和公司共同贡献的开源项目。我们 - [MMCV](https://github.com/open-mmlab/mmcv): OpenMMLab 计算机视觉基础库 - [MMClassification](https://github.com/open-mmlab/mmclassification): OpenMMLab 图像分类工具箱与测试基准 - [MMDetection](https://github.com/open-mmlab/mmdetection): OpenMMLab 检测工具箱与测试基准 -- [MMDetection3D](https://github.com/open-mmlab/mmdetection3d): OpenMMLab's 新一代通用3D目标检测平台 +- [MMDetection3D](https://github.com/open-mmlab/mmdetection3d): OpenMMLab 新一代通用3D目标检测平台 - [MMSegmentation](https://github.com/open-mmlab/mmsegmentation): OpenMMLab 语义分割工具箱与测试基准 -- [MMAction2](https://github.com/open-mmlab/mmaction2): OpenMMLab's 新一代视频理解工具箱与测试基准 +- [MMAction2](https://github.com/open-mmlab/mmaction2): OpenMMLab 新一代视频理解工具箱与测试基准 - [MMTracking](https://github.com/open-mmlab/mmtracking): OpenMMLab 一体化视频目标感知平台 - [MMPose](https://github.com/open-mmlab/mmpose): OpenMMLab 姿态估计工具箱与测试基准 - [MMEditing](https://github.com/open-mmlab/mmediting): OpenMMLab 图像视频编辑工具箱 +- [MMOCR](https://github.com/open-mmlab/mmocr): OpenMMLab 全流程文字检测识别理解工具包. From 9ca91a49089932f28e93e611bd9a9833135eef08 Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Tue, 13 Apr 2021 13:14:20 +0800 Subject: [PATCH 029/414] [Fix] Fix#802 (#803) * resolve comments * update changelog * fix#802 --- demo/demo_spatiotemporal_det.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/demo/demo_spatiotemporal_det.py b/demo/demo_spatiotemporal_det.py index d8362021a5..c7cc1b29eb 100644 --- a/demo/demo_spatiotemporal_det.py +++ b/demo/demo_spatiotemporal_det.py @@ -357,7 +357,7 @@ def main(): for i in range(len(result)): if i + 1 not in label_map: continue - for j in range(proposal.shape[0]): + for j in range(result[i].shape[0]): if result[i][j, 4] > args.action_score_thr: prediction[j].append((label_map[i + 1], result[i][j, 4])) From 9ee7129e6ad86131154d00a0e809f082c801441b Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Tue, 13 Apr 2021 22:47:17 +0800 Subject: [PATCH 030/414] [Fix] Fix#802 (#805) * resolve comments * update changelog * update * update --- demo/demo_spatiotemporal_det.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/demo/demo_spatiotemporal_det.py b/demo/demo_spatiotemporal_det.py index c7cc1b29eb..f56cca2f9e 100644 --- a/demo/demo_spatiotemporal_det.py +++ b/demo/demo_spatiotemporal_det.py @@ -318,6 +318,13 @@ def main(): img_norm_cfg['std'] = np.array(img_norm_cfg['std']) # Build STDET model + try: + # In our spatiotemporal detection demo, different actions should have + # the same number of bboxes. 
+ config['model']['test_cfg']['rcnn']['action_thr'] = .0 + except KeyError: + pass + config.model.backbone.pretrained = None model = build_detector(config.model, test_cfg=config.get('test_cfg')) @@ -357,7 +364,7 @@ def main(): for i in range(len(result)): if i + 1 not in label_map: continue - for j in range(result[i].shape[0]): + for j in range(proposal.shape[0]): if result[i][j, 4] > args.action_score_thr: prediction[j].append((label_map[i + 1], result[i][j, 4])) From d30a3116a3cf5e72df3ce7239bbfbde86052b31b Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Wed, 14 Apr 2021 16:48:29 +0800 Subject: [PATCH 031/414] Use a random master port (no conflict) (#809) --- tools/slurm_train.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/tools/slurm_train.sh b/tools/slurm_train.sh index e586cda180..2cff8aae19 100755 --- a/tools/slurm_train.sh +++ b/tools/slurm_train.sh @@ -1,5 +1,6 @@ #!/usr/bin/env bash +export MASTER_PORT=$((12000 + $RANDOM % 20000)) set -x PARTITION=$1 From d2a1302af9dee55f5a5ed713b5694c31bdb0ff39 Mon Sep 17 00:00:00 2001 From: irvingzhang0512 Date: Wed, 14 Apr 2021 16:51:30 +0800 Subject: [PATCH 032/414] add -r 30 after -i (#807) --- tools/data/ava/cut_videos.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/data/ava/cut_videos.sh b/tools/data/ava/cut_videos.sh index 3f12587f0d..763c9127f4 100644 --- a/tools/data/ava/cut_videos.sh +++ b/tools/data/ava/cut_videos.sh @@ -29,6 +29,6 @@ for video in $(ls -A1 -U ${IN_DATA_DIR}/*) do out_name="${OUT_DATA_DIR}/${video##*/}" if [ ! -f "${out_name}" ]; then - ffmpeg -ss 900 -t 901 -i "${video}" -strict experimental "${out_name}" + ffmpeg -ss 900 -t 901 -i "${video}" -r 30 -strict experimental "${out_name}" fi done From f950c3c8bdabbc23909fcb81f518b96339dff3b8 Mon Sep 17 00:00:00 2001 From: congee <35596075+congee524@users.noreply.github.com> Date: Wed, 14 Apr 2021 16:54:32 +0800 Subject: [PATCH 033/414] fix typo (#801) --- tools/data/ava/README.md | 2 -- tools/data/ava/README_zh-CN.md | 2 -- 2 files changed, 4 deletions(-) diff --git a/tools/data/ava/README.md b/tools/data/ava/README.md index c6532e867b..4bee5b65d3 100644 --- a/tools/data/ava/README.md +++ b/tools/data/ava/README.md @@ -64,8 +64,6 @@ bash cut_videos.sh ## Step 4. Extract RGB and Flow -This part is **optional** if you only want to use the video loader. - Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). If you have plenty of SSD space, then we recommend extracting frames there for better I/O performance. And you can run the following script to soft link the extracted frames. diff --git a/tools/data/ava/README_zh-CN.md b/tools/data/ava/README_zh-CN.md index 8ae943b3c2..1024f2aaef 100644 --- a/tools/data/ava/README_zh-CN.md +++ b/tools/data/ava/README_zh-CN.md @@ -56,8 +56,6 @@ bash cut_videos.sh ## 4. 
提取 RGB 帧和光流 -如果用户仅使用 video loader,则可以跳过本步。 - 在提取之前,请参考 [安装教程](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 如果用户有足够的 SSD 空间,那么建议将视频抽取为 RGB 帧以提升 I/O 性能。用户可以使用以下脚本为抽取得到的帧文件夹建立软连接: From 95b8917e874db91df67c7e3cc885cc11a65c57d7 Mon Sep 17 00:00:00 2001 From: irvingzhang0512 Date: Wed, 14 Apr 2021 16:55:54 +0800 Subject: [PATCH 034/414] [Fix] Fix a bug in pytorch2onnx.py when `num_classes <= 4` (#800) * first commit * update changelog * add verify in onnx unittest * change unittest Co-authored-by: dreamerlin <528557675@qq.com> --- docs/changelog.md | 2 ++ tests/test_utils/test_onnx.py | 7 +++++-- tools/pytorch2onnx.py | 3 ++- 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/docs/changelog.md b/docs/changelog.md index cdfdb0ad9d..68b1e61992 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -16,6 +16,8 @@ **Bug and Typo Fixes** +- Fix a bug in pytorch2onnx.py when `num_classes <= 4` ([#800](https://github.com/open-mmlab/mmaction2/pull/800)) + **ModelZoo** ### 0.13.0 (31/03/2021) diff --git a/tests/test_utils/test_onnx.py b/tests/test_utils/test_onnx.py index 22fc097780..d76c4db369 100644 --- a/tests/test_utils/test_onnx.py +++ b/tests/test_utils/test_onnx.py @@ -16,7 +16,8 @@ def forward(self, x): return self.bn(self.conv(x)) def forward_dummy(self, x): - return (self.forward(x), ) + out = self.bn(self.conv(x)) + return (out, ) def test_onnx_exporting(): @@ -25,4 +26,6 @@ def test_onnx_exporting(): model = TestModel() model = _convert_batchnorm(model) # test exporting - pytorch2onnx(model, (1, 1, 1, 1, 1), output_file=out_file) + if hasattr(model, 'forward_dummy'): + model.forward = model.forward_dummy + pytorch2onnx(model, (2, 1, 1, 1, 1), output_file=out_file, verify=True) diff --git a/tools/pytorch2onnx.py b/tools/pytorch2onnx.py index e421eaabb1..f27e02b434 100644 --- a/tools/pytorch2onnx.py +++ b/tools/pytorch2onnx.py @@ -94,8 +94,9 @@ def pytorch2onnx(model, onnx_result = sess.run( None, {net_feed_input[0]: input_tensor.detach().numpy()})[0] # only compare part of results + random_class = np.random.randint(pytorch_result.shape[1]) assert np.allclose( - pytorch_result[:, 4], onnx_result[:, 4] + pytorch_result[:, random_class], onnx_result[:, random_class] ), 'The outputs are different between Pytorch and ONNX' print('The numerical values are same between Pytorch and ONNX') From 454aa3ff3759d5a19833d85c23f7a100ddf5fa49 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Wed, 14 Apr 2021 17:10:03 +0800 Subject: [PATCH 035/414] Update ckpt for height sthv2 (#789) --- configs/recognition/tsm/README.md | 2 ++ configs/recognition/tsm/README_zh-CN.md | 2 ++ 2 files changed, 4 insertions(+) diff --git a/configs/recognition/tsm/README.md b/configs/recognition/tsm/README.md index 076ee9e6ae..3e8533f9fd 100644 --- a/configs/recognition/tsm/README.md +++ b/configs/recognition/tsm/README.md @@ -59,7 +59,9 @@ |config | resolution | gpus | backbone | pretrain| top1 acc (efficient/accurate)| top5 acc (efficient/accurate)| reference top1 acc (efficient/accurate)| reference top5 acc (efficient/accurate)| gpu_mem(M) | ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| |[tsm_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py) |height 240|8| ResNet50| ImageNet |57.86 / 61.12|84.67 / 86.26|[57.98 / 60.69](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[84.57 / 
86.28](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7069 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/tsm_r50_1x1x8_50e_sthv2_rgb_20200912-033c4ac6.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20200912_140737.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20200912_140737.log.json)| +|[tsm_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py) |height 256|8| ResNet50| ImageNet |60.79 / 63.84|86.60 / 88.30|[xx / 61.2](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7069 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/tsm_r50_256h_1x1x8_50e_sthv2_rgb_20210401-df97f3e1.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210401_143656.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210401_143656.log.json)| |[tsm_r50_1x1x16_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb.py) |height 240|8| ResNet50| ImageNet |59.93 / 62.04|86.10 / 87.35|[58.90 / 60.98](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[85.29 / 86.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 10400| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/tsm_r50_1x1x16_50e_sthv2_rgb_20201010-16469c6f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20201010_224215.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20201010_224215.log.json)| +|[tsm_r50_1x1x16_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py) |height 256|8| ResNet50| ImageNet |61.06 / 63.19|86.66 / 87.93|[xx / 63.1](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 10400 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/tsm_r50_256h_1x1x16_50e_sthv2_rgb_20210331-0a45549c.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20210331_134458.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20210331_134458.log.json)| |[tsm_r101_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb.py) |height 240|8| ResNet101 | ImageNet|58.59 / 61.51|85.07 / 86.90|[58.89 / 61.36](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[85.14 / 87.00](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 9784 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/tsm_r101_1x1x8_50e_sthv2_rgb_20201010-98cdedb8.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20201010_224100.log)| 
[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20201010_224100.log.json)| ### MixUp & CutMix on Something-Something V1 diff --git a/configs/recognition/tsm/README_zh-CN.md b/configs/recognition/tsm/README_zh-CN.md index 62e5249e07..17e537717f 100644 --- a/configs/recognition/tsm/README_zh-CN.md +++ b/configs/recognition/tsm/README_zh-CN.md @@ -57,7 +57,9 @@ |配置文件 | 分辨率 | GPU 数量 | 主干网络| 预训练 | top1 准确率 (efficient/accurate)| top5 准确率 (efficient/accurate)| 参考代码的 top1 准确率 (efficient/accurate)| 参考代码的 top5 准确率 (efficient/accurate)| GPU 显存占用 (M)| ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| |[tsm_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py) |高 240|8| ResNet50| ImageNet |57.86 / 61.12|84.67 / 86.26|[57.98 / 60.69](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[84.57 / 86.28](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7069 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/tsm_r50_1x1x8_50e_sthv2_rgb_20200912-033c4ac6.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20200912_140737.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20200912_140737.log.json)| +|[tsm_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py) |高 256|8| ResNet50| ImageNet |60.79 / 63.84|86.60 / 88.30|[xx / 61.2](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7069 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/tsm_r50_256h_1x1x8_50e_sthv2_rgb_20210401-df97f3e1.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210401_143656.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210401_143656.log.json)| |[tsm_r50_1x1x16_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb.py) |高 240|8| ResNet50| ImageNet |59.93 / 62.04|86.10 / 87.35|[58.90 / 60.98](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[85.29 / 86.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 10400| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/tsm_r50_1x1x16_50e_sthv2_rgb_20201010-16469c6f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20201010_224215.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20201010_224215.log.json)| +|[tsm_r50_1x1x16_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py) |高 256|8| ResNet50| ImageNet |61.06 / 63.19|86.66 / 87.93|[xx / 63.1](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 10400 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/tsm_r50_256h_1x1x16_50e_sthv2_rgb_20210331-0a45549c.pth) 
| [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20210331_134458.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20210331_134458.log.json)| |[tsm_r101_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb.py) |高 240|8| ResNet101 | ImageNet|58.59 / 61.51|85.07 / 86.90|[58.89 / 61.36](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[85.14 / 87.00](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 9784 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/tsm_r101_1x1x8_50e_sthv2_rgb_20201010-98cdedb8.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20201010_224100.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20201010_224100.log.json)| ### MixUp & CutMix on Something-Something V1 From 36271bb2fe46158398f9c47d1e43d3ff75ae0e41 Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Wed, 14 Apr 2021 21:39:18 +0800 Subject: [PATCH 036/414] [Improvement] Refactor STDet (#782) * resolve comments * update changelog * refactor stdet * fix bug * improve codecov * update * update * update --- mmaction/datasets/pipelines/augmentations.py | 261 ++++++++---------- mmaction/datasets/pipelines/loading.py | 4 +- .../test_augmentations/test_boxes.py | 94 ------- .../test_augmentations/test_crop.py | 8 +- .../test_augmentations/test_flip.py | 4 +- .../test_augmentations/test_transform.py | 2 +- 6 files changed, 126 insertions(+), 247 deletions(-) delete mode 100644 tests/test_data/test_pipelines/test_augmentations/test_boxes.py diff --git a/mmaction/datasets/pipelines/augmentations.py b/mmaction/datasets/pipelines/augmentations.py index 2b9e521905..ad4d44c85a 100644 --- a/mmaction/datasets/pipelines/augmentations.py +++ b/mmaction/datasets/pipelines/augmentations.py @@ -1,4 +1,5 @@ import random +import warnings from collections.abc import Sequence import mmcv @@ -42,6 +43,36 @@ def _init_lazy_if_proper(results, lazy): assert 'lazy' not in results, 'Use Fuse after lazy operations' +@PIPELINES.register_module() +class EntityBoxRescale: + + def __init__(self, scale_factor): + raise NotImplementedError( + 'This component should not be used in the ' + 'data pipeline and is removed in PR #782. Details see ' + 'https://github.com/open-mmlab/mmaction2/pull/782') + + +@PIPELINES.register_module() +class EntityBoxCrop: + + def __init__(self, crop_bbox): + raise NotImplementedError( + 'This component should not be used in the ' + 'data pipeline and is removed in PR #782. Details see ' + 'https://github.com/open-mmlab/mmaction2/pull/782') + + +@PIPELINES.register_module() +class EntityBoxFlip: + + def __init__(self, img_shape): + raise NotImplementedError( + 'This component should not be used in the ' + 'data pipeline and is removed in PR #782. Details see ' + 'https://github.com/open-mmlab/mmaction2/pull/782') + + @PIPELINES.register_module() class Imgaug: """Imgaug augmentation. @@ -345,6 +376,8 @@ class RandomScale: """ def __init__(self, scales, mode='range', **kwargs): + warnings.warn('"RandomScale" is deprecated and will be removed in ' + 'later versions. 
It is currently not used in MMAction2') self.mode = mode if self.mode not in ['range', 'value']: raise ValueError(f"mode should be 'range' or 'value', " @@ -391,134 +424,6 @@ def __repr__(self): return repr_str -# Note, entity box transfroms are not added to: ThreeCrop, TenCrop, -# MultiGroupCrop. -@PIPELINES.register_module() -class EntityBoxRescale: - """Rescale the entity box and proposals according to the image shape. - - Required keys are "proposals", "gt_bboxes", added or modified keys are - "gt_bboxes". If original "proposals" is not None, "proposals" and - will be added or modified. - - Args: - scale_factor (np.ndarray): The scale factor used entity_box rescaling. - """ - - def __init__(self, scale_factor): - self.scale_factor = scale_factor - - def __call__(self, results): - scale_factor = np.concatenate([self.scale_factor, self.scale_factor]) - - proposals = results['proposals'] - gt_bboxes = results['gt_bboxes'] - results['gt_bboxes'] = gt_bboxes * scale_factor - - if proposals is not None: - assert proposals.shape[1] == 4, ( - 'proposals shape should be in ' - f'(n, 4), but got {proposals.shape}') - results['proposals'] = proposals * scale_factor - - return results - - def __repr__(self): - return f'{self.__class__.__name__}(scale_factor={self.scale_factor})' - - -@PIPELINES.register_module() -class EntityBoxCrop: - """Crop the entity boxes and proposals according to the cropped images. - - Required keys are "proposals", "gt_bboxes", added or modified keys are - "gt_bboxes". If original "proposals" is not None, "proposals" will be - modified. - - Args: - crop_bbox(np.ndarray | None): The bbox used to crop the original image. - """ - - def __init__(self, crop_bbox): - self.crop_bbox = crop_bbox - - def __call__(self, results): - proposals = results['proposals'] - gt_bboxes = results['gt_bboxes'] - - if self.crop_bbox is None: - return results - - x1, y1, x2, y2 = self.crop_bbox - img_w, img_h = x2 - x1, y2 - y1 - - assert gt_bboxes.shape[-1] == 4 - gt_bboxes_ = gt_bboxes.copy() - gt_bboxes_[..., 0::2] = np.clip(gt_bboxes[..., 0::2] - x1, 0, - img_w - 1) - gt_bboxes_[..., 1::2] = np.clip(gt_bboxes[..., 1::2] - y1, 0, - img_h - 1) - results['gt_bboxes'] = gt_bboxes_ - - if proposals is not None: - assert proposals.shape[-1] == 4 - proposals_ = proposals.copy() - proposals_[..., 0::2] = np.clip(proposals[..., 0::2] - x1, 0, - img_w - 1) - proposals_[..., 1::2] = np.clip(proposals[..., 1::2] - y1, 0, - img_h - 1) - results['proposals'] = proposals_ - return results - - def __repr__(self): - return f'{self.__class__.__name__}(crop_bbox={self.crop_bbox})' - - -@PIPELINES.register_module() -class EntityBoxFlip: - """Flip the entity boxes and proposals with a probability. - - Reverse the order of elements in the given bounding boxes and proposals - with a specific direction. The shape of them are preserved, but the - elements are reordered. Only the horizontal flip is supported (seems - vertical flipping makes no sense). Required keys are "proposals", - "gt_bboxes", added or modified keys are "gt_bboxes". If "proposals" - is not None, it will also be modified. - - Args: - img_shape (tuple[int]): The img shape. 
- """ - - def __init__(self, img_shape): - self.img_shape = img_shape - assert mmcv.is_tuple_of(img_shape, int) - - def __call__(self, results): - proposals = results['proposals'] - gt_bboxes = results['gt_bboxes'] - img_h, img_w = self.img_shape - - assert gt_bboxes.shape[-1] == 4 - gt_bboxes_ = gt_bboxes.copy() - gt_bboxes_[..., 0::4] = img_w - gt_bboxes[..., 2::4] - 1 - gt_bboxes_[..., 2::4] = img_w - gt_bboxes[..., 0::4] - 1 - if proposals is not None: - assert proposals.shape[-1] == 4 - proposals_ = proposals.copy() - proposals_[..., 0::4] = img_w - proposals[..., 2::4] - 1 - proposals_[..., 2::4] = img_w - proposals[..., 0::4] - 1 - else: - proposals_ = None - - results['proposals'] = proposals_ - results['gt_bboxes'] = gt_bboxes_ - return results - - def __repr__(self): - repr_str = f'{self.__class__.__name__}(img_shape={self.img_shape})' - return repr_str - - @PIPELINES.register_module() class RandomCrop: """Vanilla square random crop that specifics the output size. @@ -538,6 +443,37 @@ def __init__(self, size, lazy=False): self.size = size self.lazy = lazy + def _box_crop(self, box, crop_bbox): + """Crop the bounding boxes according to the crop_bbox. + + Args: + box (np.ndarray): The bounding boxes. + crop_bbox(np.ndarray): The bbox used to crop the original image. + """ + + x1, y1, x2, y2 = crop_bbox + img_w, img_h = x2 - x1, y2 - y1 + + box_ = box.copy() + box_[..., 0::2] = np.clip(box[..., 0::2] - x1, 0, img_w - 1) + box_[..., 1::2] = np.clip(box[..., 1::2] - y1, 0, img_h - 1) + return box_ + + def _all_box_crop(self, results, crop_bbox): + """Crop the gt_bboxes and proposals in results according to crop_bbox. + + Args: + results (dict): All information about the sample, which contain + 'gt_bboxes' and 'proposals' (optional). + crop_bbox(np.ndarray): The bbox used to crop the original image. + """ + results['gt_bboxes'] = self._box_crop(results['gt_bboxes'], crop_bbox) + if 'proposals' in results and results['proposals'] is not None: + assert results['proposals'].shape[1] == 4 + results['proposals'] = self._box_crop(results['proposals'], + crop_bbox) + return results + def __call__(self, results): """Performs the RandomCrop augmentation. @@ -578,8 +514,9 @@ def __call__(self, results): new_h, new_w = self.size, self.size - results['crop_bbox'] = np.array( + crop_bbox = np.array( [x_offset, y_offset, x_offset + new_w, y_offset + new_h]) + results['crop_bbox'] = crop_bbox results['img_shape'] = (new_h, new_w) @@ -608,8 +545,7 @@ def __call__(self, results): # Process entity boxes if 'gt_bboxes' in results: assert not self.lazy - entity_box_crop = EntityBoxCrop(results['crop_bbox']) - results = entity_box_crop(results) + results = self._all_box_crop(results, results['crop_bbox']) return results @@ -620,7 +556,7 @@ def __repr__(self): @PIPELINES.register_module() -class RandomResizedCrop: +class RandomResizedCrop(RandomCrop): """Random crop that specifics the area and height-weight ratio range. Required keys in results are "imgs", "img_shape", "crop_bbox" and "lazy", @@ -760,8 +696,8 @@ def __call__(self, results): if 'gt_bboxes' in results: assert not self.lazy - entity_box_crop = EntityBoxCrop(results['crop_bbox']) - results = entity_box_crop(results) + results = self._all_box_crop(results, results['crop_bbox']) + return results def __repr__(self): @@ -773,7 +709,7 @@ def __repr__(self): @PIPELINES.register_module() -class MultiScaleCrop: +class MultiScaleCrop(RandomCrop): """Crop images with a list of randomly selected scales. 
Randomly select the w and h scales from a list of scales. Scale of 1 means @@ -930,8 +866,7 @@ def __call__(self, results): if 'gt_bboxes' in results: assert not self.lazy - entity_box_crop = EntityBoxCrop(results['crop_bbox']) - results = entity_box_crop(results) + results = self._all_box_crop(results, results['crop_bbox']) return results @@ -991,6 +926,17 @@ def __init__(self, self.interpolation = interpolation self.lazy = lazy + def _box_resize(self, box, scale_factor): + """Rescale the bounding boxes according to the scale_factor. + + Args: + box (np.ndarray): The bounding boxes. + scale_factor (np.ndarray): The scale factor used for rescaling. + """ + assert len(scale_factor) == 2 + scale_factor = np.concatenate([scale_factor, scale_factor]) + return box * scale_factor + def __call__(self, results): """Performs the Resize augmentation. @@ -1031,8 +977,12 @@ def __call__(self, results): if 'gt_bboxes' in results: assert not self.lazy - entity_box_rescale = EntityBoxRescale(self.scale_factor) - results = entity_box_rescale(results) + results['gt_bboxes'] = self._box_resize(results['gt_bboxes'], + self.scale_factor) + if 'proposals' in results and results['proposals'] is not None: + assert results['proposals'].shape[1] == 4 + results['proposals'] = self._box_resize( + results['proposals'], self.scale_factor) return results @@ -1132,6 +1082,18 @@ def __init__(self, self.flip_label_map = flip_label_map self.lazy = lazy + def _box_flip(self, box, img_width): + """Flip the bounding boxes given the width of the image. + + Args: + box (np.ndarray): The bounding boxes. + img_width (int): The img width. + """ + box_ = box.copy() + box_[..., 0::4] = img_width - box[..., 2::4] + box_[..., 2::4] = img_width - box[..., 0::4] + return box_ + def __call__(self, results): """Performs the Flip augmentation. @@ -1175,8 +1137,12 @@ def __call__(self, results): if 'gt_bboxes' in results and flip: assert not self.lazy and self.direction == 'horizontal' - entity_box_flip = EntityBoxFlip(results['img_shape']) - results = entity_box_flip(results) + width = results['img_shape'][1] + results['gt_bboxes'] = self._box_flip(results['gt_bboxes'], width) + if 'proposals' in results and results['proposals'] is not None: + assert results['proposals'].shape[1] == 4 + results['proposals'] = self._box_flip(results['proposals'], + width) return results @@ -1457,7 +1423,7 @@ def __repr__(self): @PIPELINES.register_module() -class CenterCrop: +class CenterCrop(RandomCrop): """Crop the center area from images. Required keys are "imgs", "img_shape", added or modified keys are "imgs", @@ -1539,8 +1505,7 @@ def __call__(self, results): if 'gt_bboxes' in results: assert not self.lazy - entity_box_crop = EntityBoxCrop(results['crop_bbox']) - results = entity_box_crop(results) + results = self._all_box_crop(results, results['crop_bbox']) return results @@ -1577,6 +1542,8 @@ def __call__(self, results): to the next transform in pipeline. 
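The matching `Flip._box_flip` added above mirrors only the x coordinates; note that it uses `img_width - x` without the `- 1` the removed `EntityBoxFlip` applied. Again a standalone sketch, with illustrative numbers:

```python
import numpy as np


def box_flip(box, img_width):
    """Standalone copy of the _box_flip arithmetic from the diff above."""
    box_ = box.copy()
    box_[..., 0::4] = img_width - box[..., 2::4]  # new x1 from old x2
    box_[..., 2::4] = img_width - box[..., 0::4]  # new x2 from old x1
    return box_


# Mirroring inside a 320-pixel-wide frame reflects and swaps x1/x2;
# y coordinates are untouched.
gt_bboxes = np.array([[10., 20., 100., 200.]])
print(box_flip(gt_bboxes, 320))  # [[220.  20. 310. 200.]]
```

`Resize._box_resize` needs no sketch of its own: it is an elementwise multiply by `[w_scale, h_scale, w_scale, h_scale]`.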
""" _init_lazy_if_proper(results, False) + if 'gt_bboxes' in results or 'proposals' in results: + warnings.warn('ThreeCrop cannot process bounding boxes') imgs = results['imgs'] img_h, img_w = results['imgs'][0].shape[:2] @@ -1649,6 +1616,9 @@ def __call__(self, results): """ _init_lazy_if_proper(results, False) + if 'gt_bboxes' in results or 'proposals' in results: + warnings.warn('TenCrop cannot process bounding boxes') + imgs = results['imgs'] img_h, img_w = results['imgs'][0].shape[:2] @@ -1724,6 +1694,9 @@ def __call__(self, results): results (dict): The resulting dict to be modified and passed to the next transform in pipeline. """ + if 'gt_bboxes' in results or 'proposals' in results: + warnings.warn('MultiGroupCrop cannot process bounding boxes') + imgs = results['imgs'] img_h, img_w = imgs[0].shape[:2] crop_w, crop_h = self.crop_size diff --git a/mmaction/datasets/pipelines/loading.py b/mmaction/datasets/pipelines/loading.py index 2714ea36d0..2fb6bf3365 100644 --- a/mmaction/datasets/pipelines/loading.py +++ b/mmaction/datasets/pipelines/loading.py @@ -422,9 +422,9 @@ def _get_clips(self, center_index, skip_offsets, shot_info): start = center_index - (self.clip_len // 2) * self.frame_interval end = center_index + ((self.clip_len + 1) // 2) * self.frame_interval frame_inds = list(range(start, end, self.frame_interval)) - frame_inds = frame_inds + skip_offsets + if not self.test_mode: + frame_inds = frame_inds + skip_offsets frame_inds = np.clip(frame_inds, shot_info[0], shot_info[1] - 1) - return frame_inds def __call__(self, results): diff --git a/tests/test_data/test_pipelines/test_augmentations/test_boxes.py b/tests/test_data/test_pipelines/test_augmentations/test_boxes.py deleted file mode 100644 index 15c5c85731..0000000000 --- a/tests/test_data/test_pipelines/test_augmentations/test_boxes.py +++ /dev/null @@ -1,94 +0,0 @@ -import copy - -import numpy as np -import pytest -from mmcv.utils import assert_dict_has_keys -from numpy.testing import assert_array_almost_equal - -from mmaction.datasets.pipelines import (EntityBoxCrop, EntityBoxFlip, - EntityBoxRescale) - - -class TestBoxes: - - def test_box_crop(self): - target_keys = ['proposals', 'crop_bbox', 'gt_bboxes'] - results = dict( - proposals=np.array([[3.696, 65.312, 220.08, 408.928]]), - crop_bbox=[13, 75, 200, 450], - gt_bboxes=np.array([[10.416, 67.392, 225.12, 413.92]])) - - crop_bbox = results['crop_bbox'] - - box_crop = EntityBoxCrop(crop_bbox) - - results_ = copy.deepcopy(results) - results_ = box_crop(results_) - assert_dict_has_keys(results_, target_keys) - assert_array_almost_equal(results_['gt_bboxes'], - np.array([[0, 0, 186, 338.92]])) - assert_array_almost_equal(results_['proposals'], - np.array([[0, 0, 186, 333.928]])) - - results_ = copy.deepcopy(results) - results_['proposals'] = None - results_ = box_crop(results_) - assert results_['proposals'] is None - assert repr(box_crop) == f'EntityBoxCrop(crop_bbox={crop_bbox})' - - def test_box_flip(self): - target_keys = ['gt_bboxes', 'proposals', 'img_shape'] - results = dict( - proposals=np.array([[0, 0, 186, 333.928]]), - img_shape=(305, 200), - gt_bboxes=np.array([[0, 0, 186, 338.92]])) - - img_shape = results['img_shape'] - - box_flip = EntityBoxFlip(img_shape) - results_ = copy.deepcopy(results) - results_ = box_flip(results_) - assert_dict_has_keys(results_, target_keys) - assert_array_almost_equal(results_['gt_bboxes'], - np.array([[13, 0, 199, 338.92]])) - assert_array_almost_equal(results_['proposals'], - np.array([[13, 0, 199, 333.928]])) - - box_flip = 
EntityBoxFlip(img_shape) - results_ = copy.deepcopy(results) - results_['proposals'] = None - results_ = box_flip(results_) - assert results_['proposals'] is None - assert repr(box_flip) == f'EntityBoxFlip(img_shape={img_shape})' - - def test_box_rescale(self): - target_keys = ['img_shape', 'scale_factor', 'proposals', 'gt_bboxes'] - results = dict( - img_shape=(520, 480), - scale_factor=(0.7, 0.8), - proposals=np.array([[5.28, 81.64, 314.4, 511.16]]), - gt_bboxes=np.array([[14.88, 84.24, 321.6, 517.4]])) - scale_factor = results['scale_factor'] - - with pytest.raises(AssertionError): - box_scale = EntityBoxRescale(scale_factor) - results_ = copy.deepcopy(results) - results_['proposals'] = np.array([[5.28, 81.64, 314.4]]) - box_scale(results_) - - box_scale = EntityBoxRescale(scale_factor) - results_ = copy.deepcopy(results) - results_ = box_scale(results_) - assert_dict_has_keys(results_, target_keys) - assert_array_almost_equal(results_['proposals'], - np.array([[3.696, 65.312, 220.08, 408.928]])) - assert_array_almost_equal(results_['gt_bboxes'], - np.array([[10.416, 67.392, 225.12, 413.92]])) - - results_ = copy.deepcopy(results) - results_['proposals'] = None - results_ = box_scale(results_) - assert_dict_has_keys(results_, target_keys) - assert results_['proposals'] is None - assert repr(box_scale) == ('EntityBoxRescale' - f'(scale_factor={scale_factor})') diff --git a/tests/test_data/test_pipelines/test_augmentations/test_crop.py b/tests/test_data/test_pipelines/test_augmentations/test_crop.py index e04f18c032..dda2a57ad4 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_crop.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_crop.py @@ -28,7 +28,7 @@ def test_random_crop(self): results = dict(imgs=imgs) random_crop = RandomCrop(size=224) results['gt_bboxes'] = np.array([[0, 0, 340, 224]]) - results['proposals'] = None + results['proposals'] = np.array([[0, 0, 340, 224]]) random_crop_result = random_crop(results) assert assert_dict_has_keys(random_crop_result, target_keys) assert check_crop(imgs, random_crop_result['imgs'], @@ -75,7 +75,7 @@ def test_random_resized_crop(self): imgs = list(np.random.rand(2, 256, 341, 3)) results = dict(imgs=imgs) results['gt_bboxes'] = np.array([[0, 0, 340, 256]]) - results['proposals'] = None + results['proposals'] = np.array([[0, 0, 340, 256]]) with pytest.raises(AssertionError): # area_range[0] > area_range[1], which is wrong @@ -142,7 +142,7 @@ def test_multi_scale_crop(self): imgs = list(np.random.rand(2, 256, 341, 3)) results = dict(imgs=imgs) results['gt_bboxes'] = np.array([[0, 0, 340, 256]]) - results['proposals'] = None + results['proposals'] = np.array([[0, 0, 340, 256]]) config = dict( input_size=224, scales=(1, 0.8), @@ -212,7 +212,7 @@ def test_center_crop(self): imgs = list(np.random.rand(2, 240, 320, 3)) results = dict(imgs=imgs) results['gt_bboxes'] = np.array([[0, 0, 320, 240]]) - results['proposals'] = None + results['proposals'] = np.array([[0, 0, 320, 240]]) center_crop = CenterCrop(crop_size=224) center_crop_results = center_crop(results) target_keys = ['imgs', 'crop_bbox', 'img_shape'] diff --git a/tests/test_data/test_pipelines/test_augmentations/test_flip.py b/tests/test_data/test_pipelines/test_augmentations/test_flip.py index 6f25002bc0..9987c23320 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_flip.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_flip.py @@ -21,8 +21,6 @@ def test_flip(self): # do not flip imgs. 
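One behavioural change in this refactor sits outside the augmentations: in loading.py above, `SampleAVAFrames` now adds the random `skip_offsets` only when `test_mode` is off, so test-time clips are sampled deterministically. A self-contained sketch of that sampling step with hypothetical numbers (the offset distribution here is illustrative, not the one mmaction2 draws from):

```python
import numpy as np

clip_len, frame_interval, test_mode = 8, 2, True
center_index, shot_info = 40, (0, 90)

start = center_index - (clip_len // 2) * frame_interval
end = center_index + ((clip_len + 1) // 2) * frame_interval
frame_inds = np.arange(start, end, frame_interval)
if not test_mode:
    # training only: jitter every sampled index by a small random offset
    frame_inds = frame_inds + np.random.randint(-1, 2, size=clip_len)
frame_inds = np.clip(frame_inds, shot_info[0], shot_info[1] - 1)
print(frame_inds)  # deterministic in test mode: [32 34 36 38 40 42 44 46]
```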
imgs = list(np.random.rand(2, 64, 64, 3)) results = dict(imgs=copy.deepcopy(imgs), modality='RGB') - results['gt_bboxes'] = np.array([[0, 0, 60, 60]]) - results['proposals'] = None flip = Flip(flip_ratio=0, direction='horizontal') flip_results = flip(results) assert assert_dict_has_keys(flip_results, target_keys) @@ -33,6 +31,8 @@ def test_flip(self): # always flip imgs horizontally. imgs = list(np.random.rand(2, 64, 64, 3)) results = dict(imgs=copy.deepcopy(imgs), modality='RGB') + results['gt_bboxes'] = np.array([[0, 0, 60, 60]]) + results['proposals'] = np.array([[0, 0, 60, 60]]) flip = Flip(flip_ratio=1, direction='horizontal') flip_results = flip(results) assert assert_dict_has_keys(flip_results, target_keys) diff --git a/tests/test_data/test_pipelines/test_augmentations/test_transform.py b/tests/test_data/test_pipelines/test_augmentations/test_transform.py index 98916aa0f8..c2e9a624d1 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_transform.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_transform.py @@ -73,7 +73,7 @@ def test_resize(self): imgs = list(np.random.rand(2, 240, 320, 3)) results = dict(imgs=imgs, modality='RGB') results['gt_bboxes'] = np.array([[0, 0, 320, 240]]) - results['proposals'] = None + results['proposals'] = np.array([[0, 0, 320, 240]]) resize = Resize(scale=(-1, 256), keep_ratio=True) resize_results = resize(results) assert assert_dict_has_keys(resize_results, target_keys) From 8972e537a77770c4488b33aa595ae45054239159 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Thu, 15 Apr 2021 17:54:26 +0800 Subject: [PATCH 037/414] [Docs] Add QR code in CN README (#812) * add QR code in CN README * add QR code in CN README * same height --- README_zh-CN.md | 21 ++++++++++++++++++++- docs/imgs/qq_group_qrcode.jpg | Bin 0 -> 204806 bytes docs/imgs/zhihu_qrcode.jpg | Bin 0 -> 397245 bytes 3 files changed, 20 insertions(+), 1 deletion(-) create mode 100644 docs/imgs/qq_group_qrcode.jpg create mode 100644 docs/imgs/zhihu_qrcode.jpg diff --git a/README_zh-CN.md b/README_zh-CN.md index cba594f878..18ddb43e1d 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -226,4 +226,23 @@ MMAction2 是一款由不同学校和公司共同贡献的开源项目。我们 - [MMTracking](https://github.com/open-mmlab/mmtracking): OpenMMLab 一体化视频目标感知平台 - [MMPose](https://github.com/open-mmlab/mmpose): OpenMMLab 姿态估计工具箱与测试基准 - [MMEditing](https://github.com/open-mmlab/mmediting): OpenMMLab 图像视频编辑工具箱 -- [MMOCR](https://github.com/open-mmlab/mmocr): OpenMMLab 全流程文字检测识别理解工具包. +- [MMOCR](https://github.com/open-mmlab/mmocr): OpenMMLab 全流程文字检测识别理解工具包 + +## 欢迎加入 OpenMMLab 社区 + +扫描下方的二维码可关注 OpenMMLab 团队的 [知乎官方账号](https://www.zhihu.com/people/openmmlab),加入 OpenMMLab 团队的 [官方交流 QQ 群](https://jq.qq.com/?_wv=1027&k=aCvMxdr3) + +
+<div align="center">
+<img src="docs/imgs/zhihu_qrcode.jpg" height="400" />  <img src="docs/imgs/qq_group_qrcode.jpg" height="400" />
+</div>
+
+我们会在 OpenMMLab 社区为大家
+
+- 📢 分享 AI 框架的前沿核心技术
+- 💻 解读 PyTorch 常用模块源码
+- 📰 发布 OpenMMLab 的相关新闻
+- 🚀 介绍 OpenMMLab 开发的前沿算法
+- 🏃 获取更高效的问题答疑和意见反馈
+- 🔥 提供与各行各业开发者充分交流的平台
+
+干货满满 📘，等你来撩 💗，OpenMMLab 社区期待您的加入 👬
diff --git a/docs/imgs/qq_group_qrcode.jpg b/docs/imgs/qq_group_qrcode.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..417347449fe64cbb2c9076601f7a8206d8b54706
GIT binary patch
literal 204806
[base85-encoded JPEG data omitted]

diff --git a/docs/imgs/zhihu_qrcode.jpg b/docs/imgs/zhihu_qrcode.jpg
new file mode 100644
GIT binary patch
literal 397245
[base85-encoded JPEG data omitted]
zqheE!fSwQpdLqy{)Bgm}6P}~EJkJ^(oQOEdbBca&I2FYeISj949j=qhDK2LW2>AOQ z@4V6%RN0mLkg%>z?|A0xRcC&R2J}*z@WE7;wWAi(uc1{mQ2ll_&cG86pR|iEE;rk7 z+2SisP&tYp1uXF+4ijEvm)Bv6u=Q5LbyY@e>4z`q9^v~nG;|N#Js-J3>;2t0DKphz z^B00Vy!9tKcm`TSoON~U2X4A1O3ooTk!IOp2A`0%y&E)8#}ks7-4 zandnzSCdNmvs8Wu4H}G=`yBoBhd~QW^4#1p@xr-9d*`zdwu4~VTk8dZKb zPqdI7vo;Eh5hdUzwsWgGI7vu9^8N(D11UZk6#EVz$ zK|1<3vBVlk5+3QhSzSCouj|h|{jLu5e008>FB9tG-}7qtk`F#*e$I6=hy7Ws_XE>9 zmhHkk9&|zHewx+etE=f){k1{N>P9!grDY%P&#ICHyI}t&f)MKz2*5N>%%Bmp9{})e z%UwvTI{|kAvn+3XNqZ_!6w7)dM*GWm#{Rl472vo(ek~W&2(WBp(Tz&G+YEP**XQQ+ z*>q~WIr4(NnlvS`(_2}AssBW*OOm@6`P>7bqB{f6+h_+0D$5WTXz*3ldKNhZl;yR| zVOP0v43yG!%57KXlD(6^OrO8nrz~(4BM@GS#>bx0C_p%}-( zoM+|fNyzCpTPOabL`}ghUtH>}ISO?{YM9LGfC{nvjZ`UUuzvy!YB-0dYay0Y5@yl} z==e-4|4P&?r7Z4Yo_}{CH&zt~hYi_(P=B@uOE7-rt8zN?w{u<3wuH=Wt?=8PaNoHS z6_6ES4911Ch>7cOGq4K&x6M1eW=HI7J+y09TpFG_@>{0r7wc)B&cKu;AnI#1;9))B z&@z)2MkfKJ6)FTT&5d$LPOsq>LBZaA!wFW|Hp2dpyUHLA|oEl{``7k_Z}1UCaBH|X~WE&=A8px!7U1+Z}!)pWJ=$l zUa1KEqWHr7v{BDzy?feAw4Ztr)WK-G&y`Qc>2#i1(OnPew_opY<S~EQ&>Js?dmZ~>nH9J9>Nz{pivos&J~pXJl{v8M*^+MF0m$ct zhjdPrNVbLCrVPTlYg<`a5FzPqoon;^Dw?ijj>iW1oI`%44>I;nInGF5Szo6s!;`v&0QVVK`nO>uB*F&GGRH3J z9Kdte|G+Ovbb|obKYzdk-QpU6Od--xi#ChQ4*_xALpWA^KeZeRBhMV?f%x-zOEJE9 z(ka1;KhJ^xe3i>R`~aqQQJVDaP%o+17_y>fC%t0FBBt1{2Zku-f85|*X|4Oz^XNt zV3vp>KuV5JfFK;I0?UVL0PI@ppNTXyf=eyKg-=ZkBnufqrTv^_BIE~fBNoA?V5x4r|aIArdYq925 z(CZ&JuF`wyO?lztm+$S|I@(E_?+6Z{HKJK3p9oK0Uyez4s(=TseAq9QgP7B6U?u=`a$#Ijra_Y8vygeB zEROj>>`QU24>2FxnrE?D`v0WBtQ%bT%cfqIwkOYd#AeEqg+ z;pE;Rb#89;9OVN6R)5(vNjAoeX}R(~p`e`#N?&pLy~t;>lCFq3;f*ocKW?c1yKeHP ziyf|);$<)Gz4vbO?HVbjat?GIEd;b!nEqfK=KJ4+^)Qgba5w%_%-CZX$mdEwH18;y z^$A_5%9mWex7_7>qx@7CY>yE;hC&n}Mk>Y)yQFFiHlxN7VB`&EJZ-Znpem0r>jz-< zcgsX*ZdCPLreJ$8<{=*5t``<)!BS=xKOJg~m2;$p%-PP}%*pK39kZ+Lec>Wxs`6lG zvU0KK!Zad>=iE{xDlf;#SAi@!&yfg0HJ9G;AI?<(l`jc{loMryrV32KzmT=rZ{Y3& z!A6G~f-W|-G6)#^#SfqlnEH>TVBcBZiTzkww^u@7D|O4&AgfJAZh{l&9$&|{Kwa)R zo*~WfaL}qpS++kyEq>-Ym~C{=D|NYoQeWx?o0Gx^j?`G@?{{@$`L3Y#_sdDIo{0Ta zN961|;GAc~GRVA5eE@5LRDvba3eBD3X#KGj@n2e5^+atI%e)1#$NgzeqZLwM5rZB=QY&-`5>~ zw>h*vbp7W$xQDP189q^=-|`D}uabn2jv>=?UoZtZ{=ZPkd%&%34aVGVmG)O|K!iF? 
z8=e%-Qaj~!<^2I|=Oxnxf z#E^0PYk)Pyg{L&0Gm&6%5azGIBWrwHrydUSK+hxfX}*M;;CBM-LZV9|kxgK>G7h|; z`coj0(Z)Rlwe4r?A!~nP;3#D1t>CIOsF(iX|C9 z4J2Xw0MD4MSXoq6p7lW|-@V|qio1cd%eLJpg+x4ETKKuF(tT9T7uQ$l(RS)XD!kLR zh8%6YpP%L0U4b6td)mDNSX`ix}*gqQIo7f#!s~(FR%;pizkIy z3@&hZTm0Cg{0Itl&YdDdapMDYKU@yaB6YNZB^_Gymz8`i%g*aLheXxQi<4(K^Ee?j2lw*^V<8H)VUg!vztMHS^g8^c39g1R&>`R=}c)KO{dRH z)HPq1^%`|wxI65xxAJO5uulW%A11~VhPxOPdP=q3z}-Mm$~ZkJH$%zuQI4^HM(mAr z+lMAnOE>`HkBts4)GN$_7B^BQNFr`3T=+Rk1CqnDPLdZ5@sVVXG-gQy^kt(kEs?dr z4N#3{Ya_KlqaA&Y3Mv2#0C7(E@P~jqA0ujGZX1KXmCM%)6XqFvz zaX$uNf}S$!l799Qf%5$@A_7pHsxSlM%7D8u(?tb>Z53(LkY%m*FjdO$7RfdB*Ng^w=bK6Ii^gGqxX=&mfDgd}3M zo`%$wr66~TfKC4A(0I4-fAIkjJGcES+~cYEQGO0ETzQEYpxb*S>VSgZcGhGXkY}I# zvCj&KVv@VxNp}C=&)oet3g`aYAP|Jkv5VULe}v90h3)v?`C-uDQ&5-%5Zodc<4zXk z1uVV(!AZeMnrnEi%bQBd-`yAPCdm1~^s62KZ@Ew_6wjEh&vyS=cvEw0*`B_S=b?QP z$I_~mR^6n;LW~Z4UK3wH7vgQg03+ov4W@N+K#^j4>NJ8vBRfg(t%b=Hkodno%|l^$ z=TO%m);OgH=+#szvJQwrB%rG3auuL2l0Z&s5`h)b{S4I&P%lvzCn(UM05GT^4;?`z zwm`EqkoBk*1gG_+@Jm>DdEBQuPlugOVh)J7%RM%zinx|?gP_4RkKXCJ@;+s26;E+u z<^!U{=Wc^qt^7bsD-g~D9iNOAIF?h}D}m(A{=U}G|2ejv#G*{4usv(Z3K%znB+rs{ z>hZW-VZ3f>Dd~OU>R<(m2R9p8T>`{?%@BzzBM-!V8w#?TJLg&cU^3@L#Uo>f4TU*z zCpv}FLHKP?5g4TFH~IWIs7rS2OdU;gHB6rIdaFW+j4EFL)AT5c_!K9=#xP345KUO# z8otgne?~sq&EOh+{jK@p4bo;--TvVK+ymq@zHp%qOOF78ABk*w)L77-o7_Iwf9L|V zC*`;5mga^4OamdZN*>l{{X*HoiA=Hm?s6N^Ze{z%zgxT&H$FGr9c(NMw$=)10=)va z=Kzzf2X@6urrSOjjq^K9Lu>(Z|qO#<_3Vuc5| z2n*~itrlN^3%TA3Zv!PxP$ar=;C`xEjF+ToM_kl^_r0&x$SuN28IQ@aTd7W1bu$1Uq~Acs^^n zzKyx<<99!8%h)C#!MOcWlieLn*9lUcw!M3IMW?Tqy@Ue|4AE25EOm}MyMl;xTKcONR}`PT zCnm+47lYOHrby&P(Q!1afkk#wEs%Xg8nlM$Jsn|30&^(zDSeGf+j}`1UYU4#LBI7o zPv1*!dT*pYJAo()a_yj2BcP21fijmgK|eCqW_uSfTQ_>7?_rBPD@)Jw-S(}ov3%nP zwGZNzr4#_%{)F#&N7${miwF^jW*1M2a?|-_oAp}%NpfL(GV+Rh${kcDj+$$_iwZ!M)^a?+C;8B?0m3;r~lm3 z;I!>{a+{rf_W1jcTf+`*R`pX6=%yMpox#6I2PZZ3B(bw!?TgNU#CZG|*||&OLdETm zRcrel#uTsaWBqm%WepBUKGO3GH7DPWVcC&+=WgjuP6I2m7f|E32L8svh43u1$FGhD zbW;fKH{!>C#I>297?jl$qSZi!SRcdVcUIk1%_z5|FdesWv;ZEaC@c8`YZk?{k7t+PyHq?Of zaCv3JlfrEffKRT|;Xp|f?l4%`daiwRrKU(y`H|ws)k9~61mCUjiGuq9C+r|s@vyt$ zc!o!B+&=KOBvpUBHb>AN^PqpgEN2eh#I^K)PY?{;A7oLqL&Iq*Y~)p#Vc~wvKCCY1 z1TguVCn!ryRV36+5Z=r7CVM@Dvu)eogh+Gj!n!I<9>*)3&flS0TCG5vMDrvqIa;t3 z*D_EK3|~al#usQQbnlnRZ*_BeOHN6ey$gMM;zZEIjB?Y!j1f#iY8M8Uj3$#Sx-M$05{I!9*;3P(Cj1sC1QSm?W@mlQF zfvJ5j7>1?$B+NdR?$?)E1VDG?8-Sqn01s+C_`Qr+6r@ML%x_G3N&iuNM5Ey``|A;# zOTv-2)tZPVlr}KT^bMw1Wsizl&bWZ{$xLM1t2R?%Hg427DeJ4pQc`-nJIRXhbwpC4YMT zoHy4|^?3^OsIM~uvQUl80?0LLnmwn0kO|5zUr?Uf`JAV;=%J=Q-3MqmOvw`UE^4vZEv}b zU!tto^NTmqcL=saj7e%ShQDf##FNef7SJ3Td_ykQRy&h3E~|{55j_ENXgg=8y zENP&ir0xSLk}Q7an4?nncM)6B=RL`e3!+84-)-8YK&un5+)pV4h#VgPq zCLkrMsKi!R)OyIWJo#u(&aP(#;s@5zezY!=C6B@yT&po;UpY*1zIjtK($`(_VNuyW z#_6*zUe#*f5>}~yQ{%?$SE-Swi&y>+C>p^pIHe99d{gEJxAIt~}~{ z>+ify~haNN~&o)NWn>xCfNM7nbgM9}^>-xeon>HhSU}cR*InLKrdmcG`=KdtQ?}r|7c&u`K|Aw2r=sNa=%pR)`wSQYs&PEr_Y)LwWIgyv zI#$-Rrj>~f+@mow_8*F?lLjQE_8%y_bW*q;T||y8nufO%qI9qkQ2wJA@tOW*4f8=K zj83h3GVQl6{aC)X*UlBcnd#0Dct$ClCXk@-j|YkcD3_t^35afAT{ggzLf6}2>&d9MI^a+Mv#Bu^BRxW62A=c3Aims^B z(C4X~^9ex>d*e}u*6omOp?sI7<`2NCCA^2UIt*8c#Fw2DC}#6!HpgC z1Knbw$n<^Mv1p3~>v#KFzFW>dJympzHn-syN{q-)_=OtH1(qOK80^^-v6Jj+o+Oro zgHxDB_@*kg_UrmZL03n9PAjQ%V&!n$hO=5LO8*5!)CY?N@e`<{FUtSER_5=11_%#1 z0&V)cqhX?m<{5Z$_5&f<4rJ@LDi8v9%s(6^0BL=euXv{Y#k)E6w>$mA;!niNiNb;F zU*!bO2Vq5reYURosHe8k+;u&7f|dq>GS+P-Y#ImTp1Yd3L9wb4uzpY`wPkJ z|0|#D->B3EPVouiCjSET+~3M69H-25_Bb5k%x1J~j5^g;efhHY>dnLAW+wrN5u0&A zm_*Z*mPg#!U#P%TWWuaw;LdFle6V7lFh5iS19u%jHZIg+mLv?pn+}ZzDLPP|5s|33 zAdf8S+|-7AsMCq~ZxByy`VKUw;=jD}(qx_o2|hEh8fYeSK#p9yybKCjt8{$eD}g)Q 
zHvEa-_zP8K1>HcivZ{bIegRX|L5>-Qthw))%SM%gnh{>LB7&pvDnbVk|$KILfnO^m!Z6E+{ z#3CVgz%;VKj1Iu}vhI%<#0bJpBh!J}jBo1Z@8WM57EAB8H~+wIZ_7w9Dln1j-wa}{ z>z@OTOCoriCsnqD{QK1xgWh@Tr6{(gaCL z$?h?*s1{a1*y=N?$l8&Yc%BAe-14;;NuB-T`DJaFlWzsBGVR!sdQela%wD$Z!TP-i z_fO-MiFHJMc1aA?gB(#M$A<3eI{KMcb=LdY5wmU?eS0>Z7uS2?VZR>U<@650LCSW* zPd)yOCs&If^6UsdfvBkd_Own>v#Rj4+yWSqGq4&4N&;O+#!fy05`YIlaf-rhRq)4f z$n4Pw!)-$v_D9;U6iLU?>#VBM+q+lNjcpi_?K(>DQGkm;f;Aq;Q)>ny7&dymvwDi_ zp^+ZiY!{1(91{kZeNx>O_)XTYC;IoZv!b3v^))DM?$Y%?f01qW(&^LHyY=VuBOOB% z_BZEVRl8ggzdekuFCK1v`rQ4vI~$ree-U0N&9D;VXp(0synwo|&~Lcel|TxktrIy} zg>E%Ky{w)h?c8fTm$X<#l{MDbdIvA>AG0$CrR(_?rHC2Hfz0r%V>u#60Ig2%oB!=0 zQ?mO|<*Dlv&DLCp5MA|n386mOblgSjb&eG-2I zGy8=$0fMjc3w8hq)@ijoI_+6FoIab}t(u5^$4SnUOj~|_EUvy|(itm}< z^=Xp|HxVC2X5m0K;#>yL=n>J$wW661VN>P%0m%fU-geqN z8pM)TlqFn~|0#f;ZKkoF*qn^avcQIx3_yYoWFc@SDJaTdKOM867l*x{uhg9@u;V(y$&$Ap5teZ3~htZ;9OHL~Y~(z7<{ zE&?O45@g5|v1Fu{1APQRNpe563?Hg}#CW~`W*PjV%v?Vc32LW?rGKwnFBXm8ICVa* zyRVVB84%XCnki*pd-*yOwL=1hQb4Ul5kEsU-rwZwG3)ldCCpi2pSH5=@X46GXSV~czOO6mv^0sPq?sncPFR;Du!!tcsE zaz-BpkC%QEQMd&hTTd{Zv8|b0SoPA@9L*xNb~?k|)T|scmIYF_+Nc z!7=E`FyL8Lb+KqX=>o)j08C=l2h=iM?w&0703zwK^rL5X2QRy#`lKogJ=dKg9X%p& zHfSYLd)Wg4$(2}G9jQxz#$Kb48x?}TP|CCj%%m1@QZn1Az-+NThAAlI-T04QWYyi=LM7NJ%Yf&#lp& zuTR;#5?w22>P))Q6{Kp{ts~OeIj7D|Gnm=6=qK%KD7oGLfaBV?t$}hV1m=xFRn&zq zlpz9#8fV7Os@j&HEgpE-FH}Hz8uySCgOS0E`Lof09RmFCDaa8}g9qx!5&bIsv?{b` zzcQ;a`t=XLC8;+wTnRm>vf)$xn~IA3=9r&Nr5CMGL@>=#yse`iBe|f49!qsSMPdSH zkErL%@@)iK`zZwK1={^`vrJg{+dMTD3d2kIcpH6CK--=38~x#mx&Dhszs0aq$8t3W z%S#fvf9Rxcy8kS``Dt5Q_}cvR)ykowC`bS~Uix={kc}CPkI6&cOHXWB3l&Z8cfQ?; z8x!NH4cxi;3l)#9gJ88~yM2yFgKAC&9vwmheOXw6D8V;hJHhtH1 z^~fFTDLHSW&mdw2;48~I#TMfnblG_eiPN1}``og2nq+vqU0rV;S%K_jX zOTsBbdQ*VMh5ah z$R?Cpf;B)-Q@*Ri5(Smz9X~TF-=^E!-?cG3bLhLi@_GNump8vs%1oW6GJFDo!}*k1 z55fBYer6U5%+q)qS>gP=3YW8?n6@&Awv7U$J#;_R(gB#xr2tYt-A1(bl>1Df479xV zETI`B4fo7yYJR`@J@7X@!#jCRrIaqrKe>-5h38l1Le0{k+_uu?Z|OJc-CwAglZGH1 zbHToqFW1HIogmLF!$pcM6(rXg+Cv!LeK9cl)_N9fND&G;L}eW*IRazY!oC8)zZWez zLt4B+0c07%D*)gwk?y{ZkxkQhE6~ENrINe^o0N1}ZzSwse7)QY(z;0{n@KH~BC`wC zE;eZgl{;tg&cu4eklh-Cai11|eH*H_b`^cvzHn;I^$RZtuzsEfUYK3?-cQCEJ&;4e zmdoIDQ4Rs1V~{MdmDmKW>*`S#Rk_%+arXM` zr-17CFph@p&~hm#4B4mFw{drwX>{V_oxq#V25Gw{Ggx;p=#3Z;`(n)Z7iyR1B8`RN z#ifJr&fDiHpjMYy$t26eFIG+h?R#lfOqSiI0{R^d$N`3Aqh?v+B_yGq& z+^IWnzEuE5*Dt8u8X~M8H>j-l9C=3T-tEx*ist5i>h7k|O${h$74koZqF??65reEZ z`5$Qg@93Sj0K%qgZ!ysPHA)KLEF1-J&|M8o=Sm*{M zf*F1YW0C4)^wO4{+u;MOm>8&5j8Q;#+}Z&SGw$ZPlYRBcoUyzHxjSHZ?m1jAF#87t z&WUgP|He`K_ZmM5g;NzUSHxhorf7@{Db^sO_LFK&u|Y#+QlVt@tDotr&U;7Y&zTQV zohKI6!8y-`S_UQt<{6iPuMF}5)Jd>sE7jPzDKmDuR@f5t_T)92Z5?1lPp6EQ`fYCk zk)r#*`Qj;Q6N}4XhC?85{EZIMw8B8?f3c=!8=!VP(;cR6s5%h189wam?#|Y3NOcYW zPBl&F)m_sqopf?ANoe+Vcd(Ins&AwENGt3`?mdh|r2wrBN(Dd>iWjNZwIx4NY>{pn7}lwE>|XkK!fu#>nfsU*jttebL(~9@1SE2%Pz^Fc*nz zOaZBlc96}KnjM{b$kH1cof6<4hRH*e8fdKtaW^ZjKm~i@jr&R6hUd*k*EZ^Jyztgk ztTGU!5!YF9aX_7FuvM4&-NiJeq2-npSEQ&Cm(jv}pnKW4^j53jv)q4WW36*d5cw6m;;W`Vzf zS9Zc*kLlW=0S$#eWu_fIL3S$D)AE#hc2(t4@NU_6s#(v203sGG;Hq-q=Zt0EI6D)O zcjbWmf5zYc|3qxGgfq<#uF&Id@mH-&-`N&;#*bo=>v6iL0=I8`ZDU@;ckh)p2^N%3 z8?s>S58pe{$A}M&eA_AMk`zw>+KtiD`neTc3kYry+&5O{N{68 z=3<;o@=5F_N~0JY)Jmd`J$8p+MJ+&J!`lFTc8Kbs)jK<+I+-^_T?;EWm2EG|{>CS_ z6cm=(Zcn@t{CLMi$R}v`SphB(K(Tl1@j>2xAl)njJhHVMbX+EY!x5aoJjyO~u3p2M z)94g?K`xfh3^*n}WcBD!SaSnWfHn!}@>)zxE%lB8tzMoKnXcMVp{MlJ^HOon%j0h& zAF5oe5m7Q1PDL$gazV;!a5|Xd8|d0m;rr6V&q>6zJEI`EeSbS+vgv`c%erIX%9~bH zq#$Tl<`@K^@Y+$!#l27XWv4n~`0x2g;YzMG@iWm7b{qne9Wg`e!?Gt*yOVq`XZ6^9 zQ*QP3-796zJbCqF{h$E;V?8+!JLS-RAggv-Lj2=zM-xF4sU;9l1VL1*Gn_P7*_rK( zYroLutvNldA^JvQ(@Bl&uywM*H(Q?vQo)1&!EvKKgKD&^(Ec5al*#O8@R_D1-Ttub 
zx@?a#Mro5SYRUSSSIWK`U8gVg9c&&Bzo@ea0s((xGO9J@Y*U*sc_KtP3}cxSfNSl?wmkJya1>wWdC5%w6 zc{yD!sR(homoF3h)^ubzAPTjl$N9$3numJL7+&Oh4f&(D9a2Y6#WnYRJ=V4ZC-n1u z=q=pv^Cq5jQ&T~WiYRN;a>bELs#ERkch20T35a!0i>^KhH4uX3Kw4VSzr&op{<%`g zx@j-xtNG1&ukSg$G+O1kICR8a@wBn+-L*y*V><-R96JZ>yXQqcj%=vpchZgr534== za8In*2M=1O41iWo62#{I4c&&S8L)M$6A4#o-Z@{`BU`6I|p&2NdbCwuqQ(yyIb0nc zOn8o)Y2Tv5>N(`%&r}t7pmh+WtCCnhAEXZ4)%hf6w&yv2q%wQ|@XOSrhfL;CU`(ke z@Pv=Eq#6qKgopOMTo<3soZRE{PLRLEd&o&%3Z#ny&lJM9GAFbr?tL9^>wgq| zH(AE|s_9p?3vmXt#w2dhvRMT#-~K>r;ACX^-)6Li7D3|fd1Lpg7xwM@wCm#0i^mcw zR%o|V_fi>=RNx*jZL3lNL?Bgg>QRdx)QeRcfd+F5^kfwTphunObO=p!eR|;AfIdW6D6CA z%EaFj&3bwc*n_rSvTW5!awb%>d#;iH#!xla!5_GF7T4QlEULo1&d2L&k7z#>FJJMX zS(b8ugot=DyW`LD^xKh(!aT@wfbg(>H#YNcaD`6Z0MWmSyn+WezeoJ z*Wp=#j*q!*&kp_CxU3>b_sio?Xzm1WOpsd!yAHKf++;1(RcV)&*xO#uG3cLXoVIm# zzqjY)EvD`ac8bIWsS8dul{NLVbjY$21qW;zI$9j!BC$9rn^+pYCu&KHa}8t~HVk68 zs2&hve(Ka}f(|FMh9TgYwwZNdSgW)U%|t|jG`OLOw=3uj&_WkM@??V3=h!_*7Il{i z;FMoij3Zx=#QOSbd&TYea`|E87-|}?2N25@V$1NQK!D|pFdb)jpu>nhKN}Gq2l?2> zZ@!EZNv|xk?Z=I+`(E8Mo&LVr`<#W~rW{PKIw!RYx{QhR4K#0?bJJ}Zd7D}{Ns}l% zCe(8v|Hpk2 zLIxpGd3fqEjRG7cAzzjm=_yvhA<(2V=Q6F+1G~buaYE9Or1RwB;ucG1-9@QqNCUa( zl3mgLe4Z9tF2mu-N2|m^)L>#eHIE`dcj)m~J(yP+oAIus!^=5&pX}x9u?K|$ZkwQE zKmjA+j~ksoPli<^!l8>^`ye`WkbQaj?lez`O5g3CV%;L7x69b^&5w^Jqi-qSmKI$( z9jvUo6<*xE@N_M*v17DL*Pg$>$;UA9iQQU#|$Z0nS;kx+y=m!iNH5 z?P%UQvU5TDTGpQa*3QZYCh3Qz25~>u4X{k9g8s&;>}zQ8>*|p@M-8Hmbe{@~&%p$4 zytgP&%+wtMw$Z9p)RUTTR!@)wvq6&b9x=M(-?;nq+BkW>_uBIAorT^5rc3oMna9@~ zh^7AJoK}9)bf9XEEuDvDO%`CY9FNLr{E+y}-hEEcFyRDYvyP$ZVuM+=P^i+DFNg~K!Gbl#Z>;EOAV9yp zZDz~fCkT|q#1#r~8AtuaZc{z*y`w|IcfA7D2B~PbfX|d$@dQ`*YYA3cP9+wZsS2hp z`~Y2HD?SWi8ZluYci=TSZM@z85tj8;?`K<|@zdRRj*acD@$C#TQF>$^5J39?vHCFi z5qv2vQDCb)pjb4k51k*vM5O=WLoLCNw@7v|P-jf{2+Ey?JwThZXfHkT?)~k=v zVg$M>VdmT@V_92&zud%&8(0rb%KL#ZG@z-<_#E3aPr1cUu25!7D~ z*%Q0%o$R%xBIoZZG{#mOr)sG9adZrp+-y>7OW^5 zH2MkBL>H+}>9Gqqr{LZpu@Fcg&_XMWC&{42>U`6b&3L%k_Ur-Fta9lGg_*`G5EP6^ zB=(LX9@}Wh3NS(oM9b?Vo9y%{OhCZ}P9}-ii!&&Str!r`wbS>$Xu7JqZ>4RuG9TxI zr^$qA{?ru{8CzH&e8>7;@Y10K>shEA`{>V10LkxBbwtDg%hQBF?7e3dfv<>OL>vYWr^*L`?sxDCLcCI zYx$T-@$Zu0?X*b{L^|>l*o_srV8WLc?|>&~H^3u)0DOh$;dwWLF{+rsNLNHltRBsk z$)V1@pgzkDniwVDCg)SC$-#C-NGXe4Cq9NteJtX6eV4DLTEa#zhL7@FS#N*J)0%43 zd;=i)YhYM5(~Abt7_Q7NXiiOiDVZ{og@`BuzSnZz_~Hl&3EPGuS|JEPwBZ_fi*bPvhIMyaTSBJ^#2@9A^1m<7&E7WG9?j zxQF~bkFfk2r{%y1q~zL;U_$*L<3jhKwaXT<`YqA$2_EUhq zfVax@3_-YeTo&r|?~ej{@iDelyn4Mm)E41YC_BLe7* zz5EzxEDZ#~2G6yEHVn_P@b>9R_l?oJATf>G%EG5LbNzQ-6)jRDrlJrQQ>-zf3g?F^vJign)lBItuJ0*nQ1P*h23tN=au)?@hc zcsCIDew#8`#8&5AVQIxyxDG}Knb~z+Yt#MK@%2QkO4x%-VfS`0zu*Qi!qm&y5VU}< z=O;e~Hhk+B3V|WLAkn0gpAw@GRv%ZFg9;K$z3%IzRax2rjP?&{!pK*XjAeWb`@oJBV-mpdM3j!9VGfID5wnY6&BHL!2w~;zK8d0+1x@HqD@I=2h{4m?N;Ej%jKxu+bY>fMY>4vjDpmD<)~~3KYV|uzwtbO z1C=M~$I&*YAAxUPSsQwgYt`yao&hBXclc?bK@2Q{RR5lbDs-VHU?ZUt+>`~23J{91 zqQ7$Ef}D2<Wnvf17=d- zZx_YqP`C*_-UeXx8qf5LEv0~>*BuixE>zpUvJELZYwdQRo{bi1y6@eA`X19xo0Z|Z z5WiuH&~;jL=O&^s3mco&am`Wn^zL4IPQ`j<(#6Ls*ag>(3k5i?Jn3>Y>xH4%G{uU< zol#$WL-{)0W8bK>m0!stQ2fO>ITZv&H=fcbj zKlkzxrKnsJ^mCW}P_TOh%PS7O*c!qp7#@rFRG}(k4tM9BH+DRV)n@ zusR>@x+vL;_vpT{ut74IZLu?3(@c5u5H)89H28>|_voO!US0X=W2-RZIK#&f%?>p~ zfK{}3soN+Joc;sU>C+Aaz3te_+k=$MxhDsAre;vIPM!+=jO zIQWA8$#3VA4;#Zo?iu)oGYa8cPi_Gqp@l%T&v`*8bkH_1|j z!kqyo%L?5SK;yxV&_D{n=Wc>?G-M61JF3gr<-DRLXnkvkWDol&5Vnjx$y*XgMa}_J zl#pCvfS+>ry8fze+hgU!oo8DN9wctV3HqY6r|_L&-6a{1Y$N)nZpKna@&lks6LhlD zc5uJd#8yO_t(3*c9uR}Wa&^i}bGBWI(s;uvHC5++z7w}s{X06QUmh5V&;loSWEZ@C z{0SYF(|b#`3Ve`mp-Fo1VvqQ5CV~l`YK*WeNsZpv@ zl_Dx7qS8bKL_tA_ib#owh)5AL5R~482`DH5QBe^h(rf4lNEeVAniOdXB@9S1=6RWG zt-aTp`<(OR?C;0_8EwQq%ttygqIgt|#1^Z)&G?#b;n& 
z&ac>W>3a?h339RUE0%O3D}bOtXinYrg;o^pWhb}UZeA&Lr19pmM&PM;Ur#GUUbkMO zu}aWLF>r`99$T>|Uh?mN8s~O7UgGO+rU+%6eT^PLw$R z7{qvvn~BQOqm-yOJ+JS)F8F!XYBcmkjQj_5WQ-p}9ofm9<>HOWU9T&?US1Su*kyeA zs(G&7%~jN72WK;dy0O$hh*?&F;}Mp@7D3$V+~|5eukZx6Hj01TDdc)8#jH=c{yX2b zfUlq8BM)-~K%$7i9OEeK33jp*5eJZ78CsP-iMeTYq2!Z(y7T%#PjlYs8}7>A!-JHp zWR*OYp4jx%`vV{l2aV$x+kxwQpN4L+CBaJMd76u-=G#;Wnuzl)=fOSok?H#<)>C_y zm+IRC2#0`WrNgfwXJVkYRjFJM{%*3E?Of!|QuTrR^BnpOwd80#bpjy3*xq#D*es!$ z+vH3fEZz1l(}vLE?O|tE4enHpf&arMsgDgmH>&rK2k1Pa>wvd)WIC~kh^{k6z~4IC zxybKs!qi5tO_TIl|2l=E!RFe1i|@=sE_bWk+A}y@typAWa`^>@(s&r)DmFFx zgQLm~WPrJbCR7TCj0MgSsP7Gaovc16g+^J%za=T3pIn~(Ycy|xAsd+lC3pW zer55(IH6^i&^^CL*5kr-N#F)fGXC%7b@PM8LPrj9UiMO`rRsEWu7boAO=P`> zX6$J4tc?&r^<)XkPJyk2wsn4^Z9d+0*S|HVy?8SkAG0xw`|efou4nMY!htv^c3Fpu z(~`xg-nQ3o@TbiS9Bw?|Bx0Dk&=T4eg)BdCJ1gO2oZG9Mkf_LT&T+6B3;h11N)OiQ z0|i0(g?2HAohI`1HV%9$kXiM|F;$H~gh`cvni)z^&xSzB0%f{dboxVOT z;lhXEl8erxR>yW2adEFshNGHkM_=mYjaAqh6*^vZpTFz&JY_LB`RGZrFY?I5UG#MH`Ep!`N zD>?j7U?<*qYR zwx%+Lv$3oECou_Rs_oK-aOFN=oteNoPvZhc1gLA22!FG;uuXlJ_G^1O&%x9SeD>a4 z?{a_gw?ZdzR9jQ+77CiFCiHN*y>QO5JuwpAI4~FBfeXcOth(LAM(S~U=a%d&MUqVp z7o&Kd0&;TDsKAbW$L5=0mXG*&g7^Yrc-=*)_663Nw7IYR_@5}~J-EYOtP||@mPE^L zx3@>G4OjC&dHvP+lP_H!M7oa*G*qocIE=L7FktvCoN%O91K+CFXI5F@^ZD)b_p0}w zShGDhWSeUx^q)Jj>*?B0D8c$KAH`=2eAKJhk5YLaeF2_Uw~q&#t=n#qO-wKhpnq^p z0Ad>#(!kQzW@-QdfP%h-5G2Bt>A*@~b+b2%PSuXn7E81w*~bExq#wJyTaoULaD6b0 zNn@TB%@r~8J_-^lp}br1hv`m?i}t48c#VVp+J0IlV*A@BCuG>bn$mfJlz_SV+ppaX zIf5WpuHU=1umTVkwqSVsA7;gupU?2YH747CeC(0kue+*xJ#^-y*oKeWeEC%Q)`}ie zPm8jA^nhoe<*{R=Q0V$=`O8JD>`I9i-Gn;l(l>GK{2$m|ej5YQ)%ic}9lca(nHuqrjehl*(o%%n^hiFGxA^j zD!fx{zrvRWUVkxMw>*_o6vP?6Ec%E#c#9oWx60O%{sU)q{>M)pJ6_J>K%AhOgTC0- zPIUn>#Q)G+AAa-h&$1nh)2W_>w(u4yH?fCVp2;l9=qpnOiE_Im)jntL4;tdD&g|e^ z@t^{K!37p>PWo6jp5P24fiQt?a)0;Rs_*{i`z;l5B7#3vza8K2(Fz&wY3QtZE_pWX z8f%Awm7{^mQbGN%qQ^?XbuE-$3kmK=ttZr#!oZO&;vpC@CRZ5*BieA}V8 zNhvBREQ@I^NRYdbl5xFuMMQ7|+>`%YCjwLHHbC&;f6`1x|BqFnPoWaO1S~H7yBOjB zcM-z>h3<-RbI!YcJ2@-W{0sbh$3J#A!(xRI5jC`W&SqCevRSA5c2n<$uAtB zs-6d9U1f^4yr-6@#)X1AazmAr0}L1M1CAiD$bbG_&U1#`wc**r1`b&I)GM9ATYM&g zt!>xjAt>fv)(~Y?z@0w0q-r?aeR=Gtyg1Oth%fB(Ydlw znI|hAL8*A4&L&(H<_H=Oh&>03YtI;5<@BUy}&bf z8kQ}=_xq~cH^uz*H%=phV?<#c6EAQ#;>*5Jld7Hj>gXdUqL!9!^{GjUD90pbVG_N0kpdcJzZR*i$OUrV7iC;r2X~%h#K96Fq9g)^#DFKX`Sji4`41)h9irsn( z@{Aje^B0)a*4#w5=FC=2q!b?mM1v8yVb#C~65Qn?X?ZTaPhYEjkDrvsmYC7;%gyZw zS0h!fd70-M^C~GZ@KS>Qp3y({N00PuuD_k_oE5#Cn!qC*-8`cB;P!7TSuXDN{pOr2 z5V#A;$#%jMn838o*Po>y@4VUMdQPFS?e{4`4n{K5nP}6qD1Es7v6qrtt@QW#w~qqv z;WrtqPar#v3BeX5g+Cw)!Y#HxR$HQ^T0f7Q?s+C6QZ%_ZeVo)v9tZ-eEmUYUrYc4l zw5b89f>&0>=~}ws6xUrL)kA~#bHvz>fv2(Em^9x<=a`@y!Jfp;?Ih$W9&T<^T-#Dl z5wx5@S3|)Am|9)VS{voz;AeN0$C5`|rb+`Z*m8;h^5wpJZHf_a3@CSiKu@G|g9q1n z9+!-vnn8mtV7MGVLFR{T8<1p6J-?4T~PCIETq#6q)Z@~y{OHCmNB zyB@I4m+O0Y3XH?WTnEfsirjO2)6iL1=f~(X5u>>1$r!T&E4v+V@RdD#{4>670Qz7e z>ke@VGmXo)0W^}QC(w<$w_8T1L;bbh^v|ZMz0|;$MR=;d7SM8PxqXgS%-(>X`vu~o zsy}DKnUMO_{o4_TI$c|^>T=t|P;t{46p5AncYt5Ks1Eao2J~-+i$WZ!&ean5O{cAY z!`y*cISK~=2XKQlEk7)JbCX7q{hlW=71+OL zFJ*D{;@@t*6gqSLwBoLhlA}~S*ze;4)g7s&x+T!YfIu6AE*PZUYd5~20gnZ(0YbDZOO(JLB`Uu5;eEV{ zd9k`|H$5!zCteNok{*A)rtr3?PyJdc>16lu`v!w z*}0M~FM+%;rBed8ZVj0Xw>h)_YX2#EFb9io5B}0F+8mXo^6Q?6*|%Z+(LTDK4mJ&x zagMelOe1jl`lgbqiLk6w^->_2UjrR3${D7aRR$oz5bHhHU5-=ocld$_)T8lhLIL}O zM-+W}#U5PAuopDnZn4V#1Q$uB+97aTB8nR-@3FaC%>YHaq0(=yN+yf9(fwS@UzmRI!pha0bq@uWkc zu^d^KJepamm*Bz>_o;bgtLS`dZ~u*nXM*Iu-J`dxeHl9l-I7YIkCl7veHaa*k5f~8 z-9NV6%DpOdq0NkQ?pw)YT=v$3kNczciqT^s^>~DJz^g7N% zdka_v2-p+5g8_-ubly83*EB^ zT1xJD6Cf6ZYhGds7G3TEVaK&4G+R|=KiC6RFFbB%oj%Vc$hruqPu~OKMe^jl3U;2) 
zi5Z0gWowS)o&wwUK{3PNMf$jNTU6HGuV>pz_;!i!HGr&&k4=$7(fwrrIER6`_;4Yz zpw1x&{0CQ|!lI)Q+ozTeW;ZX~e7F*O!g^*N0&DLv%2Fhzls#tS1-e2DY6!r-o$gq@ zy01x&=se#jo6I3IU83#RHwuvUtJi28>{@yVY2SO5vLr+^cP{H@$1%OxNwrc z7nM>=IP$~6Sf$|g(GmKU@g@V`*dsit_Y|kVoC95o%DLd~n`ZCRedWi4LMqeJ0F}#i zI_$*}WpcN;IVWvUt|a@sn2}1lm(-VPtC!8s7uBzNv3B_bKNo21F9^F+Gwb!VyO_l4|7iQnEz}cvna{;{2K`aW`3QmaZ1e9uUHI!Yjw^3#HDoMRLd8TQq z=tX_>L)wqN+4Xr0FhV|<$hgS7O&sfsSXEZMkQ;Hfn7p_DrB1ur-gKkZhU3S{nKh<=r<8%z#X4IQ8tr($WM?=v0>xE2MVbpbs<7F2*6nkqcx5)oO~^mC@l3X;0yqQ2bJ=8 ziU3F$_TsJlLPAvt-8w?!s)xkb?v;o%01}OZKQjbbprZQU0R}^T_n*OsjoC7u?^d9D zp-EKRv@F6){#dxxNwbfra3tX$KJPF;F(xz1k`d$G}+<$ zMB-i4%5QXvOHpjThC%Vhk?<`#5CaO+kRkIPxn!?JpnnSI#fw%3&3WX!7^^G zt(5tAH_f3gGkN;d8?CeTgJMbIxpIZr(Jacc)8-Nj+L~wdf*&(9p=zVbLxk@TuLJBb zrp#&bwvppNn;VFQy++E28-EQ(f&9RbvXguV0`dk< z;eFNEUFA32v*%PflB5O>-Zng9us)~~eylzFvBqL|OH_DW^8ycpx&>m}tS*RS3TQ%^jgFE81{ItSG@ zK@RB6gLII&0{d*pg6B8|yg*&Y5|K1z2m_pNln3zTBO;<{#MDWmtAb{D3;>@2>sT<3 z5dHm;1<-V3@c%W|P`@?S017Hp$?1R2HOR6efCXoOV%exJxDBGDiO*DzaO_SLQjZ;* zBlh_JX~B-lPk#x^j+35HIGC3?!*txnTd=v#o0pE5iYHkn^5)u{ zxg>t*w*jP>woMUy6fFPcPgGqX{yYs=oCXx{7cMtFnZZqbCKrJQn0tzLOX) zsCqAXGS^=pn2_J#pQy72&?(?ev6DnN38}UwZXU|om;q4?Xn==|bCN^;8oNq|{u1>8 z(B^u|KSX`x@Wf@&AK1Cy?yOyX9S}Ek3*g;}q+ntLhNoQGBYT!_NH^C5y+~6@YE35l8fnW;gc2C7I3QSWTloMT zj9c3PdFmTLJiQy?o*S?R43a*l>ue^+2hF+*B1}_WhRPTT+~4;7WYehi+7n@>Ks^1( zl}@ud{JCe~)@2O&L468{c2%Ymh}P>))V%Tb06h)Z5F_EQ(c{qj;P_PHh<0&Lr;V`~ zKJu8=D8o9#ddi)-*Ah9`XCRFPoggF8f}1ypt|-ecavTY$sFCnXlB#sXYouF+sU5&=P4Q??X_|mAVCR(OCZj zZZCX_aNgBh(We#azJ^G*+-~66U9ic+I1Ag_)5uk35EA@Pmyq1q1^7TZb^^q>L0lok;!kO3ausf^8~zK!WECcjVz<&jP0CaBu z%@On=Xrzb8M$mO#PZq9;5vV5NuAEd+kqSTLaEMRan_ueS9Ioyzwq2890l?WM(C}=W zFk@^p0@eWZUSw(O%+Re>i!TQ2K(ZJ?d#P@44MTmt*i~uV2xm+T)4Wo8-=C-#2J2&t z5Yia?MGWl-=;{1Z88edtOM3$;%S>>uXMtN`!&wk0L|z>?+9Uc+n${CNNtc+cxuh-| z&5E~oKD0^yp??3D^^+x>3TQZs!K`FWGX@$wiL`K*dvzLmB5!Mm6!?hFV_Vg-2nf-} z^UbzY~zp!)|nhc z&SO=kc+uSJ4$ttpE=gU@&jo}uov3SQW!tnHz2##6!(-yuplWwcrX^(;c5cZMHbLs%q;Kg<(+H{JKT!&aC-kIy&J#dOhGx z=1r)?M#`7I`V!gt+x@cP2j%a&Ees zNC(#oXx%I^g|v|Kx4ToJF{xa{?4;x@aBAB{Skan1Vi7!Mp@}iN-_3RzD=(_GOC?w! zB=I=K4VZ>y@=wr9X=b5oW_xGsQKhBbUban3%JNqisuH<}%I~tp@|76tME2GA$?Yz< zj>gQI6Ks3QW#|wQP(X#o$c*h*!6kkN6zI@DPuC}N&U25F_WDTEzn0?I@b3udp*-A9&m3K6nzZ$ePRWKemxpY^7!?p394=?D|0Aew6d0MGbZ$>7j ze7P=**AFGbeF%?jV>OWj`swG+;>WcW1!5yA?@_J?O2b=?YUB(jyxDUq`cFA}huO=&W3akoeG{HO5F+IpccHnZw>e8)cQe zSkUjP)*|0&dh443d-+`c`IVS&yUhI!JNKyXJ9fuOU@1kTLoplCvn=dfsr9<48!^uy zwbEvK?g?NABiG}eV)r22J|D~ZX%HDxc%}NxFrkdC(n{Md8`0|d5AQ)e!V9N+f2i$0#dvZsVy z_nQD)1r7_Z(66wFdIx>NaY8^XLhoe`o?q&1lDFc4=Lvc<+ShjLC2X-Kq4QvU5JPL& z$1x0s8-UrJ|9A~FYSE$4(n1lKCq9V*ooqDL5g=YHH}h1bBJ4H#T1&sDSN^HpKp02r zHKH%U=|<2h$)gN2lLz%_A%^5*WJjPEfUR8`h#2>Wc$mk>>~O~Aam|l9J^ZNowr*9j z_aZM`@F8_^|7hCvd^gX>Yb?Wq?C|BSgi+QVB#hz!3v@2oJK@SwwZZc`>VK7ni_&c8 zAM}|%87yIIUQ!;d6+UrasY^ZJ-ivRX1^2~O zU9Ajv@bJC&RYXSd0Q<3f$0U13G1SYX{@PJ@P-hP$NCIIBSo;E%p6;0g7%_#O)Rr5{ z$zGVctETvtnzYa~x&+HF2Dvc~!PB(^Nnx7A9qfV)neTN>T#mom2uMpvXkY`Asj*mh z41Cb+2^NnH_A`3IJTjJTX;|D{U_n^1FIS3uveQk{mOs^*effIV|3X^BS2Rj;XcmL_6O2; z)U5h}#=0dg7vw<2*WR0fBaf7b}iw!H|ae>m>PUhItOz=Rdn1dL&+`DD_6cD9>dDbzF1( zLmI~*P7BVX>$8&my~cUk1%1lKkoZron?V~zTArVu{UW?P7K`qxjFm4SSonj*)W9$P zYVaYNwmp0!3)ZqFmv!7dpI>xz`J~kIfVNBOLD7%E04`wMae@NLI|{^FZO5Re4*-H3 z!PR}a)_OZf^h)gF&cWBgY*S^}P(W2<#x(h70@Aa=`9JOY^44g}7s}4^M|jmhrE)aM zb_hQ+aL3YAxg=VvRPtipjg@E0@zG#3u%iz9e%2Qhp1IT&29(@P=br5yo1*P%7ZWAFAs>TA? 
z+Ac!)b|NW4wfyopuePn^fxv--iPf?=rK<0BMK7}YbZ!RdJpn6;f~@D{nWP!w`gbz( zemt_znr;Z-7VZs10eKp&d9y7XHLk-(od7_UM@LKesh;EveN!@O*Vl8257VFGWg1m# zJ>-@AR&aYk06MQH3$Kh>&kuuSS7OfkgwUtb*Pl=Q?sJZJAigAAujQP@y^$j;18pMm zvI~GrKkeHAb2yU?e2?F(95c@G;6g$+I8rgvyzrRZJLof0g_@ZA>0F;yQmOLOb$&f9 z6Qx*bb#auM$tr1VzZYr7y$VCUihhdhp&orQW=mqnceiHDCKjrhiWiPaER0{0aa;*B z0;*4xH#T%Pv0>p#58srVaQTs6j;m{-?pXksEnSVv7!O zVP{U!&@2{6)y8MoXD~qTrXCjxaaFRpj7Y=yoM9j6Vv>i^VJG2&g!d@uz6d*)FHlaf1(})oy3f|2QKEQmP@4D50@l9ic)L4dTq_g z$KnO?{ssI7KnsDrP}40cm{pks&i;L62o((I293!@j18+ACRL^Gk2B>_d}QsFIEdFc z@@cOg;b}N{V4+yv0x%fh#Uj_cKkGAxRoZJTUQ0f5yC))>Y^3M>;OJP@HE~by>50T# zr>Etb6XbzMReF1A1mfqOrI#oNdOD`e# z7vU7_gm5oRkbcJ7<+DEe-xzmO#dRVW6V^xX_p`GKcGuE2jvC&&e6p_Sz9JYK6KWsh zWO3;o0HLIi!n8v@-r@uM+gpMvih;j+8le%3W@>}b(;wc94e@Ks{ivI;HuXFqZhY8b zNE$j^Jb>0=!{uN(5JeZF7(v(v8c&RI(jz*J^H_KZ7BpEZU5_ii`8JX^P>EwzkZ%zM z2))RT4jsj@$F7YADJDNemrdLg4UMeM7fW6NLuSlhKDP8;)+ev_y|bcn*);j9CAmM` z7qV5-9!A{xa$f3fDD-f*hzaKaIgw#kfU)qP7iWLZQ#LNGL5r=_*Jz*Q8fgYcRSl-6wP~W)&qC`{j7`<`oa9^vu6TN z7svR)SVSz34l?04={XR^18gR;G-*p!kKK`-xmofH?9_IPLh8e$%rhEBxxGP6_m*qC zg?FG@@`L7*$AD-2TeJsErK<4V_JKJhJQx`AV^E@N_yBJE94iN!`GAMkf8dy|m7o+K zBIl1=m{>wXlq@15MV2NUgtsxq-0QG={I!4N+$<}lvQ`V#+PS2Ti%JK!lyP#71Yq&t zW~B5LVGAE-PnemrJ8$_mjnluDaj0; zzZ?hhR22ja%Ah+C8WShG#SEyQWPQ+6YoV#v`q_;g~vF|#)WAOw*p!xnGZ!Yav z>o-JZ{vZz~;k?6OGAF+m3CrEollDKw`Ey(ro? zC|pmjf+n8qH5@lkFm~~#byU&z0kD>Yb^Q_+-y1Fg$>*I_YvJtx+G}ty5 z7*=3FuXt;^`^)wvBtnyUhCLjKDjHNbm!6d|bEoJ@p z^p>DrGS(FgugS-d9dRLi{>C)baEwyXVO{nY$;l(!WX1;OQBN_)8jug#7D*$H4cU zfG^m@`0Ij!`y8;)j$$Gq^l4UhTG)Y*@rQ+BqR%e#?m?-dpe#TY{WrWthW_qn6jvzp zudD=rQ-+++#04rgY%N0Bb;9C{@T_Kd>6`SMWyZ1WufkD9fS9WvEV*==_hFZS5@8R9 zIt&%`u>^VDkxh`mL|HdIFz2K%Fb?{}*wJFWOG3?|WyzIXYV4axunwtC0-o9F6 z&eK~)PIgaZ?5QJQBVqQ%zNxg2)vfxM_W)V558Hr_=)eAOV5iq|IM#AtP@&$EI4gQ< zpbzBFetZLe0jh}q=Rbe`Z=}ss>$yhUgDVxnWmT3kaGyR!#+^e4aNft4z5%k1MA#b9 zR^X!2KSnwsI==uu#HaqyXf-u1IqLWBbFcSK_I;VB#RB~9Rf!@SOb{bW6*p9Y`Q%(D z^ldJps=nk-AZ#ar2A;`d>+_&cfETlEwOZskxFh7l$3$Ntji$>JN++90zfU7V=qGB} z@${|@(1-j+hCd;^>qQP$ZW2fj`a9Gs4|$x#P;RLiY2Yqc5xC8ly*B`>Pab`vCr;xGBja(Rb9&<4IR&OXVf&BR zU9DNEnP?FinAu6J>Z>DCN#(wv@Ui$2SX>;)JTzH%plXdOY;OA#QsmBZ_)agTUKiQEg?X_I;0E;DK{Nc5-#P zmn;M7Q8!#GWvNqPYWkmD6CbLTE}kiQEp=Nm?FgsjnpI0f<(?mR9lmz2Rp?FmMoWh) zz8D_b+`h|1lL@?VHC_Zvu(JbISyU?pkYrl}??*NB-G}@n?y8mv%JRmpwtiq|$hY}6 zW@<8hk>L>@8UUgTH)uN-YtQ^p*`H*o6ldTjEzO&e&J%?Dc~`wRzU4wOCIJO;B$I;u z4bto4=T6xDdU`+i5+8=mc^T4%GPcv&Ut zqYduGy=%7T;uhM)7vu$*e0T3q%_P?tP2kI%k(mQ&(tR_g?@wr^F%3PV=Q~#TY*9tx zX1J!(;-tc-=(q7z+e*}dftEBfWdP0Jb`Want@-{^1LdMk9|I#yyz5vxAXT4LC%*!1 zYsLZv;K@p5?aQFq=vdK}lU`nQ^JF5o(3$2nu|J+S`Z0UJukzK!=6xISrzyvVc6g-rYz8r|*8c*0&VBcPsc4HyF+&! 
z25J#UvFP%Lem;*td#vg>1Sgxp)*}Gar2qAT33vfnKDvMY&_nt4t&grg`VJV$Ob1Uk z&ZOdNSahdseObf13bm?Tz5AMuvCDrEWSs)Cz6nP2o$vN!NY{ZD<0iZH3eS5Y{9*vu znll+_B1!CoIUCg!nlK@2N3GbN2*r&CeUtu0ni)j6I^W<$<*WZhiRgd+Idr9YbLdW% z2mhHm-;=j?pb*W?_E7;dSl|Ena?Az?F#4INIYl?=`!>xvzu}4!q82k4GXBP2yGP@u z#_ePikG-*!&K)V=Zf-)F=_2`rC2{jU$&yo#uyERHGVAJ-T8$>bTiAcchzmLChTdO> z8Au%4v>tC~3VrwjYma7UA`X8Mo3l)5lo)@se4x&$=RP6lls3;(QFisf+eiBMFpvIL zetl{tDE4+dXwZHdxf2A(Ok05cIZ)~5ctSG^&Uf!Gvn=RYvsDy{uF&RMls8Usu?Py5 z#@ev(-AsouU8~tU`K`BW7ROc!52{E%Z##Z$XI&rFN+jeyXI};3DX=t^{usb`K5Bw3 ztpvHdTmUv_rHdS~G%#3&sFkzcAu@JXXKW&=Eu`W6`$l+0=|ehuPL0O+Asw!|VSoh* zQ{B!~T#|%3%05A(GGyj?`G5Ra@y{P2Uu}Ly#&QM-{tpO1&O;2UsRSCjJf)e)HdD|P z?*dHcy4V$!-$a=EAIGiNgUm2L47@%X(+ol#1Y1{b8JYK%)omGB98hA^<~KA8DKIN1 zFVCuy7zM2H1>9oz`B3PoIcE%^{a(8pRlBO=T0^I)(6d`P(TAIB4s-2al_K*)qeaAJ z4D1L5JgkAa9l>u3Q~{Yma0>3wB=$c!y))YvFw~`PWG{~@Yo8+H;ViDsZ2lcsKL)QN z4gY-cMlx#Jee0YRetj;`yU!f#K(+qYc^o%FT0muV=XxV?_1{kLm;w7UOv1s-$P5q&pdf-v zG1!@Z`G=UlpC_{X*M&7Hz?K5Cn74$A z=s=_1UQs&w@6~fr2g^d^OscAeJmYb(z`fgiHL8wbf8Ge#w}Az2aV&9wPC<=J+AiY8 zPCyj{e(fcVV~)Er_peGn4;mq-pkXENXH1i9CV~2WQ!mN)@m|>LQbjphA$TPv@Dd;K zPT#6mS`7T$r364WWAI*I3O*K~W&A+iF*@;|$MFOl=YO2tg}N>4`$-%YU}JI)4-EC2 z$-bMssIPPG@aezlgLj$!j;e5j_y#bHbSQeo58sfjGWOJ)e3Vc52x+aQ`tXQOQep2q zi6Oy#sb}P+i@&9zqja|aH9XtLu=Hs$W~J&KuyHx;Z#75?D9WRJ`HuT zV$o{6kAGuFqoF)dc6kv@ZiMnx-C=p{#6N z;|Ci%FyV20WPG2j?=5MpmjKa~1(njecM^Bm=NV^b`pLI03-sC;l6o zCtRcGrZ-nJ!si&>Ya zb&4HKKECUXfxU^m^o~klRt9tnbVhN7*!p32`coEh^&rz=6vv-`-^DMosF1F#R+lbz z_*#=dQ!s~rvtk$4fgGHA1RR_f%&>$>jH~^sPyN-iT^fzQmIYqw*_HIHE>K+}{=8Jw zrwik0pp~pz@FA)^I5+Fj_K1} za#AgJKtd|Fiz#-ev?pjLXnwh%ZfWxH{lTwgerTIbClI^>&lB40xw`L`JHDmNWj{(k zwHZ|$H+EJe`^j@}-twqsb-=R*+8%U0n8DCu8qq>R^;CTbx{{4noum2r&sDQ?BI9Kk z-v&9N4TWFDD$+3x$zCI*QI`AJFZ!~rQ$$Y=V`IB^nLD4IT$2qIJy))!n>5>d$w#vqdK-gkfLh zcr}bu5_)F=>Q~mtAeeaq=woyOVEc(xwl&onvqAoG_l2d26ik1_gkZ%p(z9|4@KOY&vaRD~1$28&cFi!^K53{Tf)J>s71dlRfk zEHb=>I#r)^m#eyIs=Q?a6m8AKna;GN2&U%o(yq0~H-G6Y!8E6m9 zTZn$ridtkdXT$tlXOS14p$KkFXI3T?*gnTNt*3FmmU@Cmv3=*P`RPxmRkB5rXIek1 zzErU@-hO)5?qei*@Q53cZu08x4lGp!Wa1lx$WCEe{Mb?>G+=27hX6^c$Cjr`x;5sU z7bUrsTTD0{uiZ(oXh46Ms`LT^+ot1Mjbvfc6N2z;)+f9&Puu=Sq1hoxm=W!%TV}PO zObb2%9rL$KNOtM|6Lolj9pH@=khqbg|J%@5*g_4kGK|Hs<4aXC#wq}ctlca>CAM&M z+}8F*Eho2#OabPLB6|&5a1a&1j?&C19gBNz8m}e$7nkI?y7rC7 zEFC_xJw-CWoYTU9;2i@igv_6(G^}G_`7H}nW$GHy0ZvLE0oQ4}Xvm+aeQ9V3@bsIY zro35*e%KadAH2BlLazboh8S*h0fRhM+&sb}5maD)u<<}0=^LG%e@8%+Xm(LdzqfQt zeYxZ;mpuyYQzJ@8sK%p5@hesogxiDRdUeRa7aOSDdr;z*CySK%NSuh)`P_M=8$3qWo77_dH=g`tAl%%BtU^by1qcLC$BgaExi zz?(yWczOrfg%Z?PsY6@q{kS_F>|ou_a#DMHTf20qBT^HVK zxM4V}R_@q>RF4e5jbNGkq4iGzX3aj+*^hw><>-fbX@Cv@mWD>#!w0VUUC7qU)*QMg z`(5i~O%-Y|-5XbX95~|959H zc!FF<4$?!@kn&u)jGS4k^&-Wc3y~%Vn+CLZBKxu!uB;fCm4t{wKPz08m>O)W2_Cq; znheTLe=JQtY_%iG`|Yf^cP#e~^CpgSbH#qQJnbE6gY-1`2;=CWi!~&?R-)lkkeR40 zl^8$l8^yI94%AYNs?AZHY!?B>02=(aV#4(JdI8x#wti)Ju2SYpaaaDJ=evnoJD>h6ExHhG9=8K%=jT+x4XVx3h?2RMmQ3O^s|v zO|g`qZPyI$>y)6Iy=&UaSt@wwky~)N>>|zATjes$ z;TfF(FrjgU4kQtLJ4GrGoZ~l}A&|PhXqMjlf2}+&qBQDIz zaOdpGT9L@ZFM@CO%j<-1l+_W$dziB0ykP8OUW#JTwJ1|i8hZF~T;089(Bb7Q{zkeF z3o_)H_gQbV*Gx%IMR)nt*(f$jOz8fqASEsbULqi8hGVsz1lUaJ7pLhlTNlP*dlC{U z$SM~_t|1_4sXhvdzDwJfE>x!ors*iG{GdYKx}dD=g=?YOzOkrkhXOgJpArGBki{9~ z65GZvS|h?uqAE@-)jc*H_l|Ve{J&Ux^KdA`w{LueR7hmsrtCt>USTRBBuXXgR4Q9Y zWyyAnO7>_`DvE4bCVSb&E=iJoXGU2kGhs|-={J2z9VIgA7XN&;FW_8q<$D?_TA&X1r zeqRY>e0-o*kD9sTB+_CvvOsmBsSzEZWrbhgM(0)0+=yYfcdFO)lx}>}OpJn2(yXWJ zB%V}GKM?jUO%>q?QKqO?6%|XBrNpxjIyvwusCs)XAB$}@73)NW8t0DIX20-p8GX9^V(H4_77-XQ91Ed^dKMWi+N~yXU8l(g5`j_esd|&$c-S3d) zX%tVz&9>`zs)VB>dFYqV@}yQ@bh()~0eq;Qb%bA+@5|4MO<#DYIU@IsYrZ^(AuK|D 
zURr+ty8-5iGKZ)5Bkw=EH*XH}{vbpL8^-aQnav2MpJMFk4{@h#lF(BxV>%8Ut2lm& z=jguC2&)6yGzlMg+XrU(-anC@>9uf*1-E8=QnNFXZ^vAPJr6GV&68VC9@L01P$4EJ z!0ca}&rns9WpACj8$UET)!=wD>uYjIcZ?@FRl7GhA8iKtZG^q7#_Q%k(_oKHv2 z^cG(GsdT+TA?ein(CdM?xvOZJY|dhv-2Q40ah9DRjAt~ip??i5+mXBMdKiZ&JDcpO zg}d)}ob2=3TO_ybe8DO6f;PA5$?XyNcQnjo9==%XdTUgh24km4uJ8|lQ8nCNjER01QYqf4L1jB5Zhp~XmxXs zmW7>cjKkdshX`q-dlE9|Tpp1QOTXOiv5f(qH!6qKT%S%W{AGYl^W`Fx82XG^pK5vS zUbsES4);)wfPDu}XJtMyQf@%SMd)dtVm}(A^bVB~qxA0=8{Z$M=N$vjx__4FVD`w` zTq&bm14?+||J@7o_kSYmCDamPR{ilk?KUBJMA=>*GQ8UDkPZsvMl`p zB38zY7Nw=m|C(1`b+X^>>nG=1$8Xmqr!;BJ^#dl1#H<+FeZ>vLqrJgMNjdZU4wd#0 z39`uKKvU<%kRy`_*W5vcx5Z^g=f94cDw)<_89lV)0Pf63d9|!7db>pvT_O()Tz1XP zI?X33&eE(?4eQ01oqJBfv;_G@H5&wRdwN}3SIV%cID5?O*`Dx&C_UT|ghp~)E$R>O zJE5U{1oyyD22bIr2)g&qp?oFVMxgBS!?!CO8*Hi-I7Vs;l(Og|MMxGRxX_7sV*c^F z7QTP|>M`&J!#UFedv;%sy!ZHD)L9HGPJ3CGAsov~Hw<)C$52k;tkwyA40<4z8bkXr zSttOr+B+$ArCIL&;SapjyzTWe+uMgTvn|6ikOqyKQv~&48{+obDZa$idu@qY4pKrm zwF2!*U2Eg(@@JRNN`%wR#DKZCm5JJkn;Ye4rEpCx7b5;Fu8UBza=w#{nHB*nka4=k z!j}02al`)qh9wn?t9ZcSVetll8+Rs~neYe}@thhf}E0-x-$qbEyH%YC+@u z-&sGm>`ov(k&QWEclHchy)_D^kMg~x;zL}(5p0oTfq*j?mi8FnhbO-wq7+ahSS8lU zGZZKZIYed<9a~k=pAjSGlciDP`|QS9Gg6x8s!em4_&0rQ##rFZX6g=)peysY1B5HL zNGsbOXi{DPkZLkcxy|S=svo!5M%`S`Og5B^0L(br9zg8C0mvD7p2L7`w=Q{&#a4)D zVavN(mtxbm;@uL(Lkwx2gif2LFp2J^VP1?y&&sKGA#tl3}+;v1G4nVr$aL z$4>AfgsWDJj}WG>@6Z3OLVfc6Rwoy6B2mjrc=(Wtgu#v962XBcgM@gw-DgcNaQcpO z&~OylskX4 z-p*P^G@XCsh4o^~bV_qZ-R*OlrTpi={?fT0c)9wjnJm#H8Y33gvQ~ zKUES*D`)Htxh6(yB3{x=q#f5*4Iq;EoL(~vG49xPW$9f||7gX{cT6FR-DIv7G;fU> z!D9ZU>NH z6DTxL)LAWb{516TKNZVr@)OhQ#`YqNoiC_CwB1D9(4odOIZ9N-tx)WlkI7`q0KaP` z=JYGOt|Y!Dgk$tT^mqteR+m?uNPB1n5msF})4<~drnKn3$q4}nPcIKZej}LKFfPn| z8OAjjI5KJS+-uS)DD;wqnIEqZXJJ$QXS2Af<+7yj_-zGvfB}ZhJwNS!M9_U8CYQv2 zr}~=EB{nYLPm-pPFB0+mM9goIjdIo!wi~DhY!f5Hw7Sp~{lrHDOl#aX^)0Lpq_m=N z%2MQPZ=Lk4u5Li8Utvo#1P=Pi9Rj7*(m$RhTNQ?BgSk4nN^+&T)1FKv@-jeYtv<*( zE6bqoNA`n>D>1j5?-^C$&aua!m^YdD2bdDB=J$MzMp1;gfNk{(c6|WwB05o8t-Gk^ z5Wz>v)Bfr6pfsnJ!j>NW(>a;S^Ln_QFq}LeM@{ZWYhys($Pu^X0sv^wS36`zyZ(@@3iD}DAe%AzJ&*CwLq{YQ@A2u*AY)RWimHWdNf~L2!ZECHAmwX=1!DhW7etQGo>xawJJ`C)e6Shmn_-k7oVA|@ zZB;^nne|nBbMoh_j0^i#f3&=-8Q9q&>I$OoSM?uZpJB;of}?@4QVm;AO)n*;ql^ki zoZ53YS}igPdz$C@-;S)sL>z`bixu&0exF{KP-T~K1U*(i^gb~K6L%n!F2HMP$NCQ8 zZt_2u8?^?*JwKOd*L=iCT`Envqfjc*quPonqEL~6_h<9wuIoaG!1VeVT^wbZ~ z`Q`Fd(bmLyTgl4^LnvUceSFQ&yqEnOL;T9eWb?$T3?_cXKgflAop0D&?tCUKp1^s` z`W+|n0;eeZ?;MC9?Zdv~S5E66fjRAXD5r5T1Pc;rE{N3RN(>)UT!r?heHC|=VoKUc z>1eG|Ym3siBZj)3{r)R_r@fP7S<-Pf@U}GranP$+P8YrjLUmAwFAf@5ANUE=*rPiq z)1&le#GlM8?7;P?58_ys7E=f_mWuDMO>Dz*8%l*(MhuS-xS|TQX$3=RW9k_K4~Fh< zbzFg9Won0tYql~cauq9#TuC45v7})Mok!cIenxTv{c))3Y55EYo6keo9HF5?`~HzY zI4g-FwGjczg=kms4qavhDpy|uPA{>(_d^6F6c|ZIFMI%3*xU#ih7ssM{3@Kk-KpAgvN$Ae1F)!uLAq5Z8gF8keaT`G-DU z{a)^F*-`)um?oF?1#+@bVW%GYew*2huU5bJGDMU7Y^7TZ_WwhH8ZxULDYe)nF@=wM zppUerGF8Ua)YJ;pE*6)@U#zT*zZ9b-dghquS?PM7ZMkMQW!BMHjk~_x@4GaXXBSwF!o6lFf@jos=$1{lI*?lWIpZQ*TkeWirsOvJk9Z zTmU4biX@Ks5o(1FG5Y|n19Vnf?scu}GWHCazWD7*Kkr+e_-AU#xf_Dyb2J>e2Z}-^ z<*i9m$WCBoUiP#Pjt#cR{h8uX+&)}za)W{P^YF4CAM=*gPbK!XJpANC+jQZV3~Q@s z{<#H06SG8Z`?D)L1p3;z^Iy~fqibVvlde*G#?qtP82n`2$feiN8I~Lpf|PMkbg380 zgz?pbXQJ1BeZ854t-x+Zrjuy8ta^V?m{y^{)B|*Pw591%5}W?i?0)Sb{>6XYZ}>Cy z+|oW1jZN&=xWq`QsAWt%o^1O96N}#pAj4Nolz@9D!Qh|+zyI*F$3{P0ehy9VT@1R4 z7r4H^>yOX^p8VQScYE6A0X%t=4@)7DW{s8zLbH30{zdiNo!Fq4PYCP8lg(dr4ey+U zG2ly>2mG@gg(@FPz4X4g4332TMZNsk^}UugFbT%3LG~rXueLX;`3yyrRWE+a7r82G z5w_O3%xziyi|F0GimSw5z&{AvOmnC2rC}QIJN!tPrYh3in`&QQ<&Mgi_sYLas?*(_ zBAb-t%nnO%cHovzJepG@4?ln7$_o^1P0E`iLL9AN5m|PTp zC75Wro%?|n{d`vRQU*^#(w>rs4!a_tlLv6*t?m>x%;dw|FahTw^aB!cC$mD`1zBvk 
zCc_*^c^_RlQ*=WGz+y=5Xe#pQZC0@?aheUfsr}Yd3h%&B-Jl=;zNkGX{ckI$rDfEa zLV{8>cQCybA5#ZFHot=fky}Da{QO?;TW55OjZ`c?$d(<6s5sD|y)3ebCVLXvqVS%W zDErorQ)-tQTSsh`6+V0@P1eh_j??(^86~#*nr|T_7~+H{bjT&zM?(N4f1ioW%HT^+ zVOwoUYtT;NyP>)ezrpH&!qbJqK54K9rPT)*%6!oxO{PXYM7*XpZn$Pb!%E%z8-<9D z4@*>M@K@36dFu3#XkVv9zI2jKrMR7u`Vc1g{r;Ck|4~=yJ)MEVR{9@R%nnngMgPRQ z4ffka^Oh?g$?6ZhNZ;D!s;&&nD79bRzYoRM`v|};G6QMwEV$(h^$QV;-Nn#*q|SV| zc}a2lt6^8G?@NDNFVqpKuM(je?x5S8drN^#jy;vuv{>j%O%8dL0iL=Ki_G>okI;9rS~OXM0W5`*s21+2%&EZ(k4x>D`Cpfx z74842us?>341;b&AL52ad)Rw5Iu4I6R3;l~9XohXD~j{@WaI)i7f&{skPcL@U!Q1p z54l)4INNd4{nGmvL*LnU+ez#WNO~F2g2E8c4E_>&y))-PjoX9(p;}z+?m*q)@1Jj{ znLiz=?$YZjQlSVKZ&^&coA_yf8UPnhOlcUim`sUZgpgirHbq2pHXx}*FrZtry6iHi zwS?RwY^&CwMtj1GX!I-8F_n4rkSx_Fh5D9Oh;9-sFaIV+QmCRk9HAzz8vR-veHD5W ztGJBe)9jbuo?p`GZWKFXY< z_8a<-MOc-#=2d&2)Qlr4R27BWK6VPZMbmb^BT@^mTWNq zR!6=O7wTdqGhr3YbKY`Fm!i&)^L6|z=EI45M7g=Bc#kzJGSc2sIzeGAufjHz@xI_#688=f)<=h@^qFY zXBXz3w@i}iEPq?l{3B5B3WuaPipj++#ZPAT@JA!dS+MYJt}`9 z%%rY}?AnHnv_~RWR3s+E-5xqR%vJbW2&Qzaw89%V9K>y{CDN@GW|H@@eFcObSc#4w4Y24E{Ib5!fjq zCg(OQ7AQn=Hh^E)QOS%VVu#KV8xjd04W$zoOAF-5r6QL@=!{2#FAS;sYX*j)8HXV# zJVLFZ+%KQf)0bpP9yE>_zug(MH>9(((z{#SWf=$R^nW3leT1hyhe2W&uz2#JaE3X8 zDLHq6Ai}d%ByDfs;!5DvZkOZJwa+iwUfg><>j4LZ*I6XwgqP=$-ZHFahc5Ne@KUS) z?d}rqHnG;5*oaCD`IrMeHX1Day__A(Qt9X+#rWp*KQ!F%gF1r{;SW#sc2g?u{x)ym zKesMm_iU1F`Es&l5aInsA7Na?p%@e^6q#y+Q?hba2HYwfmFbvuG!+tn2z%$SuW0dG zKC!21>jv&xk6-a7&;4XqeEEWH0_YH5u0VI~-2%(t=7S>lR@$|3{PV)=UU&5b97jLN zL|aJml_mG_-7UUl+iHA9-P z`iADPLX%(4VrXc-zbGMWdmHHxIfW1q3q3JtX>4Y?^Djtfx34T;x}qQ@k?Cw}H&v%<~?X#bP-2bu06Z3w^LX0)I}U{`F%KxCSc zC&Joh1=o5yhr!QnZECx*#Qeix{UD|`2Of(-Q6R;%ay==vjKj&o^{*p|O)_r!PNXXYuAz zCEATSv*X#1kxD)K&*TN_y;#;ZJer|-B`t1(?{YZ?BA4M3OXgcqJ4T&tvha2X#A)Oz5SQ{zzx!E*cZC%R!~_Te?1lEt{qLvk1LFT_cTufXNVPu$lx zBHyGw_(-vB@iHVf9y~M~;NKBiVAAhT(z>V9DwpE%OHWS>YBC)gd%+A}TJoU=BRF-@ zXJA}CK#01TRa-d+h+%+q`tG7&3@#QUgUSUnW&eHFfMT?h-dS6xOzI}E2 z43TuRn)~V!ZYO>_m_Nu^m$A^;fr6beKw%PbchW`6(c%jO>qw+_^$UkrCiCa7CbXV2 zlf5mPugo2>N>CQBjqvXei+mY>364t>zhn!<^nm4YE z+>s0fv8t$JJxGQ23*KG(;wBuMyzoU6+KsN{ubx(oEN+TVy!YD@azfJW0>`6imy?E$ z_8i=;*S=*dyJ7hP5FUz9p0>5vYyX&uSpDEdD(5!KN?N=Em6o&5pFOE(J0eFFWLs~) z2&-UTS6f_B3wu6GAohVviVxN7^OChM3t!>696;@0KEA?CV(oHI<4kYfO3=)6hkZv< zx#ktI>4y$!@1MB=ZRVv}ymT1%$ei##OQILAe9W-mEl-+}${y4ai4VKtT>9~wp^dIe zxRjDfbd3VGy{DfMk-3Utff1{K%-yvS2Wt8ci4jKMybl5umQ;MHPlGM1nu|oQ=ylKhkj^neg-b z`HT~-reC1{$Ak4A0#F4Y*M5atj1^~f?xcxZ@#CA4n0NiI;DsBP7bkxA8vQW*GLucj z-U@3$SH6OVwnAF0X=J0-ROV}kmg`P9&Vd&{D@I*9Eb)b5Y^*^vr{UfOY}rh6yzO!F z{nFd6emKW=9UKTWFBhpfFhK-?Vw53xIws8s{}G^swOW z!>sytK{g28(^+QdIQ$}KE;0*@ZuUf$VHS*>3a`EPbf!vuslobg3?ock4wnxK2m(}phIYQ2es=_z)R$jNpXUjhFn}0s_<;D}Q0xmAeen8d( zTF3dJlXt(TG#|~qz>b#gkoNog@fGHt%T*t5#sB_N+-moL9Vf{4dk#K$V(?B9xya@( z$``idr&ap%DK8(E0G8L?S%&{MyG==D4ZrIleOe!NrcT!d=B(4hu@Z-`f_X(dqc$PL zMjsZ)4Gn~ejdZz_7aFOR`^#*MGKb_IH1QO?&)t>KES`I3q}y2NFfcHo{{7nh7lkRj zu2>k*#$c7#rr?AdB8crN=0G^YP&@r?iaGuPk$fsWw+B}O8cSqR!f?=g$K^aCM* z%vUS;2Uvcs;Og3|zo1YnAM^YCnF6nSjd}IZjf#2l@80-WMoSpP{S8K#i|8RDHI$h) zL%TyBFsm-@m+iYoI%kHHdlixw&7)J?CM!hSFQIYpQx4DDed`dIEEw`2?Q&!v7`wo% z9JS51{jK%}n7$FuMBdp7@y*Y1gKF8Ye1r87@y1$Ss3z5f?seB`7)_o}qM0W2HidH5#9J)*TpJ%rKRugUjKsC^<1_ zr+d8%i!PT(4=X5DUy6^weLl2}j-A9To+67-y(tDWu`J=afO7}qS8;6t{LJ=!n;p7hS&-n^J#FR#*`GQdE5PKA2Tdn>}o=(Nkg4RfI zWk%c6;grmpuHf$0$%Pz)`nivUNBRzZ7Y#>G@#faxk1j{VONnCJYWNz%F`$DRjro1P z4lv8XTCcyTEE)XU&#YZ_DAsqp5a#Nun>G}_#uu}y>VBSQNMW+0#~6 zO&JD})HV`93WKI@kA!>+HplD(a^@a`j`(<>Vg z7kuv^k#&y*&v`Rg4hl?qJvoK#b#2Aa6wr&$CztLyt5Qxzzn(D7b+@np8zqxr5w zK=||-fD&2Gm)S0Z^ua##iEkW7@u!i=Bw+BjZVUo%SJ3<`j#nR}{>v6DWP&$!Ke;U(YAa1!dZ4;pno 
zF;M(RL{jhf^yg_1#Kl}YWJ@ljl(A6~TPJytmLqU<=8ip~wlNHXscl_`yEo2{6Bo^P z(dsTT3Hz)BdMvr!(fs;n-%MB*(4Kzgb+Ar8^e5A_s^*AlfuN+)-nKj6!wp^Gaqae< znuw+?-OhHM*s2|t7dFU|I21*&lSEn!0vPu&UeJ6YrMR`0hBU~)YNCK7_yM}xMKIlDLI}vAuY*Avh=sGz{O@@@kR2$iyKVCOX zGlnC}$;>k3q6sujwS{ZP9&*}wQbW7u^pLS;q>A^oYGp+j8vV5AX;$m2Tc!-kBj~@^ zX1sufAH-Zk)@^E#atI+jLiqE9W2W7d*}S#Gq!#D!{a;Qm!HJ$XRnkS=tV>F zcWnYjWt=0CsjhpeS>^Fhv##$g5(qv&qb3voI^vIPqB)AyV``3B?_-R@tPc4UZa9+A zM&gIcLz+haVv&}Yr?v2!Z<~wYwQiD2lM=@*+?<3g$SM#u|8s76b0fD5@x>1MG~<9& z1N!u_t?QM|7%{iXO7Yh610SlB56xXNc^IxCbdqZmUICfRMi-~onJ(R3!iX)$F!k8 z*wse>I(35oUMEIgYvQ3+_fklY@8i=vSR;5hz9E+>E|Rv_Tl)~(_aE?y$8KLP{k>Ou zH1C;G$fE3lVHHAGG@J_y{E#e&ujyM6)D#cnEygsD*H%}$h>rfQv>0CFAh10SWZkEt35tlvUI8K=LNH$?^y>}qv>UQB%#S^vU)SgzJJXuX8urL zg2eGjpM$%OzWT&QR0uab{jL6BXE)9!`1bD0hs92so;r+v_}F$eAF}y$W&&o`xfese0&#~B79Kb0Hkk++%T|nAmjvSw z*^$pDV>b0pWm$|FPeJiLn~2W#1283`yy3bp8EVj`H(kFa3g;ThU*AKiL#DVlVgiw1 zxLId>4D>CQrL?FI;hcN*P02;;+!CadLtcBO-2^7J%tyl68rrm1Sa0CEt(my;L)@=) zmYRimc}LYb!rjSnZ)46zot*r}HPc_WALnrx=>m5NikmC2X*nF>a0OFLnQ`E+;i*24 zeG%2s00r1{w}w??vM=*Bm-K{L1xsJtjrp1z&fHy>H!?$OCtgbpS zs*)!58@6>~(1;qye9eHSj2(vZ?$FxOF=FiT9iUk5A5E_^J>E-Rt&?NEg+2oOJ>}~vcZm9Vn1I& zZM%-N;|dU0>|htsie#KUKa6Xv_LO*_WtYs2D<3z#)-WxR!uCn-LXd5HBR(UYCZo%U z#_;18<=tW5#iK&@!x4&RzQ@l>WFnLOe3VRKliq0eAfLO zYzwv8CIAIpCf2tCG2EVXh6;Iswatd7G;DDEU|`a#6W=aA7Qpl+5bF5pY6B2Hu>OGG zIs>Kk20Q#^FX!)}d%8~bpI9q4sImJ#!~W%odC^I|>9|xzdLL%;@kDugjJ5?3*4g_f z^xKy=SN6A*>j}l_s$PzJc=E`eCfV|^t8T)TvQ9l__fLct@l9ctyF>HoII?l)d-BzA zo?JevQwoLe58*Kz8Ozw|Nxe*aL*K{Hqv+lbQyv*^2!1zTJT_98_1Q5mUUnq8z5c+W zWvC+Eo}o$6ZAA~ww5IdcNJySuIQRn$jNK1@lgp|(Km8|pwyi6Y&f%Ab;c1;NY^mOn zzegYlRsS;_^SFz4%a#<`#jv=AWr#PhwExg4MpKp1b^L(~=V*RJju->%v6mxbssgIr zQId?JxA}b6J?T~24A-y;oi#qX1H<@A^e0xY_tBT3-G|ZQse4TCLu}qJt4Obpxw>`I zdebQz=F0}!S=j#L?5ZpjkF{BiGG~UV`rYiEdXhPtRZ;) z28a!>E`=NNbk}i*9I5TwO;L|jC3LSItLXWWCR9~soy?JBGT<0_-+IWb?dFs@dXa=hns=s~wjp83T; zTBS`yjYn_0)Z;ot;Ob0RgWDGuvS+K$13~M<0x3T5;|sV47}ujB#L93W0dX#F$E^0+ zuJLpBy}zfah4>*4f>&?Iig(?vFJ z0Qc7OCjheyVdk?ic}|W;Q}i0P_E~6%d}2?po*ZE8%!gT%_rtT!a+H zK}9Nf$=BZTAboTm)0&yW+A2lIHthVo?jKt&2oXkfffTBJ3ES6b#S_KX;3x-M%S_QL#*a>NyL4E&qG>) z=Hz+N;AzUOij>X2sE4i!X#5hj-IbKEx*U**pAM#=nfE}0ICx;i65Ndq1QfcUjIiAl zEYv+zh%Y&cDnTRBWkO88L9t>f?p#o4Wn>87b!{no2bIBJzl2fS9!c(h^=s&2{ZG6_ zB133|YBlU?D7`4Si{x_C|Oj8mq^|}jm;nf^zEl` zu`Kp(mMra{+1u{&Pn?~ZRy&PUOp^`WMxFrA2G~hXFlEWQlQ{Zz@VpuEp`CQL&DbIC zsf_n~kP?xIePMLlCJfoW>yK>OHn7~&>FY|MM~h_5t%=uVxxO6=_f~g}8Q;~cG@-)b zn4Tv&Fn0}0K0YzB$u0Rz&n>T_H&G1B3$rGnpgCHvAGpx;oXAr6XEIRLQ&f2Wl?M;p zQC=b~YRwc9202z|x)?(U9@rq+xUGS2_^REaz7^s>+`ND8dPOE%`7^K6SMCb&CVg9W z(uL95jwun+1i9-pGc}BDbbO39&>S!LbiaoV?#i_ekMV0SvLACgDiIAlrx2QI@Z0pV zqNk@K`4+bv;KDR}wjz@$nBTX<_#lF{#jneKQzv~pGK&C`bJl0{Dp2>zsMQEyaEswOLGY5qKOLhZfAv@fP+Yy&L8&ot(K?iCu2BqE7?6 z8kQko!Lm|;tmXaQLRcVU#7>*>3c3R>q+&)CP26Su(1TT#^2dSsL@&=7S81NVsLFJvcpovH3>n)W z?O+UDbP-t*gV7;Q?Q(&;D-Db2Sbj_MOL(N zze=Y%``(ke?l+`j{%oi&?D&gHvw#QLg)oE%SQdOuOB<6DE7=>Oo>#1*rNwB*6nZ0| z5YE5b-nvyb24_cTs@z2-@ZZLmGnK*K4n*FhAX9XQL@ho(#|WFBVsxvMDiq7YUxLDJ zFWGvZ?AnH66(Un$%Zitn9OaOFTCs%wUuiy_AcFz)J+=TbulB13NaGBe*= zgx;3Q>NtG(L_5!24avMon3SBt4w*7`juXZWzm_!<NKLorIrthjDXOEN<%#U$SSzrcLs#)jIwm zgUnAWxSwB!NAEn^d51LzE(jF?EjE)cv6YJTkjqUee4Lz!5lOu(9#4JmbJ4*o7fr5`p6MvoGxXo~g|bLS?k01S>C?%m_Q|tIIb`i!5F;$T+0a3^ZNoyY zObuqf3JFkb!bOXuJ{E8G?{+G2IB(&LPBAo}`gS4hpg%4a8d!#dr$7#ldkS>drs(>k zPUEzWb59R-xRtB8zOUSGFZKJX`}J_*YsFN?8f=Z4gJyImC#`Fu$VKp|Mt*B1FQU#p)$*d$RupTfP*t`9hZ7XA!y!^S9vtK$} zp(_0~C6kJs>f7k8;f9Cd$=4$78BWMxFu2cesrbiV#908;dUOrpPlB?iPzSm4O#cj- zA>ceR9$Lcf_wl+{`)+^D9Nx6*4{NwDVjy50wlzlN5lc8N+g#b_4~OxfD=Q3`?1zPz zf)L^G> 
z^hGavkSD(QS3H4N)|4Ed6K#wAaDOW8=PK!hg8WFhu(Af<)M0cFh7_NRb;p)KX`s;5 z;zRQHUlT_@2421Q`FPy^h=MJ<=mV55FzIUFicDz?974VvMOb_BR)+iFqMSpVzz)Gw zxR?_|ucvG<#1vS1$J*Xms5Hv8ec3URd&xjU1*r1?Kmo;P<|K}bwnRNcc8$^Z${6$< zZke``h-I3LAA0fJ{1b@d#e5l*yy3aHM6bMlAcCf&SWR7CMrMtjpWGvbHNgVZj5%{M^sl5uzMv zicjO);ubCEUwTku@aVRZH@hO=Ux>uEBC)K!jMJr*tL5&V1d@m7gaZB>P!B&5iX2MWQm=$XT zO+Jkg2>Xl{o}zhX1dn-k+7FY8G7FsE*TuI|zuUt1j{v^QboaaIa#QHXOB@IJec&{ZMqm7v zCmEpv=Vt0gh&?-+{M_>y1Y&og^5+!R69VB~gbq!&9>d2lBAz!?@UXq%mg?6se|Y%J z@e(bi^U4>mU*8V$D;#4-kGDT+kp0bYWAAQj#l}L&lGNb@=Y8Xo&bllh{QAKJ2hD6+ zRI`0rj>mc3M*)7S-A|>q4_wm2_3;fZu(rleB@RbT#wWJCvT3s&=2#0DczXXcui>G) zr#zfu`w1e;j0oOt|0al z5L2T*>2!&H80r|M`_8+U))tl-6F4hcB=!_pDQ>FEf=b?GAbd#Yo*oR@7eYr_+t+SL zv0*HHnsiSlvw&enh)w6j(s29RGwp`u8T+_3e{3pkms1@nPP4uguCL45Dnf+g9GFRs z?_Jb4CGc9Dsc_mBuw!h@Ok*?8zUzx7#)w%}A|uipzi>BweJid3&(C0g51ZiRkUEKb z%D@@>z&7YnJbzi|QMT-3(YjwQ3$!jLn;8C@X#F6ONK@;IFH-wa3HNE2mHR|YlZjJq zoit_80eIs2cfyGKF9{>|UkM|kckoy4@OWjer@_C+jIcETMUZK^A9={h`Z*@8k zt8|zRTzh{q;`mso*vTV8jYwlE&0?aVN`!?v0=~<3N`!%7ZzU$u&e~)DW&bf#G(&i_ z8E3PRNrDf{1ZZEeCwsI=(yeOhX5}$ozp_;*?K&dqRu}UQ#Xh&e2tl^5NskQscjh<{ zjx>=&e1O$B{{-yR<_ShT79BQ-kga`ogbk$ecQcd=0uS~y`Ob+H29}OY81x*s^C#`; zHrZ@&lV;5|^YXMAPqdb0F>&{kI}PU~BMwcAubiShA~c5Q6Q!=B_HhA88y90+O)oGJ z|Eh*5&%iX255qLi$s=XY9gfPIT3-eId8S_m+m7MM7D>=goHDtzP7H-RvJ7h5V8-Sk zkY`92$rVck;e4X!1fPXhZu=#*!bO1}#}98Ob!tf8oWtf~N+B!j0hpmQs)Zy=`1*pl z!1NB4S2$bp!ZKk@Zuw!3hUza9&9wXBm~)nhczStCi%)cwD@lhr7_>RcOw^j#hMgT(t)eJX1S?NfjSgi&PxAG@|w2W^b|FzGYU*8%;v;M>{evfawEFHazS0_B5n)gf|&r)-iC_I+&Y~>hC(l6tIO*n z0WP=weJ=D~{@|yRE2?C2k8KPT1dz+)1vT;YktE={YRPH)i;Bevs)p$X5z@60ZsoqZq`BYgO?rlWfWqvV@;FcR=OK60C3p5Ld8bvh(1l2go0&Tfsw(xQMjB4Af+e zBP;GWN%%KhetQA9kioVqfOOS{PSAB2yv59yECEP|wyu!QIeGBPei;eYQt|$MJI>+p zx$FC1h7uhUNj~)b?&;-7lU|sFN7W)r`o7vxSb%be5w(s-+f6#-C~=ajozRTxX1C?a z=l2J=K4{N&hhS;>9PT`Z>ixt7?QQQWmr?KZP^eKhYz>AC){+DL=Ur+JYn#l52f<9W zv0wvaM;c*Ew}85iSsVOlBdOvtH$a~ z7MO}j<*zxkB3vv%<4LB!TMarxHv4(t=Z=f0T@SFLhx88M+F-m+s{%gG$ibB2*(mZo z#5BECof)h4G*k3DvGRCSs+?_f5x2m}eP?W6E}+Tw6XFX84X8_{#AG=;Gh+VNfC9Gc+Yhs3a?5Lci0fb`OdmfD?!)>m0A1zj1XB}1LccjU&a zuRERn)JD(7pO0dQa6zcMVqVt8ko?DKW+S_jqA9in?FloDJ%HT({OtCP;vb_|pAFOR) z9f)$^8gU|{!=Ju&3de@b$w(E%!HhTmH(=t@E*g@X>QZ z3^s2Pt%Fr>AGIM;E98wQ{=}}&PJH>cIjx!?70^78V?z;sb2D#TXp3w0lkA7@RwSN1 zx-_*``vqiAfSM=CWj`v4kgzHc4W#SC+}HRAd`VjG-dyj5IE0=DNSr zeLv6pJM=9@P97`m?DtVn61+S`7a)rkYi`IBrxbnW=xBwO&e=%C+05EOuI9WMVHKfwb+(s8tsz%jTY0%@j^NdHmP{C)J!}s zUqHQeNi%sz$%4ix#+bn^GlmncJ+_EpAGgR!bi{N)c}@G)lW4|E1H=Jn$zV>i+b-rp z6VLAtlyzUAK~aanbKk)yxudsaCIbDYKbWkqzX6k(q2q)`>qKkYcq4^+CYRRU?Ywi= z{LtRQ1~tH;!7Js;g37qn*eLWvsDR83&wv4<7Od{%?xKnX4BOOzD+d787DEHZ9HAw9 zBrk@m=c)0iaC%Fey6*+oZtER4&MLg8Jf<=dK-J<-9`hy0Q@MgZgxRxTiOreWqyRCt zSGLXIhT4dElOQbFat}Dpfnac~(+y->4o?6D9UC~($DR~uOwEx^X^|IO;^W#|bGDIs zQ;z+zVGg^UNwz+q^Ag3MhW~*t@}8Z)Qt`%AzRSQu`LLktMghzTSId8$g_?o)8B zxVa?@u6A4V)#W}AnoIz^umYV{AJPZlZ0wgXTw$kDZ_Z^>$MUk>pcB!eD?gXfuRGzK zvGYM3if`JAG7f1Eadn4?5Tlg5k}wD=l;41nEyr$+*Ye(X?4W*(z5Zr%6Uz5;L_`4` z!ztkSF3pjV=$(+AlPWg?nCt5ASir0($pz_TgFE%x^gd8sa4iKTsHYFn52$0gByDB^ z(DW>5%-zE(DIh%gsYCVpRWg5H<C-*jgT=#{L(V}j4C42!JwVaLl1ywEyWpw!Hf)I zV0WNG&=@^w$yZtEZC%|L!uINz16gSnNzrE&pVAijRcBo}rz<`?GM?3&j|Z?FrfeXe zSl#h-p<)!tg7l6qNm3+z1n{;bhe?@et@9kqlN$1ITn_jtTM;SpeC$n%)=9fXNm_Ov=Ul;`2fj%IAneCBBoC1H54CZ1@@7k*ANgQcXDeVp z$|*cIy&6_k`0Qh~RF=O=*NND@tag4i{;!)&C}U?sxQ*1eP$X{Af+*g}eWyj|sUH7J z+`Wdl87^goy1=2>CY;E$;&!raRj<^~myZu_NJ9Vg=^{@v~3)fWR+z3Kbh_?bG=urk@EDpqP?TiY69oR zw;TCe`BuQg2%d&LC+GO2{=ceY75DJ@R2Bctv1Z*@|4x9#3rFBu1wEzOUn8H~T~1a+PE<#8-W1T1eZ zQ>L1UAb z1{$!>|7F|vX4Y(b5)wCBN4x~v6bw)2iE~v8y`-}2w_V6&OUN3AdwQwXt8sEB`l*uO 
zURQb|`{)IvX3|Y?LNYHuahY{2=u;2kcgEe~d zv)uM-8~5EQ$1cm*4NO@|a`%w)p`(D(>7*7qp8A_}v5i*_1U7#>)w0({1zRLhy%)l9 z=s*rxB$(DFYo1!(;0Uk@HMxa_D7NJ9!aYT&Y1cR|cO_!V#wB6E{S1IQuW z88(5pRqfX@D24V<@G7uZ3ZDY>vkPf6C}!ds&12zEsQF zq8*}jD8UPqb?6hEij-hjeZHrIaDj$duHh!+R>kOwClFlC)hKruDAye-5i ztH^uRGb;e^uWNPb+U(6&N5nQzzT5Ti!~i!!CnDU#S{Pes$%|Ip++8mpTy8^~pVPkQ znJcGvn=J34#J^u*J)U`VXehKMu6S*k_H$`#V&8*1D8+C0&s=&72|(>X8C-v`IAE>G zlfTP1Uy6YF)yjQeUX~JdX!jIM)kGW`L=U3BgITmDNm3tn>BR2^fCiAxCD8{$HmX?G zq!7dwym}doEuR9mdzNzsoXsJ?d?5K>Ci++S7=plVN2n_YM`5MioGmd$Nh-fegiBD+w*@ps9;*kc?H3JqlO= zFhCpEhwF()CQ?KjA zE}7DpMS);v`XCUUF-JVYl_>Spu^h!C%%CP{DAAmSy;F(zl|G*EVNP*=uJPE*pV5Nk z6WOrTT{|Im3FM0%JSUnLA0nm?Mnb!3-42eKZ!_-eCnqIp*Y)DH>-$Zn@GBrZM@Ex|F%<<3(2%tQ^KcU+{r;&}W`Aax#nr0*MZsIr z`+^Lwe$5M6eFZgmVm|g?l6Ke@RSu$xrUo6A(vA3I1wur%o9aYsy=Z8{Kk3BjQApRNErp=(r zV!bG!lv<(i;{n9Nq{GN85PUkWF^NXRu$|CzS9B;at9YGhL<^h=e52^y#rVL|g zwqQ0)k7BC2f5*l@yt6od`TkX7vG3>3iG_U%MRO(SY<`%^Q6rx2uf?Z@yQtn}LQY)Z3nO=fGR`(u8!r~b z>Mz=>X4s%h25_*KatxGdpAz@+6g_QvC{uejX}UN ze&v;G@=&!Pvj_*}1h8}4NuN8yGBtreu{vAww$vWl)KY)1#4RT=c>b=+ho^kMqWAGl zlxp#fogF6X3L#QLG2p<|*4%{s>+Gr8okh-5Zo9j+iz+>08rdT8lAv<0>Y&?61iq5f z3TjgSCTjM584;1|oIB-7UX|VlEvZtvcm{~b9?qd5Z+2Cg7iq7i=z-Q|r{Rg+59LXJ zpn4rYVrA<2;2K0YYWykhu@S$^ptqtzRTbIP%kbzNA&8xWy*IE=?cl6ItJo~4jZenT z$}rm+u+mg-vS_fxxY+%-&tf4qxx%Tu_9EN-+?6MLoAN@C6i^Wz1Pu-XPXd&abr!(( zt>gEsM*0A4+>!1p>ICH)ZiwhcHL>2NzYvI^%x4{|XxM)bs@X9X0XeTm#=x4S~+R$*j`keZ}Q~!lD z9rsG}gI3y;5c3y$*cwULN{0o#0WWURG*t=^ZX)`mFC>*-z_X?4z~og$PsG!mdKmSu zvkjil&3|4xe|CGIBWjT!Qyf5N{=g35k*InAvfdSu5s=sd2if!y>#MoC`!qBr3GSop z6GYLK+ngk4s9cq+Js#_PHUsVlfUz8z*NO4D1%4TvFMz--33WPSEX;i%BZ^bY}&#=d(YK zpQytGy3^RU5KPmEu;s;GZBF}aaV{gFxy-8T4JySbGTaCh6JoRe{1x&|$k$>l{0Rb) z`PJcOAi~bQ2zbG@C+Kk@@qK|ED;k;OKT~&T?6~`a`~1Zd{w~LHkReANKzdv-b9B&W z89UpywwpbG6Tx1^#ZZ~HWWhnC1GjB9(8jjKP1-YUe)3p?-b)wPJZ;13Ow0Qz1@|8| zJVXz#<`;_d9)NJ}q5_g#8xjZ$O_Nv;+$!I{!Vt`J0G~6m1Nr!CsB@i{+urq-BkO&A zS(yT&f1sY;0As*8wixK&6O$Zd1Gud!gw{N^JQ*a3!pL<)+d`9AdepwdG}_?< zG21SbTYlzKd~HZu0yg*eK}k{^X@F$FvS`8%R<{}HGhKi7R2J{BIW#!3cVCPi|L)H; zLEF>))PF3%x1c#t=GYzZmx;(wrVoMe;z}WU!kfkc>~`ChJxV`AdudZyk#)je^7@DC z-_OR$hsYujmhlm)p!0-k0UVBS1e@;0m^w`9@*VDJ)*-epQ}YfH9~`KFckDT~oV8O@ zv`p&agXin+2whP7RfC9tJYR*-fMd&p5Md_E+sFHdku;vZ-iRMI1#=ufs$cgm$%*^= z?<_iW^z*j7lvnl^M_Ne0F*M+@U`S96X#!GqPmD32Mxwuez8B_Ocpj)s;P=`}xEr|X zh!(@*xNG1z1=V@w!zhk>LQ$oHnT6{MUs9D?jb}3-yv-t56(iSFq#JzHx~`AK8}V z;^t#1ihDfH2DUfI2rN;07XJHxEdd1%?iLC59cCW6R&PAn0u;}Ne(LAv&vrSqB*AyJsIQskxk*l{$y(RPhmwOYVAwc|aOLz5=QkbR<-f2Y&l= zqB_$&i{j){?UbC{Z3PJ&c(-M+LwD!SfHZx7TwKciPZ-X`3YZkfGaZqQbk26TfT4Pu zXi)--$vrT&|1ziH=W#g8`iHghx=+z}`O-oiTdCD&e}lp4VLN7h4o3ycRjNWXV#X)V zS%%kePGo+RI%w0G?davc>`%+)J>flozTFN+BkzF9$!$ZY$prls}!_Qe6CX zefQw8RLil0-`zzI8bsmxwzoi9ZlJ;v=|Q}~yU(>MAl_g-D`s$tMcys%DW*5M`)g$# zbRJTeFI8(!_-s5TCAqB0H40!~oLz*Idvz7pXc3w zTt>gS`F@9{u0jzzcFdwRPn6V!-E^Zf!3Z5WKWyD+*tyG~_ssoUSI^7~f7HLzT2JBu zr|z*JN9x5279)zm!qbgCrxG2i>JR#8uv)$!+qVD1j{CoQw(@arnj9KFk=}gsy@Xof zOw8$&Fn!waD#YCy%Wms_sb0~6#(a{jd}<|X#Wpxyxr6V_<(BktwVwnyK=p4n0pz&< zW?=l&lmX91H&EY+j0`^1?559qIcISvkSaQTz&pgX_ho;~Xtx(&oK!E3KZ(2lYFC{B z`yCGyVXhEj^2+m)iG;CEL5qSJ{j(9--QWG@U0NC8Lipn*T(jVXcOX&o#xSi$q))-_ zoTFws5U7qI#+*O#L&|Za3uUfW2S;#7-0i$_!|jEnPZ-gq5uc{! zQN2Wtf$i(QiwSDF-`#5t)?ROK2%g>whQTl5dI+<4ju5E>BSH1ZTL&++BEpHM`hSP% z+T6PCS7P^pGvMiC`9hcfAqA|m0&n~8&VmY2ujECOlwdU`x&b8X+*8_>t%VQICF1X? 
z%jB;2{dhe(GHrh-D9*&wnP;FugE38DQp~o-Y(O0Yi9U-Bd+xL7>~g$X!lr&_;_vQ9l?$8Z&XLdTOhC!7J?th+Bq<<;6Y&h#ROS;`rClIs;Iz|5Ge+e!+MR z@#g@>&*WQW5-tSl4oJIPzE9(z}ZD35#+9&XxwE9qrT`Ch%L1Gv|3 zg}C7r^wp&(P&{_x-T}WY3V8=fgVH}GAw)Bo$XDo9byJ^|w(p9oxwm^tyNY_oMv2J% zYdaf+0X&dge{Qr7=~3LmR3+DXY7{>9u|H9*Q|wFFcI~<4N}Be?s44vFDM|xjPz2eS z$Qm9y+_tcZc#3rs7+A8V#Vx1c*^*ti;%@BKiHWwD!PSAmAA4j7S`4I0tfgZAbGlFY zNFTOEPF9aYyrjd}Zt4jzGr9KGxMl!*1m+=0T6~Wp* zmX+oOgMua?5ov_R?g1*TmbQIipE&l7Wxg#=tJZa5!ry8ogv1I z`1#nIn*0>UwSkU+4Uc4uRX5R_$^Ip_% zd#@&7#u6;u*M?2gH* z!wQR>*vW0v-UkXrU#Z-)C@45@*a^%11LX|Szkz%PTU7zAARu+6zDKmV3Q(gvfNZN` zJ~&wNFzB;eaCaGLf`-D6e1OnS0|7!lUOybQwhHsAayxtGD(}t&Sr0pz6AB7a6W7s6 zzZbfTPGgo*>e3OJOO!(&5ZbYH%IOkjDC5fFBx7 zmov+sK77+_GC;kybF)g&_6_fgq}_J2v}e)yTSfFvktDQZATU&wFVV!UKtK?T__g@>(ohT+{d*zBT<5M%t-S2nB}hsCc!99iMCWCiN{rRR(y5 zLmYGH{RY+oFPuPKIz?f$LcbTGTKY%YvclSTgx0>J`LFMM2Aq{Yy!cnay^6H)2?;Ei z^?za!5FmO|`@y-4G_%tmAKuxhU`D!UQuvc*`+eBe6j-5q>bZ%jO^{%wn+j`v4yv-J z(xBtqnR)RBe-*Ed(>IIw(s6|#ibCSSQTDtS5370u4LY9RNxym1)?XztMabjQZvNFQ z)dWP=NM~xbhIo-xUYzlM6J`3CxX9wBrbS)KyvtY^w)G8-nH9Y=H9yO&XXIEB9=6H9grqJLmVX%P)7&!|FSFJFcylaXoV|bsD{uZ>f{r z4{|bWKYe-S5O-z;fcwY?=SO`oRho(OFO@QXNF{V!O?h-f_cniworM#88j-+APw>0+ z0#M!Y6(l7dlxZ-R`sB$^BPCu+?ami7{SK=SX3Iw7HT=e{eq1ZxeApnyJ>uQ_hv;u@ z9O-j^pz`GMIE$c6WAV@DGlY6Td=y5&#QP>@EDyD`z!gaR0~Pl|HY=)Dd7nnZD(Ex<=rrwK~Jb#w*QU=7aIP{ z4dMQDX2pLwGa-vsH_cA{ANlYIu)&t@|8h!LSQm5l_&CKs7$Y^NHs~1xY@OPv5Zg9o`ZgZVWP@CS;7w*D9RLr?eWg_E&g)HCg$`U1<#R|*oHLvZ->vVohPfEoi`{~MH|gtdzdUe^#Ub% z`M$gz%2ENZJ0p8EtvKxJhOwxFo+~}t9)}w$NGOu z@}H?ei4+>6j(-mpa4grP| zs%mSop0ocO2@Gs7p{rAWpnmau0r^Ba*9#c}VR~|P7y;uJFpUWcOdH9-%BTJ^1t1=b zVg3WAB2n2{h#y6#6d=um0dh?Qlo@AV&tv#PMcOh6+Wa0?l zKf>1+dZq=-pSbsOh8OD6=YFoz=IA_Y^ZeOd@11sG#@CIt|B?SC>WSnPrAS>RatI3f zbz>9WLRRYjrr)oym!lA8)n%c&tB|kP|NB|-u%Y4s*7fl9ZwgWCQ5=`Aq3GOnL;?-G zR967Fitpb1$)3#mceJAQA5V*iL9O<7TMKBW$@fA`4IHvJVnS0I=o+p)-kKZf5~?E& zZL!dtJIm6yyyq&{EUj|zex&Y{h2-jaP`j|Ltte+vnr$mPkb6ZEScv1b5#9SL z3P}=ht1d--WZ0A{T(mdMDuRg;+C`1cc_49)bF5(YuGJi{@sy#-W=+@blY_ z$6Ia>zI${)KuYgN{cfOTMxBd!hk+fkUv*wp#o)53PqDBnD0#|rck=F4K{aw^dMcCMs`dve z;~RaW=9{C;15s`V&|Imeac;T=hO5To7kdY_^|b|%=V?4X3GrHG(&0EvX=lW9o&Monmky7K3ZZFv zOP+f_juye^mDO%yt!rz#nLad~?p<44(PQ+@fnJ?f+XeJn-)Y7=K`qpzdT|*4V#M0r zdtCjbnoP+ByYA*P&mVRJ0!cxXQ967b*N9%-Eel4o*9`-zQ<6Us#Y>}l5h5`%Tnr08 zPL9&7KyQSd&%~bU`)%;xv7Gg}@_LIVi3-pA!T=(J;^u8qTi%j=1?AIP@`Zwf5cw=- z6q2x>`d43V!h)-($q?dg-30yBm~E{GX-e#E_iPUyf;%-2rF6{V2M3YBvX8xPH0@VmU}aoF&x<;niQUQT@bn` zn>V+@QJ&WK?a#Zy!dPdHPs?nN-&*fvweq4!luvs zR$E#sUNsncP|rkw$>ZHnQyQ`v`eIB2r5PzY7kJg!om?f*6^)>PxT8*mX>(!3@(JuP zj(rgh8+Tq1f}7gj`sKXzF1zLEP?klPSe4%cS_ns|jQ7w6?9@X%I0F#^qAf_{icc^s z!izmvm3en6J93iw~B?Qw9t*9cPzwydu5=rnuAt507jQqlN=~nUGg+)ziR2>^G zlDpS%QM!HOMExPRwlU4Y<)h%AyznXj3>6uLF7V;AmG>&P7fel6OU-S{6>fLCBo{!? 
[... GIT binary patch data omitted ...]
literal 0
HcmV?d00001

diff --git a/docs/imgs/zhihu_qrcode.jpg b/docs/imgs/zhihu_qrcode.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..c745fb027f06564d41794e9a40069b06c34e2bb5
GIT binary patch
literal 397245

[... GIT binary patch data omitted (397245-byte JPEG) ...]
z(dU;b@jLjF)#-z43Nl)54(igb{Rr>W#DX09dZQe|c-siwWV!367B9B6ZHhD^-~M9% z!nP<^>;8T8+}}P^YnlS1^*JaUYGSfUY806s7a^6cE6ct2wb{9hNjc|{UIC!wrPg0I z>PgBv^z*R#@pH;N4>zY+lZ#02WCYMN6BnC`H)B`cJ4RDKxj|N~(tTd26*WPzCpk0Aj<6TiApxAE zHrnOmw7^(VljZbGXRi`8%n?4ax&G0DrRJ=$x+3CE;VMmmr~p#WcSqfInV%bnumGrhZi4T*<_@>WHiC(5VtS}kWiXtU%w73D>~xkuwG=@Cc4pj(MTF) zrD$6DKEy)Qjrf{=@CBUNk@`%;QLH+D`OAYW^P1;=)M!K0E^cF3)Gc)~OD@@h zJD;FD_q9Mq%ju45qmDj=%!p#0J*`K-j!2dLGYygZ%5rwCp&V~p+C9P4o;5k~RnhZ; zzF~2R4w5~>i@mOZ#r$@$m%i$)#OF_`Ws8O{tWGuM=S^Z?|73DT(;?mt>Lc7-R6Yp@ z+jQ487Kma&G@L-PyJhAy$88)ktc)|6uc>Y(mv6NsBYUzi|6m^p zU7TrUewqbtod|8jTpoC{HkEZJE)_Vv-KTLrByQp!6o{!822Z75G5MqWPi0d_+k&LeS9NYU0HqblQWSBPqW9h-+5sx><%XoE$O?HH z;jBX4(;w_E0hC-KP`tA_7=#QTp?vP#%pxPS&}8ye!g_VB4>Axd@LBMuZn0+mL7hX} zX$=EtBy9K=hxLdkdGR=f zSg_I%zOr6XBwbMz1L8nO^qUg~guD{675TWo#{x@n<#X4KnFk%5`QA#tn}{}P75Mc63X07-vm1UFqU-*3Tm&Q*9N>PR3k4Uo1vgfyBT7# zy#N>j-=AJ{I8~R6q*3z^4BYS+Q`SXVWlL*VsP&@gQ@@u&4hF0nSB0lnYUh6RJ`wV) zz<;l-IAW?EeW#D~IFWOtUeuf*0Ieu4i%YT)KrCeM3@V7)b>yerdseno>_W3F6|A}a zaW_FhNwEz3a}UkkFe(jF;%dS{UJQFNY7vP*aS8)6emyyaOl_4z=#AI91$_mz3IV5H z{~EKy)~@-TdyWEm^+3Td4U~uhVT8rBRfKDdb9c^7>1`*QhkYd*Ct^ z1HfE5qo&B^cS^tlQnvN;(3b8=%%x-iu+QY^Ojcyz?!(7Gz_xiQTL#(ZQk?STy~(eY zQ5WK5%dwHq3FQWvK1p2yMr98cD-|6pckIw(@>zmbqYN&?E=g%Wa57;e;6#28>0=UYFew7rjN1P9-&VcM38_$eJqx}auSz3___0off9MV3$tI*nD zbGc`YJKx%e(~dMB^8UpQ*&s1U#8wm=ie&|NEapz5 zRbndD(NjkN#oWBJ&E;9&Dn~Oo_Sb#V8m*j6-RI2D3JCXGVlh4f^jdGs%y(P*&z_`5 zov@sLCJ8XM`DMoDmgpa*{QAvoP1CQrjXVbmAwf>w7C}i5Um=m zkswA_e7C*Fvap^*^1;4(4Bgmv9e=f%)vm(2)G3shjBKtIYWfK|g`e^)CMo)Iwi#v0 zQAk<&$dHxF(2i@ibAfhD0fG-UepsLDCN;uqS4j75*R#fXxtQ>-k3^WFA**W{{c&U5 ztp(WKC{o#!x6Cq#bgK7N|aod>$95Iqz@fc{AP7%3_!4xm8uRdl35CYt<_ zt$?^7luNSx+tltgJ4cUirAwea`66ll5hQ>shN+Q<*N37oNi75^9v*x)H>Cd%^tYvk zIQB5!6dhivNS(A;LUm_Zk-$RG*jVHmAyf}M)tvx+3E{%%>>K8=zR$&cRc*Osio8y| zuGWQrDdYcvqsxyC!?Bk}pqpR3ST(psc4*_XPn+nF`;(oQK&yj1f~=hEpz|b$-oTAf z*}fM=UrzLm*vlvS13|1D6R&blqqmJ8S>8BLSt~u~`~@K0p>i+jxO4WIo5b;-^-eCw zUDESq0t-96i;U094=l|GAl&!|YK1|Hy1dzrH^6c2HX9?HNBRj}$_YN-PJ$jZuDgSh zGXbDLuFgQm{6m{n7+|A<$LOB!i5>=Mg>U$WR-C_GBq zR}2Oqt_-+gX|a=Y__CPz?q>H)C)Ncc$DfNStCkOGSAQKmt<`U#&lFnpb&F%hc_utd z%wllHLKQkCdoVmX)#kF&ZsBH;_Mxkb@Wh1GrU*+~zwGtmg+skiNd4Du(! 
zHp<}<>jEBd58TL^pL>$XA!s}1u!HYQT-6#nUxOx+^c7-13VJVqpt0=aHDXsG^P)^o z6RoHa@lAsBy)SRh%o^(JORWUHCMLJqFUigNV&ajedfl5!vv~T|W!N?ipuRJf4(2q~V{C1MV?u zj;99B7qoEA@BDFjl!(1c3|64HKG0Fr*s_?xpZuk8r?2$g4LmwedI>cb)R*(Q;z8KU z`xeTo%)S<2Nl|s@Z!VmZ;qLBqTQ%FJ(HsCeGJJU+mJjmukV!O{wtGqQEDLM%GBp`D zaFL`{!+04hxb;mFq+&~6o4&Ob@{Br?&WbKYUI(!>R_EI{exwqfv5m|s*K_{*@u(Ut zlo9ml14Vv=gRwH@G#0`-sZ8BsdHi=8ubK(63YFBNRHu#)3Y&67p*ju=Tjp@50Nr62 z&a^KAZ^pQW`x29vZXpi~2S)Nwlda^FP#v)J246T&lGU3UJi9%0yK)ncXdKe2N*i&= z!X`GJwsbkfVck|RG=l@p#Rh4J#8h}A1&XZGJ+ny!o1 z1tnImoIl(CNHutUwp`?QZnTb>+G{7YzU1kk<`W~WY$xszmj5MQgWfJojLk``fkxm= zPYh8QI}ZZ9$p)u>7Up=-V`5eG{%3hyOQ2u0^Rh*v9)8Y=(USYX#z8*BYtq)c89yEd z$WLhu_-sN^%hrvj6<^p9{ZQ_9;nnIA`!v|C+O3tD^pm56j%iRnTZ0;8zS6Q}cc1;F zld@52a^S2MZ5QFFNZT@l`Xk#z^t!e-c9tNRs`K_#QO7|eHalpqP$YA!Jo9a@2~7xv^f_3gz>bv$^{OOpscU2e&BxD#1hEXs`o;l z;#*KGa9M0)gW`GHK4E#l8ZJDp&u!RKYIX7G1`5vfP@x@o%nY}UB(^wy+46JE8V~+PW^bFQRNy!lZ>(_nCa&1wR-I(r~28adURN@F#4Jc9W7; zhzEM5TgEZnjETGBI6g zva;n;(=4>AHbt{e{#s0Ou-If@)rP&Ybnu>sT#t3OsjcMvHqf8IbE;3QS~^0M3HoWu zf8<%@klptM*{CQh`1bYK@YCe?$eD4ikNkwazIKyWO>Ijw8(}dvR`y>DvzkUQ9P{!C z(+^d3-gex*Jp@+IJ|R9$-G|i_AHFi`eNiR^%xKuHt<$l0!r+t{Av3{xhAU&pzu78N z1+fE%^cYoVt%ZC7Z_gPrpReQ(`o243^2Fm-ca>=J?8q59TAsT~#gxX3?VTNl?_W%a z#dG}d*}8Z#j)B_$%3U>iB_q*6J|}9=zLgYP`Pokmfl^}{1}1(!zL60-3Ny1jSr+Aa zY=~5~kT}muR!lYi9c!!UhXT7`ft|OY@PJQVpCvX0DH?s>i75*FwTgNw_u|<2 z^FuORHWgp3k|yUU#qTjOB@n6ey8|x-F#Asoy&kYv^%j?)GWaEQP^s@YTrhx0*(Jnm zn%$QpbFyo5>x<&29B%al<;9_u+er`2Haq3=lBkL=qrVQ5iv?M)Q-M2e?KQW0MwOwU zrN;L@%Hh#1*9)@HTu>ujL&7tbSEpmv5Yn5Sw*$>$n>_KegWv|&WJx+fRsGZx{((!QcL6A5Zjl43-4@HI2I_@{U40IWmMa3yX_0biWexZ zMN5$4E~OMN4nd1k+}+(>i%W3{9^4_gyB7%V?(poq?>=X(z0MeWo%JCZ`J9p5_w}FG zoWJ=N-BxYzTH;GR2uu5J*}MSHV^sBkv(^c|Fn_TmoxQ-8F|CiLM&H}#*Ss4RC)ni{ z*2&aYs4rn!s3mSgmzn(hJ%)6mze+@xlFJ1K&B$SXEfK+d*t?dS6H?v$sqzGs$kNr4 zo`^KE7k8zhNYchL8W+N_*C){=ldKD#hZ7vb|k241FfB7ZIK3!c7vBoJk-5HqZMiiEl3t4E!iYAs=wtXScx|7R*k_-}&zdDCKtUnyv+A67O- zA-^+NRtyc;Z5(>~!$FhVR>hI-C1SMJ#F{1_ob-jfflk;;x91L90zX2HMF!ES#4iIHwkDqHv>XT*e^ay=N*xRA7^7io*Zw8 zQ;xeV=Hz{;S1M?xd10)13@?;k-pN2=6kPtuYUVGqJ5Efe18~TwwFw=??k&L}4Qwt! 
z3Aa}iNwKNWjBR3E_sD9n)VyHN^5s@>S2x5RL;t`Ya(b?sVJToTjDfVKRm zps{${cw8)DiJz{h#0ip`7|>$N^1=uj>U|5^xoTRDUc;D9->?-B*aqX|Op-z_>l2cC zZL>?69?=XH+71?6aiN}it@N-4)n=}CB*C&(=T42OYWL2sLVnrV+Bmc}b!?bEVNu&> zzi*$Qg(9|5sFTYs#uSrh<_ds**XFn^Md{)L8PG>gqRD}s9@Qj#C%>eS@=VX+B`CqtUix`=fhCwd0DPy2nJEH9rEh;IybiU(bWy#-yN~!40 zW<S$k&52zg31n^tky6ShGtMEoFAWs00u99wP{pKjsrCWT1!BFhO{$Sr;161ORnp`z9tO=OQ3_*Jz`AcJ1>$#wIc;7sr74p$i%?Ph(W zoMr3^XHU9B*kPsk@Pu!*%x7CIOOJPSj^Jrta6ywb-_a4<4y13mujAn{=?jQvn{)vT zEx%7kWFu2uwp6=a@|=Wkmax#|bR?_=;fqly$eG+%K8FN;*`lomI80dKX}m8Sq29vruaj%4oP@ z&*AuhlV!^we9W^;qHeq!n-_|LXnbU)vUhd1U`$e)$p@DZ4fJgPts$a&VJ4KS2R8`v zbS^w~^>A5VPo#a_{vK&aO8B4=aJ!e8w3`mp|HAMP>X)KrabNbcS(yK>RHWxi3T-s} z-+LmmQ<3JPiA+u_Is7pV+)r#H(H+tQoQk#TvEQ-pqY#=7EDU4K=VGd1Q#e6Sx5(No zP$G7Z?MQtT4*qzd(Jq7ab69~krP-lPn?jT5&A4BRJ`6JUCYJj57WUbEaVrU!q-XsW zlK;Z@MD-^UApgJjjQ>M>PJBE(LtfFi`#q$KTr}jSw#1^}r}a{H=L(mzM^rV3$(o0v z$g@0!yMn(qEzR&y08C!#K{@axyg+SkGSnF~X#@Xyk{3iOEi8DM@%Z#cY|tMIKeQQ{ zY86wTPWu?a&v2|>{RL?7(af&Hw4~9?;CIbu}Ch!G<^={|J-(>>t`G2)LL815inlp41OpE zO4eih50xNQ?0Y>Id$^q$Ecf)0kzCd)%jTO&{0&@x+&qKW;~YkTECOlWh1m5L*wJiL^EYaZv7-AN-=5!u5n?s z{Kz$}X^x5*ng8qNW}BqlU)L7+Hi7=Efh*>OD~}f&9m`SZBjGJ#8kB<8bZ{psKI{+)#z(%DD2J)P*DGj{ znRfoX12^9L43S(rGm$dUm1>hvwiuzMfqR;=!6TU?89_B*LIoeBm!DHtFHjN+HOtBC^U7v8M+s{dj zYRZ0@jP5qLbaBvZQR?UWVt%t5^~}Dg3Z;Co^do{Tb(t3ZF3;b%HFIlCp+}tFl5=z+ zpp9d#NY{|J8~IP^krY{Fe4?`VxkPW%e(3Ee)q3t`{VQ2_tucuOGKBFbMA89^R#;f# z*n^Q5=S!WgbBo)A;f9eC^>eq7jP*5Rdt=UooZwQ2ENA)-wB*AoyfLQP(Hs&^H5&#- zFxOs2lSl@O^O=*RF%C1fI^m?X~_%+6FhyPOUt&iaE6Zx*s zn5RY`Wo`9Km<);wR8PXiU@xZGYbaK z7kJrV%QL@g)-Px^>uKwIbLruB@MG0)(B%r@hXVbO{ zo}AKfi6LGKQHKR{;g~k#0&+i`wcBi-M;=#L{U(VSC9S9slxx`l1`aP+6QvBVq#L}~ zLq-AnAUsPyoaw`R)6bt<&q#I5{eEU$E>Q7qVeMm#kH=cCNG!bdDd=#|$$lnzNcSZ) zCm9Sp_p23h!i4+S3Z}M<${9UR_QuAXoC@5i;;tQ(sNX;^f_9H}HZgno0akyf6 z_I~HDI>DOJ%F5HB)64`=q|*>h6V29l0ylZ)sN}4$mAAk`_{1ZwTQQ1~(3|06VN&WM zu21eS?&0*LU~xXe2+n#qa~>_Lh(5?i{?e@94^>Fe?vJ}&n6dbSFFU+~v-aZ$TdbuYWcrtgZ-+ug{rMksw{lMOX zt@{sLMCX^cF_F)gOAkR%?DtmtDjy{9bQC_W3ikC5Q=7`M>9TU5kOk@6W$b_*9&dGh zFfdZkC-i19x%q)A%c2|H_;g+ksi!(k0YQ*K%v33C4LnK<^D5a|2PLbfmI{T1w!@a$ z7b@6j4)Jm98>FP<%I3h^?!pVqzyu$YcbTrCCnI7meV5*bg%)QM;j3+5(|xJDMJ^|! 
zz}a%z)7W9Y09q!i%0aHW>XjL0Mt8%6k*b*jPb7?XN3n4y0k_l13lRv7^v$a_J6ic4 zx^^+8&)sk+Fgz`sGyb^6KX5Q<@Yf{Si}l5Q40F41MJ~P^v!_q{NZCh4mcI8hTI%|H z=5q4kM3GshC`Vi2%N}!{jqLW5ip#VGIn5(F*R=tP?XJy z4w$Y83prkK`$mObR76-!hj^;xL1{ByPw5IYwE5RPIicm3dvcP6WfO%}nTq!=6sxaE zf;|YLO)y3DAGi~et|u`I{rs3NI=omu?uXZWZ@Guug10G>#lbHRK$4RJjYfih;FkaX zYN61hip1P7OPGABeBF4c&i^-7>3l;3rWvM)TE61Ny$DV^0Uj7Pw@?A@mLGSY zS7c{{ZRjc&Cd;R9%!V#Xh)vxE+)KCV=q9xb7ti+2iDeHDDY`uFoAIdefA85Tt0~EF z)&;V0U<>Va!fY9#oew!xW=WfO?L<4y&8tBtHm!^Euw&T8?-Hda^^Jc1hBOZIudWe-a~E@} zY8$o5<<>_m8Oq=SpTFZ2rWbxtI%RdZ7jMmu?zQ9P9OMQ7+%Ob3$1`mrT7GL5C|MC8h#v2h-6~bZZ@o zB`Jv}O33ODl#CGXkc)~2SyF{EkwAx`6h%vi&xCfqbyuc9GXmhCXUgL6{*rtXCh zb7AD#lZe@<=eqH*;Wt-{Ue?IbIM2Nq7AJmIAL;M;{;hS{xpmD>U}EB~ zjKaC}HH-CWxrMWE_1$iiO(q z^2JRjQCssRD+rn_5YK`zNP{l%EqM4^5bCDb+og_QoIwee>oE4kI)gSc?ep!0!ooSr zl7t8mF)oyL9uj=#d>zJ2GTsY~ty8xTMve51c5&4ae-2x`SV$Gk!L6-+4Ed(E%&70y z&TRMj@gvI0&o=-Mp$nhqK@X~WjF+l+{tqq}b$6>@FDrki-t#W}wP6O>a{vRy5%p7a<)I#7-VUfvthu4adt(;e}gdnsT$U@;cq5ATTr<6fAEaP)+ zeAN4YILfjA8{ARe&7Vkk*y_n7<4ly|NV@R=D27EhNsEY(<5Mw~&k-Ii&-bZ?9sbp- z?O~dc630b)Iabv5>&@%RVw$9P!}B*Dzu;)6B>fswE)V;Lzlkq#yDxhc?oWsRa?J6# zWnf2gvml!!_R5!cdGZiTlJ~spOuU$kakl&LJqo?jP4_H3ntiYY z(KGl?WDUg!Lo`Ox=$ydNm?v3 zT&#PwXY#f} zPetb1fb9fRCBx&-CoQ#5A*8)B!Nlzo3DzVtd$-Xxkq*vCXFWC9OEy~gsVC35?v;se ztU#rLN{vu;uJ0DV+!d&_|9FUBZdKvNerYHlx?M10|*rq@ywH zvx$R6e68QxZ@DvVpYVq=I2ck7Y2h`4iHP759_MC;VsI<#l8yM3H>y#PcuG-(#cs-H zwaRga@LNdLvf>4EwWo1D*Cj7pYsXxMBAC-A#mn536*b0e5Su<`BTHg`^YO8i*87z< zNX?syf_U}$Gh+JiWY>C$kBqW0Z=G&D^j-o64Y_27`^}WmXp`!K-s0(^XW6yB`ofz8KdNyV=B$W(! zjD9`{=}MtlQ`|FB1EM(=^2|<_XJvp5KZ9p&hN$YljI`{fb4NSL?WH_UV4WnyEoAdE z9etl|3~`IGrN*@}C|5`Y64wS|FLF__dmQAi$CJVFLH~j^=WO)of&L%& zo8Vq8Pr;e;=s~Q#qp>L32TqZ39GpA0K+Y383ZtX1QI>(o9Gb5d6_|5PMjBzEOv6WP zR+hpf%WCzb*YG_9$bJHmy(3~UtsVjJK`Bh=Rl@jmK2xHVz;b_Hn-h7KW$2WMvd9uh z7`m&~;N_;5Y315Q**`FE(!Ts$g})I@nF#X{)$+pL{)bfQQd@@D6~Y{2Fbz_A+}DCc z#hEx-`WX!`q8$25%#u*&GO%1%8_}brLrWYS+Jhj3f~w;Z>1HK+Vn4%-7WyZ)`%UUHzukBHB?vB}N({MDXvASTTWTBs*=PGVgU^opIonM(4c*;%oZd1o6#O0GzJ zgRlE}eGZsBfJ|p>*x>uC^ikUMQGm?XrKyZu@#6!yl_`^Oa%4J554BX{qra|gjO3Yp z_w612joknri+oIFa7_js6)OiQX>n0x{@Nr`^TZ&hSs`~wBP7bLbX7PKU7Vcko$hCA ziqOhH(|xppSt|8DL^t?gcvbVY@o4Bu{bfK_ps?Rim1^~rgA$g4C@zw3`5WXf$)e6w z?p9a$7hafLp3NtX^=N1$^R@pDDV6{|zbP*C|soqdzOwF73=8QY3s?hvuEIESrMcEssmm~t2W5R zwWm(LR7Hkx>gp6G;_C%Dy|Whto`z-{7HJp7rrY2bwfnh#R+5amL4&JER-c0av+xsI%Q|{CY!&V%|u-lg~=L&i5TNK=N?n)+{>_rF6PfENvEiEF& zt!Mxm?LtFP%r8TR_L|~q>GU`jn4y~X4kehOMBr>&UUyuS-AdMhN{0G z%NK{nOMPPsV~3gZ!{*mAJrLLYd0c-z$f7XJgh|_!! 
zL&12z86COm^*a#RIXZ0Tbi(S>sab=Ixq*jL9>(fwQ={cL=p=D?SGcW|d{Xw&63n2* zYLoQ08f-=EkJRqL-w4ZaD= zAgBM)9;r+PmQnEoHI>*z`dP&)QX>gGimXF^BZ(C`r_dW|a+$nq#|TnTFJD=kJO_M; z(x`imGK@V_)N@eW5R44ui|WceV3$t|-k$|Imi|@E=W1fH{Y7SWUC!u70K4{sZ9qX; z><3ta*K(L+wrnLLU}feReiI{crp61NfS^}T7p_M_rR2Zgc9*gvJrvY-@PbzI=Js*K zz$-s6*srd}dv3_)c(Iw}h(i5Xq&9x|9Hog2BXp^-YY?#({n%U=|nWWaP%(qw5x{IG%b4^nl1YXe6| zraw#-Y+0+nenRnWrbJgT3WD{2+f@r6X%(iurm#R&2x%~Y@txhJDz9;7;SQ<~@JU{lMqWmK=g-jC6Zx8uRV) zL950mokQAw1;>Qk){+Gf*YA4i#mh<-+aS`Y5Bqj{?2*$rZa$K_7B-|z!!&11%~Co1 zm8;d54{rURVLDyHL8HdZw<05}G!@`i4?borPVL0Ml~E$yn0Ugg800-G%x5cY3V`!AWl{|1PbcPa7d@N3~orl_bd=c!%9ihokb z>w<44L3^iTogli)N~G0{FAr?iMyFKm+41-au3C$O%h^z-k^7}z#!bCA$R9J}2nJCr?k44V*D6D<^$n#eqztM3uO`-)2^hrPxMe9GJUK7%T^4hACj&{PD8~G|5CTB-b)Wos$Shxu*tmp zexeyh%r}U#(!$=1rkSCT{oQrbiDXH;9K=v|ACg*Rvi~d2v+imNd z*ouNQaYdTe!e$j|%lPV4*x^uL&D~GGZt*1bY&?u+@0+hF_v4*$(tNMr%3&^O=kXN7 z*@n;pG(C0t==4MBuSbh;M~F-x>srDYfY2os+AU2%{29G66+)dXX!8}GXyW`0%ouGA z=!O&KE8DwTGPWPA9shb}tc3F$rIxtFbv`ZQJ0r8DD~+8~H{!JntMt1cOwhBS3|{Qc z5f1*Lj};DhRhnd;Q2H#T%oDTKogw83B<*JbVouvOT^HkVyl&L&T4n6lo>n zSmuY%BgOfK9@Y@h;2oGQ;nt^DzVKM~OGzGJhC^0GNrUF)YZb#^3< zCE&5fn05BhW@56(qY6m90r~Ik#%{tXhz)pDl3HYZE^gpIj!`}>FfWX=3MGznbvncL zRK{7V+su!T#n#QpkU?1E`wS7I6sS_XD$gkDkKi)or4B(P(0c=S@N9(lI@Gt(y5~+T zQ)6~4#ioeaQWIjn8!3OWp#XPq!K{emG^nKJHY1`b#)z?wRZU-}c;3TBzu{NXpZ;=E z4_Vojv>82Ov8TqRWDzlg#uaHbCKpAd3`}W;VDM5FTW&Ugp0uGjj6k?gBYLsqN0n5g zA`f%WTLv}7M89S#oIEM^(2H+XaB^5%QwUd7RJrw<=8AzCo{L)nFHwa5?qV=IZASOe z&hAR_{nverqWM29tq9(3N8etIDGVik;V;M<8v<_4cOKB3K^Hn`1}W&H`ZB)9g5FX% z>beC($zhhPhm|Y2blZ|ShV-4(t*GIo`?P42#i<<+zlO5Yy#^ghhJNCTU<#6)bEk}} zfM}{CWPAR-dS4Ix6{5ZrK7Mj0AWDi_rB!03;=Veb%V65Klb**|_Q{#fH_Q&L$Lr$M zf$uE2)UY=WBSs_J!wp3wR%fALBZ7 zqxJ^A!qDXS=_((MyzVQe6LZ^a3Y~X7iucBnU4#Uo2;W6|5pEC@-S^rNl%Kg0P}Btn zNkhEuA;<2gIsHT8%%w{((cz~0L+ce(dirah!%a1AgU_AkjYg{@M`}Vg4YTW}f4hlS z>}@W!?l**XuaEY816EccbH9}G`6ykpQnz-gzmm&LycYsulB519O0L9NizPBjMMf2J zWSdwR7a5abNQWPoXRy&Eh_g1{Kse{yj?bDu@zH-~d$A1Lii<{0DE)yP2jUoXqI|iI zzo^6NL)?wCXS!pj;8!+{2pZVr5Xh=v!U)8+m!j^Exb-W#S^ z2T%5)Z?4;(`&Jh~o;zN+QVT-v=mb0$C(JZa>?+0gd?dv(KC3;oFT@$2Xfq-UI?3sC z`Lz9)2N0#oeM^j_DHZjo&pTHApTBM55_cXL-itluq@-9}_0-EMIVDO@f#oVteW&Z) zhm^93g68I`%hyPk$7IjmVMr>D6#4i7itef%n=IuEgUhdnRy4hyXYwqJQ=eX#eSLWf z%_u{?7RS|c1>57Q@`+~SXnD9v{()0oDW;HF8H}#toB&v`+S9iF{|7&I#yrHH9QcAZ%f}x*c@;S zvLwxUsUwv9%&hRh>02!)UmDu4g_sLUk(|cY!LLi2*A(zFl+(u^_WzRcHR6M_w`_ea zmr1?0P*7y!l?$b(gWQpZvHbNoJ5Fk@a^{>LuA z(_w9?zP0uH$MDh`!w4YTObGF4)A;0;a)eack8DA88_B&rewtW__>km3aLgCPUT@Ti zu?3EC%3Uk$vf__6fv!+;06z|Jt1wac;QfFOLaZ1>qb=!PHJA~P%L6chLSLKCZ5p^i z>MWs>1x76k%|2@7K^y8k zg!pANcAd#gzQIYYJhx?Sxh=%BF?K`gJ?1q<7QANSdhgMSt_o&sA(y4EQO8kENrl3l zovi|`{%?C|hkSt)^IiI{@QIccousC}_)10d>1x#;pZ|RFjIw$kFfH3Gd5x-pp3}k4 z^Yq! 
zq+f5UYKJN>x~re?Qk`QJSN+ECYi*YWr7&cf;OG9NOdCK5;`-rlZGN4z50cGS`F7yW z6gSX;H*CQb{5_$-nV&((Y$dDl-WugB;>9Ar5bf(InM+aZ7L-)3OXP(Fy~M1gnyk*l zGHG!4gj7<&s&Lx->l5ojQyXA&AFcUCV3C-%NdrlFxSI^?mOClDc+4MX>TFX33-B8p z#@r~#*4#3km?BK+bV7;bK0*0G8?eTwlQ8Ez?lvQkf)whYkH2)Jh;&qlxtqv=MtQ`v z?oA2Z>TvEbD69(&0l&F3&g#++F>zwB8 zh?iOl*L@p-y(3b0601O(7-SiS$jdIA7l}kWj!AL(a4ey_Bi|r04Lv+eV0*`W{-j=| zA!)W>_=|eUAwdx@sx6UUh3fT}xw~qtIhWB>tx??;ripoBxXLoxL@5hXBUlYzQldQD z2d3kiWgh{7Rn?w;qilJiEHkba`ozdn=^fZ%pWXehA0gv%@2t7;)poHL{F$f7(v7A` zl0P1O3D^~A6>S?T)=e(8C7EEccX(ITe}Yq`(e$o?b-M}2#v17*#J)9S=_t~5slXj# z3~X}rzr2d9Fe1@W#oV&+{e@V2`U;pksO=$+Nd64dIUowIL}>LGH3M6~^yC89F%O1l zHbkUo+&0eaz7lO^@{Su02|)OrKfInuitz~pI~Yd9ca*Wyw{(ca{(=!S#(>t1H@T$& z$i~4+`pyml{R#iD+v=53;bgwszAvK9gM8Mdz+}tBE&XQAmD60#JD`|4$NIWM<_7KH zQ@6;F#r~~&K#9_O@!+{^;>Wi6vNT|oFKm@{5hZZIb4b;rICm8r0_7>M8rV{BLBnOD~J~K$lIqG#uNJq0kVowF5DD* zafDb#dZR0*x_BLggvgmO3;T%?+DRoZFSW!z8z++8cSy7et*u-1V7?-Su_5O~M#Ka@ z6=Xi4nOc=M-E!S{Y`wzg`FIQ`4|@}Q;^F;B@V>$r?^di=!1r=1BQ-^|hoTbgRvj7Y z<|Jgk#>6GW{oH|e2IcyUA%^V0S2Z&}k=q+72;U4rLn8;GMUTB_%<}nxPel)H?Ob8j z0I8p-9}%1{_uk_D&xt3QqO#$l=(-<^s+;wGuBmR+>OX`1u~hjOE~{crs6*DIPbShQ zut#%?j6M>oq$|iqvkF5TkH%FyAw+J-s~Ca^ucw2|*tH-z^Zk|HZMJ;U&#+yxY{qBF z6P0#01*w%ngNXQYc%i3xw3Q=mX@B@2Mp`qt>z|YUFD{ktln??vx>{=9dc)oea_kMv z*u2P{n>{&or-l!b@Pu#?*A??uQ_Jx-yeO{R-bB_RXf>H#e+!eM}Zk?c_) zz)=`RN@f+rI9PBv@J&hHvma0auG@-4ey-x2|BtDSWj~wv&u6Ty7D`G$-OcdnsRWzH zbZJWrf=rvyz7WFVN{fSEpZRf*p!$yuLY}i)pK8RqM2Lriuq7A*DSB*#57d$De>*N2 zU8y&1ovBK;m?gaCOsX+qJRZWI0l)Apu>0iDnj5L^{gyApwtQS9@cEYIksT&cwmMGA z(n*W{x4AO>m(K9Y%9hs07f35+UtZ;XS8#t5@ScvPgq}|Q2g2HQWNlH=?%0xi-JfD~ zb_AjWhm9~9ItPI>g`rc4IiA#l38BxY8}+u_>Y+F;xMvz~WKNnIiXJwmoYFuo_hpi- z{oL7i1)+jI3LjL?4RG+?rH>SKUjg`n^=9Est+AmSIzF;E%u_FUJ9b1pcV(BBVqjc= z-4GmwgKlc99c!VNS~By8D#EO+ILn1Z(&l~1xcf}cIPD30Wmi*`YL*kb9`eRqt6gnM zPP0!^Dwb5eyYTkL>DS>f=Ez=MuFD_0X4MbE{H43CI+D7bmke~T@Fz@sGrTTU5r__2 z%vUQ?jnduNv@!Q_w`D;g;MjAOtAbx-Z2Q%+Dx}W>ij2`P{81hWs1yudue74!lJzI= zH%7SxC4b4!Dl$R4Cscj>iqt)Hnx%ICU5+;wHilrVTx}dqwTIWY={j!QeL~NvOYPCj&wkxgDff$Dk=Kb@kgkVlg9DHJ`ut}VEb1qf zt%^Hm=e2fF#T{f$-5HM@y5)L33?y^(Irc@p36A)25%DsQ;actIw9@>mC1jr7r|!}2 zYM=w%b3W5X;n*axkA#|$E+{vP(r_c|;>vQ0KE%$fGK!>fZ${j;XAC7o;&DPzHg-jy z$!5%92-#iNY20{K|Lc8(GubMVBVDY1HmWehV%&pwWrbVm6KzYfJ^!K3Gvnt{;?}a! 
z4Ct4t0!}%r5Bfrk7UC4@in`d^qm6Ap>CMf+F?%wqc?{7v<3gi$9_}s8{Vm-|w&KQ% zLqAZjgbgB(ixY>1>q?4~STY7NYR1!C-L0L93N5_N8i7W_1AX-hwqKu&&Im6X#|9rsR&0AuA z()*jn+XMbCbD|{b;0Urz_0NYMM*RqqX2ag_uTLM5+q0W$`9vAFyMB#i*EHN7bkRI& z2%Ta@EXy=vP~_c|*e-RNS%RHe7Kl{viS;@k%O^yBxN;w|xCrGaOY}-^|LVi* z(d$shR1helA_6YJXp-Hw zD4|olxEX}1K|^Qulx&vDRIg2LL5yz`g_3fVrN_Ep8y~DEmYj+$Q6gRQ5o7yBg2`BP z0;U`8a{*wPMTIuqctI$Tn5J1IwDW5yFqN|2Vhaj0TD9f7C#hVlO?$ZU!G}H!Z!20p zX1VH?^K7-;dFc751j&}SD^?N{7wh`0Io+1Q5L$ba6k+&d4_*);$}}Uz{mz;ZMv+C zQm&a~PDP4U1TiERJ??m_=Wzgdmftnh@t~+Eipl>x2n06fDFuX7sp?%Svl6AYjHp}NU) zclWF!y!!}u9ccNyI?U3O&=aBLk0nVd?0y7Dw2nN;ie|9;v8eVdqjsE6XXD$~tzyl6 zYAQts;K(Pe5&U<7PP4F4+9qUvEO87~30~#>_Mg-ZDPug8t4YLE6q<|jy%tuMXs~I3 z$n+;x+D)7(=uE}WpDihvbtYz6Rnjk3rq(o(h=EkD-`w?@&lN*8)0a&`X!dI$a-~h; z&O7tz#~XyC&f!uEtt9&|96b#UgI^Vxy#4!7vKIFr%JeF8Gewnpy4J*}Dd;XKx8G8> z^m%1xM%ItSq)o{x85++7o;Ai+zCb7m4>Q`1#y#i;GoA}0#jiN_`4F1GKGm1ln-pQt zY~n9Brcv_q)9AF}Xocg4RKpWq6wB2CLNZ(OZau9@7QUarUUC6h$j_kIqxJVs8p{{4 z3;>97%KM^Ifs(cxQFf>|k4G>BaC?4I)k5Z~*J-vcg7y%V_b>pLTKB#h--#B(*YM^R z{{TMVP(PhOJLc^-$HrmOu#J6IWkTZu{;gSj`iQhR5D zPp^n{vA1Hd$#{CJQy`Rn$=k0@xVp}_(`-gf%|SLvvTmR!Ayh3WUaX91ht)Sc!}0)j zq8b0kpz&Y!)Ri`F+Tf|1Wf$~BcA`fOK4vd?1$IYE_O-|qV+B?xPnwvAknP>66JLmB z|N0sh9N$U@hhLF*>*1cDhzzp~?LB=XIQ!SJ)*M}Dhjl~nDawOJ4W;ZbXMZIPKT7FM zb<^mDWiX*{$^LQ<|Mt-}@1G@qjzHC^?Y#J3Kky(r&MI<2T0KEyy1Zi1e%iOT8A4?h zxTk7TJ5E|4Cc~7wkU>Pej6|>00OByc`HvTw;k&AlXwRx1c?($H~7 zC>%MGu{`be&)O@oBP83Rzbo)SB>vTFhj9t}+7;W8t+ou4fs|VzQs{T(Vf>Qi3(b-- z&LJZ&jTA+caTT3(Wx0g567mp-1&jI; z9nJAMkg@YWaNk$lk?J76%~TYAfIp4O)L)Vn2+t8zV}kA-dDaIJ8WnF#WTW13jBRrc zs!ocmTcI>BWD-|oKO;}|XPIPWsoh-&t_{?|iP#QuR~mh3DE>&P2;XPusJT#p~k^;AR zzsro^X;~@g8L8vdlQ#neK1hk{m;1qW(how<(OwfT$}KZg3OS1%f3k--ochC`XS`IR zQjcCc3p366lDY3IJ@{?w{B#|Wx=QhHj!;0X-7%pUB)6Ivt8oBHtZoZi=ThEkzf5j< zg+A`A!q_WowkE8emm)yoqYCrCZeT!V zDyO383d{XP3!h!G@#+Rhou5^{QwB47pC<2~ro&i|H)1;UH}fp94VUC@?}--L<|$dy zS`v`k##tv*P6r4yOi9-?qk2~Jv`xIbM^H+6SwB(0Vb8kXYdP3!5V&3H0{0(mekJa{ zMsT~~-ccz*z2>OWH|B~GU(-}(Df z_}cD+mFBl=TN=aIKpp+6DDpw1{zsUcN)0VqNV`4PHakjcQvOZ{*dTt@X_4(19T#47 zOIZTJmXw#XHbM;lD*;vP<$@e0O}u>x!L0jn@e19IiPY&n{w8Xn5J&zyYnO^)LqCh+ z)YPpV4>+TTAtk>yS+y4BB7rK{k^K$1es%dxe&BYiqFd2wGavY{Xye z_@CH^&8dI~(ghxsTf0&P;Aio3n2~OS4!`Y)$neu%3D-`kn^@S;>SztodPhpx3j0j&*uOAG}_x-mHu-<8GOcC3t0MGwY$P({biPb1E>jH<{S zI91T+7f73TmC77BPu9!q{NPx8qjBB_0_* z9Vau$-tT%93WObQs)qQ$d-jgtmgLnx-xlI~XB=|bQeNEgWo6F$OQ!}&KRrK3AmS}j z^!{nA)zhN<5jzDK~g&NY&S~m#I7>?Y=nn z&{ft{+G6dvUo8)U%G05 zO}L+K95(%U&+ZTRP1s6L!g+d_fq`%$^1~y#L2c<@4t6y438%x@?N&?kAJs_BRgt#C*M6~ySdKz(w|9q|cmMmlYSZ8qQRN1(=7 z5?O3iY)U&EIb-dgfqW$ctiW8=kMxh(Fmq+ObP23&@jtso{yis8{eRH+-VW`cM)u}h z*>nZQpE2||(x#e?;GEOO?zTA3gN8{g8K#IY0cwM2*z|2tI)Yw&{lFu9seL^WPM#Ry zPi4njju}a^>ReJR##`=FC@t|xi{@n51Fa8+u^(~r;MEM!Kt4WZB|07JH^YdZIVSeu z&>r2kQBH}Ct!Zby3A)TvWMo)PRBQd#b$2^k@CO4%FI$Qfv{AwHH&T+K?A^Q0kH_4; z>-t0Y`h=F9uu$T&WRn9K2Pe{3B0Hl&IQWM}nolsMBd$iRNXi8;e;ZS$U%Qa-X?*Ut z>f`jO8SN?Q;0XUFw(Ro&d)jnYd)`~hZOWDC$xv@r$=I&f9q`0E2IKXY3xl$FDtpt! 
z>tuj02Mk;h5?5ZkAMPl+`L4ZDX6nmOGt5Aw`p{o28X?e7ScdTl4q3P83OXIjjUgQV4;H})1Xa;B6KX6Tpet&=OjXe^18l@!LU??J|>Srr#1${U14*v(v zMc&i=3Qnud=&NMUECZQJA@2A0249_0CX1MDCsVo&U#J$+lAG0!=indfEv(b zw6srr4CEOn+ue_@MfA_R!T*RS+&CJ3cWQ)3Jc@7}2^Jui5E@tudhWi3I4XK&H$W-V zo9xPveg;xIEg4f+=!PVtmc1T4kvJ@!w9Ipv6Zl{5{*w%kwc8fv+a*NzK`{3exr=w%D zoJ+dTtsLs;Kh+T7EVK*xBIht#6t?Bc{r;tH4{|)FD191ZE3&2#tERgcj55V3YAmDS zXF#WM^dsrowxiUxU=MHD{;ir76qKS5tB_LnUc9r>VEqmjSxL3|<~+AryJ96U9~xnG z7qy(inO%{a9)5$rnL~FuQ38^@tKkovk6Tl{CiBeLiz zIdSy7Upw*i1TI8ut)AMtXoe02kgj*?i7ww(TTH(Q7n|qO%5ctb$k98{BAHwJ5o!30ux5E~%2 zogm=-lFW~vl#(*tB`?=$R0AKU=Wwnbl3;|B%Jbt)$WgK5`*lP_C|~2InX8k^vNW%U zUA&qbEVEP+F%=HsEi(|?DL5lub4Qq|KNV}qknS@!oN&nPZ6$kGfUo&}J0+bEX+j3S z((=qgz=CTCovyWhXjxbR=_@hokYV>`Tz(0&2J&s@1L-44CyPHn4Epp^o}7qcHg%(j zQC^%@E{OLN(`keBXCvhW%}>-ref&t7xrF??8Ny+dh9N_WGq0%M*Wui%|Kcw8wh5A;;j0dMg2<}-6!B}z989*Q;-=DIlmX+o zwC6dmCb#2yzTkKZHhUHNHCa?H%M^%TRU2%Yp)7*kog_*3@Sknm%q+a@=s^FXA(d%J z!39>bNpW9dL%NphG=H?-2M~I)TQD@xVs>1mcXyMwjQjTVQGAm8qiGFU(I20Fddv-V za0_+pV>GG*yW2wNJ>8@tUPK=tFbdIV-2!p7^A7yC+}i~BS&KoMrNw0u>~QBU7&fK! zP?dJN88O5_mQoTS|7md#0;#;Km$7gTV%RUK<%Vk5C9H)b> zWKR0>t4k)_9b?%6OKJrStG~Qs#RrI@5f>~ftmF2{sXr6ue;-4FRRvp z5Ll>NK|YciO}=LDq(L(a+G|BSSyjA`{R;W<`-?ygYCR?BgU5lKK(4W@qg#uNj(DFh z`%>?HYExYsvUDT>;cAnjxf-q+Xg-IF@VwaZqSq3;Tbc3oyMK+67mNo{tw&lKLaH%p z$A{Gd6_wpjtBmd|DZrcwXn&^`l#qL<)Mxxly>tFkq6++{P|bu{rpJ%-$a| zS(8aJnas1+dhYwauHRKZOsM2pg%3xfjz_Ssz{_V&QlSHbId1ZkJ;B;4B#tS8_ioru z3uVsDGzYW6@59z&qN?%3d-n54ru4D`Y;0^7pV9A(9@1ZPz4(H8yeJjNkZY~@Q5l5N zbK07#+E1WL>weKpJME2bmFS5{6>~fj4fS5G zlW8hEB|mojok-~JSzcM~h`8A^9;+6b}muD0~Tm46lzled=(lCX7S47&Ma+MNKzy^_m~=I z>*wHYriucsNGpM_##a-z26a;(>e|99X;S^CI0Tjh02C}BTsytYNjcRfb;_|($ym{0 zI&W9~$%Mvj-}gCPaxt*=U??*)jJo*Y2X6dqrGZXgz#eDDhi5K~xXj=R#JX7gQv*CP zy$cZmjYojm$d-479YXcO{=?XB~OGh=uvIY?H^Dvu*T>T5C(|yw4K%dmIZSA5}GXBV}m%RF#Ey2~2z*JN&frA*Hm*N_C=UXBcMg0RN3#o^-ZMpa(hgWZQV? zzWJcw>+xLX*Nj5vk0U7$fFff2d3G zP)=>?d}*vzzp^=7c<6bCdiV}s!Q)G3B~I-bE>tEvT2sz{QTF7y?ura#J=O#2E9xTl zqq&f~Y+@f6{jBu2xC{)>FUS5=E| zh&hD~Tk}M|oyN+#;xmpg{H{WK2!_smxy-66v@LI14yjs`NYVNSaERNGd(0f%Acq-N zk(0n{ahYbW2&(T}g5R#ap0s1QI2N&F?C*0F2~tsN_@)Y)lJ4xJRrMdGQQ36~xI0r~sl72`b9quD$~^#TI?R@!U5 zh2w?HGA)DI0G$&jCzw0@q&?^fr>glyawi_+flDs#xu36Qg)#Pvl$a1X2B5>|%ot7i zLj++8MR}-e?pFb-3ZKqGkI#4;LAKtYEB(j8caO0Nr5*U#a@Wu-G1lJtj}tcelUWS7 zeV_BxG9R*YS3Po=pkaZ0P|JUSA7>TgIEL&R7_M@IyrB=lpyBsYvDF8=+i8=_YwX`q zrmcfjeuUID8FnaxJO9QIFaz^mNniAFxt)1{<^qpEAqp|h+2HHri!0D$RsYGekb0Ol&oV8{+HVB9?8t)SbBEli6sV6&gQT=UO|zp89dzO%bE? 
z#Nuq9RkSZz`*bHGEr|JDIX>eu$D}VfpB4pf*{r zZZ<@#hN59|4-G%@_53^JGn?Xg{=MCa1d9;HWw!TbAkWCeTc^a^%-@jqmUwCGhKK|} z;oP&ELM7M(#u{P5_@A8<#LzQcQvhbNUH^2`ms|c0_87 zG%onuDp^tq9rIqthNuBT@I4GL0lv-vyfcWl&FRbVfb{?ufs6iFk6J~{tKBwgQM_5T z|Lscmiw&@DzW#>TswFYHUEc)-nHEIjH=GCOxtW_uo4cptfKvnrCnr36oF5@iWX!mC zbRc-FMd6mcd*@DR?=J&Ms#VSDGvLM4+nY~Pm;D-pwAGf}{dO*7SxKx3MdE^J}*%e$8o zQMBeHeT@%G(jPU*Q!UR~=Gt+g@yby6%G}CQ+xrfVB-|f653S^PmE&5{-ojGvjAnDU zJ5+ZqS5@-FO07hQ3p9$Ag>4Hn8dve9_(9 zz)t(re0Fjy#V$#D=f=+xkv95O z@hH()*yGgxjt6aIKFN`#dE;T82yVanBqz@}wxuauBzMGJAte<4nJ#Z!1c{yFwlZVR3b!<8|Agc4 zCUfo&fAOAWqNo`k46RKnAf#heyJuDMcXtMrh=ms0^3V>OZlqC^_kLt(Pwo5H%}eA0 ziK_@It{_2eGhmCJkXBQh6yUqQz?oXD^gdxi8=fDW_85^rpHnJ}RAjDQm(BX_Qh_-o zo$h(YR|4FaIN$wxTnddcw*=XzX>*iEJuKctJ|`ho57#+KTF$IUT8SaIsdObM-ExGi6m34i6{>=*LT?mXjoY z&hs;`1V0z?U@un0gq@PLzTb$dvbR%{h`d^eTvHzrZE)oW^M1^Xcbkw+^7@cH(Qffo zN?JT-VTKe1Y?9|g!azV4{>xM!3Y&%ynaj5C%;CeqD{bCUMc9qAO>zdI$s}hL(YA<` z4o=p?Q4h|Y$P-=<>4AEC_uqC-?@A4)OkYlxZ>Zn~^14BBCB!X>)p9umZi~L)>ik{c zHZb9D<)m|8Cqn!D?`Mw(8=4bx3zK27-p-Q;WXy(~WT3M%sdP<9<4m0WL2=bNy$iuR zx-W>Ti`p~B@Ur&u_ouV)2D*Ec1_%X*&b<$j`n-iJbLxjkXNpCXA(_UxpW>Hc_^CoT zSlHt`92Dx9P=ozC{L9`dyLW#N#+Ny+atqv3MsO_*%Q>;lW&f;YIo}xwrhyV=nWf6o zRJxsBrg)N^K3CKyn_DvCtaN3xXJL3TP?S}7OmDWT9Z!&zjGDaUEym03(a+tUpn~Lq z2r~%J6_m;%(t6=iiW`}`e3~CE%AbGj`w7jrwEkQ;_dZ&Y^HMC)618GXOfyFK@y% zbab3Ik?Kknp^+I`=1@~rKPiVor{{R^L*D>~gL=uz=W(?*6-?qwNyT~kC z2-P`hvA;|o-kFs0NUm6NnwYwg^$tC|@R@9)SXx>AGeW!6X+%7tldW(z<)~%pd#b5*?a+1_3XAP2ywOD!B$b z_niwNT3;16|3qzTOOdsX-y1I593iel@bIe;%+Jw0hSqANR3WEU^UfbVqH+JYkWAud z!Pi^2YQ~@uWLQc6;iHLwdD^Z6%9Zv7wHFJ0$s>UusB#;H;Yr4Jty6e{Sy_Q>^McQ0x zg@^l&mOJdg!#2}!h2Ie1FBmtgslZK^F%D~Je}L1~rOXhFaEq5nxyLLi!3yop$TDT@ z)3gwIu^a*oljssD{z=5kvakq7jKEGzt3d?Zp6BTS%2i+ zxsT2(fcwGmFNldoIC7VlJ%}`=egIzzzC=hK3#FuDI?p(`h0?((l$??jfdbcaV3FJ2s8CY6v6SiFMbkNS?tD;j*>NS6SY%Vb3ja*r0PHblo z``-RwiV~LQf+tL|ZC<&e$g`jLfz0&VI52prn?m5kSo|oG5GSaWd49yvJ=T#?mLP(J zEGxdi-wB(5l;H7L0wkiHcuP{L&+3GhE$Cq=z`_l_{|BI*cp-xFi9_ARtP=++mL3&2 zZ-VZXC32M-rXH@hFe|&57PyfJ`ThZXs^#*-+$Xt0bpnMS=+a}Jy26Z4(@`@pXg|o3 zzcdR|eGvkSg>|x>h?2Jm62*VoB}38tRS8xBkcZyaL3nO7E=v`6IGL7LdM=zwyrhTB zYT^~Te%e+V*Qq*ev@=1`4rOAu7VQ}wZGMi{V~B<^`i%t$Tca?3 zWm5B1&IYi6BOv5gm_EM@JWZ3fJ5~neJ!bv^GH!^=dA0k!iO5PMZ}kV>VqO%xYFg*m zcB{L>H=BQ?daubiLi+vOct!(q zRpaPooXj^ve8?i9aEnxWFYC~uKA8IzSxqpo1y#fBn>wQq-HBdkgJJUr$&SH0L9<$Z@2B#*#0>4+kF;;$@+J!ewn1Ud(XaB79yu;P%yva7*-U z_UN^)Au{}oy!0yeHW^}Y!@+{70aZa_7I<5=>-ml|EHm9ym^E+6GuTg$*X1`cksJ zFWp#v+s!9xj+ukC$G#Hu2b}PIa6<*Sr6rGLcticXL~1^WG0xw8j`UsKwaDXD4JJR? 
zOs3M^ek%v;VoV2Gmf%WMD&zNhHZEu0Fi*r-!g&`-o$ut}yKoAW0FYt;-hVh-^k4!m zW-0pGbgB-NFV4;i+7W4KHc`!YJ5l$_v8YT*11bJ($`d(}a|D|+5;St?qy0?|FJ0dY z-4;$Y?Da!k3TY#dW3II)ij$E74gI3@^F8559V1)FHCR}pgCmjXi67$yZh!ZJDSW8T zg_|d5{i3Rfezl;I089vlBX~r*vzjKms1GFx*;hIGL^qXDkVRLg5f|~C;^VuuHGg}X zt3Y5MlNyFx2*Jk}cm@(&Vx)6a{wdk~y10b16%U#1Fj$ObTQhI4{C4;b!<=fel}W9= zLAbn+@(;S&>H&8=*0QaHWyhbdqSpzsVzKVY{so%968iY3$y0&iR;09_(4Lex<;#xlALw zVZ2uoqSS)5*ygca?VtUmoAkMfH>-q66GsgV88>hO?~)SS@f#jzo{Rsp6TB(KZpZh< zc3y>$rJ?;ZiIn7r))2w@ui%gHC`kO|?q&7r3I@f08>>r(CCLwdpJ)>W$IYLjF?OaA zF~dZP39i5YMlmuS|3{(xKLzvu>vPb5V86@PjNioP{sEfpU4eKt|T0 zpC8`eSWkWGqbjN5hDN@G<^2Oxnsu;V^*@8I#fXE3<;0%$S#HSjm&?W|hjvuL=d58v z5Wk(SAEegbE2=QFe8ZsqlUzN?QBTVK%TDRf!Y8f9eL-PwWQu-+poOArtfJyXme;fV zFaSWiSR{hWh~aW4e))Z$(LMO#<>cgoyfDZi7am5eL%VncJ-pEmDDHHGOM6R^uXd0f z1s=YNg;%Hi1Js!tf%dXW8Lw^qJR0s=c0obCxMki=GVQPV{>C5gn#zsn2-m4XOvDq) zh<6eg;Lu8hz9YDBoh<$X^z!`iR=?|qb<4NfxT~w{FL%qxabFj@(~APbJSrsy?tJ4q zo=pyMZSL?^8&iES_ijdu9N1a?ya@S;Nhq-Fo4O#}6V7)_(^wgtqhLYFf$U-Rgrst( zXSeAr+}U<=v(lCJ%8t}!eU=r8wXsx68`s|1JMguw(dq6RPtb_4yOJf=M2Z(=}3JYYN z4B8S}h><3CDT+4HJQ1?*ERJGdETKm}@cDl8I5Wt#6tqpmhSJ)*O)-5rY9 z-?BEnE>lKdiDh%SuZwm)>^;_&q}?>DJS+K}!O&`#8M@hvj8pjgbLW3FY$x~JQbh(7 zKP5y^3Hb=n(pFdQpSA3}w#7v8n|ETI8h(Ar$G8fxw>1-g&pl4`Ty&z_2*Yb;hXKd& z<65>W4D@7v(KsF*soe3#?MEDbGYyMSFwgSU0!d8}Wu3E&ngGu2Q=*tqm*ts#ccKny zWJwWpdfb{Zu(n(1K{MGkC0?%c?+pci{Q_vj@I8R@Ee5?>D=$n4OJoRHl76Qc{!WG< zKh0UDWAD~~Z{(jsEXyvVm|v>y^m~JjkQ{cDY-I;)6DYBCV`yy~$n>2*ZPzCVeV>@u z=FU#sQ1aE8id*42r`FZjOO`V#oJWsiQw9l~9% zFo~prW&fjvIHR*+H35~rxoqYw0Tyx%!EV`*ypabxZ3ZZaj_RS@(ACPG6qUkN!Qam*eGSdS(>8r5p)Nx-q-?crmV@>hYzxysgKsoL*c7Kv9tHbZmn@&TU^JaG{y&;gtQ*V=5?WHd-t zbnnxs=7Cx-OKKvgnE@p~)@@ctzlg+o%lSSp=Oh)3)-y}#&LQ9h6z6d5VTR?w&n;I7 zI~4gl2d@F;zPa{ON~2*D-WxV|-}v%M3} z?dJ`i#`aBh&=Tp%^diVta2V%|IvsIp7T6IGR?qW_e5KS>({F`pYbK$38^VBJwd})i zfuTI5HW3Y!)Nu%^OxCOXWZncywxG2x&+qYa^>YTuHtGAD4oDfST=~JHcmaafs(x*R zd6q7y+*^i$Dn8x93q=jrDoKkYBc&BYpkR6_HKEY>J?*pJQqwjK!!(F%MMoi+!3{W# zb<-MAgq4b%32eX#z2;LA()#?PU9<3zum|b@|07+!5{fN@#0JW&K9HIgdG*Lia$*a(;$?fC?jbSu>riPv7}m1U4CsE-gm%ELlb?c4YJ&uyIeCQepRxChi10dW z{r>^D4!u2^P_rqc{+PH3@iMb$6Z{CnD`q??em1Sw=M=LKKCC{=9c5v&Fj;W^op*!q8?@4R9*~- zB3|Cdd;*DlrM94D;e_k(KDD3d{P05B06|#piUicpv!lkC(k9PChaKF9e79e?pwdQ{ zlJkKi(9{;4KL8Rk`fn612QDIRpxbr4v=8M{>hwimkq)7}Jxh>AcU3VEL*S?b{|r_t z4J>8w`5MP|Vj7M36bwU_-GA+tSaeTe$o|!pC$HcR zF?H@}OD_707Wn0ZCpR|P-~{=Xc<#`BHrOn)UdX-ZL1KHdZtHpA@*wuNrM#;-7mh+G zGWrGrmaF~ZZ7!C#^0rb^5?4V#9bWP`m(5pS=#k<3GJR~s@QFWDvS%M#jg)Wr59j*K zEub0XYph?_bE4i91B>kQPe=nBEGh1?IVUi_<7k9P!L0KOpYgoSC80(cqKOsy<(Ep~ zsMJmd_v(vLk8&rHP)M)Hmt$Y^%n1SF7*h~u(Qgl8>P?TTfJ?Cf%=ZIOI?T79f&4a9 z4;}M2w6|5XBuNW@JDY<2Z@^9ZRYpTImaCG=1aCc{xjPR}VA|}>#i74G)c7uN;f09y zxRc`g^Gyt1ckJ9Z>0;Gu_*2;RCr8x&ZdPYj!y-|FiI_Sj1k?Wy)cMYR?)Je7B1{|t zQ8#F5Kw#ySOb6pdc=Ox^h5CE^c^HL^&lOVo!tM923MQW&8RsrnrWV^kW^H{d z+S4E^$;^2o zg#KZuuw46EJ6MyXy*~xd{ajUFzGAk@F>RY=jLH#Tbg#7UBhp}r-Qw$wN-0$zIciO@X$Ew zSsfVxlfrhMKi5dcSj#fUV2iS7X#8N!#faiQPYlv-ZIat7 zOJmsisrQ2B!l_kp^LJu1)5y)6w+4giCcLjn-*wCxkU{D!C?wAz91pYo;1bmA6z-m8 zDsU@V&EQuyOZ7cVa^GT}n}hBp1(4-5?Zbe7pQ{DPxW4rK(|t&ci67dN{^qyRJEGE3 z4c6~&;C3b0t->wwmYk%}7CYmHjUDkL(;*Yfb>97VSVaK%zx06r4>E9hw)2~KT~KpS zs=by}+hDS*v|%Jc0adJT*5cHr@=UG>77W0*-^l^HF-8SJIE>O~;wu$>iekie>2hz! 
zwKq2BH0KQC2RE1bsUpR$TyltDQoEC&17i(;3n)^?X6~|EZznGlV|zosditDDO2u$I zi35#Y#C|U*(jaiUlbGt}*(nGE0yZX}+d?YXVf2^ShUfeF*|B2a?~&(hurnEp6Q0Zt z#nbnpBDsgtr;xI2ekD%Z-l>#V`41H%iA=q~=vevZwY0Y)#2~2scJ2^*#%Umy4ZP$W z$;O%Q3Kfe#kVEZT(HO$+RmPOD+@GyQe}YS;ZAE*zPEmGfO!2hHb4X)a%j7|{i&_5x zX2T;q!PrCkc0?IZtu1hbGCV&*95MX+l{unDTuFrMYI2gReL5^I14`DPpx60xOK706 z18v4|)r^?-M=nSjv;KP?U0e~tj68EeWD!;6$XBeXR1xEcoQo@gkD1_pEm*pp2eWRa zpYCVL+Q(xX#Zo&`7^pg@grm8&xg~ZlIRTXo8*rn#=t*$jO8zzSHSwiv^SKIZYOBJE zg7QSIae*Pi#((6DAWWB4R5i&}+V{DOI9Mv^yW4}dG>%4*fNCmLaFJ7X48K#Vo?JbP z&lmKkb`qMV|{V}9}7gZ?;faueI z{c#)}jSqb~hvmX`a1EtshAP5p;?=brqRV$*>k~T?wXU+)L&EHfW^=uh9MH!i@%?MA zp3aBXA24ykZ*q*NkF`l2qTd{+JRPELES-vS$!bYbWZe^`eqs2!SCIMn(Bo0RLQ)om z;#@_F(?0zn3uca!5ssp1PGXu?A}>o!+o39On^gYOz}=djCo{uao(YA-Xn<=K9&u=& z;N$mXq@RSw@cm#WbKnU`U!x_5$KcU~JjX5T4#Bg0ucV!30_9U-+y{|9t{+z@f!eut znWr=!>(IxrtONey-&Qu1vzCZ6Q=_syHwP6q62;4>xP%-#ae~&+O4~My>3Iu13+jlT z(HjEq0bw0yF4Wfzy(D^V2HT_fAs#b}Ro(-h5tdsav-;_1Shbu3RFaA- zzuL*{#||QiM&vznB}c;Kb-q?bI`e7f^Ckf{TuBzr^(BA>t(enpWiI3o0mn<4>M`3X zh4`CS0n9DgKX#e&U>Zq-93vq$?w(p?Wusfhm@^tDjUZfp9u!8etE3-u75jcRD-GVI0Ag!S^iJlr zq|#@_FZ52poos&>@-En6xhx{i@{A6q%i@prYVC943s#L8Fp%`mNu;1K(2cu83}nFE zMlM_0?c9a<$9#U$8lE=Eb{E2-C~K3AhrQ0)U8-(KFwHAjrAS#VGB<7x3mWmySx|}W zQsv-73=i|o8Z%VsbDk(m^PI5trL($z8-3b|%;zK*1^u8_-!q;Ys926nSOyx`iC?1JC4L z0*3E+&bQt4h)3CU>R(HjDm#k?_g+L|M}e}Ia~(*xF*!BY$~S!Nnwui%1K=R*c^Y)) ztRX=RLEq~`zo^Gr7}o?k)bpw_m0u5ORd3w>nv8pk@%z&Lw7Rpw=opOtOOIqn7dHZ} zdYC89I7hQD@ci#V5%{xn;^Bcmmo)jBopMr>Yz5U+%icQXL;BVQ*-}8-nHqZSH)m&F ztZiS%DTAmjf9FdRR7=PUiEvtzYzE#kYn9l|I7|>OCv1CZftqP>=}=|uMdpn2;ZYVo zDYDpku-5-G)KBkn4|0dyk3uIzhVG(KMgt41`{fsPS3M6(2VePYv{SyN9mgjRnvuoN zH|Jz`rIiN>Rj7ZwR36ASmTrU5?fixkMsQ;QO?cV0@N@eJZnsqhtj?zmGKd3NCESWL zDG^A`pMrXg-an)l47tY?L?vU+(>WWicWv7DoSuo&_v7M!*H-<#UbImtyr?r1+_ZovP2 z$i$)GP5MW}i;a;yXf1J3bJnL~desn)<<7+irhsyTqxZ`ve3ZbZrTQd!z|!WM<|g#; zPEYe})w3kFZuonSOKCrx(aB{Bv>0wxDhQTX2B}(TPRA)V*%*Ei3N>feEjP;z>fu@$ zhA-ky2`%^(=o+|1p$j!0NZPo}Fnj_A9}W0OFiA~{k=`mtD`^jUNst3y%Epv)IjE&n zrQOzHdwH?{+RKamvX3fzqn8eVA49A5QX4`x?SVhh(Soi?v-s@nB8N<*iSBA`s378$DUS8dF zr;-EK|Kg6r|80HQghryc8g_%XFJG2M zy_4Kk(Wjn@vH-R7q5Yp@jK}#0wJ-#EN*Dfo2?_mnMOV(TjdnaMtBJ!jZq0MVl2NsdRp-hc)GPx&H?AA%^f5?U0+_Et z)ZE*A_+*(aABWY0bzP;4R+7f>qjvZQ741=ZY6kR}5^~r2%>v7t8RmEr-4JMq&yutl z;l^;A?;9ehzCsMy8H> zlHLTWBNcu<8~+{r2$?GLz-*u4Y270dy58Wq`P`%bgFaf-o2$P0WTnD;^~*}H5S4Pz z=u(yl)vn*xArm77W7QP)8qR+X8~zWE;r;`;TpsK9Caz6&4uX`&dcBGBQJsTRI(d%W z%nhrr1C_5t{hd2E{!3;<^@z&w>PB!BdBHai$#@tNTbBKAkxh8$8}p4AY3QMg$v}}E zdR1PgUzurY^H~0ouK-6t}PC4jlYHqm*Vj5b|Qq`>cUy203-+ zrQU72)t5w(jjvs5seEk*7OGBU8PFMEl43yoR#&_rL*JU#&{llJjeh1U=Q97zGg6ZV zm%%#B%MH*4;(QvRCWX4K&UTV&_4Zz!?82@=Tc>U4KVqSCd*P=IYdIo+{h1*S{_?*c zG6$C{liXjCq~>`tiGL}Wp#=faUC@PSk(j^i`e~Q)oqhBvJzA%qxm6VLk8bj=ug#^_ zx5a1DHeuKGF#%2~`<`-ViiJ|4zG%ATpJ+F6*>^I}!|L-{O}||F1A9jm8uf6v;l3n6 zKP}~f39dWO%Z(p>Cpn}6+S=*wCkRrm^5~V^0B9{o&(GJ5I>k8#}En^jFFb z0+#f+h_Ck2&ZT#jhK(Hl>9Ut${Et!KF9({+16BTk3ctfCL1;t&P`_UNw~}xxVK0Hf zAid4MHP0HgCV#_^n$L)Na*I~&NK>wj?I$hTn%!tAmqfbImQXQVxxK~O4X6qNz<8Mj zvP@Lb=2>={$@;wJeRxBkGifO@qk?nzFZ7`8gYV?RV}CA7t4HaWMyyse@iV}8=`{!_ z#@gaOPCvO%7#z5PSkleHOaIpVzjfV@PtC&_bfkN39`#SX;}&(Spf`Zj@PrAdOm z3Qkdzf;wfBv-d3LC7!4TpvQQ$n4c!}=6p+L8ziy$eN|woN(FoLBJFt~RQ)A_R($40IL(*B@MWkko_AL_KB18toN1rVyrop9bgWn3W1G{&~TraiZw#CPn?VC;9t zrVCog7yeSNHDN36pM2br@Y4bI@=hvI1y=y|xsuYaX}dt5Bs?p_a{lcKA5JF>a>Sbf zYD#Og#rr{997hKy^S4{hZ#nEcW2OnPs9zX}k%l8Ud`Mi~AraN_xo+{U*yu~HaQ18I zR&@oiahDTOMFUHKxFm!cF}oMrH!q+>*H$aYu`@xMa<)%INd(c``R9wDUm0>;t+}(E zGNKfn=^IQW$jEY)8gW>TSQ+_+s9J1}<9{Rb$Cew}$K+viVyGZZr( zf9fQT24++gu}$oN8^i~!bzk=@&>lw4r&_}t@qen`B}kitF5F*r&siA+`m3cTV3~3u 
zu<@fPg`H(fbCWJ&3;cwXL}DVW&wgO!$%j(*agCx`K|YTjq9zuR`+h1a=e$ovF`Y0U z?~&M~KX{N|Si3$yxSOWa_7W0)bAjnIIFWCi6?^({K=DweG|Mx>x@6Y;@z3qCRtr~5 zYi@%F`%LYrecw_9O%5S<&E%CfP*qRC8~Bh&fSx73Hdyz-#W`V9@wM$e}K zA$joR>K4XpP16v}O~bEGij-4|kV-s9Gjw`}m%bJJ1AJiFczR6#2l&bZb9SMK z%^y=;qP<=S-n*cR(LTyYXuq%yfsK=4b|WU+3B{^#=`)VihobE^Bxvl$e9#9YyHVc| z`Z~$oH4gv%U71Bu|RK21x?P{1qfL zSRqw4!mf+JSvf1^DllA1gg4~au&16Ni>Q$tb)zJ8TB+ONcIQAs$hm3(I!MN*VY@_e z&F4dWnAD!vTqy~J>eTP6VLP?jyMLIx*#yG##E1(h~M1& zMfa0`C`kK2BcD?V%O+_McrQcYZNf1(n-{hJg}fU-ArJ;jxcf~QHNX%jv-`!+EjzO& z23|Sc;ls|l#pabA-Efjrr-G|o@YGfDcC&uwAH4Hji`eFsQ$=_ZcZj_6d%9YT?N{h$l~!tjJ=M$;&c@@ z5sYyBVgo$~k6_K=_Gh~E&%dD4R!xKps=_V5TIk5{w-Z(-yvV0z*>MsZ(w%9c6?t?G z*1|jPz-eNuuT9KyMH4N7cgnvnlycUWH>C>Ihj(mN;)^HjUla7j@wrNOp+gK}dV^hQ z_S&nio)o2DkVz_2^`6PqW1Bb*G{bv`S|aytIO@c?%gcWmCsYq&&vod7uAa2k>Tn0) zFr?KVG>;rlhh?2+ys9zlYxrMcoe3v(*@}3asWNe{Q89dWDgsC@IqBlYRM7JMUnkwb zoxYZ-Y`=bid8S6+)8RhAq{ax@WXfHr_w6u(%Y;wHb&z`EjqnH9kcQt^Z1~=EHVRpe z<+|^l?*EVA<^SuwLI3_AHg&l%PS7y@&g=3r>}b>Gp4As&zrFxk<|P2!IA0L!h5eF) z=|HO|(6uuxOLtgO?BrLKEIh{gE(G*2^!AnMfA4L>-Z(GJp$#qj>U@hl#g#WdtTOfI z(R5b^?3202Ur@R!GP}f$m0y@<+TkyKx1EVz48WhZputVHc{NtoA8VT)n`1Tb3?mT^ zM9TJMJhHXBLA!4PvqrqFgz>h{`;`|T_Wl9V%2s?gO(3(Bl$R4lk(~r9{vNGi5Ag$n zp>&D7zY90N%*mKLe?-Ej#)8e|%%iRO7*#X(!fv#VUpBr4M)$H8j4@vt%tll`f1D{; zxL5?YG%qyxWlQw?ehd@En7)|0Xjd)+c-a2Va{335I@WOEjqApiJ8s9_stR8o>WhE> z0O><_t1zzDhKlBu1%H}p)~S9*rDD#Cfiq(8DR@Zkzm~3eQGz0X*pkW>tPX_?M^ME$ z^_MS?C0xoiidD~07h=3^(Mn+Rs%02%&(ucGAlu3%Bwe zxv-XekYiQ2eZy1TAqUBtu&}i@ev3KXqk0o}rv8#chl7t~S-US!>nI0xFnox>06@oDmJHM+U5~lz!Q#VH+j^8VkP%gX^wr9anruHoV#HXdG?~ur^ zl83|v5p8J1m2e{1R?3WvdE(bRh}~8&CdbMyL1B#=suU>BKuz#dPaWJLo|HD2uTL*= z@lA?k{?$Fzp@MTp_x`Pp_PS&r?N}qbr8Mn|5D{&2(52(v?^#8_$pD}qBADxIr%2`w zL(@M-@+dA7f3USqG}nP4pa=L1mt}DaPit5{lf@wdDQTevIp0Z4nSi{j% zribu-q?#Fh^bktec`12QMV%%Ml1kHJJ9U-aO*WPd9eFH2cJSdZ_l<9Zx7LRemQ}5y zR;@a!dH%IvL|(4R%(3N?O|m~cnhoO>_L)z~UcrkoOCeWLvRm<@miuqoVF5e?er97i z!hi@(k`^jny7@F*vYxMKd_cu2x*t9R^1c}sqF-c0J8hv2_FZ)C|6=VegW~GLZBG(h z0>PaoNN^8sAp|ElH16KGy9NRT8g~osH12MVLvVL@cX+4YId|rqxl(tgYCd#zb@l$% zUHkv+^{n+OYm}+QmE)X{jMb&dZ6EFp2EgD9xAEoX&)*-3gj|Lo zK6NvFC?%ASYH+YAd$(lgqqFj9tlDEM$8yA@jUB5;>b0E7L6x$H#}Xzq$EudOlbu|` zI;(y!#@e|D#_i|oHaikYT(n9nXrFo%oJeW=u&T4xrahD9t^@e-bCjB|c*NN_!a+&W zS#oKAPxwcuq3V1EPXd{$3EQHgP%Mjdta|dHjdI`fir94(tta> zS0S?5+?SP*P@&EKovENw^}##QL_d3v&OY3SPR*nbQu-~;TCQ5u>GHotRCK=Mg(~s$ z_nHW!jQK^&^5(JK2Ldwv96@<1Q;~NqKXRogLdw4oEQJg5oEe4fA@7-yM$-hIdE)Pw z#`h#`#lT2)L^iKB$B{rni1(ehdv5c!3hlGPUk5=8@aRR|5n8meOIQkH8~ohp%EQv z1`^TXGmQRNxRn9sl({W(76@M$o8|nCS20PZjw&yoL;GCgq`(Qv$?93U`r_HW4o(br zwP#Pnr!=XJ;hI5POXoWp*cB|YbYl6d{SkR|g)G~;zr#HZFA86h)a`5ci(-MT!8_*i zZ>Qps#ry+IJiCnG}(N4zPkssI;sp4COsj5RvCP zeB&m6ypN~E@G3!moSzg$uw4@7xuM9igL^q1p2&WtmYTdsb)x_LCyu*Z5eN2%e}DhhJhy82koP9>ynZiQ0TO?Ry_I4WUPf2% z-1U#$C?W%2c!vXExMJWRIpb4ZX-w1=yXPhX5e=Gi_c7sp0#rtKDp*ysTB9Q;Y1gqoKkRZslX z!9Ql>ug%OGz-ntUW;>hdm5fmp`-$aOK|WBp`vA5JHVdb~8+`m)dJhV`W*?Fc^rrx7G;=M`mns|B~L8Gsf zPF@*`=uQg!&6+vdkFP#sA1tvwXFajQ*_zS-OAneE&0s6J797yaob{W5DDF4e!my9b z1zGgP@W(mYtBPSBb7RmP0`@q3DBo%D{1xQ4=aM6|-HR@`gVLIWq%hSf${n5$1$m5; zuIlzl+2fw?*eGL3`bmXP(hVFuGd(_IsCFZC{Uema*OqKCj{FE5Wf7tA9Ac(oOv!B4(|Sk!aW)op^ubboVVkc4-Cu~jIDEwFOu&d+i! 
zPbYGRi2ZF-XjEaTKOdon@oqkNI%kQP4o+{T|JZQ>Nj+x?RELw0c_|>CU z!Kz6uuR!%>c3I%nqHoDB1Uv4u_A_TOKZ`!%78cQwK4n4~;4_?4HN*C$@Nq^WLGB@?!I9%7b8eq7c1;eJ95!P(6;%5!9)}hEEMRfHfx+5_Q&9)*V@%e^N zG>%2{oYoc-%&>h9hx$c&P1O_=OMX^n2Qxs16nB2d#!>&hQ__BIZeY%bsvy;C0t`Ol z!ygV4Y@>ei*bzBM`!xC4Y=(XpW!^hWBwtnI-e)zz$`%w8!(|VmPio@6VZazelUzFN zX2UgEn4U;wzJG{Kf%-t$Hr=QN$XLWr2RH2kX==~?-Kp9#UB74Xah2WXvNh)gR3?Kx zpcZ_y*QOztUDNuG-4IH5tRrCFPTNY1q=_9Q3`*)iFIUw0kX|=qec}~b+tlR@{#`2)k@zn_8CDjE|2a6R!DZEM`-h;^1oqLdCk@*7RghmH>Q;Hy z`1`WDlNMEr>pE+FG$Db5<6MJ~3=sw&foz4W>;5c?M(O4$Su#}%J(w8_z#>; z<>({t=6tm63w80*vh_7jNzMwH)qIFpTwfJpPC|C7V+^}5)5){oi1)EebAv0aCajDB zf$WuO-ykd-L9*E16@W5?Uzfb^x|+X!dCr9y$q`g>!v_($XbgZ=m|>xRpA#?}tJBEl zP)ib{Lb5yJqrTp1({xr^Bt?%|j^E#f&rmi*_bO|P zzWg9mkC4VQ%xq#@SE#@FMfDlEa_Ovxu=m@cXRvyJ# z+x*mzsHkXh?Z2&dDjLn#y5g$niXX(`LrYG{xtWG+Jp~utWtX#4)3RTFo{JuD8N6c5 z*Z9Cx9`$1XE~bH7(MQxZ|M}6*Ct`@{gZNvS(m!wkz#Al|z=xq_=f8b(Aea#-Z z7E6HVSQWI4uba1!5N|+jHkOZ^lMkr0_QSF{(M#EQn#ca9TpP}dbw$}5gK(9Q%Z`e` zf8bbPYPXQ*&l*oY{sAY#=<@Hu8sWwxbt&e^yb*ns97**t!vZsl(%|ozBI1@+P*td%kP9v;n;7w zs#VZMlce!>VWSbH*D+PE$E|eaM#y8VoWRxhH}r`Bp8H4+OG4(GM(e^}yZTPfZ)W2{pj55ed!*4xKS-zvzxVFE)G9Hv^p%}_NdnIl_I4K!i){qFjzyefdS+8 z3i9MIS4j!Shj10{7;{Cm3%Q4zH=;9DC0u!wQ_a?~%BjeWfYQT)d+@4+PHi^4Uo{j+kWEjxz@Q6RHQ)c?(q(=3_z4JU84_oN;$9Fky6ygOVJ)c z`xY6i9;YHgxoR>y z^7uIC<&q=r^P zz*poTDFYZt2AS?8*N-?HSjOaTDua3OlK;94X!}}>Gs(b2xq2Qk>G4)BBB-!jj!;a~u6kiv@3=oO^l+%u%^jlW&IzCn+E_TwI zzzubp&zf-!s}y^hyBXOn_m(LBLPw7Iehcqm#66#J6g4X$g8Ll=^`TH)UkiIO&!0o< zix&~y2=&nO-*_>oHC(yY$=Xu{mbI#?wzlrNE^R@=51}bC8cTF8fFVDmRD{fL{A!I^ z8xmzO6xC%`r(p1gu(&NeAOK8G-%pWm0GvA2p3a{Mk0$?eTr7~lpkZz4_`T)_GBR8Qy5gQfvH!UqF%@Vu~oU6J*V|jYDs9%71#Qn5A^jfC3*Q zLtXDCeU)3z$qnWoElcJXavQS@q9W!azQr5N4Foz2Nb#3wJAPD=bjRe-Y`zgV?51_{ zXyn|zJdJ*9w_LP#*S0C0Bp+5}^hB!3NzAYyDid@!ZaSu&InU#%msE^=F)Gm;xkER$ z-7p&o5%ri)LlQpDx#SG#Ft`V0K-|$KzM=NpnQ1ts--nD5kI~}K!Ju>W=>I2~njQg` zcvl2jIeT+1FAl&uRq(~=j`e3Z4StoF`z=?Trs?{`V*b9pZR(72LS}TQ4Vj0IB=b;rwVL1NoQ0vL(o@WItBC9$--Ar)lWlaWe-h7j}99Zo^M$xmKja@1X_QFnUB89GFC zlwITA_RHmVZ*XG~D)8%alujKF6$tN@vEL7?``u}Y9iKTX5h0vi2PO?T z-W1R7WGzI~$~neq!b9z>r*yb5-F?vsd4n{bg?3_qBF66~s*5+I&5XmyY{{WSPqW_Y z;fa|kNIQ%%Fc!tJO2vM1>zFtZd{jk7_C=}VB;M><@34j-vK_lMA?I>uQa9W^%PS)f zm+nxL%$>*JppC`n#J)J4@+e9tOkT!QI*N|EQ`eI8_+~GLd7K6osFZ=&A?k)ZvGC~b{($Et&Ru-| zq5{YdQ_VWOTy}}IEhZ)N=`by8ubEU~iHyYaSDz8s6EVr1;!n||daMUTkF|p6)Blvv z3{X&Dd=a-rgVudpAvC{V*U103RCWqGjeMv@Ekp<&;y)>pS(jC1jNP~M;rC0_>v+DY zuf0fqUE`ds{Rd9?AGkjdVSh%9pq)4m4A9LBx?O@W;f|kpia&`TGtN4ADE>U;n^; zP3oWv%YI?qBzdbB>7<^f@2B1<9h$8`>~A(BcG)jdUaK!b!C?2kG_d`hUpnQouDWA6 zcju&19luE$f|Jg$OwAvqv($Qc7B}P;#4{AzwSo)_cj-O3)znD6u%bMX+|qZohnq*a z<3Bds90`NADIXaHe50KTOJrQxXq#KWT%3>Am`bKWz4Ktk)-PjUhQ3H&GAuQb?USPx zSXCy@krMreL)xDhMlD%Fl{aVU�z}kBy(lpECAp?2$3?2Ce?C@=QTLjJV^{=f7xK z=)a2^#182G)Wm-WF@ccc&LCmd?GY0dg6p34!?ODQKPllf54~8IM^b;Aptxv@1)3s8 zY~^tf+97WOalb@X)8OEzCn7j-q~(;YFY0L3?~bi6=0%#NQ#y#$Pk2l~mymxNISR#& zx&1k9oS*@(&0f+)`hBuqW6IKKdhJv2(=-vKr?@TuMwC1RZ| z3yOf5VKN8tL2qgD7S`4>qb*4B8Fg}!vY8(djy5q+@SAAp1Xay!Gb1hHhx(S~GLE_) zI>C0z7}`Uha5!iK%&A2o+E0Lu@bN?+Z_gKC`1iOz3d9I1tV?091&&CCBTD82~h!FUUZDu2bvOc$H zp8TQ09Z`5h5v70?wEuh~JkTdy78+)=yN;#F_<4)Qp&tt}vo%BO29fkdu;WX@;e#wI zb=_PIJ~pe?DcZ9R(i`O#-loh*DDgz99^|&Umz)o1TjR|M0dk5|Npqe^a7et4tFIKp zw-q0x*@z#Omc$!>?1#o=D~?*QZ}TZn7%8HwZb$SYGxUcKRdIE43m!$o^XNL;&UyK z8Yxm!_Hs!Ep@;E}2Zi+=wR6N4i8hC|$KoX`O#GDC+pPkyl*YhG@oDriuo{iw?7fFc z)c4_GEtCOaP}dtHg3bu#FKQFuWO6lkA9jbr1}CRC@R!H6_^j)yz2fv2Z!Nn@8Wj$b z9X9GDsMxvYOy9Q*a#`?@tKbD5okpAG^pByFxaKXr<5?lZi}-#DfhLOYlEWi>ppt}; z7i6y|w#|u?>!L$Bf|6~j#(Ak&Yt$>%qP4+CSbmY|aDm0PykfhdF8ou(r=QYY>dGgG 
zekOfFuJ~R=STE==(W`tWz5HtRKC@T1$~rF-!WVQsqUattv`NG79R1XgTVu=?qQ(CD zqurRx^H^<;GyIGLU9BiER#N4aX#eQsb-t7TjFUt{F&?QRS5@?FSW(+0%y!QSxC?y@S80CWPmo6{GlnP zxmNgD62^+@JrnRwH!<@@&?INyj~*r-ZoZv5<4<{Hz?Lykz{xznft-}Kqg*vEP6T}g zMo$EzrR`pG{7|Df&97W=VM3bh2~KM@mdz6^VBleVt{v;a@lBd^!I10;XtkLQCqnS7 zN$Q0k2J@rkmw-}EDU|`qk@V!hrrMa@Ebc68XL>iD2FE7bc#9r9-Io4bqx8x;X~Nd? zcDe+XFoH%eb@6TPm5hlbqofQ``6h4j=jD21GM@q@2@h4L>~^}61Beq(Sbvsfw<)|x zDvdJv%yga{gJz<~hrsxV)NfGS<~QtCgjw`uZ>`fD(oC)K6ZipoWer4c{hfyULUo6J zw~d&wjBFz?V?kR%;d`m2s8-6GyUzvBfcTHUodN`qY)HZ@wqrF37b)Qw=%NQ2{!*hV zC;*$D8BBwuRAE#x&Pq;ElElpw!bTKi$`njWF~sFT&iGAgrv*2qrxnMUwLB@+$q6?fB)l_hB~_U%*NSESh4ns8EcIf>{&-9D%r#tBv% zA34@qZuo*2T37Mn1y_;#ap0qF($_asDM&yA0I60MnAU3aP>_|W@0M+p<57m&I4hFMuhpS;v_ zR2Y^2bsl~$DDx}`nUhW_W21~Xb!qpJEkxf#>oeq)e+301MDUVq^=;&6X-nB+*c|%M zO=$b~wvJgSHMh3Kblco>*0`>WG`*>xlCTCE<@6X;|7hM!Z}kP)TapW^%-4rQ=O7d^ z?ScS~WK$0tvP@aW`>YcIm>}6Zt-9=X00X9fgh3Om$|7%?DBr#{F;GXtl)N(^D>$s2>%hd1FOZP>6SlzXzbXK)_{EIIr2B;R>X z64bGm5T6@+jq{K??*acNl?ngWL+}sW6A`4k(+cwV(dZwzstnO5g!;>g-Yd~f{z**# zJ!;$a*J=aXx5<{z)s-H2{dr3=p0xER?l?5!&S9}M7+tP{PF&#Giwx-*=Qir3--@Hp z@}aDr3XB{pFSi9H6rZCanm+wfl0>|a7<#g#J0V{4<(d$ zogvl3%tdidLIXw_-VwKHSAYLqpZDV&scGse5MY4!(ANB2a zA;mpzD9T#L^1(Y%Rm|-V637FDAW;H8~wrOGZz3sEvI% zRc9`PJu5|Q6gXxf@E2TTUUx9;4FGr~y?E9P-Jgy)nLF=oG2KI_{}l9%UvnEBx~m_*H7lFVA^@zuQoh(< z(yy>?YxH&%K@za5iu?KeVO;QSx3lM0eQSg0<>?pG+s{1P*ZB8GE6aLpOGe!)UbiJU zYdwZ*E;5wV0L~x=sVKUFgEv~((a4o!VSRokwO^dXuC*IIHeq+uu>Ac4%_WM1BATED zxc&HKOs7j9Z)VwIk$|&Y$AwbT$uFoax-T2`+$h333L`xw*C34n1&2VG$0Ya(SRc9- z$wM_Pmzd22-Vho=M^+YbX{F${!+ttve0YA~K76trs|=wS#8xjF{R-WBaFCw|R0ek1g=qZ}Y?#WiJ} zvwA_0{YOn3Bp~H}%cUNkvy|hEy~SGwvAc+^OzqWxG8!W%AC7h`ou_JbRX{Apk6TEiR zOs*(MH7Fy(gTDaaq27P&90UzGD)p~B^DVI7*GzU>zGkp*^Fmvll_trAIr0lDvd61; z7kOre9u@rRUs;mZk0xRF#25SY;x<}M?FUy9I*{)+s$U9i^x%c3pZzP^1jhhG!;Z>|?KRok~^ zB*`$8rqVm?O;+^lg`)&?eHw`NtKXaFsmi|s=&-F71Dh~@7P zy3s87qSKoqk-HBqM;`2so2b6+NnXmI3#P0QBf{KAYT72XwpkYc3@U5V`iMm8PQK!( z*dVh12P@`$ELWb#s4{=I${_uNxw#0;g} z0akt_JBGO%J~2jWik8D!ZWm5$CpJOm2+SLR#(ib~ida2C@oJTCo_|6!JvizvSvhvE z2@bv}tv-n}B>pWE?Ao;Oj@hHviNEY>z;=2}g%?xOI=vxSnL#%MwTb%d z=6x}vq|e}6vzQSST%4Dp*<6Q1*dzjz|mBbRr!R!ukJ2MB$K z+bMIS!2s^&&p#!uZqBft?WY~4duHyK$Gcaa>~mi)g1xHh_3+7+Zx?yd+guUDGETBa zCSRv?LYsbmRVp?{ouEl;rvX!eoGA$H;I}%t{BZqdNU}p%s3zNtA^sNjvEX)Qb+a4|882=xQz+6RNXv^_3QHe`oLFV%i5>5iQ!ect-u@Yd^QKG ziuTI3o@#6kvE`AY*jn5t=Q4ovp?v-0NBi)+MUV<3WY>BN7RDBU`P`}^xS6GCTTP}q zUSg9pdvyBnm0PNJ=5s=!Q~hGY**~zDxQ0Q77DO8L>&Cu4eA+0JSP47|SW_j%W=8c+ z?V7RoojpZK3}3a^3IjDZrwQoZ$e?dgvhc%Hb2N!IdnkzUxe2 z{vESDS$gPL0r;!#N5)Z8L+R%3LtWK^QLiQ&lRu};M^;o#!`F(HgbFj}4Jpj2mYU94 zf1ZJQ_MZ4KEZexCdnM|$qs5fbrDJK`WU~ahW%>BO*hRQMFaz0S*Mk)F0J80CHKFPx z^YZHfNOHbg4SfD-5;*SP(+U6-+mjrVf;pVC2Z59mcBGiS%nohtkQs+$wN1Z~D^c?P z<)!h}^*zr9h~3=M&+4iH1*yq7TNi!mkg%%+V6<}b>P3r|Rm5c;$1!rD8t?L-}h5x7Q&JR$=}=xCl}j@Qr%8uymB={PS-Y;GMUo zG?7pF5nl{e*LJ7uLpHP%tqr(A_$*lfm#yf|K)!?QVlnM-^z#nE^-yaK{o(gk^t+XC z7oE(zSBfd83AKvLAW2G>5@DBb4cE6b;^Pox;y{#R03` zrxKlc+^J zyRyp!{8^GBo2{2=easuHJinZyagGV_%r{XxVcq*m!*W`BvTxVw?Qw5U;>=eWwG_8H)?ot^)<(o# zrA4{Ck)G;{GC8S4gGAVe;6&7>80zxhy$lE!R`XXQAD4c|c+Js7aXICUP*++*Dxd{7 zoHPrX#qu$V!c3xt5;ijQ30eH`kai`HuX@W>-oYxd4$~^t`}Xy|z-PYlGEnIVK%8+u zS^|>DbhN(MFI_iFix3bO%Cy)7{W5y#v{&d$$6T%Ox1Wm@9Zrl7;`|^%UaoC>wY*Q^ z&QEj;$+GK#3Mfc^lt%x^74d|-$zWdH`B{-$=zO(--R(Kz>N@*ghOnoebaW%x;*p|L z0uR`qT@AzPe*NFS)Kn+%>wXB0Vt+JCxp|)k{EMxU`_nTe8MhyVzM{}g9VN#_5rFoH zzSY_1*(O2uBpG=nW%bd9le`dkq9Y6kgpco;xd={360kC8)2QIn}fg{fB!i zckgqs&0%vo<*V@V(UnhL!Q`R$w%Q;J zQeYzAF$vg+PWp;f$q!%Pu=jw7Do{6x^}n2Xr~kW( z;Qv55l^6xdg(Em_JKh#IA0Yj3)&5bsoIBwXNZgDNANv5L;KeEK9X;*Z*wxRKvn$ZF 
zVYNj8uwG9-0j28F4I_d6hbpqU4kRK{CgMM(-3Z=2oEtEx;xb03>112TAu43p!ha#! zKs;*+{wt+D$`kJBWNwP6xr5<^Xp8!-JbhAH+$4S(|6M|Et4?3Da$LdSD@b$Pj*i<) z`9OWSJ#AL0wD?gIli|>MvdKM_qb{nVi~q%1IxJPi&7LMpHkl7EnQJ-fue|yLks&Xp z^(LJ{%bd)9Atf?z@XIuum1kRgmua3Y{lbQp)Z68wP-&5R?jNwq(LUPn2gbYNc-6w=J@EyQuIHlwLBVQemxsE*^CW2)gOSMfMG13WR0^$G_cI zcz|;(oC|b?BfH%t-EQ~cbD-_M)EIZM-y0fPm_4?=qdR=VDoy&e@v|V-hJhwJb;KG= zydROBkU$d?2lE3)daN<6()Ufg#394i$PqQ|It*O<``y0do`)K65#Q;?7`t9qE~KuD zcN;x+J$8i#q+d$&>cSS5P;TMVUb;Uwisl&&X`neQ{Eb?vl3ws>mpRvVOTJ!iSV(6% z)!j&Jd}$^wGg|x9t`iS6&@RwdUKp<=$Vl#}^g_%I{}gYdfVsWSJ>AVIE2aT!KqilR z3EeNQDARdv1!_?`$(+t}ugc*w0jN_PZbg8M0$ky<1_JF*B5>D4SbcL0V9D52Odbm1 z51_U3IQ`-2;P!>;)1lJ8e>9e<4Jib!$NW{zYGFI{xMTcBK$q4ms>&$Sbk>oR!EYuh zyyUN3JOhV8AUV?*%CF+=#Rav|yQVbwhrud;{4joHSJ)~SRF9w}q2ar(y))$GP$>^cot9kZw($yxdLL( ziT&Ki57AuIJ<W5t{Djdz;)3Y^*Rp)TVu_=j=5iHuP#%4b-my1TO2&g!Er7f*Lc( zIPP9R(=0B>EIfz}RgqFS+eAl>=X$POW+bv7wUedb`2{CTLjDR-+vk1%*%PN1fG$0f zoxVKPYP?4j=)<&O`cWtTgwaa;9D#xnA3?#)xv2^Yk-ga0SEuuA&B_#@7}LVP3lB@F zGC7T*Pw63abX5_wNHe{{Xb#)BhvL)B{qk{=z~NxJHnq6SX$8v;nAVKY#BL*rtVhXl z>J0>ajm>ohjwA0_ol(AS2JFX)M4c+v6?&^hbvtA4VAfaEX>cq%{fwlo@(w4K-wdFn z=zUa+lVj?tY4J4T-`UID>qn(Gq$aXq9{|a2#(Dex!IYBtGQ@-u_Z_Uc>VC?PyjGj{ z^W!}sMk#6+1}Y#?yCK{$l#lXq!i!){r(_L*SpAL4;|tEb3C!1QIEZJGqa>l*oLFEc zNfQ9acV@kwNL!aBqa1`v^Oi8vsCD|Yd$=YOW^{z8<7I8tGO|puU*)Op@_xvAD6!3t z!%F>%K!HlUm9QB0Rgue)CK1hRwm%Z35wN-Q%5V?@vi5}bWg{M}W4i$Vx?a&mXpGvF z)t~MF27cb3-oWtr>d%xOwelJ*7k4>Gd8E}0=nP;9`3rR1x(r{Y8#}n#q3OuI3T9k= z-QONFjg<8F5MJ^`BS*HSIqk}#)&vW_QK~x+ch`lN==eL!hXF|a7tIw4v1fS`>Pzm! zERxqKAB!?7p3ai)d`ZVWwgd~?qDnL#PNqAy?+-?{&t(-xjXrtza?8Tum6tEnxujBc z5sTCq1>*0n**bhd8<$ffCHz91KC6J#r$QNwwYp6m(h(vzojM=Vi=qwQUvKyeqb@{6 zoh{AOzAx2&mjTdWGzfYO3e3{8K3N4Mk9}-5~k|T`K#_nA;ou#r_@Qv$$<`tu(KsFV`|R^rMRW5UDH{fT0^8 z`$~{Rm1%8V!0R&6f0-hi;x(SAZXT|VP;IW{uPn7-D=!AzH9J~_tpg_(DQPk95eEk} zcf8fEB}mBg0V$C&{@QlhXkQ*vGZ8B-)K>$LoF}v!T|Wi(xKTsewK($Wn3mh6IB7Pd z&w#dy+#`-vV?8Qh&23W0h3A>0Z;ZWlM&7d;V?B)BpV2Zx2RsW=&SLg}u&Qj5xow>z z{n&gBy~tMIW0heomI9n-r~1gt@>XN>oh$yEl046t&1K=H@?C_n92F(^--5?KzA?m{ z_t^;IYEuSJwQcPTUThKtXEj&63Idm6yBSuH-Wh`;Tnxlly3J8Lw4tgJl5zRL^kNiH zZdxOg`N(+Sze3W05Ah}1C$=cp5a*C*MbSj3XJyr0xRAr!%3n}LYL9hINztnd|3qgD zukQd5jws2i9LjPL`W;L&y<>fwb3cP&dZ~5mpY`I|0YXwRvX`>Hk1*3hzS?rvMx=3D z=^OTkmEvIZrfnF2Ca`?I#bcq^l>sIapJaTJhYG147L}AF^~!f&;{Kc-L{1VyJMp2l zOIo6mp%*NC7k1w7pxx?Q`9#V+;)H~Jte=I07m8xYnx*S4|I;x>_;{z9YrkK{MXFhW z^$XQ=ybhE^*3^JgJ|o1a=aoZQzoOOIk(h=IjD3lTHEW7xUSJpw_ZDb1g&z2eZSxYUC5a#FVA1X zTcEFA?VjApZEEU^7Y^_iyVBCit?&^J&$(r(c{YE>ffan|BOt1%!DnfDIAs2EoqEGA zTeU9g-2;&1o=JTycDHE%QrY4XQ(Y9>s7Lqbl(?xqwnY-Q%rL=bqSC2y|LYlm4aos3 z)o74NmvO0_{rd}r>BFz1T*Q3a6ssWE{JF-)Bs*&+{C&9TZUD45m=pSqT4Mww4s;Y9 z5L>RTQPdoOWQGtAT^&Q>iw?NG)6y~+V&04%g`)vZW}}Me@tk5I$Z`rZ4Sd%8OKqC| zYa@nZx)c928_jsx!k04D42zx;Vqf8z3VkjU;FBd7&3*N!o^Mrwc^Q${uT%J^TC^^4$mc(W1oQgVW#Cn+M<_rdDMaWc!V1UTt_mvq7B+IOfmWP>U3bTnk~55R??MQuRdWH=LZ(iM{&iZPBy_Z~0#OvLO% zU5#kuo`r_;F62YMXF@EphR^HYV|{gP_QaWb!NVDcxCD2pbGF{-Fag58Z1iQ1Y>8B~ zQ4VIYKuc37eQz=KbeR--?OYc=goS7)kfW z?X6y0&XR`Xhl`vd@i<|~L?x4~x4Vrl)!Tz(_cq|0qZXl#QB&wbUEJfS3^QE!Q^ZMd zr|M{*)MEaVjiMuYK_AOrWCtggX0BSz&y<(`sOBXR`)T5`Mx4RO;o#lpw(G9q%F`6v ztWgG>)4gwwdI{PpvWntKZ2;P>_N|bHT7m-p-#B*g2rZ{X_|`J8yUnIMPOrTU=41JB zYuL?Vq=E@)D9~<;>&#K+%N2L>xe_C#D36nl=u`_V(1@7RJwZqM41&!@I%=F z1cQ#3-CUXC>+p8k<6n170h03&1M3RN#J-)F3Ay#k|8L*%IE zHzm9|;bPC5&D`pJ1&M8tMYFLfcex9Pg+h zzOeKBMd1i)OsF~wc$Mj|^?nFbML1=WhAm`TNPVhEG(;K9w-sxE?D0}2L)4OFoowo2 zG<#1RJcAK9f2e03g_P`$HepLOEcTe3gWwJ1Nx_y0!W#Ah#*3F591`xG4g3J3B2Pwt zV3im{5nx#PX$@@`;YGQu5>wLHd3|@;01N|#GS0>^5a1ZiRhD7qC&sR zeLx7=(*hpKsmU|v59Z+lwTohOC*LF9R5JT;4Ey8Yh^aV9Z{OIGYj&={0jt)Rg3*dy 
z#~{b7I_F=XWU;to+Xk3*0=%YWK;CYF3N+>jjv7WBHAEEvzrS&g!9TupCTVfzzYC>q z2XfLS1hT9FWeVM|Rly7WLT7?Gbo9`7)#D%Zf_TF=G6&+v?YcgE=y)J&##EX2&#aF_DV{_v;q3y~o$>5Im(zA-SLgGHjPLGG3 z2G{=?%Y+L!bz1LDSJy;_eU-Iw$9;YKmdukW)KD8$^Jze;$e8Ky8<#F1>L$n8rtva5 z4f_|=5mwL@v%QLz5$6TK?}xRP;fd5Mh`hMp2x)%aB$bv+dEyw5x#ea$b&!7UvcRVt zm=bVS2NS5$!(wZzlDbPPFEvk=RG9||sp`V76V^USU6g33cXaVegC$I0R3%ZEY4EnR z_q$JUZ4eef_HR2kXDxNux6*}2w+xpvs{tOv}RTVuc$zl?8SLn78W7VGiM|?c9`MOUEob{{zRIoYpfd zq-bVZ&c`a?{-(Z9k{o6Skqg5d+@W)zNL>BZrB-%Mo3p!hEvx1`UnWeVNW6SlyNnJ* zkmJk?K2Ts;%YP6{Y5Oa+%+Vq2$L ztFSJVd~Drg#HETjsp4aU(#bcnf4z{rdW{D+c18kbKA zX0nDjzSp`LEkAYy-RW&#IbNL&B*e)T9OX*(W^ycYGGo=wS-0!LMm|CR+;UgHX1h-h z^WPsnsmrk-+Sx@uRf(92GhhSg8awBzpyupRyl7rSZ}0_TYbW+UzcA1x*0*NthMT^C zT3f)vIdnd!{0Mi2VV{%m)%z$hV_jaYs=9?FW(LvNA7iUz`aSUGm%Ov`?__K&gc=&I z%EaU2@DhapJq@3ehNQ+wI{|%dey*8-#k0yF#*irF;~!0NThdvJ!`NBxK171qZ}fP@Gb+-BwVtDFLNy$&W#K=)&x#X=0}j9=0}n_WE|NtnFx*j%uV8)4aDPjBl6T? z9%(}7J+7149*Ume`;#mF_;1-wS zJ#H3u?R*2%*2j0m`Jpn@2?B8i3S#79_(S2nJ@ve(d98?%C84FI;`@9467)x~5^iyf zAc2owB(6TH&x|k56l_yl7i{5`A3+4t1mjY4t}~3Nl<@;M@R#^^#fJu{`aes4^hg~M z3F!&_pw)<}Qy9j0KR z(r}A>T|J+^Qvia>yP%w~GmzM~8cw>Ah5v)EzYL1&i^9B72n2##AUK4^HMqNb z(8k@}-Q61}xVtp&?(P=c-5mn_-=2AA?#!*abw6~S>hs}Lbyc6e_gd?Devh|uKb{Kg z!OHkv17ee2+~_|(R85IcK~FvL!65A4Uzmu43KA)(EMPLiI5~PFP0ZZQ5Z zk7bH-E*^h=yEKf()yw8F3e|1A&4@fwH3;YC=ci?7b2?NvQSP6Ysv$Q)dMCZ{Yh*!L zouO)(FFlA^(hge_#+yb{Xsrh1CpXc_VmQpX8Be(D_m^+=Ht(?>eLW5Xu-oyGWTh(m zdaE$5d|`L}Cq(ZFxcXFUG@15Fh*q~4ikD!S3WtyDaL$Nby4=x#B1io@rI)d_`Q@wA zk;IN0Zq<|~BM!>=kb+m#MM#YUFf4{Lj;6Yw3-7lRL5;lA_$isZrGrUqz@SxW82qeJ zH2 zVDuUPnR?o&?=%#YgL81M5bu1~Ij>!qfTWs(@cvqj7WqoCR2Y9LY=awwm?)@{sG8MC zTU9l6q!7`7vwGnt@y*NE`e;*Jj#gPYe1-?&4~YW~=Tm*gmoT=Lr5dAY zA?GaM@U2lyIplA#g{$f?s8+BX%hcOT&PnU!tO9R`{!CT~2j%=tFqR*N6;pwEz8j^I zq=moQs2`8a`H+MAp&H4USKzJ#w1QjVSQR0P1H*r=MK$_wH?s}aUuG@YRWehpQkQV6 zi2QR_QFyGR40#=)?_6ip0DkdJm5j}!*ND=TG|L3|n4|rb5F4V-boG;b?aP_T zWF1uOJbaCVg8M14$BPxEzkT6+c+tC$lC_Y-KdGxQ@0i=4aum8sXAYPJ%BEz4SXKzW zNvPXZ6_$2s$Asf5DXrVwx7(sDW~3iEQl8w#rJ$+XqCnZiNCmoO7vbN^hE2}oUPDBi zD?bHUTgc024)v%UC-3~b{jC1@o@Q0eCQIQQEg9<}*ZSo?AaTPAb& z^_+fDpX+*Lu~`j?rz=&QPNduTDw%tiT1ho%k2XjQ)J?OVUu)5J>#3H}dcll>|FHRb z|DL_#JC}S_+!B3;?C&ll_?qVq731o7Bgi4oPvG>C!}+|$yc&>I980$RR;gZ%({GVX zG#y8Uc+~v5)4&XyaE=;aIRLIOa&@FJ&ewWU8CbX0M8AHiL|(t2H|$#}K=FE*WAPXI zfH=Jpg|H%gk`GN`uOt#FZ;b^_E~CaI((1yu`LV$gi7kB$l3n#sH5zQvIo7}*YeP>j zBqF7>2%|bF-zqxI%}td7Ah)y&v7W&QoB79YsR5^$jrbo_C-tf^De^%Mtwz$bU47za zAOA^m;u1#jr4gn%a~i4vJfE zI5{T}-`&jRXM%`Ao|HFCMxv~^S(rhEOjeG7fnSrItUt@|k6+H}bIb$k3T|5X73%nN zk!e)b-nqe5v8fR8G&bCFbP}( zU%HIjkR603j|4^xTJx24M;s-KH>Q70IQzXoQb&P&6}FP1#2nT&(D)|`f9;hz$F zMsNMmEvJgDutY)ebtVGCZFyi)mY^>!xL~u}qacISFxJ){`FPM3Rx z#vN(9zP}aTe*T=uLBcs*m#m;0Hj-SOhR5T6x|XTOIdKwbEZLL`?RzTFwjCnU$%BdS zn(|6qaqd+);|9TH9>#ZO*i6PXGo9Q6=f=gFPx_i`LDC+zA~#h8(@(3O&m<(rZY#PYUxY zqAB-qgonIeY-+w=999ZQ;^nB7WyB3Fkb{$`1YRrpCVk)L$3@ES?q{miVN+(j9+v3n z0D~z&DmSaYka83YNm=r}2@*MG+3$>Oz63EcM?VaSdRupVdX?UT8;yD+p*~MO)$TUL zmQKM`uepUcRPSA{n99UqsR`YuZ@?*%eAb;FT%ML_S9KzRS^pHLq4Xo$&fqw@qT674 zKgy+VBzHGU6Oo8eXWW!MssrZ=Xk#R?>YwxwwHoBex;)LSR%0S-@jlWEQ%M|DHR%QA z;FvMm9c#DGVSgWXkh3%^>_pe0Nsa+PIdu0UZW8?+e#!i*6uZhjdaB&~CyZT6V-RX& zBFMMAuF5TBgs+VY)_I!WQN(RE?YpXWYF(3*n1nPpgxpb6l(UB*q^dj*eUHwga+Kpq z1ve=%3VZ<-n{+EhY{8&)_BtpQQ`TH5%EI2`C8)1S3}03OW0bY|)LT?Uohh%NM5*|4 z8p$anUiM9458{Pk-)=J!y*&=PN(%(c!q@xIs%9Xa58|Scz6D1;WShXv zU!}CEM+U;@6F(mHAZkfnf^CnL6R%MRYNx#P>i&ZwoydJVf(T2qYb>SLAcnVFw15kC zYOS*&u^-S*Uy$iY9QCNYjonR#30gBa2%VVqHA*+JQz|UH%)lgR-iSSgE`VDwM-EM? znJ($M9ldR7h? 
[GIT binary patch: base85-encoded binary blob data omitted]
zZTs~MJN$UfU1iV2L~s-}D>5`@bg>j;)m~q{tSB?DwXNt zbNAyvkPCa%8rd`WAw0UtJ6{ydyZHiP!WbM*+RSqVDcT}|7DRyM=y6}tp zAFeS8ZL9dWuX)!_DBjJ>rt^s<@wdIaJr?aXkJJSmf_RQr*aJ_V??)tzrxK{j91J40 zJjsbTqyiq{Od8gb{sYkia)=nWK*M(QgAv~-;*Y6J)e~v78VvU(W1u=r_?19vshaFC zkO#F8cX@|e>yvO;pNNAy0na|_wtUTY6oHW3f1spH;lAp&!IA+^7Nq4U(ey;1%-Aoi zcwc{c5nLb$c4qF+km1%r$PAfRcVBb3kOePl-Oxfbr9NwVMEadlQJYOkg9x#>PdP|dMfJQ7o@IB@$fbpb+Q2oQ{us&jT}xG};|2ax(}Rp=OGL-M zt0GRAF|vUDqpgKx8C$G?Bi9RNvGM21-dF*U#GJMI;c- z(eWsZ%x@X}Vx>Cy&4`!hH`Hhe)&dix z$~71ok&4$FSDL)lpB6e#;GYdxIvB>6x@`QXFr~@usrC;~qV^E55X=C#0giaDe0jgo zTmiF3*;pI$!pr9Spn%`$+oVp0UW*Se>Y;l^akk_J%-V;|@t@xJ8VZ|~3E0!2fsp7b z>|zwV_=KCn>^AOfft?RJ-ua6?G1yL{tRb{j4HY5%ly$b8qgN|5BwgqpzQPxoWtA@9 z%`n2_wQhI_5z=VQx9nAQ_gs3d2ntDJeKa?nB>GNL~8B`|KlM|8z( z?XcBKrzFO6~6n<1^Y1PRQwbDq{+~!AxE8L ztU%$bI%OKm7h-O*HfJ7>SS%}M>sBG~^&PD3dA4-?k-mn;&k3t?87O3ciKoR5$`CMN<<@8a5llT$cMsq{5983I>;>@6mO7GM45Z_8RL6lFF zf?L5j;)G$(J9{9k4~BN+uzM&W^2+Ay&+q0 zPOCI%wc$2_WI>f`{fMWOt+EQToqck{{#AqGoArFVNkpc30<;WQg`*ENLV*V;NEUsY zx^ChPGMdCxPL^SKLhI!kig4hIBUW)Eh66Piw{3Sc+hgETXFm3v zsUa)Gv^8knsIB`i!|eQ;EuEvVHfka`LWMm z6moTfmFAIw+UxBieNLBkIc6`o7wDEv)e2?rwTg$&c7_q+$w0~nP) ztbYFwMn$ip@B z<@AzL`?96$jCfYH9d3N~mk*)g;e$HW51uGa4A-&zY9x22<38KfONR;0ZjnW}HCyX> z#%_^hWl75(7BW?)s1)VR)1`U&@kkT!a(oA3S#+rt9<3~V4j8VUHhHx$JPd~H>7-sU z8uc_%;DzcGER8p;@h&5@dbiPplEyprSyqzFAN^dmK^ml!IQuvnE_`rrI;k%EL7nNFhz| z<@@3lxAJLsTml=H9FuZd?e*`+Q~9rnBI=V?lSgg60J>9fHTti9uG*4MC(lKBK?TzE0&=4K`eDU*b6CpYt{OCvHIUs8X)TBRsL6hwGzg1(~vv(Cpu)b;0SBj_f%y zLxXP!t;Bwcjo9nWRkJpPL=g(5}3i`ml9=4 zc5g!VAp7c`=d61Qv*ZYTgz!0qKYAkcips?PccDD!i05<2`7mMj#f6hDDgKw&B%^J0 zO8)V4Z`qXx;lAVbSC3oTqf`@kt>c@P9}Wb)mUP-^Y-YF3JYnsk$xc*}Ht-&IbClxK zXHBZMTkU0v8&CM4c5iWI;PXy#3YGJCdZB$mo2|E?*ul+Q1y(vlyK>oFp5Q|?QDM9o zQt1NuQ`4Q1`Rjmd%LU!h+CywogDI&;+D~Wf!Pt1}O`W1+%o=r#|0)sPS#aOV8Y+!b zIf_U$=wR>h9z+_es)^$#^XIWhvNER!WX=8ovR9}HrDm$*vFbWS-xJMSrDY2VZn71>3qQM#M=0^#vG5UyUS@hnkgZ1_?96X*2ve@bljT#sXl%xZhQ6G_Dg>C7gdM9ubE zHCLNhvC#i8(=$!gB9Jh;5*>A4wbqCnG;b;Ow6QGK&LD|*CM2An9=>O&tFr7ZJn5fp zm_DZQ`?)td{+^bb_!*U~qNv*k_+Oq$18eb*4-CAy{dx%Y2dC)H8G7Mi*-;(u4bX-; zTD6NM@;2AD&4j6jJ7RFH1;yIgMktY;IjCK`$y~GzP12}YeGGQx=H3YTEKb;CCc7swcpC0UII+hGl}zd@j>;vIPIzLiL0x0 zBq;+wKHNLU1s!O9_?VjCE^xNJT(q=&rzEChTC4w(s-IW3a)V+q_;L~z+@;rs>WF6X zUrXRr%eht3+;efP`{C)gqAF`v#Fd-uAA9aT2B%A^`8luV0jL)Zf>K5k2mkiyc-2yG zsRGti`VMg{7$brxMN21nsU2DhZ7p@@%`v=p66B5&JCmR+I`0~M+({5@FS5h{81LDg zdH){9?Xpou*;3m7RqHV++I@8Td_Jw@C5TQP98m4c&<`*&ejjuPLuA2`P{5{a&Te^n@KMZOt7*L*JPu26BV?Q#Y>Mu^ ztzTOm8NoM&eYK@KpF6y`l?3)D-xXfal|(x)Lc^IUnhr9l=$o2e3{%A?$-R%~e>wm~ zXYE^qAIFWpop3Tu?X;<{8#BvYF!r4YQu)!mqi*-pA@cBZA?ATcU z1qu}|?y|Ag{%?w%{MyCAF-)dVOY44;t~y^iA7A#FOlw~(qn!q6y3*$y4^Q(vU{3Y^ z?~pH>Z~7z5g`JCY75XGJxNfd@B4m}-jJ-#H&8|IH0P;DLzhwVdk0tea4+qcD>vi&? ze20~yRGC718;dU?brLKO0AoUa2+2kVC-x;OsRvGC?mIQe(5pQu`0~2$s~`D}%xjX6 zRHJ4Vt7*V(XM6hwXi20n<4)j+KsGpHf>g~W(JSUZ&}0MAH^i$6EYbTDu6%V_9yiE0 z>o#zf8~xMqrcZZ!od*V+op{G*rYCr5|K4R>P72&iP2WN;3-3CgxkOzV01@o3gd`^@ z2Y3=s-g%4ImdbR!`{CQ131s?&l`&cUW$$OF^MwQ9e={X>ZI%^g4v!K>>xSL@1m0|Y z+dLZX5%%W~sgMDI9(dfxSB`jB_?L_%r%~XBIoj6%FYW!Mq3^1TU;Em%2oF>?&y7Gt z9Cl(>_)jPTkG5qaR$wm`Vu#vToW6N~8GKiv#EqLUn5qfkntkRl#jtqZdTo6=(%;E$ z>&16?s1Z0u)s1(21z)2Gh1ElPVM%h-pOvUPUrm1ulXa=j`bIh`m24etiX9>1_TSQy zn>)QSa|u_l-w2Lg<*7rvgkLb7x9bjv#`-ej*SxTHg&L@IEmslAu(%4G9Sg53JpP>L zD7RU?@;^l!&UViAg#M%J!}n#^<(CJi;3|h!0f$>ZAz`HY_pCSSEU`6-rD05llrPS? 
z>8_n_wE^|6XEl@*{s>DLr6_GMI^p$09#&^AFJ>Fy$C{MJ$4B)as8ZpQo^P(=KhWb~F8_|qvR%1qK>EC-VH0f2nHb>rx2X2#;fQPS zKo!QNUx^E)-|Dd8Lh_m6k@F9GLH9w)wgjffkICtNs%#ByvONq|53XlFY)7pjFJn;$ zvvH*d|3YT7x2ir17ubLvq}BhUl=>RD9yCcr(1{PW11}&4!F4CGFjchRW29PZbE+b@ zzFC<3?mwO3`6^?ncL+mKc2Sw3Zup9fPlBlIA+%hVU-Hqc$cNjd&SLmAENv%OENv}ES^XqO!3Cx^giLph>Eyq&>18cIu33MPQXzrK$$QJ0IXes?^|Jf9bxBG zAtXJXd|pDBJg|j1RkW%_61{AK_*xifu3NtXJkfwyZjsj7^FY}_=l!UUcf7dZZD>!R z_gx;8JunD`np7}wx(vXO9|#%G=(srL+JSfejHa6f^!8c^d zHwxV73q7`?&#D@@-|rPkrpzI>J$gZ&OoUY9_qkRI_1Z(q9M0VQj;Ap<%6H_5F+Pn7h%;$KoKqZ00G-d(tOAvj8xj&W za9}*y2iU3#rFs<;$`Xo2SQ1A*z2Y|@6dAcYb~vMh@O4nUMLArVDv1ClHkxSuT)EOV z$S!tbsPf;K`&*l}R^bH4x&7exYlJ}d9w{;@5{U+fy0o3&)8C%d`L#c(E1PI{mi%H} zbfi(7m~Q_{Kci{()Y!b2wU4@Hfh%3PFiAxppXvg>xzh!sQgB1H_9bH~yZqY+Qp#SA zEFl~RpP0YYU>Z@&g{6nGjrj&yUj))21q?k!=rtM~Eh6*PyttCv>KK1f zWDM1|rhme77F8c&k9UKBHpB-7wHQ_l8qSj7*YK>HObWL|c2-9v?v*m!&K~Y93jRMi z`T^aTe^OG%1C{L?^~L?2-sq37HdHJ-uBmDlst-aHGHGV@@5Vbg6ua<*gKoHA z17E`IZR4z)x|>AfbS$s%;cDn}p6r_dWpo&*x^e=ukgS`wbVDmr^S$HXh66un6If^?G0o=|hR}b~=F7L8PLaaj7IaSt6(WUr<8Xbeu)*T&1X=pH1syFohm_w6>!c#w7tJTg8Ui}1w%TlvU@{C| z?;HTSgBH6K?|G|tIK*WVh5serN_tMnGlyU}dV1^aJAowCMrhv`zo7aIaEFx8a@dZV=0mOKp=eBL4nIkQn1_+b?isu4_oXxib6pP_ioO}SQ<1MOBuF@X2ze`l zbjN>jNBXX>+kX$Kt>^kBM7pGiFVI~w90F!*{JXh8!%H@4%nY-=@$*#q948v?BZi-3 zDVGJMqJx~>B-QQ7xkm~HhfR?RoCrAHu9P_=DnAc6AAiq}0uBydiz#J{H>qx;)anYU znn81PzDcS?`OE@=GjgN8aIF)Rl5#ZWapU7R9kT9IyT3b6RU@Gj!N0D0;R|PPQo-b} z7NeTHE1G_fFDBKG5u!MTDF@T87~p97uE8mmC%j2`shXcY`wyfPsL~D8202;Mo~b&~ zth`OGN*i%w`D_~UXD3o;SW!TYaa;EuSt)-&k{cw_$}JtOXUI$edM`t2#QJXy9V;>T83OSCfXn!!ouhp*VZ0s!|iT=j5E`~n9YpAQ^&h@%%IWSS8FFzY8^AJLaO+=>@EP-|_{+MZ#~lEgCuM>T8*zO? z8;h>vE)=MI%K1M*2zT-eCVWr$6o8b#k#JZ0z67{5=;0d4b}O<3YWj+R!98q!J$Zr0Kl5-3xSvjbֽUZ#?zv6#s zxF9y&I$jL~Pv-fV0z)k=x7=i3_)-P>SeuM`fWN78;nv{DWsm*adxXLtLqOJ2X9gW_ zUB>iY`rTzJaYFwtw%NXlyZwJ0orPOdkK4wFq_hIk3<&`#0qGb9EueG=6Da}d8Zb%e z1_?F#E9ZlEw=+xI=c_b=E5*SXGlp69;r&n?;OGE8{Y?DBPG;79Bs3BzUK zvcDbyIl`THuhXOBk^k_2AR-(cTE>~g=##;64K$A_Wxn{KnYOQ~^jaI;YsQ|Rgdn`E z#?dc#%9C{gl!ucCw-m7**}xbOwLH%q#x*q5!w!0=#&s>1QM}zrbD%&1oE)|%XwAr2 zc{GB~oQ2fof_dLsXO-EHE3#zaLqlC9nfHvxptc&TgtI-EV+bE_tYq-hO@_;hHNIl>9VTk zz6%E%r;s}sEP4A~8xqn}Trk!4tY(B@IYv9WT z;aZE#r_BxBE9v{Uv)}nV!0^2I20}9;e0zKw{Qtrud+DuB_Lnky315Y@2RT=`ZGO{F z;l@w0`&@rq+k7CN&r2G<;Q!>z9jw>bZ1?DXhJ?|sh~wowX=bC}yKBK-XN&_%5{%Z3 zsnVxAp9&&V5J?hG502c*%AUQtCrZK|0)3}bC};8;=-oileqN~48ayRFX#^euTn)0h z26a71(!W>F66|0`Xlzx}$RO)KcY(WAbm`U$)sYrVBJRLVL8?`Honx;i$_^6xx8ffu zJyfC0vtwJaOBHTOlQ<5ws7s;zA%n{9)_2$rSPH?BQk28|k4CySScSM|{}x+&M)!#Z z4EBHX6OEP`_&ORCCTR&HmwxODt!r`oms4)(Tzq&DSzfWNww3atcaWXDOe`drL>#l! 
zw+PI~c~GI#PrZbRfK5s<&Z}&*2Kz1;nIus7!k0Xi6bQw-|RqwyG$FScovh;-{~EK zDXlbKyH3Ukrw>yJ*10g?H0|lHm&;EF3W`@XyuCd+HRLX(eYm`iMhCf)p=v$mO!|H5 zQdu)=zyIwXVf};!u~gs)^mZz%jI`fg?xhEG&)2ZOGd6(QUkXS<&;qji+q>7h@FqbE zB3X@3L0l@dK2o40wU49Px`(?_sKSRV2XNNS^M`1_a@!IR0%!iXoKsu;XUfz> zdAO|qU3R4H(;rXyMWi?Jp-lkz#P)=2izkz5oozazQKL{)%gh@`NRvVljb{ohunu8+Kg}u zdK3EaHO{1go`;<69fm{eyFnM010pE`OHyRrmCG9d46N6mcuEj1y``x@A*NLu`!S5n zgCy@QA>*Kc3z5-I&Rz?_%Qm3zH~f_GP7@8j@|_&bK!~rmd*fxknVx#joFy?f#;X$% z<$tB+hxbPu&1RRgm!%Y?x3;#?N?3E_?}$@5Ve~f)8bVfR4GHet{s%IP_d(G|K8M^t zxR$w&6#g+&Ciw1Gs$V3*r;;DZL-H1BPH&*cPc3I0S3FY_9a3uQr3N;sM$ddk znt8IwDfzqU?g1s>TU3;vKJ2m>SbaMZ%%2L##86GZ;$!zCwc-db25oLTFTky%jGP(& zAYY+`a$iu5eJHeLR|`Cis);OfKu1sj@rV3UEe0euX&u>hucH*39$*2XgIiTMq6XuH za(itk3(yr zCw`mIrQ2c6mp;T17!5D#iK4Y`1|xM_GW)M0kv=MgzxSvujdtc2X zpKIesjtzN5{o`B9~d0f%m~*fi^IlAm%lCa2tOc8sR&- zT<~X|Jtj16eOZDlVk;7)uRkv8b^=-*Rr2ZYt2J9y^K|DNdpwz~U0J;^vd zNlh+!XfJ_H1loZdB>GVEiLc*X|M*F#&>u{={5IUcT9Q%0J#CEvK|eM$%nC&Uq8Q>1>MTesU!*>;)B(WL25*F-!YTfU}&LKAEmcO=!ZyV$Hw z2OuC@N15X6>?miV5|xV2eHQw>wcurJea555@PiiWE+SCua3Onu_~u?+ohH-(xnAi| z`AIU_?l$(@PX~ugZ;=t2n3z&ZAA*p@;1`<<-6(DREiogRs`vGsnVpsY+>b>m`TDI- zb=wdBY5#>(7%Uoc=2cE?EZ1JKPx^~7!tFMR>j5rWG)J&5*GuZuma;5b8aj5RC*D-g z`JMhyb^SqU1GVR*kL_R?-a-CyVoE-t!Bh`a*mS(tLg`)EZ;V3XMTqX>^Czido%sQR z7mX=J269ha+MLEzA=bb2b`ooo0w?kg_2pA(1o-B!ZUj-@AsAiYC;R9X=up42B~nV3 zn$~>x@z0yKWoSYVC9!4MGXtKIl z8|q3D@Y;t0owMG0r* zBEZl~g>>w9z%6ES9ct5nv>jBln)9%+j@mHS2isRGy)8*2iIO`ABR+@BQE6h`pmNIGbt%!wP04me%IP(Loc5##FHuKS!KVl4H&>LGy4fXC%>bHiJGmF1H zJ<5M{%{uUpl^L5iQQ@$3Q1G(tU?d~6mRy-F`;&|(-HZZl3vP1Ssk63z#8{x7kYxbN z;3mmWNg{QsZyoq&CH>$2Wo22%vo157+8YkKLdsVi4mS{A&Qu|WN-AeMjmxwX?EOa8rT@iC_jCsG>DJm7r*RYQ7yvo<6ZlU8Alz198-_4ahwT&q{DNdE8e` zmH3S^sHsG!ZC4)=AgI>Oa2)0Ru_{TOqbW{;tFC0x+(or$yZO`j$gc~v@yYuZQFR|0 zlVE3DaG4lTK!J==j0RUlcaR*p2QAajf>{c=#2SUkuFEVdeZSz9~I-~OZ z+S)j%a$4 z@>CcN5Okwv2TE#w&UZWWajNxicZ*83LK*U7e|Zv>O6qEgsSai=W!LbGc`-8%Mkk+)ue(NP$yl90@}7B=TH4o{ege@JLnK1QVqO zG`s;NrwmL@H2b`7#B$zLzs9tfU8_X{f(fobe=#3I&_lIS7X}Lh&5@+Jn?$YX4n+~eUK@F{r0%P=amSN?a*Oq~nlc0VBltT6mdXE$5=tsqik#KchUp(V zZ!{k32)I5(eRv)v1%ayH8cRBWDWulEd#l@8>7T!Sv7gE=LRxq@T{kMySJIczb(y@L zYS!#N6QHY4L;o=BCA|%fL&N|iTEvOgvLh$8G5|2+p#2cEZnSEk5(%DB?AU6&`jV`D zOd+!N<>YEFm$$^UoY>24;B=AVVZB4$n+Vl_%c9p9L=OC=58JD6hz#U}G_tuzUG&z%0J3H19ISG;=#Cet^+R+(dqQ|e1^t0@$xhy9CJ!KoZz z=pSe^Bt4;w5iR>g-Sw9}CirPTFvSN%v;Zd3v;4Zqj-_8tvL~yUsp8`ukuBE+yPGc} zP)nHMfDC=~UsTbj#(PRE8Z;EOFaHCrdB_#tF6{bLC`ERu|D!_y@c&%PYc25h8zBd+ zziWka0cq}~oZ=VpfX1-IyyZO@?Xafuww?(R^(2f2oi6obrg$0H|EGsAyqwgIvG8V@w5DwzvcaV83q@O$V;Bvn7v);Z(i z-cpZzPO)Q)Df2HE)%$#U97^5fF(q9wo5cnSQu0*0(0Onl1b|7Rv$sq)h|mL*P&@A< zg2bItem-*Gn{KF#_bu+LA>cJV)x4vGm2S!RPsA!P_q$fY6qC88lJ_?3mI$i9f#m+E zykp<>eRiOI{+@4}o5d}Tnf1(I8Ms$LcQzb5IZ^9eo5PEIe()!C|AFZK1A#`cnGmO3 zRyb`O6$!fj3Cb|^fo0VCx^a)0-2tP@*%OzQe7!%^%_|A5CL*ogXSsp?Phd3L{s|f9 zf&W_N?WYiN$_~c*Il|AK$OL>gXyRU|=mT|-cmz~CCqX=83v`E_q#eSr{msV6@Mt-F zSVP(vndxW^&v$ZU$Njk1Ttv))7OT`0cKvK@7VZpf%I4qrJ#ogqKqOVPhE23GNIi~! 
zqj0U=urLv;466n9L)3Vr>sBSbUBhz2caK(^y4DyCnyVq*RWJD$JBEmfQ+ePvfNnvi zi)}#4;Zb8WU<@^5f~fWH zqRIEkOVp=3#+*7U1~2G63H!5W8@v5gN<31CH@LrtQMSSv?hlCBS2@+dZvr62NBf=S-ZHzaKq59wuU_AGyLnF4RqHK}z zaes#dgg@ToJ9iSGz^z<5@hOvcY?QzseZDF@U?i~4QU$MnI4>NK=83*m#RMp9l^GZd z*&S=1{0Ew--hZx({;^4(aYxK=58v#aZa3VL?5+f3KB43b!{9W(#~|6FgRbx?>6+U2 zkAz*HmI^q8N=*o;zDrW9EDo^VSak2tW&Ev83xA=}Nr9FF>GJJv*sCLOX4k^Q1BmZqnUUA{pJmmw#e7jbx}dn z>mrHd9{`e(6nR(k*cTTVU2G*f{57&7)_a@kfq8eKT?2L7wWOgCXB01j@cZ|8MZl^$ z;Jlm+REAVIq#2qMqp3WvSXsCH{Kot&Oq;Xg#H?1rQ#0M9_y7)-9+n z(|%slFdHW>bfT?D_ks&Ycl+f|1TbJI)H~A(r2)wibFY+wRv#4Fa~oDEWQoIfyGtpb z&3WVZCRB^rmK_UZq5TK^AD|yN6GzrhW=zDI)3EmMA-J=v1hfPnOp&IVwSEu!9CMEt z+K~B)QNHwYT?qB$HwXH2@x`F6Q00_|x!*0=p%-&sD?|U1&c`F(SvM0|`4uC!`Q_nC zuL9-tsfXi}QOQJGYtmj)=Z_~ZdGQ5CjR9}RlOL$_SPTTY(S2JPC#Ng+qTt}_)ZOJk zQktwwj}cT`P5L82@<9@@`1GK=qnOOAr>axgWwc>WzaU{4dkWn*Gyh8O|7|yzXI_#0 zCNkmd$}~n^WRO}w*>B+6>NMqh{E>Qb`a8p?>PTAj{ zy1}lY?=D3SM3NxzPFu*5?AHFt<%b2C_W{c|j^>w*GIPr+SE0uDd=get-hK?M!LQ}Y zDI0aTxdK`eoq;bNQJWfllt1{$&1f~a15QwDvNrHFk+n~;C{bE!h`<)Oz_vnb!T)>W zn<~{F8Dhn<$)U0YAr`Oq44;l291Zuy7(rkYgOQZC4#%7k7ZrrV# zwZJFJH;s1<&8PhD0EhC`#?k(=)iW?RT=>oiw`?NW#RVlrn=G?B4@ll`>*j-4RmaDu zxz>f*IB&%ovF!u}`k!?HP1~qDNsJ^m&!f~7%(60?<_k+ckqOO=ICC6rTE%-DlJ``! zjO6T@PvxHrbu=k#QCl=5MX1=ned_j!qXxa)^LbK(+}y*Z-0`#0J4Lv~9F7C9GcD)# zU87*%{qpvDyz0kW6IR9vQY_82@B8DqU%)MCar_t%&vSc*-Fu6u6y8^*i zPj(i=qb%pxtrj%H7xVFg_H_;ZfaoDF-&=4FvVuk~KRUVClk9GFVPR%s^p2Nh+C__( zn8k1)w=V586Ud7_*FAXjS^S*7MLqE_wX95FSHvkyW7^2awU%zvdda=c6`HUqicyxR z>D~IUD>)Tl0xm#^)Va)N+h4qkI{QM0;@l1h27rNX$NdU2bL=w$f**bG62rgqL2qoy zErkVQR7oR#B`Nm%Q{twf4HI$xa)Xj#b+6NU*{D8D4Hfn)+?;n=hP5W%3;qRyrwsFr z8>7W9qNAi zP?PG?o?y`u&mU0ej;uM?+r>!KxI_!+1y%z{KiFX?kUW8oa_$Zlrs%+nDXJRN-GN#}WE2u9J

;)nHSEMCOR2qY<5vCck_q;_FvVeJ^-1MZ4|kL)BtQ z5B!h#% z$ohR+xBKl8C56x#Kihd)QnZt#3Nk#Q|Nok;)5fZ#!GZrkZ1Cu-BpNP%MzZwn#Omsb z--i58=Egtjue1`hQl)mX{aS0}?2S;m|?vvBpavj9~{VjHO zc8cYy8SZ}fKv_csz?VRip>+!wT7}`E&Mg`=z8w8b(y?cy8bj)$vfk%AhE>T9H}9Gh z1_p{8`S)oY{d+0cFiQF_P^G=4?hKp7&mHmged=pzh6nNUpDG-G`*21nd4)Ep((YS+ zU}5o)V&5)tkWon97JPdCYC-UE=gfJYpI@oz*N3m3ivnc~60d-35+@y|7sH4B<^R7* zfs}=4Imz-d&nJb?XO0Y7vXa(}H#A-2MSZp}={f!9^tEb-eQl=(R|Qp@5S?q9_x;kp*ztABDaX;Da@ z__K{x$tfU72YCf3D9e>s_%mBoZRj8*Q&<0U{yj6a6KK39=3<_tL^WZ5FCzOZRkGb5 zXI@|n+04apqDBg%{Kb|0w#Mj~ubEVJor_mno(+9d4gd2sPz8U~=|>SLcQvdxD%x6~ zQhvi-OtwwAjcym@LA=I8ct1ESMaQK{K-9OiR!KtYFF7Uq1#}d$S5*~)?f36FF4j%<(b9Sn`5!#(> z<9nvM@UlK7Mn}N8-DIl^DjoeHEAU59P=A1u`7@--SIn2ZiLFk?LxYy_OcxSAho)3L z@)vIp7+8~dE$bJ^f9*VgMuO{k0JUE`Y-$a*vc>e?A%`(YZX}xi$rtshcaj>9?=uM` z#QqA-cZ(xI@SMSA`mH=lFA({6d)OtlyAA_0aHaw@_SsUlx?xY}+CPneTn~TO$J-g7 z@MgEd{>Rtn_*HrEtrZ+C&A&wdJX`zSl(?*!tx8MA!0oVJ&N9%2qWem~N>oNBLH}D! zFr|`zhyN&0kT>7iG6Tm$ZEXhS!bM=e)eIOSZ{XkB;y>y0q)6fgW7js})8N4ro$joA z`u>-eK{Le%GEt;q$A2rcCSZPeS-Ku5kycYRXYg_DR9sj(j!bUYmR`UhObcN$c z`GC87S+C)=jPX9cTh(?a`TrL974SeO$Z2b`-dPrkbdWrMe*I~KQ?q^%Rqv>!yH zH>-cSHtVtrq2XMnXS=<7U5)H6Dw4C0wK@o>SV9?hsxg$nv zHQnXEtrJ#~_60z^T@8j_i~%qlHQXKTjw&2>t^J(Nk+W@Gq|`PmVDzqf`sbn5Vv7P|dec;luX(A>ip6{jjX&63@&Og$d6? zoXWrOqQxb9FTmlOPhe=!=qe}HD;gfBy2EVDs!a~?H`#u6J_VdnXdd(E(!o-o7L!Z& z&8(MvUk(dNv9)CCkCU3k)}M)0i?Pw1VxSI_xG&5=QADep7|Kjgjt=$W^e+`Xc&+yt zb}j6ed+38lJR|9G)z!UjZxa(B%3k-O-@U{QgDC<&35xul z+r$l{SVUACGUhTkCYp*!8}Y%N5p=qB3B!0K!(_Hd9t5-t648k#r`*AY>W{eCC!aS7 ztrlHc`-wD>WT#eE)(^D!Uz^Q&Pn&1R*pH&wYksdUU%*h!2_SSE>dNTgY1ppa>8v}z zo1c^ZW5;mq{f$PH%mN@F6TxZ+mrpyxF!aCH9Ar{P2;_zT|TsO`tUWniIB271;y`4b{tj<(0 z(*FF*MKy0fw;t1xVkzc=WU?-NJUv3z@b@^seslH@7Y)e2f->!uKY?!?U{9#tI-~B} zlPG`ZHgmo)c>}rf0uGe^Fsd#IBszS%PhwqgVU<5xEuZSQ@q^%d%1f68#AWe=nnk}J zZsgxE?EUb0$R!FH&gV|g0uj6U8SZ+Y0!&Lnyzm~w1qD1bt2^OdrGr4~a!&bkHplvE zve<^c?3&>+pO1{RF&1%(7=;I8uy|IR)qOPgE$2TKY<9vHBoz{Ey9lWRpK2h_uf9y_ z?3;Mad1iiRVs_*hP#qiir+K%(+SN-k9K2{VQbBte-#rl*P?y`c5ij%WnvJhBX+zZ~ z36NKol}*W<1z%QBC%0dw3hgu)A@E7%=ym7n&HVJ5=EX-=VwG{HqqS>h!!z3ajRrmd z_4muzr%y~o_1m!@Cwg%#)ww#emAgic<$?)A0yUDLr}6vKKygD3X}8s1T(xnr-Z4+j z-_zsf8wjkjK^oK?7pC}_HkBPDs&*Iz_3|Its8)$mkc3pQ zOh?01&xI#>Ew4Tt(2q#tj}I{t5K2Ai=*Ff6e4ya((WVCqTue>vZ>y}6MwF#8hSyk0 zEWDYEjdyOEbT9Z=?{|ZXsb41aB=p_;3Ho0@E(-|M1)>Zz9CLM!v?n!sQ}`w8KhO=` zEp}}pZ|jWk+jO6eX@Ky}l@wLj2ALcgGPBnOZP;a^)RAKOfO4CzcY^orZ@b*Gq?#ha z^dwjFC?Y<)ibEzwt`8;o+>u=%?o?~AQ%ZE2Qk?V4Y_UiC=D0MR-jBYy#O2M0n%3cR|t z0D#&Eakh?#$_N|}3cJ~samqv(u%<{`_TaVeSNu|sn|Np;{tSlGmZgKKn$%^N(!W{! 
zGNrWjC1bJ!B7uJc!|5p?BcA|D;Uu7JclaxVCYAauQ}Du=^J2(GA0-uTURU|VQ_OS( zv}#FGq=1swM!fCiSS@7<{1LmT!+_W*c;9a6^@jZIlNVZU+Vj|nH{sQ_$s`yO6i}HO`Aqz_3H7&M?Zu5+Gi1wD*`eZ+|`bUZ?qm4mygp>N?03AgqGtbJ}QJb)-+%pyNoaHN=WXIHlof4;-7Imw~Xr-Gp^ zNyLU{cA-4g+RaV>2$Q&PDKe$jO_j6fx1`zMd+v2)M;*hI&!i3}?9kRqd3K}5wJ^uK zWXwh^K0+xa{#HQn@@dTN_ODB|aT$Hjmmx>)hLgcw9$+5d(PsUW`TN<1aus-EMzUX< zk9gC&T{=vu8NRF!J`em{@|ZO78T3u!#J>KTt(3s3_upc8(~{w-M%tU899JD`QiR-F z`-tHmM5?F+QLj0EzXs9ftat(J08gUfJPVQDzDvJBlU~Eyh$wv532BHr!QAlWGr@K$ zw)*cb+>WCNX^_y7&j)du zsSJro;8)Jv%^?g;Jjb$PDOMe6qg>vhKlq1dU)mA*zW#+$zn(gJG*w%*{kF+d zp_xHEOmkb`$Zcj6G1~dv`Did3=B{4Wd@x@GlzgOD(?rPsx zHM}3pe+;Ivt@MUbDAio1Ud(`9~|V?-jMBPUg-Qs<}J3A?|tjHoRnX}vxriz?3lY|M{bvUO&hra_g*|He?^{pg&KRB!o#_D#Q&phX8V&e z=jf&d$%zB)p?4YW>Km=mq^9uzrga{f0!O38t?w`Un9LvB+uLg5;4jImm|7AJN zr964{ywjFjNQ^xpxczGTyXZ&JO}4yf+rQ==!?k|7+Y@KYg zz*#~_heB-Jc;T+KJPAkf2|oktp?=jfO`y;EX?M@h7Mmuxl06-ry{NrLi9!|H8b?tj{ScaK3+L1kXMIE3=IGT5u0l>kP?x1LivmNo z_dkWco!N&gFI>VH5yqUZVJdhj;d#U%dVgarH5UXv&U$2~pPd#k8gqOY{?c@QYVrkL z8T#$LS$!~vW+)O|fRj2|twib{pNJkY|Q2aJ_0zH;mZG6n|pW;v1lav=F zD~F}I)x?4NNhkrjbZaoeNsb$lE#b&%SWhy&yy__MNRiQq)&|HBxs@ryj!3}s{jl7D zi9jf&f*jhe4T6ZKxKL&-t0!~cT{j*bsZ*m3sgU{h;m_jDw2nBh5XZr3Pg@?Lo*KjS_V--Ga%fV}*ryK+8v z39zrS`Z@UK&|-fGrE1W}a@H+agiw(cGPJ1Pcv^Ele0oNxwdrjIx42WmjVQ=sbHsZZ zr$$QAHk*4R@53He0TaWBY@le>8%T>OCDHB~{hqiB44m!X8#m&X!_hTR@GTi_)d*eC zn;1jFPI|ffjn@>a{UFE9SXWp;pNw#{hpg}ewA+O&R_tAnkfW0YaGXf6HuB))EaGHG zD#{tsptLvld(UHV%+6%+7t=Fu>(;p<=VElbIsSescq0RTOG$D>5ugxIdwpsP1jruc zlQVr8Z5a#;N|?xdKdRJ9PrMfy)f^N8Us9Cx_|O}L-9gjcy}~GA<1381D2PPkmU!W=>MPw)q4SVl)JcCeGBo(ibS4 z7p^P}7{%D5XYTMY=1BS|L=D>05)%DaQ{DTK_X8h0!7A>kehNyNR9 zphdhuLI8ZLz09*U*yTv6@gIoZ9?-q)hMc+PW%L5%=a#fU`e0WLYRLxr3egw%I|0k? zI8wZ=4UqUoFpf&?QR6nPkK?&GySMS-_>T{LCX!LR7pm+JBlNqpXIs~5_%$S!}$aMXAuh_1IUGle|{@~62%x2$IlAWT?+tmd)#{yk`Ybw zaddJD_b>I%r+{FpH^Tc=4dskjn}j~HhgeP_5`Q&L3#gjKGp=(0v#Fqs*@HCsDQy} z4abkSO2Sm(JMRR`t6T$}RAp_F3HC&p#5Z>uwHGem9P@3Ba((*s^YpBrEAtZAVJ60r z(oPoHAEke;whsMOkimWjayYqc7>Bl*HtlUGtI0MhQNivGx1ZZMv1@}l>Bpw6RrsuU z<0@y`J@Fx(R5)_97ZnAJUL$UpVfLnxz`41}J$OQ(P@pd{%l6X=q3@U2^752Ag9evh z)ICv&gG`~XUo(&b$Uh=br4YIoRoVr>=Nc)zl{wu5K1cKIl|lYg1a87(}Db$|NjnY04y&`M_6_rvSk*sv5!g@R&j*7EW? z?dLP<-8xc$E}WhN0$DBWK>nAYB&5hLY(jbiv{Xzob3Y`K^z0)P|54(fbe%RFDrktk zh+zmwizHwx{2${cf#u91*DHJr2L93!P0;btC-A|ZCa<1dE&+nmKE8!W4f6CgXw!w* z9!vAJO(K;4&kNl{5E90QF{tU1RFaylH zkG|4}rJQTZY=Z0R`bCg|F~aNd1;d5EMOHAC_?W?Yiq9sUp~^+7Wn0|+ z&7_KAL}w5mt((~W++Nb0_ZBX41GR38ZJgU{A2@U{L(kf zaQNk!&B#Q|mF`*D~e_{Wr&U(W>@G^}N#3{p!xi|4?853)LU#w%w(B%_?(A3XKlBaL! 
zLA?74{x7)5*`wW$3)!^D>d*ma_Ll`AXP z-os=!#Yc=kozfXFUN*e8}edw30pPVNj$dzjWwZZ|q?qIxv!uwducb6E+EjqK2(h=vjb(tC)e%Km< zi%$6NM3i6X*%IPfPJxPds|{S|6>K_$(Fw@(58s)qHqP;j*lkTdX@KUTPFyDBXx3hl z$5WJq=e$<7iZt%C_{B~gWfmN~NEpF5bT5A;=FadRCvxS=AFYdQ?OsqIMve!4YH_@E z>@=QSZ9lZQWZX$_{d1p(bo6B$2xPbmILF-Yh#ebIyMEw@eN*`#cYo7|OXHN_d zzp)8)!=tUg^{ksj|8DlMP^ZiMA+Ly0>jKBw@1^~QI+$eV#QuJ5+t%3>HNVcTC@e7g zfcBvx<40mHQtTQw1M*>l7$aaROpj5y177nwQOW(x${QbNcCBSo#VsSh!|tglz91;6 zaM*3ikE&`Gr#(KFhVua)oFTS(&4g`tnYM2pQEOKaBAzfm(@005Z5U6Oa+FblPLc`- zi)c+1p%L4Um>I+J099}uDGxQHh_3^sE=Yx&!k zxFJiMx=isw_TAIpk z9f9B6?>~bZ-lzhHV;19}5Y>%13j1z(RhaGKpPM?WCqV&X0+S?lB zh*_5SkNoG5rOc~+R9bhC_2xNCSq>-Cx6YM)6KYrOB%49pb@`2GBIPlT`ayEm{vZPj zf+omyxB$mhmuwEamVHfeM3~3XT~qyqvF5QS@f*jcrVjj{PCsv{K8|oyMZAs3pXBYr zKQy;P86qTmCqkGW0{Rbi{inZ+JhpMm0*pdJzmx=Yco+6Q5{a%pO_F?&n48cc70~?x z#{dKOOll%~&Q|(c&hkO2BnxAvrg{mil`dYT&IImMBkVZAAHe~%xW{lk)CR!mRojjS zAJy8YjY|}}H}EFkB`PZzP5Sa;#{4yU+1L>7_x;}6dmP@JVP$=5{|a3G4`b+NAt>$h zxpVXy&=j?=hJbl%(C;|*UVp9<5>D^TUGPmn$1mcFVRlU>HSJ?Kta-7ks z$J5~#M4iQuh+F~%28D^AN&1r4@-R^7Gj#DYkLi$VM)0rgi$7K|h1nl}3&i)6gX~fV{{QIH^6T1qUbu70^ z{1C1+kIAETQ&%%Et!dH|Oz91zmlg~f``@~Ue8&pzy(df#C(PL}n*F{*zg&hAsSAEq zpzqjgIhN2ozAXW2a~fAE_8);%gF^w3n7I&Ycm)>HultIIHx-1mOWI5`l6*wI8fevZ zI7pmRcomP0QecC-Bx%ze+UKyMi~{?dPq!&3CSO}Ht-iWeBiHjOk?RU^QUy_smu|zL zcdnW*TKIvGB@oA=sC3R99$cF2MG;@UicY%fyb*7AcE}AefG~9M8KFl3V@^L)E{2Q3 zO=zc0Qn*FU+qt*Gi*^F(xDc8t_6!EeGRMb;0Q+eEHYb|1&*XInb9K#wp&|!KUhW#zEL$w&)YY1*FM$fP z_69HiRqrO`1cBVR@}CS#+dSqJ#2ozm4`k5V3;3dvl=r$w0+Z{h3Grula^2%a?0~x+Ra+muyS20kDAXlA?kkK*8wqL z5lTJo-XO<~Z3{mfm|j_36^OlZl+e3TlZwsHQTkl0h_Ji|{ylSJVEApjmg$({Yxwir zt)pA;Pee5u&dOu>KtUh-*&pzleljlDwwqpk*{GAR!9qBzK9z^}T~{_LSdojIe6B$$ zpnWXhsbMZthu|uD1NZ3lI{%z$U&*-c+dGZ2q6#&U-_!T{$K|T-;a!{#(%t^Md*Z+D z3fGyd+Xw}0tO8?&5xe#G9ZHohUlhN8!7SJDSv5*<$NJ@QOg>BHuvgd}`E_s1pkS|n z9=I;bFr{PSrV`S1lT{k~%xHOnN_=FgJ@-N$%NQ zUnq;*&4O-pWVYaatcn6u(r9AfKhKR~-iE|Dm18{UdvR=2Lgp*)G8lifG-YfHJZku= z#91Om@W}3&9<@Xg>|elg?$v*wg?jAE<1QewF3i;XN?*k6II&d4+r!wa&Ba;Y&r?Tr z9h8um!tROo+dLnfhy?bNx9i+Urvi5R&yc#$`_7_dJ8R8fOO|dXq!KI^02FC%%T`%u zWbw`%UrCgxos_!+SG)7?%qQ7x@!!U)eeg%~b(4;S*Ikz|ElXF8qqyicK_uB0_$&NT z4&Dr0%t9#X@w4H^w_KN6RNMtAv#kxj58b2&p7CD{Uc~?#8q6VwDb<-4Lt$M1<$2Bd zq?OA;!ab-*q9$j_5L0Z3B^;}@Cuay)*~lTzxxpR6NHTQzKuZ}L!@~1pXW4ht1w!M1 zWcMiC`Bn>akB$sz>_#L*(*pqBcrl6$Q9d>v`cnfE*2c(j6R_k`vx=`(UVhz_&))<-@w3y$ z8O>96i>*PoB6jQhL)*e6YbFtCBN;{$qb7&>(lKn$qPnXS77wA-GF0Jp5VFIB0~}jb z<4oD6lImtdtfe9?Aqy{~6v*llI`JRqWb4iieYaVci;PFtnZas%1$X)>+7_gms+2y< znzjX9NaS#QEKYc4$hB|??T=qM-<$}B-?!a{5UmexuR#sMqy_%A8ozdXSwmiH&StVP zf=?tWH@ySOb32|GkcLoiyA!InA3edYn_ghomn;QYj`-Q?V ziYFQ)wngIT1lKNJIjn}7bz3?uP|}YzH@mXDt8_C?r&5%b0%aV)ToYFfFS=3g2!Ki> z0jojuvx&~yCTjL-T7KlVbo`J-ph^>}Swdt4GOpR*`>4GHsa3(y68D?qWl)h|%>61K ze$t(weP^$u*>bKKPN=~;Y_k2#Mb&n-gmQJ0FQ&H#GZ04jk#9)dt2JK#;C5wqM~~PQZN>kh*LSOTof7M2A{C^==P;3X>3^{euMuXBahQHYQvCY+y5)CyLND?yJ zk2RP?vRB{Qr@37H&vuJi1t`;>nU0F8GF z=w$+uCRyRnkGDL(c-~JrT{VWUO`ANmhS|bJ@y<9QZQ5ZuVGAXDm-%+7+q{*Qo9K4t z!#s>V1;hsD?)P2ig(;R0R{dGaA~EZ2clf+m@vNHSdo%+z=0*YeGy_rw7Bc`LG)L$T zwb@D*{|t;`jn|Io><1}**wXtKV%N;+A5K*2&MHyXpj(W|u!eS#2U!nT@o!X)XokHgzDR)TjlzkxJ%T6(!} z=*bXZa(4q+_MZbY9i=6IGEW-M>aP}`&)zP7_HuD0(ybVHrddZZuuB zf?7N5t{(EuD{^R4|Gju*@XdH-_>QGH$vVm7upGdY;emGfvLooWm|~a)^JV9EW*hcA z@T9n<)Py)~<3+U8q+G=52_=}l?-OppPmRG$Fh*RE)K;(CHa~T%mNTjD?h?=^ZSS8E z_K@h{w_yo!hA{k4HlzXiyEr6Erm}qzYf_n$arOIs!Wa5CgN(kc9?V8F1o>!2ewqsF z0+PR{rE*z~B=(NeFO2M3^XkpCe?3vw;NSjbeJYIvC<@~`3*EwZLw-Q~y9E;-5~8SK z**`KG8at(8aUh`QwN~Vn+Qu&sBK-=Q0MRX?kj!%#f%Ez4)zjK7gf# z%bE}9LCfImAGuLIv>WlRRiGev+pM{eO0u%6i?E6yTbIIW*sLzgc4A(M6`wLJcWaKi21foWsUn+z?B8i`Vc2Og-XD1}e%i)+C 
zA}9HbFtnM#{57;6MK^cW&?z`3K+#$BM~y(>)iqu`;8`1h8)m_}s%7tZ z>R>ayBG@T0^UBGe;vS5%5KQAH^vajXHE@Lpo+ncqy|;B|<#1O%M>7l$8)l0uQzlt= z*LKI<0)}9iv>XQ|n0gChoC8dx*NUT7m zua$X!wIedLLT~~vuM`|z?Oh3{9E9pW6BE2I(5%w^{Oa?iWa55GgQ}J|!6JQKvXG9c znaK!*@>H2fqqxhJw|;*1L-f;MgN-rj9UP`A#cwMkRje(&(!C7zF3Y2yvm zoP5$gy@(_jOa@?9eU1%uRb~&;fAy?@NH-ar7r>Ml$}&mgBBuC_Y$VO<OoU)XQl%j0Gk|FF_hYUVu3e7`(v_^wICx%p{m|un#kV+(ic>=hay7I-u zZ^odYpmJImt{r_46#b#e^mQzQ0@ON4uJ|p@phZe|I4zKVkotdsfQ0A;=)%Ei7nABW zLvP!Rshs#bgf znHg`7b$KQ9!8ZztLd(GabtP4DjR#qgwKeTpV|!hkAfU%I=y-g*)@{I7iL-ix4+!Vs zz5r{chH<+Quu>2s11wz~tTgAtc>&DO*(D~`seDXSgiWh{Kh0NDdWCr z%({;^n<@Pw|ANLwcGK%xZ56zM0;;tmfx(>$fhBz>;=o?})VQ}zyhwN@W<5_{8dO&O#yg=}9*+U=p}3BJ()?PR;^G1{jmy!3q&Md~fglfxY~uvGuz;$7 zZDo6GiXNbN8N2uA+ZwF_oA`e!#M?S5792Ec_Uuw#L(=8tBX_(tq|bsE?<#aYD&aj- z#%TKjo-O+mUl&gP<(p8V0o-1O#pHE3VSr;Z3EnTXEhpLG4;D>T=5P{J+t#Lmq7-BO z584r~V+v7CeFjmrlc-8nu~n2t+zu*NKp74{(zF!d>dCYU)39Bb=ix+k7;ZB?w@jEO z(tONH4;lO(b4Lb`%s(EmA?Sb!r4EKfZ8F8>u&3}Yxzsz9z60cs>3{}UeWe*fUW`Cu z#E3Bp?DLn8dEbp0Hn)ZEqnoU@@~8dzLC{HidVYEOJfp`(skgQ4W>cKeqo zZ~&dWhl_iJa#2!S1-C89Q^1L361WiuNw$C0pA>|yosy^o71Pj~f{6|Um)?`tOSjik zW@F&o+24heBrURr{!1iNYijq@U>UOW$uJXcKyxbo(R{jpO(jrxdR(FQC-V^X-tCm{ zy8R?cOe^Z_yvp`-akK&_t0zmZ0ou9AIMf)-y~9L-z_e_FYeqDxB{3N^OsD%eub5#U zR}$*a+$mydx;s6PJQa4^$VP+dt|yg@=1#Z%g6stwzBy5z)Tnzz;omlx;eP*$r*a?8 z8kvJ+Mzq5$w{d!jNV}tuAtfU1xj#)8d!3{fhvB_^0Ov5$T){hPX7C{>CgJk*7F?6- z!dDX#w9v$TEXmQN}#0QVmxc)#*Srl7&d4v>|?p$#h6>JG6@{EBWue}!4K02ZADuTmh% z)rT#=fj2ELk_D+Rrrc$S-wLVz6;9()OahMd>T9dj3!n#vDy-L-IsURlb-Azkj7AiX zj?BVHSkzEy8)NBgkrr^3_ZSR zBED=>C=rbPs|?fO(Jy~)rMkb~*DM_g96A(!cvL}Wfc6Y6d{GU+epgU@$NR!a&%V7$ z)!%3hd zO8zoG2fo*-Xw)E_F=fwr&>RQkj6+49i26lkmo1p*dA-Nqp3vl9+D7o6yH|9*tH_kb z_H@!`u+=*3%?M}tGQxZfT!e1hnujyV)Y z22x^=NBUG~Rvy>AtD(!jjbKsyWb{vqtDIYkxE%02eZ9LxGrs$IC~ffPS?p9ekz$Mv zNg-S3^h0}v`<`(}b{65!ro_!lnc{AMtV7C@(*T;nRov_fHEepwin%d9P!~K@P4Hb{*Y5FBggq~gJla!cg#{&&sJ*YJBp+(WqY-s0koXf zKK{FjXWr;XGN7-(96>k46@tS^VAVL%zmHXxLs;9ZQ`#;MLnu|i)|ai-hWqt4di?G=jkQj*7O;s?cLp>w;S0|Z05sSIn{T?M4~Jn(+oBp!C$GZUYGg9SLwGF zPhA~0Tk1MQSV*guPPk%r;O%XbSR7Mc>B=N)fs7~iV-y#XpZ_wv^>w%VSxNHcGIwdS zb2g>IY7w0i03Fb_69+|LFr3^!3XNUyF%EHe8N1Q&qA$XQZh5d}G4nsJ>~;7$oOf3z zF-;XmJ>nn(=DRqX`}0hOq$L+boZ>|09?xV?+n2QaS@xBBUIAlh?!))HokB5DQ=xz- zFj9;$Yb!|qr+)s6!ucCNg8B^=>RxO7mN=DzaruUj07!vo@~WZ4Ha~^GQ{OjvuWc8T zah2$OD>w3HD9K9Uhn{cYBV+i)xBxG68Ys=DfDT|x>9aW-eDc%b6Sj-MUMkQC8n6LU$jxxh zRCq4q&d_bwB@k=|>+aRWGDBPpZa6s6`6|Ent!bDA)#a6T1efCOcztVSwr9me-i6M9 z8<_^eQu0|kB=b(&{GtgGZh7(66-{lEc}@e~%69%Z;F%oq*8)&K@Dm=|`*@^~#gsW3 z-sM)$h3L19PrzChs*TkMN~gG&dDH3~!cy-pPOJWhTd18QWuiw0G(~FX_}gUm%aKw= zyhMcDDY&z`1MsP>BiY?u0Z-=jHFa|?EH!gg+vu>0LOivV9KFYH0(4st<1IZv9MUtr z1in3j$MoJ8pxtT`ZXlH{654)JS5k|kEdc&56|=CFBIx(qqX-E|n+#gc+AA+Z?XTOP zTGE#=05@nYT^X_Bq)+X&-qRaq>&X=1%^QBvx~J^ycd(rp6ANF;DTz%0i}gW!vi6vy zPL92ICA!q3#yhK+Jb332KcB*?QffkZfLP05kP zHw?uDczRfV*=TS(3)q=X2^?JeN;>`!{6Xm&qtKH8yedqnuN+3)@dni+=Yiw{OMv z#bQ5&DKP*{5){DeWPr$!ZyH4Cx!?lNRW@!^_nQhXcn*w~=4@}b;wV!1DG*F2Sn-EE#_G{=HW z(QJZX%ZN!o&o5_HDiY7usm3Zeb|!!xr%Gu2c86~NBC<-d3kmMaOw;Vn%DX$Anqn9g zN)(kC0=E=)qNq^z-$NI=n>~@zEgc9qSU#ueKxqMAqiJlD>0%sN;17fMuL2|(I%=;7 zGN~bh)5K0Wka&^t3o=Cfxw*XlKX#{|m^Ms)=m&}L#fAT>!HHt+xh#fiCJmyhaKJz4svX2@%LVZSndCAxiDAy#GVHJ(po z+W?&mEgAA{x)YoI!LyYX@C+0Me0NIv#`q^DJ07;mAy+{LeYlk2^)yWW)&gM+iVL_6x|&I(Ze8G8@3`47MY#h(_rVm}jl1R!*W-OUl>TUec4U$zyD zS<}^ZnCIM#@&?W@iB;}mxt`U!{rE@_h|nazeE6|ZF-0i6GOKxGPKNfwf*z1q$t5wj zGmbi@gJED@*kX8p{A|OFNX6vqJM;S5mY*7P@1ClUfS;mY?qXfhzbwmzr*WV2FKOO# zFLVhW*CA4db(BgFY9CrNXZP`gN_RWjTfa8Akv0&bR0|;!6Y^qMGP>!Hky}hrOoSa2 zTk@L_LjNGmhQH<>(!ZJ&lQ(niiV28vhB4VLom|=AOgNbaM$W<^f7+G~+biyxSMAf> 
zU4aZlPWqQhCE3VAsSC@j(pO#cPd~a{H0h8yPL_C1eVWAnJmN{t=6&NOz*c^?dS{;1 z+#lI!*{|R8+$Z%%i_pG&`o9f%n%m$$rKZ1I4T-CkhiLdlXXd|A`=+#alwz*r?yB+%ge_|J#B^lzx5UiEI zv`b0SHK@k%CuCbZQ$&Hzz0g-objB!+d%}j!EBVQ3PKFBI4Za&bJyy0Ao-K3pqjl>Dx^*qdj`=@3z8wv4Nd#oTds(zEA2xj4TuNfvJBQOiZ{d z&ZqwpEz#XxFP!?&@kfJcKeaCm75cc@7dgTQ>dfBFqE3}Osff7FSK^-4N2quz0dK#3 z3|Er3B>o=_!4l29e3yf2rI%rWJC|ZwnkhnF=HrUj8_=YH?53wY<_WpaT83sQyK%OW z7KU-dW7UR)MxBphYkaX!K{0O@AE*@wf+75a!J}`mtPm*!v`SIJXhJfn(_B?`Uh&GP zkB`u^_a5)Xgg)BlOX01R--+Jd8C!u4`M((K>zC3W5YqEl)$v{)`CQg%P2cf+_ghl=@{l9 zf>8ZOwMWCyo(Ii%mR3Y0Oaq^nOfOlYWL~&1mgoGdM+7aQfjBFh#_1MylZqK>7!}_c zzwr1;p6I6Pv=VEwj}l4zwhcQ z*`ha&nsC@Y`=ORCt!oc*9fiOUNKD_T+=a_Kj@82|y-M&zo4Ya{`FL5Ld}c&aP;xW!N&LvN>VNrCa%o^QBgInzU#t)73L*;lwA`1P zMtUqh-|A`jOu=B)$QAQBG+|rDICqC7m~Oww#ft7qR?G#OwHXMaNE$!XPa8lVc=IZ| zs}DL+bN>NqHokTV#B~^Na-0QA@6fOXRI{<9&1pNx+~t?j*|vAN2^jLIY^3?|Z&Af{ zNR=a`1|(y?-XLZf)Kv4%Tz8TcGHh~WXygmH7NvV%bu0Y`aI{=G7zpK=uMTE_y+efn z`{6v_%#PuZjDR5Kk77L=q15gr4qLgR$_zOBJSnVo9dlCugPZPp8m}$xW&hZ%CmUtm z%~uZ$W!m%#eocZttE#Pq5m*j%S}M3??Rt8+0(l`O2#Sl4;-)a>Wv(DKhYP-fy85{N zmKL9Q>O!4ar|xgn^m{*Ov5MeDS5siCiv;1?k0+O;>IV)Cqe*Ce(iuzsI9HPem)CYk?w1 z9){Fgf}bzdz4X%B)UMoqi`Nm-5A5p%x9OU8U&`kGY1E zU+(_3()4icIeHL}e#4pKWif6NoKo-{`DNnyzTV}qjjq}IR)z08CnNiYVfcaKBQAkp z)e{y9Js4&JaX-rW^B|i0DWOeZ`yVUjash<>_FSK6Wl3 z_u6pJPG869;)W^AKXPE50al#v<3{F~8K{s}BkvI+X*$?)O}EP>7$RhnDV7#5!y=B&i+wG>Q+2hBacA8MbEh{}^427oRzeY`>vCN|J;bdhGC=T8s`+H#4xfxomxWNU=&Y< z8gj@-z0*PIMpUsq8>KPr;MaUFJAm|(+5#Dvi|M3vzPwn0BB#;7hfhoh4)yAC=MH@M zx?W@V;x`jSIDvv6=g0D9<#I|YE~N{c^Th~_i7yr&k2>h%!@FvvA|D6`_R9EF`10K< zB9VzN=qqqvP0x;(megZE4>CLgV5zvQ8p-hZ!1*P=uQ?@VNpso5%6$NvV#ydQhE|3J_iPIad=BV*DQ(nKeyArFvxx+HXl4tPdBKA%aon zFDI9{<1W82+SbC|;7N)~5+B-UChLI|p~tI>!^5)l)n_23>Z;+G4(d8g-VgMYnixFYE?*v+jK zXWgL@85{(KM2`Q0waA8|9aKFwCu{fo9j7A*C9;$~h*n7S#( z5^mH47fbL{;<6BS<~@xMS6wM&`f>66KCvLS2P0s5Rr!Z?;C`{icMr?(edB6&CS&NJ zW{&2=U*|5RQzL?IqR8K+TRV17u3|;{-Sa%&6)OBUG&@brQEDnYl@xkJI!;qEZ0L?i zSl<&br12z0@TetXh?I8QKw@|toFTrK!lXMG`Vpy)_`8jUPnaYoRjfW`sgRyG&b{*5 zVFS9_pSq5lqac^+#gaXhX))Wa+NNOpv{lB*m(w|R@W z_$aeBJ!)&k9_*jv-rS1h`MsPtza#mRuUaxvODq_ao->>?d*vG9hlgkgvjCA3*Zn14vw0^f65hkzC_O~D??$4Lsgm4PXG6QM2 z_96N&t=Z=3w=QC3*CJlhe*JexSNrFyj}^47{TkyDk?e5pD(ZQGCnq%2wZF7 zkT+hFrMr&6@7*rcJUD*;cKcankcm$y?Z6q|Q=91dcJx6<++~xgO-VT1!|a=b`-?%$ zJ@dj}rD`c}Fw&=`KR>HeO&vLENc8U6xhhWEx4#BhS4mQV1Tbg3i(N39qNaj&wO%W{ z@HDE$xfC8{BKRu`cnI?S>^C}HeqP4)>$1=I^fv~yijJU1& z(k33MQ=B~R+_>$1IN+L`?DYe2$3eh=^90cc>tf)P)`}>}JZNVT#{e4yzkZ3U|E!V^ogtMLz zmKR7K<`Ejv@o@IN;q#hU$rVJQhkE!Iq`yP4yGiLZl&M17k2-M9-BER5USvh;nT_x2 z4niT+{)Gfn>E8mKiUyKOWxzReh`JYTN*qfw8&8=3u?+n1rRQDSC#G0ct0GTPf@KneGg~?$sd-li{S8br1l9vB9i5E9RpfzL)`JQGi{@ zLP1Saw4nX-Gz~vSGz|O3K)0AOqdQYJ6-k!Iy{~_PoJB5~)tDAN>e=Mum)>_3SDPPs zj4~d46SgHSJ66zY@6`5Xep5#CI4n^e%`L9VHW2e1q9~3jvLP`jttp}Fk~{Incp$iC z1wSBYq3yOcye0Ne1$f?Q(Fc9IDCuU<%EfpKlIu|jV*&v zq^w-5m^|RJlGO(H*Aq#LJJ9%GgUVgeZGmYDYk#%=22oE>&AHD_9;+P9`W*qdCVXc(@_;=7M*i%kU4{eX%6djCIl7Ap0uE_T1To zgn4VF*4SzJLEbR{nV*(|3b6_s7n36-*jBfiwb6F5()QaM#&Enf)v008*Sv!b#@2W~ z$6RGN@djgllOG~IcMWwGVe_s7&z*+@%M*2TYIOJ67YaVBLfD0S2T~O`C!_^b-;&VZ zlxKfqXyJZx_$FnhXbZ$Jf=GLE5KpKblz;gyv6h~w-z*o7^HxBAZ(~XMuHb!gIYzJU zRLDzGe`_LaG(o^)1JmfO@UxYjphXrjVJDBTTr#cJ<(W@3;H5N zuq6>|@5`iun{+R$U?P!R{N{w#mOZVPn3IHT%#nX$NSD#qKzH9cu@H5mss(mjyBkxFqh z+~1uTXc@Q8KxcM&z4qxtcvQESr;`7VJ8560A3@N42`Ii{FTWEGnlaz0xh8I(oeOc{ON@JM@1@o4O_nqu>lmL3)0t)5x*BvD2#^)W5Z@^#rd75k= zPdBdL!@~Eqij>~-d91RtT?Q_`r4MRyqoyPRMJk%TLqzu-+F|GNxR-6*)o%|Zq~!Sd zOrFVs!7H@|Ym=yZEK+6VSy#8f&cqJQ*JRQnHP*`IzoP;F0ZeXW!Z7<~<%LTX5;yTd2Vm-{D@oTqC+?%oKZO$^ErVm12tv?imqSR0K8G 
z^drMdFXH%{#Zy(dK0Sl72o*v4A=2S<<}pU_g~ENd9FVWCX#ZG!lakA19oxNMEv8p) zJ(iXVp1`%=X)ih(TECfVuucr`@M8qRf*r>RRP{EDGTG|!N5&YX*Q|Z<1Xb%U>E^HhShW$eGYNRYT^2&T{-G|^)scO@;k=k&yK0O-dq!Ou|XJcnw-|7e;q?7;H(mSx?&# zQbc2sv(6o6DLb<-`>A{7Vh~JFTo20{^lO14*<9=yGSH%ClCByO08lDU-h)`Xp+a;I z$1^=C=C;2lj2OuP=kNFJ;zQ?WDa(p1qA%?e1xfWXSke>>6`zMJXa((HccRAf)WE9k^k8tWj7uiNB<)c2zY;U*#*AMVq~m z9vP7}9n7cSf>)dv+ZBqnJqli*A|Fuf6Sg3`*xu5=LtURy+{o!;extwM5aJ_46VYae zmp4(@{BB#-W#%Yej1rb4EA!!w$K(7!Txv?#EZN3o+Q3(G%VbK|q^R*A8Gk47Es zFYSqf(R3LqgCX=|)Pc}liBE3X?kGX+=(L2p-?yO^HXIF4^5A7eade1g=B?kpiT-!lS6N_}&3-f9NcmpCj(SB1I!Ir5#e zuvi2-TX!D z?NtMFS$mz!5@ctm6cdz~IazAXbh%p~%r0!@N(J8A2;(0LaJdE(OPMY&9@}uclqug^ zu5-|R{Qj-;R!EG_-{eV>AX%I_9u+uQmuO%d>+kF=VTX_vegN0mE#)&YDH!yt9zC7R zBYU69!`xPw0ORFoAcGpgoPYk<*6F#AX@w52b8*DGbRLhX!fAKy@ALt|(FB_xQ4&nC z`J(AV<8Ydrgw$;oU;eK3syg{ME~`;}XVdyav*JVIkRzs@lGAZm_icD%0*Wk%U0q}9 zb^Ne?qZh-!x>^Fm$21nJJ6nSu-i9)3r~6C91&nF1yB;0cQHnz9^EQsH3D+5Ex(Pea zEf-qZrLkGh)IXCkxS07SWLf8rc087AWfGE~8AAdeTKSqn+94Cjxg=P2hGYVXPVju2 zsfHzYXs71vbT}BM!;AMHK+T10r%wi4YP`}hTDAPnHsZdY{@p?i%V)M_LjruoU4hp! z822l;N$51c7hG;1cF=dw27^SgH@z`)GlJ@N*gv9dV{I?BFrtidN%39T!@KitIDF_y zv7L@T!wA*oDd96QISC7#_x#6eM0tZAp>~FNHOa-xqRnAKE1@q@GaD2#&n(O}VJEt3 zPEL2xYUA8Lbb_4HaYxoG1^-0owNZqWTzC(k$$mxW)xnX07yS(K+mt2P6;`j){+g~W zh9vDe@D&@_D!=~F(|aqkb_oRhsiC*uWLiqNC4^fJ{9KWx3N~y4zlSCCWW6zsAz44B zyVoL*uuj3`nT(L53)~I;hw$3U3h!XwJVY@aN>U^>#}t04TG6~TP-aVhD<7xdA5e7Z zmo;jQ=0_cLF@G|XHEFQHGrLK@E22zm)?3TlP;a=HdDM8>YflcFGDFq`alS@5n48|n z=3Oic-L@c79^bB+If$h$ zl(l*8jB8e&fMcuhQ{v8KI)+~b*XS7Z!mSVTH6IHhv85c$viVX(_~gaf-nZRR(3MKk z&+7Z5Huu=Q3uN(<-meiO`q=-z_F@pf2knroN?jT%%)=}I%`qcVKk>XfUKD~zO%Fsq$lUfTylDNIN=I5*=WxAfMN5dByq3nnKT0jWIkZ|KEenBY$X*B>G>)%?chf^r)5u?5`p)6wUGW{My>J zF8I7%<+BZ*XQ0Fswgm8Y$yzX!w%1sMy87ps=8gTH@X;rlbFRsz^$T?25qM=f`d!if za${TvQ)aG#@+f_d{X`pX`LD6NFC``ug(*w$YQ{OMFM4%3k9&LqTXgq;wbuEnTdF^J zrP<3eg{hI~6h8ol`X5mEYnHZH)GPz^uag*Dp6&Zsz$;@>?tx>)zvIsc{d;9)zahY1 zhpnZf{DVYMR3S_d`@8&gx1Jxl=&|lQi@&@NB1fypIT|UgXKM5W1eZ=h<}knbU;&5X zV@8%)H{lZ(YfK+2-U)SDvp&MvznPfKjGaS@fr%(6YOK2k5{+V5IkF^1u@63#Y$G<# z1dIa`c#|S4(#bgAjSK&L*R}0iCXVM$+jWA7fz9=XQ3P}PpeMIiZ z+}TFKf9%SS)jZHg$Zbx7bo#d7j+u;v?wacv{{gNupqQO~6Nz$M>D}tXbuvF@HIiK50DVb=xw<$kyMq9UFW+3FXXSjSq1=ev|krwLieU zzcr2dSt)PYZsZlfcOD)_Y)f6)MwWy2jo<0asvX#mKiq!m;7zwD7nm!;_2dXP{3n}n z^F)-`?I?@jFsQOCHA7c5bEzbq7t+P{-RhMz*bI+yJ-vF8Eg?-K$lAGnbhIukwaA7pyR>Be+A!Y)2JCjEQ;cW%MVu}{{2qlbhS=X zs%=hwN0jwU-26X)HyN<_^t$H+d0!Dje+QZ6U(&sW!P&!I8_*j~q)E$9eOMiHV||BF z{*HE|7pe^RdBY4^-D6&?Fw)EQrYj}ZIMgG)Pkis{X|40HiuH-fQghm7auaD`Do=)5 zV+ul3(6f$1kCV!~Rqs=7Bc3X{s8Q1kmY<-X(skeju_xTEX6 zKQL>J5)DVX8)MFHyteQ&5q_}1^`HpM)HrCg=mL%$)Xy!!$Sz#HBWiMdC`Cg+yt0k z`tMMjg3+7ndbB&TaVmAo+#|VrGvVz=7l}*(9~ZK;!Lwo(qq^FA8()t8T{>KXZhn8@ z)?I=OtA8{RzQ2(APrdo-0ObO+O~RtZE$xuSFU#HNcA8!HqI@5i0QeUf2A5oQTo?Ib zurHjVV`JTQg$n-1jICtNeg4lJ{#@PW%#l>8ho6<7&i6Jo#s&vMCV3&lMe zYH4mI{aNoxFAenQEkW7M`Fz{ozPUvqjT4xvT31)Y9IYdVQ(NJ|H>)9eQ&w@`+ejn! 
zQpz!OweZ|?+Tvh`|BM%XG&ydc-D!uH^yoXBery(8_4irYYa@vAbdGyI{(W^_?afC+ zk}RIu5GHRSMGr*KB?v@(Og~bRL zrRYU5Inl{BYH;2vQr*5-;ohVwzJQ3TJ zVFBvwh0wC8>`HswKNf+F|61C)iXcW~C5lF)It6a97c@iiL56u`UD`9FE%Pjyh$FaLvS^}MUeX`1W2eo2!-ZApsuFkIc@)JnFn6&j zL%@YrrSBa!hi$g3uUEsR#=53z4ZE!KxtzD1kl!kXbzYn z1&l2n24PxQc)3*^t)BNT@ApYAn_-9A^wN1v1<`h~C2!h74m2V@^D*IdV(djr{MIH2 zJE3z8Bo?xiqRf$YRlX5u#4Yp*^2Ooqxqj7AF(5V9+CRBmTw)Gc{3rnY8hBC-H?YmgFa!H|03`!W%YX} zwO1SOIvhtLY2~i6U&(dOjB0!gcCmpO`n`(&{ey&9D9-R6>Jd;XXHV^5n(KXG=31HP zBBz^FO{i2_?HiQM_|80mNyp1F5%KMNt==rka{lR}FSkhqmcga@r~AG0#tGDmjGnc8 z_xi->lW5EMh{n!*{tXYE1fQS+P?J2RNqWF<$L$I-b+4m3eD3ee!|6aSTv=QA3~5rFiY#x>Ar-Fpzx*& zf%3c6#7?xjuI7#s1t{m2!G=1Fpu zR`R``%$HY+zV8JxX~!RZ4Jh(;S1@)|Au}CHdY)(<`z2ynLUxVf2yLipHrG&gF_Hw? z?s`A>g@|^ykoHj;k?n#q7}NvB`c9P70@(3J28~LAnCWIqg=#NZ{p@5fIBu6Oi5TA7 z&J*YVm<&X{3`mJTd)@gD7_mfah~C!~rO5Pk+2D!~8K%pm2%24oWHf_j0|+=9{n-;2 zx?@1HRVPq&!aT<-EPxPGh8|eQX$d3-`*I*);WTF@k4^k4gH1`?E8pwP;;O_!(l^z) zDaWP57tR-IG|w?w{h>e3N+k#+Q+0F7Qxb`eD>e)sQPiCWut$ZyJQ5{UDmy9XtG>OW z=}l5Oo68M?7COn0!cdKNxA-d@=78lNT6jA%w^h8Brn3!nVjcfnw;SW+q>9{s?bJ-` zTE0f0ZBvQg19JQ#W{BlSM_-vWV~9}o7?tfg+ai=0rYE>#pt)zWRG_A@Sd>=Bc#%)xjAb&Z?#!y zyg6uOB@QTi5Pw*mT7V}+V*}lj7>0^Ggb+vvQWnK!31{iTD~F2klJ(#G-Ah;^SJu7U zDGAhfNi18O6%XE1MkKS?dxkYFs~s>x&i3-9yyCwL$7W~IU4gY9bX#sX zlyH^|#{{V)rZg=aIYHS6%B)Fon(M5ENr{rgOIW5e@%ieL>_2wNW66*%BFJ&C+_%Tx@mk;@b1x^OcQfW)m2IW#_A~JwokojV2Yekx9(L@7M z;^ZsfXky-HwlX1@rszr3T&?8({{UV=p}u*q%?ohSLPMTwqwuGTyhGr9OYPntocT`5 zZY-o@O3pr9a?YLW?I8!*6Y;$9?oeG zO7kxOd{pq?k6>u*WH#E2ITKjUK$&sDWgo{MqYe+&y)^x6^Gf)7l&0y_R%rdF4T`0R zrmE4BY4!|p#eDhuNql_K{{Ux?hm!ciQ?;J-;qMw~w;mkvR-0?9X?I`=?Jpu(p_6G0xkdzJFa#WMMr%wI=)$}u zh>tWA)opYvT{_dFDA1QI?W20?e=*+}{yqFi_~WnLYhE(dCHpMpJe?{DWEy?4$DM*W zOBE$@eqsv}NhZ8W%y6N38C6tdvk(9tq@FAE{{Z%f{{Vt@_@~1?Pq}g*KI)xq0c6c&!_y6NlJrO(CV!dbQ}8!C98oSVA0 zm-%!*4m?-jKOJ}$JvMI{XcyWvTErh|v3tmjYdlAIEnm3?oj z>30&^URp}CL2(0^<%&c(Rz_2mV0i>|uj<>!zYV+-@jt`zcxT62MYgf0xn@}I5HX5E zr38^T-ik#GcDfMDo(QkT&xk+pO^+XGUl26SBjNU^4gUaz^;um8p?$mU8h@7?$lq#M zqu9#9Hpc6;G>j01$R`|!#GD2;5~rBjoWA(A{(fD51E)6Nd}bC(rVHO*S~Pm!?0fri zYyKeDbl-&E71HZeyS*YEH&=owE^efei!*((RFknr0FhVa{w_Hmu1ZN9@n1B20{xi& z9ee`O;EO`FivIv!F_pa6Cxu1a<+)&|lN@>bs0qO8O?@l7Z&Uf#<2*@bZbg8UG$_Ptd1CFmw{h}-?GQ;=i@&HU)X#; z_@5pB0E+Z`tB)$$_C5Dny|YV&NYz=haS(os{;@+HrGQkgZT+#oZcTr|_qwmbFNU^S zhKa01!Yi$A@*{03#70T8ibW1X@~dz_1ZAS!Hb{Kh{0*7Y%vN2?cJ_TO-@nS&I_dF- z0$3-ElWN^9t4{@hQa~oa6qyv_OF>o+JAog^9BJD2p>5(R9LV~lS28W*xxgi+1Ubk69Fvj3&3@s2#~-$4 zrSZCT@DGi&Yl!?qX>QVgYFJymntbu6LpuWBW|e?Ex9;7P;gy+VK9>`Cd5p?vtVATI z1@Uxi<=gQ&GEOqX<~3a!k>qb~xBmcwbl`CB%jQ00_Bk-z2;olb#G~(jkHApoJ zhYOFk0Hl$X`|F0}lboFVh5J5#!8!a#KZs7V@#$l;(rjic2A69vl1ulA$lWZ8K1+50 zGKC@7ADAy9ziMfODLFl>#ldloYlgwR4LYxt?S0py=z3N7Q!TBESgF(Zmrjq>9RC1@ zd?VqH1?ceT9ud;5G<`bXK3%n(D-1xMrwRs1?oTJZL*ow__+P*t9=h>QjCCu0CrYvk zXS%s(b%F@Mau*a$11tcyORoe_wU@&s7Sw83)3_o{; zETe`n7nPwOKZ;NDZ2X#aGc5BL-cRmG`G4UDz)y+)030<@ruY|0d0xX*tu>;036rvm+blbD*QS41E!fYhz6;zOjymV-a{YSg18xqG6(MB2RH+g z42}&anF%bm){(4eU9q{yQOh<4N#xevwXW#i9Mfmkd_j9} zr&wC9`R=ZzSYe4rcXuIFeNBEN#awfY&R?*kxha1t-d?}uvHGS#!gzeTKH971so%G! 
z`JXC&(Lb@b!+(!9jj6{Jejo8neq+b0S;BzaGbzGdpaafL_+r@-4tB7~ufh+D-vquR zd=AxN(f%Q6O=j0mB3j)>M7DQEAf>#SMhjpt;gwHbI5qu7Xxi41;w>`UTF|c`zp%WG zWrF6~MV2XJag}9sC0G!02sO`m-{IGR{v>IC*q%Gk?6unL5y z_z$LA>Ux}Zz9I34nA1M9CAeR+7gC;GqOoHV1Y)JLx$;Ti?H{3WKN#@tFD|u6dr3)X zwrTmj<8Q~%_^gYDGn_U%_fq6bchRp-r_EosH|?$CF9qM+{4ekZmw%w$-Pp#{>eu$~ zZF3i%izaM{W=8@x+RxP_1a(^cZ`Ji}TVK>)Ro3tBbt~C$9^&q2o>s>LF66Pt*1xFV zjK2c@9{h9g-2NQ##+@9@(8&$1qa11W`x7BjUfNey*x6Mi5LK~;1aV)Ici-?#--$8& zMsE-4n$>~utCpTEUS@M~9k_4=Zvc!dTwwjxV1y0Zmpez9z+6v|;c(8CX!|OOYTmrx zJ^n|j#d#KIUlHvf+~D?0r_1#}KKvQre-->f@hz`}d>y9GscN^J#{?)_Y61gD=NL%W zBXgbxKqej#(cAS=;WmP9F$WetI0OGyHaz7gR&Mfg8 z6OXz`1AWrd^7!@^m#6P zS$wnW656%SpQAu?b$Q8E!NFMLEC?>#01g8YgYysVm-}ODUk9%4ei!&FNx6$%zOfR2 zZQUzJZ*G(2b7}?*vZ%@5xGZgs80Q~{-YD@Oh`eE}&*BdiYIl0RrE!lo>hflqSK2T# z%t_!3R-7Z@-Au|c#p7kmN$Qf`U*x|Z%&sKkIAEsmxK(mBrFRSY>;t`U{``RzzZ<|4oN5Zn!DlO4{M$&)vdG- z4cJ_2dX3}kg4WVDmRT6|3aSbGdJgy%`%U{W{{X>1G;ao!@ps1rwb%7mi4DGpRcF)e zi>_PF+zQ-Gr{&1~+TiWqkk$IeX`SVKJA+V)O5Y3?@A~T5`Q8UF$LDybH>-Qe?2n7T zX8!=#YsWts=3RTnt9Ri~3V|YtWQ@hAT*|DWL{Q9}L`LFdVc2a19FN@}hu;gl3-G%@ zgTsCW(e1RWNKsMD5rvXsx%uUAH%P>iNn&{>yZB*>-Xw+;SmTXCs;I)Mfq($)0IpwJ z@#lj4H>&Bn7Qe1(`gWtNOXO+xmp3uRHPoNoi5-JEpkDnSJ2mxi&}93DOuvkF}BT{}MZ>*#goSX^a%E2V*-EWfV359RjvQ`EJ4 z`;Ajqx|;U#>HNv(xRIrnM?7Vn$!0hJa(WC`Ti`#3-ZA)};yZr|co$2TRn%uIxgz0V zLcx5=U!q2&^2e~?6UBY?WB&jI%J`=zkIspx>pEm!7mg&C`)V-lB!J`?lGA?iDzM2d zAdhJb2|0{@w)`RZ8Sv-ej)=N1fb^Jc8KXqHyeb;*?UO$|(w~{mNyj)H2NnBfb>dC~ z2SF^l)00{yqF%q`epQxmMmrT<%NV(0o~`cv51Rh~W&Z%!f5CqTwCFYe03Tb~_?N}* z(a-105SH6ckra)Vp~pP2CPo<~0x^M)y*#x8Xy8+jQ;!^1;8~_+h|DX>zA9If`jtDjQ)B9t7!}H!<)p}c}sqx%vmgDNs=fg|) zYk2JaeAkiUzq2px$>V>7GTQi4$CA(DF9>UHlO0HX7(hxu!aT~T3w`WH%91jw&GGScm2iw4C8tGW8#m&KZhR}^aJ7l z037JHw}Loc8%wV(e$`{VV|}!Q6B{oCby7gw2;!qZ%=mVuZyM2{Uy>F{;Yp7J~8pPj68ed7V%e%b&JhkR$u_Tx|NjbEYL13HP@N#Z!T^Hwd8iz;a)>6lM?RuED%D@9H}kkjN|oJ z_ILf2J{|l7(Cr1at+uh^bXGSy^w!9?EY2{?5j%`g0l-ynQgTVJtm3X9;rZo|ry6Q^ zUhGxVj_UX6s{a6heC|P$=Mcxbu5LQ}&AX@NYwCSP9D+p$A9!SaE0*zx!|w!qLDN66 z{BNPyYnEen*sq{Tf^Np`rGY9=21=2fS6Xnm#c_H+#IJ|`A@LpWgfvePX_wmOoEY8d z*7tEqbpTQoi^Qy1c9H?e;G7!#I-j&s=c1mP-E{f%KUk^k`BgfW*G+n#jNczW;GLc| z@W+WX`Thx`>)Ix*bZyPG%z-D<8b)STl0rUvbxbLKTo5uFc1PWRvrp_v;2(n;Y&zeI zZLNGu;_1>kCb_wZWwx@BdD&==4lr?<2;4}>$3gckQq*-D9VWu#RMh8=*3!}^o_VEU zk;fchvokT^1p_44(_;9`HpO8U3Z+MGw%%T^`oB}>@VqCM;js@D2y5rE^gJWur|jAA z)8p2PYw)&i5MXs@F8ZG_0hEmG1Fguli+Eua!ayhTbtsDLcxABha;zXVv)~_t> zd?^&7R@UL0Gf8IhI-rUjoLuZcT^r<0!{%;9e&hIK#hw}Qu8{|bd?Tva>pEr5OgDBj zCBzbKI6GYl1cER}V_g9q>N0*U&2p&9jS1O3EcLVQliB|ORyE9ccQ4EwYSoR}>bm-# zFYssJKf}*~`T}U)4AY{D;p0TS)Lu0gccwhyrgZ~2$-o!`(!HE0pwg~u=W!T{xQed5 zc{xe$-siPL4^D+OPLi@iv5F|9Cl!gXw9?8t)E75)_Ez#;UC6OT5|?*!6;)JZl1~5u z-o6z0)B9J&YWIE__+aZam@!GL{pgUNz4md@$o}~|bDRU#y&ShI#AB+eIC$NjZfBce za1>p9Osu~X-u^HAd+@Kq+jg-MCZn&&fuPf(Z<#}Md5HUo`kuqM{K)u=@iWCA5+rmosF%IGx^FV#=2tHxx$0Q9N&M>; z{wd^&NRaGP&reGIr-%4E50|AW7r$_TAvYs25M3H1JF@iDD zu16XVl;8@&(e5RZ*A7{BF-@nZ2e0GSwb@aelPLonr)v8}1r?$35`50=#MG^=Zf0b| zz*PM6#sMs&wtH5tlMS2OW+UbWBq@Ga52>zXP3B)W*~`SChep~6?~r+}y^2HtjsYMY zNaGY(hOR+p4Xjgn5E9S0kYADs^d`H#BK}*gL+pwH@`elKmgDrUCu>|4F`$Tp_kqc; zPqBi|7zMYR95T&=@{BtibYq@#?OYh$Qt~|f98)`szm%0q<TE=8 z#4cS69DW@IUK-)IvALCyP2>UriAmhUpuo*^(tQqTJ0b;Cf<|la-YmFOTxaI)bJo7G z@GF(@wx1FyAyo9|iunV~yq+6rk~cqWU!6h7PLyJ)VXF-(Eb_9HcPbf_dQIvy)n$)|KgprowNC-D!2Ueq<_oC`?YT-%| z!Q<;+N2xi(epSZn{uH*pSGlnsT;n9D`>p;J>fy2W(xW&k;rkvs*#?{St9O={7IJx4 zF_dpYDw!D->cVPD=;3P4%!%7AfO=Bzk)%*C5CflF)in-61VXS*`aKMg{SLJBp`p?j#vv z7p+evviYkrg$xD{9Fb5=A>PahE6hqHn%CI3r zb}Rd*^sj@_{{V!lkG!AcO)A2Q ztbX%%^);5n#z{EOZgat@uC6}NK14eQAor>zmQjQ!Bd%-UIv=vZX{jtCX=CzsuOQ%O 
zo+(N`?Tx`B`Bg@Fp_E9h6aj_={{UKOmp2LXU=U7o+Ltk>DXhh1D{Ub-#&8ZY1v6)m zBX!A)w<-^%Q+2tLOu*>8V?FCdW{%nfOO^yLQax(ouG2G=ENBa{IV%dNk#WNu3dV02 zY0>J@XfjHTs5+Mjhbk0x$sOy$z99TkyRp8TMeyqza-$N=cyp1^dVOn#_+>5C*NtuE z4$V8qCLHoX$6EfW%ToHCEB^p2e!6US(++ zwm>9p6tel!clI^vzYebEp89)#D{7=;2R}Dn!o2dpSkJotIT}cibCto*^{$WM-;Cq% z-nDgmCZFcVa6l@<=1{ribJIEIxN}uGVWsSq+B&3jW5HreSC{Qq z>V&Kje8YEO&Dy^!sSI{1zpzn!-H+H|TQ0)kmL<({chma0>NCp?x-^n7MM08DzyiK^ z_<{RD=$;=*bpHSkFWc|AUEqNnXW!I{`HRON75rP{Mrf|Cn%>VmTflZM0m$4(E%?`! zcW{}WMUFVbe8q{zF=RrK^HHNaSDvRvxcx7wVq zad2min6b|}u6nt3TD4t8v5ic9D8<4|`m-&Z8kKq)l1--tMg>LExLxg`#E{dwJOPSA zsxVmKlYle*D0vw|h3dx@9I@_nIQf?*oJT7qVbT^D+A=FRTSpwJ%~_ip94xUB77Lu5vBz3iOE2!`V$Iz4s|=n{1QOrQ ztVUKv=*#TG6cR-7C*1%5d8o?uUezS%_$4{O;Aa&TxOZRP4Xutwb3hbsoW~O}A9RdT zpjJRndWuVVo_2XdDxNXBEA_`}S*KfUAq9(l-M`khi<3tr^m>j`QLw!#CWcvZB9O+% z_^5%I#^wZPps8S#I}zzktL{bJ^egNdRm2yz_bqLA0|$y|+iRnag-`PbgV2mu)SnLi zJwZ9Q(R^~t6!O294xob&Nt|QOAM(kG<3Duq(>Sk|-sLVXji;1ZqsCd+0|z|`>(Z;* zFWTBzfKmY}LBSQ{=GhJt9ZmaHY4mGz>iBnw@p)B6>|NmZi`e~3j##FRWr@|%fFYFR z5Pb;iPsp!?ejRxC#9t3v?Dx_e%@x>Y+21V)>6nT8ynhcvj@9;8hWu5b__I%4OGCL? zqAjt3`CT9Ttf!EFL)$g^7HN~#z|`fQtGd|!g2?mw8J!hUmXhlpsOa5G3O-#7^l%f5n5+hVR?45gi^&&ZexL*068S~1Fe1_{@0(j-^7he#agGs9|A?Fcsob5 z2HNUth-9+W?qx?IofwsSc$feb038#64op-@;Z-N?h1GHQu0s#?@HZ0S4sBVoQ}g4E^Ey^YH88r-A+z_;TCB z9tF~%(`>Dy5sivG*)U1nESTLRWS&@)U#;;ci8x#>K3uX&bst-475cv~y!_iE;B00h zT=?b51paQ{q4V$T+4~^;Jopo)TWYi0L#%kk?bR;zX@{Dq#fvi9EQ9w++jcjw9G=zb z9~%DvXHSKH9W=ML@xG4~nu^3@w$-2lEBkT-gxfF$nGu|@jrT{m?%SSg*>!ziOVBjS zooiaUips{$C!XTsW(gcoMx~wEj{quyNvz)sc*nxtInZr6f`$hvI!96 z1A~rL#&F2MGM*R%kzb0{&9e&FmzdP4CsO|a7iaoOqb9Er=B=Vn`*phGw+M|U4abBZ=oC{BQlWzhdtT=$4mX z6@DCQI$wru;fh%`4J%N%7S{7j(t@pKTlaAQ9D^9zfCUN%O8cAkko~Ou3HvkjJz6Myo#)gl>w)IOhJQXeo|N|>A|nS&y0T@Jah5S#Ye?HH@e>>ahNS_Vai)u zLFPNEB4-4R`%9mlg0A9lEA<}^@*L(^g_vQb?CE!U^LbmZuS4_xGR^YkN?2-eCG zyzlxSuHUjp>`mY=25DMf#D5!0r}(p9nPI)Mdue?83ldN=>?(Fn<8V8e6;c6QoHc%w zVR3*livDE20DjZo5x-}>aL=I5bu9|%9MQ@p*A{{Zmk;4g-x@ehr!u_^g&bnARcd37lt zD#spp2fMZvM)-tTDoU-4}D9P^5? zIh>vzish%ah>>y&~v@ytg0%Nlf9>o*fBNK^frJ^EIt$8mEUf zd(8u0wU)zC)8~@f=Gt+#NT!L3y0G>m85sw$rO|vtuXtBgxA6ytH4B|DQM8aeS2nV= zGOjv}$U(sz5zvkg6~7bkHWN69*C{m#^lNE(yKm}ak?}?|E1%WsUQz0v>(jCQb20ev@BT1q``;w;$TxCv1LV!(v ztR#sdtDy|60R$X^4{&Sw?>EV@nRRbri;cTJkI|p9WqGa}A4}Td-TSVq)RP#dmCI(7 zkOogm_}liI{jod^@FPo;Nz)~RT=8|nvfJs=NUoOb0Qs*96478VV22FDrySQd2QROV zuN*cSJkos*jBYlD5{s{hm7# zh^~st^2<`Sj`qT6?rr5=(aRHuM;@xn!5>QgRK6(uUh((FKNU~nza45)UEE69js<30 zTW}OEA%`U+`BVZx1+kNpUfKIQ{@0%oz5w1^>5$JRvEc@tUhhVo^RY-F&8&@+103$a z`51wb*1t@|cvXy!586|xrB|$adMDr1_-sFjSZU(DoZ-t)q`y{=-`|R#2tFVDWbo_w zYsdOrSN7mJf@}r3wUNdQkWGTow@l-PzzhiDzYhK+e#9Ol@W;m;8{uDvyh)?aZ{mNn zTxpWuU8FY=K?97(JZB8k`D__PYJ9*PpgR~3r@l1*0KrQ%r|{Hv9|P=X8pYf}rN7l7 zUoOeG$|N~qD`P(~Ax1hL-;#Qti@Z~>Ytne9S=DcEHHjw|Ci+{4Yk?}9t1Ow^r7$*a zBWME{#d`b;!@2ev6PG`;gp;yr`t)|Xw!eAj@n&0ii2NB#xY-tAF|Kw_pJD8 z>es-Z8!n{OG*Yo!S!)G%+*v9qRTJQEomlM!*zQruBxG0Z7LlvzTJE0?t)kxBYBpC3 zz8jlKQfOsdk(6{StTG75Jw<+P#W`MMg2lI)UCHasx_>{@q4wN8nP4$kb@dx0w_R8D z-1x8dw*9as{hwoh3+TE9-R8f4VfKqHLgEs-R4nXJZ+wF*E(zGZ@Cg~O!M_)H>&5>7 z5cS9LH;uK6&0|z&F1HsmGZ=R^{jjb=!hyL}j4F=8zp9@Tcvr)oG12ZcFBEB(x~`ve zjFQ{k#Ir_2sZ+xPgM-v^I~x2${i1(i{VU; zyAkjV@hofx=Gs93Gm`2n?mRxua*RA(3ee@M{?pd$ucniFD5pTKqWpwQ+yq8+~4DTP-J0NW}M6(y?U4!C&7_j9EtWy_b^5Xe55s zp)4{*e5Lz1{>nZA_ywgc-Kr+D;%k&owd(W6x!$?r-A+s?^AJLup5)in#jrDq_>L^b z<+!YTs^yU7hhEM1OQH4LIfTaH@R6sEpESC8FJ7QPt{cZc68r)1&c+Q-$J+IlgKKjr zf+?@2Sdru)oC1A^C!nvAKWZP_gTg-pWxMcahM--0#!Ct}k{o@G%*-ESPU=e*U8=?) 
z6qR6dEAYGGN5x+r{9N%Az9{&Cs>`b1Ny2VBVS#V~NMau(N^z1D44zGUd@bSrFEmQN zB3{mu+1pp>yLp~wdB#|56x|9Q{ZGMqv--BOzO>YBA-=GgC%3pok||?g$mqi=%0~nd z$Re#|0nL9npR;f6jpN^dHz!KD^E^Z0dCGa3ZPe~w4WU#utc*yD2r;t-KiSR*`qTD= z{{VuICH9uS9{33q(8+fJ`(CrCL*>hESdp^YEyQoY<=Z&j@_`Qgpzjs#w2gB^Hl+cON9HtQS%Gyb%e?>@Ach(1KV;9?x4|C_wB0|){wkWs#a<`6hC8ckxG_GT zXp*R9Ho%ri9C8Hz0L9gIgNpkz;t#_g1N>3&?A{mg#*=TU*vgF>i4b{^?#4_qV|-1( z`H+LfepAF5?n75Nje2-~XvMx~w>QhJyzO)D@@$_srT_2ZeMU>%a8FZ{t2JsyMG+VAHwZM&VLHuTmsL3 zD=p5O3qA^!!TU#+pD;U=x7|Ga$MkdH2kgJ_=imm76goYWI)=R9HRiW-61CKlZNU+- z1j87~!XX`b$t2g(aaR)X<|{6ZIK$dWuXRV=aDKMZ-L<*k@b*KVVR6n^agF`!>YwAU z;C^;~*Z%;wSBf<~QvU$qZ^EIdcy89l;(JX`Q;{Ch`ryRDO52>Qupo6{Ux#dbqz+H! zr-!^%;vWlmj@!ll9n`LDHLFR@%s0_W#&Iz@W-P?+-I5(!D(XtGDo8)kr|lK{DEvD8 zp>&09N+r@fLuirBZpzNxMFUroEzF11}Q)6TcOirsw0 zLXq(e%K}g*++qO-FSLGke$zj-&%_Up8r9a74A*)OgcP8IL<}QlzyzDJhcdA^I~bH) z0tRwx`V{adfP5qH-@|sE6!2b~2AQVXBBDnM0M1ke-y>smjzHy@mOagUt@}^@#Xbl4 z@ofGf(0rc__`(R7M|}hdRq7{sYuPK|b%a>DOjSbot8# z8{P$x7tH||Y=DOV0iC%A`X>FKzB%as0JH~yA@R16r(Wq~NY?jO_mMKHPP~y6xY;rl zD&dfkh5+HR2f#nGxBL@+4J!Bk5>JnB73`MR0j)GkIgFOBu|7lHNwrLY@IQ6{U8IK2 zc>QIt)9iH1NNhCgc&#mM(PN4j5o3-tQGlw70;mIk0OG5U^PHm_QAY&})K6Kh6}wxn zujWSvWqFnkoBGSN9?^QgTl6jfDb!LEftvi$A9yWsUO4ghhJGIClIy-Exe!2{Bx*NH zEB08>a>w)?O?jWi{{W5Je}HW#(zH)6Q1J;*-P@9Us0)49l>2qqIX>0-m*X!Hc(cY@ z)VkM^lDdxDMZ3kPUbXv{s;Ri_8Oz3VU5c3?%^| zuTzYSQ{lMD+(5wqsrAi%*_CBfGWuS^gl*^5pMhri#w#$Z?W@VZE{o6+nb&j&%kQ+F zYFI8IodA3@at?UrtIIS3UHot1bM>sdyHrNp&9IY^&sz0QF*}ytc(!y<#GmzNAY;~$ zM=F6SWs!)_CmeoNg$p&Lb2*GaX8YXl9C!R{R>DMUb22}gM>!)H{3`FD6))`#)*b+e z;DSn=jy-DH$1mB+VTp|HTphVT;ZOQsJSV3aCd{5*`rhlK!k&6^;`kawoPP4!+V6~5QUR1dt9tih8(!JUN z6qbT}ISi;HJJnk%SPp~UxN+Mf(98bU$huNei7xU4e>OWX#c}q!-K3M;SkE&}cN=A$ zRaG`IJ4YUaf@@<%y|+y&IV6fy^CT!^=0;LZIUR*mmr2nhyO#RO1YmMqp}{4GVg-57 zTArM?3FWyMF%T5?>}xhV75f~LY0|8C4APKtH~LnfSanNRMON~NDsrW9T<`XZ-Wjd! z-dlBryp$3TEq5sO{vq#OYSz%?Y}(PDMAM0yK`qV+01N^2_4?P=J^|d`6w~IE;IbV4 zb@67Rn$DcMT+oNPW_L0;JAlqO$6;S*_zGDi@Lrm3kVhj%OD1whC-ScoJfH3~?s_;C z?`U;58Ip2$7F-N|6fj6)EXV1)6y>&bB4pgk2Oyv2Rpo%(5G723)0+9n$4j2fmqt_6 z8YQy_a`}uh6miGUS0~%0bXvA!0Z8Zh3d6^>chJ!sHFvNMl#B;@QAoooMQeL%V=LIK z>-)gK^cd?-f+q8%X8WWJRAp5dZex?$q%w%myD(K9Njy|0?{Z@FGPSJ^%T|mf!T`(n zTy?J**0dY#B6bdep&W-&Pagi2?~KvN7v}(uah|o0cr9)rc%&j#ZmKyr`d6ig#!3%E znxzEwK71<{3xG0xI@D~e>-YYE^sc{L@T{?1eXmOep=IS5V5zPaQp(#i&Pc#Moom{q zUNcuXCrgxUQJZOGmPhj%PtXc*n7jFEhXiLMu&CpTM_}09f(=-e?*xO&QL%^4bAwxn zS*sO)d-kw{{a!ga=xJG_-I#V9edE;U@u;t0X#+TRQ^3NUQ{s{B;y}@69CG0D57WH^ zCtSkcK0bb*g;SC>`%4BmB=c4-OU`51rAKD=sn=kCz#IGq*c@9jpRop z3~l4|uRh1QuIF5~wp5soHm*n3xNYg{Tva|JWPktB{Ry^+pxv}CdSe{bhIZZ*DaWlg zIN2fEI}8ev*fPczPFuZx&ik|Qgl6i1i*tdR;Q>3&?BgcA4^v#)xL>`FKDFmJQall| zD-j{?EAej-Zn5!S-CwEwKj6eIlhpqJ8~l-AN)fj9runfmm6dV`#~d2X^T|a8cNXps zJXBFG*55M~FUqO(uY$WjVNukzC990IELn5FZgEkn%Oo4-E09X<3yz-RoYP4Xoz6C% zz+l#7mn`F8JZChm)Th}N?XJbCWefBalif*ndSH507{Jas`kIlxQI)MJy=-GsW8~i* z5BRF@f8G_<{286#@#VaVrT+kNGT?U%Fl&_Y!Ox1XbLuOt_%UqPemu9gR@hqJW4qI6 z2EVED`7f$cU)_)7e+Fl8<;T}g`Q`bu?C-NkB=aOjjDj|Y?VkN{Rp!v_q7qJIb!mQP zZZccg5sK5eg5DUjJhDj2xZ|FK>sS`{@#MON3vE*4AdVI0NY~eTy zfyl*6bWkuckaO1+R!tTeBfz>UpmMSgrc)TDWCPq?iv z?$r@FqZD(FE75E2Gv`ZCJaJA39cwh-&prC++8vylzM~+>wY3H09;AWL4|@7@;cx8Y zuU_3IweccxW{fi)l@SH{f~?LM_Q)d@$C~CCe7=jTikgpQ)%>?QvfQ^7kF2R-VC8+^ zMe5I!^`_0p3&<50I)k)INwI2pjrdhd$78EJLn?PAUYkXof6vK^&}8za`Y? 
z9@PMyd$OFAncwr8+T(Jze z$3Q z9G_ab3jY9S!t&fN?m+a;N$HBQ_S;q2v9|-y1EqP@Yc`#aZk7kx`_c)mZtzuPGK1Nb zaZ>neR`DK>sxGOg$Sfqczzm;uP!HUZGlo(){zM*W?Wn}5D-~?Qp!ECOvw%HM70p9& z1g|B?l2&;>@;h+9LE5~`RT_2ax^y!<)%;d$Zc;?lfX zG`g0W&xF5Yx_f0k1b}lQXZXJG?~&8sb8Z}+8`i_?9>?qX4;RA`YB;?m-=X$>6`!YS zmKwFJciL>pac?NfOBvj(J&3FM74pwVW_`MPTv{lifrw59Yrwube$ihJ{s3r6s`%34 zSGl_mwmN;Hi7sxP04x*|6#&S~NXov1o-4%v0BmpC(^LJPY^`(;4(Jy*TIRWI?_;Um zNB;m4t9DbhgB*sIR3A5%xZ@dM4SogqtHk~!@xHrt;(r%vGwQmv>u#Rv;fp&Ak`xY# zi=UNL1zy6xN#M=^shZmtikB@PE#LVc4aB@(3>%gg3$0GRU*>yv?Fsu%d`11BHJKY% zx|2`v;z%0LLWV+Zp#T*RCr>s~80FhI+(Ll31DW_M@$=$OzKi5Dkp44nBskECjP?x$^`=$4Y->Nk?CHrBT2WR_W6;Z;;*l1+48 zI{1C#Z-$!Z{3D(_(XO<6@}6SFa~WSTGmY`L-F46LDI&je{>jTJ&{<^T9$RS+{&wHy zeo*mQhqe0bXDhF@`hUPbqF>qj_O$fgd#xV%L{lB02_&~|%(KE6 zkO(`#!F61W{YXi#Vy|__b;GOGj_K{XW#SO+Q7`?lo;%<|{ip zNZMKFox(*FYBJ2sZ~z06PfGlj{ilE6q0mOM_&4@`xml!Tc<($y@#SMAjiSUc!yInR zuDBT+27Y1jkL@x0Tl`1(^{LCGNM-Q%h3zMqZS;s4E-n~NsOnPz_M7R(aPCI|P#v+L;TP45Gkq5n@#YgJg2UkwonFgj>u$bV zo*qTQxV+V>mHpU%H&%ZzSGKoWb>!N9qXd?>cG5IcK{^=aibObMbYK}-PXrN=YAgfD zujsS(mi>*q5Boq_uBm8Y@b8M?W|4JEdt@f>DYQp-w+$ljxP0zVIxynD3O+G>6!G`K zzZ1>j&l=cC6k_4wv(yn7S>G&R67V|#xNTraATZA(t#)v~9APtbsr)s1#r}`4=3&Gf zEs4upe-%IG{{Soc6{onA4#X;jjdDUL90SfzHT=dB9A!=qt$qIh z`z`+6o+S7?9lwHpBQ4K}d?v1m1i{){?Kut?3B8Y*0xmHSIZ!Z}&f4=lLdf%6!h-sY zuLZmojrrfNr|Na^hHHSq*S0D;y`|gw?tZ)cbN!mW8vK3Gj+x@CF>9#ARjoCPL`A-~ zAd*x$2Xt;3gCk{0JFpp5m+(vD=j_YzBldmPB)9Q3t6pexPGZyb%XrV(E|&$AHVYVL z+74oKoU3dui@g5;NZtzZw}k#7Xcqn`@NTJXt7(@x^6o8U5-vwoIQy(SgUIx)t!GBk zb^R{uMb~X&y|BKC;JCJ(cf~An0<5g3f(Sh;@=hP)9DYqdtI2ayPguU5ouA--iJb7x z6EvgkD85V49apr`(t2n;Q* zJD6CTsNA~kvm6N+6y%Z>M||KMaJU?!hOmxJsK>0Eyr1g*68RBd7nx^xl^J`_ve7T= z%=x3jo(u6`jvzi(lN);rces=!YFUX?1oR(?;AG$&38HA2$0#VuNGijS zN3DNS-v@pOd_VXl;g#^$f;7mWEQq(4?uu`3oim5HlmO0h%s~6Ale7x(AKHiZF7S89 zzdOY`G^?Qa(mQsJJHS8DFG_*st>OS%E3_z;K4nva90d#V7(W)Uv&Xs>Y4Xr+%3Xb0 z{{V-v^%$QIaWKOwv}C1Ita^Nz{P1l9kwyh~zY+cz_~YR>h#SK`JkqDq?#YnbnUsXN ziZ?0d!r#P&5>75cR2iN8^oJ>sr+2Ictl#42%Xz3V`s4x%p8*RU7~Su2_z= z+VS!Z63eor-V#3Z-MwF@lJrNdn(-DlGKyGgdDfmZoY2hD;^IYq` z4rUR??8aNoG#miY6Z1(5_lTYcDTDI2W&2P5-<~1y(_VNd;MCISz8qqUF0zJ5bsOaX zmsHNvZwoO40C|q9$yIhr{L8>2nwaBm4mhrQoDG#!$NvCm%9`h0KFjmBa_Mm-L(|5L}5ruq<69rNcQj||ZP(*U+|2bZ4bxO}G#^{iFx z(JfnNUR9P>$KnHstKvST;7i|$b{-zSpTjzI$qmk=u^%NGPK0EJbW?!t ztF;MG#1&Vhnd9E*2SN8&{RsPt3$*n*8f2 z@m7u_5h_^tdpJLZuAWl=05$&r1p7?S!|Jr@drUOBq_Oz-XDXX9)28ra?nn>;9W^3XtDOI3CgAQ zp*;lff=cI{Ff5Iam5vTSm@RWa*E}b!U3hcEwo~bvrIQ7@v${hZ(K9G;8BZfB!H2O1 zw>%N!zZHBo@jZu(d>eZ$mZzu49yUfHBus+QN~7+vrEVKaDrYYNmV z%T(9HFJE5k@jRS^i!oVb{;!BmRMWFhc=~^l{bW}y;{N~{_)oxECC7<8XR6w0+Fh>S zHtOEcg_Lhr)97eO1wuL)b-KuDp-kQ{AT!k%O~=YXw)kJ=-L$+&j_;Zi9> zXMJ3~S5xxmh|IH$Z#)}v>20I_UZ?1n>~;HE-28ab*TViM(r=T*`dlOJnw7n=Z8GTq z`DJM%8*S!t4(P}TSY;2*v3~If0=Qp?{{RoXA@Iw?mfjNZK9>fQrdt3+P_7J;Z3@yz z-+DO*!{~bp@^f0jXYHOq`HU)Iim#A zfgLN%{v-TZ@Q1@4HLo@MwKq(p+t|jy+)mwssXPx>I0C%*!cumMyE`X1!Y)#FM{}#{ z`gVn6M20eJMo<1e~Tk%)LtHUm{y3$)Sfh~|2x49;=HKS>9WPV_d zP7vccC$|}_3r$XUf;i=sq2v;C^JBW2=^3peNbRj~BW!%Cx%rQCU!viyLfneUADCj4 z>-*Im+2>lGn=Py>D8eZrUw9=>YNW;!h@I2{@~Grz>5BC^jlIU7D1EmGxebBbcKX+z z$!1nVG#0l25DK9Ls5$G5SGPkO34BL~iNQ@P#TnmZj#*+~o6Jw29hfhzP-~q+Nq>JF zaplUQxXEu!R@Lm+5vVaRS5i0prH?g?Ce+_kM7WJ7w+cyMHvWg4iu9G5Jk+hDA2Xzu zj+ZeV(jA~DVmtS!bjVCkOg4)(>$;x0#CP6uy#^WnMbJ&Lo*bE3eO+FiQ5Y;%WpILY-Ur@quyD4>r~wFVc# zmOwaPrrv74rM_KVZ5fZ2v2IiY?=>-Q-rhyDpHJQ#a#-Z!-nyZ&$oDj))c)CJB$|7} z_GrOeo!K}YF^c+^;i{y54%0R%8>7o|TPHrIzE0Gx5#koQe#=KWGA1$z`d8K84jx#1 zDW|-JWJZhSz`(B$IBVFuo`wabBhX#Pm*xfCgN$OcB)Rfs`#C2rODkg_dm6BlGr$*zD@e1k1<%U4R{?gBf?&C41=()aC8;aYLF$oM{8~* zWE0l8dhMk2IB`1w7^O6v_US?CS{O<#D;Y9Ly@}il0zgsJRN#4b>}EqJA9Y4^Q3;>S 
zT@~$Q<-1jO2;*6Gsdl10PrC=ahnZw;l3lEIgpjH|XJ%2YJW=u?q%@vRg8jPqeQpL@ zBy2#DQ zJY}q?<&0VsbO^V~uU+8P34Z%%s=4DNHS=MC>!GR1xIyCTwh+E6RVN%HMHp<>i?amk z0oIe6Sp>h$RMi|MpH%Ary@>@$v;}$IC7%LZFL>l>nQNw}&Jo__CM%=vd}|EL4yi&Z zFI{54Es%FR`9X`S;gClb=E!>QAna7d?I^*F&WRw~7a@aB*6wLW5{$XMoI^IjR+bqh z6i%sgqnTud9XwHLTi1S!r7`^X=I%;e#sf2|;>IT}B{q*0KHw*BDdCI!9hgs+BHM}l z%4sbwHQhQ_V8^5L>d$rVi$B!Dx1LCH>OYi9gES>%Xx7nk16j+>Y{uqPC{yI~gxnHE zh~{%$O(nrf!hZiVSvT%;sO#|QMi7??p$OX5 zFeG`Td6c?t)yZBa!I&fC_mq=L8C%-`H^-W1T9-#4t#EWT1kh`0H*4oos_`q+wqw_O zx9FZ$o;14zZhtE=+t8M)Kzn6SppI872j+Mx@~lVKOGv;PCVyDSdLA?Y-j)u;8)h0+ z<(s>^5fpE!2TKlgh9pB-Pqr%&>=*mJ5GuQ?cYcBSKeJwUQ`bNbb=|{8O6e6&PyFWo zp+rlGKcz{LAssFUBRrL3xU0UrZW~Ke5_;K&LQKBbL9w?KDN!oe%}L^`)2CGqof8$g z9GOb$+ETNeh3erk!4|32=50AVigBwv{lO*$oz??DI#10CgeA;zQ9MJC{k!bqPu5Y; zlzsd~S7N*F=6&Jwn>G8-Q_xnM=eegsmy35X5!>Plt6D?}``|#sM^)scc>aK^$-$9g z|7L!8=1r;ZZpJrd;;C$DoIj4kqE(t(+{upu8$Lo5&#XrREB2iM-P5ARWFu(4)~Mue z0_$C}Bk(j-Zi-_4FXdXlz$)FcIl57a&1(mFK?Za*iNi2uu`-qU2$pjxYXCsmn9eKv z04Y67PLg2XEW``=0C(Q*BHB1G$a~AZ-#okLP1uYf>{v-6aP{^xFGVoXvTcD7ETOO_ ze?NZ1uq)7$=_~h7CnjOjSK16H%5A|9bxn}9?Gon$o=0Ig)Q7n$QwEC{`j&Wfbgl0($Vr$7 zr27Lv+dQ;BxdUWs7!_4^t)02fu7E}Pa%0KSK`Lqh%?6uWh!Z`|6DN!X@+c1H=~)22 zY{=KxR;&qu?#<~>`=TZ8rB0uS&s_~ulWdm+WW&tmpRzgIDI}1(l^d$~L=WZHal#p^ zU#`gL=eL3 zHbGJIVkJ%S2v)LKrj8s@dSBDV++i&v5?UW7i>v;i&;VL^B2w%^3Kuqpbs?20bkUV3 zxcwa6j?P4D~WKqLc7viB&_#UN6eRJJPPLR3?yOGbXi8<46;2CQ($Np{0 z`^DYlF|$dO34Aybd6(&gscswklKHK41+*gY6_`N3UuVfyqM>2VzFd~J2Pv5POF;7( zK_>reA!5|^&bX(#hD4BJ^M#$Q_Z?|Gb3{oWVZDD0d%`KBU>|(pHWF^UO@O!I6a4W4 zkJ6;1j9iCm%iby(ufR=Q2);0e79!19U|;mqr0}Q~hCI!^zi&ZkbesqR_Lo#Rn$e@0 zn(hJz#~+&?*UxfJt6FJ|?J`GTH|)@khNrBKbKCP#70;{Fva*Xijag6N;?*D-Wny2G z+x>D}EQ>oM`fI~Y%UF?N!TZNM-fjBALy#YW2M1`bMxB=SoKhBi5bt+Cx|!UQC=Ok3 z_-#vB(4K?mB{V9gbdbD9YStFyEj?(`aPCs|C34g<&kV+snFnZF5_a|iSmYj0(VF;2 zKoXKbGI8y@iiU=sf%?ZA_?={MAk_I!ptt(PG7|3A<5|R)X?n*ml43quE>Hf}Yn)aF z(bnES;HuYEGnU#k5QCO9_{@BlN2qo|`Jjf(&ejzpMD33!F%VjDJ%KVCCo>{Vjlo}e zP5->x^@fNmVxklptiUB7#Sgk(A$%l2w%b@YqiN`Ujt(__kqLjIf>U)RtR?wC=y|U5 z`kY;YJ%gl2GPO>ru4Ut7Y|H9xaZ18)&l$r)yM1;^6O-|@Zcd8kSBquIKTMsHsZPZ&b{i#7T{ znicH>6ic1ODxEEk)srelZ&peG;dw3e;{wGXKxs~j0qzf#IiM`LK4+AxUTOPKMf}cj zPkBgO4ry*~?0>5~g;1$n_X#(IXOoOSA;u;9z<4j+TZXq!$zT4(NB7+H*_@ zb4sF%ii&>doE67uIIq?zatZn!eTm!38~);|#rC)TXtpgaoogu9mdEeTIhgV4>lM^&Q-nvTi_}KC(3yBCsa?`JxkmAyeJ0sT|3jr+~tJLzU zmhF={m4cjRp6)_{UUJSUu?!c!@VecAGa7OBAe(Ct2Eqo;2#fAp=IMoPn!kNBN|Lp5 z0%mG#h{GySDgUurw8i@ZqQS8r3$&pN|A&Gepi&tCoIx-PmNoiBZro~vD}}D!RsRg9 z7bAUZvFuG?GUvQGiM%_!IPNas1(Eq^_Nzcv^?I4)NZydB)xY==D`$p}`f|Rell6JL z5MS_|{D>9+(@of!c52A}1jW{2`b?tD8@WnIi~@ks2E8tD=cL{@JSvz}c)mS@2%eAH zuVfj61J$NmM8ZVf`sCg{bxA$%ws6^l91)YmFx`RkrAeKOv(i$VE_{uF=e>D7a8+lI0v=afW-ROQH z3Qy|F@A`*A4T+cV{b5@`4$G|BdLq1fl@VH=;EX>;B8ynDPLf^n}acyb700g z-L{tcl|6LN``tucw_&HHT~&QdRz@b@{a%7)&HfGCdixIAUbULp=M`_}lLfLp-RzPy zs@hS8eUS+@LHW(QezkZLs@mE1Hy5`Qf}m4?ZvjA!b7H=#+(7Y%N%kVX7UJhs!cF1k zLN}Ha$C`ch@xu|^XaNXJKE5@FCvj?wzH6m8D>;BbKmJkcN4HTWR)0~!PNh{!ng zI5b&sNfI&mOewop=k_C3R0P^t{x{ef6yE9`Q{N#s3Dxp$BGqlYl6Jn0*STVf`)Kk0 zCDGMuOzNxt0CgB+cQu@7kTNU$#90nZlMqX3EcYEdzNiUR8NZa8$XK{%BbruxF2Hu{ zRA#VuIij)gp(K5JjBAz2b&y{`fDi)(CDr~G@0hcc^H;_v2-S^)*spuDbVsJSg~9Tf z{j&R8!^E7y3Gx%+#`3+Yv$GtpVEp7~)CY9f2J%0+7ZO9uQZv_Xyp4sB^hEy}Chnbz z#zXGq2+s&TQrJ!cfHn(zHc)*8KEi}zw|Ge8p;!&SwYf(W@uJo~Ig^XT%8DtX}a z_gaf8W_AbHXd|$wo#-y$LL}6Oq5SZ%C}JCN>$uXqs;H~k`gg`Rk8y8#PP3Pn{pw=e z^L11|zyXvc(#Li?E@Ix%znAaVM2W8MfLlwXQS{n(*jY&~J=Tb6!e}H+%`jK4ZGr2< z1j5Xdh*Hj;bVCE9Uf+Aunh(T@;@i|+MjK08Y@YqQ#6HKp(I5_oqTeesK3QXH?Ji03 z^5r5T5RXVsEAva$-ofND9KsooTa7^d}F;ev?@OWv@CIpEfD0p@z1q1+@m=27!+}nzY|IFswdfAV@w5P zL^v28fTce 
z-s6@*oN1bKb@&yvLP=FHltk!wBj5#mBm#}n!+Yg!m~tG>J{~`$Hbf zTi@m?NfO32`b;n_((p=sz}@`z?cf8C%i^0!|4$6fFGW*HTm1QGrpm#Xzbq+&kGa~618rj4O^h|s@&k%W7Uh(gv zi}j&kZ4*EVi5~U~!$jjG3N2hv{HEF*7J&@C(dWMs7JJi#krjE#TgB7)7jx8e!n1{V zf_&S%H=raUSO^v7q++DhHT_f4fRat`yhH+JAsx>wkdduDs$9YyS$BChJ#)#~-ciJ9}lAY(JdN#~lR+wJGvcK#KB2Ks+v5izo$Qvz4N28twmg3_+ z(jfIN>ZHAzUc}*AtO>`#{8e$Sl*F;xW;)cGuMOTxITL+;2;T_+eg0UT<$V0@lCeaB z*nJz?IVVfyr#1C)oO?;!%z+nzl<~X9aExBY&|*9(_Tf~sa&?Uucjps8c+L(FE&W{M z-SX@t@b822UCij(K|pIg2iZu7&yu06-_*WaX&&t>k@YpKxjOT1;ZTozefEn#y!v@4 zBIWx1@_(tD>8EA=HV#8+;VkEl8tTp7*u_T;0V{h#YyAvapPCR=0Eni`=FqO4l?eZ;$X$gyCxGSKstU=?nk6#2F*zdxzgK5$UVY-c)!0d^|NJmVP<=xg6_*N%nm^g ztdJ894a%<6#pH+#Nu=l|iO6PVv}H3_)EAHHhQC+Ym3i8Adv-}^$_;`Pr{uXEe=%&p zH=(>pC(#u{vCbKjpGewz$V8VoDHx*?`nTRwt^sR#Nd~l=g3y{n?sMFehBg@Zp|0_1 z7EtLih}UXUqLBoGjEVy{oZE`Du(v<4oUczIq1GP*g^Q8>SJW7?BLP*?Y(_OsY8uDq z=Bs(}Ztm9&&C1<&G`tb44O8T3la^h(P#a!tS8$)C}lrn_r3TQs|IIzsuV z&f3wb;3zecgCQkCAG$`nWvSH@=Qt<};mVZytLc?$U=@qdJaT|Sp7G?|(?|Bf)bQ-E zN#Y;iONS4??ZwmT*a=b~_{<+!3VziptAvU*WGb|nbS@J;L_XNj`nMLpviyCV{H!=% zZog|e-g>(dL7j~h?-|ZHhJX(Myo_FfGn*R{!9xkaLZcSN0+4}MjV#Oe zBYU|xnl)5^*oUWOgh6h4n4@KFAIkGurJ($_aM=p)`K-IGLnKjpyH{3LfBwVdr4f`> zD@*1Wrem9i9_Xe}xN z0Xf!biHid5)$Ok)r(s=aZ$r zO4BQ3hYxaSSDf_|i#)%V!Q};qnXhMeyt7LDi!Cp96sbj#Pfr|8Dy2MKt<)BHN=%Mw zgPE*oFqbtnJqoZlEM z@(Fr1Di97O;0rakTCpr^?s;L@gy0`KAGkN^=xw7T6^fmJ$`Vc_{hBsAjR~g9gY4BG z((|GtB)&Jm~XlUJEvY!1*NOOxrFrzWk>FyQrwVP$BG$_GK!%e02)Mu&X34K;Is23 zICjzf#QR{&lV?n!GV}?SMjm5tN`)=Wrooa^ox|5%ai7!D)`nmV1111cxRo=R&F&9L#p1( z-n#86AErJlMi&c+P7+mmuRrr3_dCMTq)Z=jA1$5XY1U&*`+hAnQatpuO-i6t7EVj? z=xk!0W?koU@}W6?g`yX%{oAjVow@u8m(`h7GA@{Vj~_=|zmD$6=8Lw5z3mu>)+7Ky za)hb~fP2exIx4bNc`;-UOIz_j7%1ajZ@iC-S)sW8Hl>M3@leEj_96U=2X`Dc$q{{D zE%H(%%mi0jneWKAG5D-o7}-0mW&WgY!qKwA3Wckry!CLQbHw0?xZoWhLn-aevMC?b zJ6?+encFk(muh+{kx}(;UTB5!DfTQ4r5rrk`*X~9naP}UVIue_)i!~0JnPTv`beF| zA+`=BQMsDDh-!&f6FnO1X*H^A>zmw@`QySC&d6Bz~$nhO>Tp9grMTZkoE;jbfriG z7rKT5xRg@zdyRApK1%`)IX7yKJ0hgPqh%j$e=+METaXLq;;K3<(-x{32y#4_p1)rJ zQ+sI}xTj2w(GE6fmJMcWK^H(tZpxKwsfQ~!$5D{Qz&%4=oi((89z5aPa}dooIClx) zp{zmrsi{@b@}$-`_)o4?Cd|2|=3!w7>eJ}3zAWpHl`};x!KrP&j&Iy-E@PaKleMsN z$Ml3@4PraGoN#x$WK9$Rhu;$5IqV=r2k4T<%4OWt`9zq_Su1J|fxHXfJ694;>oCyl zx3&!Zm@qJZKQPlfedx&ys=ST!_H!e60#Y6tzU#z0aE__1K$FYKA886Ed%@@wxUl^3 ztbK_LQpD@kacR0o6LkZI=czrb4*RJaR@3~U?r^?~giRS=`u7uh{;jlVn7Z*GBr@jx zKoz^s@PVMa1o&QP8kNGYcl&pw zv##2f?rq+smex@uduo-G_7J_ZeXa}U>WZUI^4Th`Y?~w9<>O)&&TBSHH{pU;Q5VDj zL#@MgY(Y%>Co27Co-{R$OX5TUrnEHbEsUbNxLiBPt!UHQ3wQ*=D~%mi7+9^*3QONA z8LgOpV%u9{3C+T8va5FfI|>VovH6*y(m>Car^*`TtKZ~n$pS+;s~3nvd7U@F2ot^h z+OGrZw)yp7R=*I!kmpR}=)+KJ+gl$u0I=LcLr}<1bm#SL_U*mVxBdh`%QbaV?Wk~if6XXV~f_V&Uooh7(sK3 z@Nyj5-8f^zB2rdZ(C^pLPNa*pfco(5*@kxsRPEte0?VmPt|B!FBQn>^3${K%7c|>B zaTo3i3&D>_UZJI4k@Gx};?2eZJLCIQ;=-^+b|tYRluHpjSd+o!LOAm?a7#DI+1F$F zXAUlo@2uo24zM>)*$x*%o@9owX46FQveQ5r=f=L9ni+e8;tum71A*B+eINzrEMk>PvAO?1+24S9Kj&lW+JXG@VEG4>38cVgCF?nhL#Gy8l>oAx*ExN*PI zu?%AL3;QD^bD;G8&`{aq_-^>pt?(rquP=`!Y`ZCV$fXHOSN~y>6*qZ=$QFPRgu7wF zYZ!6iF^)XDi@Me`4@{AoR^P=hDQzfEJ>;#5+%K0G7-7Tr0nOu-K4~a64=}V)uVmT( z))g}JCQaD1fA5jm<6p}YbK{R|eRbFra{IoY!*L|HA@<^jir>oyB9)#9h@ zcG=!vV76S9!Ew8equga@W9zYnhaf1Xdm)w(|2>}+&aA5@7?gkLFaO&a$3PmTE55qK z-&7rVS+o)YfD_yrI3d4qa9wx)cvQ84Iv-D^yUeu)uMHNaWGs7Ji!y{F6*)+A-Twmw zw%b0DKQql)TuKMKerTLU!bGJDebj`)AzCW(_$s>iV`TlB!jyYlsUTgZzRj|^Y%4js zUJRh(;W_8cdd*BvGrv8NGLdHe7eXiaHZoCpqpuwO%0C#kjTaT6qI{PGN7a~vM}vXe zXDqw={;};cf~MO{-~k4#5LbZ_yHDIb=2E;Wj)QIiS?*P;oeR|Ys-cGBA^KnOB}FuF z(yu+UwKN9iO3||l7V^J~zvI5EO6w&>zT}DH^0;Ane>iQp%16}5Oc|kk38$5I#anqL z5L9a?a^Gwt!?A8MV5zZ{mXe)0Z5%!)5~SK}ti}Bv`^z%Rr1S43=BfTh9Fn#2CM3X* 
zFm#SxdYR^j7>1X40wQHtvF)=kT$kj&CN3l>zWXrwdaaq1HXTLOY zw;6RR#i#FM9!ggT^*)Pa6Qo$(wH0bjR-4vC@XTlKO}xf_Nm#Rv-}jIRh@+4FO^eMy zAY_>1_uUd^vft<5VBeeR{}$X7WxZ{WG1N)0f^c<0q-nf5wSZv6hkos4x)JxuBj4aM zv|6}pOLxisyh_PHJClYaMOqU0m$ncev0xqNi#i|OKE+7m?_O*K8Kq@hV!_$esi6lFWu6~f?ov> z{uuOj)7%9vS6tK_c51Q8@jL-7U#1L6Jg&}rdDhrws|2yk?g-y!Me%B{E{_`U_+6yF zeZ8CZ)7$FI4CAO+$I|p5-bOi`4eS!sKO8G=J$G4qZf%%{Qua@>pL8ZuMvT>O$migy&w8+eNH5$imjBVM|-MS4k&V0~X_aPeM%-49s5V$@FN>$rPO^3k^|_ zPhfZGYmv`brTCV7y6pa5N|cf1pGo9D)#ZMy&mI?P5yIF9WaynFx~p2+{gm=B=8p^o zn!h})v}HlpR1af_iJ(0|mYN@-j{)q>X6{>SC`fu~KdB1Y>_SgfUMN^al3^kNH)%=)!@Yb~I=s`LmwxV+NdV*RgL0Tcdb}(h?&u#3he4PU?;T?J2qtkHg=a@o4M^8*6;t}u;^T2hEXbe$KY+u z88Cfmg_tjVH?%KMY9(O=ch?`cQ{-73R^c~NHkryVF1#~Sag{(O@rA0sLIlc7@}oL? zv=)ORRU&lQN;L%4srU(+SeMqmx|R1lzuLjI%IO6=S)kD_{61f?tLm8B^0vUGpzMO4+<4HogWgFkFADI&yn37d9JOBhTsW1i~gRtUEvT zZ7!#CeerNTX5(veS>kF>D6r6=BF0^%#{1#xJl_G@s6NHpoFMA$x~TMHIvmoxc=mXr znm}n`Ez2GwPH19`T*+tF_P`%Rxi*aR$2NuM{E{lQ3gQpBS(yq0<@;(8w0F#rPML>_ z{En(TCtMsfb+dQzTlom{$HEGZ_|#>c=2V;NuGN&c^wUg9zYdwFhmsaqp7NE=?f`>2 z4jV2X2P)gwoqH0w(|-^kc32I48W?1^5p4q?q;WlcNO%}Fo2`i_ku90TQxP9{qAjem(CDTCvyar0cm0?f9n?b_$6(GSvuDHj-Tz`(SYcsijJWX7RIv?MLL zb~@D*ovNuw+eb>9KX!%rvyTg#DZzKb$TO;vD7BwkOg#9U93S5p#4y+gL~;uqmDEK} zuYDjg8|OYs_#G5YkB0V&kh$e6B_z=26`Mzibj9?QJZIuh$cv-lC_$>gF@8pKi*M2H zeOs<^a9#x+3zeV#v&Mqi!n1ez>iBXrh+VfFBjk%o)FbNkh9v)%MQe_H+Gdek{EOZW zTv{fo2;E=Ml!1S)P*ry|cW#joHI@(uv?NQW;5V+r+4RcMF^hu0ATOnt<7a99+^WF> zbyMcp;^%ph$RmSYgDPG}TW$OKKUbH^{P@~`P{ZR?f@1&d)gE^Dvj-N<$`*yv`l9IT zl{c0+peybED&0LbNav`q&l1YE`{J**KQ-M7zYW(Y6CEhE<90Ri-SkE7Aj=*45Ru0X zc&P`UF1r#yiZBg(J4Mh=b(rp@*SB#Gl~!|i{Mz}FUaTL&xEN#gjj(7RB35J9b*inu zPBld2e1)oiR94He*&3lEAjkYwz1FdS{h(tzn13sD1eWE(3>`s2;}Oq%xKT?Kk-NC$ z{DSAcCNkZsFEH3dXc6REh_6u2E#7$YeR!W3#Dey>vhxlZIY-LzmTP-=(HfZY?XC^H z=S%7ymmgIemPOVzX6>mfd0#0zvzNrTjbHBALjK~YzG2qvNQ~^w)(vphzf0eIKfLa- zsTHpQ+QYxU;aWDREhcL8)KX}zqOiePC=D|3D!^84Hej*F4hF|x-*(d`I#&}HEf#HC^=2#x~J}PUYt&p*XouB zo;8L?_5=`6T}Ufsm!X6dp#9a8`u6i7`IFfBfW)4nwL@A|t-N*^BOV$$)y%5?=AIZ~ zzU@kJ8T{T!A7{EKUry86epN}CILRKn-YZQ>Qr7-h0Y+nMZ51p=!*PLB{%t*Sk8I&{dl`iHI0Y_=du&V8nvYr zo+1N9QpLbXamjFnjt1h2RFJ*9BjHQ-A9;+zR-eJr{){w>>{Dn%bWJSI7ZP6ltrl;Y z%Bs_EngtV_5`khG%8utcYI?}gx}`?&IO)m~q7bX^TZtkmrc zW2?C#-N92r0`>A=%So}m%G+w}KfK(FLb8bq`iXs~h3LD72u_vq>vj0PUXnqYB8Hwd z`2YR`l*IgxFlgGHj-JQo!?d8g!B>q->JG5bee|ujybM&^Z1$|TEczc{%%b`)+0sxS ze`wG~>F-(Y(8SYlROeV^owIEZkrQjrMFWC4l`Vdte(?jxs!4bq7Wra_#2+f`!vP!Z z!gkd(d@{Nbg8l1CA}u5>3%(Urj_FF?h}lxHTU=NSJ-1HotScM(i5Y+UM2EO+G~k2r zk4N>$Y4&6nhI+UJ^aSJQBEI)#Z!&rP?Y%(FF(M8VtAEROta4oAOQG(ye%=xp<_0f=V<;WY+vbH7Mqcq(U@Rt7PH`W zLj#xtC-8xtEhT8c&}&8b30F~#B;_D?ci1j0a1{{zD@#2Of4E%{GNW#)xw zeyc6V(>xYe|3oW9WV#L$8j`_bY))f8c3`tsZ8h(ew7xvL2~{jG^>gCI=d!e17GG|A zYv1k(T`%-&&M1o8Ypo|fnhmZ`crbl3{zd`IwW)HFO0d3trGJp>tbD1Zxawp^iD0Ch zu~ordG1`7u7}dCkp*!5HCkiX$5rMLN21W`@w185Q?y`FO4Y5(ZuMC%XLASIG%9v}i zU_oh0@WKER3Jr8-5J=CB6y^9rQJj2S231^Rfr<1uTeS1pzU-iAcB8C|CI z<~!HCfpmBY3b5RTrkN5ir+$o*&eS7lLb?k49sLexfW8HSKXj}~5|L?ffpe?sVehPv zfijaGdnNtG3j|$xNWJ~58$qh2N{6wll!C);H%g@;fWMLDQex_=xy^e+(U5UGQLp}X zD(Gd-0Og(&T02`g*v`$pesEUZ?DI5hhONsD^!%o8|5eM>(q_LX#{qO7Q`h{sAYUYC zE^Zc+A1|$ToH3X`BjJVh%|t-HbTvnWY0QBQT5npHh(}E)HSWMR38ub{gCwxS_+#l7)hxwnF| z)OrT(^(#Kq6*a`p%NK(5cU~Z8`;ES3R;F!{AUq@_K62@0()0?Tx2;t|S?9iUN8@cA z3H^=xr47b$t3qoqk*{Aix?bMbbBq*8GRF+6QNfmvW_$oueH!Rley}+??`mvbn9poN z{^0LvCC#r#(mEl`z0vZd8l8s{f#9^v^p%6uk_x7(`NKSBtuLd0c!$edlM~rlpt$ZE zCzR*dXEq-Mcv{|B=f#^PL?8n&B{FK<&6rGqpGbb0*E&NQtfpIAvP^Ej=h-^Dl^L5z zou9iss;(%1fl{EI@G>fsdrjH(YCDu2sax*xm0#j34o9piWV?OD@iQqq{*Ar3cSae@ zQAZ->PAXgaMKq$38>&3+z>zg~>ORczD+Z_yLjSX)>7uGz!H7e$Us_o@dax}wGh3IE 
zC}iO-J*pwbwM>)M044E+sO2Uwn#U|7@!r zbGtJNOr2z1DTTU{|G*N^HxwQHRQe*Ea{f6zp#W)vrkWU(_-sQ31LDQqZs+#4vA2&l zBH|KjkSuJq79VNI0ZTDt$q0A$aX))L2`*WCT)-5M#`(MZDpD6fP!v)Jp^T}wwP;qt#fHXA2d8UCPdM7qHo$nVi5`+f+Ba2XVwZy*F z>J74x)xHpKfkM!!(uywi4@ShFM9{LJ$a<0Om8UteW7U|{zMcst9#?8b6)iVMDY2~# zOdC14k`I}jWg!!@Oy1id`w1EUY-VT0?g>^IC6w;MyTYyb?GADiT!QjzozYq>Bt062WwL$=`97R42g9k`qjw*=fy!j*old+$%UEMO_g`gADJaF7T`YL*1c4Gz-I!k+3 zcnmWfeH2&BF71>4FpL878$a?y{3TqByC9EWXAx^eMZe1teTB&dQzmpgWz;J(=ksn? zyDyx2U|~M58dH27O_hb;2zif=x0S|bx^fdFsXYFM4)Z~Tz~g>w{`N<&fH}@wOlZUS z{Reyj;R_8{%bQm$gPyx!s0kH%9|06UvaNW)WON?RwW^WO{ANlDd^N6bP zh5A@recG1uWVOnV%iu8W(b=}EF?Agc#QsG^=I9OOyOKtP)Z>60q4ImYS1eyV;Iw3H zlP$P}&S^1`%Lijsnk>X;VA~L$!rQ#7EL#s}Tvklw8n?xMJt@b?6N;02Hi*?{)MPJ2 zqv-&@tU?aI_Yu~fG^U!y#4(ziuT7LM)?FYgeqBbvBdBZj{> z$^nKrT!q(CQ9@Z?#kB2`2V+h!xsKOxUeX$B9N*rDyCT(JWt%NwQ1Pk7V~BQF2aH{< zC{Y7b@bwM-!j6_29Rqu$R0h<6ZqJZ5#)43N{sl0C0@md}z#E(^efB)|uC+-?lf%2- zd968%Rbs~i)7Un37LgPe+aPRDcxA;8WHrOO8J|CTL)U`ar*ISmbeEQbV^qH8YSz-; z1Z)S9OakSO1h9v_{pq^`lH6QJhPY!cSC7anCyTQB6W%Opk}&rm0co+e9S{j_t3`0r z#f49}ET_%2v8!5+4$`(v4%SHj%xEbR%TWUxv=)?Y@F`d1SeF>$3N;&>n`ZGKO?=3W zLi{uN&Gg^c;|U_V&qS*+DyN3MQ-@Ko!yYQOJH?Mh6Y~H8{D><+Hxz$js9`O`YI}W8|-u zlGQ;x)3cx3H%X*$;}#lF<$9-xbV78o`kl(QyJq0HF!tg_LMg^F?g8D zq>)OoA}e^!e|2n1e7MWM}KX3IKPy?^P2TRE?a}Gce4_`@SjQL+)ezk2$eP& zzsxE0mDXEuy)d|1*SyGwU`OPG$t&#EdGh`G7*Yk7*74P`} zn|O6 zIWp1+-=fk^b1CF6ZME6{E2Lepu1;tI%^Nmq{Aqyw!C|v~bgc*zilW=_293Bra_ifd ztHG0eBK(8@0PV`NdP%a9xGR%Jza!}YI|N*ZT_~;-U<|V%xTz z5LlZqOdcq<)!zS!A7p#6m}75m%TM{$5i%$LALT@4e?fg@nIw+`Z{Jj zC=qnclRS}0XHPYJwcAf^Xm=X+sm1d8F@z&V!C0X1fj-M|LPslbvT&C5ge_}U0N0Kn zi_>Z-rZ0M7HJABU@8R4etUn4OH7Ft`Yb!JxX5=7xvK@CQFL=!u^`~?hLfIf!HLcJ` z^3LN_aiXIyK4nmD*r|2-zU(aF@`B~Az95a7663ci-j+T+58&k2h zDz)Wjx){lnpP|_=7v#PM!I9KT*O#f9mT-QbEWi;|kBm=UZ|Yw+5bWRlE991@Qp_QB zH)RTy+mj@g;oATjul1X+KX?)6ONDpULT;G{7(sGq2OO~iuGJmw^eWFR0cs(H-^*d& zZJu8i4SxxUR(C#=c0V)xxEai5dHpeWxuSic+H_53*~>h~@b$Uk0R(dSP%MG^$#vGk zv{6^UyEZc9p94>xEZjW9T3gD3F*BCVWSeDyTq+1MN_{Dmf@DC=>c)zlFQ0IQIKI{8 zg@4vi72>VRY^-qPZ7AdS*4L+0Z|pQOV;I~1B{X64!JE%^jkur;JCuaOz^cE+IzgVB zb>*(|gY2sM+k||Yb==?s<}h19jg$-lCcdiQO_&V08sFcXP8ZFd<*dpOP}obhW_CJAFQ56J%(`r({%Eli9SLTK<0Q9N_Na~A0&_(aNp#gH>D8C$k|Rus z$X)nxrOzZk7{P!zj&Pg1`K|?;cBwT>pCeEB45s79~~AGrYR8RczCL9bJdfMl>{bb_oxb| zvBjW!_9BXZeQ1>V_=n5|V|Ii(mWE9j+u?X7r?>p$0!lMApZX4a7UF5btom6Q)2w3P zZWu60jjEUA=TPYJJMl8d88SMAN~001XyQ-2!Z$-%UjWa2>lxx2bE)ize|p~>%d{am zmXXmrX5AxCw_9$PQ<36|;p{sm+e#tF!M|q0Tgy*$Z+{}@xD#I4$MVn%enrw%Kg&!# z?=HBb$Jie%+UR$eL1gP5sww@=KyDda7W_23l&@q)Mf~P**6rYz%sYUck_{cGT->IT zj(5aWhoi8UnLDwCQXc>#H=VDv12)kRJv#Vjt3Yen;T$dl3bG1f3_j)ElqJ=reV1#_ za)O;RnR7I-#jR9bv5u&Kd~3mi@3}@cmQj`x&G+408k%PW;>(+}8>z;z46fh4qkMlS z6jtft-!DEXk|6?npE*X$99<4Z9CxcmUgyetNfQ|h?Be$IcwtcG)#OcPL0`u$HMVy2 zrtd*bquLfuakxa}^|ygHWMRr%+<%wgv`%YA13y%u9=lY(JFbN`Opl=fS)OkgMefw} zwJ%nnz|cJh6y|sQSK#8tSbb^L&mf~H1=HcEbGuvQN01rsK2O6+{y)HvteGAar4a(C zWv@Nf(Y&=c*wq9JzL#EMz7*MbcA|UaaD=thJd-NJS!gA!KLf2AFAg>Pf8z3>uf!|c zbV8ax+ex82cZy8RX6(hiBE)Q`s@Uw0gD;W{{obC&`hJlA2N*2Vfbnfn3n=ae=HBH1 zA0ISs_`~PEb#{~u_v}>f(Hf9j`~ysoJ-_{QbwoBJY~vraMj>&nznIeje=Cnn7s|8L z45OiSEm2pL&rh-kjLs-3$Qu{MHr>O>d|6(>@fJs2sGd zHMYe_VM+f0F|!FiWS2x}QRTRg?Qqzh^}R_^8^YKcD4_CrIdo&o%lL;8ou%l2)rCq}in=lqkMIgc9v69gBb z2s4yf;uPVY$td=jO4E`r%PDpol$5_jW(g#v;V2^~`J(M`X%_*BcKvaV+2N!cnuo%W-_DE-_YK%En%w{`2y}d|SRf z`uH$wo1fui^G$b**bVNOd_UvnwE=Mb0eGS|^5=}M{`Go41Lju%hU@W0O3!H*EA zQT>M3x90xGOOEUIxq{!C)tz{&Unm?<#Yd%-&?|r_NH|s6s z8vQp}-(ZSccmzD3!6yd)wHN(1gK#ViFROk}nsZ~8bajK%_5TgLMZV)Es(XUu?7)Cq zZ*7bAAph&-nY2x%b`{pv+M%auxWW6ocsaMX0x{r2e!+!rmD4yOd?XRnzx^mIE)8+~ 
zFL1~fVTK~@4}%LE_YrpMW<1>A;2}G>w}`4DS$_C20tECASN;KJWc^`tz7m$jONSdj z#Muz4~<;k1YIS|PK#_DGCWS5^fy8hcAB0Q!38`V$U4duwd9YgD( zP9FFdH&}*s!7-qlpm>(U6Maq9?!QC-_(!HN1u|MZU!t#{;4}={Rbh)Xgj=cpKPzPa zSy|WsMQO3&d3hqfX2?Tc#eZW4x3)B* z7YjkNLj}tyf_?SF!WK?K`nAeJb)bUwNa|RX;CDV*7!8)E@6_5x-myCIM8Ar+3qr!L z8GDJF%O``;pu2l8Cyn$g}>y5Ng|D z|8^x=(!s)u!E&dJdT9kZ^r;8nT~MIMp*~Ij69Ff@;G}SF(r?NWRR%`pQ5FLEHOUX^ zTODGZv1S<{GpD0}!0~JjrJOt4 zXWLQ!2^I@P=EWcMCp>!Q+0>T+Nw{1Twxh1Ro0Pe&(yM->%+S~R zF=cM~2_g|u2W^*((<)R>sZU9O=s?$w(q!#T_;b@66Oj)MHaE{dEFb=fu#=nMD4CT0 zpv&);4KAjT02BTgLW=dgrEZv<6%d_p-nzU>4Zpq0A&vbkIM)y##TMgJif>EiLOb|Y z)BErB2M<#VDzmI@fLbAR$jwWQY5Nmeqtp97+ac2QYR>z_#^sM|{RzQsl4IZ4t}dj` zihf(A_arX$t-O68XtSJeL`C@d-7MM{Off5{Cdh7!MAi}O;N`4AR4Lc|)MHj?;QG$_ z{tW>EX>-YM*>ME9|=qE2{w{WqkH06*u9l1*?dEVzj*G z))i6Q>~=0}K3io{B}7%DQ}>EJiymt-{>oTzo)F0)K2!QdpR*`*6jJ|p0Q|LTP_#Sy zgm=HWJH{KG} zCG@^k?Sb^3zRz~Ze>GB#{|z_d9;q8VaZuPGO&0*-xCU>>5`aLaa11U=RA? zPdINvskZKFVz|*7(0y(wG@#n}<}v@I`IE>+Ne4lUw>b&)x(_2}1;oSEK3`DrBc7kf zR6q1{Iu&p%hj1kah(i>`Ko~g2kL$u==kWW-{oqr;esmV={Yk`kJK^&WU|8&M+L`y^;ni}NST6T2Mfd&%^WWTyMxs!tGLsnN zQcp!Kvnj>`73+=Uq3(EkLS~+62Bhs8Ya2x?Ou#MLRJf??n}&T2EZasFJo_B;(kS|$ zC>b^eqP3f4LeRaUkFTMb-->AUAb!xx`AH_rn00l`@vUCd%Y4%7kqF25_EGa`z1j~jby|K4DZImY-vBdSbZ8>0DPK7bY-4J?V{R)v3X-8+OX(Wkzh;V)5 z7~_>stV%$=u{7uHHMg9MeSFNmRX63|+v?6%Vd*qQsh>M>Ozc^?rjg%OSNgsWZ}2%k zQzBu^9&ER_ntpM6;*S;BiJjA^iqDJXG+s9P8cF%dnl^_W3#IGIXTeQVt0x6t+JPNBgXTVyHy=B?+Y4j zFLPzWBcwR-!wxb+{=M~#vz~8uC$F>AI#WMMb)$~M73AKpFo-HFD3lz1GcC9LQ&H~E zwG&A)md+=5qo%_q$eI^6q5X-uuj;Kiog2S(eQ|c8D<8rJL zKWJvdHSg_?52;CIRLS#_;4u!ce)?1aEzqlhN*2s6%}{D47>_|uU>2z@W1qsE+wQ#` zH(#n#ir&oj#mO;;p%!aKEoRxxe&Qty@I;2#OhcvdUUT`p{uk~mVJRy9_RC*J$L@l< zSP|az1@AzopWAUdMEdVxIfFN>BNQXMo3Y*f7#Qf*%hhRwe`tzW6^*(lO%voj?N(@! ziM=$6L$%VzK83mxr}7?8IL<9Nw?=>Om{HvG35gPD^69G~y%|eX4@_;_=2aP9>^aY& zOTGR{iB3*xtjBf?axZW|Zk?Q7u|0Zotaq#!v!a(0>-4D1Xxm%cq*?|)&>qi5rydh- zu{*t4@zX2^Eu@ZeFFlWAYV(^e<7$w4CJZmaPB15O3*E%sBKqic&g5zz977D)rH|G6 z(6BCvJQGQENy_;>mO{4*czzvg+EQ~{w~G%sEUT`LMl!eG6`WUNCtZbEWkWV zDtE-&G;vhkGmiV&MEUK+ldSVn?jF6BFkft=M^G0Ls5AI-c{h&vzGwILdPpS1YqkBo zU8jRJ843Kjs+6*`PP`MS{0<7CmUvie_i`yo&W? zB_JS3ih$(MB{0$@-J!H}*T4kl_WOI@C(e7G^T&D5ALqT!AN%4G@0-2vz1LcMt&I-$w_nA<^Lzp#mi8Z8Y-zk2;$?AG+Hu>kx6{qzkLDVkNS(C`YDyyd z{N8fqwsqRhZjQqPgWWgji4PwK?ZzgJ8kf}G8Z0EGk5NoU2|%g62P|h0Uxyyd_w*u~!ds-XY>jeu ztm?dRmBQAel2lB4y_ohSwA_w_cITWY=EsYYzMuZ;4l))wX+~Ku43m?De+Pg2MkJ@! 
zjU@7?PZ%+zhH5C9J}5b=9;k^=dNOGD95fKV?1ci#)+dRA?&@yDO& z&bGCDAEz{kRo+~%ch83#cUKM0CEV;iD~Pz&;C`tqu1z2Gy)JuOgMUF=@=Y}_E{B$e zhS%qn1#M5+9JkLtwC(X2f2l~u*Yr~&f=F{k)G>XH`)08NAXg@{hr&9aoBC+#M3fz=``(A*<{}NXT4ny6;`zNa^0PZD7DqQ zV!CpFDQpR}e^h1*Vw5PjHvK2(qtg$WaA=ZhV^sMXo$!~WuX9?}LEfTcx6KrVT!ZHB zeo(xTMdVP=`AoL0)X?^vq4G_L*OrD4%qh=y#Gv<&6t4s;&48J9lewzLaE^6Oh|Qk{ z6S|9v;i>1*MfHT&lFhq0Jxlg$czf>1&u4$wpMC`++FH)Ih9jiG!W|gF4Yf1j_Lf`WBZ)Jv`{0#>2u?h(0D!!i7 z=Pc7F>v{awI;u?WcK&&uQ^)p|eS(&B?N^Hq;|kL{bV)6eCt=33e!lyjTTf9{H;w;F z;JmZa_6d*_qAmg>VP7VFV8QJ?w5A@C{G6}8wP;HFLHDtcNi3MLcT?rJSHug>SButW z+ZUs*qa8fcDH?88p6hLMFEw1ub zRX`p1mnFKno{QfJCi-y!GNEib**DdY{fMu4T0%DpbdVC+tn`z=7t}dibRKCxAu?d` z2yWvMDZQQyG(u8Ib0c{=tRTJuTDh%nhxpPiRISwGeo>eBL8i#&SIdtckEB)%U`?OK z_^7-b%BT>_Eg?Ck_mECNeh%hM7E7Qc~l7Ts5bb7I4r0ZbEO!Odn5 z7jb(XzSeZRmypmB(U%GG)YWChe;NoMhd3{bFlwpu8*Z<8%lKKAC4H&0mdr-8?3``7 zl+Y);dRio8Pi4!ITTo#mn?1=%tGFgItkxyEO>bVl{bt!feevP;Y(G%p@+(l`+oO!j zZBlW=%cLclnKYd$fcH~hIeD22R(j&`=ZKa*qxlP0kKZEd5xMUvFi(a|EH3ojW;#9H zIlH#|wc=8~aW!O5>=uwFL~kiK!P*)~LtYo~gYK7ZKn0bOlpXP0>|{$62T;-ifys5GP#7XxNTyPu@f# zr_GEQW0^?0_FcB@&2r=Vy$qry*ZH1cc^}>9Xna~ZSLyvcPzl89L-o$?`aPF-H_Nn2 zLy9}1`jpyHevtxi`&FkCL^z-cPi{WZBW}2sf1Fo0CoG}IamP~H;`Cdtldxx_`R8ei zRY^MYihDoLd7YC-syMA&j-nIQf7Eo;k(WRMJ6tuV^0dH=P1lyy2MaZvNW3_bkv12QDd7^`;l{lPV9tCZ$+%qB>dF+`mJlm9zGK`d14uYMxW~)F{HE=d;rQ3~u&1uX`Mf$}J)7FStUFWh`R9WL0^hO~=TqPK zqN=NgQ_0W5m47CR)6dmhRLt_bJvSgUOIlZ;!Wk)F2t*lu9TVo z%lGeQ%53&~Ua~_JI#|V3_B)+TUG#Xpi;7yF!yNq+NQR`9p_P|!DMu&sV4dXc`0C?U zwn~|zt!l;|v&7BGmak~~GHZnOee)h=CzZwz!evv8#k@g1d9I}6g~ij6_Y>jFQ_~A|OipN%k-~Lm`EbAi{C29FXc=YHw}81Pc#wM(J+r`| z{cGV5(Q=xGt<;EV#E%bJN*cWLE!Xiw@LLEQ@U2IQOJwe$Z4-%@XS(%7Mzg#pgOc7i ziR}cr8wk_dX(IB%^0)O<`1E%(BuwtTQ8yUQVYn}oAT2H(D9O19*0$fw3IeJuo_A$2 zp>v;4`8{5;zXI)S2);_%FSEeN)IWDN*iW>P_JQr}|Ja}2Ut(xAwsE>suI}4Gn%!G$ z$5W}qczR?ZsgyECDEl?jerxesAm|4xi1%1}szc(}SgTIszTnXnDEJ6!*ozfR%7qCP zTVpGLuEa-hk#(HN5+Xb|LD`YBI%+`jHof%u2H5Bj%oNbqB5?>f6~&_1oKz$42eO+K zKa}=ax`}M!K}NS8DnQA^rG%AHoI~{;&aVwkNiz&;Q$9n*5ld0{+a!d@aM6%;ZZar< zPwSv|tXC$rb32%obS9n-z4OOw*q=B?oGmZF5U<&N-{A^mj$)Ee_P1BIX3aGZ0Hb>0xhGn9l=VNe3;xHu zZULR9Qr^r;r@Nf=+wp&wuHA`Y^ty0HA~#uxUsbK2s^B1B;!ZIVcy|D%&%OeMdnN$g zp|3zcQZMGA7EVmv$MR*iW;ms+EUfDn&FP`8D8Y310UnxGkmft+ek6|mOUmWNXCTJ| zcty*;9h>SO26XxmIc@__uXj~cIZla|qS`=!iy$1CTkZ5-YA|IH-SlXxHeYFHpxg>v zPMzo0Gbn7o!OR=R)af02UtT|9WWmz&RsA#mwg#z(c3VUGeuBjTeZTp~DH6L&Y`>k= zJ~}H=`kWl%taRbSJ^_Tu1Bt3VH~8ID^@$}rfS%3r0f8egKi025DxhN=u*HpgCpAg- zJ{TBu_C)-)ZY4ZZIdqA3ny5w!Wu;a;BnB!X76qRv9JWc$m2Mh?->9b}7U(Xo%NwuE z17X$xYQ7MfN^=EDak>IsL;Ue$0Pke1ForJdmWmM00uG%G+6}1X6^K#ZZ9T(U?e;nc zjT=xT2liW6_X>pcMja!u*Xe)`P;CK{Tuv?? 
zG@&XH(HRq=HTluNvA+a7BfyeB>LHMlb$?gPgranXUI59h2V8!F3_Hck^(lm~F}D-o zm?OTWoar2X;A1f!>&x^yt_0%_M_YCQ3R-{k?-b;gfEcO+hyZ#l%b+=9+&QvZR#iar zPLlc@ATe#hozfMeUv-9K0Zz6kbQ&f<629HyfZYb1Hh>L#Ynu}f_-}$lma2hT8%8vi zXV8Sav)NFT(ng`5eOmLMmQ@(`+0T>< z2%c!DOCCr7w#UK@uvN<_umyA;@W8L=9s=zZr_rTACPpS?1@{>V_(ZE{wShel(fk8< zz3=6li1>&2(UXY9&j{LnKqY)%+{xp7CHP`k_sAke=8I&YvDFA7OTT@8c|&jx<0x1ffxrg zKF+G(?%?5@ojJwPa9X(D`+?Q$h3D()0YXJ?5Rrjc-X>nqTc!c~BdOwiCdDc6;9>J^~!F|y%XF3xKJ zk@5G)*etXDH60(S5*7W-s-{%($7Z}Z1ic2#NnW>n{9(4`bDSBl5j1d@5mlVWKkp&_ z{R8QoW?)^{J!0%NSl5GifP#Vd5Al6?L4BfaX4~~lt|5F7+-*eNrRz=0EFd6Xp#Eb* zp@qu2sAu$XM&;|6HaPxnHl(s3`s{;y3?D>~#_jZicW>)HAVdDek!1KggUVO{kp%=u zIldSFF#HklV6x;UurMlZ$PB!j1E*h=Cy2e_gumOe_a&@bvI=AWZ!ER)KZFk;zg{3- zJ2AF=xP05#l|Cv2dUWr{Dr9d`5Gx!P9Wit^n0oPdt6{%I_3fjN<$aa?dp1o-KHhWu zhb0&H(N+*9#IUs*HXkT)b04wv(GPI>KFJU{0whB?aEk&N)i0~y03$&zBR>9%kq!`l z|2AqvBtJk0aY&13=Rx=5gQcw3{;nMErKNben}!g^d_a6lRs81!iT~>(0KJjtt_WU| zZ)SZ2q^19oJY>7p7Tdq~1pmbf@~^XFMp%KF*c<|mjOKU0ndAObmL zlg(H3lqka1zX!g>rS-3k>^?(P1vFdV45d)`VNrNjEeqMN1gI=^QDjzP>Jt9XQCM~V zV0m-&cVXXN{~pc2O2j`TEe)+Ih*Gu2n6|Tj2EKmTmgUU#P-{?3UD|pUj^}FW04~It zPgfw|o3cQw`(A%VFcWwN*_mnPF;NXfL;dBTQSqIrqY(59MsSah}TF4g5#}OH*v>%-S{9q0hHA~y*f_<|La!C)#Vp1yGK)VA9b!k z##MsArh`ujx&I|RmlH7}UhQve8e1}Iq}Hr@u^!{_gP*1X$A0a_oAs;~GzehEU%-qr zDSv0g-@&W@Yfn15&t4sxGIb#fyi-}cQV6HewwgP=%LhO5kp-ZDa~fCw79)&#By@78 z{~~2R_&>d<9nhb7|I?ck!wRb*=(Zcc)w=x{L9cic4*d$(8?>l27vPeangz6!y4UcZ zEvt}2aHZ2f8+sox6V-7fHzHeD;3&iBoeM$l>Od-|qQ0aIgc*!g$`RFnO!x8sopeqI zxGW~I|L2JTf%~&X=vcb7cr)r~g|u=J5cAcQ0Y6&=6NCy3$M9#G)GO>+9xYSAx%^7~ zOKMl3aq_Hx?uf|lo&Mzy`45XW=Ulp;qmcbZmkc{I#Rev#m$7-)!s)L?VXfU|^8UW^ zYV}nQ6Y8We@h=wnGKFbHV!@7*?9>lN;+DO>qR3AhEMW|jTf{F2k1zFd57u>Ns!iq~-<%GRW?xdy zCV5ed&>b~yr_M}?0M;$cl*2^rPrn&(R(s@N%97Zz(rCAvV@NC_jtFyTx+xEtGbr2M z1OMvSb zA@1GDt(m@+04V$Y!h@;~@@tyk=90P(!Uho!&h2>cbDigKXN1|(V`$TX7+Sf6E{(~F zU+qnOT-bcUQT~&IIHNkNIfSF!b_NhHZ+ZoihwlD_yPp&PYYHl$!=s97_2kcWbom8d z4>&zCv4|n@TJ#R(aolM0U?hC*6R;rgjx@LRc_s;QTgF=@`mMb_B{>O@>`s>73l#BQ zE}yolFiRP5s7t2jz~}y_7ihq3{whpH27CN8&V;0px}_Hb2gnZB9#*)~eUlZO*)|~l zkbYmX&h`WwF(J5+_kROWA_6G4b;{WHIyGI3F1U}1bC(2)a!BWANp4wqup(5>i+J$A z{(dy7+}S_t_Yf#-N#Hx%+3iy^^2MM9}iy+!5jc>G0+AP=){0yy9hZ zaPus}S6T;87h4KgAs!2fz!7NGqqR!ZFr8~iHCG6h_qR=%iH4PFlT3zU&RlT6jhUUV z5ieY4{R)KEHJXxdsUe((qP4}__}@g6ZNq^;nzojpFHLm{#8s>g2H*4jR-kQgs>=vX?#U@rMcEIzvnSRZ~ zfco$UToKDe)(@lXb4R@biJ`lc(Y)(UR9$$`79m`n|ElIcrm+~v;AM0f7TUK(16=QsjX zI5WRCK@6&x51AyqQYKMUci-NIC_LnA4}qdiS{)^=qn4Ipa%yUW*(GeG;nC1gf4Ced zlQ`!a_pELPdOb2$tnoM_pTok!hOID#KGc>r@dycxF^UDcldX55s)biNl5eNSQ+QGM z?(b4%p;h@>cR|Q&$VuV*i1M)$D95smv2aln(1I6nHHAZbf5zuTYspI$9UpJ-x~C&D zVQJyI^6rIxXNM=aA@{M$F#uuK-AgqL*JG`aK`~>(J)nFYPamH`*??ZeAlM2IprzeN zwaMKe==6L0i+9IqE~W#;G-)MiDEj@84!)O|?y#MG0&~LP-~$4<{a zb$KE|H|}NM+ISx6m|Zivd+*bHPrQPjTf=6CpFTJt-hnI7>xFkxnm-VskJzbe^b&Q_$(oHe2pa7hK=UHwU!Sc$86%W2 zm};3?OgHS%`RO8%A&&Xh%zzNT^ni<8J`0vG&4h*MJrG<_w!rs#B$^b)eGMGWg*e#`|ZplRUeW=Uknia z@LvNofG`&vudGBOrt_1h*a={%oJgGR3aGvT%Stzq`%19MhUFkrC#Tzf#*$VhFtXpg zeFPqDDF`!)x&Oq;7q{MXXj!1&ES0e9)Qu-GCyA3_oQrh-u5>R!8ML}+G4?xNXRKM0 z!}+4OdlRnY_c=7L`!s)#$U-_Yg`i%hsHKhdQV;&CT+BZ%67bh2fTJ7~UHC*u5exkJx3d+rNZd4da5W%>B1wek$|HDnEJW-R(2^@aAw# zN0!FK;LFdeWURMDf5wEiBcIJr&GQfSZ$B3M86bn7-?)n&jsMn`pJ++z3$kDNmXqd5 z^&^I(;)fm+=hUx-TFnr5O&Y9=5?`WS3|Ifpq?Rpbx5RlI?@-g@MSqs|jk0jA2F9Ny zQvL+UY1dDZW`NEd0*QXTt4ODX{Db%`-QR|yVAo~!|^0oE}$ zGq$#=c|+f_iM5nZM6D+S=_7fvul?^dnuxoNY$^_k61s#^RJa)Nhf64}tghzECgk#4 zj^6S{!X(c){3Qs$_-eLRPfZ@)_Z&Vv?t9>+e_0G2K1%A;$kwwGlS;!_{qYltY>*%< zmHu}3Q_bzo$a__Hc$Lr=XCa(kgJM^peCTA;-R8NokSkF940JmjNrId_y7%Tf^x0wN 
z-30FtO`WR|oWJh)%iqdOuoFZjsIOq?>lEwy;XW=DOz>hHxgrU)Hs*tueAiBk!5UO>GZ9Pu`ha~8@q9>hfa1& z;zUMdwuNo-gx|WoF2VM4prrQrd8g}^9-KH-Tk-ZO-6e`8qOGR`1ilkHW(r^4C$-HK zt4{0Iub>7Q_tk^f8X+&_cHw3pTsRME^TH<~@kgoal!`Q%S<=z%KLK_!0jUA)!Fc}l za4=5M7)T3n*j|LPqQM3SdfWUn43A8LmZQXTcy!%7bZg}N|aGBGoE6}L1 z&nfzxig|`JPyLrEG<9HUjbwDlf~nY6um@ul$%Fh6w-NZ?(=bUVh&J`)>$xb695KY` zsVzxRUJ>=dxaqz*SfaUCyf2M05`qOwaG1U($YT=6?J?y!5gv1 z?z+EaKQ5jS*oa9873CDkjT2aiHel^Q;|Dg^Gz(}OPV--9WEp|nX@6&_cp8CtzC`|S zF?a`BfY0r@3smPA1I93WGgs5tvUgXUEr7^c0E%t1=UUeOi&JUr6mB#iEav{7lENbU z$&x~3qvQGy;nB2DdhUfm8+N8`itUhUh^%<``CS0b@+Y4Pfz{!M)euWENr=sl@JkT# z7INz7zIagng2f>pP7pa>=XN!U3&!a52Z=s$czSIh@w>~VN0~1SmaO`E?Cb%@+8iH* zJ@dW4eL)bcC-hNbpUpVy%Hx(V#Y3x#oTcR^RM7;T{vY2$AC=^3OP8##dpU6@5Qy}z zeemD`6@$Mq(Zz6Od9SA~n~$uMhcTI+jxil~7ywO;7T?Btv zyGse`f?l3hTU)xAJ2jW-OuWtD&>eX_iriglh}~35Vs(_1%?ul!a=}QV*{6PFvP{0k z(0`bFX;{F!t)8 zV(sEy$GARh!AbT_LfxREV%{$2c6MB0-#CG@m^5<{S%hv_u>llK_(05ABUqq-E;@4UPD*G|CQ<`M4;z}m+V53c5S>qx+MBq zG5#oFS!(4sYT4u2tU9oYLj4$}HCYN=_9rmpr_7xw0CAyP`u*w@>43|TM2A6fE@8m0 zj1G24alst+TcV}YSXHRz_Nl)nP&*|czRWCD@J{qg<6h>Dk#_{o5>$itgrKCEM5LoH z+lq<^I3ZamN`~u>kD#JVt?_VNQOjO|4$=%ynp6tKL`0O z)E(^2O#^!?tGpRy>6mw$^=$(k!L|E3`!63>DPFfrQ05+1<}?nj=cAkBy4Ox>TNwby6 zPK!`I&kMqDUumoVgO1gN z6&UZlVmx8`lO=bZR_p(zp<_5f`~5?aRD$z$kW^$zO0uhv(t6CN6M|OmgJRMDm|gz= zIY<2uYA^VAYDdv@y^Yemyo6lxZ-3_p>VgKfr(S^?hlUTmE{e&t6vF7Fa*%avvUI$A z1{!WU{7)m*4fMA4Kd5t7%U;K2d!yrBAK$z^ggdBNo@Sreg+LIiUl*=GQ@nuR;H1~S z3p!+kbGvs1a>)zm!HP&btZXfOhXRSUv96=Wb8{v;UaQe}W~a%014SAuENhw=NN>Za z?(>jaQw25=1RqA;Pz27g^R(_OA7)sdU!@P&w)n6uD zRS;JmT$cpJy_w?m&9ke!K^$o_tK{`SC`WV2sLjJDoiSSKg(_l7tJt28o`Myf340uG zi7E1>d<-kYK1@D(Nxiu6;Om#w;Hsn_@?E?`mXUj8G-n^)@3zyepOzgjxuuE@JUJ5% z_VL);kc*X7ta1LMqY*5F&8N&17V_7@#%TbO{?Aa=|3vIOUu8dnGV#@88Zvf|L_w2c zWMYyra$SLpyPCC80}i|Aid%Lq1qMgdK>Pm6k@T36su27{#IeFV{V@}P^SF@8{URpo`HSk zqg}7X^rY;I5$}xaW_W9Tn|Du>Q|<4a<+{+7R&%^fR9lu9_NjUZ0+)K#WbKk1T!D02 znu6t-V~efLIGyM1r2M0&^y7QEACPsGxWsxA)CCU&82f3Wr&dsrbsAc z0qtOZUg@6-2xK*wvl`jcvGn)690FdK{tPCcn&{M^ycuc9g#~Y~KxKt9W!*VJKW)uF zex%Wkjpa8)>aC9y7mS3^YMEGj{k>a9KkKQlVL-eLO2k}@}5jy9>N zEp14lSjpzvY}yB1Qo9#6*VN8W^YSCLM*p$Gz~s1Wgbe}+5U+9yvN{~}=EYKo#D|KzCVeL*yp zwQbm4^7`S9Jp-PU=5SjF{R<<%JuL5KeX#z%?B2**n-723l6Pxur^bhP`0J1BY&Nb3 zWL|+<#ZcxD5*t~2ZnZQ0JOe%@8=7p{EHLiCM60_(3pfV#d^dt>yytiFyu{OVPu z9s<`Ne(gNk-lHSR-~HLqU{QjJi6Btyj155#CBO@Ky zu6aGKA?CvPoQ938TPqVS^b|1sNH+8t5jOL3QJwJ{{&zTmnN-Sl;$q7H#{_5mp&@@^xbVRedfSHOK=I64-h~< z!;^^|9BWT?|fajCJVHFm!+z}D8@*u zzy9;MnC);YtbP9cyC=W2OoBhmX#xvjkq=;)w{MX{NIJX2a}t-zK=_6;UPQ|jzn{u0mS((#E%7+ zRs%sl-@DUr2#b2IEZV4$arf?Q1jv#BM$usIsnhU+qgF`ig>F{tTL_w;IJ96wUyvFr z(o#K^@xyjkOk$5K;&B##PPCHCoa-?0?yeu;kGy{Sa3MN?HJ}fGN1KRCUfLLSFek^; z!fG|wenyC`Ds8a<#`cplWImn}YmN3kTxq38-I8UtvoExT@mV-4^zIhOk+n)&e_B(E z4sIp7rSd_u8*-orzYuYvx&lobKLc!Kv<=~bP8(27_IwxU$V|Oygj?^B@ztpvLptmg zRAjD-!nB?*eGO?fP2k~0Po35+Ix%c4+>&Pbf?jAJ%NzV$W-zxfrxNyHWH6{38q150 ztQtP`f2KYz7R6%7T{+s6Rr~W3FR1>tK|lFAd7pyZZT9*eYA3Ed$O*#!xfs%vA@ z_PMp*b7Ph}=#v`|pOIB|O zcXF4$LrPaeizLRwkr0F&{6InTf-8#(x+4bI$zRw5B~j=HKtAXdC<wTQtkmyLH;V@4fl*97`XJSFvu_#1PoQFLmewWJ}}1{bcClmOn|hq$yO97IK@j) z!COC-C9(ykLL7a{z&>Y}pbN}d9=hACBCquriEI;UCnohB^&f#?>q{_jN=2Crqj=kVRk(vfY4T)<;xfi#uxVdOPZL7>NH8> zJ8ADyn?{**DZ_VtgDaK~Q{TMud{8ejzhi(&!&Tw{3vG3xg$daF8{j_^SU`_y)qt)Z zV}S@I6Hx?C+<2IHAC9Hj?h4>V2Vr=;9(M*jns9V4H9t~UGcoDCwHg53?@#)U~y=B;Ginc-|UgsE4WKkgX)nxNEh z8Y5FFiM&6ISge+1Pd<}qQa$K8`aQ7#mdX5<&}6&C;l<#6_Pd$@6ATx`&$jA@v105iiW`PJR@3y!AHhSr`ZCsw9 zWEs#wOQpb1^6}l%7#7|4nzr6{n3W{()=g<3 z#Y^dS;1gPc(UJcJ^Xadm1^eu$Au-Ai-QGZKfK3xk9e~YwLZ5Yd0zDxv!&c$8*bC^b 
z@Vn4w-8y*&DiIV0a=g12aUmOO%@5?izIdn)ViX;(;WCC~As`ii19J(CozgANcQfjBji+N1foLwZ;c?T!Pm)BfPXbL3&=5yq_0!A zzZa=8Z6c8jZ*17f2)&P?7rn(l^x1`)+EYltZGPpXG&EL{W9D`rkZxw9{X>*wbQ}?M8y)9gwkYXOoNQk;x4Y{SVf0^4kW-wYmHk)wPxH$V}cH z3GvL~G&Bd|v`jO zq9kwJE=i=^TAZ#<)6MkQRMW4_r_fM-Ph7lLO7fqX(VcH5@4mj#fUyxz)_2#%J$QId z37ekEL-T$n3zbfk9Ks#dbjB2AaMQ-Y9^?v^K1lGH$w;ErGG#h7lc#+_@SlhJspyuI znH+@}3$wQv88av%M&F~*WCA8i;L#~;q;mqKrrPn)^hdzv(vtrLFFbwv?x!;`XPVLR z6)3WW3r_4OaR`Y}Qn06xS~eI}m_~B7QHgB*lw&(KUizleY@-!=WGkq@ZaN8Gb3Cg8fBK5%2-u206NZ7={Zl zL~Jzhu3~ksKnaeh66i^_|5OkFXq6e*pA}w#zA9n<08qDEv+FYEN{bsXE?t!Y)J;vF)19ed#cLfTfb@=drxi# zlJWLU(U#T^E3ZV=~gCtMv@;&fJJh8&WSkZQ7O%O z4zbuq_reb#T9Y&JLfH`i5bnLEB0D6cgZG-b?EDo#oiguFWdPVz;njj4!T3p z^Ic$u=zzX=u+KK>Bj9$b*p$gC^{NG8xRw0Jwm~K*M>3ZvB7V}g>A@04KQ#In{1D``m_|} zkC@&&aDz=;ABx116)k8MWs%rLSULH>eUfQxo2{H{coQ?kXr4F3KYnaFXX3TP(XRj2 zxVdEimuMP7bY#=Y3%%kz(xLFEc}z1m%Ef+gKrA3%e$?B|(ixIZe(j3|=jZidKDP{xu0A z_9xfvF&+m65uzwYG+gn}BjWaFjmaJU8Zu?#$BT_IR?&xBB|L-y0Ok%}u|I1)XwC}0 z0@;Bh@a*N`PaGJ+_pKxEVZNoroP8*I%tO6M*&c}x!xL^-dI90E4OT^Jo%2|k$yeqY zN*Gaw7eQzuI_x@rBJ*MS;r=eqsU2+2dnt<+ zT>2hI0_Y!-Z%!W9r;%8o-HSDPR_wtw#wLR##N$ff;bG#Vs-+kN>>&U9S9C%9@#mK? zh3~cFeAasU4__MfRCX*G(A|6aq?L%(l_KB}KL`YbslX|`vfA_)Zb5Awj19jZ3WmS1 z6W-KMxwf!_vc>%o`195~ui+DoGw&R1q1Ui9+?}Xk@ypPhY}>(H!!dVm!!rK6A3}`7 zt)8oo+>mg0X>h&ynu%}_x|ocDh&hEKs;?Kc^Y$lRfo@K9P)};Nb3g>xRCs>vzJ4++ z8lpxKbnK)Cbe9={bPqgg*GID*5^9c*u+PHDx8^#6fnx1FeciZ8g0OnzD)`L1v3?9M z<0sNwbwjxn3(xMoWm?vYARxqF9}R?l6-3{+-eoJsUPse6!rlu6^e;QHn8+sDNXo_m*%8R8nQtm2{A;pTJJmNp`#9lG~<=(>PBjm%^f@cWP%obgP7O1;S@k?bGj># z{ZN<@CDIlnF}qE@=~<`M`|Z)^T!AlHHqY-oQ!+~=w#77LgMVpKBU&GzEY&UA6Kt6U zq6#;pBBD!-YHEu;rMI~xUPt~>c9^y<3ZLQ#RMf)a(cN-%tdfVz|Ha8 z%)v8|K5xGV8K1BNXE0j_pLmxiLGrT%3V*~Ehynlyt}6q9_YffTY>5wrje!ed4G#)- zgAUs}2UfBoGD#2hOH0M7AH80*5pKnVtJ1!#01BJcA1U}BTn8ca7p~*)*)A})(0Lgz zJ0{ba9a4U#Sh>qu;zm_#(<&X$TGU6f9f{}10>RfD8Z~j3BrgFiaUHppizjK$Mc1{X zDzn0xper(5=knvC+ut|j^RQqism?$9+S+oAg!Lh=kLk_Hjtx3=m$t_Y8?Pj?eOPZ zfj0fb(181zsQC(%*n3cb^RfdpW5s&`eTY^)#Ogp35MONO00?f?rEGAo6PjziokDy? 
zW~vJs%3PEa2S2czd%i!)8!zd_Iiyvor|mK*{CFxgpksli20HWktZtOwPs6lpbnDWI z1pQ7j;0d!y-(8rqmlj2zA_vU~q0bZ${;Du^!S`Dd@XZ7O0KE!@HwsG!Tg>%)gv(Lv zU=x_{{OW~Yz&!IQyxI7X=-%x5_lGkN0j1$10q!yE7$BnScIXjEoUG&~5RWWc=r;!j z%?g_kNfK+LNH`-E&vPRzSP@>4IA=5oMt>vV4ldH6MF#!llzqExmhbFx^Y!(?ynMxq zU?5kdqyaZy)U26yJ%AG5y#RJc%_7fQ#!eA*3RBK(+6a1d(4gTpdBOAwYOi)REjmqI zB9Cma5w2X86cPx8&qIC)-u&HmcT#ZAz0}%Z$^%NF7&Ba5aPz^&X8EDM7(lA zhbD%?03gS(5I1*{&r>0(7&K*AsuU7tlb)mbr1EC5V$}!^H0a0SY>^om~<*kp4g5OlMc;zOCVoHhMLlidCT|K#W5Eu+(zRf=<~ zx`z1NcyJX5l^jG2(aIL^$N@ozy*9DF1cX%Oj7$v5zM^O8nUYb~q6uNU*05)P&S5%v zvOr@5lmH$0Nd6@=oS+(@#C{}S7(C&%8_^7rPD#M0Pjz&f)aV!jq@Mhen0U%n%6 zD9-WkL}l#-(VB&oL~X+X>Dj6V*yktG08g8o#Ni$p4HKVeVi>MKy~Tba!0C`f28u6$ zU3!;}=v_xI;OlJwW^$}UZup)F0{o?q214FnF+xAXj@tpcl>mMOfzUSe17<~yDBfaoE%fXf4fsDJ~bYVt^UlZVVv$3b6 z)yBg4dxwepj~D~vxfC{80+k3kmjj_ZS0Kwc^gTU%jX)t1KjGtkmPk=j(V4%I>{%ZC z_9mCIb=t45R!@FBS5V;QOm~6H;YT3{s+xhyK*%yX2U^A+v4o_u@p13Uv1e+@Jexe2 z?XtA7QcX@8?#&6WCHVWdF_Mf{{C6rFCXv_H2`Xb4 z*YOSUX)8^N7$ZchinLDl){B>$Qfrx?Wxj5DoZ;!c^!R5VQL~Mq;KcO75F2o=as58Qp zE6_6){0lrWqWARy6c^3xiH}pkBccEG0T5nMniz@x-Y&~G=8hmRF)>01S=#%{8b4`V zRQHH+3a{r>YI}Ms?$g8T&OexO2WY@HA__;_62Ipc6O3anD=tK5UfvO#NKOSJNj)@g z4}FAKD2qm9rJfV;iN~bWw{-duy*5fgnF6+l9BDGh7?JB*VqI=MQjS<_jx);|zUzvsa!+jwt=l z_kA>{b$7FIdlu~}I54fWH}Tn_UiD>YDs8l8e34Vt5hnV8FfT&3}V0kP1jibkY^*_GkEChcMxk<4gQ8VVLk0$Rl;7r^c4P zIL}Z8V_DsBFd!2}+N8VW^(`?^bGFb?bsD>k-g$Tk)rIZ=xN~;ok<^)HT>K8Ypy)cL zMGv_CTCx?Og<>ny0F)7T{s6wDxGXvGc%=x470m)Ne!n@U>#A$BC$TV}pUz+0SJkf4 z71R-vo4NZ5Sr($qJN%b7Z)4&v_W|k8?~ToxY$j}l9xk~6$u?J@Gx;(T_`x8S_%MEE zM4oOn6a5#a`OhQt!+G9Gei|sIh*(U7x$I9MZ^JhWmOsx=rN`z+tdC8CHzxVpbf;utYX}Ht>PR0FTH!IPpk8aQli=a6^($?jYcw_5h`*eIGvnuF zIi;X&jBhP(cAJdv?#^*j7T(x?*C#SGKwY?d?9aPXg#Cs?VYLOM?U#@@bKLO*V|gu! zS@AJDpJ}3TYxKKwfxnJi4l>ym1FAY0-=0PNL7Z(hV&4QLE_Yd+$})6{o~FHkpB)3b zTd$+vqblp1;%nx-IcU|T(8Gm}EJ`m`s!BVl_xFYSent)}`1PhdAiHL+K*-j!>dspG zZDei>=5Y{H(di<5dpiVn8{7jsegAOS{WqJRGWU@~-h~L-JYPp&!e)8)+=`oSoAxu7 zv*l=cQ1dQI74gb*A;+sbGo|()^y9l~?CR9!{tjsA1k7YnvzvmCN@(HkWLgO0y#y`* z6~&F;q*`ms*vCg%_;`*Jt}h8*=3dJQ@`|aET_th?Kj`l$y3*1FP19&kz4!Av)bZj~ zKR`tg_QcPkV|z5$UVS(2VY{c5K>RaW(Fp&t-&v%~+#0D zIYsTAX&UF?f@rt_^FbyTtuqodwZ~}#rQp@cwC8FVPmnVxG*?%Gb;V&;B>X2!W?!3h z+^=#i&gdt5Pj0g^B2z}qzHl_ogiI$ks>Xi z(nRS^T2w?jh;#@Epj0UWB1#KLuR*Hx-cdRTC>?162{n-7d4GHFIkWeknQLawH~YKJ zxvu?RLIQbt-Y3sm>t6SLFXDZo!*UjjMTJnOri=CnAb$f{FQ#d2E3CxCKQt0+>SzDP zvvw?4d0Z@VX4!W_mDG}uMZ8i8VQ3W#a+$URQm>Dw9FYy^%Q$58BJ<2d_ciGG9j0I= z+tvW$F`zt{0NPhP{>swjp#W=n8_1t$<9ujh3qSKz^73(TeQadgm|y=eL&&wgr*bgI zR#i}(Q+m&d^-LzH+TTGqYKAeL`WXaaES= z+4UAz_EO(pWgPqJqq#IM*ujRtyA|7(vQ@95CBofQ$zYz7kZRGJCqARHv!^%d@9?@` zOC`HtXc~R61`&Q)TbWIdGsieAGXr}pj!4yvrs&QT7PHcBJ_~Cb61^&PT_8;TP(%3X zuFnS9OJM5I#{Oy~o>yUzZTj|oJS{(@s9&d8;f=GS+D zuQGeA#7r&Fs@UJQ1HU#OMN%S`fT7|e3HwvcE^ZD>DngQn;yyQ$ey+bgJ}7saeP4N$ z@{s~JNpb+xHN|M^ZYYoz#a(T;yz44z^t5%_;}XA^HO+-h1cLJ8Bo`rET;b1{z^4&e)O?ibo)j`3*pPKa8M(Jd+?tg|~ud_nR6n%@url zK?-%25yFjs1`CVp^`~A6Ieju5)8aL(Th%LrR61>UgT>f5Uk;RVX(*o)* zs*djE>$ac1P*Z4fZMU-#J0Q=Hb6v}RyX;G1GSIQwAa=+VcK_b;oFT%;M$u;S!q)W>5 z&_YPfBAx-c;vT&+w9iQVGDO?3Owe4>nYiQf{@1>D`n|AYou=>}{J-BK0P%l|FF0W1W;Jd5c`rd6rrb;WmBl0$e zwnrWjev4PzoEJe(b)!vlj*78xp4Jp~5gImiX8P14y62y?jsv%q|Hyt836;u^%LI^+ zutm+Ne%`d-AoHe(g-els5T(Iw$X42|BPQG~j+NNk%1B6t@p`dYS5&VMJfA2|mob(- z)ku1GG8mxEE2^hI{L4>H!4!m@AyTuWa}8&RTv+*)y$>|W65$f=H+S#yB=Ocvk=Z|k zc~dJ3Wi8ml2*9VmBjmzZ(WmAZNXI^x^P8<3D>}QV8mrg9WbWEY=dmG@#LA+B3uWn1 zH0q=oq&UI~ih7ev4!w}S3}&3wDB5Ccsd?vDSABWx3r&gX+l1!`Dq;}A(F4ne>Y=0F z@l%}ng}Z5c1C6g@j?`^x&~-EVqOT-BY{WViD6;?X(o!)*3Pg+%N4frT?fe9i~ZJ?`{HxhjGDX@1sXH;b;j(RQPYaNREJX|~_ 
zS>N2Ik!HRN5qiTRMOI->M!XOC^$_=bTT_DOyFYLDi}~&jyO^MqYf<_fN?+DO0hcYp zMZ#O4$V;A!PpoiiRVCzAxN*ALTSYbBc-Fbjmq=^ATTcb6%89sd=wZ%j{UVR_gm-B_ zp%xqirD;~e&;iXk^!EksD&6%7!3=91GUKN`jT(2!nuDNwvE-<;9Fp$V`8 z9)W&;CE4S_pngHrvp0wS%0*LvwL;Pk@@B%LTo%y}!1Icy-E1kx^`Y1O-6h-w;@awt za^5DH*(=CDihVg6J4$h^;_@5BRS7gHWr??2fl4dv9bzB@ySionN4-1q!*_dt%<(&u z2F3P8SRu{>CW)!K7fJ1UNb+ z5Nijp8^1xeifzQi0caKS@7V}0%lqH7PB;k#tyIMERxmakBiQM=WqCQSAeZq}x5Vj` zpKE1yH8$Y6wa)dSD+DBwoYWA6)A)9$T)d zP4S@?={)4?8QiVQGJ~dOytxP;8X=Q6&~fo%8=~nS>yIZD;iX~XMyqoGMSYK4945Ip zjjj{S0J~cGnR;l+CS6N4JHx=n0>18#G|M zt#mUX)#HUDDdv<&t@9hiQ{Opq4#f6ZeO zPGUx3r1t2ScOJOAG3*<^JF`8R9vReqRZpMsL-&eXuNJI;gXoL0}v$ zkOrJ9&m;qH0{*H!T>`j9KVF%>41XQH?c$pzmlEU_#Je~FYLlf9cTg8sw>xGt9#b5mL*~yL1)z81VTNJU8Mt2X7!p!s_qrfajZ6yAJ z7Ipw$aS5K$3Ld+jAn>qtOwL43{|oOY?LM6F$F{Pzx%#CHXbp3_2NO)@?wz7V%Ybv^ z1}ff3x2Sisuen|h8?YxHBvArH17hzYT2CT=feA}k9$^Er$ZkcgNM*&5Vif+qs|O6X zsfJU2rsTBlN}>8}1$6Nu2tG<2hF%g=CBsvpBfFG*pb*`OF;t#mg53G)X}vV~G~ZVi z4{Wi&hgD{dqoE^(yL~unIFN6#BAk%Su)A2FWtQUyq^1uPEN$dU3+Gucx5Qo$%6fDG9n#Psr=b1lLL81o7w z5(>i8Eg_iaUW++sjz6hwsb}buj5ie&X%+BxZ!wKR0Zf_s^b=M>AzX$yp?U)wjo#=q z+)g9Ur%kJyc~IhX((|PyLrUk5D|qvGG~UF4mBWDI8x*ZK5(lPGzK-Mdmr%nvBq+;# z-1glsVJ)k@{N(HOPww;ZE^%yrijCh58fkKMvgaDX4FDp}iBkp+QUkODk&z8R$ib+e zxkS!5oHJtdB^{CW2uZvTJb(Sy&$H>aN;R+>X#MWA^1ATl#VFNlQ?ep`2fJmzYD-04 zM*4o7kqsU{MqvtXcc!) zP&$NU#3PA0)N&+{mq5VS`HoLc=Uman>FGNuQ#G^0h5^z7u+Q$cnrH86gp;9#(0ch_ z8U^sF^UD#{+0Z*5v8!tF+DEvda630Y_YKe5hS_(fi-&xn7C9qiQj8>PP_&K+~cuyMG1Lzjv9m*Zw9l%;2MgZWN>3RrNRYy_!) z0@LuC``FbqUA)LbdceNqhkxfm3M6(Ng^0}~)b`=YEP%!tVu8})(JjQ$O;jf|907bU z=Bc}&SCvIp#2nf&R$J%KKH2+u2~B&~&(cVjD!J*_J!_{3p_C|YA_sPHSp~Xs8set^^7`j26{lxYH|LAy#$B8~J{r?_ zaiGB8erA$5Q|HtMMl1oj1PAQuiWLy=Lmw+YUQYkO_wJI$q*-7}#?$pD9|RJw1!srT zU1h-Xee}+z`-E63g4QBKYert3A}+w_$}C+}xiQMo*Gfbk9*%eRQaog}_aC4ZEn^gR zxUS9z(nKi!0Z4;s_M80xn4ZXEk+IcES=~4-zD|1jwS8>94;Oq5D ztIJ3(w`7~P-f|ixz5DKDQwW;!0`}RCLyJX_lrL$xJ0utoRsSu6Yp!hJiKQnV|8vJm>v>YRZX?WzDa?5)qwN4_t@A|r0Uf(c2k-gwu+!y#1 z39viC0MJ~#jX3Td0d$C~C^nQm_BSBn8fMC$K_92L*!s|6aE+^a%Z9?_8jCS~D$1V147U27)l@7RVe zA+czH@=);`gyWf!lfb&}5B&zk#(xEv7~H3SFZLoU0N)EJ0Hd=nrIo5!IG6K-Oc2hg z`agX~P7y^#Pq|VAG7#hJKeP*Y-D8CEWX`^0dcJ3br&=vJLNw?46Ph5BgD(}BY+W3hAS3BX@BS5IL zTrh0l!zOsg(%oh^d_#|&c?~ldbyudwh;+AJ({AnbE;swB2U>Mj<0g!t@WP`{X?Pk!VY9WeIcB4mx&Poy z&b;q_H2)J{ghNsN^ISpao1dCOAT3#WGrtNWRrxLT(c&X3lP5|1RsIgR7rHjfYqLzB zJ|x3IqPJHSNM-SR-?z<~>yvGN3?5orYaa}WMtU_hhz@rj=m_$i%ATQK&AJw^N-3nh zw&15r_e1yzL(DKuH{)DsuTtu4$!#3m8^fI;zr#a_9bAkyUMs%ED<8sY0?CoJqy_tn zHg!|>?x87$E$AtXZ?$K=Mb@RzosSQguesYufPi4nu;vIIA=%}uhb zdiNP|!NaG421N99whqS+U|Z#~wQfYpXvD10^y+MZ9heS&wyk;{!bSUugii{YN$yhRxmHiA+p&Kv2_mY5)B<_aw+6On<-=|6F_meFb$j+ zk0XgONvQ6@)aC7=uaI*iDfCOap&pBX0R7(}?Zk5pC=YQ?bO4?9<4wxrQmZRkPnggy zJ0;kSlZE#k=&&D=!Is|;t5xPp-!B8*SG*v0j3Bo1)N0u}5PG<$)q>~1Xk${lz_k7$ zdxwIb;&1s?IsonALQyT>r()O{UYvnH2VR4Kgas@sj0ZR%wbHhyMIJXktSFQ6^I(YO zI7_}ArMC96$6xE$^;PgLl-y;3{%tQ?36{9_*jToBo=)8x zBhC|rCxZUx&@@PReEj8MGp{ax=VMu~!XXKh277D)g7akNGFiyW-T>6r)ssu;F@Uo9 zYc_kKDKa=PsN4V(_EGB~EbCVLYe*cN6mNxvEl^;;tRN$%M+aQ4Wum@so#@^YT^TFn znfjKT+)A!Bu=X;In2*1>jKbbgrqA}*724385EErxv5DIEVCenIT=&xm!A77121UvY z6&V#A6`i%}AWr(v;1>tayx2ZRmp^KN19e5a^3IBR)b#ucw2O}FqNdv&v$mFJKMu32 zD>E70ju6U+#F6Qi!PX0y)EH(nLPIk+M>vIzKYc#y1!qB3!)H+ncRnh3%IPlL6S-u@ zOc!QjuM0Gs<|opj;R%Jyuz^*nhPREk>v;ea%&7x{Puutc*m2Oph44>LFqhu|$m^xj zEvmE9rCd^sJ}|f&!%8+%x6Edk^rU*7=X`TQ?VSw`%I`zUxjAk)Q3U?0f?4EitE5w7L^V3K zcTP3gga60zX{4-+i;I86mOz?|j?qoSA0MJXfoVsULZoNPSP~o$5P`X{>*#sg891~5 zyFR@M<%ih|wl@0D%!I^t_p_58a%_a(H5v{gflhP&e~bqI-RSY3y&s4!B1CnwKhrva zzKG(V`yzi5$^m^5t3Uc8GQcz|WX|f2nNg{jpH-jp6Mf?9AI2#XQEh7uQ2c$smf{~9 
zTL3Q`#>4wJNvKVogojKWvZ<1cMc<2!JG5&3yfJ>kE3LR`DDf#67^G4hwOy1)cIDD$ zKz^PKn(lToQRcnSE=GI}v95P-p3n@F0Y?Ci8|=Hc7jBq|1@_JuIKWZ@(2w6-Ke`~> z&q-OKtDX+7hXY$sL-9|)b6!ZK^7#0EBm{4O0$kHBt0aSu2MuQNmr$FItC<`)QrhIa z)pKIx;r*%}Xr$*sU1eE5AYLFAQZDbmZhJxqIY!X+it{u*2qMyd{uzJ&8nd1(`B%xi z&lX{PURjxxOlYRU4?%vH^Y{W%8ThpIAAG$XyPENI+8fS2byYi6UALG#Q>@dbrEdR6 z4W^W*U+Y1KMM&`FU@(r$U$z;6Q}cf^u;N!U{?pBJ7w|rr5`EkMnp>GHN$u0x>Er3v zq!g-Ks?5xS^k7>Ps=eq=r-tZCaxFQoAx@ zLH(*yuLJdm54!jsTccr_cslj*wsf~A!Hr*mt{Bu9;3xE=wlC4pm>+y88;NrPNWQ2c z0P;>`Yg0vM)oh#hnxs!1)b9jl!Ku-XZ8v}| zC`wR3MMFFuHYbFgjO>)nCE#D&!@0N80hw#W$Qq2c#>3{tBw{sbW*NpR;_%*qu!gvU z*;R#e^>j6}9!UIB!cY+D0r=Yx>$|c#REv?#t4k*f8cvlY73Jk-2q6*BQ`@9-CX}Go z7dzl`$PQcSF=ZRFq*BN8@Rrp8u{zCoLGrs~;ME61d$gAk`e{B#idjgvqWL#Z8p8!{ zU*dI*VR@mT%mx4wOlai~;mS{aGh7LFfUi~V3XHi&Hf6G12gugVZIgftMlQg$VUAdS zLMZ_EaG+&>c_Om0Y3vW+>-)yRHHVIlJ6;|v^I`B2H(Ha8%iMe+8t|5r3L{)cAZ_ld zm`I=B8%iIyYrQ9(gIZLbLbVj31@`^?)`U#+=e+Bp%@Gyys~fSrXmCdY3$_@KpWTwD zWVezVFtQ3=)7+kA5k4UAzs0hehK+ruT48@|ExaIQU|a8t7j4zrvVe&xw<^Ty&eqFs zxwQ3etdDf;vnFuD=gqSCV~2SreabDhjg}&k6g0|5?<$CrPk}&`AaB&1A`#YRN8KPm z$j3ANyyfeJiUyd&uHr9rR30GIYmXO%#^XfcT(NR-Liq&2E79a^aJ;X z5DSl<|3vInA1Q}A51QJS>Oh_ARKYp+fhL3!vMuA6z<7m?HUN)26AmmSyr>4cRQ@}? zrL%Ad5Q>ebAUi2YVsf9YEVKgPbX^uX(OR}vN9+op5}zRe+m?Uq!M=ARGGBS(0qFLW zO^>B>#Y%|K=Mp12=TT3SJj*-**R8{Vx+SjS17ugeZH1p+#vRY~`X9*&eYM^u?|IMW zNBJS>MG_#U$==%mOQiFb zEZw)RrSiTW75P=#RPx+B|A;=z;6N`Y|8c8DTjfq)nZ6eSb4>@n?N7i`Cw(n29dsq8 z9r*& z{ZT9pD!ga%SZ8&h(0l zzb&+`;O5Sf?54%yb1;YyWgql54kyv4cr7>Q`|d(%u$x%h2bL#cccq2*H0`< zS66Sy{EYulm9*=7yuQLot)0iS^qHnHxEns9c<1c?^hR$(F=A%Dj~MGrEZaGgnsKK% z*~@?V8zhkmK9?%pXcq9z5*!EPJa@58x>;Gw(+HfCr@I0RY6dXI6S$j>mt9?Sy8~qQ zV@5Npe1079{-|%jEBtF&4iUCHAs#@2 zR!ZO#Lfv=`wDL85rlG#$0`$cnY=#fLOf6Z455?9R;HRi%uO5Jixl@~Zl8u)3lW_*0 zdo-dp2iww+-!zx%cONdXL~o2tB97*LUCzx=h#q;^*Hl6f^xPyN6FnGNHMHG_+dt46 zY$LIPu&650>0jLfFl)Dn))Le^)eCbrJs6kO%h)d2FA{U^ zY_{D?!4&thuGOtcZboEKMvI!Xb068zZR#)Ax53lqD%pwIH{@Rb|Qku+vQZ zY)XdA2K>ToWzeZBs?`{Vjqa;9^Hz5Xw`PzI)&8bkgAE=ZhGZ`IhK|zKEJh<4WD}?W z;l3&}`naXGXQAOaZ|bh_?t1UnsIm*vr=ogZr;em^|K*s3x{0k`Rncv?y^QP*G?CQz zWE|~fSdxm3W~aFRP%rpa1*+4swzqBBjn}%#M5mUTJ?O(V$=f1d*czyl^hlJSrhzv9 z^jWAI*q+rE#2&xcJr8N zi!*cbHJ*^`C=9F$gvkBJ7yNl2jLpW%bj%e-8yc=xg{CnkD{&cjiadH5)=m0SjWdp^ z6EBCMtzH=+-y-tNJPup86tZvNsh_{rwR*XZ{j6Yd4@k z;Jo_31Ly8g+y4%n!Or8R0F&|0c-xUE3aEzEGKJw&+Hc`OFaUE;Dl9?#T|s30R|V0Y zK5_le1(5_H-}X0Xcm%MA`o~62l^b-2g~ma_LKmckDM|%$9;-2aH@$VOhQrj{@{xW2 zrCSs%74f~B)}53PMReCWQ8toiQu6*uHKlP@cVJ9V@~4?G!ye%WEJ-pcviv%lw$(YC zt-KTYV|ew)F2&#U>%Y`Chl_J3yO7(A^|M;hkg#&lr?t^Y&ff;0(N7u-ukNZbnVLKT zQ2@Q%3;7?BGWB9d%))K5wNyk?I6q7po3ix4!JrY78h-4?_G8aJ(V@8}NkL9m#8uT@ zxh=i&M>2HE)U2thdXn443d_?F{;BcS8|_F=>9R|x(q-ElS-M6qOwJ~EgWYOR78?#Z z^t&X#-sqG@ip1^apVps&`vbpxek#w^+vWgSO4);U0uH7LhJ@S#pg8&9vNaOjR@wIp zgSjDq#DV<1j8eZo4*n=V3;vS%@@B#na#kx9`5aQg=^BVVY8Fr!U%^)E)z8E?*rOKa zzgq^pTVc(P|G_CY*b&p;=li1MoDwEw)s7wN#aHx^{sXnILW;(79SA5_=Xz{G+*G=4W+5*`k1r(1`*ip8S$NwbSK{F_*4N! 
zFXbOAimr7k$V>B>vN&VoxEh%s70<)O=IMlbV;+Ooz|t=vqy0RBS1$Q|-M-Rbu;F|} zsuCV#%7lJzZR4s;$fr~oYE*5bkI1*>NU`&VB>*7Si5ONEz0sGZ)m=`--aS)eC!@9_ z+~?Zs|1`AzcJau7h~3Yx&s}z6gvzBFA_Q4wjjzfJq*UOphIbfrX>`6aG5*DrSYqP_ zt-gMiz8^$KY_V=`w6dt@vOwof&PY;guYX@lPI5`)&8gC!M)lT`xtP;C)lpvME)^D* zWs;6bOd`?WJuZkrAOK@WgDJ!sa{$o62iQHE0;s9W=0tTrig-|GbkS?zWT}#jwmysF5T;(T{VBMf)*NB00tF)EaC{~_cuIhqAvVaFZJ#W zo*psXz83p3w7m#n|3LMJHLo8os&ffpFe*5-7%9|^+>R({5(dIo1s_5N{ z%B-vCCYNGQU#{g>v03-u<1%iHFadnBE+;N%1e-|z3q-Yv!?zoxM@%b=Rc-i}_y<6^ z2WSwZj--C8BhfHBx8L=EIMeD4$rle}C@3W4nw&5_SBdAv?YIx(GmXVJ&3~x(|FpP8uj@1r_%O zduK%w9bfLk82Dr^J=)`b+)vWAnwZyI-x-E4gl6efJUd-E?P(x&Zqy19hg3y=%?d!M ztnPd+Nk~mlb9sk>r?{2FC+1Y*R!Y07SU*7D^e#}OXe#0RMSi7$4j~c9t2KydMc4Xk zo-O&iA1M8tZk(;!WzcWuy&S2d6$VE`8NEj$E3M`M^nd$w!!7d_1b$m@R0s4X53RtL z1(IFOVhe|<;O=jq`$RCz8&^B6#YlzIm|DG2gta5FOx_5#X6lulMfyzyy)yHKMW|yA zQ#q}?WZay_BbJaZQFT^|0-0~Xybxwp3CFrV3k-88tM}B|x@7-?t5ke`#KdUeO3Pv= zm*kkTTlSq5F=$^d<;BLKwfAz*CWU|fPzOHhRO(S2#%?4VB2kD=*(FXdbF*1Kvm{Zi!u_Z0SKy)=?v6x8^kU>*1gnuujdLcxc=eX(Y#r|n$X+RW|~dwd@NHIQ%Xn~B}> zW|`&rAhYX_#(zz*Mn0$%6g>I@+ri0O4V8?njBwh-4wY;R@Z0?`mX_k`9!Tg6Q3+as zbbp~+?oH>M{>b+8VDoKl^=mHIGDW04D=P#L7#w1PR;7CY#Q`W82Vsx-sW0zfz3Lu_ zsD8+XcUghVMLwADxx%&hpg!)S7OQp7RXs^M3UP$K^Z9v*^5ODKcP90U>f6&f+f8w| ze%(?X&!3%r1)fv-PZX9;nL7Mmuf}p+!^Zh6U$z}rINGr?e(v5K`^@hE))QwpA2$5W zlyxm_2?oA`-q@9@aAE_PZN!C#d~phDHb#&8-0~84lTD6Qgl8-WCD-elP5oEmMMBD? zsny+D{Pzz@)HqcDyA1Ga&*G_F7|HXrd+U7}&=D!0?;R$)tKViS@wc!EA7R>K@%j0C z5i8{MKP>yTK2T?U=rdJGfCs&_4To~Wl%o8DPGT`~(T~Hco5BL_8^-cvj`cVD&NoD^ zs&NfrL{@q~ERokw02$VlPtzjyr1E7dcLq7^f|_7xX!2JIrk@OWCv0K1&=kyS$?4Jf z3!19CP}seoj9A2?1xi6xqH#i11)I<g~{NHA+Rs2{8txE_z*YC>Wo=`UaXzBk_I~mAX zVznbb9|KpYr_LEw<`Gp_xqzjE)_*qW_r^tZY`neQb--=@9zHxiAlNzY?Ul&+V-)(x z`V&DXn>I+3kaNuWWYM&?27T5vUA|{VeOYx}E3puYgjM)nj!M_mSFU6dPsPl4H;T8o zXyTD0h(|8DpMp*@}X5-{t7|;4eR(%7E@e_%)McX$xvjz z3w&x!bAfWAP?#pscA5P%O(Z1iBFNFXjTPoqGISF_1(mORmO2?qnYO)^)Qu}7>&|}B z8hPuaVrW^vr^zEK+NTg|$thzwad`4tlMkQ0E7|};f0yLtp2@{N8(-+^YP8QRxwITH z&ZNN!x+L-wL819>v1TKNC&bmP{u#HQ1c&%yI>#tQj{nRm2b>af92s8AZV{2`C>pD@{JeqVKx*F%K-bEszTl8%C3V+-7ej zy^F{M0b8O^y~k%=BsYimh-4Cr9p_}^(Mm}-RT@ST&`B5;~s z=#NXtGi)vJ-qHs2(?(Y?Rn4v1chRoKcWPW-$|-+h3H5UyVd=939o&N7!uEB~$RIjD zq2HcYAgSNtyysI`BdtM_GJCTp6_M?WY$#a|22@zH*T;cn!js48&)3dgWH@*XlUxi@ z3Va#sAe+qGH$puhG0{x2oT+af!F;{7%h^aYYl){}seTcVGZSJByY3TS8_b^1Pi-_m{*n{pQCl*4tm;a5@lcBke)L18}_4=7h}t+Ym> z-nKc%sR|uEpzAhBmB-F`X9hQemx2ZK>30k<(zo?f=sDGeDfxqrKxjRec^{X;s_}h^C#*q10rrM7 zZ;iALP^oS9=`0nZ%33QAG5ot6+3n*JXEtijeyLFpTHt0JsIKsJy-Q*KP@t@`tnw#)hfw$Ly(6I+-<^cM?d%QncpWjJ9v*?(0ClcAsZ?V$CUW22Q@G z5W1_Tk3O#IwS<2}tacX;J)&EoJU5`I!A^8DV68eEQ=_uO-MKnRrMrhst;5JEL5VZt z%{*PJebF%8W!w!I=kmKL*$=Pcf@nl{J)Y*w+R+J&rTVakVM2m?eAtfh=Y_v;VuS&K zhu#)L@3yr8B*gkzI!^wf)jUk3@^kow0UwpgqTUcoV!CZgFQTHysUIRcp~D)5>r1JNouXDaX561 zaBV^9m6f=f3P{<>rL5KZ#ApUdgo8CNai5YmN1kL%uBkGQH;cIHq+3qf8NDpkqWjW* z@**V>Ckkuyi~h(Vye$&?4fdIsA{dKUsUUpBD`h8SEvnTs8^z~5{rQA$Qq&HUO_yII zCauD(Q8~`jJFn!J&9c8&R_plh*uDOH7O;Q(><|?VJ)i_m6co@r%T@^#3mEA} zosj{|#+lJSJgb{t?4Q2pxP`=(5$L$}cg&JX$^pR|(3mAq>-^D}<=2zCulq+|_Kzm) zUttcvkbkBDA^(L2#EzPI;Q+fVaTm_ieYttC2?2l*6aQEc;zDxU^t(5sqz^PZ^HpoQ z)R^9z`dNPN%(l;_IlETWUt6+c$|?LNjk~5f=$2BA!@k5Z)1-!|7k83O!-E$_f+|Aw zjcEg?T7$^umN*)nZ@(N%-py3nh#M?76>kgw(D`@yR;CpdA8Qj}ky%6E)5`sd@3>MG z?(-c1c03JzNmkJ`)f~y^K~%n1pC*0bpDcJtH|l#KKh7yJ-{x)sUt@wssbejh*h7tC zp}?3A8d4g7>ZaW&!BT%mFZ)9EvB%lP1;k8q+c{-9m5XrW+vltnk6!k3pWGxl*#A}H zwd{r zQ5YL7I-RiPS3h+29sM!iaPec7Y?7St3jpU(;N^|4A?d)2>dP;`d&%Y5+F=f{jyVIY zrn)cFEt?gtKxnZWUnkby9R{`~9b6trvA<3#`AfZnymhs%uC6*%%ymWkdX$^?szFrl 
zs8BoU*2A=f2)t71pdiqt5Sdj2sq{3zs3grEx}lIwYq#PS-WR3HzEcF##CD5_#a$EIjj|8=)KSB>pn=e{7*EYu&b17DQNc_M8HkfY`@AWz6e8AN7b`gjq z1i*VN+R$V^&T@=X!JbR$`-&8}%eA^UdSKj#`Sp!OUzMHs zDKYVpU9J2dm-r>AHOUR)D#2CvR8OR47Fr{k(GIVC*t?3Ln{~|0e0xszb#a*qr3`IS zfW73`HnO8#^Ilcu8MyfNG|K7XsH80lj6Z(z8If&TsEn0+EIULQl(g`&5R-FqY6 zEag@6An1~Bn&7RA`bsYp9?>nZlBS(}Ta6c*7JAFWO;wT$A{hlKQrY*^wya6G`7!9z zyrkoZ;baNC;(wFmuq~D-kLRAm%lJ7Tir36YR`uDRI8iGlhGuSGcyySg&e;G^DV{CT z?PPJG8IyJl2dEalv%mJY=3G(9cAt-pBdG1zwzw@J$RGX)uA8|t@iF7d-DnpUVJ*)C zH2i%=BMScp%}I34 zT{_#vL)^vlId2aQcY{wuV&Sq!>jkL=7P>c2V)#Qw+CTXULAidqb`DvqQRs&ar%BLQ}E+C8yO*OPAc$*I%V zamaq=*;MJ%&8<;X?1Vk-IKM!#Z0m>9>OoXUL@0CGF;)o_7QMU` z#~SuAMXftTt@j`Rl`keXK0eKE5kg7h!WKF~B`HL@j*LB*t6rw#assp5)HQ3auc5q_ z!{8u%mDjWPc}1+b3JK|SlK>%)$b4C5x76?X47=BfM76fSB4K#<;Bdc*>v;dKjGe5D z(1KwaBlF}h3!A-FuT1_Av8+_a@OfG!?rp`2ERotaZV}{E&1qdVs&y~_}8K7+8Sdw)J_@#%BD-eyQUR(0PLMO3=jJ)@gq(rSX0*( zzV2a0?bgt$j~XMF`&XGZ6_LAw=<%5cE}7EX!MA_qNLgxm2;YJ|XU#NK8!=AqyFb%1 zLdI;+(lYFH)97+Vl92tQpez174MX~5{&JmbwzH0bu&ZILPjwxq-o}S(L*Ejl)L0NS zc=eTiMt^@7xY$I_1lRhIWyh80y)-mR`%l#EGH7TegJz;&0uT%wA8_;;=n+Nj>?nSz z8>@M(G*HW~#O^4*C$pi?A(S+boZQk*Gpl@{f0HC`we5_-XCFuY1F>0v;vu|mz(Sb` zaBBePGdFQg+}x<9Ds zO@W?{QVb!h&W2m1fIz$n$(YK)nPmXhv&1%_r#^KAN9H=Q>8E*@vb#tqN2LBj=UMfC zrFJ9bdwP6a=&Y!hQg&PHI7lRD^X(p`7sFuodB{@iDCte;8L0&j$7>+KgoyVrVJ*BY zMRX)VF@ZpTG1xXLvFI*|sTy!*f;K^A*dsR{&f5ED2lAKLVo(sg0 zHA!*^i3MPmh!pgDfbRwnY_cCf@3|}mR?GBLU9+Sdwsnj_cfFpY zV#zm)f+s{->m+t{8DL5uV}K4yj%hIB?o{&~oT9~68qdvokB?j9qE4rI7US~Xqz(3D zb}d0uCz*xYzP%$xwGn&Y-`qFutj&S0~au>vUh~)-K+#Eunr+k&UW$$8#==;M(dJ9qX=sU;Fe<*Ku}0Wk4he zUYqOn(Qzhu{)f#s%MsZeV-LPJxy}4~n!Vg99jq+9mqGsWyJ~7!r{syrSIDDsMJkft|IKb-hwZ#-)JDue~hsV5*!rR5GU?M1Cj0 zkG`xpadh{QriSsP@~vfp<+O;H{)3vvu#59FFFNBtRe(A#sM>-!o`k4EFm2J0WdycK z3my3~=flx!*84N&6|pZ}9w{vy#TC`IwH_^%sm zc8!)^xCF#Y&L#b8bNIXHST=NZki}kC4<7^PH{SI}?4&QU`YyCSMrQ#kj{7c}xrlhzR7WUq1L?@hzoa7u&TP-pORdr$ z7)`VHQx3%YPv7m|k05&%qUwW5*rm()QV5*N_?u2X!M^A+vktMmS$260Jx|5zW;R#g zl6g=WR;%0gCe~OmvRPkRwZYGs&yT7o`Z?{fwK%Bnhv2qL<9IXwaxkr(XYQ7quNOyx zGz&eLqUbxh=Wg1jBk39>9LZ9Hys%liBoQ^Bni!NPbB7~=x=}e`FpTkE5G=;x?c-wnvTnjy||hSx-I@fm!uzLK7C#v(X4`cv}0+Xk1-5) zi89hO+++>qMVGSKzRulzlOD|0yU;6l%f3GP35}Ll4Gpp!6wudgBYpYm(q3tvRYs3} zei@ZMEB&rDvO?w|v>Js^dH^QiPy{f4kj9tt+TEr(+dCZSdveV$io73hnyk?u7yWV- z^&YaXo^Z>lslUG)LuRpRosl6NLasBg5~BJL+lg2?`NK%N0)yB*MwKq^$&XG4PbcI- zZ8{J^c5a?tu3j|boe6w*IqRq<@CUWD2MESynWZ ze0@8bCOudsx5h_58Z%ip&6hy=LOx-iwv7;2Ha6uOUzBM$-Qnl)9OAu%)QdbQW$I+) z#+W^slz5D*Vo+gEdqDU8S5bwN>_Fsse@g4C1WYNfO(b^3^N9P8@uIYw-#>mr)Qh!k zWFQN%lWrA#czUVxH;~@f7spyOyy(im*VFeF9}hsri-!FCAPoK8TwS4ORH7VxhMq;1 z^XQR=w#MeP&W6d2hQU5u%@L>mtfH7`Zx;IhmOT-=Oy| zrK*WxGAlIAGhN9y)V_Eji^5r5cOnL4ICHb+-wq+U{4KqHm%YXFi{f8Rbt@?0q2USV z>SEj$BFU$ZdXk+unmC^M7rzu^Ux8hm=DxL;-hZPe|Bq^Q;OF!y7H*F&sl2Wa?LYg_kc+~8Yn_A9f9QUhLs$Z>X@3JPvxO%t-y4nRhhzp zp^7Gi;%F895sw5caJT-JbMTja{I4wRz5C^%`I`xypFj?S}lH zo0J3mZ2&qX@wyE zMB9w}6Kzx8b^vHJ6YpgKb*8l|U;&n;h-oDF3HY^-9|25GwTa=xW5Y6pd3J|QV-!2?{0d;r;x;9##?mqlEMB&ADi{J2zL0h z1Qfy#_>d54VdxhuqC>etdvUyJ|6;Kfb?hk;2;!Cve24K#5W#Ef@>%E`%- z)25!*nDtL%k?pt}hlkb;z(~^T3;*FX(o998F}2I*#-93p`GT8{7o=~5J3qN`DVwUt zDcpH_?y)7P$mqRM1yjLpHdT<)xNDzfeO=u9Y^rn(!PM`jUux2YE>!9}```Oi_f`ya z`&u5Q(Z#~>>`R@Kp-%Ho-+(=nshiN5kPqTi98WM-LgY7Yd#3B~lwZ4#d+C^|j;p?^sKoL_vU~JoGE-@8^;*^ zmSndN{#qs?UyKK6fF5mUr@%?n&aixp2c-D;s|OUT>nUqUMj!cjO4j>wu#zOm>H_2* zX7Lve;1{2!60yKY>;<(x&IjW9#i)u7Y|^GdIJk5%#R}lM?&?PjD~rU7Tvfi^J^x|h zJCg7TP$eGzMU_}|p~<^s#LOz+ED%&EcwGKNiKucF*ma|Ek97zxetN$_33u^`BVIM; zqW^+{*nRr(GLXWkP4Xl9z6bCFp)2V8s6a)Hq!xEkj_{wD6Fl z>*gi#xqypC0v54(J47;A`D5IgzS;3Bkg7BO<^H)ubU 
[GIT binary patch payload omitted]
From be2147ca47cb669fa2e48cd05524c90c1683902a Mon Sep 17 00:00:00 2001
From: Arda Kayagil
Date: Mon, 19 Apr 2021 06:08:17 +0300
Subject: [PATCH 038/414] Update download_videos.sh (#816)

* Update download_videos.sh

* Add mmcv package for hvu download_videos script
---
 tools/data/gym/download_videos.sh | 1 +
 tools/data/hvu/download_videos.sh | 1 +
 2 files changed, 2 insertions(+)

diff --git a/tools/data/gym/download_videos.sh b/tools/data/gym/download_videos.sh
index 2788a4265d..1e8fd99599 100644
--- a/tools/data/gym/download_videos.sh
+++ b/tools/data/gym/download_videos.sh
@@ -3,6 +3,7 @@
 # set up environment
 conda env create -f environment.yml
 source activate gym
+pip install mmcv
 pip install --upgrade youtube-dl

 DATA_DIR="../../../data/gym"
diff --git a/tools/data/hvu/download_videos.sh b/tools/data/hvu/download_videos.sh
index eca14cd812..a4ce0d63f1 100644
--- a/tools/data/hvu/download_videos.sh
+++ b/tools/data/hvu/download_videos.sh
@@ -3,6 +3,7 @@
 # set up environment
 conda env create -f environment.yml
 source activate hvu
+pip install mmcv
 pip install --upgrade youtube-dl

 DATA_DIR="../../../data/hvu"

From c96ec7dcee5e63a145660dac3bb86c9389bfe569 Mon Sep 17 00:00:00 2001
From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com>
Date: Mon, 19 Apr 2021 13:53:46 +0800
Subject: [PATCH 039/414] [Fix] Fix bug: Resume can not work. (#820)

The first line of cfg.text is the config filename, which will not be parsed
by this line:
https://github.com/open-mmlab/mmcv/blob/de4f14e9cd547fb8bf886a362ad4f02a0be95141/mmcv/runner/base_runner.py#L346
Use cfg.pretty_text instead.
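As an illustration of the fix above, here is a minimal sketch; the config path is hypothetical, and a recent mmcv with `Config.fromstring` is assumed:

```python
# Hedged sketch: why resuming needs cfg.pretty_text rather than cfg.text.
from mmcv import Config

cfg = Config.fromfile('configs/_base_/example.py')  # hypothetical config

# cfg.text begins with the source filename, which is not valid Python, so
# re-parsing it fails; cfg.pretty_text is a clean dump of the resolved
# config that round-trips, e.g. when a runner restores the config stored
# in a checkpoint's meta via Config.fromstring.
print(cfg.text.splitlines()[0])  # -> 'configs/_base_/example.py'
restored = Config.fromstring(cfg.pretty_text, file_format='.py')
```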
--- tools/train.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/train.py b/tools/train.py index 9b0e67416a..08a768770f 100644 --- a/tools/train.py +++ b/tools/train.py @@ -137,7 +137,7 @@ def main(): # log some basic info logger.info(f'Distributed training: {distributed}') - logger.info(f'Config: {cfg.text}') + logger.info(f'Config: {cfg.pretty_text}') # set random seeds if args.seed is not None: From d1f5b9e5e2ff35c930543c711d614dd8db91e5c0 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Mon, 19 Apr 2021 13:54:26 +0800 Subject: [PATCH 040/414] make hmdb train/val list generation robust (#811) --- tools/data/parse_file_list.py | 29 ++++++++++++++++++----------- 1 file changed, 18 insertions(+), 11 deletions(-) diff --git a/tools/data/parse_file_list.py b/tools/data/parse_file_list.py index f86015911e..9276f05ad4 100644 --- a/tools/data/parse_file_list.py +++ b/tools/data/parse_file_list.py @@ -430,15 +430,23 @@ def generate_class_index_file(): class_list = sorted(os.listdir(frame_path)) class_dict = dict() - with open(class_index_file, 'w') as f: - content = [] - for class_id, class_name in enumerate(class_list): - # like `ClassInd.txt` in UCF-101, the class_id begins with 1 - class_dict[class_name] = class_id + 1 - cur_line = ' '.join([str(class_id + 1), class_name]) - content.append(cur_line) - content = '\n'.join(content) - f.write(content) + if not osp.exists(class_index_file): + with open(class_index_file, 'w') as f: + content = [] + for class_id, class_name in enumerate(class_list): + # like `ClassInd.txt` in UCF-101, + # the class_id begins with 1 + class_dict[class_name] = class_id + 1 + cur_line = ' '.join([str(class_id + 1), class_name]) + content.append(cur_line) + content = '\n'.join(content) + f.write(content) + else: + print(f'{class_index_file} has been generated before.') + class_dict = { + class_name: class_id + 1 + for class_id, class_name in enumerate(class_list) + } for i in range(1, 4): train_content = [] @@ -469,8 +477,7 @@ def generate_class_index_file(): with open(test_file_template.format(i), 'w') as fout: fout.write(test_content) - if not osp.exists(class_index_file): - generate_class_index_file() + generate_class_index_file() with open(class_index_file, 'r') as fin: class_index = [x.strip().split() for x in fin] From db1fa970c9b96a21f193fedb8a23b7aa177c29c0 Mon Sep 17 00:00:00 2001 From: irvingzhang0512 Date: Mon, 19 Apr 2021 14:08:21 +0800 Subject: [PATCH 041/414] alternative way to download kinetics (#817) --- tools/data/kinetics/README.md | 2 +- tools/data/kinetics/README_zh-CN.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/data/kinetics/README.md b/tools/data/kinetics/README.md index cf09c7eb79..bb3ab2dcb9 100644 --- a/tools/data/kinetics/README.md +++ b/tools/data/kinetics/README.md @@ -63,7 +63,7 @@ For better decoding speed, you can resize the original videos into smaller sized python ../resize_videos.py ../../../data/${DATASET}/videos_train/ ../../../data/${DATASET}/videos_train_256p_dense_cache --dense --level 2 ``` -You can also download kinetics400 and kinetics700 from [Academic Torrents](https://academictorrents.com/details/184d11318372f70018cf9a72ef867e2fb9ce1d26). 
+You can also download from [Academic Torrents](https://academictorrents.com/) ([kinetics400](https://academictorrents.com/details/184d11318372f70018cf9a72ef867e2fb9ce1d26) & [kinetics700](https://academictorrents.com/details/49f203189fb69ae96fb40a6d0e129949e1dfec98) are available) and [cvdfoundation/kinetics-dataset](https://github.com/cvdfoundation/kinetics-dataset) (hosted by Common Visual Data Foundation; Kinetics400/Kinetics600/Kinetics-700-2020 are available)

 ## Step 3. Extract RGB and Flow

diff --git a/tools/data/kinetics/README_zh-CN.md b/tools/data/kinetics/README_zh-CN.md
index badbdea256..bb11c62667 100644
--- a/tools/data/kinetics/README_zh-CN.md
+++ b/tools/data/kinetics/README_zh-CN.md
@@ -60,7 +60,7 @@ bash rename_classnames.sh ${DATASET}
 python ../resize_videos.py ../../../data/${DATASET}/videos_train/ ../../../data/${DATASET}/videos_train_256p_dense_cache --dense --level 2
 ```

-也可以从 [Academic Torrents](https://academictorrents.com/details/184d11318372f70018cf9a72ef867e2fb9ce1d26) 下载 Kinetics400 和 Kinetics700。
+也可以从 [Academic Torrents](https://academictorrents.com/) 中下载 [kinetics400](https://academictorrents.com/details/184d11318372f70018cf9a72ef867e2fb9ce1d26) 和 [kinetics700](https://academictorrents.com/details/49f203189fb69ae96fb40a6d0e129949e1dfec98),从 Common Visual Data Foundation 维护的 [cvdfoundation/kinetics-dataset](https://github.com/cvdfoundation/kinetics-dataset) 中下载 Kinetics400/Kinetics600/Kinetics-700-2020。

 ## 3. 提取 RGB 帧和光流

From 91483e8314da49d01c4e223a3d784ec1b5769b7d Mon Sep 17 00:00:00 2001
From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com>
Date: Tue, 20 Apr 2021 11:56:58 +0800
Subject: [PATCH 042/414] [Improvement] Refactor sampler (#790)

* resolve comments

* update changelog

* refactor sampler

* resolve comments

* update

* add docstring

* add unit_test for sampler
---
 mmaction/datasets/base.py                     |  35 +++---
 mmaction/datasets/builder.py                  |  14 ++-
 mmaction/datasets/rawframe_dataset.py         |  27 ++---
 mmaction/datasets/samplers/__init__.py        |   5 +-
 .../datasets/samplers/distributed_sampler.py  | 102 +++++++++++++-----
 tests/test_data/test_sampler.py               |  95 ++++++++++++++++
 6 files changed, 214 insertions(+), 64 deletions(-)
 create mode 100644 tests/test_data/test_sampler.py

diff --git a/mmaction/datasets/base.py b/mmaction/datasets/base.py
index fbc2c79c44..bbed8addec 100644
--- a/mmaction/datasets/base.py
+++ b/mmaction/datasets/base.py
@@ -46,11 +46,13 @@ class BaseDataset(Dataset, metaclass=ABCMeta):
         sample_by_class (bool): Sampling by class, should be set `True` when
             performing inter-class data balancing. Only compatible with
             `multi_class == False`. Only applies for training. Default: False.
-        power (float | None): We support sampling data with the probability
+        power (float): We support sampling data with the probability
             proportional to the power of its label frequency (freq ^ power)
             when sampling data. `power == 1` indicates uniformly sampling all
             data; `power == 0` indicates uniformly sampling all classes.
-            Default: None.
+            Default: 0.
+        dynamic_length (bool): If the dataset length is dynamic (used by
+            ClassSpecificDistributedSampler). Default: False.
""" def __init__(self, @@ -63,7 +65,8 @@ def __init__(self, start_index=1, modality='RGB', sample_by_class=False, - power=None): + power=0, + dynamic_length=False): super().__init__() self.ann_file = ann_file @@ -77,6 +80,8 @@ def __init__(self, self.modality = modality self.sample_by_class = sample_by_class self.power = power + self.dynamic_length = dynamic_length + assert not (self.multi_class and self.sample_by_class) self.pipeline = Compose(pipeline) @@ -84,6 +89,16 @@ def __init__(self, if self.sample_by_class: self.video_infos_by_class = self.parse_by_class() + class_prob = [] + for k, samples in self.video_infos_by_class.items(): + class_prob.append(len(samples) / len(self.video_infos)) + class_prob = [x**self.power for x in class_prob] + + summ = sum(class_prob) + class_prob = [x / summ for x in class_prob] + + self.class_prob = dict(zip(self.video_infos_by_class, class_prob)) + @abstractmethod def load_annotations(self): """Load the annotation according to ann_file into video_infos.""" @@ -230,12 +245,7 @@ def dump_results(results, out): def prepare_train_frames(self, idx): """Prepare the frames for training given the index.""" - if self.sample_by_class: - # Then, the idx is the class index - samples = self.video_infos_by_class[idx] - results = copy.deepcopy(np.random.choice(samples)) - else: - results = copy.deepcopy(self.video_infos[idx]) + results = copy.deepcopy(self.video_infos[idx]) results['modality'] = self.modality results['start_index'] = self.start_index @@ -250,12 +260,7 @@ def prepare_train_frames(self, idx): def prepare_test_frames(self, idx): """Prepare the frames for testing given the index.""" - if self.sample_by_class: - # Then, the idx is the class index - samples = self.video_infos_by_class[idx] - results = copy.deepcopy(np.random.choice(samples)) - else: - results = copy.deepcopy(self.video_infos[idx]) + results = copy.deepcopy(self.video_infos[idx]) results['modality'] = self.modality results['start_index'] = self.start_index diff --git a/mmaction/datasets/builder.py b/mmaction/datasets/builder.py index 1936586cc8..33056bde7f 100644 --- a/mmaction/datasets/builder.py +++ b/mmaction/datasets/builder.py @@ -10,7 +10,7 @@ from .dataset_wrappers import RepeatDataset from .registry import DATASETS -from .samplers import DistributedPowerSampler, DistributedSampler +from .samplers import ClassSpecificDistributedSampler, DistributedSampler if platform.system() != 'Windows': # https://github.com/pytorch/pytorch/issues/973 @@ -79,13 +79,17 @@ def build_dataloader(dataset, """ rank, world_size = get_dist_info() sample_by_class = getattr(dataset, 'sample_by_class', False) - power = getattr(dataset, 'power', None) if dist: if sample_by_class: - assert power is not None - sampler = DistributedPowerSampler( - dataset, world_size, rank, power, seed=seed) + dynamic_length = getattr(dataset, 'dynamic_length', True) + sampler = ClassSpecificDistributedSampler( + dataset, + world_size, + rank, + dynamic_length=dynamic_length, + shuffle=shuffle, + seed=seed) else: sampler = DistributedSampler( dataset, world_size, rank, shuffle=shuffle, seed=seed) diff --git a/mmaction/datasets/rawframe_dataset.py b/mmaction/datasets/rawframe_dataset.py index 6c632851f2..6a0883e18e 100644 --- a/mmaction/datasets/rawframe_dataset.py +++ b/mmaction/datasets/rawframe_dataset.py @@ -1,7 +1,6 @@ import copy import os.path as osp -import numpy as np import torch from .base import BaseDataset @@ -77,11 +76,13 @@ class RawframeDataset(BaseDataset): sample_by_class (bool): Sampling by class, should be set 
`True` when performing inter-class data balancing. Only compatible with `multi_class == False`. Only applies for training. Default: False. - power (float | None): We support sampling data with the probability + power (float): We support sampling data with the probability proportional to the power of its label frequency (freq ^ power) when sampling data. `power == 1` indicates uniformly sampling all data; `power == 0` indicates uniformly sampling all classes. - Default: None. + Default: 0. + dynamic_length (bool): If the dataset length is dynamic (used by + ClassSpecificDistributedSampler). Default: False. """ def __init__(self, @@ -96,7 +97,8 @@ def __init__(self, start_index=1, modality='RGB', sample_by_class=False, - power=None): + power=0., + dynamic_length=False): self.filename_tmpl = filename_tmpl self.with_offset = with_offset super().__init__( @@ -109,7 +111,8 @@ def __init__(self, start_index, modality, sample_by_class=sample_by_class, - power=power) + power=power, + dynamic_length=dynamic_length) def load_annotations(self): """Load annotation file to get video information.""" @@ -151,12 +154,7 @@ def load_annotations(self): def prepare_train_frames(self, idx): """Prepare the frames for training given the index.""" - if self.sample_by_class: - # Then, the idx is the class index - samples = self.video_infos_by_class[idx] - results = copy.deepcopy(np.random.choice(samples)) - else: - results = copy.deepcopy(self.video_infos[idx]) + results = copy.deepcopy(self.video_infos[idx]) results['filename_tmpl'] = self.filename_tmpl results['modality'] = self.modality results['start_index'] = self.start_index @@ -171,12 +169,7 @@ def prepare_train_frames(self, idx): def prepare_test_frames(self, idx): """Prepare the frames for testing given the index.""" - if self.sample_by_class: - # Then, the idx is the class index - samples = self.video_infos_by_class[idx] - results = copy.deepcopy(np.random.choice(samples)) - else: - results = copy.deepcopy(self.video_infos[idx]) + results = copy.deepcopy(self.video_infos[idx]) results['filename_tmpl'] = self.filename_tmpl results['modality'] = self.modality results['start_index'] = self.start_index diff --git a/mmaction/datasets/samplers/__init__.py b/mmaction/datasets/samplers/__init__.py index 1299b6b378..72cb2f686f 100644 --- a/mmaction/datasets/samplers/__init__.py +++ b/mmaction/datasets/samplers/__init__.py @@ -1,3 +1,4 @@ -from .distributed_sampler import DistributedPowerSampler, DistributedSampler +from .distributed_sampler import (ClassSpecificDistributedSampler, + DistributedSampler) -__all__ = ['DistributedSampler', 'DistributedPowerSampler'] +__all__ = ['DistributedSampler', 'ClassSpecificDistributedSampler'] diff --git a/mmaction/datasets/samplers/distributed_sampler.py b/mmaction/datasets/samplers/distributed_sampler.py index 5529a0d9cd..aefb6eaf2e 100644 --- a/mmaction/datasets/samplers/distributed_sampler.py +++ b/mmaction/datasets/samplers/distributed_sampler.py @@ -1,3 +1,6 @@ +import math +from collections import defaultdict + import torch from torch.utils.data import DistributedSampler as _DistributedSampler @@ -40,43 +43,92 @@ def __iter__(self): return iter(indices) -class DistributedPowerSampler(_DistributedSampler): - """DistributedPowerSampler inheriting from +class ClassSpecificDistributedSampler(_DistributedSampler): + """ClassSpecificDistributedSampler inheriting from ``torch.utils.data.DistributedSampler``. - Samples are sampled with the probability that is proportional to the power - of label frequency (freq ^ power). 
The sampler only applies to single class
-    recognition dataset.
+    Samples are sampled with a class specific probability, which should be an
+    attribute of the dataset (dataset.class_prob, which is a dictionary that
+    maps label index to the prob). This sampler is only applicable to
+    single-class recognition datasets. This sampler is also compatible with
+    RepeatDataset.

-    The default value of power is 1, which is equivalent to bootstrap sampling
-    from the entire dataset.
+    The default value of dynamic_length is True, which means we use
+    oversampling / subsampling, and the dataset length may change. If
+    dynamic_length is set to False, the dataset length is fixed.
     """

-    def __init__(self, dataset, num_replicas=None, rank=None, power=1, seed=0):
+    def __init__(self,
+                 dataset,
+                 num_replicas=None,
+                 rank=None,
+                 dynamic_length=True,
+                 shuffle=True,
+                 seed=0):
         super().__init__(dataset, num_replicas=num_replicas, rank=rank)
-        self.power = power
+        self.shuffle = shuffle
+
+        if type(dataset).__name__ == 'RepeatDataset':
+            dataset = dataset.dataset
+
+        assert hasattr(dataset, 'class_prob')
+
+        self.class_prob = dataset.class_prob
+        self.dynamic_length = dynamic_length
+
         # for the compatibility from PyTorch 1.3+
         self.seed = seed if seed is not None else 0

     def __iter__(self):
-        # deterministically shuffle based on epoch
         g = torch.Generator()
-        g.manual_seed(self.epoch + self.seed)
-        video_infos_by_class = self.dataset.video_infos_by_class
-        num_classes = self.dataset.num_classes
-        # For simplicity, discontinuous labels are not permitted
-        assert set(video_infos_by_class) == set(range(num_classes))
-        counts = [len(video_infos_by_class[i]) for i in range(num_classes)]
-        counts = [cnt**self.power for cnt in counts]
-
-        indices = torch.multinomial(
-            torch.Tensor(counts),
-            self.total_size,
-            replacement=True,
-            generator=g)
-        indices = indices.data.numpy().tolist()
+        g.manual_seed(self.seed + self.epoch)
+
+        class_indices = defaultdict(list)
+
+        # To be compatible with RepeatDataset
+        times = 1
+        dataset = self.dataset
+        if type(dataset).__name__ == 'RepeatDataset':
+            times = dataset.times
+            dataset = dataset.dataset
+        for i, item in enumerate(dataset.video_infos):
+            class_indices[item['label']].append(i)
+
+        if self.dynamic_length:
+            indices = []
+            for k, prob in self.class_prob.items():
+                prob = prob * times
+                for i in range(int(prob // 1)):
+                    indices.extend(class_indices[k])
+                rem = int((prob % 1) * len(class_indices[k]))
+                rem_indices = torch.randperm(
+                    len(class_indices[k]), generator=g).tolist()[:rem]
+                indices.extend(rem_indices)
+            if self.shuffle:
+                shuffle = torch.randperm(len(indices), generator=g).tolist()
+                indices = [indices[i] for i in shuffle]
+
+            # re-calc num_samples & total_size
+            self.num_samples = math.ceil(len(indices) / self.num_replicas)
+            self.total_size = self.num_samples * self.num_replicas
+        else:
+            # We want to keep the dataloader length same as original
+            video_labels = [x['label'] for x in dataset.video_infos]
+            probs = [
+                self.class_prob[lb] / len(class_indices[lb])
+                for lb in video_labels
+            ]
+
+            indices = torch.multinomial(
+                torch.Tensor(probs),
+                self.total_size,
+                replacement=True,
+                generator=g)
+            indices = indices.data.numpy().tolist()
+
+        indices += indices[:(self.total_size - len(indices))]

         assert len(indices) == self.total_size
+
         # retrieve indices for current process
         indices = indices[self.rank:self.total_size:self.num_replicas]
         assert len(indices) == self.num_samples
-
         return iter(indices)
diff --git a/tests/test_data/test_sampler.py
b/tests/test_data/test_sampler.py new file mode 100644 index 0000000000..2e7f2b1524 --- /dev/null +++ b/tests/test_data/test_sampler.py @@ -0,0 +1,95 @@ +from torch.utils.data import DataLoader, Dataset + +from mmaction.datasets.samplers import (ClassSpecificDistributedSampler, + DistributedSampler) + + +class MyDataset(Dataset): + + def __init__(self, class_prob={i: 1 for i in range(10)}): + super().__init__() + self.class_prob = class_prob + self.video_infos = [ + dict(data=idx, label=idx % 10) for idx in range(100) + ] + + def __len__(self): + return len(self.video_infos) + + def __getitem__(self, idx): + return self.video_infos[idx] + + +def test_distributed_sampler(): + dataset = MyDataset() + sampler = DistributedSampler(dataset, num_replicas=1, rank=0) + data_loader = DataLoader(dataset, batch_size=4, sampler=sampler) + batches = [] + for i, data in enumerate(data_loader): + batches.append(data) + + assert len(batches) == 25 + assert sum([len(x['data']) for x in batches]) == 100 + + sampler = DistributedSampler(dataset, num_replicas=4, rank=2) + data_loader = DataLoader(dataset, batch_size=4, sampler=sampler) + batches = [] + for i, data in enumerate(data_loader): + batches.append(data) + + assert len(batches) == 7 + assert sum([len(x['data']) for x in batches]) == 25 + + sampler = DistributedSampler(dataset, num_replicas=6, rank=3) + data_loader = DataLoader(dataset, batch_size=4, sampler=sampler) + batches = [] + for i, data in enumerate(data_loader): + batches.append(data) + + assert len(batches) == 5 + assert sum([len(x['data']) for x in batches]) == 17 + + +def test_class_specific_distributed_sampler(): + class_prob = dict(zip(list(range(10)), [1] * 5 + [3] * 5)) + dataset = MyDataset(class_prob=class_prob) + + sampler = ClassSpecificDistributedSampler( + dataset, num_replicas=1, rank=0, dynamic_length=True) + data_loader = DataLoader(dataset, batch_size=4, sampler=sampler) + batches = [] + for i, data in enumerate(data_loader): + batches.append(data) + + assert len(batches) == 50 + assert sum([len(x['data']) for x in batches]) == 200 + + sampler = ClassSpecificDistributedSampler( + dataset, num_replicas=1, rank=0, dynamic_length=False) + data_loader = DataLoader(dataset, batch_size=4, sampler=sampler) + batches = [] + for i, data in enumerate(data_loader): + batches.append(data) + + assert len(batches) == 25 + assert sum([len(x['data']) for x in batches]) == 100 + + sampler = ClassSpecificDistributedSampler( + dataset, num_replicas=6, rank=2, dynamic_length=True) + data_loader = DataLoader(dataset, batch_size=4, sampler=sampler) + batches = [] + for i, data in enumerate(data_loader): + batches.append(data) + + assert len(batches) == 9 + assert sum([len(x['data']) for x in batches]) == 34 + + sampler = ClassSpecificDistributedSampler( + dataset, num_replicas=6, rank=2, dynamic_length=False) + data_loader = DataLoader(dataset, batch_size=4, sampler=sampler) + batches = [] + for i, data in enumerate(data_loader): + batches.append(data) + + assert len(batches) == 5 + assert sum([len(x['data']) for x in batches]) == 17 From a1a10050b0bca1c6781138c74b5b7070988fc788 Mon Sep 17 00:00:00 2001 From: Kenny Date: Tue, 20 Apr 2021 13:28:30 +0800 Subject: [PATCH 043/414] resolve comments --- mmaction/datasets/pipelines/augmentations.py | 15 ++-- mmaction/datasets/pipelines/pose_loading.py | 80 +++++++++++++++----- mmaction/datasets/pose_dataset.py | 19 +---- 3 files changed, 70 insertions(+), 44 deletions(-) diff --git a/mmaction/datasets/pipelines/augmentations.py 
b/mmaction/datasets/pipelines/augmentations.py index 1a84423c7f..39bf260215 100644 --- a/mmaction/datasets/pipelines/augmentations.py +++ b/mmaction/datasets/pipelines/augmentations.py @@ -82,8 +82,8 @@ def __init__(self, self.padding = padding self.threshold = threshold - if isinstance(hw_ratio, float): - hw_ratio = (hw_ratio, hw_ratio) + if hw_ratio is not None: + hw_ratio = _pair(hw_ratio) self.hw_ratio = hw_ratio @@ -145,6 +145,13 @@ def __call__(self, results): results['crop_quadruple'] = crop_quadruple return results + def __repr__(self): + repr_str = (f'{self.__class__.__name__}(padding={self.padding}, ' + f'threshold={self.threshold}, ' + f'hw_ratio={self.hw_ratio}, ' + f'allow_imgpad={self.allow_imgpad})') + return repr_str + @PIPELINES.register_module() class Imgaug: @@ -1341,9 +1348,7 @@ def __call__(self, results): modality) if 'kp' in results: kp = results['kp'] - kpscore = None - if 'kpscore' in results: - kpscore = results['kpscore'] + kpscore = results.get('kpscore', None) kp, kpscore = self._flip_kps(kp, kpscore, img_width) results['kp'] = kp if 'kpscore' in results: diff --git a/mmaction/datasets/pipelines/pose_loading.py b/mmaction/datasets/pipelines/pose_loading.py index c0aca52282..76f96c2607 100644 --- a/mmaction/datasets/pipelines/pose_loading.py +++ b/mmaction/datasets/pipelines/pose_loading.py @@ -1,12 +1,12 @@ -import copy as cp -import pickle +import copy as cp # isort: skip +import pickle # isort: skip -import numpy as np -from mmcv.fileio import FileClient -from scipy.stats import mode +import numpy as np # isort: skip +from mmcv.fileio import FileClient # isort: skip +from scipy.stats import mode # isort: skip -from ..registry import PIPELINES -from .augmentations import Flip +from ..registry import PIPELINES # isort: skip +from .augmentations import Flip # isort: skip @PIPELINES.register_module() @@ -108,6 +108,14 @@ def __call__(self, results): results['num_clips'] = self.num_clips return results + def __repr__(self): + repr_str = (f'{self.__class__.__name__}(' + f'clip_len={self.clip_len}, ' + f'num_clips={self.num_clips}, ' + f'test_mode={self.test_mode}, ' + f'seed={self.seed})') + return repr_str + @PIPELINES.register_module() class PoseDecode(object): @@ -124,15 +132,15 @@ class PoseDecode(object): Default: 1. drop_prob (float): The probability for dropping one keypoint for each frame. Default: 1 / 16. - manipulate_joints (list[int]): The joint indexes that may be dropped. - Default: [7, 8, 9, 10, 13, 14, 15, 16]. (limb joints) + manipulate_joints (tuple[int]): The joint indexes that may be dropped. + Default: (7, 8, 9, 10, 13, 14, 15, 16). (limb joints) """ def __init__(self, random_drop=False, random_seed=1, drop_prob=1. 
/ 16.,
-                 manipulate_joints=[7, 8, 9, 10, 13, 14, 15, 16]):
+                 manipulate_joints=(7, 8, 9, 10, 13, 14, 15, 16)):
         self.random_drop = random_drop
         self.random_seed = random_seed
         self.drop_prob = drop_prob
@@ -181,6 +189,14 @@ def __call__(self, results):

         return results

+    def __repr__(self):
+        repr_str = (f'{self.__class__.__name__}('
+                    f'random_drop={self.random_drop}, '
+                    f'random_seed={self.random_seed}, '
+                    f'drop_prob={self.drop_prob}, '
+                    f'manipulate_joints={self.manipulate_joints})')
+        return repr_str
+

 @PIPELINES.register_module()
 class LoadKineticsPose:
@@ -319,6 +335,16 @@ def mapinds(inds):
             results['kpscore'] = new_kpscore[:self.max_person]
         return results

+    def __repr__(self):
+        repr_str = (f'{self.__class__.__name__}('
+                    f'io_backend={self.io_backend}, '
+                    f'squeeze={self.squeeze}, '
+                    f'max_person={self.max_person}, '
+                    f'keypoint_weight={self.keypoint_weight}, '
+                    f'source={self.source}, '
+                    f'kwargs={self.kwargs})')
+        return repr_str
+

 @PIPELINES.register_module()
 class GeneratePoseTarget:
@@ -334,18 +360,18 @@ class GeneratePoseTarget:
         with_kp (bool): Generate pseudo heatmaps for keypoints. Default: True.
         with_limb (bool): Generate pseudo heatmaps for limbs. At least one of
             'with_kp' and 'with_limb' should be True. Default: False.
-        skeletons (list[tuple]): The definition of human skeletons.
-            Default: [(0, 1), (0, 2), (1, 3), (2, 4), (0, 5), (5, 7), (7, 9),
+        skeletons (tuple[tuple]): The definition of human skeletons.
+            Default: ((0, 1), (0, 2), (1, 3), (2, 4), (0, 5), (5, 7), (7, 9),
                       (0, 6), (6, 8), (8, 10), (5, 11), (11, 13), (13, 15),
-                      (6, 12), (12, 14), (14, 16), (11, 12)],
+                      (6, 12), (12, 14), (14, 16), (11, 12)),
             which is the definition of COCO-17p skeletons.
         double (bool): Output both original heatmaps and flipped heatmaps.
             Default: False.
-        left (list[int]): Indexes of left keypoints, which is used when
-            flipping heatmaps. Default: [1, 3, 5, 7, 9, 11, 13, 15],
+        left (tuple[int]): Indexes of left keypoints, which is used when
+            flipping heatmaps. Default: (1, 3, 5, 7, 9, 11, 13, 15),
             which is left keypoints in COCO-17p.
-        right (list[int]): Indexes of right keypoints, which is used when
-            flipping heatmaps. Default: [2, 4, 6, 8, 10, 12, 14, 16],
+        right (tuple[int]): Indexes of right keypoints, which is used when
+            flipping heatmaps. Default: (2, 4, 6, 8, 10, 12, 14, 16),
             which is right keypoints in COCO-17p.
""" @@ -354,12 +380,12 @@ def __init__(self, use_score=True, with_kp=True, with_limb=False, - skeletons=[(0, 1), (0, 2), (1, 3), (2, 4), (0, 5), (5, 7), + skeletons=((0, 1), (0, 2), (1, 3), (2, 4), (0, 5), (5, 7), (7, 9), (0, 6), (6, 8), (8, 10), (5, 11), (11, 13), - (13, 15), (6, 12), (12, 14), (14, 16), (11, 12)], + (13, 15), (6, 12), (12, 14), (14, 16), (11, 12)), double=False, - left=[1, 3, 5, 7, 9, 11, 13, 15], - right=[2, 4, 6, 8, 10, 12, 14, 16]): + left=(1, 3, 5, 7, 9, 11, 13, 15), + right=(2, 4, 6, 8, 10, 12, 14, 16)): self.sigma = sigma self.use_score = use_score @@ -591,3 +617,15 @@ def __call__(self, results): [self.gen_an_aug(results), self.gen_an_aug(results_)]) return results + + def __repr__(self): + repr_str = (f'{self.__class__.__name__}(' + f'sigma={self.sigma}, ' + f'use_score={self.use_score}, ' + f'with_kp={self.with_kp}, ' + f'with_limb={self.with_limb}, ' + f'skeletons={self.skeletons}, ' + f'double={self.double}, ' + f'left={self.left}, ' + f'right={self.right})') + return repr_str diff --git a/mmaction/datasets/pose_dataset.py b/mmaction/datasets/pose_dataset.py index a6958b697c..09f1c6bed9 100644 --- a/mmaction/datasets/pose_dataset.py +++ b/mmaction/datasets/pose_dataset.py @@ -1,4 +1,3 @@ -import copy import os.path as osp import mmcv @@ -95,28 +94,12 @@ def load_annotations(self): def load_pkl_annotations(self): data = mmcv.load(self.ann_file) - for i, item in enumerate(data): + for item in data: # Sometimes we may need to load anno from the file if 'filename' in item: item['filename'] = osp.join(self.data_prefix, item['filename']) return data - def prepare_train_frames(self, idx): - """Prepare the frames for training given the index.""" - results = copy.deepcopy(self.video_infos[idx]) - - results['modality'] = self.modality - results['start_index'] = self.start_index - return self.pipeline(results) - - def prepare_test_frames(self, idx): - """Prepare the frames for testing given the index.""" - results = copy.deepcopy(self.video_infos[idx]) - - results['modality'] = self.modality - results['start_index'] = self.start_index - return self.pipeline(results) - def evaluate(self, results, metrics='top_k_accuracy', From eb5d2e4a633a95f87d1302c6852559cb76605f8a Mon Sep 17 00:00:00 2001 From: Kenny Date: Tue, 20 Apr 2021 13:29:33 +0800 Subject: [PATCH 044/414] remove skip --- mmaction/datasets/pipelines/pose_loading.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/mmaction/datasets/pipelines/pose_loading.py b/mmaction/datasets/pipelines/pose_loading.py index 76f96c2607..7b955992a7 100644 --- a/mmaction/datasets/pipelines/pose_loading.py +++ b/mmaction/datasets/pipelines/pose_loading.py @@ -1,12 +1,12 @@ -import copy as cp # isort: skip -import pickle # isort: skip +import copy as cp +import pickle -import numpy as np # isort: skip -from mmcv.fileio import FileClient # isort: skip -from scipy.stats import mode # isort: skip +import numpy as np +from mmcv.fileio import FileClient +from scipy.stats import mode -from ..registry import PIPELINES # isort: skip -from .augmentations import Flip # isort: skip +from ..registry import PIPELINES +from .augmentations import Flip @PIPELINES.register_module() From 74d8f12bf5a8d51ccb25b487222ef7aba915bbcf Mon Sep 17 00:00:00 2001 From: Kenny Date: Tue, 20 Apr 2021 14:23:57 +0800 Subject: [PATCH 045/414] improve docstring coverage --- mmaction/datasets/pipelines/pose_loading.py | 35 +++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/mmaction/datasets/pipelines/pose_loading.py 
b/mmaction/datasets/pipelines/pose_loading.py index 7b955992a7..c01e334a9b 100644 --- a/mmaction/datasets/pipelines/pose_loading.py +++ b/mmaction/datasets/pipelines/pose_loading.py @@ -32,6 +32,13 @@ def __init__(self, clip_len, num_clips=1, test_mode=False, seed=255): self.seed = seed def _get_train_clips(self, num_frames, clip_len): + """Uniformly sample indices for training clips. + + Args: + num_frames (int): The number of frames. + clip_len (int): The length of the clip. + """ + assert self.num_clips == 1 if num_frames < clip_len: start = np.random.randint(0, num_frames) @@ -54,6 +61,13 @@ def _get_train_clips(self, num_frames, clip_len): return inds def _get_test_clips(self, num_frames, clip_len): + """Uniformly sample indices for testing clips. + + Args: + num_frames (int): The number of frames. + clip_len (int): The length of the clip. + """ + np.random.seed(self.seed) if num_frames < clip_len: # Then we use a simple strategy @@ -148,6 +162,13 @@ def __init__(self, # inplace def _drop_kpscore(self, kpscores): + """Randomly drop keypoints by setting the corresponding keypoint scores + as 0. + + Args: + kpscores (np.ndarray): The confidence scores of keypoints. + """ + for kpscore in kpscores: lt = kpscore.shape[0] for tidx in range(lt): @@ -156,9 +177,23 @@ def _drop_kpscore(self, kpscores): kpscore[tidx, jidx] = 0. def _load_kp(self, kp, frame_inds): + """Load keypoints given frame indices. + + Args: + kp (np.ndarray): The keypoint coordinates. + frame_inds (np.ndarray): The frame indices. + """ + return [x[frame_inds].astype(np.float32) for x in kp] def _load_kpscore(self, kpscore, frame_inds): + """Load keypoint scores given frame indices. + + Args: + kpscore (np.ndarray): The confidence scores of keypoints. + frame_inds (np.ndarray): The frame indices. + """ + return [x[frame_inds].astype(np.float32) for x in kpscore] def __call__(self, results): From 1532c803c8c17246fbd46db744295e345ceb3902 Mon Sep 17 00:00:00 2001 From: Kenny Date: Tue, 20 Apr 2021 14:27:03 +0800 Subject: [PATCH 046/414] add interface to set class_prob explicitly. --- mmaction/datasets/pose_dataset.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/mmaction/datasets/pose_dataset.py b/mmaction/datasets/pose_dataset.py index 09f1c6bed9..7a7c8c61e3 100644 --- a/mmaction/datasets/pose_dataset.py +++ b/mmaction/datasets/pose_dataset.py @@ -37,6 +37,9 @@ class PoseDataset(BaseDataset): with confidence score larger than `box_thre` is kept. None means not applicable (only applicable to Kinetics Pose [ours]). Allowed choices are '0.5', '0.6', '0.7', '0.8', '0.9'. Default: None. + class_prob (dict | None): The per class sampling probability. If not + None, it will override the class_prob calculated in + BaseDataset.__init__(). Default: None. **kwargs: Keyword arguments for ``BaseDataset``. 
""" @@ -46,6 +49,7 @@ def __init__(self, resample=None, valid_ratio=None, box_thre=None, + class_prob=None, **kwargs): # For NTU-120 X-Sub self.resample = resample @@ -83,6 +87,9 @@ def __init__(self, ] item['anno_inds'] = np.array(inds) + if class_prob is not None: + self.class_prob = class_prob + logger = get_root_logger() logger.info(f'{len(self)} videos remain after valid thresholding') From 562084d4b24dd74c68ac2bf01914389ddd8db265 Mon Sep 17 00:00:00 2001 From: congee <35596075+congee524@users.noreply.github.com> Date: Tue, 20 Apr 2021 15:11:48 +0800 Subject: [PATCH 047/414] [Docs] add notes for download kinetics (#822) * add notes for download kinetics * refine * refine --- tools/data/kinetics/README.md | 2 +- tools/data/kinetics/README_zh-CN.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/data/kinetics/README.md b/tools/data/kinetics/README.md index bb3ab2dcb9..141998fda2 100644 --- a/tools/data/kinetics/README.md +++ b/tools/data/kinetics/README.md @@ -63,7 +63,7 @@ For better decoding speed, you can resize the original videos into smaller sized python ../resize_videos.py ../../../data/${DATASET}/videos_train/ ../../../data/${DATASET}/videos_train_256p_dense_cache --dense --level 2 ``` -You can also download from [Academic Torrents](https://academictorrents.com/)([kinetics400](https://academictorrents.com/details/184d11318372f70018cf9a72ef867e2fb9ce1d26) & [kinetics700](https://academictorrents.com/details/49f203189fb69ae96fb40a6d0e129949e1dfec98) are avaiable) and [cvdfoundation/kinetics-dataset](https://github.com/cvdfoundation/kinetics-dataset)(Host by Common Visual Data Foundation and Kinetics400/Kinetics600/Kinetics-700-2020 are available) +You can also download from [Academic Torrents](https://academictorrents.com/) ([kinetics400](https://academictorrents.com/details/184d11318372f70018cf9a72ef867e2fb9ce1d26) & [kinetics700](https://academictorrents.com/details/49f203189fb69ae96fb40a6d0e129949e1dfec98) with short edge 256 pixels are avaiable) and [cvdfoundation/kinetics-dataset](https://github.com/cvdfoundation/kinetics-dataset) (Host by Common Visual Data Foundation and Kinetics400/Kinetics600/Kinetics-700-2020 are available) ## Step 3. Extract RGB and Flow diff --git a/tools/data/kinetics/README_zh-CN.md b/tools/data/kinetics/README_zh-CN.md index bb11c62667..af10444469 100644 --- a/tools/data/kinetics/README_zh-CN.md +++ b/tools/data/kinetics/README_zh-CN.md @@ -60,7 +60,7 @@ bash rename_classnames.sh ${DATASET} python ../resize_videos.py ../../../data/${DATASET}/videos_train/ ../../../data/${DATASET}/videos_train_256p_dense_cache --dense --level 2 ``` -也可以从 [Academic Torrents](https://academictorrents.com/) 中下载 [kinetics400](https://academictorrents.com/details/184d11318372f70018cf9a72ef867e2fb9ce1d26) 和 [kinetics700](https://academictorrents.com/details/49f203189fb69ae96fb40a6d0e129949e1dfec98),从 Common Visual Data Foundation 维护的 [cvdfoundation/kinetics-dataset](https://github.com/cvdfoundation/kinetics-dataset) 中下载 Kinetics400/Kinetics600/Kinetics-700-2020。 +也可以从 [Academic Torrents](https://academictorrents.com/) 中下载短边长度为 256 的 [kinetics400](https://academictorrents.com/details/184d11318372f70018cf9a72ef867e2fb9ce1d26) 和 [kinetics700](https://academictorrents.com/details/49f203189fb69ae96fb40a6d0e129949e1dfec98),或从 Common Visual Data Foundation 维护的 [cvdfoundation/kinetics-dataset](https://github.com/cvdfoundation/kinetics-dataset) 中下载 Kinetics400/Kinetics600/Kinetics-700-2020。 ## 3. 
From 6b66a0d93374d5d083938de66486d7ace8b393bc Mon Sep 17 00:00:00 2001
From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com>
Date: Tue, 20 Apr 2021 16:03:41 +0800
Subject: [PATCH 048/414] [Fix] Patch of #820 (#824)

---
 tools/train.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tools/train.py b/tools/train.py
index 08a768770f..20d075e7ec 100644
--- a/tools/train.py
+++ b/tools/train.py
@@ -179,7 +179,7 @@ def main():
     # checkpoints as meta data
     cfg.checkpoint_config.meta = dict(
         mmaction_version=__version__ + get_git_hash(digits=7),
-        config=cfg.text)
+        config=cfg.pretty_text)

     test_option = dict(test_last=args.test_last, test_best=args.test_best)
     train_model(

From 8fb39c3af0d3f9ce2d1d1a2acf3703e4fbbde8d7 Mon Sep 17 00:00:00 2001
From: irvingzhang0512
Date: Wed, 21 Apr 2021 07:14:37 +0800
Subject: [PATCH 049/414] [Feature] Support Webcam Demo for Spatio-temporal
 Action Detection Models (#795)

* first commit

* fix bugs

* add logging

* add changelog

* add writing to local file

* fix sampling strategy bug

* update annotations, remove global variables

* update docs

* decouple display frame shape and model frame shape

* fix known issue

* fix display default shape and fix visualize tools

* fix predict_stepsize bug

* add cn docs

* update

* fix

* update color plate docs

* fix a bug when drawing

* add alphaction visualization tool

* add visualization tool docs

* fix a few bugs

* typo

* remove alphaction visualization tool due to licence issue

* typo

Co-authored-by: Haodong Duan <34324155+kennymckormick@users.noreply.github.com>
---
 demo/README.md                         |  84 ++-
 demo/webcam_demo_spatiotemporal_det.py | 841 +++++++++++++++++++++++++
 docs/changelog.md                      |   2 +
 docs_zh_CN/demo.md                     |  83 ++-
 4 files changed, 1006 insertions(+), 4 deletions(-)
 create mode 100644 demo/webcam_demo_spatiotemporal_det.py

diff --git a/demo/README.md b/demo/README.md
index 6c96453c57..ea0334618b 100644
--- a/demo/README.md
+++ b/demo/README.md
@@ -5,8 +5,9 @@
 - [Video demo](#video-demo): A demo script to predict the recognition result using a single video.
 - [SpatioTemporal Action Detection Video Demo](#spatiotemporal-action-detection-video-demo): A demo script to predict the SpatioTemporal Action Detection result using a single video.
 - [Video GradCAM Demo](#video-gradcam-demo): A demo script to visualize GradCAM results using a single video.
-- [Webcam demo](#webcam-demo): A demo script to implement real-time action recognition from web camera.
+- [Webcam demo](#webcam-demo): A demo script to implement real-time action recognition from a web camera.
 - [Long Video demo](#long-video-demo): a demo script to predict different labels using a single long video.
+- [SpatioTemporal Action Detection Webcam Demo](#spatiotemporal-action-detection-webcam-demo): A demo script to implement real-time spatio-temporal action detection from a web camera.

 ## Video demo

@@ -143,7 +144,7 @@ Optional arguments:
 - `HUMAN_DETECTION_CHECKPOINT`: The human detection checkpoint URL.
 - `HUMAN_DETECTION_SCORE_THRE`: The score threshold for human detection. Default: 0.9.
 - `ACTION_DETECTION_SCORE_THRESHOLD`: The score threshold for action detection. Default: 0.5.
-- `LABEL_MAP`: The label map used. Default: `demo/label_map_ava.txt`
+- `LABEL_MAP`: The label map used. Default: `demo/label_map_ava.txt`.
 - `DEVICE`: Type of device to run the demo. Allowed values are cuda device like `cuda:0` or `cpu`. Default: `cuda:0`.
 - `OUTPUT_FILENAME`: Path to the output file which is a video format. Default: `demo/stdet_demo.mp4`.
- `PREDICT_STEPSIZE`: Make a prediction per N frames. Default: 8.
@@ -326,3 +327,82 @@ or use checkpoint url from `configs/` to directly load corresponding checkpoint,
 python demo/long_video_demo.py configs/recognition/i3d/i3d_r50_video_inference_32x2x1_100e_kinetics400_rgb.py \
   checkpoints/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth PATH_TO_LONG_VIDEO demo/label_map_k400.txt PATH_TO_SAVED_VIDEO \
 ```
+
+## SpatioTemporal Action Detection Webcam Demo
+
+We provide a demo script to implement real-time spatio-temporal action detection from a web camera.
+
+```shell
+python demo/webcam_demo_spatiotemporal_det.py \
+    [--config ${SPATIOTEMPORAL_ACTION_DETECTION_CONFIG_FILE}] \
+    [--checkpoint ${SPATIOTEMPORAL_ACTION_DETECTION_CHECKPOINT}] \
+    [--action-score-thr ${ACTION_DETECTION_SCORE_THRESHOLD}] \
+    [--det-config ${HUMAN_DETECTION_CONFIG_FILE}] \
+    [--det-checkpoint ${HUMAN_DETECTION_CHECKPOINT}] \
+    [--det-score-thr ${HUMAN_DETECTION_SCORE_THRESHOLD}] \
+    [--input-video] ${INPUT_VIDEO} \
+    [--label-map ${LABEL_MAP}] \
+    [--device ${DEVICE}] \
+    [--output-fps ${OUTPUT_FPS}] \
+    [--out-filename ${OUTPUT_FILENAME}] \
+    [--show] \
+    [--display-height] ${DISPLAY_HEIGHT} \
+    [--display-width] ${DISPLAY_WIDTH} \
+    [--predict-stepsize ${PREDICT_STEPSIZE}] \
+    [--clip-vis-length] ${CLIP_VIS_LENGTH}
+```
+
+Optional arguments:
+
+- `SPATIOTEMPORAL_ACTION_DETECTION_CONFIG_FILE`: The spatiotemporal action detection config file path.
+- `SPATIOTEMPORAL_ACTION_DETECTION_CHECKPOINT`: The spatiotemporal action detection checkpoint path or URL.
+- `ACTION_DETECTION_SCORE_THRESHOLD`: The score threshold for action detection. Default: 0.4.
+- `HUMAN_DETECTION_CONFIG_FILE`: The human detection config file path.
+- `HUMAN_DETECTION_CHECKPOINT`: The human detection checkpoint URL.
+- `HUMAN_DETECTION_SCORE_THRE`: The score threshold for human detection. Default: 0.9.
+- `INPUT_VIDEO`: The webcam id or video path of the source. Default: `0`.
+- `LABEL_MAP`: The label map used. Default: `demo/label_map_ava.txt`.
+- `DEVICE`: Type of device to run the demo. Allowed values are cuda device like `cuda:0` or `cpu`. Default: `cuda:0`.
+- `OUTPUT_FPS`: The FPS of demo video output. Default: 15.
+- `OUTPUT_FILENAME`: Path to the output file which is a video format. Default: None.
+- `--show`: Whether to show predictions with `cv2.imshow`.
+- `DISPLAY_HEIGHT`: The height of the display frame. Default: 0.
+- `DISPLAY_WIDTH`: The width of the display frame. Default: 0. If `DISPLAY_HEIGHT <= 0 and DISPLAY_WIDTH <= 0`, the display frame and input video share the same shape.
+- `PREDICT_STEPSIZE`: Make a prediction per N frames. Default: 8.
+- `CLIP_VIS_LENGTH`: The number of frames drawn for each clip. In other words, for each clip, there are at most `CLIP_VIS_LENGTH` frames to be drawn around the keyframe. Default: 8.
+
+Tips to get a better experience with the webcam demo:
+
+- How to choose `--output-fps`?
+
+  - `--output-fps` should be almost equal to the read thread fps.
+  - The read thread fps is printed by the logger in the format `DEBUG:__main__:Read Thread: {duration} ms, {fps} fps`.
+
+- How to choose `--predict-stepsize`?
+
+  - It's related to how to choose the human detector and the spatio-temporal model.
+  - Overall, the duration of the read thread for each task should be greater than or equal to that of model inference.
+  - The durations for read/inference are both printed by the logger.
+  - Larger `--predict-stepsize` leads to a larger duration for the read thread.
+  - In order to take full advantage of computation resources, decrease the value of `--predict-stepsize`.
+
+Examples:
+
+Assume that you are located at `$MMACTION2`.
+
+1. Use Faster RCNN as the human detector and SlowOnly-8x8-R101 as the action detector, making a prediction per 40 frames, with an output FPS of 20. Show predictions with `cv2.imshow`.
+
+```shell
+python demo/webcam_demo_spatiotemporal_det.py \
+    --input-video 0 \
+    --config configs/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb.py \
+    --checkpoint https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201217-16378594.pth \
+    --det-config demo/faster_rcnn_r50_fpn_2x_coco.py \
+    --det-checkpoint http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth \
+    --det-score-thr 0.9 \
+    --action-score-thr 0.5 \
+    --label-map demo/label_map_ava.txt \
+    --predict-stepsize 40 \
+    --output-fps 20 \
+    --show
+```
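To make the relation between `--predict-stepsize` and `--output-fps` from the tips above concrete, here is a minimal back-of-the-envelope sketch; the numbers are illustrative only, not measurements from this PR:

```python
# Hedged timing sketch for the webcam demo (all numbers hypothetical).
output_fps = 20        # display frames per second
predict_stepsize = 40  # new frames read per prediction step

# After warm-up, each task reads predict_stepsize fresh frames at roughly
# output_fps, so the read thread spends about this long per task:
read_duration = predict_stepsize / output_fps  # 2.0 s

# For real-time operation, human detection plus stdet inference for one
# task should finish within this budget; if inference is much faster,
# predict_stepsize can be decreased to refresh predictions more often.
print(f'inference budget per task: {read_duration:.1f} s')
```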
diff --git a/demo/webcam_demo_spatiotemporal_det.py b/demo/webcam_demo_spatiotemporal_det.py
new file mode 100644
index 0000000000..fddb2d5d8b
--- /dev/null
+++ b/demo/webcam_demo_spatiotemporal_det.py
@@ -0,0 +1,841 @@
+"""Webcam Spatio-Temporal Action Detection Demo.
+
+Some code is based on https://github.com/facebookresearch/SlowFast
+"""
+
+import argparse
+import atexit
+import copy
+import logging
+import queue
+import threading
+import time
+from abc import ABCMeta, abstractmethod
+
+import cv2
+import mmcv
+import numpy as np
+import torch
+from mmcv.runner import load_checkpoint
+
+from mmaction.models import build_detector
+from mmaction.utils import import_module_error_func
+
+try:
+    from mmdet.apis import inference_detector, init_detector
+except (ImportError, ModuleNotFoundError):
+
+    @import_module_error_func('mmdet')
+    def inference_detector(*args, **kwargs):
+        pass
+
+    @import_module_error_func('mmdet')
+    def init_detector(*args, **kwargs):
+        pass
+
+
+logging.basicConfig(level=logging.DEBUG)
+logger = logging.getLogger(__name__)
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(
+        description='MMAction2 webcam spatio-temporal detection demo')
+
+    parser.add_argument(
+        '--config',
+        default=('configs/detection/ava/'
+                 'slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb.py'),
+        help='spatio temporal detection config file path')
+    parser.add_argument(
+        '--checkpoint',
+        default=('https://download.openmmlab.com/mmaction/detection/ava/'
+                 'slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/'
+                 'slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb'
+                 '_20201217-16378594.pth'),
+        help='spatio temporal detection checkpoint file/url')
+    parser.add_argument(
+        '--action-score-thr',
+        type=float,
+        default=0.4,
+        help='the threshold of human action score')
+    parser.add_argument(
+        '--det-config',
+        default='demo/faster_rcnn_r50_fpn_2x_coco.py',
+        help='human detection config file path (from mmdet)')
+    parser.add_argument(
+        '--det-checkpoint',
+        default=('http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/'
+                 'faster_rcnn_r50_fpn_2x_coco/'
+                 'faster_rcnn_r50_fpn_2x_coco_'
+                 'bbox_mAP-0.384_20200504_210434-a5d8aa15.pth'),
+        help='human detection checkpoint file/url')
+    parser.add_argument(
+        '--det-score-thr',
+        type=float,
+        default=0.9,
+        help='the threshold of human detection score')
+    parser.add_argument(
+        '--input-video',
+        default='0',
+        type=str,
+        help='webcam id or input video file/url')
+    parser.add_argument(
+        '--label-map', default='demo/label_map_ava.txt', help='label map file')
+    parser.add_argument(
+        '--device', type=str, default='cuda:0', help='CPU/CUDA device option')
+    parser.add_argument(
+        '--output-fps',
+        default=15,
+        type=int,
+        help='the fps of demo video output')
+    parser.add_argument(
+        '--out-filename',
+        default=None,
+        type=str,
+        help='the filename of output video')
+    parser.add_argument(
+        '--show',
+        action='store_true',
+        help='Whether to show results with cv2.imshow')
+    parser.add_argument(
+        '--display-height',
+        type=int,
+        default=0,
+        help='Image height for human detector and draw frames.')
+    parser.add_argument(
+        '--display-width',
+        type=int,
+        default=0,
+        help='Image width for human detector and draw frames.')
+    parser.add_argument(
+        '--predict-stepsize',
+        default=8,
+        type=int,
+        help='give out a prediction per n frames')
+    parser.add_argument(
+        '--clip-vis-length',
+        default=8,
+        type=int,
+        help='Number of draw frames per clip.')
+
+    args = parser.parse_args()
+    return args
+
+
+class TaskInfo:
+    """Wrapper for a clip.
+
+    Transmits data across three threads.
+
+    1) Read Thread: Create task and put task into read queue. Init `frames`,
+    `processed_frames`, `img_shape`, `ratio`, `clip_vis_length`.
+    2) Main Thread: Get data from read queue, predict human bboxes and stdet
+    action labels, draw predictions and put task into display queue. Init
+    `display_bboxes`, `stdet_bboxes` and `action_preds`, update `frames`.
+    3) Display Thread: Get data from display queue, show/write frames and
+    delete task.
+    """
+
+    def __init__(self):
+        self.id = -1
+
+        # raw frames, used as human detector input, draw predictions input
+        # and output, display input
+        self.frames = None
+
+        # stdet params
+        self.processed_frames = None  # model inputs
+        self.frames_inds = None  # select frames from processed frames
+        self.img_shape = None  # model inputs, processed frame shape
+        # `action_preds` is `list[list[tuple]]`. The outer brackets indicate
+        # different bboxes and the inner brackets indicate different action
+        # results for the same bbox. tuple contains `class_name` and `score`.
+        self.action_preds = None  # stdet results
+
+        # human bboxes with the format (xmin, ymin, xmax, ymax)
+        self.display_bboxes = None  # bboxes coords for self.frames
+        self.stdet_bboxes = None  # bboxes coords for self.processed_frames
+        self.ratio = None  # processed_frames.shape[1::-1]/frames.shape[1::-1]
+
+        # for each clip, draw predictions on clip_vis_length frames
+        self.clip_vis_length = -1
+
+    def add_frames(self, idx, frames, processed_frames):
+        """Add the clip and corresponding id.
+
+        Args:
+            idx (int): the current index of the clip.
+            frames (list[ndarray]): list of images in "BGR" format.
+            processed_frames (list[ndarray]): list of resized and normalized
+                images in "BGR" format.
+        """
+        self.frames = frames
+        self.processed_frames = processed_frames
+        self.id = idx
+        self.img_shape = processed_frames[0].shape[:2]
+
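+    # Editorial note, not part of the original patch: `display_bboxes` below
+    # are in display-frame pixels; scaling x coords by ratio[0] and y coords
+    # by ratio[1] maps them into the resized model-input frames, so the
+    # stdet model receives proposals in its own coordinate space.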
+    def add_bboxes(self, display_bboxes):
+        """Add corresponding bounding boxes."""
+        self.display_bboxes = display_bboxes
+        self.stdet_bboxes = display_bboxes.clone()
+        self.stdet_bboxes[:, ::2] = self.stdet_bboxes[:, ::2] * self.ratio[0]
+        self.stdet_bboxes[:, 1::2] = self.stdet_bboxes[:, 1::2] * self.ratio[1]
+
+    def add_action_preds(self, preds):
+        """Add the corresponding action predictions."""
+        self.action_preds = preds
+
+    def get_model_inputs(self, device):
+        """Convert preprocessed images to MMAction2 STDet model inputs."""
+        cur_frames = [self.processed_frames[idx] for idx in self.frames_inds]
+        input_array = np.stack(cur_frames).transpose((3, 0, 1, 2))[np.newaxis]
+        input_tensor = torch.from_numpy(input_array).to(device)
+        return dict(
+            return_loss=False,
+            img=[input_tensor],
+            proposals=[[self.stdet_bboxes]],
+            img_metas=[[dict(img_shape=self.img_shape)]])
+
+
+class BaseHumanDetector(metaclass=ABCMeta):
+    """Base class for Human Detector.
+
+    Args:
+        device (str): CPU/CUDA device option.
+    """
+
+    def __init__(self, device):
+        self.device = torch.device(device)
+
+    @abstractmethod
+    def _do_detect(self, image):
+        """Get human bboxes with shape [n, 4].
+
+        The format of bboxes is (xmin, ymin, xmax, ymax) in pixels.
+        """
+        pass
+
+    def predict(self, task):
+        """Add keyframe bboxes to task."""
+        # keyframe idx == (clip_len * frame_interval) // 2
+        keyframe = task.frames[len(task.frames) // 2]
+
+        # call detector
+        bboxes = self._do_detect(keyframe)
+
+        # convert bboxes to torch.Tensor and move to target device
+        if isinstance(bboxes, np.ndarray):
+            bboxes = torch.from_numpy(bboxes).to(self.device)
+        elif isinstance(bboxes, torch.Tensor) and bboxes.device != self.device:
+            bboxes = bboxes.to(self.device)
+
+        # update task
+        task.add_bboxes(bboxes)
+
+        return task
+
+
+class MmdetHumanDetector(BaseHumanDetector):
+    """Wrapper for mmdetection human detector.
+
+    Args:
+        config (str): Path to mmdetection config.
+        ckpt (str): Path to mmdetection checkpoint.
+        device (str): CPU/CUDA device option.
+        score_thr (float): The threshold of human detection score.
+        person_classid (int): Choose class from detection results.
+            Default: 0. Suitable for COCO pretrained models.
+    """
+
+    def __init__(self, config, ckpt, device, score_thr, person_classid=0):
+        super().__init__(device)
+        self.model = init_detector(config, ckpt, device)
+        self.person_classid = person_classid
+        self.score_thr = score_thr
+
+    def _do_detect(self, image):
+        """Get bboxes in shape [n, 4] and values in pixels."""
+        result = inference_detector(self.model, image)[self.person_classid]
+        result = result[result[:, 4] >= self.score_thr][:, :4]
+        return result
+
+
+class StdetPredictor:
+    """Wrapper for MMAction2 spatio-temporal action models.
+
+    Args:
+        config (str): Path to stdet config.
+        ckpt (str): Path to stdet checkpoint.
+        device (str): CPU/CUDA device option.
+        score_thr (float): The threshold of human action score.
+        label_map_path (str): Path to label map file. The format for each line
+            is `{class_id}: {class_name}`.
+    """
+
+    def __init__(self, config, checkpoint, device, score_thr, label_map_path):
+        self.score_thr = score_thr
+
+        # load model
+        config.model.backbone.pretrained = None
+        model = build_detector(config.model, test_cfg=config.get('test_cfg'))
+        load_checkpoint(model, checkpoint, map_location=device)
+        model.to(device)
+        model.eval()
+        self.model = model
+        self.device = device
+
+        # init label map, aka class_id to class_name dict
+        with open(label_map_path) as f:
+            lines = f.readlines()
+        lines = [x.strip().split(': ') for x in lines]
+        self.label_map = {int(x[0]): x[1] for x in lines}
+
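+    # Editorial note, not part of the original patch: a hypothetical
+    # AVA-style label-map line such as `1: bend/bow (at the waist)` splits
+    # on ': ' into the integer class id and the human-readable class name
+    # used when drawing predictions.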
+    """
+
+    def __init__(self, config, checkpoint, device, score_thr, label_map_path):
+        self.score_thr = score_thr
+
+        # load model
+        config.model.backbone.pretrained = None
+        model = build_detector(config.model, test_cfg=config.get('test_cfg'))
+        load_checkpoint(model, checkpoint, map_location=device)
+        model.to(device)
+        model.eval()
+        self.model = model
+        self.device = device
+
+        # init label map, aka class_id to class_name dict
+        with open(label_map_path) as f:
+            lines = f.readlines()
+        lines = [x.strip().split(': ') for x in lines]
+        self.label_map = {int(x[0]): x[1] for x in lines}
+
+    def predict(self, task):
+        """Spatio-temporal Action Detection model inference."""
+        # No need to do inference if no one is in the keyframe
+        if len(task.stdet_bboxes) == 0:
+            return task
+
+        with torch.no_grad():
+            result = self.model(**task.get_model_inputs(self.device))[0]
+
+        # pack results of human detector and stdet
+        preds = []
+        for _ in range(task.stdet_bboxes.shape[0]):
+            preds.append([])
+        for class_id in range(len(result)):
+            if class_id + 1 not in self.label_map:
+                continue
+            for bbox_id in range(task.stdet_bboxes.shape[0]):
+                if result[class_id][bbox_id, 4] > self.score_thr:
+                    preds[bbox_id].append((self.label_map[class_id + 1],
+                                           result[class_id][bbox_id, 4]))
+
+        # update task
+        # `preds` is `list[list[tuple]]`. The outer brackets indicate
+        # different bboxes and the inner brackets indicate different action
+        # results for the same bbox. tuple contains `class_name` and `score`.
+        task.add_action_preds(preds)
+
+        return task
+
+
+class ClipHelper:
+    """Multithreading utils to manage the lifecycle of tasks."""
+
+    def __init__(self,
+                 config,
+                 display_height=0,
+                 display_width=0,
+                 input_video=0,
+                 predict_stepsize=40,
+                 output_fps=25,
+                 clip_vis_length=8,
+                 out_filename=None,
+                 show=True,
+                 stdet_input_shortside=256):
+        # stdet sampling strategy
+        val_pipeline = config['val_pipeline']
+        sampler = [x for x in val_pipeline
+                   if x['type'] == 'SampleAVAFrames'][0]
+        clip_len, frame_interval = sampler['clip_len'], sampler[
+            'frame_interval']
+        self.window_size = clip_len * frame_interval
+
+        # asserts
+        assert (out_filename or show), \
+            'out_filename and show cannot both be None'
+        assert clip_len % 2 == 0, 'We would like to have an even clip_len'
+        assert clip_vis_length <= predict_stepsize
+        assert 0 < predict_stepsize <= self.window_size
+
+        # source params
+        try:
+            self.cap = cv2.VideoCapture(int(input_video))
+            self.webcam = True
+        except ValueError:
+            self.cap = cv2.VideoCapture(input_video)
+            self.webcam = False
+        assert self.cap.isOpened()
+
+        # stdet input preprocessing params
+        h = int(self.cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
+        w = int(self.cap.get(cv2.CAP_PROP_FRAME_WIDTH))
+        self.stdet_input_size = mmcv.rescale_size(
+            (w, h), (stdet_input_shortside, np.Inf))
+        img_norm_cfg = config['img_norm_cfg']
+        if 'to_rgb' not in img_norm_cfg and 'to_bgr' in img_norm_cfg:
+            to_bgr = img_norm_cfg.pop('to_bgr')
+            img_norm_cfg['to_rgb'] = to_bgr
+        img_norm_cfg['mean'] = np.array(img_norm_cfg['mean'])
+        img_norm_cfg['std'] = np.array(img_norm_cfg['std'])
+        self.img_norm_cfg = img_norm_cfg
+
+        # task init params
+        self.clip_vis_length = clip_vis_length
+        self.predict_stepsize = predict_stepsize
+        self.buffer_size = self.window_size - self.predict_stepsize
+        frame_start = self.window_size // 2 - (clip_len // 2) * frame_interval
+        self.frames_inds = [
+            frame_start + frame_interval * i for i in range(clip_len)
+        ]
+        self.buffer = []
+        self.processed_buffer = []
+
+        # output/display params
+        if 
display_height > 0 and display_width > 0:
+            self.display_size = (display_width, display_height)
+        elif display_height > 0 or display_width > 0:
+            self.display_size = mmcv.rescale_size(
+                (w, h), (np.Inf, max(display_height, display_width)))
+        else:
+            self.display_size = (w, h)
+        self.ratio = tuple(
+            n / o for n, o in zip(self.stdet_input_size, self.display_size))
+        if output_fps <= 0:
+            self.output_fps = int(self.cap.get(cv2.CAP_PROP_FPS))
+        else:
+            self.output_fps = output_fps
+        self.show = show
+        self.video_writer = None
+        if out_filename is not None:
+            self.video_writer = self.get_output_video_writer(out_filename)
+        display_start_idx = self.window_size // 2 - self.predict_stepsize // 2
+        self.display_inds = [
+            display_start_idx + i for i in range(self.predict_stepsize)
+        ]
+
+        # display multi-threading params
+        self.display_id = -1  # task.id for display queue
+        self.display_queue = {}
+        self.display_lock = threading.Lock()
+        self.output_lock = threading.Lock()
+
+        # read multi-threading params
+        self.read_id = -1  # task.id for read queue
+        self.read_id_lock = threading.Lock()
+        self.read_queue = queue.Queue()
+        self.read_lock = threading.Lock()
+        self.not_end = True  # cap.read() flag
+
+        # program state
+        self.stopped = False
+
+        atexit.register(self.clean)
+
+    def read_fn(self):
+        """Main function for read thread.
+
+        Contains three steps:
+
+        1) Read and preprocess (resize + norm) frames from source.
+        2) Create task with the frames from the previous step and the buffer.
+        3) Put task into read queue.
+        """
+        was_read = True
+        start_time = time.time()
+        while was_read and not self.stopped:
+            # init task
+            task = TaskInfo()
+            task.clip_vis_length = self.clip_vis_length
+            task.frames_inds = self.frames_inds
+            task.ratio = self.ratio
+
+            # read buffer
+            frames = []
+            processed_frames = []
+            if len(self.buffer) != 0:
+                frames = self.buffer
+            if len(self.processed_buffer) != 0:
+                processed_frames = self.processed_buffer
+
+            # read and preprocess frames from source and update task
+            with self.read_lock:
+                before_read = time.time()
+                read_frame_cnt = self.window_size - len(frames)
+                while was_read and len(frames) < self.window_size:
+                    was_read, frame = self.cap.read()
+                    if not self.webcam:
+                        # Reading frames too fast may lead to unexpected
+                        # performance degradation. If you have enough
+                        # resources, this line could be commented out.
+                        time.sleep(1 / self.output_fps)
+                    if was_read:
+                        frames.append(mmcv.imresize(frame, self.display_size))
+                        processed_frame = mmcv.imresize(
+                            frame, self.stdet_input_size).astype(np.float32)
+                        _ = mmcv.imnormalize_(processed_frame,
+                                              **self.img_norm_cfg)
+                        processed_frames.append(processed_frame)
+            task.add_frames(self.read_id + 1, frames, processed_frames)
+
+            # update buffer
+            if was_read:
+                self.buffer = frames[-self.buffer_size:]
+                self.processed_buffer = processed_frames[-self.buffer_size:]
+
+            # update read state
+            with self.read_id_lock:
+                self.read_id += 1
+                self.not_end = was_read
+
+            self.read_queue.put((was_read, copy.deepcopy(task)))
+            cur_time = time.time()
+            logger.debug(
+                f'Read thread: {1000*(cur_time - start_time):.0f} ms, '
+                f'{read_frame_cnt / (cur_time - before_read):.0f} fps')
+            start_time = cur_time
+
+    def display_fn(self):
+        """Main function for display thread.
+
+        Read data from display queue and display predictions.
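+
+        The indexing below guarantees that every source frame is displayed
+        exactly once: the first task also shows the frames before the
+        display window, and the last task shows the remaining tail frames.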
+        """
+        start_time = time.time()
+        while not self.stopped:
+            # get the state of the read thread
+            with self.read_id_lock:
+                read_id = self.read_id
+                not_end = self.not_end
+
+            with self.display_lock:
+                # If the video has ended and all frames have been displayed.
+                if not not_end and self.display_id == read_id:
+                    break
+
+                # If the next task is not available, wait.
+                if (len(self.display_queue) == 0 or
+                        self.display_queue.get(self.display_id + 1) is None):
+                    time.sleep(0.02)
+                    continue
+
+                # get display data and update state
+                self.display_id += 1
+                was_read, task = self.display_queue[self.display_id]
+                del self.display_queue[self.display_id]
+                display_id = self.display_id
+
+            # display predictions
+            with self.output_lock:
+                if was_read and task.id == 0:
+                    # the first task
+                    cur_display_inds = range(self.display_inds[-1] + 1)
+                elif not was_read:
+                    # the last task
+                    cur_display_inds = range(self.display_inds[0],
+                                             len(task.frames))
+                else:
+                    cur_display_inds = self.display_inds
+
+                for frame_id in cur_display_inds:
+                    frame = task.frames[frame_id]
+                    if self.show:
+                        cv2.imshow('Demo', frame)
+                        cv2.waitKey(int(1000 / self.output_fps))
+                    if self.video_writer:
+                        self.video_writer.write(frame)
+
+            cur_time = time.time()
+            logger.debug(
+                f'Display thread: {1000*(cur_time - start_time):.0f} ms, '
+                f'read id {read_id}, display id {display_id}')
+            start_time = cur_time
+
+    def __iter__(self):
+        return self
+
+    def __next__(self):
+        """Get data from read queue.
+
+        This function is part of the main thread.
+        """
+        if self.read_queue.qsize() == 0:
+            time.sleep(0.02)
+            return not self.stopped, None
+        else:
+            was_read, task = self.read_queue.get()
+            if not was_read:
+                # If we reach the end of the video, there aren't enough
+                # frames in task.processed_frames, so there is no need to
+                # run model inference or draw predictions. Put the task
+                # into the display queue directly.
+                with self.read_id_lock:
+                    read_id = self.read_id
+                with self.display_lock:
+                    self.display_queue[read_id] = was_read, copy.deepcopy(task)
+
+                # the main thread doesn't need to handle this task again
+                task = None
+            return was_read, task
+
+    def start(self):
+        """Start read thread and display thread."""
+        self.read_thread = threading.Thread(
+            target=self.read_fn, args=(), name='VidRead-Thread', daemon=True)
+        self.read_thread.start()
+        self.display_thread = threading.Thread(
+            target=self.display_fn,
+            args=(),
+            name='VidDisplay-Thread',
+            daemon=True)
+        self.display_thread.start()
+
+        return self
+
+    def clean(self):
+        """Close all threads and release all resources."""
+        self.stopped = True
+        self.read_lock.acquire()
+        self.cap.release()
+        self.read_lock.release()
+        self.output_lock.acquire()
+        cv2.destroyAllWindows()
+        if self.video_writer:
+            self.video_writer.release()
+        self.output_lock.release()
+
+    def join(self):
+        """Wait for the read and display threads to finish."""
+        self.read_thread.join()
+        self.display_thread.join()
+
+    def display(self, task):
+        """Add the visualized task to the display queue.
+
+        Args:
+            task (TaskInfo object): task object that contains the necessary
+                information for prediction visualization.
+        """
+        with self.display_lock:
+            self.display_queue[task.id] = (True, task)
+
+    def get_output_video_writer(self, path):
+        """Return a video writer object.
+
+        Args:
+            path (str): path to the output video file.
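+
+        Returns:
+            cv2.VideoWriter: an opencv video writer for the output video,
+                encoded with the 'mp4v' fourcc at `self.output_fps`.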
+        """
+        return cv2.VideoWriter(
+            filename=path,
+            fourcc=cv2.VideoWriter_fourcc(*'mp4v'),
+            fps=float(self.output_fps),
+            frameSize=self.display_size,
+            isColor=True)
+
+
+class BaseVisualizer(metaclass=ABCMeta):
+    """Base class for visualization tools."""
+
+    def __init__(self, max_labels_per_bbox):
+        self.max_labels_per_bbox = max_labels_per_bbox
+
+    def draw_predictions(self, task):
+        """Visualize stdet predictions on raw frames."""
+        # read bboxes from task
+        bboxes = task.display_bboxes.cpu().numpy()
+
+        # draw predictions and update task
+        keyframe_idx = len(task.frames) // 2
+        draw_range = [
+            keyframe_idx - task.clip_vis_length // 2,
+            keyframe_idx + (task.clip_vis_length - 1) // 2
+        ]
+        assert draw_range[0] >= 0 and draw_range[1] < len(task.frames)
+        task.frames = self.draw_clip_range(task.frames, task.action_preds,
+                                           bboxes, draw_range)
+
+        return task
+
+    def draw_clip_range(self, frames, preds, bboxes, draw_range):
+        """Draw a range of frames with the same bboxes and predictions."""
+        # no predictions to be drawn
+        if bboxes is None or len(bboxes) == 0:
+            return frames
+
+        # draw frames in `draw_range`
+        left_frames = frames[:draw_range[0]]
+        right_frames = frames[draw_range[1] + 1:]
+        draw_frames = frames[draw_range[0]:draw_range[1] + 1]
+
+        # get labels (texts) and draw predictions
+        draw_frames = [
+            self.draw_one_image(frame, bboxes, preds) for frame in draw_frames
+        ]
+
+        return list(left_frames) + draw_frames + list(right_frames)
+
+    @abstractmethod
+    def draw_one_image(self, frame, bboxes, preds):
+        """Draw bboxes and corresponding texts on one frame."""
+        pass
+
+    def abbrev(self, name):
+        """Get the abbreviation of label name:
+
+        'take (an object) from (a person)' -> 'take ... from ...'
+        """
+        while name.find('(') != -1:
+            st, ed = name.find('('), name.find(')')
+            name = name[:st] + '...' + name[ed + 1:]
+        return name
+
+
+class DefaultVisualizer(BaseVisualizer):
+    """Tools to visualize predictions.
+
+    Args:
+        max_labels_per_bbox (int): Max number of labels to visualize for a
+            person box. Default: 5.
+        plate (str): The color plate used for visualization. Two recommended
+            plates are blue plate `03045e-023e8a-0077b6-0096c7-00b4d8-48cae4`
+            and green plate `004b23-006400-007200-008000-38b000-70e000`. These
+            plates are generated by https://coolors.co/.
+            Default: '03045e-023e8a-0077b6-0096c7-00b4d8-48cae4'.
+        text_fontface (int): Fontface from OpenCV for texts.
+            Default: cv2.FONT_HERSHEY_DUPLEX.
+        text_fontscale (float): Fontscale from OpenCV for texts.
+            Default: 0.5.
+        text_fontcolor (tuple): Font color from OpenCV for texts.
+            Default: (255, 255, 255).
+        text_thickness (int): Thickness from OpenCV for texts.
+            Default: 1.
+        text_linetype (int): Linetype from OpenCV for texts.
+            Default: 1.
+    """
+
+    def __init__(
+            self,
+            max_labels_per_bbox=5,
+            plate='03045e-023e8a-0077b6-0096c7-00b4d8-48cae4',
+            text_fontface=cv2.FONT_HERSHEY_DUPLEX,
+            text_fontscale=0.5,
+            text_fontcolor=(255, 255, 255),  # white
+            text_thickness=1,
+            text_linetype=1):
+        super().__init__(max_labels_per_bbox=max_labels_per_bbox)
+        self.text_fontface = text_fontface
+        self.text_fontscale = text_fontscale
+        self.text_fontcolor = text_fontcolor
+        self.text_thickness = text_thickness
+        self.text_linetype = text_linetype
+
+        def hex2color(h):
+            """Convert the 6-digit hex string to a tuple of 3 int values (RGB)."""
+            return (int(h[:2], 16), int(h[2:4], 16), int(h[4:], 16))
+
+        plate = plate.split('-')
+        self.plate = [hex2color(h) for h in plate]
+
+    def draw_one_image(self, frame, bboxes, preds):
+        """Draw predictions on one image."""
+        for bbox, pred in zip(bboxes, preds):
+            # draw bbox
+            box = bbox.astype(np.int64)
+            st, ed = tuple(box[:2]), tuple(box[2:])
+            cv2.rectangle(frame, st, ed, (0, 0, 255), 2)
+
+            # draw texts
+            for k, (label, score) in enumerate(pred):
+                if k >= self.max_labels_per_bbox:
+                    break
+                text = f'{self.abbrev(label)}: {score:.4f}'
+                location = (0 + st[0], 18 + k * 18 + st[1])
+                textsize = cv2.getTextSize(text, self.text_fontface,
+                                           self.text_fontscale,
+                                           self.text_thickness)[0]
+                textwidth = textsize[0]
+                diag0 = (location[0] + textwidth, location[1] - 14)
+                diag1 = (location[0], location[1] + 2)
+                cv2.rectangle(frame, diag0, diag1, self.plate[k + 1], -1)
+                cv2.putText(frame, text, location, self.text_fontface,
+                            self.text_fontscale, self.text_fontcolor,
+                            self.text_thickness, self.text_linetype)
+
+        return frame
+
+
+def main(args):
+    # init human detector
+    human_detector = MmdetHumanDetector(args.det_config, args.det_checkpoint,
+                                        args.device, args.det_score_thr)
+
+    # init action detector
+    config = mmcv.Config.fromfile(args.config)
+    try:
+        # In our spatiotemporal detection demo, different actions should have
+        # the same number of bboxes.
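+        # Setting `action_thr` to 0 keeps bbox predictions for every action
+        # class; low-confidence results are filtered out later in
+        # `StdetPredictor.predict` according to `--action-score-thr`.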
+        config['model']['test_cfg']['rcnn']['action_thr'] = .0
+    except KeyError:
+        pass
+    stdet_predictor = StdetPredictor(
+        config=config,
+        checkpoint=args.checkpoint,
+        device=args.device,
+        score_thr=args.action_score_thr,
+        label_map_path=args.label_map)
+
+    # init clip helper
+    clip_helper = ClipHelper(
+        config=config,
+        display_height=args.display_height,
+        display_width=args.display_width,
+        input_video=args.input_video,
+        predict_stepsize=args.predict_stepsize,
+        output_fps=args.output_fps,
+        clip_vis_length=args.clip_vis_length,
+        out_filename=args.out_filename,
+        show=args.show)
+
+    # init visualizer
+    vis = DefaultVisualizer()
+
+    # start read and display thread
+    clip_helper.start()
+
+    try:
+        # The main function of the main thread contains:
+        # 1) get data from read queue
+        # 2) get human bboxes and stdet predictions
+        # 3) draw stdet predictions and update task
+        # 4) put task into display queue
+        for able_to_read, task in clip_helper:
+            # get data from read queue
+
+            if not able_to_read:
+                # read thread is dead and all tasks are processed
+                break
+
+            if task is None:
+                # when there is no data in the read queue, wait
+                time.sleep(0.01)
+                continue
+
+            inference_start = time.time()
+
+            # get human bboxes
+            human_detector.predict(task)
+
+            # get stdet predictions
+            stdet_predictor.predict(task)
+
+            # draw stdet predictions in raw frames
+            vis.draw_predictions(task)
+            logger.info(f'Stdet Results: {task.action_preds}')
+
+            # add draw frames to display queue
+            clip_helper.display(task)
+
+            logger.debug('Main thread inference time '
+                         f'{1000*(time.time() - inference_start):.0f} ms')
+
+        # wait for display thread
+        clip_helper.join()
+    except KeyboardInterrupt:
+        pass
+    finally:
+        # close read & display thread, release all resources
+        clip_helper.clean()
+
+
+if __name__ == '__main__':
+    main(parse_args())
diff --git a/docs/changelog.md b/docs/changelog.md
index 68b1e61992..3a738a0516 100644
--- a/docs/changelog.md
+++ b/docs/changelog.md
@@ -6,6 +6,8 @@

 **New Features**

+- Support Webcam Demo for Spatio-temporal Action Detection Models ([#795](https://github.com/open-mmlab/mmaction2/pull/795))
+
 **Improvements**

 - Add softmax option for pytorch2onnx tool ([#781](https://github.com/open-mmlab/mmaction2/pull/781))
diff --git a/docs_zh_CN/demo.md b/docs_zh_CN/demo.md
index 876943cde7..703df0cdc1 100644
--- a/docs_zh_CN/demo.md
+++ b/docs_zh_CN/demo.md
@@ -140,8 +140,8 @@ python demo/demo_spatiotemporal_det.py --video ${VIDEO_FILE} \
 - `SPATIOTEMPORAL_ACTION_DETECTION_CHECKPOINT`: 时空检测模型权重文件路径。
 - `HUMAN_DETECTION_CONFIG_FILE`: 人体检测配置文件路径。
 - `HUMAN_DETECTION_CHECKPOINT`: 人体检测模型权重文件路径。
-- `HUMAN_DETECTION_SCORE_THRE`: 人体检测分数阈值:默认为 0.9。
-- `ACTION_DETECTION_SCORE_THRESHOLD`: 动作检测分数阈值:默认为 0.5。
+- `HUMAN_DETECTION_SCORE_THRE`: 人体检测分数阈值,默认为 0.9。
+- `ACTION_DETECTION_SCORE_THRESHOLD`: 动作检测分数阈值,默认为 0.5。
 - `LABEL_MAP`: 所使用的标签映射文件,默认为 `demo/label_map_ava.txt`。
 - `DEVICE`: 指定脚本运行设备,支持 cuda 设备(如 `cuda:0`)或 cpu(`cpu`)。默认为 `cuda:0`。
 - `OUTPUT_FILENAME`: 输出视频的路径,默认为 `demo/stdet_demo.mp4`。
@@ -317,3 +317,82 @@ python demo/long_video_demo.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${VIDEO_FILE} $
 python demo/long_video_demo.py configs/recognition/i3d/i3d_r50_video_inference_32x2x1_100e_kinetics400_rgb.py \
   checkpoints/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth PATH_TO_LONG_VIDEO demo/label_map_k400.txt PATH_TO_SAVED_VIDEO \
 ```
+
+## 基于网络摄像头的实时时空动作检测
+
+MMAction2 提供本脚本实现基于网络摄像头的实时时空动作检测。
+
+```shell
+python demo/webcam_demo_spatiotemporal_det.py \
+    [--config ${SPATIOTEMPORAL_ACTION_DETECTION_CONFIG_FILE}] \
+    
[--checkpoint ${SPATIOTEMPORAL_ACTION_DETECTION_CHECKPOINT}] \ + [--action-score-thr ${ACTION_DETECTION_SCORE_THRESHOLD}] \ + [--det-config ${HUMAN_DETECTION_CONFIG_FILE}] \ + [--det-checkpoint ${HUMAN_DETECTION_CHECKPOINT}] \ + [--det-score-thr ${HUMAN_DETECTION_SCORE_THRESHOLD}] \ + [--input-video] ${INPUT_VIDEO} \ + [--label-map ${LABEL_MAP}] \ + [--device ${DEVICE}] \ + [--output-fps ${OUTPUT_FPS}] \ + [--out-filename ${OUTPUT_FILENAME}] \ + [--show] \ + [--display-height] ${DISPLAY_HEIGHT} \ + [--display-width] ${DISPLAY_WIDTH} \ + [--predict-stepsize ${PREDICT_STEPSIZE}] \ + [--clip-vis-length] ${CLIP_VIS_LENGTH} +``` + +可选参数: + +- `SPATIOTEMPORAL_ACTION_DETECTION_CONFIG_FILE`: 时空检测配置文件路径。 +- `SPATIOTEMPORAL_ACTION_DETECTION_CHECKPOINT`: 时空检测模型权重文件路径。 +- `ACTION_DETECTION_SCORE_THRESHOLD`: 动作检测分数阈值,默认为 0.4。 +- `HUMAN_DETECTION_CONFIG_FILE`: 人体检测配置文件路径。 +- `HUMAN_DETECTION_CHECKPOINT`: 人体检测模型权重文件路径。 +- `HUMAN_DETECTION_SCORE_THRE`: 人体检测分数阈值,默认为 0.9。 +- `INPUT_VIDEO`: 网络摄像头编号或本地视频文件路径,默认为 `0`。 +- `LABEL_MAP`: 所使用的标签映射文件,默认为 `demo/label_map_ava.txt`。 +- `DEVICE`: 指定脚本运行设备,支持 cuda 设备(如 `cuda:0`)或 cpu(`cpu`),默认为 `cuda:0`。 +- `OUTPUT_FPS`: 输出视频的帧率,默认为 15。 +- `OUTPUT_FILENAME`: 输出视频的路径,默认为 `None`。 +- `--show`: 是否通过 `cv2.imshow` 展示预测结果。 +- `DISPLAY_HEIGHT`: 输出结果图像高度,默认为 0。 +- `DISPLAY_WIDTH`: 输出结果图像宽度,默认为 0。若 `DISPLAY_HEIGHT <= 0 and DISPLAY_WIDTH <= 0`,则表示输出图像形状与输入视频形状相同。 +- `PREDICT_STEPSIZE`: 每 N 帧进行一次预测(以控制计算资源),默认为 8。 +- `CLIP_VIS_LENGTH`: 预测结果可视化持续帧数,即每次预测结果将可视化到 `CLIP_VIS_LENGTH` 帧中,默认为 8。 + +小技巧: + +- 如何设置 `--output-fps` 的数值? + + - `--output-fps` 建议设置为视频读取线程的帧率。 + - 视频读取线程帧率已通过日志输出,格式为 `DEBUG:__main__:Read Thread: {duration} ms, {fps} fps`。 + +- 如何设置 `--predict-stepsize` 的数值? + + - 该参数选择与模型选型有关。 + - 模型输入构建时间(视频读取线程)应大于等于模型推理时间(主线程)。 + - 模型输入构建时间与模型推理时间均已通过日志输出。 + - `--predict-stepsize` 数值越大,模型输入构建时间越长。 + - 可降低 `--predict-stepsize` 数值增加模型推理频率,从而充分利用计算资源。 + +示例: + +以下示例假设用户的当前目录为 $MMACTION2,并已经将所需的模型权重文件下载至目录 checkpoints/ 下,用户也可以使用所提供的 URL 来直接加载模型权重,文件将会被默认下载至 $HOME/.cache/torch/checkpoints。 + +1. 
使用 Faster RCNN 作为人体检测器,SlowOnly-8x8-R101 作为动作检测器,每 8 帧进行一次预测,设置输出视频的帧率为 20,并通过 `cv2.imshow` 展示预测结果。 + +```shell +python demo/webcam_demo_spatiotemporal_det.py \ + --input-video 0 \ + --config configs/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb.py \ + --checkpoint https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201217-16378594.pth \ + --det-config demo/faster_rcnn_r50_fpn_2x_coco.py \ + --det-checkpoint http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth \ + --det-score-thr 0.9 \ + --action-score-thr 0.5 \ + --label-map demo/label_map_ava.txt \ + --predict-stepsize 40 \ + --output-fps 20 \ + --show +``` From a7a44c1f315f69eab7b96fe792e40e7425f941f1 Mon Sep 17 00:00:00 2001 From: congee <35596075+congee524@users.noreply.github.com> Date: Wed, 21 Apr 2021 12:05:16 +0800 Subject: [PATCH 050/414] [Docs] change tag into comment (#825) --- configs/detection/ava/README.md | 6 +++--- configs/detection/ava/README_zh-CN.md | 6 +++--- configs/detection/lfb/README.md | 2 +- configs/detection/lfb/README_zh-CN.md | 2 +- configs/localization/bmn/README.md | 4 ++-- configs/localization/bmn/README_zh-CN.md | 4 ++-- configs/localization/bsn/README.md | 2 +- configs/localization/bsn/README_zh-CN.md | 2 +- configs/localization/ssn/README.md | 2 +- configs/localization/ssn/README_zh-CN.md | 2 +- configs/recognition/c3d/README.md | 2 +- configs/recognition/c3d/README_zh-CN.md | 2 +- configs/recognition/csn/README.md | 4 ++-- configs/recognition/csn/README_zh-CN.md | 4 ++-- configs/recognition/i3d/README.md | 4 ++-- configs/recognition/i3d/README_zh-CN.md | 4 ++-- configs/recognition/omnisource/README.md | 2 +- configs/recognition/omnisource/README_zh-CN.md | 2 +- configs/recognition/r2plus1d/README.md | 2 +- configs/recognition/r2plus1d/README_zh-CN.md | 2 +- configs/recognition/slowfast/README.md | 2 +- configs/recognition/slowfast/README_zh-CN.md | 2 +- configs/recognition/slowonly/README.md | 2 +- configs/recognition/slowonly/README_zh-CN.md | 2 +- configs/recognition/tanet/README.md | 2 +- configs/recognition/tanet/README_zh-CN.md | 2 +- configs/recognition/tin/README.md | 2 +- configs/recognition/tin/README_zh-CN.md | 2 +- configs/recognition/tpn/README.md | 2 +- configs/recognition/trn/README.md | 2 +- configs/recognition/trn/README_zh-CN.md | 2 +- configs/recognition/tsm/README.md | 4 ++-- configs/recognition/tsm/README_zh-CN.md | 4 ++-- configs/recognition/tsn/README.md | 2 +- configs/recognition/tsn/README_zh-CN.md | 2 +- configs/recognition/x3d/README.md | 2 +- configs/recognition/x3d/README_zh-CN.md | 2 +- configs/recognition_audio/resnet/README.md | 2 +- docs/stat.py | 12 +++++++----- docs_zh_CN/stat.py | 12 +++++++----- tools/data/activitynet/README.md | 2 +- tools/data/activitynet/README_zh-CN.md | 2 +- tools/data/ava/README.md | 6 +++--- tools/data/ava/README_zh-CN.md | 2 +- tools/data/gym/README.md | 2 +- tools/data/gym/README_zh-CN.md | 2 +- tools/data/hmdb51/README.md | 2 +- tools/data/hmdb51/README_zh-CN.md | 2 +- tools/data/hvu/README.md | 2 +- tools/data/hvu/README_zh-CN.md | 2 +- tools/data/jester/README.md | 2 +- tools/data/jester/README_zh-CN.md | 2 +- tools/data/jhmdb/README.md | 2 +- tools/data/jhmdb/README_zh-CN.md | 2 +- tools/data/kinetics/README.md | 2 +- tools/data/kinetics/README_zh-CN.md | 2 +- tools/data/mit/README.md | 2 +- 
tools/data/mit/README_zh-CN.md                |  2 +-
 tools/data/mmit/README.md                     |  2 +-
 tools/data/mmit/README_zh-CN.md               |  2 +-
 tools/data/omnisource/README.md               |  2 +-
 tools/data/omnisource/README_zh-CN.md         |  2 +-
 tools/data/sthv1/README.md                    |  2 +-
 tools/data/sthv2/README.md                    |  2 +-
 tools/data/sthv2/README_zh-CN.md              |  2 +-
 tools/data/thumos14/README.md                 |  2 +-
 tools/data/thumos14/README_zh-CN.md           |  2 +-
 tools/data/ucf101/README.md                   |  2 +-
 tools/data/ucf101_24/README.md                |  2 +-
 69 files changed, 95 insertions(+), 91 deletions(-)

diff --git a/configs/detection/ava/README.md b/configs/detection/ava/README.md
index 45ebcfad42..ea44eb66c6 100644
--- a/configs/detection/ava/README.md
+++ b/configs/detection/ava/README.md
@@ -6,7 +6,7 @@

 ## Introduction

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @inproceedings{gu2018ava,
@@ -18,7 +18,7 @@
 }
 ```

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @article{duan2020omni,
@@ -29,7 +29,7 @@
 }
 ```

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @inproceedings{feichtenhofer2019slowfast,
diff --git a/configs/detection/ava/README_zh-CN.md b/configs/detection/ava/README_zh-CN.md
index b3f049226b..79e0b14a10 100644
--- a/configs/detection/ava/README_zh-CN.md
+++ b/configs/detection/ava/README_zh-CN.md
@@ -6,7 +6,7 @@

 ## 简介

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @inproceedings{gu2018ava,
@@ -18,7 +18,7 @@
 }
 ```

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @article{duan2020omni,
@@ -29,7 +29,7 @@
 }
 ```

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @inproceedings{feichtenhofer2019slowfast,
diff --git a/configs/detection/lfb/README.md b/configs/detection/lfb/README.md
index 1d90b4b0af..d086343f4d 100644
--- a/configs/detection/lfb/README.md
+++ b/configs/detection/lfb/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @inproceedings{wu2019long,
diff --git a/configs/detection/lfb/README_zh-CN.md b/configs/detection/lfb/README_zh-CN.md
index 6df55b0d6b..4c90a66bd5 100644
--- a/configs/detection/lfb/README_zh-CN.md
+++ b/configs/detection/lfb/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @inproceedings{wu2019long,
diff --git a/configs/localization/bmn/README.md b/configs/localization/bmn/README.md
index 346c9685e4..7e25d192b7 100644
--- a/configs/localization/bmn/README.md
+++ b/configs/localization/bmn/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @inproceedings{lin2019bmn,
@@ -14,7 +14,7 @@
 }
 ```

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @article{zhao2017cuhk,
diff --git a/configs/localization/bmn/README_zh-CN.md b/configs/localization/bmn/README_zh-CN.md
index d735740287..3778f390fa 100644
--- a/configs/localization/bmn/README_zh-CN.md
+++ b/configs/localization/bmn/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @inproceedings{lin2019bmn,
@@ -14,7 +14,7 @@
 }
 ```

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @article{zhao2017cuhk,
diff --git a/configs/localization/bsn/README.md b/configs/localization/bsn/README.md
index 16e38710b6..c274aee77c 100644
--- a/configs/localization/bsn/README.md
+++ b/configs/localization/bsn/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @inproceedings{lin2018bsn,
diff --git a/configs/localization/bsn/README_zh-CN.md b/configs/localization/bsn/README_zh-CN.md
index 5cd2709567..5ec8362292 100644
--- a/configs/localization/bsn/README_zh-CN.md
+++ b/configs/localization/bsn/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @inproceedings{lin2018bsn,
diff --git a/configs/localization/ssn/README.md b/configs/localization/ssn/README.md
index 6da1152842..d73600626a 100644
--- a/configs/localization/ssn/README.md
+++ b/configs/localization/ssn/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
@InProceedings{Zhao_2017_ICCV,
diff --git a/configs/localization/ssn/README_zh-CN.md b/configs/localization/ssn/README_zh-CN.md
index 98da514984..d1ec5bbcee 100644
--- a/configs/localization/ssn/README_zh-CN.md
+++ b/configs/localization/ssn/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @InProceedings{Zhao_2017_ICCV,
diff --git a/configs/recognition/c3d/README.md b/configs/recognition/c3d/README.md
index 22d152b046..39829a1408 100644
--- a/configs/recognition/c3d/README.md
+++ b/configs/recognition/c3d/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @ARTICLE{2014arXiv1412.0767T,
diff --git a/configs/recognition/c3d/README_zh-CN.md b/configs/recognition/c3d/README_zh-CN.md
index 62e5fb326f..c4f02c16f2 100644
--- a/configs/recognition/c3d/README_zh-CN.md
+++ b/configs/recognition/c3d/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @ARTICLE{2014arXiv1412.0767T,
diff --git a/configs/recognition/csn/README.md b/configs/recognition/csn/README.md
index 2e0e4f905c..593e8c023a 100644
--- a/configs/recognition/csn/README.md
+++ b/configs/recognition/csn/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @inproceedings{inproceedings,
@@ -15,7 +15,7 @@ doi = {10.1109/ICCV.2019.00565}
 }
 ```

-[OTHERS]
+<!-- [OTHERS] -->

 ```BibTeX
 @inproceedings{ghadiyaram2019large,
diff --git a/configs/recognition/csn/README_zh-CN.md b/configs/recognition/csn/README_zh-CN.md
index 8295b8dbe3..78aa1bc6b2 100644
--- a/configs/recognition/csn/README_zh-CN.md
+++ b/configs/recognition/csn/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @inproceedings{inproceedings,
@@ -15,7 +15,7 @@ doi = {10.1109/ICCV.2019.00565}
 }
 ```

-[OTHERS]
+<!-- [OTHERS] -->

 ```BibTeX
 @inproceedings{ghadiyaram2019large,
diff --git a/configs/recognition/i3d/README.md b/configs/recognition/i3d/README.md
index e9bd7f7363..e477ba2af4 100644
--- a/configs/recognition/i3d/README.md
+++ b/configs/recognition/i3d/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @inproceedings{inproceedings,
@@ -15,7 +15,7 @@
 }
 ```

-[BACKBONE]
+<!-- [BACKBONE] -->

 ```BibTeX
 @article{NonLocal2018,
diff --git a/configs/recognition/i3d/README_zh-CN.md b/configs/recognition/i3d/README_zh-CN.md
index 684a4a12f9..d63a404df0 100644
--- a/configs/recognition/i3d/README_zh-CN.md
+++ b/configs/recognition/i3d/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @inproceedings{inproceedings,
@@ -15,7 +15,7 @@
 }
 ```

-[BACKBONE]
+<!-- [BACKBONE] -->

 ```BibTeX
 @article{NonLocal2018,
diff --git a/configs/recognition/omnisource/README.md b/configs/recognition/omnisource/README.md
index 39197b96d9..281b3818d8 100644
--- a/configs/recognition/omnisource/README.md
+++ b/configs/recognition/omnisource/README.md
@@ -58,7 +58,7 @@ We also list the benchmark in the original paper which run on Kinetics-400 for c

 If you find OmniSource useful for your research, please consider citing the paper using the following BibTeX entry.

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @article{duan2020omni,
diff --git a/configs/recognition/omnisource/README_zh-CN.md b/configs/recognition/omnisource/README_zh-CN.md
index 2fb8b0e1c6..104efaa8d4 100644
--- a/configs/recognition/omnisource/README_zh-CN.md
+++ b/configs/recognition/omnisource/README_zh-CN.md
@@ -58,7 +58,7 @@ MMAction2 在公开的数据集上进行了 OmniSource 框架的基准测试,

 如果 OmniSource 项目对您的研究有所帮助,请使用以下 BibTex 项进行引用:

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @article{duan2020omni,
diff --git a/configs/recognition/r2plus1d/README.md b/configs/recognition/r2plus1d/README.md
index 36d92adb39..14c6a3b22e 100644
--- a/configs/recognition/r2plus1d/README.md
+++ b/configs/recognition/r2plus1d/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @inproceedings{tran2018closer,
diff --git a/configs/recognition/r2plus1d/README_zh-CN.md b/configs/recognition/r2plus1d/README_zh-CN.md
index 0882c30589..b3814af0b2 100644
--- a/configs/recognition/r2plus1d/README_zh-CN.md
+++ b/configs/recognition/r2plus1d/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @inproceedings{tran2018closer,
diff --git a/configs/recognition/slowfast/README.md b/configs/recognition/slowfast/README.md
index ba00f13269..4ea16f92de 100644
--- a/configs/recognition/slowfast/README.md
+++ b/configs/recognition/slowfast/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @inproceedings{feichtenhofer2019slowfast,
diff --git a/configs/recognition/slowfast/README_zh-CN.md b/configs/recognition/slowfast/README_zh-CN.md
index a215fa55b9..f53fac0232 100644
--- a/configs/recognition/slowfast/README_zh-CN.md
+++ b/configs/recognition/slowfast/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @inproceedings{feichtenhofer2019slowfast,
diff --git a/configs/recognition/slowonly/README.md b/configs/recognition/slowonly/README.md
index f441f51875..9d297eb4b9 100644
--- a/configs/recognition/slowonly/README.md
+++ b/configs/recognition/slowonly/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @inproceedings{feichtenhofer2019slowfast,
diff --git a/configs/recognition/slowonly/README_zh-CN.md b/configs/recognition/slowonly/README_zh-CN.md
index ece5748ba8..61b0130199 100644
--- a/configs/recognition/slowonly/README_zh-CN.md
+++ b/configs/recognition/slowonly/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @inproceedings{feichtenhofer2019slowfast,
diff --git a/configs/recognition/tanet/README.md b/configs/recognition/tanet/README.md
index 04709d583a..85a8a81036 100644
--- a/configs/recognition/tanet/README.md
+++ b/configs/recognition/tanet/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```latex
 @article{liu2020tam,
diff --git a/configs/recognition/tanet/README_zh-CN.md b/configs/recognition/tanet/README_zh-CN.md
index 2925069b12..8fe466da27 100644
--- a/configs/recognition/tanet/README_zh-CN.md
+++ b/configs/recognition/tanet/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```latex
 @article{liu2020tam,
diff --git a/configs/recognition/tin/README.md b/configs/recognition/tin/README.md
index 0c0b9254c6..44e56a2952 100644
--- a/configs/recognition/tin/README.md
+++ b/configs/recognition/tin/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @article{shao2020temporal,
diff --git a/configs/recognition/tin/README_zh-CN.md b/configs/recognition/tin/README_zh-CN.md
index 0af09296fc..a047d3ca75 100644
--- a/configs/recognition/tin/README_zh-CN.md
+++ b/configs/recognition/tin/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @article{shao2020temporal,
diff --git
a/configs/recognition/tpn/README.md b/configs/recognition/tpn/README.md
index c873ce4dd3..83715cefb6 100644
--- a/configs/recognition/tpn/README.md
+++ b/configs/recognition/tpn/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @inproceedings{yang2020tpn,
diff --git a/configs/recognition/trn/README.md b/configs/recognition/trn/README.md
index 4098a36fdb..0d39abe6ef 100644
--- a/configs/recognition/trn/README.md
+++ b/configs/recognition/trn/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @article{zhou2017temporalrelation,
diff --git a/configs/recognition/trn/README_zh-CN.md b/configs/recognition/trn/README_zh-CN.md
index a38120f9c7..79a2e92e50 100644
--- a/configs/recognition/trn/README_zh-CN.md
+++ b/configs/recognition/trn/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @article{zhou2017temporalrelation,
diff --git a/configs/recognition/tsm/README.md b/configs/recognition/tsm/README.md
index 3e8533f9fd..add51e34b9 100644
--- a/configs/recognition/tsm/README.md
+++ b/configs/recognition/tsm/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @inproceedings{lin2019tsm,
@@ -13,7 +13,7 @@
 }
 ```

-[BACKBONE]
+<!-- [BACKBONE] -->

 ```BibTeX
 @article{NonLocal2018,
diff --git a/configs/recognition/tsm/README_zh-CN.md b/configs/recognition/tsm/README_zh-CN.md
index 17e537717f..82b2eeb2a4 100644
--- a/configs/recognition/tsm/README_zh-CN.md
+++ b/configs/recognition/tsm/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @inproceedings{lin2019tsm,
@@ -13,7 +13,7 @@
 }
 ```

-[BACKBONE]
+<!-- [BACKBONE] -->

 ```BibTeX
 @article{NonLocal2018,
diff --git a/configs/recognition/tsn/README.md b/configs/recognition/tsn/README.md
index 86e9b4ca2d..facca2ad0f 100644
--- a/configs/recognition/tsn/README.md
+++ b/configs/recognition/tsn/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @inproceedings{wang2016temporal,
diff --git a/configs/recognition/tsn/README_zh-CN.md b/configs/recognition/tsn/README_zh-CN.md
index ffaad3b368..ff3e4818c9 100644
--- a/configs/recognition/tsn/README_zh-CN.md
+++ b/configs/recognition/tsn/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @inproceedings{wang2016temporal,
diff --git a/configs/recognition/x3d/README.md b/configs/recognition/x3d/README.md
index 0d9397564c..6d99dc0dce 100644
--- a/configs/recognition/x3d/README.md
+++ b/configs/recognition/x3d/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @misc{feichtenhofer2020x3d,
diff --git a/configs/recognition/x3d/README_zh-CN.md b/configs/recognition/x3d/README_zh-CN.md
index c36b8fe80a..c2e47e549b 100644
--- a/configs/recognition/x3d/README_zh-CN.md
+++ b/configs/recognition/x3d/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @misc{feichtenhofer2020x3d,
diff --git a/configs/recognition_audio/resnet/README.md b/configs/recognition_audio/resnet/README.md
index 22965f5f1d..f597f46789 100644
--- a/configs/recognition_audio/resnet/README.md
+++ b/configs/recognition_audio/resnet/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[ALGORITHM]
+<!-- [ALGORITHM] -->

 ```BibTeX
 @article{xiao2020audiovisual,
diff --git a/docs/stat.py b/docs/stat.py
index 662a7291b1..e0445737b9 100755
--- a/docs/stat.py
+++ b/docs/stat.py
@@ -28,10 +28,11 @@ def anchor(name):
     title = content.split('\n')[0].replace('#', '')

     # count papers
-    papers = set((papertype, titlecase.titlecase(paper.lower().strip()))
-                 for (papertype, paper) in re.findall(
-                     r'\n\s*\[([A-Z]+?)\]\s*\n.*?\btitle\s*=\s*{(.*?)}',
-                     content, re.DOTALL))
+    papers = set(
+        (papertype, 
titlecase.titlecase(paper.lower().strip()))
+        for (papertype, paper) in re.findall(
+            r'<!--\s*\[([A-Z]+?)\]\s*-->\s*\n.*?\btitle\s*=\s*{(.*?)}',
+            content, re.DOTALL))
     # paper links
     revcontent = '\n'.join(list(reversed(content.splitlines())))
     paperlinks = {}
@@ -111,7 +112,8 @@
     papers = set(
         (papertype, titlecase.titlecase(paper.lower().strip()))
         for (papertype, paper) in re.findall(
-            r'\[([A-Z]*?)\]\s*\n.*?\btitle\s*=\s*{(.*?)}', content, re.DOTALL))
+            r'<!--\s*\[([A-Z]*?)\]\s*-->\s*\n.*?\btitle\s*=\s*{(.*?)}',
+            content, re.DOTALL))
     # paper links
     revcontent = '\n'.join(list(reversed(content.splitlines())))
     paperlinks = {}
diff --git a/docs_zh_CN/stat.py b/docs_zh_CN/stat.py
index 05a584147c..8a6cdcffe7 100755
--- a/docs_zh_CN/stat.py
+++ b/docs_zh_CN/stat.py
@@ -27,10 +27,11 @@ def anchor(name):
     title = content.split('\n')[0].replace('#', '')

     # count papers
-    papers = set((papertype, titlecase.titlecase(paper.lower().strip()))
-                 for (papertype, paper) in re.findall(
-                     r'\n\s*\[([A-Z]+?)\]\s*\n.*?\btitle\s*=\s*{(.*?)}',
-                     content, re.DOTALL))
+    papers = set(
+        (papertype, titlecase.titlecase(paper.lower().strip()))
+        for (papertype, paper) in re.findall(
+            r'<!--\s*\[([A-Z]+?)\]\s*-->\s*\n.*?\btitle\s*=\s*{(.*?)}',
+            content, re.DOTALL))
     # paper links
     revcontent = '\n'.join(list(reversed(content.splitlines())))
     paperlinks = {}
@@ -109,7 +110,8 @@
     papers = set(
         (papertype, titlecase.titlecase(paper.lower().strip()))
         for (papertype, paper) in re.findall(
-            r'\[([A-Z]*?)\]\s*\n.*?\btitle\s*=\s*{(.*?)}', content, re.DOTALL))
+            r'<!--\s*\[([A-Z]*?)\]\s*-->\s*\n.*?\btitle\s*=\s*{(.*?)}',
+            content, re.DOTALL))
     # paper links
     revcontent = '\n'.join(list(reversed(content.splitlines())))
     paperlinks = {}
diff --git a/tools/data/activitynet/README.md b/tools/data/activitynet/README.md
index 3d54b124cc..f3286f6fc1 100644
--- a/tools/data/activitynet/README.md
+++ b/tools/data/activitynet/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @article{Heilbron2015ActivityNetAL,
diff --git a/tools/data/activitynet/README_zh-CN.md b/tools/data/activitynet/README_zh-CN.md
index f7159a55c5..7687b948db 100644
--- a/tools/data/activitynet/README_zh-CN.md
+++ b/tools/data/activitynet/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @article{Heilbron2015ActivityNetAL,
diff --git a/tools/data/ava/README.md b/tools/data/ava/README.md
index 4bee5b65d3..5ec84284fa 100644
--- a/tools/data/ava/README.md
+++ b/tools/data/ava/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @inproceedings{gu2018ava,
@@ -98,11 +98,11 @@ If both are required, run the following script to extract frames.
 bash extract_frames.sh
 ```

-## Step 5. Fetching Proposal Files
+## Step 5. Fetch Proposal Files

 The scripts are adapted from FAIR's [Long-Term Feature Banks](https://github.com/facebookresearch/video-long-term-feature-banks).

-Run the follow scripts to fetch pre-computed proposal list.
+Run the following scripts to fetch the pre-computed proposal list.
```shell
 bash fetch_ava_proposals.sh
diff --git a/tools/data/ava/README_zh-CN.md b/tools/data/ava/README_zh-CN.md
index 1024f2aaef..9cfda9aaf4 100644
--- a/tools/data/ava/README_zh-CN.md
+++ b/tools/data/ava/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @inproceedings{gu2018ava,
diff --git a/tools/data/gym/README.md b/tools/data/gym/README.md
index 01093311aa..a39eda6fd4 100644
--- a/tools/data/gym/README.md
+++ b/tools/data/gym/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @inproceedings{shao2020finegym,
diff --git a/tools/data/gym/README_zh-CN.md b/tools/data/gym/README_zh-CN.md
index 5416ba7d0a..cb3a796ec7 100644
--- a/tools/data/gym/README_zh-CN.md
+++ b/tools/data/gym/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @inproceedings{shao2020finegym,
diff --git a/tools/data/hmdb51/README.md b/tools/data/hmdb51/README.md
index bf14e9d96a..206b548764 100644
--- a/tools/data/hmdb51/README.md
+++ b/tools/data/hmdb51/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @article{Kuehne2011HMDBAL,
diff --git a/tools/data/hmdb51/README_zh-CN.md b/tools/data/hmdb51/README_zh-CN.md
index d13382a38e..a34c4b9ce9 100644
--- a/tools/data/hmdb51/README_zh-CN.md
+++ b/tools/data/hmdb51/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @article{Kuehne2011HMDBAL,
diff --git a/tools/data/hvu/README.md b/tools/data/hvu/README.md
index e0143b6e1c..755e71dbb3 100644
--- a/tools/data/hvu/README.md
+++ b/tools/data/hvu/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @article{Diba2019LargeSH,
diff --git a/tools/data/hvu/README_zh-CN.md b/tools/data/hvu/README_zh-CN.md
index 1ff6b2caf5..5b3ffa1ea3 100644
--- a/tools/data/hvu/README_zh-CN.md
+++ b/tools/data/hvu/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @article{Diba2019LargeSH,
diff --git a/tools/data/jester/README.md b/tools/data/jester/README.md
index 1f4f322467..7acdbe13d9 100644
--- a/tools/data/jester/README.md
+++ b/tools/data/jester/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @InProceedings{Materzynska_2019_ICCV,
diff --git a/tools/data/jester/README_zh-CN.md b/tools/data/jester/README_zh-CN.md
index 015e316939..fdfda97d65 100644
--- a/tools/data/jester/README_zh-CN.md
+++ b/tools/data/jester/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @InProceedings{Materzynska_2019_ICCV,
diff --git a/tools/data/jhmdb/README.md b/tools/data/jhmdb/README.md
index bf5766e790..c026052cb6 100644
--- a/tools/data/jhmdb/README.md
+++ b/tools/data/jhmdb/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @inproceedings{Jhuang:ICCV:2013,
diff --git a/tools/data/jhmdb/README_zh-CN.md b/tools/data/jhmdb/README_zh-CN.md
index ec36f77df9..5806a9d338 100644
--- a/tools/data/jhmdb/README_zh-CN.md
+++ b/tools/data/jhmdb/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @inproceedings{Jhuang:ICCV:2013,
diff --git a/tools/data/kinetics/README.md b/tools/data/kinetics/README.md
index 141998fda2..6ece037493 100644
--- a/tools/data/kinetics/README.md
+++ b/tools/data/kinetics/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @inproceedings{inproceedings,
diff --git a/tools/data/kinetics/README_zh-CN.md b/tools/data/kinetics/README_zh-CN.md
index af10444469..ef49ba8e8a 100644
--- a/tools/data/kinetics/README_zh-CN.md
+++ b/tools/data/kinetics/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @inproceedings{inproceedings,
diff --git a/tools/data/mit/README.md b/tools/data/mit/README.md
index 
ab72165416..e67ca45335 100644
--- a/tools/data/mit/README.md
+++ b/tools/data/mit/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @article{monfortmoments,
diff --git a/tools/data/mit/README_zh-CN.md b/tools/data/mit/README_zh-CN.md
index 2e3240b1d4..74a3d0c247 100644
--- a/tools/data/mit/README_zh-CN.md
+++ b/tools/data/mit/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @article{monfortmoments,
diff --git a/tools/data/mmit/README.md b/tools/data/mmit/README.md
index b946680655..5deedf71d0 100644
--- a/tools/data/mmit/README.md
+++ b/tools/data/mmit/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @misc{monfort2019multimoments,
diff --git a/tools/data/mmit/README_zh-CN.md b/tools/data/mmit/README_zh-CN.md
index fa3393f491..e070505e34 100644
--- a/tools/data/mmit/README_zh-CN.md
+++ b/tools/data/mmit/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @misc{monfort2019multimoments,
diff --git a/tools/data/omnisource/README.md b/tools/data/omnisource/README.md
index efb46d220f..08c4c1475e 100644
--- a/tools/data/omnisource/README.md
+++ b/tools/data/omnisource/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @article{duan2020omni,
diff --git a/tools/data/omnisource/README_zh-CN.md b/tools/data/omnisource/README_zh-CN.md
index 4dd3f4a3e9..ed85b7c557 100644
--- a/tools/data/omnisource/README_zh-CN.md
+++ b/tools/data/omnisource/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @article{duan2020omni,
diff --git a/tools/data/sthv1/README.md b/tools/data/sthv1/README.md
index 577eefc7c8..5b93de2a2f 100644
--- a/tools/data/sthv1/README.md
+++ b/tools/data/sthv1/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @misc{goyal2017something,
diff --git a/tools/data/sthv2/README.md b/tools/data/sthv2/README.md
index 76bc0506f4..af112872da 100644
--- a/tools/data/sthv2/README.md
+++ b/tools/data/sthv2/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @misc{goyal2017something,
diff --git a/tools/data/sthv2/README_zh-CN.md b/tools/data/sthv2/README_zh-CN.md
index 28b40c00a3..cce654ce84 100644
--- a/tools/data/sthv2/README_zh-CN.md
+++ b/tools/data/sthv2/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[DATASET]
+<!-- [DATASET] -->

 ```
 @misc{goyal2017something,
diff --git a/tools/data/thumos14/README.md b/tools/data/thumos14/README.md
index d94342aec8..eaddb60cbe 100644
--- a/tools/data/thumos14/README.md
+++ b/tools/data/thumos14/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @misc{THUMOS14,
diff --git a/tools/data/thumos14/README_zh-CN.md b/tools/data/thumos14/README_zh-CN.md
index 6bc3588d15..fb7140a24e 100644
--- a/tools/data/thumos14/README_zh-CN.md
+++ b/tools/data/thumos14/README_zh-CN.md
@@ -2,7 +2,7 @@

 ## 简介

-[DATASET]
+<!-- [DATASET] -->

 ```BibTex
 @misc{THUMOS14,
diff --git a/tools/data/ucf101/README.md b/tools/data/ucf101/README.md
index bcd707661f..3e3af8c570 100644
--- a/tools/data/ucf101/README.md
+++ b/tools/data/ucf101/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @article{Soomro2012UCF101AD,
diff --git a/tools/data/ucf101_24/README.md b/tools/data/ucf101_24/README.md
index dbb0466c90..f93b12c6dc 100644
--- a/tools/data/ucf101_24/README.md
+++ b/tools/data/ucf101_24/README.md
@@ -2,7 +2,7 @@

 ## Introduction

-[DATASET]
+<!-- [DATASET] -->

 ```BibTeX
 @article{Soomro2012UCF101AD,

From 37af2e5306a238bad0954fdd76e39ed7df97f1ef Mon Sep 17 00:00:00 2001
From: Jintao Lin <528557675@qq.com>
Date: Thu, 22 Apr 2021 16:50:12 +0800
Subject: [PATCH 051/414] Update github template (#821)

* update github template

* polish
---
.github/ISSUE_TEMPLATE/error-report.md      |  1 +
 .github/ISSUE_TEMPLATE/general_questions.md |  7 ++++++
 .github/pull_request_template.md            | 26 +++++++++++++++++++
 3 files changed, 34 insertions(+)
 create mode 100644 .github/pull_request_template.md

diff --git a/.github/ISSUE_TEMPLATE/error-report.md b/.github/ISSUE_TEMPLATE/error-report.md
index 51147bbb07..bac7c835fa 100644
--- a/.github/ISSUE_TEMPLATE/error-report.md
+++ b/.github/ISSUE_TEMPLATE/error-report.md
@@ -16,6 +16,7 @@ If you feel we have help you, give us a STAR! :satisfied:
 2. The bug has not been fixed in the latest version.

 **Describe the bug**
+
 A clear and concise description of what the bug is.

 **Reproduction**
diff --git a/.github/ISSUE_TEMPLATE/general_questions.md b/.github/ISSUE_TEMPLATE/general_questions.md
index b5a6451a6c..b5ffabd97e 100644
--- a/.github/ISSUE_TEMPLATE/general_questions.md
+++ b/.github/ISSUE_TEMPLATE/general_questions.md
@@ -6,3 +6,10 @@ labels: ''
 assignees: ''

 ---
+
+Before raising a question, you may need to check the items listed below.
+
+**Checklist**
+
+1. I have searched related issues but cannot get the expected help.
+2. I have read the [FAQ documentation](https://mmaction2.readthedocs.io/en/latest/faq.html) but cannot get the expected help.
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
new file mode 100644
index 0000000000..63052769a7
--- /dev/null
+++ b/.github/pull_request_template.md
@@ -0,0 +1,26 @@
+Thanks for your contribution and we appreciate it a lot. The following instructions will make your pull request healthier and help you get feedback more easily.
+If you do not understand some items, don't worry, just make the pull request and seek help from maintainers.
+
+## Motivation
+
+Please describe the motivation of this PR and the goal you want to achieve through this PR.
+
+## Modification
+
+Please briefly describe what modification is made in this PR.
+
+## BC-breaking (Optional)
+
+Does the modification introduce changes that break the backward compatibility of this repo?
+If so, please describe how it breaks the compatibility and how users should modify their code to keep compatibility with this PR.
+
+## Use cases (Optional)
+
+If this PR introduces a new feature, it is better to list some use cases here, and update the documentation.
+
+## Checklist
+
+1. Pre-commit or other linting tools should be used to fix the potential lint issues.
+2. The modification should be covered by complete unit tests. If not, please add more unit tests to ensure the correctness.
+3. If the modification has potential influence on downstream projects, this PR should be tested with downstream projects, like MMDet or MMCls.
+4. The documentation should be modified accordingly, like docstring or example tutorials.
From f366f8299091fbd321ca691f7158181fdaca3cf3 Mon Sep 17 00:00:00 2001 From: Kenny Date: Fri, 23 Apr 2021 15:10:11 +0800 Subject: [PATCH 052/414] fix bug --- mmaction/datasets/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mmaction/datasets/__init__.py b/mmaction/datasets/__init__.py index 48341c9383..6c1d447a71 100644 --- a/mmaction/datasets/__init__.py +++ b/mmaction/datasets/__init__.py @@ -10,6 +10,7 @@ from .dataset_wrappers import RepeatDataset from .hvu_dataset import HVUDataset from .image_dataset import ImageDataset +from .pose_dataset import PoseDataset from .rawframe_dataset import RawframeDataset from .rawvideo_dataset import RawVideoDataset from .ssn_dataset import SSNDataset @@ -19,6 +20,6 @@ 'VideoDataset', 'build_dataloader', 'build_dataset', 'RepeatDataset', 'RawframeDataset', 'BaseDataset', 'ActivityNetDataset', 'SSNDataset', 'HVUDataset', 'AudioDataset', 'AudioFeatureDataset', 'ImageDataset', - 'RawVideoDataset', 'AVADataset', 'AudioVisualDataset', + 'RawVideoDataset', 'AVADataset', 'AudioVisualDataset', 'PoseDataset', 'BaseMiniBatchBlending', 'CutmixBlending', 'MixupBlending' ] From 6468f18eadfe961279dceae141f6458fa0497de8 Mon Sep 17 00:00:00 2001 From: Kenny Date: Fri, 23 Apr 2021 15:32:21 +0800 Subject: [PATCH 053/414] remove num_person field --- mmaction/datasets/pipelines/pose_loading.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/mmaction/datasets/pipelines/pose_loading.py b/mmaction/datasets/pipelines/pose_loading.py index c01e334a9b..2458671cd5 100644 --- a/mmaction/datasets/pipelines/pose_loading.py +++ b/mmaction/datasets/pipelines/pose_loading.py @@ -212,14 +212,12 @@ def __call__(self, results): if 'kpscore' in results: kpscore = results['kpscore'] - assert results['num_person'] == kpscore.shape[0] if self.random_drop: self._drop_kpscore(kpscore) results['kpscore'] = kpscore[:, frame_inds].astype(np.float32) if 'kp' in results: - assert results['num_person'] == len(results['kp']) results['kp'] = results['kp'][:, frame_inds].astype(np.float32) return results From d32e2dfb5724cecf60baa53809806132578424d1 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Sun, 25 Apr 2021 12:47:39 +0800 Subject: [PATCH 054/414] use twice sample for tpn_tsm sthv1 testing (#833) * correct tpn sthv1 testing * Update tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py --- configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py b/configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py index 8a06172ffd..12602ae8da 100644 --- a/configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py +++ b/configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py @@ -33,7 +33,6 @@ dict(type='FrameSelector'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), - dict(type='ColorJitter', color_space_aug=True), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), @@ -44,7 +43,8 @@ type='SampleFrames', clip_len=1, frame_interval=1, - num_clips=16, + num_clips=8, + twice_sample=True, test_mode=True), dict(type='FrameSelector'), dict(type='Resize', scale=(-1, 256)), From 9bfdbbe1bc3d66ffd4c44ef8850a3fdd97a3cdc5 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Sun, 25 Apr 2021 12:48:20 +0800 Subject: [PATCH 055/414] add link to MMGeneration (#834) --- README.md | 1 + README_zh-CN.md | 1 + 2 
files changed, 2 insertions(+) diff --git a/README.md b/README.md index 3ea214b8e0..941f14d826 100644 --- a/README.md +++ b/README.md @@ -240,3 +240,4 @@ We wish that the toolbox and benchmark could serve the growing research communit - [MMPose](https://github.com/open-mmlab/mmpose): OpenMMLab pose estimation toolbox and benchmark. - [MMEditing](https://github.com/open-mmlab/mmediting): OpenMMLab image and video editing toolbox. - [MMOCR](https://github.com/open-mmlab/mmocr): A Comprehensive Toolbox for Text Detection, Recognition and Understanding. +- [MMGeneration](https://github.com/open-mmlab/mmgeneration): OpenMMLab image and video generative models toolbox. diff --git a/README_zh-CN.md b/README_zh-CN.md index 18ddb43e1d..511dfb6869 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -227,6 +227,7 @@ MMAction2 是一款由不同学校和公司共同贡献的开源项目。我们 - [MMPose](https://github.com/open-mmlab/mmpose): OpenMMLab 姿态估计工具箱与测试基准 - [MMEditing](https://github.com/open-mmlab/mmediting): OpenMMLab 图像视频编辑工具箱 - [MMOCR](https://github.com/open-mmlab/mmocr): OpenMMLab 全流程文字检测识别理解工具包 +- [MMGeneration](https://github.com/open-mmlab/mmgeneration): OpenMMLab 图片视频生成模型工具箱 ## 欢迎加入 OpenMMLab 社区 From 9ca8a95cb593fcc8b3b96b2c8c6162491d1a9d5f Mon Sep 17 00:00:00 2001 From: Kenny Date: Sun, 25 Apr 2021 22:27:22 +0800 Subject: [PATCH 056/414] add visualization --- demo/visualize_heatmap_volume.ipynb | 403 ++++++++++++++++++++++++++++ 1 file changed, 403 insertions(+) create mode 100644 demo/visualize_heatmap_volume.ipynb diff --git a/demo/visualize_heatmap_volume.ipynb b/demo/visualize_heatmap_volume.ipynb new file mode 100644 index 0000000000..44823ae70f --- /dev/null +++ b/demo/visualize_heatmap_volume.ipynb @@ -0,0 +1,403 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 6, + "id": "speaking-algebra", + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import cv2\n", + "import os.path as osp\n", + "import decord\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "import urllib\n", + "import moviepy.editor as mpy\n", + "import random as rd\n", + "from mmpose.apis import vis_pose_result\n", + "from mmpose.models import TopDown\n", + "from mmcv import load, dump\n", + "\n", + "# We assume the annotation is already prepared\n", + "gym_train_ann_file = '../data/skeleton/gym_train.pkl'\n", + "gym_val_ann_file = '../data/skeleton/gym_val.pkl'\n", + "ntu60_xsub_train_ann_file = '../data/skeleton/ntu60_xsub_train.pkl'\n", + "ntu60_xsub_val_ann_file = '../data/skeleton/ntu60_xsub_val.pkl'" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "alive-consolidation", + "metadata": {}, + "outputs": [], + "source": [ + "FONTFACE = cv2.FONT_HERSHEY_DUPLEX\n", + "FONTSCALE = 0.6\n", + "FONTCOLOR = (255, 255, 255)\n", + "BGBLUE = (0, 119, 182)\n", + "THICKNESS = 1\n", + "LINETYPE = 1" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "ranging-conjunction", + "metadata": {}, + "outputs": [], + "source": [ + "def add_label(frame, label, BGCOLOR=BGBLUE):\n", + " threshold = 30\n", + " def split_label(label):\n", + " label = label.split()\n", + " lines, cline = [], ''\n", + " for word in label:\n", + " if len(cline) + len(word) < threshold:\n", + " cline = cline + ' ' + word\n", + " else:\n", + " lines.append(cline)\n", + " cline = word\n", + " if cline != '':\n", + " lines += [cline]\n", + " return lines\n", + " \n", + " if len(label) > 30:\n", + " label = split_label(label)\n", + " else:\n", + " label = [label]\n", + " label = ['Action: '] + label\n", + " 
\n", + " sizes = []\n", + " for line in label:\n", + " sizes.append(cv2.getTextSize(line, FONTFACE, FONTSCALE, THICKNESS)[0])\n", + " box_width = max([x[0] for x in sizes]) + 10\n", + " text_height = sizes[0][1]\n", + " box_height = len(sizes) * (text_height + 6)\n", + " \n", + " cv2.rectangle(frame, (0, 0), (box_width, box_height), BGCOLOR, -1)\n", + " for i, line in enumerate(label):\n", + " location = (5, (text_height + 6) * i + text_height + 3)\n", + " cv2.putText(frame, line, location, FONTFACE, FONTSCALE, FONTCOLOR, THICKNESS, LINETYPE)\n", + " return frame\n", + " \n", + "\n", + "def vis_skeleton(vid_path, anno, category_name=None, ratio=0.5):\n", + " vid = decord.VideoReader(vid_path)\n", + " frames = [x.asnumpy() for x in vid]\n", + " \n", + " h, w, _ = frames[0].shape\n", + " new_shape = (int(w * ratio), int(h * ratio))\n", + " frames = [cv2.resize(f, new_shape) for f in frames]\n", + " \n", + " assert len(frames) == anno['total_frames']\n", + " # The shape is N x T x K x 3\n", + " kps = np.concatenate([anno['kp'], anno['kpscore'][..., None]], axis=-1)\n", + " kps[..., :2] *= ratio\n", + " # Convert to T x N x K x 3\n", + " kps = kps.transpose([1, 0, 2, 3])\n", + " vis_frames = []\n", + "\n", + " # we need an instance of TopDown model, so build a minimal one\n", + " model = TopDown(backbone=dict(type='ShuffleNetV1'))\n", + "\n", + " for f, kp in zip(frames, kps):\n", + " bbox = np.zeros([0, 4], dtype=np.float32)\n", + " result = [dict(bbox=bbox, keypoints=k) for k in kp]\n", + " vis_frame = vis_pose_result(model, f, result)\n", + " \n", + " if category_name is not None:\n", + " vis_frame = add_label(vis_frame, category_name)\n", + " \n", + " vis_frames.append(vis_frame)\n", + " return vis_frames" + ] + }, + { + "cell_type": "code", + "execution_count": 55, + "id": "applied-humanity", + "metadata": {}, + "outputs": [], + "source": [ + "keypoint_pipeline = [\n", + " dict(type='PoseDecode'),\n", + " dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True),\n", + " dict(type='Resize', scale=(-1, 64)),\n", + " dict(type='CenterCrop', crop_size=64),\n", + " dict(type='GeneratePoseTarget', sigma=0.6, use_score=True, with_kp=True, with_limb=False)\n", + "]\n", + "\n", + "limb_pipeline = [\n", + " dict(type='PoseDecode'),\n", + " dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True),\n", + " dict(type='Resize', scale=(-1, 64)),\n", + " dict(type='CenterCrop', crop_size=64),\n", + " dict(type='GeneratePoseTarget', sigma=0.6, use_score=True, with_kp=False, with_limb=True)\n", + "]\n", + "\n", + "from mmaction.datasets.pipelines import Compose\n", + "def get_pseudo_heatmap(anno, flag='keypoint'):\n", + " assert flag in ['keypoint', 'limb']\n", + " pipeline = Compose(keypoint_pipeline if flag == 'keypoint' else limb_pipeline)\n", + " return pipeline(anno)['imgs']\n", + "\n", + "def vis_heatmaps(heatmaps, channel=-1, ratio=8):\n", + " # if channel is -1, draw all keypoints / limbs on the same map\n", + " import matplotlib.cm as cm\n", + " h, w, _ = heatmaps[0].shape\n", + " newh, neww = int(h * ratio), int(w * ratio)\n", + " \n", + " if channel == -1:\n", + " heatmaps = [np.max(x, axis=-1) for x in heatmaps]\n", + " cmap = cm.viridis\n", + " heatmaps = [(cmap(x)[..., :3] * 255).astype(np.uint8) for x in heatmaps]\n", + " heatmaps = [cv2.resize(x, (neww, newh)) for x in heatmaps]\n", + " return heatmaps" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "automatic-commons", + "metadata": {}, + "outputs": [], + "source": [ + "# Load GYM annotations\n", + "lines = 
list(urllib.request.urlopen('https://sdolivia.github.io/FineGym/resources/dataset/gym99_categories.txt'))\n",
+    "gym_categories = [x.decode().strip().split('; ')[-1] for x in lines]\n",
+    "gym_annos = load(gym_train_ann_file) + load(gym_val_ann_file)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 74,
+   "id": "numerous-bristol",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "--2021-04-25 22:18:53--  https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/posec3d/gym_samples.tar\n",
+      "Resolving openmmlab.oss-cn-hangzhou.aliyuncs.com (openmmlab.oss-cn-hangzhou.aliyuncs.com)... 124.160.145.22\n",
+      "Connecting to openmmlab.oss-cn-hangzhou.aliyuncs.com (openmmlab.oss-cn-hangzhou.aliyuncs.com)|124.160.145.22|:443... connected.\n",
+      "HTTP request sent, awaiting response... 200 OK\n",
+      "Length: 36300800 (35M) [application/x-tar]\n",
+      "Saving to: ‘gym_samples.tar’\n",
+      "\n",
+      "100%[======================================>] 36,300,800  11.5MB/s   in 3.0s    \n",
+      "\n",
+      "2021-04-25 22:18:58 (11.5 MB/s) - ‘gym_samples.tar’ saved [36300800/36300800]\n",
+      "\n"
+     ]
+    }
+   ],
+   "source": [
+    "# download sample videos of GYM\n",
+    "!wget https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/posec3d/gym_samples.tar\n",
+    "!tar -xf gym_samples.tar\n",
+    "!rm gym_samples.tar"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 76,
+   "id": "ranging-harrison",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "gym_root = 'gym_samples/'\n",
+    "gym_vids = os.listdir(gym_root)\n",
+    "# visualize pose of which video? index in 0 - 50.\n",
+    "idx = 1\n",
+    "vid = gym_vids[idx]\n",
+    "\n",
+    "frame_dir = vid.split('.')[0]\n",
+    "vid_path = osp.join(gym_root, vid)\n",
+    "anno = [x for x in gym_annos if x['frame_dir'] == frame_dir][0]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 86,
+   "id": "fitting-courage",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Visualize Skeleton\n",
+    "vis_frames = vis_skeleton(vid_path, anno, gym_categories[anno['label']])\n",
+    "vid = mpy.ImageSequenceClip(vis_frames, fps=24)\n",
+    "vid.ipython_display()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 87,
+   "id": "orange-logging",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "keypoint_heatmap = get_pseudo_heatmap(anno)\n",
+    "keypoint_mapvis = vis_heatmaps(keypoint_heatmap)\n",
+    "keypoint_mapvis = [add_label(f, gym_categories[anno['label']]) for f in keypoint_mapvis]\n",
+    "vid = mpy.ImageSequenceClip(keypoint_mapvis, fps=24)\n",
+    "vid.ipython_display()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 88,
+   "id": "residential-conjunction",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "limb_heatmap = get_pseudo_heatmap(anno, 'limb')\n",
+    "limb_mapvis = vis_heatmaps(limb_heatmap)\n",
+    "limb_mapvis = [add_label(f, gym_categories[anno['label']]) for f in limb_mapvis]\n",
+    "vid = mpy.ImageSequenceClip(limb_mapvis, fps=24)\n",
+    "vid.ipython_display()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 66,
+   "id": "coupled-stranger",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# The name list of the NTU-60 action categories\n",
+    "ntu_categories = ['drink water', 'eat meal/snack', 'brushing teeth', 'brushing hair', 'drop', 'pickup', \n",
+    "                  'throw', 'sitting down', 'standing up (from sitting position)', 'clapping', 'reading', \n",
+    "                  'writing', 'tear up paper', 'wear jacket', 'take off jacket', 'wear a shoe', \n",
+    "                  'take off a shoe', 'wear on glasses', 'take off glasses', 'put on a hat/cap', \n",
+    "                  'take off a hat/cap', 'cheer 
up', 'hand waving', 'kicking something', \n", + " 'reach into pocket', 'hopping (one foot jumping)', 'jump up', \n", + " 'make a phone call/answer phone', 'playing with phone/tablet', 'typing on a keyboard', \n", + " 'pointing to something with finger', 'taking a selfie', 'check time (from watch)', \n", + " 'rub two hands together', 'nod head/bow', 'shake head', 'wipe face', 'salute', \n", + " 'put the palms together', 'cross hands in front (say stop)', 'sneeze/cough', \n", + " 'staggering', 'falling', 'touch head (headache)', 'touch chest (stomachache/heart pain)', \n", + " 'touch back (backache)', 'touch neck (neckache)', 'nausea or vomiting condition', \n", + " 'use a fan (with hand or paper)/feeling warm', 'punching/slapping other person', \n", + " 'kicking other person', 'pushing other person', 'pat on back of other person', \n", + " 'point finger at the other person', 'hugging other person', \n", + " 'giving something to other person', \"touch other person's pocket\", 'handshaking', \n", + " 'walking towards each other', 'walking apart from each other']\n", + "ntu_annos = load(ntu60_xsub_train_ann_file) + load(ntu60_xsub_val_ann_file)" + ] + }, + { + "cell_type": "code", + "execution_count": 80, + "id": "critical-review", + "metadata": {}, + "outputs": [], + "source": [ + "ntu_root = 'ntu_samples/'\n", + "ntu_vids = os.listdir(ntu_root)\n", + "# visualize pose of which video? index in 0 - 50.\n", + "idx = 20\n", + "vid = ntu_vids[idx]\n", + "\n", + "frame_dir = vid.split('.')[0]\n", + "vid_path = osp.join(ntu_root, vid)\n", + "anno = [x for x in ntu_annos if x['frame_dir'] == frame_dir.split('_')[0]][0]\n" + ] + }, + { + "cell_type": "code", + "execution_count": 81, + "id": "seasonal-palmer", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "--2021-04-25 22:21:16-- https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/posec3d/ntu_samples.tar\n", + "Resolving openmmlab.oss-cn-hangzhou.aliyuncs.com (openmmlab.oss-cn-hangzhou.aliyuncs.com)... 124.160.145.22\n", + "Connecting to openmmlab.oss-cn-hangzhou.aliyuncs.com (openmmlab.oss-cn-hangzhou.aliyuncs.com)|124.160.145.22|:443... connected.\n", + "HTTP request sent, awaiting response... 
200 OK\n",
+      "Length: 121753600 (116M) [application/x-tar]\n",
+      "Saving to: ‘ntu_samples.tar’\n",
+      "\n",
+      "100%[======================================>] 121,753,600 14.4MB/s    in 9.2s    \n",
+      "\n",
+      "2021-04-25 22:21:26 (12.6 MB/s) - ‘ntu_samples.tar’ saved [121753600/121753600]\n",
+      "\n"
+     ]
+    }
+   ],
+   "source": [
+    "# download sample videos of NTU-60\n",
+    "!wget https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/posec3d/ntu_samples.tar\n",
+    "!tar -xf ntu_samples.tar\n",
+    "!rm ntu_samples.tar"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 89,
+   "id": "accompanied-invitation",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "vis_frames = vis_skeleton(vid_path, anno, ntu_categories[anno['label']])\n",
+    "vid = mpy.ImageSequenceClip(vis_frames, fps=24)\n",
+    "vid.ipython_display()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 90,
+   "id": "respiratory-conclusion",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "keypoint_heatmap = get_pseudo_heatmap(anno)\n",
+    "keypoint_mapvis = vis_heatmaps(keypoint_heatmap)\n",
+    "keypoint_mapvis = [add_label(f, ntu_categories[anno['label']]) for f in keypoint_mapvis]\n",
+    "vid = mpy.ImageSequenceClip(keypoint_mapvis, fps=24)\n",
+    "vid.ipython_display()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 91,
+   "id": "thirty-vancouver",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "limb_heatmap = get_pseudo_heatmap(anno, 'limb')\n",
+    "limb_mapvis = vis_heatmaps(limb_heatmap)\n",
+    "limb_mapvis = [add_label(f, ntu_categories[anno['label']]) for f in limb_mapvis]\n",
+    "vid = mpy.ImageSequenceClip(limb_mapvis, fps=24)\n",
+    "vid.ipython_display()"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.6.12"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
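The pseudo heatmaps visualized above are, in essence, per-keypoint Gaussians: for every sampled frame, `GeneratePoseTarget` paints a 2D Gaussian around each (x, y) keypoint and, with `use_score=True`, weights it by the keypoint's confidence score; the limb variant instead paints values along each skeleton edge. Below is a minimal numpy sketch of the keypoint case, an illustration of the idea rather than the library implementation; `sigma` mirrors the value used in the notebook and config pipelines:

```python
import numpy as np


def keypoint_pseudo_heatmap(h, w, kps, scores, sigma=0.6):
    """One frame: kps is a (K, 2) array of (x, y), scores is a (K,) array."""
    y, x = np.mgrid[0:h, 0:w]
    heatmap = np.zeros((h, w, len(kps)), dtype=np.float32)
    for k, ((x0, y0), s) in enumerate(zip(kps, scores)):
        # unnormalized Gaussian centered on the keypoint, weighted by its score
        heatmap[..., k] = s * np.exp(-((x - x0)**2 + (y - y0)**2) /
                                     (2 * sigma**2))
    return heatmap
```

Stacking these per-frame maps over time yields a K x T x H x W volume (one channel per keypoint), which is what the PoseC3D configs consume with `in_channels=17`.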
From c24d7f72c7aa4c7fcaf19b95c647a87f411fe6a8 Mon Sep 17 00:00:00 2001
From: Kenny
Date: Mon, 26 Apr 2021 18:50:33 +0800
Subject: [PATCH 057/414] Remove Option Resample

---
 mmaction/datasets/pose_dataset.py | 8 --------
 1 file changed, 8 deletions(-)

diff --git a/mmaction/datasets/pose_dataset.py b/mmaction/datasets/pose_dataset.py
index 7a7c8c61e3..b6d5f42e6e 100644
--- a/mmaction/datasets/pose_dataset.py
+++ b/mmaction/datasets/pose_dataset.py
@@ -24,11 +24,6 @@ class PoseDataset(BaseDataset):
     Args:
         ann_file (str): Path to the annotation file.
         pipeline (list[dict | callable]): A sequence of data transforms.
-        resample (float | None): The sampling probability for classes 61-120 in
-            NTU-120 XSub split. If set as 1.5, it means the probability of
-            sampling a video in classes 61-120 is 1.5 times than the
-            probability of sampling a video in classes 1-60. None means not
-            applicable (only applicable to NTU-120 XSub). Default: None.
         valid_ratio (float | None): The valid_ratio for videos in KineticsPose.
             For a video with n frames, it is a valid training sample only if
            n * valid_ratio frames have human pose. None means not applicable
@@ -46,13 +41,10 @@ class PoseDataset(BaseDataset):
 
     def __init__(self,
                  ann_file,
                  pipeline,
-                 resample=None,
                  valid_ratio=None,
                  box_thre=None,
                  class_prob=None,
                  **kwargs):
-        # For NTU-120 X-Sub
-        self.resample = resample
 
         modality = 'Pose'
         super().__init__(

From 3859d20130563d12321dbcd55933759746e786ad Mon Sep 17 00:00:00 2001
From: Kenny
Date: Tue, 27 Apr 2021 15:09:23 +0800
Subject: [PATCH 058/414] make stage_blocks configurable

---
 mmaction/models/backbones/resnet3d.py | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/mmaction/models/backbones/resnet3d.py b/mmaction/models/backbones/resnet3d.py
index f1023342aa..1ca0a65ca8 100644
--- a/mmaction/models/backbones/resnet3d.py
+++ b/mmaction/models/backbones/resnet3d.py
@@ -328,6 +328,8 @@ class ResNet3d(nn.Module):
     Args:
         depth (int): Depth of resnet, from {18, 34, 50, 101, 152}.
         pretrained (str | None): Name of pretrained model.
+        stage_blocks (tuple | None): Number of residual blocks in each
+            res stage. Default: None.
         pretrained2d (bool): Whether to load pretrained 2D model.
             Default: True.
         in_channels (int): Channel num of input features. Default: 3.
@@ -390,6 +392,7 @@ class ResNet3d(nn.Module):
     def __init__(self,
                  depth,
                  pretrained,
+                 stage_blocks=None,
                  pretrained2d=True,
                  in_channels=3,
                  num_stages=4,
@@ -425,6 +428,7 @@ def __init__(self,
         self.base_channels = base_channels
         self.num_stages = num_stages
         assert 1 <= num_stages <= 4
+        self.stage_blocks = stage_blocks
         self.out_indices = out_indices
         assert max(out_indices) < num_stages
         self.spatial_strides = spatial_strides
@@ -432,6 +436,9 @@ def __init__(self,
         self.dilations = dilations
         assert len(spatial_strides) == len(temporal_strides) == len(
             dilations) == num_stages
+        if self.stage_blocks is not None:
+            assert len(self.stage_blocks) == num_stages
+
         self.conv1_kernel = conv1_kernel
         self.conv1_stride_t = conv1_stride_t
         self.pool1_stride_t = pool1_stride_t
@@ -449,7 +456,10 @@ def __init__(self,
         self.zero_init_residual = zero_init_residual
 
         self.block, stage_blocks = self.arch_settings[depth]
-        self.stage_blocks = stage_blocks[:num_stages]
+
+        if self.stage_blocks is None:
+            self.stage_blocks = stage_blocks[:num_stages]
+
         self.inplanes = self.base_channels
 
         self.non_local_cfg = non_local_cfg

From 95df96db7070afc16269439467626c66ac62767e Mon Sep 17 00:00:00 2001
From: Kenny
Date: Tue, 27 Apr 2021 15:32:37 +0800
Subject: [PATCH 059/414] update

---
 mmaction/models/backbones/resnet3d.py | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/mmaction/models/backbones/resnet3d.py b/mmaction/models/backbones/resnet3d.py
index 1ca0a65ca8..ab85302a04 100644
--- a/mmaction/models/backbones/resnet3d.py
+++ b/mmaction/models/backbones/resnet3d.py
@@ -402,7 +402,9 @@ def __init__(self,
                  temporal_strides=(1, 1, 1, 1),
                  dilations=(1, 1, 1, 1),
                  conv1_kernel=(5, 7, 7),
+                 conv1_stride_s=2,
                  conv1_stride_t=2,
+                 pool1_stride_s=2,
                  pool1_stride_t=2,
                  with_pool2=True,
                  style='pytorch',
@@ -440,7 +442,9 @@ def __init__(self,
         self.conv1_kernel = conv1_kernel
+        self.conv1_stride_s = conv1_stride_s
         self.conv1_stride_t = conv1_stride_t
+        self.pool1_stride_s = pool1_stride_s
         self.pool1_stride_t = pool1_stride_t
         self.with_pool2 = with_pool2
         self.style = style
@@ -743,7 +747,8 @@ def _make_stem_layer(self):
             self.in_channels,
             self.base_channels,
             kernel_size=self.conv1_kernel,
-            stride=(self.conv1_stride_t, 2, 2),
+            stride=(self.conv1_stride_t, self.conv1_stride_s,
+                    self.conv1_stride_s),
             padding=tuple([(k - 1) // 2 
for k in _triple(self.conv1_kernel)]), bias=False, conv_cfg=self.conv_cfg, @@ -752,7 +757,8 @@ def _make_stem_layer(self): self.maxpool = nn.MaxPool3d( kernel_size=(1, 3, 3), - stride=(self.pool1_stride_t, 2, 2), + stride=(self.pool1_stride_t, self.pool1_stride_s, + self.pool1_stride_s), padding=(0, 1, 1)) self.pool2 = nn.MaxPool3d(kernel_size=(2, 1, 1), stride=(2, 1, 1)) From 201e7cd218687c52efe1c3f576c12f9700df837b Mon Sep 17 00:00:00 2001 From: Kenny Date: Tue, 27 Apr 2021 17:43:59 +0800 Subject: [PATCH 060/414] add config --- .../slowonly_r50_u48_240e_gym_keypoint.py | 128 +++++++++++++++++ .../posec3d/slowonly_r50_u48_240e_gym_limb.py | 134 +++++++++++++++++ ...wonly_r50_u48_240e_ntu120_xsub_keypoint.py | 130 +++++++++++++++++ .../slowonly_r50_u48_240e_ntu120_xsub_limb.py | 136 ++++++++++++++++++ ...owonly_r50_u48_240e_ntu60_xsub_keypoint.py | 128 +++++++++++++++++ .../slowonly_r50_u48_240e_ntu60_xsub_limb.py | 134 +++++++++++++++++ 6 files changed, 790 insertions(+) create mode 100644 configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint.py create mode 100644 configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb.py create mode 100644 configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py create mode 100644 configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb.py create mode 100644 configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint.py create mode 100644 configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb.py diff --git a/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint.py b/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint.py new file mode 100644 index 0000000000..7824643547 --- /dev/null +++ b/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint.py @@ -0,0 +1,128 @@ +model = dict( + type='Recognizer3D', + backbone=dict( + type='ResNet3dSlowOnly', + depth=50, + pretrained=None, + in_channels=17, + base_channels=32, + num_stages=3, + out_indices=(2, ), + stage_blocks=(4, 6, 3), + conv1_stride_s=1, + pool1_stride_s=1, + inflate=(0, 1, 1), + spatial_strides=(2, 2, 2), + temporal_strides=(1, 1, 2), + dilations=(1, 1, 1)), + cls_head=dict( + type='I3DHead', + in_channels=512, + num_classes=99, + spatial_type='avg', + dropout_ratio=0.5), + train_cfg=dict(), + test_cfg=dict(average_clips='prob')) + +dataset_type = 'PoseDataset' +ann_file_train = 'data/posec3d/gym_train.pkl' +ann_file_val = 'data/posec3d/gym_val.pkl' +left = [1, 3, 5, 7, 9, 11, 13, 15] +right = [2, 4, 6, 8, 10, 12, 14, 16] +train_pipeline = [ + dict(type='UniformSampleFrames', clip_len=48), + dict(type='PoseDecode'), + dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True), + dict(type='Resize', scale=(-1, 64)), + dict(type='RandomResizedCrop', area_range=(0.56, 1.0)), + dict(type='Resize', scale=(56, 56), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5, left=left, right=right), + dict( + type='GeneratePoseTarget', + sigma=0.6, + use_score=True, + with_kp=True, + with_limb=False), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict(type='UniformSampleFrames', clip_len=48, num_clips=1, test_mode=True), + dict(type='PoseDecode'), + dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True), + dict(type='Resize', scale=(-1, 64)), + dict(type='CenterCrop', crop_size=64), + dict( + type='GeneratePoseTarget', + sigma=0.6, + use_score=True, + with_kp=True, + with_limb=False), + dict(type='FormatShape', 
input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='UniformSampleFrames', clip_len=48, num_clips=10, test_mode=True), + dict(type='PoseDecode'), + dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True), + dict(type='Resize', scale=(-1, 64)), + dict(type='CenterCrop', crop_size=64), + dict( + type='GeneratePoseTarget', + sigma=0.6, + use_score=True, + with_kp=True, + with_limb=False, + double=True, + left=left, + right=right), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=16, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix='', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix='', + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix='', + pipeline=test_pipeline)) +# optimizer +optimizer = dict( + type='SGD', lr=0.2, momentum=0.9, + weight_decay=0.0003) # this lr is used for 8 gpus +optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2)) +# learning policy +lr_config = dict(policy='CosineAnnealing', by_epoch=False, min_lr=0) +total_epochs = 240 +checkpoint_config = dict(interval=10) +workflow = [('train', 10)] +evaluation = dict( + interval=10, + metrics=['top_k_accuracy', 'mean_class_accuracy'], + topk=(1, 5)) +log_config = dict( + interval=20, hooks=[ + dict(type='TextLoggerHook'), + ]) +dist_params = dict(backend='nccl') +log_level = 'INFO' +work_dir = './work_dirs/posec3d/slowonly_r50_u48_240e_gym_keypoint' +load_from = None +resume_from = None +find_unused_parameters = False diff --git a/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb.py b/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb.py new file mode 100644 index 0000000000..b0d8eeda49 --- /dev/null +++ b/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb.py @@ -0,0 +1,134 @@ +model = dict( + type='Recognizer3D', + backbone=dict( + type='ResNet3dSlowOnly', + depth=50, + pretrained=None, + in_channels=17, + base_channels=32, + num_stages=3, + out_indices=(2, ), + stage_blocks=(4, 6, 3), + conv1_stride_s=1, + pool1_stride_s=1, + inflate=(0, 1, 1), + spatial_strides=(2, 2, 2), + temporal_strides=(1, 1, 2), + dilations=(1, 1, 1)), + cls_head=dict( + type='I3DHead', + in_channels=512, + num_classes=99, + spatial_type='avg', + dropout_ratio=0.5), + train_cfg=dict(), + test_cfg=dict(average_clips='prob')) + +dataset_type = 'PoseDataset' +ann_file_train = 'data/posec3d/gym_train.pkl' +ann_file_val = 'data/posec3d/gym_val.pkl' +left = [1, 3, 5, 7, 9, 11, 13, 15] +right = [2, 4, 6, 8, 10, 12, 14, 16] +skeletons = [[0, 5], [0, 6], [5, 7], [7, 9], [6, 8], [8, 10], [5, 11], + [11, 13], [13, 15], [6, 12], [12, 14], [14, 16], [0, 1], [0, 2], + [1, 3], [2, 4], [11, 12]] +train_pipeline = [ + dict(type='UniformSampleFrames', clip_len=48), + dict(type='PoseDecode'), + dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True), + dict(type='Resize', scale=(-1, 64)), + dict(type='RandomResizedCrop', area_range=(0.56, 1.0)), + dict(type='Resize', scale=(56, 56), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5, left=left, right=right), + dict( + type='GeneratePoseTarget', + sigma=0.6, + use_score=True, + with_kp=False, + with_limb=True, + skeletons=skeletons), + dict(type='FormatShape', 
input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict(type='UniformSampleFrames', clip_len=48, num_clips=1, test_mode=True), + dict(type='PoseDecode'), + dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True), + dict(type='Resize', scale=(-1, 64)), + dict(type='CenterCrop', crop_size=64), + dict( + type='GeneratePoseTarget', + sigma=0.6, + use_score=True, + with_kp=False, + with_limb=True, + skeletons=skeletons), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='UniformSampleFrames', clip_len=48, num_clips=10, test_mode=True), + dict(type='PoseDecode'), + dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True), + dict(type='Resize', scale=(-1, 64)), + dict(type='CenterCrop', crop_size=64), + dict( + type='GeneratePoseTarget', + sigma=0.6, + use_score=True, + with_kp=False, + with_limb=True, + skeletons=skeletons, + double=True, + left=left, + right=right), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=16, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix='', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix='', + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix='', + pipeline=test_pipeline)) +# optimizer +optimizer = dict( + type='SGD', lr=0.2, momentum=0.9, + weight_decay=0.0003) # this lr is used for 8 gpus +optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2)) +# learning policy +lr_config = dict(policy='CosineAnnealing', by_epoch=False, min_lr=0) +total_epochs = 240 +checkpoint_config = dict(interval=10) +workflow = [('train', 10)] +evaluation = dict( + interval=10, + metrics=['top_k_accuracy', 'mean_class_accuracy'], + topk=(1, 5)) +log_config = dict( + interval=20, hooks=[ + dict(type='TextLoggerHook'), + ]) +dist_params = dict(backend='nccl') +log_level = 'INFO' +work_dir = './work_dirs/posec3d/slowonly_r50_u48_240e_gym_limb' +load_from = None +resume_from = None +find_unused_parameters = False diff --git a/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py b/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py new file mode 100644 index 0000000000..80627341e2 --- /dev/null +++ b/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py @@ -0,0 +1,130 @@ +model = dict( + type='Recognizer3D', + backbone=dict( + type='ResNet3dSlowOnly', + depth=50, + pretrained=None, + in_channels=17, + base_channels=32, + num_stages=3, + out_indices=(2, ), + stage_blocks=(4, 6, 3), + conv1_stride_s=1, + pool1_stride_s=1, + inflate=(0, 1, 1), + spatial_strides=(2, 2, 2), + temporal_strides=(1, 1, 2), + dilations=(1, 1, 1)), + cls_head=dict( + type='I3DHead', + in_channels=512, + num_classes=120, + spatial_type='avg', + dropout_ratio=0.5), + train_cfg=dict(), + test_cfg=dict(average_clips='prob')) + +dataset_type = 'PoseDataset' +ann_file_train = 'data/posec3d/ntu120_xsub_train.pkl' +ann_file_val = 'data/posec3d/ntu120_xsub_val.pkl' +left = [1, 3, 5, 7, 9, 11, 13, 15] +right = [2, 4, 6, 8, 10, 12, 14, 16] +train_pipeline = [ + dict(type='UniformSampleFrames', clip_len=48), + 
dict(type='PoseDecode'), + dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True), + dict(type='Resize', scale=(-1, 64)), + dict(type='RandomResizedCrop', area_range=(0.56, 1.0)), + dict(type='Resize', scale=(56, 56), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5, left=left, right=right), + dict( + type='GeneratePoseTarget', + sigma=0.6, + use_score=True, + with_kp=True, + with_limb=False), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict(type='UniformSampleFrames', clip_len=48, num_clips=1, test_mode=True), + dict(type='PoseDecode'), + dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True), + dict(type='Resize', scale=(-1, 64)), + dict(type='CenterCrop', crop_size=64), + dict( + type='GeneratePoseTarget', + sigma=0.6, + use_score=True, + with_kp=True, + with_limb=False), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='UniformSampleFrames', clip_len=48, num_clips=10, test_mode=True), + dict(type='PoseDecode'), + dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True), + dict(type='Resize', scale=(-1, 64)), + dict(type='CenterCrop', crop_size=64), + dict( + type='GeneratePoseTarget', + sigma=0.6, + use_score=True, + with_kp=True, + with_limb=False, + double=True, + left=left, + right=right), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=16, + workers_per_gpu=4, + test_dataloader=dict(videos_per_gpu=1), + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix='', + class_prob={i: 1 + int(i >= 60) + for i in range(120)}, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix='', + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix='', + pipeline=test_pipeline)) +# optimizer +optimizer = dict( + type='SGD', lr=0.2, momentum=0.9, + weight_decay=0.0003) # this lr is used for 8 gpus +optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2)) +# learning policy +lr_config = dict(policy='CosineAnnealing', by_epoch=False, min_lr=0) +total_epochs = 240 +checkpoint_config = dict(interval=10) +workflow = [('train', 10)] +evaluation = dict( + interval=10, + metrics=['top_k_accuracy', 'mean_class_accuracy'], + topk=(1, 5)) +log_config = dict( + interval=20, hooks=[ + dict(type='TextLoggerHook'), + ]) +dist_params = dict(backend='nccl') +log_level = 'INFO' +work_dir = './work_dirs/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint' +load_from = None +resume_from = None +find_unused_parameters = False diff --git a/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb.py b/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb.py new file mode 100644 index 0000000000..ddb0aa0ecb --- /dev/null +++ b/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb.py @@ -0,0 +1,136 @@ +model = dict( + type='Recognizer3D', + backbone=dict( + type='ResNet3dSlowOnly', + depth=50, + pretrained=None, + in_channels=17, + base_channels=32, + num_stages=3, + out_indices=(2, ), + stage_blocks=(4, 6, 3), + conv1_stride_s=1, + pool1_stride_s=1, + inflate=(0, 1, 1), + spatial_strides=(2, 2, 2), + temporal_strides=(1, 1, 2), + dilations=(1, 1, 1)), + cls_head=dict( 
+        type='I3DHead',
+        in_channels=512,
+        num_classes=120,
+        spatial_type='avg',
+        dropout_ratio=0.5),
+    train_cfg=dict(),
+    test_cfg=dict(average_clips='prob'))
+
+dataset_type = 'PoseDataset'
+ann_file_train = 'data/posec3d/ntu120_xsub_train.pkl'
+ann_file_val = 'data/posec3d/ntu120_xsub_val.pkl'
+left = [1, 3, 5, 7, 9, 11, 13, 15]
+right = [2, 4, 6, 8, 10, 12, 14, 16]
+skeletons = [[0, 5], [0, 6], [5, 7], [7, 9], [6, 8], [8, 10], [5, 11],
+             [11, 13], [13, 15], [6, 12], [12, 14], [14, 16], [0, 1], [0, 2],
+             [1, 3], [2, 4], [11, 12]]
+train_pipeline = [
+    dict(type='UniformSampleFrames', clip_len=48),
+    dict(type='PoseDecode'),
+    dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True),
+    dict(type='Resize', scale=(-1, 64)),
+    dict(type='RandomResizedCrop', area_range=(0.56, 1.0)),
+    dict(type='Resize', scale=(56, 56), keep_ratio=False),
+    dict(type='Flip', flip_ratio=0.5, left=left, right=right),
+    dict(
+        type='GeneratePoseTarget',
+        sigma=0.6,
+        use_score=True,
+        with_kp=False,
+        with_limb=True,
+        skeletons=skeletons),
+    dict(type='FormatShape', input_format='NCTHW'),
+    dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
+    dict(type='ToTensor', keys=['imgs', 'label'])
+]
+val_pipeline = [
+    dict(type='UniformSampleFrames', clip_len=48, num_clips=1, test_mode=True),
+    dict(type='PoseDecode'),
+    dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True),
+    dict(type='Resize', scale=(-1, 64)),
+    dict(type='CenterCrop', crop_size=64),
+    dict(
+        type='GeneratePoseTarget',
+        sigma=0.6,
+        use_score=True,
+        with_kp=False,
+        with_limb=True,
+        skeletons=skeletons),
+    dict(type='FormatShape', input_format='NCTHW'),
+    dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
+    dict(type='ToTensor', keys=['imgs'])
+]
+test_pipeline = [
+    dict(
+        type='UniformSampleFrames', clip_len=48, num_clips=10, test_mode=True),
+    dict(type='PoseDecode'),
+    dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True),
+    dict(type='Resize', scale=(-1, 64)),
+    dict(type='CenterCrop', crop_size=64),
+    dict(
+        type='GeneratePoseTarget',
+        sigma=0.6,
+        use_score=True,
+        with_kp=False,
+        with_limb=True,
+        skeletons=skeletons,
+        double=True,
+        left=left,
+        right=right),
+    dict(type='FormatShape', input_format='NCTHW'),
+    dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
+    dict(type='ToTensor', keys=['imgs'])
+]
+data = dict(
+    videos_per_gpu=16,
+    workers_per_gpu=4,
+    test_dataloader=dict(videos_per_gpu=1),
+    train=dict(
+        type=dataset_type,
+        ann_file=ann_file_train,
+        data_prefix='',
+        class_prob={i: 1 + int(i >= 60)
+                    for i in range(120)},
+        pipeline=train_pipeline),
+    val=dict(
+        type=dataset_type,
+        ann_file=ann_file_val,
+        data_prefix='',
+        pipeline=val_pipeline),
+    test=dict(
+        type=dataset_type,
+        ann_file=ann_file_val,
+        data_prefix='',
+        pipeline=test_pipeline))
+# optimizer
+optimizer = dict(
+    type='SGD', lr=0.2, momentum=0.9,
+    weight_decay=0.0003)  # this lr is used for 8 gpus
+optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2))
+# learning policy
+lr_config = dict(policy='CosineAnnealing', by_epoch=False, min_lr=0)
+total_epochs = 240
+checkpoint_config = dict(interval=10)
+workflow = [('train', 10)]
+evaluation = dict(
+    interval=10,
+    metrics=['top_k_accuracy', 'mean_class_accuracy'],
+    topk=(1, 5))
+log_config = dict(
+    interval=20, hooks=[
+        dict(type='TextLoggerHook'),
+    ])
+dist_params = dict(backend='nccl')
+log_level = 'INFO'
+work_dir = './work_dirs/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb'
+load_from = None
+resume_from = None
+find_unused_parameters = False
diff 
--git a/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint.py b/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint.py new file mode 100644 index 0000000000..a5fd0f2516 --- /dev/null +++ b/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint.py @@ -0,0 +1,128 @@ +model = dict( + type='Recognizer3D', + backbone=dict( + type='ResNet3dSlowOnly', + depth=50, + pretrained=None, + in_channels=17, + base_channels=32, + num_stages=3, + out_indices=(2, ), + stage_blocks=(4, 6, 3), + conv1_stride_s=1, + pool1_stride_s=1, + inflate=(0, 1, 1), + spatial_strides=(2, 2, 2), + temporal_strides=(1, 1, 2), + dilations=(1, 1, 1)), + cls_head=dict( + type='I3DHead', + in_channels=512, + num_classes=60, + spatial_type='avg', + dropout_ratio=0.5), + train_cfg=dict(), + test_cfg=dict(average_clips='prob')) + +dataset_type = 'PoseDataset' +ann_file_train = 'data/posec3d/ntu60_xsub_train.pkl' +ann_file_val = 'data/posec3d/ntu60_xsub_val.pkl' +left = [1, 3, 5, 7, 9, 11, 13, 15] +right = [2, 4, 6, 8, 10, 12, 14, 16] +train_pipeline = [ + dict(type='UniformSampleFrames', clip_len=48), + dict(type='PoseDecode'), + dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True), + dict(type='Resize', scale=(-1, 64)), + dict(type='RandomResizedCrop', area_range=(0.56, 1.0)), + dict(type='Resize', scale=(56, 56), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5, left=left, right=right), + dict( + type='GeneratePoseTarget', + sigma=0.6, + use_score=True, + with_kp=True, + with_limb=False), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict(type='UniformSampleFrames', clip_len=48, num_clips=1, test_mode=True), + dict(type='PoseDecode'), + dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True), + dict(type='Resize', scale=(-1, 64)), + dict(type='CenterCrop', crop_size=64), + dict( + type='GeneratePoseTarget', + sigma=0.6, + use_score=True, + with_kp=True, + with_limb=False), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='UniformSampleFrames', clip_len=48, num_clips=10, test_mode=True), + dict(type='PoseDecode'), + dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True), + dict(type='Resize', scale=(-1, 64)), + dict(type='CenterCrop', crop_size=64), + dict( + type='GeneratePoseTarget', + sigma=0.6, + use_score=True, + with_kp=True, + with_limb=False, + double=True, + left=left, + right=right), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=16, + workers_per_gpu=4, + test_dataloader=dict(videos_per_gpu=1), + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix='', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix='', + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix='', + pipeline=test_pipeline)) +# optimizer +optimizer = dict( + type='SGD', lr=0.2, momentum=0.9, + weight_decay=0.0003) # this lr is used for 8 gpus +optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2)) +# learning policy +lr_config = dict(policy='CosineAnnealing', by_epoch=False, min_lr=0) +total_epochs = 240 +checkpoint_config = dict(interval=10) +workflow = [('train', 10)] 
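+# validate every 10 epochs; report top-1/top-5 accuracy and mean class accuracy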
+evaluation = dict( + interval=10, + metrics=['top_k_accuracy', 'mean_class_accuracy'], + topk=(1, 5)) +log_config = dict( + interval=20, hooks=[ + dict(type='TextLoggerHook'), + ]) +dist_params = dict(backend='nccl') +log_level = 'INFO' +work_dir = './work_dirs/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint' +load_from = None +resume_from = None +find_unused_parameters = False diff --git a/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb.py b/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb.py new file mode 100644 index 0000000000..1847aa561e --- /dev/null +++ b/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb.py @@ -0,0 +1,134 @@ +model = dict( + type='Recognizer3D', + backbone=dict( + type='ResNet3dSlowOnly', + depth=50, + pretrained=None, + in_channels=17, + base_channels=32, + num_stages=3, + out_indices=(2, ), + stage_blocks=(4, 6, 3), + conv1_stride_s=1, + pool1_stride_s=1, + inflate=(0, 1, 1), + spatial_strides=(2, 2, 2), + temporal_strides=(1, 1, 2), + dilations=(1, 1, 1)), + cls_head=dict( + type='I3DHead', + in_channels=512, + num_classes=60, + spatial_type='avg', + dropout_ratio=0.5), + train_cfg=dict(), + test_cfg=dict(average_clips='prob')) + +dataset_type = 'PoseDataset' +ann_file_train = 'data/posec3d/ntu60_xsub_train.pkl' +ann_file_val = 'data/posec3d/ntu60_xsub_val.pkl' +left = [1, 3, 5, 7, 9, 11, 13, 15] +right = [2, 4, 6, 8, 10, 12, 14, 16] +skeletons = [[0, 5], [0, 6], [5, 7], [7, 9], [6, 8], [8, 10], [5, 11], + [11, 13], [13, 15], [6, 12], [12, 14], [14, 16], [0, 1], [0, 2], + [1, 3], [2, 4], [11, 12]] +train_pipeline = [ + dict(type='UniformSampleFrames', clip_len=48), + dict(type='PoseDecode'), + dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True), + dict(type='Resize', scale=(-1, 64)), + dict(type='RandomResizedCrop', area_range=(0.56, 1.0)), + dict(type='Resize', scale=(56, 56), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5, left=left, right=right), + dict( + type='GeneratePoseTarget', + sigma=0.6, + use_score=True, + with_kp=False, + with_limb=True, + skeletons=skeletons), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict(type='UniformSampleFrames', clip_len=48, num_clips=1, test_mode=True), + dict(type='PoseDecode'), + dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True), + dict(type='Resize', scale=(-1, 64)), + dict(type='CenterCrop', crop_size=64), + dict( + type='GeneratePoseTarget', + sigma=0.6, + use_score=True, + with_kp=False, + with_limb=True, + skeletons=skeletons), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='UniformSampleFrames', clip_len=48, num_clips=10, test_mode=True), + dict(type='PoseDecode'), + dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True), + dict(type='Resize', scale=(-1, 64)), + dict(type='CenterCrop', crop_size=64), + dict( + type='GeneratePoseTarget', + sigma=0.6, + use_score=True, + with_kp=False, + with_limb=True, + skeletons=skeletons, + double=True, + left=left, + right=right), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=16, + workers_per_gpu=4, + test_dataloader=dict(videos_per_gpu=1), + train=dict( + type=dataset_type, + ann_file=ann_file_train, + 
data_prefix='',
+        pipeline=train_pipeline),
+    val=dict(
+        type=dataset_type,
+        ann_file=ann_file_val,
+        data_prefix='',
+        pipeline=val_pipeline),
+    test=dict(
+        type=dataset_type,
+        ann_file=ann_file_val,
+        data_prefix='',
+        pipeline=test_pipeline))
+# optimizer
+optimizer = dict(
+    type='SGD', lr=0.2, momentum=0.9,
+    weight_decay=0.0003)  # this lr is used for 8 gpus
+optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2))
+# learning policy
+lr_config = dict(policy='CosineAnnealing', by_epoch=False, min_lr=0)
+total_epochs = 240
+checkpoint_config = dict(interval=10)
+workflow = [('train', 10)]
+evaluation = dict(
+    interval=10,
+    metrics=['top_k_accuracy', 'mean_class_accuracy'],
+    topk=(1, 5))
+log_config = dict(
+    interval=20, hooks=[
+        dict(type='TextLoggerHook'),
+    ])
+dist_params = dict(backend='nccl')
+log_level = 'INFO'
+work_dir = './work_dirs/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb'
+load_from = None
+resume_from = None
+find_unused_parameters = False

From b874d9d2e04827b034e3c7f9c38ca5a97167199c Mon Sep 17 00:00:00 2001
From: Kenny
Date: Tue, 27 Apr 2021 20:00:49 +0800
Subject: [PATCH 061/414] add downloading scripts

---
 tools/data/skeleton/README.md               | 22 +++++++++++++++++++++
 tools/data/skeleton/download_annotations.sh | 22 +++++++++++++++++++++
 2 files changed, 44 insertions(+)
 create mode 100644 tools/data/skeleton/README.md
 create mode 100644 tools/data/skeleton/download_annotations.sh

diff --git a/tools/data/skeleton/README.md b/tools/data/skeleton/README.md
new file mode 100644
index 0000000000..54bbe759c5
--- /dev/null
+++ b/tools/data/skeleton/README.md
@@ -0,0 +1,22 @@
+# Preparing Skeleton Dataset
+
+## Introduction
+
+We release the skeleton annotations used in [Revisiting Skeleton-based Action Recognition](). By default, we use [Faster-RCNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person.py) with a ResNet50 backbone for human detection and [HRNet-w32](https://github.com/open-mmlab/mmpose/blob/master/configs/top_down/hrnet/coco/hrnet_w32_coco_256x192.py) for single-person pose estimation. For FineGYM, we use ground-truth bounding boxes for the athlete instead of detection bounding boxes.
+
+## Prepare Annotations
+
+Currently, we support three datasets: FineGYM, NTU60_XSub and NTU120_XSub. You can execute the following script to prepare the annotations.
+
+```shell
+bash download_annotations.sh ${DATASET}
+```
+
+**TODO**:
+
+- [x] FineGYM
+- [x] NTU60_XSub
+- [x] NTU120_XSub
+- [ ] NTU60_XView
+- [ ] NTU120_XSet
+- [ ] Kinetics
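The annotation files fetched by the script below are pickles, each storing a list of dicts, one per clip. A minimal sketch for inspecting one item is shown here; the field names (`kp`, `kpscore`, `total_frames`, `frame_dir`, `label`) follow the ones consumed by `PoseDataset` and the visualization notebook above, and the path assumes the script's default download location:

```python
from mmcv import load

annos = load('../../../data/posec3d/gym_train.pkl')
anno = annos[0]
print(anno['frame_dir'], anno['label'], anno['total_frames'])
# 'kp' holds (x, y) coordinates, shape (num_person, num_frame, num_keypoint, 2);
# 'kpscore' holds matching confidence scores, shape (num_person, num_frame, num_keypoint)
print(anno['kp'].shape, anno['kpscore'].shape)
```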
Creating"; + mkdir -p ${DATA_DIR} +fi + +wget https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/posec3d/${DATASET}_train.pkl +wget https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/posec3d/${DATASET}_val.pkl + +mv ${DATASET}_train.pkl ${DATA_DIR} +mv ${DATASET}_val.pkl ${DATA_DIR} From 4f19052cceaff77f40949e6f3d9a208ddcc11ae1 Mon Sep 17 00:00:00 2001 From: Kenny Date: Tue, 27 Apr 2021 20:23:42 +0800 Subject: [PATCH 062/414] add README --- configs/skeleton/posec3d/README.md | 74 ++++++++++++++++++++++++++++++ 1 file changed, 74 insertions(+) create mode 100644 configs/skeleton/posec3d/README.md diff --git a/configs/skeleton/posec3d/README.md b/configs/skeleton/posec3d/README.md new file mode 100644 index 0000000000..2a74dde166 --- /dev/null +++ b/configs/skeleton/posec3d/README.md @@ -0,0 +1,74 @@ +# PoseC3D + +## Introduction + +[ALGORITHM] + +## Model Zoo + +### FineGYM + +|config |pseudo heatmap | gpus | backbone | Mean Top-1 | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|---| +|[slowonly_r50_u48_240e_gym_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint.py) |keypoint |8 x 2| SlowOnly-R50 |93.7 | | | | +|[slowonly_r50_u48_240e_gym_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb.py) |limb |8 x 2| SlowOnly-R50 |94.0 | | | | +|Fusion | || |94.3 | | | | + +### NTU60_XSub + +| config | pseudo heatmap | gpus | backbone | Top-1 | ckpt | log | json | +| :----------------------------------------------------------- | :------------: | :---: | :----------: | :---: | :--: | :--: | :--: | +| [slowonly_r50_u48_240e_ntu60_xsub_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint.py) | keypoint | 8 x 2 | SlowOnly-R50 | 93.7 | | | | +| [slowonly_r50_u48_240e_ntu60_xsub_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb.py) | limb | 8 x 2 | SlowOnly-R50 | 93.4 | | | | +| Fusion | | | | 94.1 | | | | + +### NTU120_XSub + +| config | pseudo heatmap | gpus | backbone | Top-1 | ckpt | log | json | +| :----------------------------------------------------------- | :------------: | :---: | :----------: | :---: | :--: | :--: | :--: | +| [slowonly_r50_u48_240e_ntu120_xsub_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py) | keypoint | 8 x 2 | SlowOnly-R50 | 86.3 | | | | +| [slowonly_r50_u48_240e_ntu120_xsub_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb.py) | limb | 8 x 2 | SlowOnly-R50 | 85.7 | | | | +| Fusion | | | | 86.9 | | | | + +Notes: + +1. The **gpus** indicates the number of gpu we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. + According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, + e.g., lr=0.01 for 8 GPUs x 8 videos/gpu and lr=0.04 for 16 GPUs x 16 videos/gpu. +2. The values in columns named after "reference" are the results got by testing on our dataset, using the checkpoints provided by the author with same model settings. The checkpoints for reference repo can be downloaded [here](https://drive.google.com/drive/folders/1sFfmP3yrfc7IzRshEELOby7-aEoymIFL?usp=sharing). + +## Train + +You can use the following command to train a model. + +```shell +python tools/train.py ${CONFIG_FILE} [optional arguments] +``` + +Example: train PoseC3D model on FineGYM dataset in a deterministic option with periodic validation. 
+ +```shell +python tools/train.py configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint.py \ + --work-dir work_dirs/slowonly_r50_u48_240e_gym_keypoint \ + --validate --seed 0 --deterministic +``` + +For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). + +## Test + +You can use the following command to test a model. + +```shell +python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] +``` + +Example: test PoseC3D model on FineGYM dataset and dump the result to a pickle file. + +```shell +python tools/test.py configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint.py \ + checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy mean_class_accuracy \ + --out result.pkl +``` + +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). From 26568f0ea8a420e1c35fba377a45756ef836bd0c Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Wed, 28 Apr 2021 22:11:41 +0800 Subject: [PATCH 063/414] Support Diving48 dataset (#835) * add diving64 * update diving * cn empty * add filelist generation * add docs * add doc * add doc * polish docs --- configs/recognition/tsm/README.md | 7 + configs/recognition/tsm/README_zh-CN.md | 7 + .../tsm_r50_video_1x1x16_50e_diving48_rgb.py | 101 ++++++++++++++ .../tsm_r50_video_1x1x8_50e_diving48_rgb.py | 99 ++++++++++++++ configs/recognition/tsn/README.md | 7 + configs/recognition/tsn/README_zh-CN.md | 7 + .../tsn_r50_video_1x1x16_100e_diving48_rgb.py | 97 ++++++++++++++ .../tsn_r50_video_1x1x8_100e_diving48_rgb.py | 97 ++++++++++++++ tools/data/build_file_list.py | 8 +- tools/data/diving48/README.md | 123 ++++++++++++++++++ tools/data/diving48/README_zh-CN.md | 123 ++++++++++++++++++ tools/data/diving48/download_annotations.sh | 16 +++ tools/data/diving48/download_videos.sh | 16 +++ tools/data/diving48/extract_frames.sh | 6 + tools/data/diving48/extract_rgb_frames.sh | 7 + .../diving48/extract_rgb_frames_opencv.sh | 7 + .../diving48/generate_rawframes_filelist.sh | 8 ++ .../data/diving48/generate_videos_filelist.sh | 8 ++ tools/data/parse_file_list.py | 28 ++++ tools/data/sthv2/README_zh-CN.md | 4 +- 20 files changed, 772 insertions(+), 4 deletions(-) create mode 100644 configs/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb.py create mode 100644 configs/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb.py create mode 100644 configs/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb.py create mode 100644 configs/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb.py create mode 100644 tools/data/diving48/README.md create mode 100644 tools/data/diving48/README_zh-CN.md create mode 100644 tools/data/diving48/download_annotations.sh create mode 100644 tools/data/diving48/download_videos.sh create mode 100644 tools/data/diving48/extract_frames.sh create mode 100644 tools/data/diving48/extract_rgb_frames.sh create mode 100644 tools/data/diving48/extract_rgb_frames_opencv.sh create mode 100644 tools/data/diving48/generate_rawframes_filelist.sh create mode 100644 tools/data/diving48/generate_videos_filelist.sh diff --git a/configs/recognition/tsm/README.md b/configs/recognition/tsm/README.md index add51e34b9..32bf6656b5 100644 --- a/configs/recognition/tsm/README.md +++ b/configs/recognition/tsm/README.md @@ -43,6 +43,13 @@ |[tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb.py)|short-side 320|8x4|ResNet50| 
ImageNet |71.60|90.34|x|x|x|8358|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb_20200724-d8ad84d2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log.json)| |[tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py)|short-side 320|8|MobileNetV2| ImageNet |68.46|88.64|x|x|x|3385|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/tsm_mobilenetv2_dense_320p_1x1x8_100e_kinetics400_rgb_20210202-61135809.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log.json)| +### Diving48 + +|config | gpus | backbone | pretrain | top1 acc| top5 acc | gpu_mem(M) | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[tsm_r50_video_1x1x8_50e_diving48_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb.py)| 8 | ResNet50 | ImageNet | 75.99 | 97.16 | 7070 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/tsm_r50_video_1x1x8_50e_diving48_rgb_20210426-aba5aa3d.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log.json)| +|[tsm_r50_video_1x1x16_50e_diving48_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb.py)| 8 | ResNet50 | ImageNet | 81.62 | 97.66 | 7070 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/tsm_r50_video_1x1x16_50e_diving48_rgb_20210426-aa9631c0.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log.json)| + ### Something-Something V1 |config | resolution | gpus | backbone| pretrain | top1 acc (efficient/accurate)| top5 acc (efficient/accurate)| reference top1 acc (efficient/accurate)| reference top5 acc (efficient/accurate)| gpu_mem(M) | ckpt | log| json| diff --git a/configs/recognition/tsm/README_zh-CN.md b/configs/recognition/tsm/README_zh-CN.md index 82b2eeb2a4..7e692684f9 100644 --- a/configs/recognition/tsm/README_zh-CN.md +++ b/configs/recognition/tsm/README_zh-CN.md @@ -43,6 +43,13 @@ |[tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb.py)|短边 320|8x4|ResNet50| ImageNet 
|71.60|90.34|x|x|x|8358|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb_20200724-d8ad84d2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log.json)| |[tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py)|短边 320|8|MobileNetV2| ImageNet |68.46|88.64|x|x|x|3385|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/tsm_mobilenetv2_dense_320p_1x1x8_100e_kinetics400_rgb_20210202-61135809.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log.json)| +### Diving48 + +|配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率| top5 准确率 | GPU 显存占用 (M) | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[tsm_r50_video_1x1x8_50e_diving48_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb.py)| 8 | ResNet50 | ImageNet | 75.99 | 97.16 | 7070 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/tsm_r50_video_1x1x8_50e_diving48_rgb_20210426-aba5aa3d.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log.json)| +|[tsm_r50_video_1x1x16_50e_diving48_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb.py)| 8 | ResNet50 | ImageNet | 81.62 | 97.66 | 7070 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/tsm_r50_video_1x1x16_50e_diving48_rgb_20210426-aa9631c0.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log.json)| + ### Something-Something V1 |配置文件 | 分辨率 | GPU 数量 | 主干网络| 预训练 | top1 准确率 (efficient/accurate)| top5 准确率 (efficient/accurate)| 参考代码的 top1 准确率 (efficient/accurate)| 参考代码的 top5 准确率 (efficient/accurate)| GPU 显存占用 (M)| ckpt | log| json| diff --git a/configs/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb.py b/configs/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb.py new file mode 100644 index 0000000000..ee348e8999 --- /dev/null +++ b/configs/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb.py @@ -0,0 +1,101 @@ +_base_ = [ + '../../_base_/models/tsm_r50.py', '../../_base_/schedules/sgd_tsm_50e.py', + '../../_base_/default_runtime.py' +] + +# model settings +model = dict( + backbone=dict(num_segments=16), + cls_head=dict(num_classes=48, num_segments=16)) + +# dataset settings +dataset_type = 'VideoDataset' +data_root = 'data/diving48/videos' +data_root_val = 'data/diving48/videos' +ann_file_train = 'data/diving48/diving48_train_list_videos.txt' +ann_file_val = 'data/diving48/diving48_val_list_videos.txt' +ann_file_test = 'data/diving48/diving48_val_list_videos.txt' +img_norm_cfg = dict( + mean=[123.675, 
116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) + +train_pipeline = [ + dict(type='DecordInit'), + dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=16), + dict(type='DecordDecode'), + dict(type='Resize', scale=(-1, 256)), + dict( + type='MultiScaleCrop', + input_size=224, + scales=(1, 0.875, 0.75, 0.66), + random_crop=False, + max_wh_scale_gap=1, + num_fixed_crops=13), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict(type='DecordInit'), + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=16, + test_mode=True), + dict(type='DecordDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict(type='DecordInit'), + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=16, + test_mode=True), + dict(type='DecordDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=4, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + pipeline=test_pipeline)) +evaluation = dict( + interval=1, + metrics=['top_k_accuracy', 'mean_class_accuracy'], +) + +# optimizer +optimizer = dict( + lr=0.005, # this lr is used for 8 gpus +) +# runtime settings +checkpoint_config = dict(interval=1) +work_dir = './work_dirs/tsm_r50_video_1x1x16_50e_diving48_rgb/' diff --git a/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb.py b/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb.py new file mode 100644 index 0000000000..3d4a439439 --- /dev/null +++ b/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb.py @@ -0,0 +1,99 @@ +_base_ = [ + '../../_base_/models/tsm_r50.py', '../../_base_/schedules/sgd_tsm_50e.py', + '../../_base_/default_runtime.py' +] + +# model settings +model = dict(cls_head=dict(num_classes=48)) + +# dataset settings +dataset_type = 'VideoDataset' +data_root = 'data/diving48/videos' +data_root_val = 'data/diving48/videos' +ann_file_train = 'data/diving48/diving48_train_list_videos.txt' +ann_file_val = 'data/diving48/diving48_val_list_videos.txt' +ann_file_test = 'data/diving48/diving48_val_list_videos.txt' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) + +train_pipeline = [ + dict(type='DecordInit'), + dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8), + dict(type='DecordDecode'), + dict(type='Resize', scale=(-1, 256)), + dict( + type='MultiScaleCrop', + input_size=224, + scales=(1, 0.875, 0.75, 0.66), + random_crop=False, + max_wh_scale_gap=1, + num_fixed_crops=13), + 
dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict(type='DecordInit'), + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=8, + test_mode=True), + dict(type='DecordDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict(type='DecordInit'), + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=8, + test_mode=True), + dict(type='DecordDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=8, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + pipeline=test_pipeline)) +evaluation = dict( + interval=1, + metrics=['top_k_accuracy', 'mean_class_accuracy'], +) + +# optimizer +optimizer = dict( + lr=0.01, # this lr is used for 8 gpus +) +# runtime settings +checkpoint_config = dict(interval=1) +work_dir = './work_dirs/tsm_r50_video_1x1x8_50e_diving48_rgb/' diff --git a/configs/recognition/tsn/README.md b/configs/recognition/tsn/README.md index facca2ad0f..3466febc01 100644 --- a/configs/recognition/tsn/README.md +++ b/configs/recognition/tsn/README.md @@ -25,6 +25,13 @@ [1] We report the performance on UCF-101 split1. 
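+The Diving48 configs introduced below follow the usual MMAction2 training recipe; as a sketch (assuming 8 GPUs, matching the tables):
+
+```shell
+bash tools/dist_train.sh configs/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb.py 8 --validate
+```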
+### Diving48 + +|config | gpus | backbone | pretrain | top1 acc| top5 acc | gpu_mem(M) | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[tsn_r50_video_1x1x8_100e_diving48_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb.py)|8| ResNet50 | ImageNet | 71.27 | 95.74 | 5699 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/tsn_r50_video_1x1x8_100e_diving48_rgb_20210426-6dde0185.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/20210426_014138.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/20210426_014138.log.json)| +|[tsn_r50_video_1x1x16_100e_diving48_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb.py)|8| ResNet50 | ImageNet | 76.75 | 96.95 | 5705 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/tsn_r50_video_1x1x16_100e_diving48_rgb_20210426-63c5f2f7.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/20210426_014103.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/20210426_014103.log.json)| + ### HMDB51 |config | gpus | backbone | pretrain | top1 acc| top5 acc | gpu_mem(M) | ckpt | log| json| diff --git a/configs/recognition/tsn/README_zh-CN.md b/configs/recognition/tsn/README_zh-CN.md index ff3e4818c9..f6e9c08c61 100644 --- a/configs/recognition/tsn/README_zh-CN.md +++ b/configs/recognition/tsn/README_zh-CN.md @@ -25,6 +25,13 @@ [1] 这里汇报的是 UCF-101 的 split1 部分的结果。 +### Diving48 + +|配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率| top5 准确率 | GPU 显存占用 (M) | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[tsn_r50_video_1x1x8_100e_diving48_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb.py)|8| ResNet50 | ImageNet | 71.27 | 95.74 | 5699 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/tsn_r50_video_1x1x8_100e_diving48_rgb_20210426-6dde0185.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/20210426_014138.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/20210426_014138.log.json)| +|[tsn_r50_video_1x1x16_100e_diving48_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb.py)|8| ResNet50 | ImageNet | 76.75 | 96.95 | 5705 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/tsn_r50_video_1x1x16_100e_diving48_rgb_20210426-63c5f2f7.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/20210426_014103.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/20210426_014103.log.json)| + ### HMDB51 |配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率| top5 准确率 | GPU 显存占用 (M) | ckpt | log| json| diff --git a/configs/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb.py b/configs/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb.py new file mode 100644 index 0000000000..c0ce9e9e04 --- /dev/null +++ b/configs/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb.py @@ -0,0 +1,97 @@ +_base_ = [ + '../../_base_/models/tsn_r50.py', '../../_base_/schedules/sgd_100e.py', + '../../_base_/default_runtime.py' +] + +# model settings +model = 
dict(cls_head=dict(num_classes=48))
+
+# dataset settings
+dataset_type = 'VideoDataset'
+data_root = 'data/diving48/videos'
+data_root_val = 'data/diving48/videos'
+ann_file_train = 'data/diving48/diving48_train_list_videos.txt'
+ann_file_val = 'data/diving48/diving48_val_list_videos.txt'
+ann_file_test = 'data/diving48/diving48_val_list_videos.txt'
+img_norm_cfg = dict(
+    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False)
+
+train_pipeline = [
+    dict(type='DecordInit'),
+    # 16 clips per video, matching the 1x1x16 sampling in the file name
+    dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=16),
+    dict(type='DecordDecode'),
+    dict(type='Resize', scale=(-1, 256)),
+    dict(
+        type='MultiScaleCrop',
+        input_size=224,
+        scales=(1, 0.875, 0.75, 0.66),
+        random_crop=False,
+        max_wh_scale_gap=1),
+    dict(type='Resize', scale=(224, 224), keep_ratio=False),
+    dict(type='Flip', flip_ratio=0.5),
+    dict(type='Normalize', **img_norm_cfg),
+    dict(type='FormatShape', input_format='NCHW'),
+    dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
+    dict(type='ToTensor', keys=['imgs', 'label'])
+]
+val_pipeline = [
+    dict(type='DecordInit'),
+    dict(
+        type='SampleFrames',
+        clip_len=1,
+        frame_interval=1,
+        num_clips=16,
+        test_mode=True),
+    dict(type='DecordDecode'),
+    dict(type='Resize', scale=(-1, 256)),
+    dict(type='CenterCrop', crop_size=224),
+    dict(type='Normalize', **img_norm_cfg),
+    dict(type='FormatShape', input_format='NCHW'),
+    dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
+    dict(type='ToTensor', keys=['imgs'])
+]
+test_pipeline = [
+    dict(type='DecordInit'),
+    dict(
+        type='SampleFrames',
+        clip_len=1,
+        frame_interval=1,
+        num_clips=16,
+        test_mode=True),
+    dict(type='DecordDecode'),
+    dict(type='Resize', scale=(-1, 256)),
+    dict(type='CenterCrop', crop_size=224),
+    dict(type='Normalize', **img_norm_cfg),
+    dict(type='FormatShape', input_format='NCHW'),
+    dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
+    dict(type='ToTensor', keys=['imgs'])
+]
+data = dict(
+    videos_per_gpu=4,
+    workers_per_gpu=4,
+    train=dict(
+        type=dataset_type,
+        ann_file=ann_file_train,
+        data_prefix=data_root,
+        pipeline=train_pipeline),
+    val=dict(
+        type=dataset_type,
+        ann_file=ann_file_val,
+        data_prefix=data_root_val,
+        pipeline=val_pipeline),
+    test=dict(
+        type=dataset_type,
+        ann_file=ann_file_test,
+        data_prefix=data_root_val,
+        pipeline=test_pipeline))
+evaluation = dict(
+    interval=5, metrics=['top_k_accuracy', 'mean_class_accuracy'])
+
+optimizer = dict(
+    type='SGD',
+    lr=0.00125,  # this lr is used for 8 gpus
+    momentum=0.9,
+    weight_decay=0.0001)
+
+# runtime settings
+work_dir = './work_dirs/tsn_r50_video_1x1x16_100e_diving48_rgb/'
diff --git a/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb.py b/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb.py
new file mode 100644
index 0000000000..d9ba6bf97a
--- /dev/null
+++ b/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb.py
@@ -0,0 +1,97 @@
+_base_ = [
+    '../../_base_/models/tsn_r50.py', '../../_base_/schedules/sgd_100e.py',
+    '../../_base_/default_runtime.py'
+]
+
+# model settings
+model = dict(cls_head=dict(num_classes=48))
+
+# dataset settings
+dataset_type = 'VideoDataset'
+data_root = 'data/diving48/videos'
+data_root_val = 'data/diving48/videos'
+ann_file_train = 'data/diving48/diving48_train_list_videos.txt'
+ann_file_val = 'data/diving48/diving48_val_list_videos.txt'
+ann_file_test = 'data/diving48/diving48_val_list_videos.txt'
+img_norm_cfg = dict(
+    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375],
to_bgr=False) + +train_pipeline = [ + dict(type='DecordInit'), + dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8), + dict(type='DecordDecode'), + dict(type='Resize', scale=(-1, 256)), + dict( + type='MultiScaleCrop', + input_size=224, + scales=(1, 0.875, 0.75, 0.66), + random_crop=False, + max_wh_scale_gap=1), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict(type='DecordInit'), + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=8, + test_mode=True), + dict(type='DecordDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict(type='DecordInit'), + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=8, + test_mode=True), + dict(type='DecordDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=8, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + pipeline=test_pipeline)) +evaluation = dict( + interval=5, metrics=['top_k_accuracy', 'mean_class_accuracy']) + +optimizer = dict( + type='SGD', + lr=0.0025, # this lr is used for 8 gpus + momentum=0.9, + weight_decay=0.0001) + +# runtime settings +work_dir = './work_dirs/tsn_r50_video_1x1x8_100e_diving48_rgb/' diff --git a/tools/data/build_file_list.py b/tools/data/build_file_list.py index 5676121567..cf72692477 100644 --- a/tools/data/build_file_list.py +++ b/tools/data/build_file_list.py @@ -6,7 +6,8 @@ from mmcv.runner import set_random_seed from tools.data.anno_txt2json import lines2dictlist -from tools.data.parse_file_list import (parse_directory, parse_hmdb51_split, +from tools.data.parse_file_list import (parse_directory, parse_diving48_splits, + parse_hmdb51_split, parse_jester_splits, parse_kinetics_splits, parse_mit_splits, parse_mmit_splits, @@ -21,7 +22,8 @@ def parse_args(): type=str, choices=[ 'ucf101', 'kinetics400', 'kinetics600', 'kinetics700', 'thumos14', - 'sthv1', 'sthv2', 'mit', 'mmit', 'activitynet', 'hmdb51', 'jester' + 'sthv1', 'sthv2', 'mit', 'mmit', 'activitynet', 'hmdb51', 'jester', + 'diving48' ], help='dataset to be built file list') parser.add_argument( @@ -205,6 +207,8 @@ def main(): splits = parse_hmdb51_split(args.level) elif args.dataset == 'jester': splits = parse_jester_splits(args.level) + elif args.dataset == 'diving48': + splits = parse_diving48_splits() else: raise ValueError( f"Supported datasets are 'ucf101, sthv1, sthv2', 'jester', " diff --git a/tools/data/diving48/README.md b/tools/data/diving48/README.md new file mode 100644 index 0000000000..588cddd173 --- /dev/null +++ 
b/tools/data/diving48/README.md
@@ -0,0 +1,123 @@
+# Preparing Diving48
+
+## Introduction
+
+
+
+```BibTeX
+@inproceedings{li2018resound,
+  title={Resound: Towards action recognition without representation bias},
+  author={Li, Yingwei and Li, Yi and Vasconcelos, Nuno},
+  booktitle={Proceedings of the European Conference on Computer Vision (ECCV)},
+  pages={513--528},
+  year={2018}
+}
+```
+
+For basic dataset information, you can refer to the official dataset [website](http://www.svcl.ucsd.edu/projects/resound/dataset.html).
+Before we start, please make sure that the directory is located at `$MMACTION2/tools/data/diving48/`.
+
+## Step 1. Prepare Annotations
+
+You can run the following script to download annotations (considering the correctness of annotation files, we only download the V2 version here).
+
+```shell
+bash download_annotations.sh
+```
+
+## Step 2. Prepare Videos
+
+You can run the following script to download videos.
+
+```shell
+bash download_videos.sh
+```
+
+## Step 3. Prepare RGB and Flow
+
+This part is **optional** if you only want to use the video loader.
+
+The frames provided in the official compressed file are not complete. You may need to go through the following extraction steps to get the complete frames.
+
+Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow).
+
+If you have plenty of SSD space, then we recommend extracting frames there for better I/O performance.
+
+You can run the following script to soft link the SSD.
+
+```shell
+# execute these two lines (assume the SSD is mounted at "/mnt/SSD/")
+mkdir /mnt/SSD/diving48_extracted/
+ln -s /mnt/SSD/diving48_extracted/ ../../../data/diving48/rawframes
+```
+
+If you only want to play with RGB frames (since extracting optical flow can be time-consuming), consider running the following script to extract **RGB-only** frames using denseflow.
+
+```shell
+cd $MMACTION2/tools/data/diving48/
+bash extract_rgb_frames.sh
+```
+
+If you didn't install denseflow, you can still extract RGB frames using OpenCV with the following script, but the frames will keep their original size.
+
+```shell
+cd $MMACTION2/tools/data/diving48/
+bash extract_rgb_frames_opencv.sh
+```
+
+If both are required, run the following script to extract frames.
+
+```shell
+cd $MMACTION2/tools/data/diving48/
+bash extract_frames.sh
+```
+
+## Step 4. Generate File List
+
+You can run the following scripts to generate file lists in the rawframes and videos formats.
+
+```shell
+bash generate_videos_filelist.sh
+bash generate_rawframes_filelist.sh
+```
+
+## Step 5. Check Directory Structure
+
+After the whole data process for Diving48 preparation,
+you will get the rawframes (RGB + Flow), videos and annotation files for Diving48.
+
+In the context of the whole project (for Diving48 only), the folder structure will look like:
+
+```
+mmaction2
+├── mmaction
+├── tools
+├── configs
+├── data
+│   ├── diving48
+│   │   ├── diving48_{train,val}_list_rawframes.txt
+│   │   ├── diving48_{train,val}_list_videos.txt
+│   │   ├── annotations
+│   |   |   ├── Diving48_V2_train.json
+│   |   |   ├── Diving48_V2_test.json
+│   |   |   ├── Diving48_vocab.json
+│   |   ├── videos
+│   |   |   ├── _8Vy3dlHg2w_00000.mp4
+│   |   |   ├── _8Vy3dlHg2w_00001.mp4
+│   |   |   ├── ...
+│   |   ├── rawframes
+│   |   |   ├── 2x00lRzlTVQ_00000
+│   |   |   |   ├── img_00001.jpg
+│   |   |   |   ├── img_00002.jpg
+│   |   |   |   ├── ...
+│   |   |   |   ├── flow_x_00001.jpg
+│   |   |   |   ├── flow_x_00002.jpg
+│   |   |   |   ├── ...
+│   |   |   |   ├── flow_y_00001.jpg
+│   |   |   |   ├── flow_y_00002.jpg
+│   |   |   |   ├── ...
+│   |   |   ├── 2x00lRzlTVQ_00001
+│   |   |   ├── ...
+```
+
+For training and evaluating on Diving48, please refer to [getting_started.md](/docs/getting_started.md).
diff --git a/tools/data/diving48/README_zh-CN.md b/tools/data/diving48/README_zh-CN.md
new file mode 100644
index 0000000000..e91f8729a5
--- /dev/null
+++ b/tools/data/diving48/README_zh-CN.md
@@ -0,0 +1,123 @@
+# 准备 Diving48
+
+## 简介
+
+
+
+```BibTeX
+@inproceedings{li2018resound,
+  title={Resound: Towards action recognition without representation bias},
+  author={Li, Yingwei and Li, Yi and Vasconcelos, Nuno},
+  booktitle={Proceedings of the European Conference on Computer Vision (ECCV)},
+  pages={513--528},
+  year={2018}
+}
+```
+
+用户可参考该数据集的 [官网](http://www.svcl.ucsd.edu/projects/resound/dataset.html),以获取数据集相关的基本信息。
+在数据集准备前,请确保命令行当前路径为 `$MMACTION2/tools/data/diving48/`。
+
+## 步骤 1. 下载标注文件
+
+用户可以使用以下命令下载标注文件(考虑到标注的准确性,这里仅下载 V2 版本)。
+
+```shell
+bash download_annotations.sh
+```
+
+## 步骤 2. 准备视频
+
+用户可以使用以下命令下载视频。
+
+```shell
+bash download_videos.sh
+```
+
+## 步骤 3. 抽取 RGB 帧和光流
+
+如果用户只想使用视频加载训练,则该部分是 **可选项**。
+
+官网提供的帧压缩包并不完整。若想获取完整的数据,可以使用以下步骤解帧。
+
+在抽取视频帧和光流之前,请参考 [安装指南](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。
+
+如果拥有大量的 SSD 存储空间,则推荐将抽取的帧存储至 I/O 性能更优秀的 SSD 中。
+
+可以运行以下命令为 SSD 建立软链接。
+
+```shell
+# 执行这两行进行抽取(假设 SSD 挂载在 "/mnt/SSD/")
+mkdir /mnt/SSD/diving48_extracted/
+ln -s /mnt/SSD/diving48_extracted/ ../../../data/diving48/rawframes
+```
+
+如果用户需要抽取 RGB 帧(因为抽取光流的过程十分耗时),可以考虑运行以下命令使用 denseflow **只抽取 RGB 帧**。
+
+```shell
+cd $MMACTION2/tools/data/diving48/
+bash extract_rgb_frames.sh
+```
+
+如果用户没有安装 denseflow,则可以运行以下命令使用 OpenCV 抽取 RGB 帧。然而,该方法只能抽取与原始视频分辨率相同的帧。
+
+```shell
+cd $MMACTION2/tools/data/diving48/
+bash extract_rgb_frames_opencv.sh
+```
+
+如果用户想抽取 RGB 帧和光流,则可以运行以下脚本进行抽取。
+
+```shell
+cd $MMACTION2/tools/data/diving48/
+bash extract_frames.sh
+```
+
+## 步骤 4. 生成文件列表
+
+用户可以通过运行以下命令生成帧和视频格式的文件列表。
+
+```shell
+bash generate_videos_filelist.sh
+bash generate_rawframes_filelist.sh
+```
+
+## 步骤 5. 检查文件夹结构
+
+在完成所有 Diving48 数据集准备流程后,
+用户可以获得对应的 RGB + 光流文件,视频文件以及标注文件。
+
+在整个 MMAction2 文件夹下,Diving48 的文件结构如下:
+
+```
+mmaction2
+├── mmaction
+├── tools
+├── configs
+├── data
+│   ├── diving48
+│   │   ├── diving48_{train,val}_list_rawframes.txt
+│   │   ├── diving48_{train,val}_list_videos.txt
+│   │   ├── annotations
+│   |   |   ├── Diving48_V2_train.json
+│   |   |   ├── Diving48_V2_test.json
+│   |   |   ├── Diving48_vocab.json
+│   |   ├── videos
+│   |   |   ├── _8Vy3dlHg2w_00000.mp4
+│   |   |   ├── _8Vy3dlHg2w_00001.mp4
+│   |   |   ├── ...
+│   |   ├── rawframes
+│   |   |   ├── 2x00lRzlTVQ_00000
+│   |   |   |   ├── img_00001.jpg
+│   |   |   |   ├── img_00002.jpg
+│   |   |   |   ├── ...
+│   |   |   |   ├── flow_x_00001.jpg
+│   |   |   |   ├── flow_x_00002.jpg
+│   |   |   |   ├── ...
+│   |   |   |   ├── flow_y_00001.jpg
+│   |   |   |   ├── flow_y_00002.jpg
+│   |   |   |   ├── ...
+│   |   |   ├── 2x00lRzlTVQ_00001
+│   |   |   ├── ...
+```
+
+关于对 Diving48 进行训练和验证,可以参考 [基础教程](/docs_zh_CN/getting_started.md)。
diff --git a/tools/data/diving48/download_annotations.sh b/tools/data/diving48/download_annotations.sh
new file mode 100644
index 0000000000..1f8845672b
--- /dev/null
+++ b/tools/data/diving48/download_annotations.sh
@@ -0,0 +1,16 @@
+#!/usr/bin/env bash
+
+DATA_DIR="../../../data/diving48/annotations"
+
+if [[ ! -d "${DATA_DIR}" ]]; then
+  echo "${DATA_DIR} does not exist. Creating";
+  mkdir -p ${DATA_DIR}
+fi
+
+cd ${DATA_DIR}
+
+wget http://www.svcl.ucsd.edu/projects/resound/Diving48_vocab.json
+wget http://www.svcl.ucsd.edu/projects/resound/Diving48_V2_train.json
+wget http://www.svcl.ucsd.edu/projects/resound/Diving48_V2_test.json
+
+cd -
diff --git a/tools/data/diving48/download_videos.sh b/tools/data/diving48/download_videos.sh
new file mode 100644
index 0000000000..757f443fc9
--- /dev/null
+++ b/tools/data/diving48/download_videos.sh
@@ -0,0 +1,16 @@
+#!/usr/bin/env bash
+
+DATA_DIR="../../../data/diving48/"
+
+if [[ ! -d "${DATA_DIR}" ]]; then
+  echo "${DATA_DIR} does not exist. Creating";
+  mkdir -p ${DATA_DIR}
+fi
+
+cd ${DATA_DIR}
+
+wget http://www.svcl.ucsd.edu/projects/resound/Diving48_rgb.tar.gz --no-check-certificate
+tar -zxvf Diving48_rgb.tar.gz
+mv ./rgb ./videos
+
+cd -
diff --git a/tools/data/diving48/extract_frames.sh b/tools/data/diving48/extract_frames.sh
new file mode 100644
index 0000000000..1563d9993f
--- /dev/null
+++ b/tools/data/diving48/extract_frames.sh
@@ -0,0 +1,6 @@
+#!/usr/bin/env bash
+
+cd ../
+python build_rawframes.py ../../data/diving48/videos/ ../../data/diving48/rawframes/ --task both --level 1 --flow-type tvl1 --ext mp4
+echo "Raw frames (RGB and tv-l1) Generated"
+cd -
diff --git a/tools/data/diving48/extract_rgb_frames.sh b/tools/data/diving48/extract_rgb_frames.sh
new file mode 100644
index 0000000000..830d1433a3
--- /dev/null
+++ b/tools/data/diving48/extract_rgb_frames.sh
@@ -0,0 +1,7 @@
+#!/usr/bin/env bash
+
+cd ../
+python build_rawframes.py ../../data/diving48/videos/ ../../data/diving48/rawframes/ --task rgb --level 1 --ext mp4
+echo "Generate raw frames (RGB only)"
+
+cd -
diff --git a/tools/data/diving48/extract_rgb_frames_opencv.sh b/tools/data/diving48/extract_rgb_frames_opencv.sh
new file mode 100644
index 0000000000..db4c83c313
--- /dev/null
+++ b/tools/data/diving48/extract_rgb_frames_opencv.sh
@@ -0,0 +1,7 @@
+#!/usr/bin/env bash
+
+cd ../
+python build_rawframes.py ../../data/diving48/videos/ ../../data/diving48/rawframes/ --task rgb --level 1 --ext mp4 --use-opencv
+echo "Generate raw frames (RGB only)"
+
+cd -
diff --git a/tools/data/diving48/generate_rawframes_filelist.sh b/tools/data/diving48/generate_rawframes_filelist.sh
new file mode 100644
index 0000000000..96d7397607
--- /dev/null
+++ b/tools/data/diving48/generate_rawframes_filelist.sh
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+
+cd ../../../
+PYTHONPATH=. python tools/data/build_file_list.py diving48 data/diving48/rawframes/ --num-split 1 --level 1 --subset train --format rawframes --shuffle
+PYTHONPATH=. python tools/data/build_file_list.py diving48 data/diving48/rawframes/ --num-split 1 --level 1 --subset val --format rawframes --shuffle
+echo "Filelist for rawframes generated."
+
+cd tools/data/diving48/
diff --git a/tools/data/diving48/generate_videos_filelist.sh b/tools/data/diving48/generate_videos_filelist.sh
new file mode 100644
index 0000000000..68d7ff199c
--- /dev/null
+++ b/tools/data/diving48/generate_videos_filelist.sh
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+
+cd ../../../
+PYTHONPATH=. python tools/data/build_file_list.py diving48 data/diving48/videos/ --num-split 1 --level 1 --subset train --format videos --shuffle
+PYTHONPATH=. python tools/data/build_file_list.py diving48 data/diving48/videos/ --num-split 1 --level 1 --subset val --format videos --shuffle
+echo "Filelist for videos generated."
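+# A sketch of the expected result, assuming the default behaviour of
+# build_file_list.py: each line of the generated list pairs a video file
+# with its integer class label, e.g. "_8Vy3dlHg2w_00000.mp4 22"
+# (the label value here is made up for illustration).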
+
+cd tools/data/diving48/
diff --git a/tools/data/parse_file_list.py b/tools/data/parse_file_list.py
index 9276f05ad4..3adb3366bd 100644
--- a/tools/data/parse_file_list.py
+++ b/tools/data/parse_file_list.py
@@ -504,3 +504,31 @@ def line_to_map(line):
         splits.append((train_list, test_list))
 
     return splits
+
+
+def parse_diving48_splits():
+    """Parse the Diving48 V2 jsons into (video_name, label) split lists."""
+    train_file = 'data/diving48/annotations/Diving48_V2_train.json'
+    test_file = 'data/diving48/annotations/Diving48_V2_test.json'
+
+    train = json.load(open(train_file))
+    test = json.load(open(test_file))
+
+    # class_index_file = 'data/diving48/annotations/Diving48_vocab.json'
+    # class_list = json.load(open(class_index_file))
+
+    train_list = []
+    test_list = []
+
+    for item in train:
+        vid_name = item['vid_name']
+        label = item['label']
+        train_list.append((vid_name, label))
+
+    for item in test:
+        vid_name = item['vid_name']
+        label = item['label']
+        test_list.append((vid_name, label))
+
+    splits = ((train_list, test_list), )
+    return splits
diff --git a/tools/data/sthv2/README_zh-CN.md b/tools/data/sthv2/README_zh-CN.md
index cce654ce84..7d8080c5a4 100644
--- a/tools/data/sthv2/README_zh-CN.md
+++ b/tools/data/sthv2/README_zh-CN.md
@@ -4,7 +4,7 @@
 
 
-```
+```BibTeX
 @misc{goyal2017something,
   title={The "something something" video database for learning and evaluating visual common sense},
   author={Raghav Goyal and Samira Ebrahimi Kahou and Vincent Michalski and Joanna Materzyńska and Susanne Westphal and Heuna Kim and Valentin Haenel and Ingo Fruend and Peter Yianilos and Moritz Mueller-Freitag and Florian Hoppe and Christian Thurau and Ingo Bax and Roland Memisevic},
@@ -32,7 +32,7 @@ cat 20bn-something-something-v2-?? | tar zx
 cd $MMACTION2/tools/data/sthv2/
 ```
 
-## Step 3. 抽取 RGB 帧和光流
+## 步骤 3. 抽取 RGB 帧和光流
 
 如果用户只想使用视频加载训练,则该部分是 **可选项**。
From 0e760a1f724737f2fd29ff28e06a89851db544dd Mon Sep 17 00:00:00 2001
From: Kenny
Date: Thu, 29 Apr 2021 17:52:13 +0800
Subject: [PATCH 064/414] update README

---
 configs/skeleton/posec3d/README.md | 73 ++++++++++++++++++++++------
 tools/data/skeleton/README.md      | 53 +++++++++++++++++++++-
 2 files changed, 111 insertions(+), 15 deletions(-)

diff --git a/configs/skeleton/posec3d/README.md b/configs/skeleton/posec3d/README.md
index 2a74dde166..0a6e402045 100644
--- a/configs/skeleton/posec3d/README.md
+++ b/configs/skeleton/posec3d/README.md
@@ -2,33 +2,78 @@
 
 ## Introduction
 
-[ALGORITHM]
+
+
+```BibTeX
+@misc{duan2021revisiting,
+    title={Revisiting Skeleton-based Action Recognition},
+    author={Haodong Duan and Yue Zhao and Kai Chen and Dian Shao and Dahua Lin and Bo Dai},
+    year={2021},
+    eprint={2104.13586},
+    archivePrefix={arXiv},
+    primaryClass={cs.CV}
+}
+```
+
+[Demo GIFs: Pose Estimation Results | Keypoint Heatmap Volume Visualization | Limb Heatmap Volume Visualization]
+
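+To evaluate one of the released checkpoints listed below, a typical MMAction2 test command looks like the following sketch (replace the checkpoint path with a downloaded one):
+
+```shell
+python tools/test.py configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint.py \
+    /path/to/checkpoint.pth --eval top_k_accuracy mean_class_accuracy
+```
+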
## Model Zoo ### FineGYM |config |pseudo heatmap | gpus | backbone | Mean Top-1 | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|---| -|[slowonly_r50_u48_240e_gym_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint.py) |keypoint |8 x 2| SlowOnly-R50 |93.7 | | | | -|[slowonly_r50_u48_240e_gym_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb.py) |limb |8 x 2| SlowOnly-R50 |94.0 | | | | +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:-:| +|[slowonly_r50_u48_240e_gym_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint.py) |keypoint |8 x 2| SlowOnly-R50 |93.7 | [ckpt](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint-b07a98a0.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint.log) | [json](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint.json) | +|[slowonly_r50_u48_240e_gym_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb.py) |limb |8 x 2| SlowOnly-R50 |94.0 | [ckpt](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb-c0d7b482.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb.log) | [json](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb.json) | |Fusion | || |94.3 | | | | ### NTU60_XSub -| config | pseudo heatmap | gpus | backbone | Top-1 | ckpt | log | json | -| :----------------------------------------------------------- | :------------: | :---: | :----------: | :---: | :--: | :--: | :--: | -| [slowonly_r50_u48_240e_ntu60_xsub_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint.py) | keypoint | 8 x 2 | SlowOnly-R50 | 93.7 | | | | -| [slowonly_r50_u48_240e_ntu60_xsub_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb.py) | limb | 8 x 2 | SlowOnly-R50 | 93.4 | | | | -| Fusion | | | | 94.1 | | | | +| config | pseudo heatmap | gpus | backbone | Top-1 | ckpt | log | json | +| :----------------------------------------------------------- | :------------: | :---: | :----------: | :---: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [slowonly_r50_u48_240e_ntu60_xsub_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint.py) | keypoint | 8 x 2 | SlowOnly-R50 | 93.7 | [ckpt](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint-f3adabf1.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint.log) | [json](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint.json) | +| [slowonly_r50_u48_240e_ntu60_xsub_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb.py) | limb | 8 x 2 | SlowOnly-R50 | 93.4 | 
[ckpt](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb-1d69006a.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb.log) | [json](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb.json) | +| Fusion | | | | 94.1 | | | | ### NTU120_XSub -| config | pseudo heatmap | gpus | backbone | Top-1 | ckpt | log | json | -| :----------------------------------------------------------- | :------------: | :---: | :----------: | :---: | :--: | :--: | :--: | -| [slowonly_r50_u48_240e_ntu120_xsub_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py) | keypoint | 8 x 2 | SlowOnly-R50 | 86.3 | | | | -| [slowonly_r50_u48_240e_ntu120_xsub_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb.py) | limb | 8 x 2 | SlowOnly-R50 | 85.7 | | | | -| Fusion | | | | 86.9 | | | | +| config | pseudo heatmap | gpus | backbone | Top-1 | ckpt | log | json | +| :----------------------------------------------------------- | :------------: | :---: | :----------: | :---: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [slowonly_r50_u48_240e_ntu120_xsub_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py) | keypoint | 8 x 2 | SlowOnly-R50 | 86.3 | [ckpt](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint-6736b03f.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint.log) | [json](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint.json) | +| [slowonly_r50_u48_240e_ntu120_xsub_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb.py) | limb | 8 x 2 | SlowOnly-R50 | 85.7 | [ckpt](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb-803c2317.pth?) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.log) | [json](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.json) | +| Fusion | | | | 86.9 | | | | Notes: diff --git a/tools/data/skeleton/README.md b/tools/data/skeleton/README.md index 54bbe759c5..c941c61964 100644 --- a/tools/data/skeleton/README.md +++ b/tools/data/skeleton/README.md @@ -1,8 +1,21 @@ # Preparing Skeleton Dataset + + +```BibTeX +@misc{duan2021revisiting, + title={Revisiting Skeleton-based Action Recognition}, + author={Haodong Duan and Yue Zhao and Kai Chen and Dian Shao and Dahua Lin and Bo Dai}, + year={2021}, + eprint={2104.13586}, + archivePrefix={arXiv}, + primaryClass={cs.CV} +} +``` + ## Introduction -We release the skeleton annotations used in [Revisiting Skeleton-based Action Recognition](). 
By default, we use [Faster-RCNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person.py) with ResNet50 backbone for human detection and [HRNet-w32](https://github.com/open-mmlab/mmpose/blob/master/configs/top_down/hrnet/coco/hrnet_w32_coco_256x192.py) for single person pose estimation. For FineGYM, we use Ground-Truth bounding boxes for the athlete instead of detection bounding boxes.
+We release the skeleton annotations used in [Revisiting Skeleton-based Action Recognition](https://arxiv.org/abs/2104.13586). By default, we use [Faster-RCNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person.py) with ResNet50 backbone for human detection and [HRNet-w32](https://github.com/open-mmlab/mmpose/blob/master/configs/top_down/hrnet/coco/hrnet_w32_coco_256x192.py) for single person pose estimation. For FineGYM, we use Ground-Truth bounding boxes for the athlete instead of detection bounding boxes.
 
 Currently, we release the skeleton annotations for FineGYM and NTURGB-D Xsub split. Other annotations will be released soon.
 
 ## Prepare Annotations
@@ -12,6 +25,44 @@ Currently, we support three datasets: FineGYM, NTU60_XSub and NTU120_XSub. You c
 bash download_annotations.sh ${DATASET}
 ```
 
+## Visualization
+
+For skeleton data visualization, you also need to prepare the RGB videos. Please refer to [visualize_heatmap_volume](/demo/visualize_heatmap_volume.ipynb) for the detailed process. Here we provide some visualization examples from NTU-60 and FineGYM.
+
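+If you want to peek inside the downloaded annotations first, the minimal sketch below loads one file and prints the fields of the first sample. The file name `gym_train.pkl` and the comments on the field layout are assumptions for illustration, not guaranteed by this release:
+
+```python
+import mmcv  # mmcv.load dispatches on the file suffix and handles .pkl
+
+annos = mmcv.load('gym_train.pkl')  # hypothetical file name; adjust to the downloaded one
+print(len(annos))       # number of annotated video samples, assuming a list of dicts
+print(annos[0].keys())  # per-sample fields, e.g. the keypoints and the label
+```
+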
+[Demo GIFs: Pose Estimation Results | Keypoint Heatmap Volume Visualization | Limb Heatmap Volume Visualization]
+
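+Once the annotations are downloaded, they can be used directly to train the PoseC3D models; a sketch following the usual MMAction2 workflow (single GPU, with validation):
+
+```shell
+python tools/train.py configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint.py \
+    --validate
+```
+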
+ **TODO**: - [x] FineGYM From b8378d72c7751072b9723d753f963d02ff62a72b Mon Sep 17 00:00:00 2001 From: Kenny Date: Fri, 30 Apr 2021 13:01:18 +0800 Subject: [PATCH 065/414] add test_misc --- .../test_augmentations/test_misc.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 tests/test_data/test_pipelines/test_augmentations/test_misc.py diff --git a/tests/test_data/test_pipelines/test_augmentations/test_misc.py b/tests/test_data/test_pipelines/test_augmentations/test_misc.py new file mode 100644 index 0000000000..b639229c54 --- /dev/null +++ b/tests/test_data/test_pipelines/test_augmentations/test_misc.py @@ -0,0 +1,16 @@ +from mmaction.datasets.pipelines.augmentations import (combine_quadruple, + flip_quadruple) + + +class TestQuadrupleOps: + + def test_combine_quadruple(self): + a = (0.1, 0.1, 0.5, 0.5) + b = (0.3, 0.3, 0.7, 0.7) + res = combine_quadruple(a, b) + assert res == (0.25, 0.25, 0.35, 0.35) + + def test_flip_quadruple(self): + a = (0.1, 0.1, 0.5, 0.5) + res = flip_quadruple(a) + assert res == (0.4, 0.1, 0.5, 0.5) From 35497917cf1c5d40063a6f42d1080ade7bb97eba Mon Sep 17 00:00:00 2001 From: Kenny Date: Fri, 30 Apr 2021 13:03:44 +0800 Subject: [PATCH 066/414] add test_pose_compact --- .../test_augmentations/test_transform.py | 39 +++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/tests/test_data/test_pipelines/test_augmentations/test_transform.py b/tests/test_data/test_pipelines/test_augmentations/test_transform.py index c2e9a624d1..54ee47ca2a 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_transform.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_transform.py @@ -5,6 +5,7 @@ from mmcv.utils import assert_dict_has_keys from mmaction.datasets.pipelines import RandomRescale, RandomScale, Resize +from mmaction.datasets.pipelines.augmentations import PoseCompact class TestTransform: @@ -138,3 +139,41 @@ def test_random_scale(self): f'{random_scale_range.__class__.__name__}' f'(scales={((200, 64), (250, 80))}, ' 'mode=range)') + + +class TestPoseCompact: + + def test_pose_compact(self): + results = {} + results['img_shape'] = (100, 100) + fake_kp = np.zeros([1, 4, 2, 2]) + fake_kp[:, :, 0] = [10, 10] + fake_kp[:, :, 1] = [90, 90] + results['kp'] = fake_kp + + pose_compact = PoseCompact( + padding=0, threshold=0, hw_ratio=None, allow_imgpad=False) + inp = copy.deepcopy(results) + ret = pose_compact(inp) + assert ret['img_shape'] == (80, 80) + assert str(pose_compact) == ( + 'PoseCompact(padding=0, threshold=0, hw_ratio=None, ' + 'allow_imgpad=False)') + + pose_compact = PoseCompact( + padding=0.3, threshold=0, hw_ratio=None, allow_imgpad=False) + inp = copy.deepcopy(results) + ret = pose_compact(inp) + assert ret['img_shape'] == (100, 100) + + pose_compact = PoseCompact( + padding=0.3, threshold=0, hw_ratio=None, allow_imgpad=True) + inp = copy.deepcopy(results) + ret = pose_compact(inp) + assert ret['img_shape'] == (104, 104) + + pose_compact = PoseCompact( + padding=0, threshold=100, hw_ratio=None, allow_imgpad=False) + inp = copy.deepcopy(results) + ret = pose_compact(inp) + assert ret['img_shape'] == (100, 100) From 0a350ea8fd3195f09aebb379a739a5a61bfe749b Mon Sep 17 00:00:00 2001 From: Kenny Date: Fri, 30 Apr 2021 14:07:03 +0800 Subject: [PATCH 067/414] add aug unittest --- .../test_augmentations/test_crop.py | 7 ++++- .../test_augmentations/test_flip.py | 28 +++++++++++++++++++ .../test_augmentations/test_transform.py | 6 +++- 3 files changed, 39 insertions(+), 2 deletions(-) diff --git 
a/tests/test_data/test_pipelines/test_augmentations/test_crop.py b/tests/test_data/test_pipelines/test_augmentations/test_crop.py index dda2a57ad4..d63d902f50 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_crop.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_crop.py @@ -209,19 +209,24 @@ def test_center_crop(self): CenterCrop([224, 224]) # center crop with crop_size 224 + # add kps in test_center_crop imgs = list(np.random.rand(2, 240, 320, 3)) results = dict(imgs=imgs) + kp = np.array([[160, 120], [160, 120]]).reshape([1, 1, 2, 2]) + results['kp'] = kp + results['gt_bboxes'] = np.array([[0, 0, 320, 240]]) results['proposals'] = np.array([[0, 0, 320, 240]]) center_crop = CenterCrop(crop_size=224) center_crop_results = center_crop(results) - target_keys = ['imgs', 'crop_bbox', 'img_shape'] + target_keys = ['imgs', 'crop_bbox', 'img_shape', 'kp'] assert assert_dict_has_keys(center_crop_results, target_keys) assert check_crop(imgs, center_crop_results['imgs'], center_crop_results['crop_bbox']) assert np.all( center_crop_results['crop_bbox'] == np.array([48, 8, 272, 232])) assert center_crop_results['img_shape'] == (224, 224) + assert np.all(center_crop_results['kp'] == 112) assert repr(center_crop) == (f'{center_crop.__class__.__name__}' f'(crop_size={(224, 224)}, lazy={False})') diff --git a/tests/test_data/test_pipelines/test_augmentations/test_flip.py b/tests/test_data/test_pipelines/test_augmentations/test_flip.py index 9987c23320..c1cda273cc 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_flip.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_flip.py @@ -4,6 +4,7 @@ import numpy as np import pytest from mmcv.utils import assert_dict_has_keys +from numpy.testing import assert_array_almost_equal from mmaction.datasets.pipelines import Flip from .base import check_flip @@ -101,3 +102,30 @@ def test_flip(self): flip_label_map=_flip_label_map) flip_results = flip(results) assert results['label'] == 3 + + # flip the keypoints + results = dict( + kp=np.array([[1, 1], [63, 63]]).reshape([1, 1, 2, 2]), + modality='Pose', + img_shape=(64, 64)) + flip = Flip(flip_ratio=1, direction='horizontal', left=[0], right=[1]) + flip_results = flip(results) + assert_array_almost_equal(flip_results['kp'][0, 0], + np.array([[1, 63], [63, 1]])) + + results = dict( + kp=np.array([[1, 1], [63, 63]]).reshape([1, 1, 2, 2]), + modality='Pose', + img_shape=(64, 64)) + flip = Flip(flip_ratio=1, direction='horizontal', left=[], right=[]) + flip_results = flip(results) + assert_array_almost_equal(flip_results['kp'][0, 0], + np.array([[63, 1], [1, 63]])) + + with pytest.raises(AssertionError): + results = dict( + kp=np.array([[1, 1], [63, 63]]).reshape([1, 1, 2, 2]), + modality='Pose', + img_shape=(64, 64)) + flip = Flip(flip_ratio=1, direction='vertical', left=[], right=[]) + flip_results = flip(results) diff --git a/tests/test_data/test_pipelines/test_augmentations/test_transform.py b/tests/test_data/test_pipelines/test_augmentations/test_transform.py index 54ee47ca2a..9b94a922c2 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_transform.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_transform.py @@ -3,6 +3,7 @@ import numpy as np import pytest from mmcv.utils import assert_dict_has_keys +from numpy.testing import assert_array_almost_equal from mmaction.datasets.pipelines import RandomRescale, RandomScale, Resize from mmaction.datasets.pipelines.augmentations import PoseCompact @@ -62,13 +63,16 @@ def 
test_resize(self): # test resize for flow images imgs = list(np.random.rand(2, 240, 320)) - results = dict(imgs=imgs, modality='Flow') + kp = np.array([60, 60]).reshape([1, 1, 1, 2]) + results = dict(imgs=imgs, kp=kp, modality='Flow') resize = Resize(scale=(160, 80), keep_ratio=False) resize_results = resize(results) assert assert_dict_has_keys(resize_results, target_keys) assert np.all(resize_results['scale_factor'] == np.array( [.5, 1. / 3.], dtype=np.float32)) assert resize_results['img_shape'] == (80, 160) + kp = resize_results['kp'][0, 0, 0] + assert_array_almost_equal(kp, np.array([30, 20])) # scale with -1 to indicate np.inf imgs = list(np.random.rand(2, 240, 320, 3)) From 2db6fb0cdab19adf71de028e22a11e88d1a0af60 Mon Sep 17 00:00:00 2001 From: Kenny Date: Fri, 30 Apr 2021 15:05:34 +0800 Subject: [PATCH 068/414] add test uniform sample frames --- mmaction/datasets/pipelines/pose_loading.py | 2 +- .../test_loadings/test_pose_loading.py | 64 +++++++++++++++++++ 2 files changed, 65 insertions(+), 1 deletion(-) create mode 100644 tests/test_data/test_pipelines/test_loadings/test_pose_loading.py diff --git a/mmaction/datasets/pipelines/pose_loading.py b/mmaction/datasets/pipelines/pose_loading.py index 2458671cd5..a05f0344df 100644 --- a/mmaction/datasets/pipelines/pose_loading.py +++ b/mmaction/datasets/pipelines/pose_loading.py @@ -223,7 +223,7 @@ def __call__(self, results): return results def __repr__(self): - repr_str = (f'{self.__class__.__name}(', + repr_str = (f'{self.__class__.__name__}(', f'random_crop={self.random_crop}, ' f'random_seed={self.random_seed}, ' f'drop_prob={self.drop_prob}, ' diff --git a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py new file mode 100644 index 0000000000..8d27762369 --- /dev/null +++ b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py @@ -0,0 +1,64 @@ +import numpy as np # isort: skip +from numpy.testing import assert_array_equal # isort: skip + +from mmaction.datasets.pipelines.pose_loading import UniformSampleFrames + + +class TestPoseLoading: + + def test_uniform_sample_frames(self): + results = dict(total_frames=64, start_index=0) + sampling = UniformSampleFrames( + clip_len=8, num_clips=1, test_mode=True, seed=0) + + assert str(sampling) == ('UniformSampleFrames(clip_len=8, ' + 'num_clips=1, test_mode=True, seed=0)') + sampling_results = sampling(results) + assert sampling_results['clip_len'] == 8 + assert sampling_results['frame_interval'] is None + assert sampling_results['num_clips'] == 1 + assert_array_equal(sampling_results['frame_inds'], + np.array([4, 15, 21, 24, 35, 43, 51, 63])) + + results = dict(total_frames=15, start_index=0) + sampling = UniformSampleFrames( + clip_len=8, num_clips=1, test_mode=True, seed=0) + sampling_results = sampling(results) + assert sampling_results['clip_len'] == 8 + assert sampling_results['frame_interval'] is None + assert sampling_results['num_clips'] == 1 + assert_array_equal(sampling_results['frame_inds'], + np.array([0, 2, 4, 6, 8, 9, 11, 13])) + + results = dict(total_frames=7, start_index=0) + sampling = UniformSampleFrames( + clip_len=8, num_clips=1, test_mode=True, seed=0) + sampling_results = sampling(results) + assert sampling_results['clip_len'] == 8 + assert sampling_results['frame_interval'] is None + assert sampling_results['num_clips'] == 1 + assert_array_equal(sampling_results['frame_inds'], + np.array([0, 1, 2, 3, 4, 5, 6, 0])) + + results = dict(total_frames=64, start_index=0) + 
sampling = UniformSampleFrames( + clip_len=8, num_clips=4, test_mode=True, seed=0) + sampling_results = sampling(results) + assert sampling_results['clip_len'] == 8 + assert sampling_results['frame_interval'] is None + assert sampling_results['num_clips'] == 4 + assert_array_equal( + sampling_results['frame_inds'], + np.array([ + 4, 15, 21, 24, 35, 43, 51, 63, 1, 11, 21, 26, 36, 47, 54, 56, + 0, 12, 18, 25, 38, 47, 55, 62, 0, 9, 21, 25, 37, 40, 49, 60 + ])) + + results = dict(total_frames=64, start_index=0) + sampling = UniformSampleFrames( + clip_len=8, num_clips=1, test_mode=False, seed=0) + sampling_results = sampling(results) + assert sampling_results['clip_len'] == 8 + assert sampling_results['frame_interval'] is None + assert sampling_results['num_clips'] == 1 + assert len(sampling_results['frame_inds']) == 8 From 939a1e8fbc6e7e4209e4e0f4db4aeda77465e5c3 Mon Sep 17 00:00:00 2001 From: Kenny Date: Fri, 30 Apr 2021 15:06:53 +0800 Subject: [PATCH 069/414] fix bug --- mmaction/datasets/pipelines/pose_loading.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmaction/datasets/pipelines/pose_loading.py b/mmaction/datasets/pipelines/pose_loading.py index a05f0344df..9a5c5ba263 100644 --- a/mmaction/datasets/pipelines/pose_loading.py +++ b/mmaction/datasets/pipelines/pose_loading.py @@ -224,7 +224,7 @@ def __call__(self, results): def __repr__(self): repr_str = (f'{self.__class__.__name__}(', - f'random_crop={self.random_crop}, ' + f'random_drop={self.random_drop}, ' f'random_seed={self.random_seed}, ' f'drop_prob={self.drop_prob}, ' f'manipulate_joints={self.manipulate_joints})') From 509889aff3a434ac43fa13622b39fad902cb0e04 Mon Sep 17 00:00:00 2001 From: Kenny Date: Fri, 30 Apr 2021 15:08:16 +0800 Subject: [PATCH 070/414] fix bug --- mmaction/datasets/pipelines/pose_loading.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmaction/datasets/pipelines/pose_loading.py b/mmaction/datasets/pipelines/pose_loading.py index 9a5c5ba263..9c1e18c616 100644 --- a/mmaction/datasets/pipelines/pose_loading.py +++ b/mmaction/datasets/pipelines/pose_loading.py @@ -223,7 +223,7 @@ def __call__(self, results): return results def __repr__(self): - repr_str = (f'{self.__class__.__name__}(', + repr_str = (f'{self.__class__.__name__}(' f'random_drop={self.random_drop}, ' f'random_seed={self.random_seed}, ' f'drop_prob={self.drop_prob}, ' From 22197d7148ca4bac4e6230c7cc2f8ae9969a3050 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Fri, 30 Apr 2021 18:15:27 +0800 Subject: [PATCH 071/414] [Refactor] Use EvalHool in MMCV with backward compatibility (#793) * eval_hook BC * use better evalhook * auto --- mmaction/apis/test.py | 343 ++++++------ mmaction/core/evaluation/eval_hooks.py | 722 +++++++++++++------------ tests/test_runtime/test_apis_test.py | 16 +- tests/test_runtime/test_eval_hook.py | 13 +- tools/test.py | 11 +- 5 files changed, 584 insertions(+), 521 deletions(-) diff --git a/mmaction/apis/test.py b/mmaction/apis/test.py index 2541343feb..4f4a52224b 100644 --- a/mmaction/apis/test.py +++ b/mmaction/apis/test.py @@ -2,186 +2,203 @@ import pickle import shutil import tempfile +# TODO import test functions from mmcv and delete them from mmaction2 +import warnings import mmcv import torch import torch.distributed as dist from mmcv.runner import get_dist_info - -def single_gpu_test(model, data_loader): - """Test model with a single gpu. - - This method tests model with a single gpu and displays test progress bar. 
- - Args: - model (nn.Module): Model to be tested. - data_loader (nn.Dataloader): Pytorch data loader. - - Returns: - list: The prediction results. - """ - model.eval() - results = [] - dataset = data_loader.dataset - prog_bar = mmcv.ProgressBar(len(dataset)) - for data in data_loader: - with torch.no_grad(): - result = model(return_loss=False, **data) - results.extend(result) - - # use the first key as main key to calculate the batch size - batch_size = len(next(iter(data.values()))) - for _ in range(batch_size): - prog_bar.update() - return results - - -def multi_gpu_test(model, data_loader, tmpdir=None, gpu_collect=True): - """Test model with multiple gpus. - - This method tests model with multiple gpus and collects the results - under two different modes: gpu and cpu modes. By setting 'gpu_collect=True' - it encodes results to gpu tensors and use gpu communication for results - collection. On cpu mode it saves the results on different gpus to 'tmpdir' - and collects them by the rank 0 worker. - - Args: - model (nn.Module): Model to be tested. - data_loader (nn.Dataloader): Pytorch data loader. - tmpdir (str): Path of directory to save the temporary results from - different gpus under cpu mode. Default: None - gpu_collect (bool): Option to use either gpu or cpu to collect results. - Default: True - - Returns: - list: The prediction results. - """ - model.eval() - results = [] - dataset = data_loader.dataset - rank, world_size = get_dist_info() - if rank == 0: +try: + from mmcv.engine import (single_gpu_test, multi_gpu_test, + collect_results_gpu, collect_results_cpu) + from_mmcv = True +except (ImportError, ModuleNotFoundError): + warnings.warn( + 'DeprecationWarning: single_gpu_test, multi_gpu_test, ' + 'collect_results_cpu, collect_results_gpu from mmaction2 will be ' + 'deprecated. Please install mmcv through master branch.') + from_mmcv = False + +if not from_mmcv: + + def single_gpu_test(model, data_loader): # noqa: F811 + """Test model with a single gpu. + + This method tests model with a single gpu and + displays test progress bar. + + Args: + model (nn.Module): Model to be tested. + data_loader (nn.Dataloader): Pytorch data loader. + + Returns: + list: The prediction results. + """ + model.eval() + results = [] + dataset = data_loader.dataset prog_bar = mmcv.ProgressBar(len(dataset)) - for data in data_loader: - with torch.no_grad(): - result = model(return_loss=False, **data) - results.extend(result) + for data in data_loader: + with torch.no_grad(): + result = model(return_loss=False, **data) + results.extend(result) - if rank == 0: # use the first key as main key to calculate the batch size batch_size = len(next(iter(data.values()))) - for _ in range(batch_size * world_size): + for _ in range(batch_size): prog_bar.update() - - # collect results from all ranks - if gpu_collect: - results = collect_results_gpu(results, len(dataset)) - else: - results = collect_results_cpu(results, len(dataset), tmpdir) - return results - - -def collect_results_cpu(result_part, size, tmpdir=None): - """Collect results in cpu mode. - - It saves the results on different gpus to 'tmpdir' and collects - them by the rank 0 worker. - - Args: - result_part (list): Results to be collected - size (int): Result size. - tmpdir (str): Path of directory to save the temporary results from - different gpus under cpu mode. Default: None - - Returns: - list: Ordered results. 
- """ - rank, world_size = get_dist_info() - # create a tmp dir if it is not specified - if tmpdir is None: - MAX_LEN = 512 - # 32 is whitespace - dir_tensor = torch.full((MAX_LEN, ), - 32, - dtype=torch.uint8, - device='cuda') + return results + + def multi_gpu_test( # noqa: F811 + model, data_loader, tmpdir=None, gpu_collect=True): + """Test model with multiple gpus. + + This method tests model with multiple gpus and collects the results + under two different modes: gpu and cpu modes. By setting + 'gpu_collect=True' it encodes results to gpu tensors and use gpu + communication for results collection. On cpu mode it saves the results + on different gpus to 'tmpdir' and collects them by the rank 0 worker. + + Args: + model (nn.Module): Model to be tested. + data_loader (nn.Dataloader): Pytorch data loader. + tmpdir (str): Path of directory to save the temporary results from + different gpus under cpu mode. Default: None + gpu_collect (bool): Option to use either gpu or cpu to collect + results. Default: True + + Returns: + list: The prediction results. + """ + model.eval() + results = [] + dataset = data_loader.dataset + rank, world_size = get_dist_info() if rank == 0: - mmcv.mkdir_or_exist('.dist_test') - tmpdir = tempfile.mkdtemp(dir='.dist_test') - tmpdir = torch.tensor( - bytearray(tmpdir.encode()), dtype=torch.uint8, device='cuda') - dir_tensor[:len(tmpdir)] = tmpdir - dist.broadcast(dir_tensor, 0) - tmpdir = dir_tensor.cpu().numpy().tobytes().decode().rstrip() - else: - mmcv.mkdir_or_exist(tmpdir) - # synchronizes all processes to make sure tmpdir exist - dist.barrier() - # dump the part result to the dir - mmcv.dump(result_part, osp.join(tmpdir, f'part_{rank}.pkl')) - # synchronizes all processes for loding pickle file - dist.barrier() - # collect all parts - if rank != 0: - return None - # load results of all parts from tmp dir - part_list = [] - for i in range(world_size): - part_file = osp.join(tmpdir, f'part_{i}.pkl') - part_list.append(mmcv.load(part_file)) - # sort the results - ordered_results = [] - for res in zip(*part_list): - ordered_results.extend(list(res)) - # the dataloader may pad some samples - ordered_results = ordered_results[:size] - # remove tmp dir - shutil.rmtree(tmpdir) - return ordered_results - - -def collect_results_gpu(result_part, size): - """Collect results in gpu mode. - - It encodes results to gpu tensors and use gpu communication for results - collection. - - Args: - result_part (list): Results to be collected - size (int): Result size. - - Returns: - list: Ordered results. 
-    """
-    rank, world_size = get_dist_info()
-    # dump result part to tensor with pickle
-    part_tensor = torch.tensor(
-        bytearray(pickle.dumps(result_part)), dtype=torch.uint8, device='cuda')
-    # gather all result part tensor shape
-    shape_tensor = torch.tensor(part_tensor.shape, device='cuda')
-    shape_list = [shape_tensor.clone() for _ in range(world_size)]
-    dist.all_gather(shape_list, shape_tensor)
-    # padding result part tensor to max length
-    shape_max = torch.tensor(shape_list).max()
-    part_send = torch.zeros(shape_max, dtype=torch.uint8, device='cuda')
-    part_send[:shape_tensor[0]] = part_tensor
-    part_recv_list = [
-        part_tensor.new_zeros(shape_max) for _ in range(world_size)
-    ]
-    # gather all result part
-    dist.all_gather(part_recv_list, part_send)
-
-    if rank == 0:
+            prog_bar = mmcv.ProgressBar(len(dataset))
+        for data in data_loader:
+            with torch.no_grad():
+                result = model(return_loss=False, **data)
+            results.extend(result)
+
+            if rank == 0:
+                # use the first key as main key to calculate the batch size
+                batch_size = len(next(iter(data.values())))
+                for _ in range(batch_size * world_size):
+                    prog_bar.update()
+
+        # collect results from all ranks
+        if gpu_collect:
+            results = collect_results_gpu(results, len(dataset))
+        else:
+            results = collect_results_cpu(results, len(dataset), tmpdir)
+        return results
+
+    def collect_results_cpu(result_part, size, tmpdir=None):  # noqa: F811
+        """Collect results in cpu mode.
+
+        It saves the results on different gpus to 'tmpdir' and collects
+        them by the rank 0 worker.
+
+        Args:
+            result_part (list): Results to be collected
+            size (int): Result size.
+            tmpdir (str): Path of directory to save the temporary results from
+                different gpus under cpu mode. Default: None
+
+        Returns:
+            list: Ordered results.
+        """
+        rank, world_size = get_dist_info()
+        # create a tmp dir if it is not specified
+        if tmpdir is None:
+            MAX_LEN = 512
+            # 32 is whitespace
+            dir_tensor = torch.full((MAX_LEN, ),
+                                    32,
+                                    dtype=torch.uint8,
+                                    device='cuda')
+            if rank == 0:
+                mmcv.mkdir_or_exist('.dist_test')
+                tmpdir = tempfile.mkdtemp(dir='.dist_test')
+                tmpdir = torch.tensor(
+                    bytearray(tmpdir.encode()),
+                    dtype=torch.uint8,
+                    device='cuda')
+                dir_tensor[:len(tmpdir)] = tmpdir
+            dist.broadcast(dir_tensor, 0)
+            tmpdir = dir_tensor.cpu().numpy().tobytes().decode().rstrip()
+        else:
+            mmcv.mkdir_or_exist(tmpdir)
+        # synchronizes all processes to make sure tmpdir exists
+        dist.barrier()
+        # dump the part result to the dir
+        mmcv.dump(result_part, osp.join(tmpdir, f'part_{rank}.pkl'))
+        # synchronizes all processes for loading pickle file
+        dist.barrier()
+        # collect all parts
+        if rank != 0:
+            return None
+        # load results of all parts from tmp dir
         part_list = []
-    for recv, shape in zip(part_recv_list, shape_list):
-        part_list.append(
-            pickle.loads(recv[:shape[0]].cpu().numpy().tobytes()))
+        for i in range(world_size):
+            part_file = osp.join(tmpdir, f'part_{i}.pkl')
+            part_list.append(mmcv.load(part_file))
         # sort the results
         ordered_results = []
         for res in zip(*part_list):
             ordered_results.extend(list(res))
         # the dataloader may pad some samples
         ordered_results = ordered_results[:size]
+        # remove tmp dir
+        shutil.rmtree(tmpdir)
         return ordered_results
-    return None
+
+    def collect_results_gpu(result_part, size):  # noqa: F811
+        """Collect results in gpu mode.
+
+        It encodes results to gpu tensors and uses gpu communication for results
+        collection.
+
+        Args:
+            result_part (list): Results to be collected
+            size (int): Result size.
+
+        Returns:
+            list: Ordered results.
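[Editor's note] The gpu-mode collector described above works by pickling each rank's partial results into a uint8 tensor, padding every tensor to the length of the longest one, and exchanging them with `all_gather`; rank 0 then slices each buffer back to its true length and unpickles it. A single-process sketch of that round-trip (the `dist` calls are elided so the snippet runs standalone):

```python
import pickle

import torch

result_part = [{'top1_acc': 0.72}, {'top1_acc': 0.68}]

# serialize to a byte tensor, as collect_results_gpu does before all_gather
part_tensor = torch.tensor(
    bytearray(pickle.dumps(result_part)), dtype=torch.uint8)

# pad to the maximum length across ranks (simulated here with +16 bytes)
shape_max = part_tensor.numel() + 16
part_send = torch.zeros(shape_max, dtype=torch.uint8)
part_send[:part_tensor.numel()] = part_tensor

# a receiver slices by the true length before unpickling
recovered = pickle.loads(part_send[:part_tensor.numel()].numpy().tobytes())
assert recovered == result_part
```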
+ """ + rank, world_size = get_dist_info() + # dump result part to tensor with pickle + part_tensor = torch.tensor( + bytearray(pickle.dumps(result_part)), + dtype=torch.uint8, + device='cuda') + # gather all result part tensor shape + shape_tensor = torch.tensor(part_tensor.shape, device='cuda') + shape_list = [shape_tensor.clone() for _ in range(world_size)] + dist.all_gather(shape_list, shape_tensor) + # padding result part tensor to max length + shape_max = torch.tensor(shape_list).max() + part_send = torch.zeros(shape_max, dtype=torch.uint8, device='cuda') + part_send[:shape_tensor[0]] = part_tensor + part_recv_list = [ + part_tensor.new_zeros(shape_max) for _ in range(world_size) + ] + # gather all result part + dist.all_gather(part_recv_list, part_send) + + if rank == 0: + part_list = [] + for recv, shape in zip(part_recv_list, shape_list): + part_list.append( + pickle.loads(recv[:shape[0]].cpu().numpy().tobytes())) + # sort the results + ordered_results = [] + for res in zip(*part_list): + ordered_results.extend(list(res)) + # the dataloader may pad some samples + ordered_results = ordered_results[:size] + return ordered_results + return None diff --git a/mmaction/core/evaluation/eval_hooks.py b/mmaction/core/evaluation/eval_hooks.py index baba638bad..d96ad87a6b 100644 --- a/mmaction/core/evaluation/eval_hooks.py +++ b/mmaction/core/evaluation/eval_hooks.py @@ -8,368 +8,382 @@ from torch.nn.modules.batchnorm import _BatchNorm from torch.utils.data import DataLoader +try: + from mmcv.runner import EvalHook as BasicEvalHook + from mmcv.runner import DistEvalHook as BasicDistEvalHook -class EvalHook(Hook): - """Non-Distributed evaluation hook. - - Notes: - If new arguments are added for EvalHook, tools/test.py, - tools/eval_metric.py may be effected. - - This hook will regularly perform evaluation in a given interval when - performing in non-distributed environment. - - Args: - dataloader (DataLoader): A PyTorch dataloader. - start (int | None, optional): Evaluation starting epoch. It enables - evaluation before the training starts if ``start`` <= the resuming - epoch. If None, whether to evaluate is merely decided by - ``interval``. Default: None. - interval (int): Evaluation interval. Default: 1. - by_epoch (bool): Determine perform evaluation by epoch or by iteration. - If set to True, it will perform by epoch. Otherwise, by iteration. - default: True. - save_best (str | None, optional): If a metric is specified, it would - measure the best checkpoint during evaluation. The information - about best checkpoint would be save in best.json. - Options are the evaluation metrics to the test dataset. e.g., - ``top1_acc``, ``top5_acc``, ``mean_class_accuracy``, - ``mean_average_precision``, ``mmit_mean_average_precision`` - for action recognition dataset (RawframeDataset and VideoDataset). - ``AR@AN``, ``auc`` for action localization dataset. - (ActivityNetDataset). ``mAP@0.5IOU`` for spatio-temporal action - detection dataset (AVADataset). If ``save_best`` is ``auto``, the - first key of the returned ``OrderedDict`` result will be used. - Default: 'auto'. - rule (str | None, optional): Comparison rule for best score. If set to - None, it will infer a reasonable rule. Keys such as 'acc', 'top' - .etc will be inferred by 'greater' rule. Keys contain 'loss' will - be inferred by 'less' rule. Options are 'greater', 'less', None. - Default: None. - **eval_kwargs: Evaluation arguments fed into the evaluate function of - the dataset. 
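[Editor's note] The `save_best`/`rule` contract documented above reduces to a substring match against two key lists. A rough sketch mirroring the hook's `_init_rule` logic (simplified for illustration, not the patch code itself):

```python
greater_keys = ['acc', 'top', 'AR@', 'auc', 'precision', 'mAP@', 'Recall@']
less_keys = ['loss']


def infer_rule(key_indicator):
    """Guess whether a higher or lower metric value is better."""
    if any(key in key_indicator for key in greater_keys):
        return 'greater'
    if any(key in key_indicator for key in less_keys):
        return 'less'
    raise ValueError(f'Cannot infer the rule for key {key_indicator}, '
                     'thus a specific rule must be specified.')


assert infer_rule('top1_acc') == 'greater'  # higher accuracy is better
assert infer_rule('loss_cls') == 'less'     # lower loss is better
```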
- """ - - rule_map = {'greater': lambda x, y: x > y, 'less': lambda x, y: x < y} - init_value_map = {'greater': -inf, 'less': inf} - greater_keys = ['acc', 'top', 'AR@', 'auc', 'precision', 'mAP@', 'Recall@'] - less_keys = ['loss'] - - def __init__(self, - dataloader, - start=None, - interval=1, - by_epoch=True, - save_best='auto', - rule=None, - **eval_kwargs): - if 'key_indicator' in eval_kwargs: - raise RuntimeError( - '"key_indicator" is deprecated, ' - 'you need to use "save_best" instead. ' - 'See https://github.com/open-mmlab/mmaction2/pull/395 for more info' # noqa: E501 - ) - - if not isinstance(dataloader, DataLoader): - raise TypeError(f'dataloader must be a pytorch DataLoader, ' - f'but got {type(dataloader)}') - - if interval <= 0: - raise ValueError(f'interval must be positive, but got {interval}') - - assert isinstance(by_epoch, bool) - - if start is not None and start < 0: - warnings.warn( - f'The evaluation start epoch {start} is smaller than 0, ' - f'use 0 instead', UserWarning) - start = 0 - self.dataloader = dataloader - self.interval = interval - self.start = start - self.by_epoch = by_epoch - - assert isinstance(save_best, str) or save_best is None - self.save_best = save_best - self.eval_kwargs = eval_kwargs - self.initial_flag = True - - if self.save_best is not None: - self.best_ckpt_path = None - self._init_rule(rule, self.save_best) - - def _init_rule(self, rule, key_indicator): - """Initialize rule, key_indicator, comparison_func, and best score. + from_mmcv = True - Args: - rule (str | None): Comparison rule for best score. - key_indicator (str | None): Key indicator to determine the - comparison rule. - """ - if rule not in self.rule_map and rule is not None: - raise KeyError(f'rule must be greater, less or None, ' - f'but got {rule}.') - - if rule is None: - if key_indicator != 'auto': - if any(key in key_indicator for key in self.greater_keys): - rule = 'greater' - elif any(key in key_indicator for key in self.less_keys): - rule = 'less' - else: - raise ValueError(f'Cannot infer the rule for key ' - f'{key_indicator}, thus a specific rule ' - f'must be specified.') - self.rule = rule - self.key_indicator = key_indicator - if self.rule is not None: - self.compare_func = self.rule_map[self.rule] - - def before_run(self, runner): - if self.save_best is not None: - if runner.meta is None: - warnings.warn('runner.meta is None. 
Creating a empty one.') - runner.meta = dict() - runner.meta.setdefault('hook_msgs', dict()) - - def before_train_iter(self, runner): - """Evaluate the model only at the start of training by iteration.""" - if self.by_epoch: - return - if not self.initial_flag: - return - if self.start is not None and runner.iter >= self.start: - self.after_train_iter(runner) - self.initial_flag = False - - def before_train_epoch(self, runner): - """Evaluate the model only at the start of training by epoch.""" - if not self.by_epoch: - return - if not self.initial_flag: - return - if self.start is not None and runner.epoch >= self.start: - self.after_train_epoch(runner) - self.initial_flag = False - - def after_train_iter(self, runner): - """Called after every training iter to evaluate the results.""" - if not self.by_epoch: - self._do_evaluate(runner) - - def after_train_epoch(self, runner): - """Called after every training epoch to evaluate the results.""" - if self.by_epoch: - self._do_evaluate(runner) - - def _do_evaluate(self, runner): - """perform evaluation and save ckpt.""" - if not self.evaluation_flag(runner): - return - - from mmaction.apis import single_gpu_test - results = single_gpu_test(runner.model, self.dataloader) - key_score = self.evaluate(runner, results) - if self.save_best: - self._save_ckpt(runner, key_score) - - def evaluation_flag(self, runner): - """Judge whether to perform_evaluation. - - Returns: - bool: The flag indicating whether to perform evaluation. - """ - if self.by_epoch: - current = runner.epoch - check_time = self.every_n_epochs - else: - current = runner.iter - check_time = self.every_n_iters - - if self.start is None: - if not check_time(runner, self.interval): - # No evaluation during the interval. - return False - elif (current + 1) < self.start: - # No evaluation if start is larger than the current time. - return False - else: - # Evaluation only at epochs/iters 3, 5, 7... - # if start==3 and interval==2 - if (current + 1 - self.start) % self.interval: - return False - return True - - def _save_ckpt(self, runner, key_score): - if self.by_epoch: - current = f'epoch_{runner.epoch + 1}' - cur_type, cur_time = 'epoch', runner.epoch + 1 - else: - current = f'iter_{runner.iter + 1}' - cur_type, cur_time = 'iter', runner.iter + 1 - - best_score = runner.meta['hook_msgs'].get( - 'best_score', self.init_value_map[self.rule]) - if self.compare_func(key_score, best_score): - best_score = key_score - runner.meta['hook_msgs']['best_score'] = best_score - - if self.best_ckpt_path and osp.isfile(self.best_ckpt_path): - os.remove(self.best_ckpt_path) - - best_ckpt_name = f'best_{self.key_indicator}_{current}.pth' - runner.save_checkpoint( - runner.work_dir, best_ckpt_name, create_symlink=False) - self.best_ckpt_path = osp.join(runner.work_dir, best_ckpt_name) - - runner.meta['hook_msgs']['best_ckpt'] = self.best_ckpt_path - runner.logger.info( - f'Now best checkpoint is saved as {best_ckpt_name}.') - runner.logger.info( - f'Best {self.key_indicator} is {best_score:0.4f} ' - f'at {cur_time} {cur_type}.') - - def evaluate(self, runner, results): - """Evaluate the results. 
+ class EvalHook(BasicEvalHook): + greater_keys = [ + 'acc', 'top', 'AR@', 'auc', 'precision', 'mAP@', 'Recall@' + ] + less_keys = ['loss'] + + def __init__(self, *args, save_best='auto', **kwargs): + super().__init__(*args, save_best=save_best, **kwargs) + + class DistEvalHook(BasicDistEvalHook): + greater_keys = [ + 'acc', 'top', 'AR@', 'auc', 'precision', 'mAP@', 'Recall@' + ] + less_keys = ['loss'] + + def __init__(self, *args, save_best='auto', **kwargs): + super().__init__(*args, save_best=save_best, **kwargs) + +except (ImportError, ModuleNotFoundError): + warnings.warn('DeprecationWarning: EvalHook and DistEvalHook in mmaction2 ' + 'will be deprecated, please install mmcv through master ' + 'branch.') + from_mmcv = False + +if not from_mmcv: + + class EvalHook(Hook): # noqa: F811 + """Non-Distributed evaluation hook. + + Notes: + If new arguments are added for EvalHook, tools/test.py, + tools/eval_metric.py may be effected. + + This hook will regularly perform evaluation in a given interval when + performing in non-distributed environment. Args: - runner (:obj:`mmcv.Runner`): The underlined training runner. - results (list): Output results. + dataloader (DataLoader): A PyTorch dataloader. + start (int | None, optional): Evaluation starting epoch. It enables + evaluation before the training starts if ``start`` <= the + resuming epoch. If None, whether to evaluate is merely decided + by ``interval``. Default: None. + interval (int): Evaluation interval. Default: 1. + by_epoch (bool): Determine perform evaluation by epoch or by + iteration. If set to True, it will perform by epoch. + Otherwise, by iteration. default: True. + save_best (str | None, optional): If a metric is specified, it + would measure the best checkpoint during evaluation. The + information about best checkpoint would be save in best.json. + Options are the evaluation metrics to the test dataset. e.g., + ``top1_acc``, ``top5_acc``, ``mean_class_accuracy``, + ``mean_average_precision``, ``mmit_mean_average_precision`` + for action recognition dataset (RawframeDataset and + VideoDataset). ``AR@AN``, ``auc`` for action localization + dataset. (ActivityNetDataset). ``mAP@0.5IOU`` for + spatio-temporal action detection dataset (AVADataset). + If ``save_best`` is ``auto``, the first key of the returned + ``OrderedDict`` result will be used. Default: 'auto'. + rule (str | None, optional): Comparison rule for best score. + If set to None, it will infer a reasonable rule. Keys such as + 'acc', 'top' .etc will be inferred by 'greater' rule. Keys + contain 'loss' will be inferred by 'less' rule. Options are + 'greater', 'less', None. Default: None. + **eval_kwargs: Evaluation arguments fed into the evaluate function + of the dataset. """ - eval_res = self.dataloader.dataset.evaluate( - results, logger=runner.logger, **self.eval_kwargs) - for name, val in eval_res.items(): - runner.log_buffer.output[name] = val - runner.log_buffer.ready = True - if self.save_best is not None: - if self.key_indicator == 'auto': - # infer from eval_results - self._init_rule(self.rule, list(eval_res.keys())[0]) - return eval_res[self.key_indicator] - - return None - - -class DistEvalHook(EvalHook): - """Distributed evaluation hook. - - This hook will regularly perform evaluation in a given interval when - performing in distributed environment. - - Args: - dataloader (DataLoader): A PyTorch dataloader. - start (int | None, optional): Evaluation starting epoch. It enables - evaluation before the training starts if ``start`` <= the resuming - epoch. 
If None, whether to evaluate is merely decided by - ``interval``. Default: None. - interval (int): Evaluation interval. Default: 1. - by_epoch (bool): Determine perform evaluation by epoch or by iteration. - If set to True, it will perform by epoch. Otherwise, by iteration. - default: True. - save_best (str | None, optional): If a metric is specified, it would - measure the best checkpoint during evaluation. The information - about best checkpoint would be save in best.json. - Options are the evaluation metrics to the test dataset. e.g., - ``top1_acc``, ``top5_acc``, ``mean_class_accuracy``, - ``mean_average_precision``, ``mmit_mean_average_precision`` - for action recognition dataset (RawframeDataset and VideoDataset). - ``AR@AN``, ``auc`` for action localization dataset - (ActivityNetDataset). ``mAP@0.5IOU`` for spatio-temporal action - detection dataset (AVADataset). If ``save_best`` is ``auto``, the - first key of the returned ``OrderedDict`` result will be used. - Default: 'auto'. - rule (str | None, optional): Comparison rule for best score. If set to - None, it will infer a reasonable rule. Keys such as 'acc', 'top' - .etc will be inferred by 'greater' rule. Keys contain 'loss' will - be inferred by 'less' rule. Options are 'greater', 'less', None. - Default: None. - tmpdir (str | None): Temporary directory to save the results of all - processes. Default: None. - gpu_collect (bool): Whether to use gpu or cpu to collect results. - Default: False. - broadcast_bn_buffer (bool): Whether to broadcast the - buffer(running_mean and running_var) of rank 0 to other rank - before evaluation. Default: True. - **eval_kwargs: Evaluation arguments fed into the evaluate function of - the dataset. - """ - - def __init__(self, - dataloader, - start=None, - interval=1, - by_epoch=True, - save_best='auto', - rule=None, - broadcast_bn_buffer=True, - tmpdir=None, - gpu_collect=False, - **eval_kwargs): - super().__init__( - dataloader, - start=start, - interval=interval, - by_epoch=by_epoch, - save_best=save_best, - rule=rule, - **eval_kwargs) - self.broadcast_bn_buffer = broadcast_bn_buffer - self.tmpdir = tmpdir - self.gpu_collect = gpu_collect - - def _do_evaluate(self, runner): - """perform evaluation and save ckpt.""" - # Synchronization of BatchNorm's buffer (running_mean - # and running_var) is not supported in the DDP of pytorch, - # which may cause the inconsistent performance of models in - # different ranks, so we broadcast BatchNorm's buffers - # of rank 0 to other ranks to avoid this. 
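[Editor's note] The comment block above names a real DDP pitfall: `running_mean` and `running_var` are buffers, not parameters, so DDP's gradient synchronization never touches them and each rank slowly accumulates different statistics. The broadcast that follows pins every rank to rank 0's values; in isolation the fix looks like this (a sketch that assumes an already-initialized process group):

```python
import torch.distributed as dist
from torch.nn.modules.batchnorm import _BatchNorm


def broadcast_bn_buffers(model, src=0):
    """Push rank `src`'s BatchNorm statistics to all other ranks."""
    for _, module in model.named_modules():
        if isinstance(module, _BatchNorm) and module.track_running_stats:
            dist.broadcast(module.running_var, src)
            dist.broadcast(module.running_mean, src)
```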
- if self.broadcast_bn_buffer: - model = runner.model - for name, module in model.named_modules(): - if isinstance(module, - _BatchNorm) and module.track_running_stats: - dist.broadcast(module.running_var, 0) - dist.broadcast(module.running_mean, 0) - - if not self.evaluation_flag(runner): - return - - from mmaction.apis import multi_gpu_test - tmpdir = self.tmpdir - if tmpdir is None: - tmpdir = osp.join(runner.work_dir, '.eval_hook') - - results = multi_gpu_test( - runner.model, - self.dataloader, - tmpdir=tmpdir, - gpu_collect=self.gpu_collect) - if runner.rank == 0: - print('\n') - key_score = self.evaluate(runner, results) + rule_map = {'greater': lambda x, y: x > y, 'less': lambda x, y: x < y} + init_value_map = {'greater': -inf, 'less': inf} + greater_keys = [ + 'acc', 'top', 'AR@', 'auc', 'precision', 'mAP@', 'Recall@' + ] + less_keys = ['loss'] + + def __init__(self, + dataloader, + start=None, + interval=1, + by_epoch=True, + save_best='auto', + rule=None, + **eval_kwargs): + + if 'key_indicator' in eval_kwargs: + raise RuntimeError( + '"key_indicator" is deprecated, ' + 'you need to use "save_best" instead. ' + 'See https://github.com/open-mmlab/mmaction2/pull/395 ' + 'for more info') + + if not isinstance(dataloader, DataLoader): + raise TypeError(f'dataloader must be a pytorch DataLoader, ' + f'but got {type(dataloader)}') + + if interval <= 0: + raise ValueError( + f'interval must be positive, but got {interval}') + + assert isinstance(by_epoch, bool) + + if start is not None and start < 0: + warnings.warn( + f'The evaluation start epoch {start} is smaller than 0, ' + f'use 0 instead', UserWarning) + start = 0 + self.dataloader = dataloader + self.interval = interval + self.start = start + self.by_epoch = by_epoch + + assert isinstance(save_best, str) or save_best is None + self.save_best = save_best + self.eval_kwargs = eval_kwargs + self.initial_flag = True + + if self.save_best is not None: + self.best_ckpt_path = None + self._init_rule(rule, self.save_best) + + def _init_rule(self, rule, key_indicator): + """Initialize rule, key_indicator, comparison_func, and best score. + + Args: + rule (str | None): Comparison rule for best score. + key_indicator (str | None): Key indicator to determine the + comparison rule. + """ + if rule not in self.rule_map and rule is not None: + raise KeyError(f'rule must be greater, less or None, ' + f'but got {rule}.') + + if rule is None: + if key_indicator != 'auto': + if any(key in key_indicator for key in self.greater_keys): + rule = 'greater' + elif any(key in key_indicator for key in self.less_keys): + rule = 'less' + else: + raise ValueError( + f'Cannot infer the rule for key ' + f'{key_indicator}, thus a specific rule ' + f'must be specified.') + self.rule = rule + self.key_indicator = key_indicator + if self.rule is not None: + self.compare_func = self.rule_map[self.rule] + + def before_run(self, runner): + if self.save_best is not None: + if runner.meta is None: + warnings.warn('runner.meta is None. 
Creating an empty one.')
+                    runner.meta = dict()
+                runner.meta.setdefault('hook_msgs', dict())
+
+        def before_train_iter(self, runner):
+            """Evaluate the model only at the start of training by
+            iteration."""
+            if self.by_epoch:
+                return
+            if not self.initial_flag:
+                return
+            if self.start is not None and runner.iter >= self.start:
+                self.after_train_iter(runner)
+            self.initial_flag = False
+
+        def before_train_epoch(self, runner):
+            """Evaluate the model only at the start of training by epoch."""
+            if not self.by_epoch:
+                return
+            if not self.initial_flag:
+                return
+            if self.start is not None and runner.epoch >= self.start:
+                self.after_train_epoch(runner)
+            self.initial_flag = False
+
+        def after_train_iter(self, runner):
+            """Called after every training iter to evaluate the results."""
+            if not self.by_epoch:
+                self._do_evaluate(runner)
+
+        def after_train_epoch(self, runner):
+            """Called after every training epoch to evaluate the results."""
+            if self.by_epoch:
+                self._do_evaluate(runner)
+
+        def _do_evaluate(self, runner):
+            """perform evaluation and save ckpt."""
+            if not self.evaluation_flag(runner):
+                return
+
+            from mmaction.apis import single_gpu_test
+            results = single_gpu_test(runner.model, self.dataloader)
+            key_score = self.evaluate(runner, results)
             if self.save_best:
                 self._save_ckpt(runner, key_score)

+        def evaluation_flag(self, runner):
+            """Judge whether to perform evaluation.
+
+            Returns:
+                bool: The flag indicating whether to perform evaluation.
+            """
+            if self.by_epoch:
+                current = runner.epoch
+                check_time = self.every_n_epochs
+            else:
+                current = runner.iter
+                check_time = self.every_n_iters
+
+            if self.start is None:
+                if not check_time(runner, self.interval):
+                    # No evaluation during the interval.
+                    return False
+            elif (current + 1) < self.start:
+                # No evaluation if start is larger than the current time.
+                return False
+            else:
+                # Evaluation only at epochs/iters 3, 5, 7...
+                # if start==3 and interval==2
+                if (current + 1 - self.start) % self.interval:
+                    return False
+            return True
+
+        def _save_ckpt(self, runner, key_score):
+            if self.by_epoch:
+                current = f'epoch_{runner.epoch + 1}'
+                cur_type, cur_time = 'epoch', runner.epoch + 1
+            else:
+                current = f'iter_{runner.iter + 1}'
+                cur_type, cur_time = 'iter', runner.iter + 1
+
+            best_score = runner.meta['hook_msgs'].get(
+                'best_score', self.init_value_map[self.rule])
+            if self.compare_func(key_score, best_score):
+                best_score = key_score
+                runner.meta['hook_msgs']['best_score'] = best_score
+
+                if self.best_ckpt_path and osp.isfile(self.best_ckpt_path):
+                    os.remove(self.best_ckpt_path)
+
+                best_ckpt_name = f'best_{self.key_indicator}_{current}.pth'
+                runner.save_checkpoint(
+                    runner.work_dir, best_ckpt_name, create_symlink=False)
+                self.best_ckpt_path = osp.join(runner.work_dir, best_ckpt_name)
+
+                runner.meta['hook_msgs']['best_ckpt'] = self.best_ckpt_path
+                runner.logger.info(
+                    f'Now best checkpoint is saved as {best_ckpt_name}.')
+                runner.logger.info(
+                    f'Best {self.key_indicator} is {best_score:0.4f} '
+                    f'at {cur_time} {cur_type}.')
+
+        def evaluate(self, runner, results):
+            """Evaluate the results.
+
+            Args:
+                runner (:obj:`mmcv.Runner`): The underlying training runner.
+                results (list): Output results.
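[Editor's note] The `start`/`interval` arithmetic in `evaluation_flag` above is easy to misread, so here is the documented example (`start=3`, `interval=2` evaluates at epochs 3, 5, 7, ...) as a standalone check; `should_evaluate` is a hypothetical helper mirroring that test, not patch code:

```python
def should_evaluate(epoch, start=3, interval=2):
    # `epoch` is zero-based, so `epoch + 1` epochs have finished,
    # matching the `(current + 1 - start) % interval` test above
    if epoch + 1 < start:
        return False
    return (epoch + 1 - start) % interval == 0


assert [e + 1 for e in range(10) if should_evaluate(e)] == [3, 5, 7, 9]
```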
+ """ + eval_res = self.dataloader.dataset.evaluate( + results, logger=runner.logger, **self.eval_kwargs) + for name, val in eval_res.items(): + runner.log_buffer.output[name] = val + runner.log_buffer.ready = True + if self.save_best is not None: + if self.key_indicator == 'auto': + # infer from eval_results + self._init_rule(self.rule, list(eval_res.keys())[0]) + return eval_res[self.key_indicator] + + return None + + class DistEvalHook(EvalHook): # noqa: F811 + """Distributed evaluation hook. + + This hook will regularly perform evaluation in a given interval when + performing in distributed environment. -class EpochEvalHook(EvalHook): - """Deprecated class for ``EvalHook``.""" - - def __init__(self, *args, **kwargs): - warnings.warn( - '"EpochEvalHook" is deprecated, please switch to' - '"EvalHook". See https://github.com/open-mmlab/mmaction2/pull/395 for more info' # noqa: E501 - ) - super().__init__(*args, **kwargs) - - -class DistEpochEvalHook(DistEvalHook): - """Deprecated class for ``DistEvalHook``.""" + Args: + dataloader (DataLoader): A PyTorch dataloader. + start (int | None, optional): Evaluation starting epoch. It enables + evaluation before the training starts if ``start`` <= the + resuming epoch. If None, whether to evaluate is merely decided + by ``interval``. Default: None. + interval (int): Evaluation interval. Default: 1. + by_epoch (bool): Determine perform evaluation by epoch or by + iteration. If set to True, it will perform by epoch. Otherwise, + by iteration. default: True. + save_best (str | None, optional): If a metric is specified, it + would measure the best checkpoint during evaluation. The + information about best checkpoint would be save in best.json. + Options are the evaluation metrics to the test dataset. e.g., + ``top1_acc``, ``top5_acc``, ``mean_class_accuracy``, + ``mean_average_precision``, ``mmit_mean_average_precision`` + for action recognition dataset (RawframeDataset and + VideoDataset). ``AR@AN``, ``auc`` for action localization + dataset (ActivityNetDataset). ``mAP@0.5IOU`` for + spatio-temporal action detection dataset (AVADataset). + If ``save_best`` is ``auto``, the first key of the returned + ``OrderedDict`` result will be used. Default: 'auto'. + rule (str | None, optional): Comparison rule for best score. If + set to None, it will infer a reasonable rule. Keys such as + 'acc', 'top' .etc will be inferred by 'greater' rule. Keys + contain 'loss' will be inferred by 'less' rule. Options are + 'greater', 'less', None. Default: None. + tmpdir (str | None): Temporary directory to save the results of all + processes. Default: None. + gpu_collect (bool): Whether to use gpu or cpu to collect results. + Default: False. + broadcast_bn_buffer (bool): Whether to broadcast the + buffer(running_mean and running_var) of rank 0 to other rank + before evaluation. Default: True. + **eval_kwargs: Evaluation arguments fed into the evaluate function + of the dataset. + """ - def __init__(self, *args, **kwargs): - warnings.warn( - '"DistEpochEvalHook" is deprecated, please switch to' - '"DistEvalHook". 
See https://github.com/open-mmlab/mmaction2/pull/395 for more info' # noqa: E501 - ) - super().__init__(*args, **kwargs) + def __init__(self, + dataloader, + start=None, + interval=1, + by_epoch=True, + save_best='auto', + rule=None, + broadcast_bn_buffer=True, + tmpdir=None, + gpu_collect=False, + **eval_kwargs): + super().__init__( + dataloader, + start=start, + interval=interval, + by_epoch=by_epoch, + save_best=save_best, + rule=rule, + **eval_kwargs) + self.broadcast_bn_buffer = broadcast_bn_buffer + self.tmpdir = tmpdir + self.gpu_collect = gpu_collect + + def _do_evaluate(self, runner): + """perform evaluation and save ckpt.""" + # Synchronization of BatchNorm's buffer (running_mean + # and running_var) is not supported in the DDP of pytorch, + # which may cause the inconsistent performance of models in + # different ranks, so we broadcast BatchNorm's buffers + # of rank 0 to other ranks to avoid this. + if self.broadcast_bn_buffer: + model = runner.model + for name, module in model.named_modules(): + if isinstance(module, + _BatchNorm) and module.track_running_stats: + dist.broadcast(module.running_var, 0) + dist.broadcast(module.running_mean, 0) + + if not self.evaluation_flag(runner): + return + + from mmaction.apis import multi_gpu_test + tmpdir = self.tmpdir + if tmpdir is None: + tmpdir = osp.join(runner.work_dir, '.eval_hook') + + results = multi_gpu_test( + runner.model, + self.dataloader, + tmpdir=tmpdir, + gpu_collect=self.gpu_collect) + if runner.rank == 0: + print('\n') + key_score = self.evaluate(runner, results) + + if self.save_best: + self._save_ckpt(runner, key_score) diff --git a/tests/test_runtime/test_apis_test.py b/tests/test_runtime/test_apis_test.py index 29bb376b28..d3e5dcc947 100644 --- a/tests/test_runtime/test_apis_test.py +++ b/tests/test_runtime/test_apis_test.py @@ -1,4 +1,5 @@ import sys +import warnings from unittest.mock import MagicMock, Mock, patch import pytest @@ -6,8 +7,19 @@ import torch.nn as nn from torch.utils.data import DataLoader, Dataset -from mmaction.apis.test import (collect_results_cpu, multi_gpu_test, - single_gpu_test) +# TODO import test functions from mmcv and delete them from mmaction2 +try: + from mmcv.engine import (collect_results_cpu, multi_gpu_test, + single_gpu_test) + pytest.skip( + 'Test functions are supported in MMCV', allow_module_level=True) +except (ImportError, ModuleNotFoundError): + warnings.warn( + 'DeprecationWarning: single_gpu_test, multi_gpu_test, ' + 'collect_results_cpu, collect_results_gpu from mmaction2 will be ' + 'deprecated. 
Please install mmcv through master branch.') + from mmaction.apis.test import (collect_results_cpu, multi_gpu_test, + single_gpu_test) class OldStyleModel(nn.Module): diff --git a/tests/test_runtime/test_eval_hook.py b/tests/test_runtime/test_eval_hook.py index 7d79fe51a8..4af9b3c498 100644 --- a/tests/test_runtime/test_eval_hook.py +++ b/tests/test_runtime/test_eval_hook.py @@ -1,6 +1,7 @@ import os.path as osp import tempfile import unittest.mock as mock +import warnings from collections import OrderedDict from unittest.mock import MagicMock, patch @@ -11,7 +12,17 @@ from mmcv.utils import get_logger from torch.utils.data import DataLoader, Dataset -from mmaction.core import DistEvalHook, EvalHook +# TODO import eval hooks from mmcv and delete them from mmaction2 +try: + from mmcv.runner import EvalHook, DistEvalHook + pytest.skip( + 'EvalHook and DistEvalHook are supported in MMCV', + allow_module_level=True) +except ImportError: + warnings.warn('DeprecationWarning: EvalHook and DistEvalHook from ' + 'mmaction2 will be deprecated. Please install mmcv through ' + 'master branch.') + from mmaction.core import DistEvalHook, EvalHook class ExampleDataset(Dataset): diff --git a/tools/test.py b/tools/test.py index ab037abaa8..cafed2b3b5 100644 --- a/tools/test.py +++ b/tools/test.py @@ -12,11 +12,20 @@ from mmcv.runner import get_dist_info, init_dist, load_checkpoint from mmcv.runner.fp16_utils import wrap_fp16_model -from mmaction.apis import multi_gpu_test, single_gpu_test from mmaction.datasets import build_dataloader, build_dataset from mmaction.models import build_model from mmaction.utils import register_module_hooks +# TODO import test functions from mmcv and delete them from mmaction2 +try: + from mmcv.engine import multi_gpu_test, single_gpu_test +except (ImportError, ModuleNotFoundError): + warnings.warn( + 'DeprecationWarning: single_gpu_test, multi_gpu_test, ' + 'collect_results_cpu, collect_results_gpu from mmaction2 will be ' + 'deprecated. 
Please install mmcv through master branch.') + from mmaction.apis import multi_gpu_test, single_gpu_test + def parse_args(): parser = argparse.ArgumentParser( From 8ae1db9cd0a265e8fb343c6baaefb8ca0252cf05 Mon Sep 17 00:00:00 2001 From: Kenny Date: Fri, 30 Apr 2021 20:19:00 +0800 Subject: [PATCH 072/414] fix bug --- mmaction/datasets/pipelines/pose_loading.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/mmaction/datasets/pipelines/pose_loading.py b/mmaction/datasets/pipelines/pose_loading.py index 9c1e18c616..afe847df86 100644 --- a/mmaction/datasets/pipelines/pose_loading.py +++ b/mmaction/datasets/pipelines/pose_loading.py @@ -307,12 +307,14 @@ def __call__(self, results): total_frames = results['total_frames'] - frame_inds = list(results.pop('frame_inds')) + frame_inds = results.pop('frame_inds') if anno_inds is not None: kps = kps[anno_inds] frame_inds = frame_inds[anno_inds] + frame_inds = list(frame_inds) + def mapinds(inds): uni = np.unique(inds) mapp = {x: i for i, x in enumerate(uni)} From 2f88e0472db5a77e373bc32110f6ed70120bc888 Mon Sep 17 00:00:00 2001 From: Kenny Date: Fri, 30 Apr 2021 20:29:55 +0800 Subject: [PATCH 073/414] update unittest --- .../test_loadings/test_pose_loading.py | 115 +++++++++++++++++- 1 file changed, 114 insertions(+), 1 deletion(-) diff --git a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py index 8d27762369..fc32fc2797 100644 --- a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py +++ b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py @@ -1,7 +1,14 @@ +import copy as cp # isort: skip +from collections import defaultdict # isort: skip + import numpy as np # isort: skip +from mmcv import dump # isort: skip +from numpy.testing import assert_array_almost_equal # isort: skip from numpy.testing import assert_array_equal # isort: skip -from mmaction.datasets.pipelines.pose_loading import UniformSampleFrames +from mmaction.datasets.pipelines import LoadKineticsPose # isort: skip +from mmaction.datasets.pipelines import PoseDecode # isort: skip +from mmaction.datasets.pipelines import UniformSampleFrames # isort: skip class TestPoseLoading: @@ -62,3 +69,109 @@ def test_uniform_sample_frames(self): assert sampling_results['frame_interval'] is None assert sampling_results['num_clips'] == 1 assert len(sampling_results['frame_inds']) == 8 + + def test_pose_decode(): + kp = np.random.random([1, 16, 17, 2]) + kpscore = np.random.random([1, 16, 17]) + frame_inds = np.array([2, 4, 6, 8, 10]) + results = dict(kp=kp, kpscore=kpscore, frame_inds=frame_inds) + pose_decode = PoseDecode() + assert str(pose_decode) == ('PoseDecode(random_drop=False, ' + 'random_seed=1, ' + 'drop_prob=0.0625, ' + 'manipulate_joints=(7, 8, 9, 10, ' + '13, 14, 15, 16))') + decode_results = pose_decode(results) + assert_array_almost_equal(decode_results['kp'], kp[:, frame_inds]) + assert_array_almost_equal(decode_results['kpscore'], + kpscore[:, frame_inds]) + + results = dict(kp=kp, kpscore=kpscore, total_frames=16) + pose_decode = PoseDecode() + decode_results = pose_decode(results) + assert_array_almost_equal(decode_results['kp'], kp) + assert_array_almost_equal(decode_results['kpscore'], kpscore) + + results = dict(kp=kp, kpscore=kpscore, frame_inds=frame_inds) + pose_decode = PoseDecode( + random_drop=True, drop_prob=1, manipulate_joints=(7, )) + decode_results = pose_decode(results) + assert_array_almost_equal(decode_results['kpscore'][..., 7], 0) + + 
def test_load_kinetics_pose(): + + def get_mode(arr): + cnt = defaultdict(lambda: 0) + for num in arr: + cnt[num] += 1 + max_val = max(cnt.values()) + return [k for k in cnt if cnt[k] == max_val], max_val + + filename = '/tmp/tmp.pkl' + total_frames = 100 + img_shape = (224, 224) + frame_inds = np.random.choice(range(100), size=120) + frame_inds.sort() + anno_flag = np.random.random(120) > 0.1 + anno_inds = np.array([i for i, f in enumerate(anno_flag) if f]) + kp = np.random.random([120, 17, 3]) + dump(kp, filename) + results = dict( + filename=filename, + total_frames=total_frames, + img_shape=img_shape, + frame_inds=frame_inds) + + inp = cp.deepcopy(results) + load_kinetics_pose = LoadKineticsPose( + squeeze=True, max_person=100, source='openpose') + return_results = load_kinetics_pose(inp) + assert return_results['kp'].shape[:-1] == \ + return_results['kpscore'].shape + + num_person = return_results['kp'].shape[0] + num_frame = return_results['kp'].shape[1] + assert num_person == get_mode(frame_inds)[1] + assert np.max(return_results['kp']) > 1 + assert num_frame == len(set(frame_inds)) + + inp = cp.deepcopy(results) + load_kinetics_pose = LoadKineticsPose( + squeeze=False, max_person=100, source='openpose') + return_results = load_kinetics_pose(inp) + assert return_results['kp'].shape[:-1] == \ + return_results['kpscore'].shape + + num_person = return_results['kp'].shape[0] + num_frame = return_results['kp'].shape[1] + assert num_person == get_mode(frame_inds)[1] + assert np.max(return_results['kp']) > 1 + assert num_frame == total_frames + + inp = cp.deepcopy(results) + inp['anno_inds'] = anno_inds + load_kinetics_pose = LoadKineticsPose( + squeeze=True, max_person=100, source='mmpose') + return_results = load_kinetics_pose(inp) + assert return_results['kp'].shape[:-1] == \ + return_results['kpscore'].shape + + num_person = return_results['kp'].shape[0] + num_frame = return_results['kp'].shape[1] + assert num_person == get_mode(frame_inds[anno_inds])[1] + assert np.max(return_results['kp']) <= 1 + assert num_frame == len(set(frame_inds[anno_inds])) + + inp = cp.deepcopy(results) + inp['anno_inds'] = anno_inds + load_kinetics_pose = LoadKineticsPose( + squeeze=True, max_person=2, source='mmpose') + return_results = load_kinetics_pose(inp) + assert return_results['kp'].shape[:-1] == \ + return_results['kpscore'].shape + + num_person = return_results['kp'].shape[0] + num_frame = return_results['kp'].shape[1] + assert num_person <= 2 + assert np.max(return_results['kp']) <= 1 + assert num_frame == len(set(frame_inds[anno_inds])) From da013a5407556e343fbb949d9960af8c5fd5248a Mon Sep 17 00:00:00 2001 From: Kenny Date: Fri, 30 Apr 2021 20:30:33 +0800 Subject: [PATCH 074/414] remote isort skip --- .../test_loadings/test_pose_loading.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py index fc32fc2797..20be42d051 100644 --- a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py +++ b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py @@ -1,14 +1,12 @@ -import copy as cp # isort: skip -from collections import defaultdict # isort: skip +import copy as cp +from collections import defaultdict -import numpy as np # isort: skip -from mmcv import dump # isort: skip -from numpy.testing import assert_array_almost_equal # isort: skip -from numpy.testing import assert_array_equal # isort: skip +import numpy as 
np +from mmcv import dump +from numpy.testing import assert_array_almost_equal, assert_array_equal -from mmaction.datasets.pipelines import LoadKineticsPose # isort: skip -from mmaction.datasets.pipelines import PoseDecode # isort: skip -from mmaction.datasets.pipelines import UniformSampleFrames # isort: skip +from mmaction.datasets.pipelines import (LoadKineticsPose, PoseDecode, + UniformSampleFrames) class TestPoseLoading: From dc39f1fc667ceaa4a62de88a1b5f01677d5e087f Mon Sep 17 00:00:00 2001 From: Kenny Date: Fri, 30 Apr 2021 21:06:29 +0800 Subject: [PATCH 075/414] update unittest --- .../test_pipelines/test_loadings/test_pose_loading.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py index 20be42d051..9c5df56456 100644 --- a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py +++ b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py @@ -68,7 +68,7 @@ def test_uniform_sample_frames(self): assert sampling_results['num_clips'] == 1 assert len(sampling_results['frame_inds']) == 8 - def test_pose_decode(): + def test_pose_decode(self): kp = np.random.random([1, 16, 17, 2]) kpscore = np.random.random([1, 16, 17]) frame_inds = np.array([2, 4, 6, 8, 10]) @@ -96,7 +96,7 @@ def test_pose_decode(): decode_results = pose_decode(results) assert_array_almost_equal(decode_results['kpscore'][..., 7], 0) - def test_load_kinetics_pose(): + def test_load_kinetics_pose(self): def get_mode(arr): cnt = defaultdict(lambda: 0) From 56515c493a12fe2cc8190498e5e3ad5b321f1975 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Fri, 30 Apr 2021 21:41:20 +0800 Subject: [PATCH 076/414] [Refactor] Use MMCV Model Registry (#843) * use registry * use registry * use registry * fix circle import * fix --- docs/getting_started.md | 2 +- docs/tutorials/3_new_dataset.md | 2 +- docs/tutorials/5_new_modules.md | 6 +- docs/tutorials/7_customize_runtime.md | 8 +-- docs_zh_CN/getting_started.md | 2 +- docs_zh_CN/tutorials/3_new_dataset.md | 2 +- docs_zh_CN/tutorials/5_new_modules.md | 6 +- docs_zh_CN/tutorials/7_customize_runtime.md | 8 +-- mmaction/datasets/__init__.py | 6 +- mmaction/datasets/activitynet_dataset.py | 2 +- mmaction/datasets/audio_dataset.py | 2 +- mmaction/datasets/audio_feature_dataset.py | 2 +- mmaction/datasets/audio_visual_dataset.py | 2 +- mmaction/datasets/ava_dataset.py | 2 +- mmaction/datasets/blending_utils.py | 2 +- mmaction/datasets/builder.py | 9 ++- mmaction/datasets/dataset_wrappers.py | 2 +- mmaction/datasets/hvu_dataset.py | 2 +- mmaction/datasets/image_dataset.py | 2 +- mmaction/datasets/pipelines/augmentations.py | 2 +- mmaction/datasets/pipelines/compose.py | 2 +- mmaction/datasets/pipelines/formating.py | 2 +- mmaction/datasets/pipelines/loading.py | 2 +- mmaction/datasets/rawframe_dataset.py | 2 +- mmaction/datasets/rawvideo_dataset.py | 2 +- mmaction/datasets/registry.py | 5 -- mmaction/datasets/ssn_dataset.py | 2 +- mmaction/datasets/video_dataset.py | 2 +- mmaction/models/__init__.py | 6 +- mmaction/models/backbones/c3d.py | 2 +- mmaction/models/backbones/mobilenet_v2_tsm.py | 2 +- mmaction/models/backbones/resnet.py | 2 +- mmaction/models/backbones/resnet2plus1d.py | 2 +- mmaction/models/backbones/resnet3d.py | 2 +- mmaction/models/backbones/resnet3d_csn.py | 2 +- .../models/backbones/resnet3d_slowfast.py | 2 +- .../models/backbones/resnet3d_slowonly.py | 2 +- 
mmaction/models/backbones/resnet_audio.py | 4 +- mmaction/models/backbones/resnet_tin.py | 2 +- mmaction/models/backbones/resnet_tsm.py | 2 +- mmaction/models/backbones/tanet.py | 2 +- mmaction/models/backbones/x3d.py | 2 +- mmaction/models/builder.py | 62 +++++++------------ mmaction/models/heads/audio_tsn_head.py | 2 +- mmaction/models/heads/i3d_head.py | 2 +- mmaction/models/heads/slowfast_head.py | 2 +- mmaction/models/heads/ssn_head.py | 2 +- mmaction/models/heads/tpn_head.py | 2 +- mmaction/models/heads/trn_head.py | 2 +- mmaction/models/heads/tsm_head.py | 2 +- mmaction/models/heads/tsn_head.py | 2 +- mmaction/models/heads/x3d_head.py | 2 +- mmaction/models/localizers/bmn.py | 3 +- mmaction/models/localizers/bsn.py | 3 +- mmaction/models/localizers/ssn.py | 2 +- .../losses/binary_logistic_regression_loss.py | 2 +- mmaction/models/losses/bmn_loss.py | 2 +- mmaction/models/losses/cross_entropy_loss.py | 2 +- mmaction/models/losses/hvu_loss.py | 2 +- mmaction/models/losses/nll_loss.py | 2 +- mmaction/models/losses/ssn_loss.py | 2 +- mmaction/models/necks/tpn.py | 3 +- .../models/recognizers/audio_recognizer.py | 2 +- mmaction/models/recognizers/base.py | 2 +- mmaction/models/recognizers/recognizer2d.py | 2 +- mmaction/models/recognizers/recognizer3d.py | 2 +- mmaction/models/registry.py | 8 --- tests/test_runtime/test_train.py | 2 +- 68 files changed, 106 insertions(+), 139 deletions(-) delete mode 100644 mmaction/datasets/registry.py delete mode 100644 mmaction/models/registry.py diff --git a/docs/getting_started.md b/docs/getting_started.md index 1c7f204c95..a31934aec7 100644 --- a/docs/getting_started.md +++ b/docs/getting_started.md @@ -248,7 +248,7 @@ in [TSM: Temporal Shift Module for Efficient Video Understanding](https://arxiv. 1. create a new file in `mmaction/models/backbones/resnet_tsm.py`. ```python - from ..registry import BACKBONES + from ..builder import BACKBONES from .resnet import ResNet @BACKBONES.register_module() diff --git a/docs/tutorials/3_new_dataset.md b/docs/tutorials/3_new_dataset.md index 8c7d08c913..1b1d18d37e 100644 --- a/docs/tutorials/3_new_dataset.md +++ b/docs/tutorials/3_new_dataset.md @@ -170,7 +170,7 @@ import os.path as osp import mmcv from .base import BaseDataset -from .registry import DATASETS +from .builder import DATASETS @DATASETS.register_module() diff --git a/docs/tutorials/5_new_modules.md b/docs/tutorials/5_new_modules.md index 0652c3f7af..940205b596 100644 --- a/docs/tutorials/5_new_modules.md +++ b/docs/tutorials/5_new_modules.md @@ -23,7 +23,7 @@ Assume you want to add an optimizer named as `MyOptimizer`, which has arguments You need to first implement the new optimizer in a file, e.g., in `mmaction/core/optimizer/my_optimizer.py`: ```python -from .registry import OPTIMIZERS +from mmcv.runner import OPTIMIZERS from torch.optim import Optimizer @OPTIMIZERS.register_module() @@ -118,7 +118,7 @@ Here we show how to develop new components with an example of TSN. ```python import torch.nn as nn - from ..registry import BACKBONES + from ..builder import BACKBONES @BACKBONES.register_module() class ResNet(nn.Module): @@ -161,7 +161,7 @@ Here we show how to develop a new head with the example of TSNHead as the follow and overwrite `init_weights(self)` and `forward(self, x)` method. 
```python - from ..registry import HEADS + from ..builder import HEADS from .base import BaseHead diff --git a/docs/tutorials/7_customize_runtime.md b/docs/tutorials/7_customize_runtime.md index 50c77542ff..dffb2e9668 100644 --- a/docs/tutorials/7_customize_runtime.md +++ b/docs/tutorials/7_customize_runtime.md @@ -59,7 +59,7 @@ You need to create a new directory named `mmaction/core/optimizer`. And then implement the new optimizer in a file, e.g., in `mmaction/core/optimizer/my_optimizer.py`: ```python -from .registry import OPTIMIZERS +from mmcv.runner import OPTIMIZERS from torch.optim import Optimizer @@ -113,11 +113,7 @@ Some models may have some parameter-specific settings for optimization, e.g. wei The users can do those fine-grained parameter tuning through customizing optimizer constructor. ```python -from mmcv.utils import build_from_cfg - -from mmcv.runner.optimizer import OPTIMIZER_BUILDERS, OPTIMIZERS -from mmaction.utils import get_root_logger -from .my_optimizer import MyOptimizer +from mmcv.runner.optimizer import OPTIMIZER_BUILDERS @OPTIMIZER_BUILDERS.register_module() diff --git a/docs_zh_CN/getting_started.md b/docs_zh_CN/getting_started.md index 7e3de13fc3..b0e7dde2fa 100644 --- a/docs_zh_CN/getting_started.md +++ b/docs_zh_CN/getting_started.md @@ -242,7 +242,7 @@ MMAction2 将模型组件分为 4 种基础模型: 1. 创建 `mmaction/models/backbones/resnet_tsm.py` 文件 ```python - from ..registry import BACKBONES + from ..builder import BACKBONES from .resnet import ResNet @BACKBONES.register_module() diff --git a/docs_zh_CN/tutorials/3_new_dataset.md b/docs_zh_CN/tutorials/3_new_dataset.md index b6d2faf9e6..19402cb41e 100644 --- a/docs_zh_CN/tutorials/3_new_dataset.md +++ b/docs_zh_CN/tutorials/3_new_dataset.md @@ -163,7 +163,7 @@ import os.path as osp import mmcv from .base import BaseDataset -from .registry import DATASETS +from .builder import DATASETS @DATASETS.register_module() diff --git a/docs_zh_CN/tutorials/5_new_modules.md b/docs_zh_CN/tutorials/5_new_modules.md index f6f23d1fe7..ead61332bb 100644 --- a/docs_zh_CN/tutorials/5_new_modules.md +++ b/docs_zh_CN/tutorials/5_new_modules.md @@ -23,7 +23,7 @@ 用户需要首先实现一个新的优化器文件,如 `mmaction/core/optimizer/my_optimizer.py`: ```python -from .registry import OPTIMIZERS +from mmcv.runner import OPTIMIZERS from torch.optim import Optimizer @OPTIMIZERS.register_module() @@ -105,7 +105,7 @@ MMAction2 将模型组件分为 4 种基础模型: ```python import torch.nn as nn - from ..registry import BACKBONES + from ..builder import BACKBONES @BACKBONES.register_module() class ResNet(nn.Module): @@ -148,7 +148,7 @@ MMAction2 将模型组件分为 4 种基础模型: 并重写 `init_weights(self)` 和 `forward(self, x)` 方法 ```python - from ..registry import HEADS + from ..builder import HEADS from .base import BaseHead diff --git a/docs_zh_CN/tutorials/7_customize_runtime.md b/docs_zh_CN/tutorials/7_customize_runtime.md index d0e80a0ff0..bbf451a0db 100644 --- a/docs_zh_CN/tutorials/7_customize_runtime.md +++ b/docs_zh_CN/tutorials/7_customize_runtime.md @@ -58,7 +58,7 @@ optimizer = dict(type='Adam', lr=0.001, betas=(0.9, 0.999), eps=1e-08, weight_de 可以创建一个名为 `mmaction/core/optimizer` 的文件夹,并在目录下的文件进行构建,如 `mmaction/core/optimizer/my_optimizer.py`: ```python -from .registry import OPTIMIZERS +from mmcv.runner import OPTIMIZERS from torch.optim import Optimizer @@ -111,11 +111,7 @@ optimizer = dict(type='MyOptimizer', a=a_value, b=b_value, c=c_value) 用户可以通过自定义优化器构造函数来进行那些细粒度的参数调整。 ```python -from mmcv.utils import build_from_cfg - -from mmcv.runner.optimizer import OPTIMIZER_BUILDERS, OPTIMIZERS -from mmaction.utils 
import get_root_logger -from .my_optimizer import MyOptimizer +from mmcv.runner.optimizer import OPTIMIZER_BUILDERS @OPTIMIZER_BUILDERS.register_module() diff --git a/mmaction/datasets/__init__.py b/mmaction/datasets/__init__.py index 48341c9383..5575422b67 100644 --- a/mmaction/datasets/__init__.py +++ b/mmaction/datasets/__init__.py @@ -6,7 +6,8 @@ from .base import BaseDataset from .blending_utils import (BaseMiniBatchBlending, CutmixBlending, MixupBlending) -from .builder import build_dataloader, build_dataset +from .builder import (BLENDINGS, DATASETS, PIPELINES, build_dataloader, + build_dataset) from .dataset_wrappers import RepeatDataset from .hvu_dataset import HVUDataset from .image_dataset import ImageDataset @@ -20,5 +21,6 @@ 'RawframeDataset', 'BaseDataset', 'ActivityNetDataset', 'SSNDataset', 'HVUDataset', 'AudioDataset', 'AudioFeatureDataset', 'ImageDataset', 'RawVideoDataset', 'AVADataset', 'AudioVisualDataset', - 'BaseMiniBatchBlending', 'CutmixBlending', 'MixupBlending' + 'BaseMiniBatchBlending', 'CutmixBlending', 'MixupBlending', 'DATASETS', + 'PIPELINES', 'BLENDINGS' ] diff --git a/mmaction/datasets/activitynet_dataset.py b/mmaction/datasets/activitynet_dataset.py index 018fd7b889..db04a8cfb2 100644 --- a/mmaction/datasets/activitynet_dataset.py +++ b/mmaction/datasets/activitynet_dataset.py @@ -9,7 +9,7 @@ from ..core import average_recall_at_avg_proposals from .base import BaseDataset -from .registry import DATASETS +from .builder import DATASETS @DATASETS.register_module() diff --git a/mmaction/datasets/audio_dataset.py b/mmaction/datasets/audio_dataset.py index 4443402a71..041695e9b2 100644 --- a/mmaction/datasets/audio_dataset.py +++ b/mmaction/datasets/audio_dataset.py @@ -3,7 +3,7 @@ import torch from .base import BaseDataset -from .registry import DATASETS +from .builder import DATASETS @DATASETS.register_module() diff --git a/mmaction/datasets/audio_feature_dataset.py b/mmaction/datasets/audio_feature_dataset.py index 15daa1182c..89e7c06e3d 100644 --- a/mmaction/datasets/audio_feature_dataset.py +++ b/mmaction/datasets/audio_feature_dataset.py @@ -3,7 +3,7 @@ import torch from .base import BaseDataset -from .registry import DATASETS +from .builder import DATASETS @DATASETS.register_module() diff --git a/mmaction/datasets/audio_visual_dataset.py b/mmaction/datasets/audio_visual_dataset.py index 4a1b97f946..6e10b4b040 100644 --- a/mmaction/datasets/audio_visual_dataset.py +++ b/mmaction/datasets/audio_visual_dataset.py @@ -1,7 +1,7 @@ import os.path as osp +from .builder import DATASETS from .rawframe_dataset import RawframeDataset -from .registry import DATASETS @DATASETS.register_module() diff --git a/mmaction/datasets/ava_dataset.py b/mmaction/datasets/ava_dataset.py index fb017c0358..111a5e146f 100644 --- a/mmaction/datasets/ava_dataset.py +++ b/mmaction/datasets/ava_dataset.py @@ -11,7 +11,7 @@ from ..core.evaluation.ava_utils import ava_eval, read_labelmap, results2csv from ..utils import get_root_logger from .base import BaseDataset -from .registry import DATASETS +from .builder import DATASETS @DATASETS.register_module() diff --git a/mmaction/datasets/blending_utils.py b/mmaction/datasets/blending_utils.py index 88eb541f61..8ef35b0e73 100644 --- a/mmaction/datasets/blending_utils.py +++ b/mmaction/datasets/blending_utils.py @@ -4,7 +4,7 @@ import torch.nn.functional as F from torch.distributions.beta import Beta -from .registry import BLENDINGS +from .builder import BLENDINGS __all__ = ['BaseMiniBatchBlending', 'MixupBlending', 'CutmixBlending'] diff 
--git a/mmaction/datasets/builder.py b/mmaction/datasets/builder.py index 33056bde7f..267c323b0f 100644 --- a/mmaction/datasets/builder.py +++ b/mmaction/datasets/builder.py @@ -5,11 +5,9 @@ import numpy as np from mmcv.parallel import collate from mmcv.runner import get_dist_info -from mmcv.utils import build_from_cfg +from mmcv.utils import Registry, build_from_cfg from torch.utils.data import DataLoader -from .dataset_wrappers import RepeatDataset -from .registry import DATASETS from .samplers import ClassSpecificDistributedSampler, DistributedSampler if platform.system() != 'Windows': @@ -20,6 +18,10 @@ soft_limit = min(4096, hard_limit) resource.setrlimit(resource.RLIMIT_NOFILE, (soft_limit, hard_limit)) +DATASETS = Registry('dataset') +PIPELINES = Registry('pipeline') +BLENDINGS = Registry('blending') + def build_dataset(cfg, default_args=None): """Build a dataset from config dict. @@ -33,6 +35,7 @@ def build_dataset(cfg, default_args=None): Dataset: The constructed dataset. """ if cfg['type'] == 'RepeatDataset': + from .dataset_wrappers import RepeatDataset dataset = RepeatDataset( build_dataset(cfg['dataset'], default_args), cfg['times']) else: diff --git a/mmaction/datasets/dataset_wrappers.py b/mmaction/datasets/dataset_wrappers.py index 3ec8af0941..ecb7609b24 100644 --- a/mmaction/datasets/dataset_wrappers.py +++ b/mmaction/datasets/dataset_wrappers.py @@ -1,4 +1,4 @@ -from .registry import DATASETS +from .builder import DATASETS @DATASETS.register_module() diff --git a/mmaction/datasets/hvu_dataset.py b/mmaction/datasets/hvu_dataset.py index 12beeb64aa..59a4c855a8 100644 --- a/mmaction/datasets/hvu_dataset.py +++ b/mmaction/datasets/hvu_dataset.py @@ -8,7 +8,7 @@ from ..core import mean_average_precision from .base import BaseDataset -from .registry import DATASETS +from .builder import DATASETS @DATASETS.register_module() diff --git a/mmaction/datasets/image_dataset.py b/mmaction/datasets/image_dataset.py index 59f859b83f..a359277264 100644 --- a/mmaction/datasets/image_dataset.py +++ b/mmaction/datasets/image_dataset.py @@ -1,4 +1,4 @@ -from .registry import DATASETS +from .builder import DATASETS from .video_dataset import VideoDataset diff --git a/mmaction/datasets/pipelines/augmentations.py b/mmaction/datasets/pipelines/augmentations.py index ad4d44c85a..32a8f5c75c 100644 --- a/mmaction/datasets/pipelines/augmentations.py +++ b/mmaction/datasets/pipelines/augmentations.py @@ -6,7 +6,7 @@ import numpy as np from torch.nn.modules.utils import _pair -from ..registry import PIPELINES +from ..builder import PIPELINES def _init_lazy_if_proper(results, lazy): diff --git a/mmaction/datasets/pipelines/compose.py b/mmaction/datasets/pipelines/compose.py index f6f557a8f3..c4d315aba5 100644 --- a/mmaction/datasets/pipelines/compose.py +++ b/mmaction/datasets/pipelines/compose.py @@ -2,7 +2,7 @@ from mmcv.utils import build_from_cfg -from ..registry import PIPELINES +from ..builder import PIPELINES @PIPELINES.register_module() diff --git a/mmaction/datasets/pipelines/formating.py b/mmaction/datasets/pipelines/formating.py index df784796dc..045d54fbd0 100644 --- a/mmaction/datasets/pipelines/formating.py +++ b/mmaction/datasets/pipelines/formating.py @@ -5,7 +5,7 @@ import torch from mmcv.parallel import DataContainer as DC -from ..registry import PIPELINES +from ..builder import PIPELINES def to_tensor(data): diff --git a/mmaction/datasets/pipelines/loading.py b/mmaction/datasets/pipelines/loading.py index 2fb6bf3365..102b221db8 100644 --- a/mmaction/datasets/pipelines/loading.py +++ 
b/mmaction/datasets/pipelines/loading.py @@ -11,7 +11,7 @@ from torch.nn.modules.utils import _pair from ...utils import get_random_string, get_shm_dir, get_thread_id -from ..registry import PIPELINES +from ..builder import PIPELINES @PIPELINES.register_module() diff --git a/mmaction/datasets/rawframe_dataset.py b/mmaction/datasets/rawframe_dataset.py index 6a0883e18e..5bcf678cfe 100644 --- a/mmaction/datasets/rawframe_dataset.py +++ b/mmaction/datasets/rawframe_dataset.py @@ -4,7 +4,7 @@ import torch from .base import BaseDataset -from .registry import DATASETS +from .builder import DATASETS @DATASETS.register_module() diff --git a/mmaction/datasets/rawvideo_dataset.py b/mmaction/datasets/rawvideo_dataset.py index 5ba5e612eb..ada7b4aa66 100644 --- a/mmaction/datasets/rawvideo_dataset.py +++ b/mmaction/datasets/rawvideo_dataset.py @@ -5,7 +5,7 @@ import mmcv from .base import BaseDataset -from .registry import DATASETS +from .builder import DATASETS @DATASETS.register_module() diff --git a/mmaction/datasets/registry.py b/mmaction/datasets/registry.py deleted file mode 100644 index ade60f11b3..0000000000 --- a/mmaction/datasets/registry.py +++ /dev/null @@ -1,5 +0,0 @@ -from mmcv.utils import Registry - -DATASETS = Registry('dataset') -PIPELINES = Registry('pipeline') -BLENDINGS = Registry('blending') diff --git a/mmaction/datasets/ssn_dataset.py b/mmaction/datasets/ssn_dataset.py index 374926d42a..76d24324df 100644 --- a/mmaction/datasets/ssn_dataset.py +++ b/mmaction/datasets/ssn_dataset.py @@ -12,7 +12,7 @@ perform_regression, temporal_iou, temporal_nms) from ..utils import get_root_logger from .base import BaseDataset -from .registry import DATASETS +from .builder import DATASETS class SSNInstance: diff --git a/mmaction/datasets/video_dataset.py b/mmaction/datasets/video_dataset.py index 08b90862be..7c1b681b99 100644 --- a/mmaction/datasets/video_dataset.py +++ b/mmaction/datasets/video_dataset.py @@ -1,7 +1,7 @@ import os.path as osp from .base import BaseDataset -from .registry import DATASETS +from .builder import DATASETS @DATASETS.register_module() diff --git a/mmaction/models/__init__.py b/mmaction/models/__init__.py index f73a26d83a..ac7b4ce27e 100644 --- a/mmaction/models/__init__.py +++ b/mmaction/models/__init__.py @@ -2,7 +2,8 @@ ResNet2Plus1d, ResNet3d, ResNet3dCSN, ResNet3dLayer, ResNet3dSlowFast, ResNet3dSlowOnly, ResNetAudio, ResNetTIN, ResNetTSM, TANet) -from .builder import (DETECTORS, build_backbone, build_detector, build_head, +from .builder import (BACKBONES, DETECTORS, HEADS, LOCALIZERS, LOSSES, NECKS, + RECOGNIZERS, build_backbone, build_detector, build_head, build_localizer, build_loss, build_model, build_neck, build_recognizer) from .common import LFB, TAM, Conv2plus1d, ConvAudio @@ -16,7 +17,6 @@ from .necks import TPN from .recognizers import (AudioRecognizer, BaseRecognizer, recognizer2d, recognizer3d) -from .registry import BACKBONES, HEADS, LOCALIZERS, LOSSES, RECOGNIZERS from .roi_extractors import SingleRoIExtractor3D __all__ = [ @@ -32,5 +32,5 @@ 'AudioTSNHead', 'X3D', 'X3DHead', 'ResNet3dLayer', 'DETECTORS', 'SingleRoIExtractor3D', 'BBoxHeadAVA', 'ResNetAudio', 'build_detector', 'ConvAudio', 'AVARoIHead', 'MobileNetV2', 'MobileNetV2TSM', 'TANet', 'LFB', - 'FBOHead', 'LFBInferHead', 'TRNHead' + 'FBOHead', 'LFBInferHead', 'TRNHead', 'NECKS' ] diff --git a/mmaction/models/backbones/c3d.py b/mmaction/models/backbones/c3d.py index 847ff576d9..cfb203988a 100644 --- a/mmaction/models/backbones/c3d.py +++ b/mmaction/models/backbones/c3d.py @@ -4,7 +4,7 @@ from 
mmcv.utils import _BatchNorm from ...utils import get_root_logger -from ..registry import BACKBONES +from ..builder import BACKBONES @BACKBONES.register_module() diff --git a/mmaction/models/backbones/mobilenet_v2_tsm.py b/mmaction/models/backbones/mobilenet_v2_tsm.py index dd37bdbb7c..af9f9d5e18 100644 --- a/mmaction/models/backbones/mobilenet_v2_tsm.py +++ b/mmaction/models/backbones/mobilenet_v2_tsm.py @@ -1,4 +1,4 @@ -from ..registry import BACKBONES +from ..builder import BACKBONES from .mobilenet_v2 import InvertedResidual, MobileNetV2 from .resnet_tsm import TemporalShift diff --git a/mmaction/models/backbones/resnet.py b/mmaction/models/backbones/resnet.py index abb9aff303..5004e0f4c1 100644 --- a/mmaction/models/backbones/resnet.py +++ b/mmaction/models/backbones/resnet.py @@ -5,7 +5,7 @@ from torch.utils import checkpoint as cp from ...utils import get_root_logger -from ..registry import BACKBONES +from ..builder import BACKBONES class BasicBlock(nn.Module): diff --git a/mmaction/models/backbones/resnet2plus1d.py b/mmaction/models/backbones/resnet2plus1d.py index ec408028eb..4329ba404c 100644 --- a/mmaction/models/backbones/resnet2plus1d.py +++ b/mmaction/models/backbones/resnet2plus1d.py @@ -1,4 +1,4 @@ -from ..registry import BACKBONES +from ..builder import BACKBONES from .resnet3d import ResNet3d diff --git a/mmaction/models/backbones/resnet3d.py b/mmaction/models/backbones/resnet3d.py index f1023342aa..ab1641e079 100644 --- a/mmaction/models/backbones/resnet3d.py +++ b/mmaction/models/backbones/resnet3d.py @@ -7,7 +7,7 @@ from torch.nn.modules.utils import _ntuple, _triple from ...utils import get_root_logger -from ..registry import BACKBONES +from ..builder import BACKBONES try: from mmdet.models.builder import SHARED_HEADS as MMDET_SHARED_HEADS diff --git a/mmaction/models/backbones/resnet3d_csn.py b/mmaction/models/backbones/resnet3d_csn.py index 97c3e420aa..4539dec01e 100644 --- a/mmaction/models/backbones/resnet3d_csn.py +++ b/mmaction/models/backbones/resnet3d_csn.py @@ -2,7 +2,7 @@ from mmcv.cnn import ConvModule from mmcv.utils import _BatchNorm -from ..registry import BACKBONES +from ..builder import BACKBONES from .resnet3d import Bottleneck3d, ResNet3d diff --git a/mmaction/models/backbones/resnet3d_slowfast.py b/mmaction/models/backbones/resnet3d_slowfast.py index 3db7794dd5..45e9d5a7da 100644 --- a/mmaction/models/backbones/resnet3d_slowfast.py +++ b/mmaction/models/backbones/resnet3d_slowfast.py @@ -5,7 +5,7 @@ from mmcv.utils import print_log from ...utils import get_root_logger -from ..registry import BACKBONES +from ..builder import BACKBONES from .resnet3d import ResNet3d try: diff --git a/mmaction/models/backbones/resnet3d_slowonly.py b/mmaction/models/backbones/resnet3d_slowonly.py index e50aae7834..89275809e2 100644 --- a/mmaction/models/backbones/resnet3d_slowonly.py +++ b/mmaction/models/backbones/resnet3d_slowonly.py @@ -1,4 +1,4 @@ -from ..registry import BACKBONES +from ..builder import BACKBONES from .resnet3d_slowfast import ResNet3dPathway try: diff --git a/mmaction/models/backbones/resnet_audio.py b/mmaction/models/backbones/resnet_audio.py index ea5792d874..d4fd9e1ece 100644 --- a/mmaction/models/backbones/resnet_audio.py +++ b/mmaction/models/backbones/resnet_audio.py @@ -5,8 +5,8 @@ from torch.nn.modules.batchnorm import _BatchNorm from torch.nn.modules.utils import _ntuple -from mmaction.models.registry import BACKBONES -from mmaction.utils import get_root_logger +from ...utils import get_root_logger +from ..builder import BACKBONES class 
Bottleneck2dAudio(nn.Module): diff --git a/mmaction/models/backbones/resnet_tin.py b/mmaction/models/backbones/resnet_tin.py index 1c650d3731..229c387cc3 100644 --- a/mmaction/models/backbones/resnet_tin.py +++ b/mmaction/models/backbones/resnet_tin.py @@ -2,7 +2,7 @@ import torch.nn as nn from mmaction.utils import import_module_error_func -from ..registry import BACKBONES +from ..builder import BACKBONES from .resnet_tsm import ResNetTSM try: diff --git a/mmaction/models/backbones/resnet_tsm.py b/mmaction/models/backbones/resnet_tsm.py index 2c4f999b5c..d1a383b33d 100644 --- a/mmaction/models/backbones/resnet_tsm.py +++ b/mmaction/models/backbones/resnet_tsm.py @@ -3,7 +3,7 @@ from mmcv.cnn import NonLocal3d from torch.nn.modules.utils import _ntuple -from ..registry import BACKBONES +from ..builder import BACKBONES from .resnet import ResNet diff --git a/mmaction/models/backbones/tanet.py b/mmaction/models/backbones/tanet.py index d66233931d..15d3487d1a 100644 --- a/mmaction/models/backbones/tanet.py +++ b/mmaction/models/backbones/tanet.py @@ -3,8 +3,8 @@ import torch.nn as nn from torch.utils import checkpoint as cp +from ..builder import BACKBONES from ..common import TAM -from ..registry import BACKBONES from .resnet import Bottleneck, ResNet diff --git a/mmaction/models/backbones/x3d.py b/mmaction/models/backbones/x3d.py index 4d6b85cff3..4d8e39b641 100644 --- a/mmaction/models/backbones/x3d.py +++ b/mmaction/models/backbones/x3d.py @@ -8,7 +8,7 @@ from mmcv.utils import _BatchNorm from ...utils import get_root_logger -from ..registry import BACKBONES +from ..builder import BACKBONES class SEModule(nn.Module): diff --git a/mmaction/models/builder.py b/mmaction/models/builder.py index 7fed5524c2..ef6792fac2 100644 --- a/mmaction/models/builder.py +++ b/mmaction/models/builder.py @@ -1,53 +1,37 @@ import warnings -import torch.nn as nn -from mmcv.utils import Registry, build_from_cfg +from mmcv.cnn import MODELS as MMCV_MODELS +from mmcv.utils import Registry from mmaction.utils import import_module_error_func -from .registry import BACKBONES, HEADS, LOCALIZERS, LOSSES, NECKS, RECOGNIZERS + +MODELS = Registry('models', parent=MMCV_MODELS) +BACKBONES = MODELS +NECKS = MODELS +HEADS = MODELS +RECOGNIZERS = MODELS +LOSSES = MODELS +LOCALIZERS = MODELS try: from mmdet.models.builder import DETECTORS, build_detector except (ImportError, ModuleNotFoundError): # Define an empty registry and building func, so that can import - DETECTORS = Registry('detector') + DETECTORS = MODELS @import_module_error_func('mmdet') def build_detector(cfg, train_cfg, test_cfg): pass -def build(cfg, registry, default_args=None): - """Build a module. - - Args: - cfg (dict, list[dict]): The config of modules, it is either a dict - or a list of configs. - registry (:obj:`Registry`): A registry the module belongs to. - default_args (dict, optional): Default arguments to build the module. - Defaults to None. - - Returns: - nn.Module: A built nn module. 
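# A minimal sketch of the unified registry introduced above: BACKBONES,
# HEADS, etc. are now aliases of one `MODELS` registry whose parent is
# MMCV's `MODELS`, so `HEADS.build(cfg)` resolves the `type` key and
# instantiates the class with the remaining fields. `TinyHead` and its
# config are hypothetical, shown only to illustrate the flow.
import torch.nn as nn

from mmaction.models import HEADS, build_head


@HEADS.register_module()
class TinyHead(nn.Module):
    """Toy head, registered only to demonstrate the new builder."""

    def __init__(self, num_classes):
        super().__init__()
        self.fc = nn.Linear(2048, num_classes)

    def forward(self, x):
        return self.fc(x)


# Equivalent to HEADS.build(dict(...)); returns TinyHead(num_classes=400).
head = build_head(dict(type='TinyHead', num_classes=400))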
- """ - - if isinstance(cfg, list): - modules = [ - build_from_cfg(cfg_, registry, default_args) for cfg_ in cfg - ] - return nn.Sequential(*modules) - - return build_from_cfg(cfg, registry, default_args) - - def build_backbone(cfg): """Build backbone.""" - return build(cfg, BACKBONES) + return BACKBONES.build(cfg) def build_head(cfg): """Build head.""" - return build(cfg, HEADS) + return HEADS.build(cfg) def build_recognizer(cfg, train_cfg=None, test_cfg=None): @@ -58,22 +42,24 @@ def build_recognizer(cfg, train_cfg=None, test_cfg=None): 'please specify them in model. Details see this ' 'PR: https://github.com/open-mmlab/mmaction2/pull/629', UserWarning) - assert cfg.get('train_cfg') is None or train_cfg is None, \ - 'train_cfg specified in both outer field and model field ' - assert cfg.get('test_cfg') is None or test_cfg is None, \ - 'test_cfg specified in both outer field and model field ' - return build(cfg, RECOGNIZERS, - dict(train_cfg=train_cfg, test_cfg=test_cfg)) + assert cfg.get( + 'train_cfg' + ) is None or train_cfg is None, 'train_cfg specified in both outer field and model field' # noqa: E501 + assert cfg.get( + 'test_cfg' + ) is None or test_cfg is None, 'test_cfg specified in both outer field and model field ' # noqa: E501 + return RECOGNIZERS.build( + cfg, default_args=dict(train_cfg=train_cfg, test_cfg=test_cfg)) def build_loss(cfg): """Build loss.""" - return build(cfg, LOSSES) + return LOSSES.build(cfg) def build_localizer(cfg): """Build localizer.""" - return build(cfg, LOCALIZERS) + return LOCALIZERS.build(cfg) def build_model(cfg, train_cfg=None, test_cfg=None): @@ -102,4 +88,4 @@ def build_model(cfg, train_cfg=None, test_cfg=None): def build_neck(cfg): """Build neck.""" - return build(cfg, NECKS) + return NECKS.build(cfg) diff --git a/mmaction/models/heads/audio_tsn_head.py b/mmaction/models/heads/audio_tsn_head.py index b956e1f21b..4fc0359216 100644 --- a/mmaction/models/heads/audio_tsn_head.py +++ b/mmaction/models/heads/audio_tsn_head.py @@ -1,7 +1,7 @@ import torch.nn as nn from mmcv.cnn import normal_init -from ..registry import HEADS +from ..builder import HEADS from .base import BaseHead diff --git a/mmaction/models/heads/i3d_head.py b/mmaction/models/heads/i3d_head.py index bfe423dad4..f86b978661 100644 --- a/mmaction/models/heads/i3d_head.py +++ b/mmaction/models/heads/i3d_head.py @@ -1,7 +1,7 @@ import torch.nn as nn from mmcv.cnn import normal_init -from ..registry import HEADS +from ..builder import HEADS from .base import BaseHead diff --git a/mmaction/models/heads/slowfast_head.py b/mmaction/models/heads/slowfast_head.py index 5b195aa5fb..f8cb7d6964 100644 --- a/mmaction/models/heads/slowfast_head.py +++ b/mmaction/models/heads/slowfast_head.py @@ -2,7 +2,7 @@ import torch.nn as nn from mmcv.cnn import normal_init -from ..registry import HEADS +from ..builder import HEADS from .base import BaseHead diff --git a/mmaction/models/heads/ssn_head.py b/mmaction/models/heads/ssn_head.py index 3399d3dc1a..d51f921ce5 100644 --- a/mmaction/models/heads/ssn_head.py +++ b/mmaction/models/heads/ssn_head.py @@ -2,7 +2,7 @@ import torch.nn as nn from mmcv.cnn import normal_init -from ..registry import HEADS +from ..builder import HEADS def parse_stage_config(stage_cfg): diff --git a/mmaction/models/heads/tpn_head.py b/mmaction/models/heads/tpn_head.py index 1cd6501a9c..34d476c144 100644 --- a/mmaction/models/heads/tpn_head.py +++ b/mmaction/models/heads/tpn_head.py @@ -1,6 +1,6 @@ import torch.nn as nn -from ..registry import HEADS +from ..builder import HEADS from 
.tsn_head import TSNHead diff --git a/mmaction/models/heads/trn_head.py b/mmaction/models/heads/trn_head.py index f93818a4bc..dbc080f0d8 100644 --- a/mmaction/models/heads/trn_head.py +++ b/mmaction/models/heads/trn_head.py @@ -5,7 +5,7 @@ import torch.nn as nn from mmcv.cnn import normal_init -from ..registry import HEADS +from ..builder import HEADS from .base import BaseHead diff --git a/mmaction/models/heads/tsm_head.py b/mmaction/models/heads/tsm_head.py index e989be034a..3d6a5f6e00 100644 --- a/mmaction/models/heads/tsm_head.py +++ b/mmaction/models/heads/tsm_head.py @@ -2,7 +2,7 @@ import torch.nn as nn from mmcv.cnn import normal_init -from ..registry import HEADS +from ..builder import HEADS from .base import AvgConsensus, BaseHead diff --git a/mmaction/models/heads/tsn_head.py b/mmaction/models/heads/tsn_head.py index f36fb6a364..998e9b7e61 100644 --- a/mmaction/models/heads/tsn_head.py +++ b/mmaction/models/heads/tsn_head.py @@ -1,7 +1,7 @@ import torch.nn as nn from mmcv.cnn import normal_init -from ..registry import HEADS +from ..builder import HEADS from .base import AvgConsensus, BaseHead diff --git a/mmaction/models/heads/x3d_head.py b/mmaction/models/heads/x3d_head.py index 2452df027e..816c45a2b9 100644 --- a/mmaction/models/heads/x3d_head.py +++ b/mmaction/models/heads/x3d_head.py @@ -1,7 +1,7 @@ import torch.nn as nn from mmcv.cnn import normal_init -from ..registry import HEADS +from ..builder import HEADS from .base import BaseHead diff --git a/mmaction/models/localizers/bmn.py b/mmaction/models/localizers/bmn.py index f8a63c9d12..a0bbece0cd 100644 --- a/mmaction/models/localizers/bmn.py +++ b/mmaction/models/localizers/bmn.py @@ -5,8 +5,7 @@ import torch.nn as nn from ...localization import temporal_iop, temporal_iou -from ..builder import build_loss -from ..registry import LOCALIZERS +from ..builder import LOCALIZERS, build_loss from .base import BaseLocalizer from .utils import post_processing diff --git a/mmaction/models/localizers/bsn.py b/mmaction/models/localizers/bsn.py index 23f3e7230a..83843002ff 100644 --- a/mmaction/models/localizers/bsn.py +++ b/mmaction/models/localizers/bsn.py @@ -4,8 +4,7 @@ import torch.nn.functional as F from ...localization import temporal_iop -from ..builder import build_loss -from ..registry import LOCALIZERS +from ..builder import LOCALIZERS, build_loss from .base import BaseLocalizer from .utils import post_processing diff --git a/mmaction/models/localizers/ssn.py b/mmaction/models/localizers/ssn.py index 2ab9973815..1284f694dd 100644 --- a/mmaction/models/localizers/ssn.py +++ b/mmaction/models/localizers/ssn.py @@ -2,7 +2,7 @@ import torch.nn as nn from .. 
import builder -from ..registry import LOCALIZERS +from ..builder import LOCALIZERS from .base import BaseLocalizer diff --git a/mmaction/models/losses/binary_logistic_regression_loss.py b/mmaction/models/losses/binary_logistic_regression_loss.py index 343d1ca06e..ab23651cfa 100644 --- a/mmaction/models/losses/binary_logistic_regression_loss.py +++ b/mmaction/models/losses/binary_logistic_regression_loss.py @@ -1,7 +1,7 @@ import torch import torch.nn as nn -from ..registry import LOSSES +from ..builder import LOSSES def binary_logistic_regression_loss(reg_score, diff --git a/mmaction/models/losses/bmn_loss.py b/mmaction/models/losses/bmn_loss.py index 50e49729b0..9ba312cad9 100644 --- a/mmaction/models/losses/bmn_loss.py +++ b/mmaction/models/losses/bmn_loss.py @@ -2,7 +2,7 @@ import torch.nn as nn import torch.nn.functional as F -from ..registry import LOSSES +from ..builder import LOSSES from .binary_logistic_regression_loss import binary_logistic_regression_loss diff --git a/mmaction/models/losses/cross_entropy_loss.py b/mmaction/models/losses/cross_entropy_loss.py index bd192532a4..836b950c66 100644 --- a/mmaction/models/losses/cross_entropy_loss.py +++ b/mmaction/models/losses/cross_entropy_loss.py @@ -1,7 +1,7 @@ import torch import torch.nn.functional as F -from ..registry import LOSSES +from ..builder import LOSSES from .base import BaseWeightedLoss diff --git a/mmaction/models/losses/hvu_loss.py b/mmaction/models/losses/hvu_loss.py index 9d9b00567d..9fdbbfd45b 100644 --- a/mmaction/models/losses/hvu_loss.py +++ b/mmaction/models/losses/hvu_loss.py @@ -1,7 +1,7 @@ import torch import torch.nn.functional as F -from ..registry import LOSSES +from ..builder import LOSSES from .base import BaseWeightedLoss diff --git a/mmaction/models/losses/nll_loss.py b/mmaction/models/losses/nll_loss.py index 1c28537329..7bd57a50db 100644 --- a/mmaction/models/losses/nll_loss.py +++ b/mmaction/models/losses/nll_loss.py @@ -1,6 +1,6 @@ import torch.nn.functional as F -from ..registry import LOSSES +from ..builder import LOSSES from .base import BaseWeightedLoss diff --git a/mmaction/models/losses/ssn_loss.py b/mmaction/models/losses/ssn_loss.py index 492dc8ddaa..030ab3cd0e 100644 --- a/mmaction/models/losses/ssn_loss.py +++ b/mmaction/models/losses/ssn_loss.py @@ -2,7 +2,7 @@ import torch.nn as nn import torch.nn.functional as F -from ..registry import LOSSES +from ..builder import LOSSES from .ohem_hinge_loss import OHEMHingeLoss diff --git a/mmaction/models/necks/tpn.py b/mmaction/models/necks/tpn.py index 250357c675..7264b3c366 100644 --- a/mmaction/models/necks/tpn.py +++ b/mmaction/models/necks/tpn.py @@ -3,8 +3,7 @@ import torch.nn as nn from mmcv.cnn import ConvModule, constant_init, normal_init, xavier_init -from ..builder import build_loss -from ..registry import NECKS +from ..builder import NECKS, build_loss class Identity(nn.Module): diff --git a/mmaction/models/recognizers/audio_recognizer.py b/mmaction/models/recognizers/audio_recognizer.py index a9b431c06a..b17e44680a 100644 --- a/mmaction/models/recognizers/audio_recognizer.py +++ b/mmaction/models/recognizers/audio_recognizer.py @@ -1,4 +1,4 @@ -from ..registry import RECOGNIZERS +from ..builder import RECOGNIZERS from .base import BaseRecognizer diff --git a/mmaction/models/recognizers/base.py b/mmaction/models/recognizers/base.py index 7732ed0766..281aa547e1 100644 --- a/mmaction/models/recognizers/base.py +++ b/mmaction/models/recognizers/base.py @@ -84,7 +84,7 @@ def __init__(self, self.blending = None if train_cfg is not None 
and 'blending' in train_cfg: from mmcv.utils import build_from_cfg - from ...datasets.registry import BLENDINGS + from mmaction.datasets.builder import BLENDINGS self.blending = build_from_cfg(train_cfg['blending'], BLENDINGS) self.init_weights() diff --git a/mmaction/models/recognizers/recognizer2d.py b/mmaction/models/recognizers/recognizer2d.py index bda7db2312..16f6349be8 100644 --- a/mmaction/models/recognizers/recognizer2d.py +++ b/mmaction/models/recognizers/recognizer2d.py @@ -1,7 +1,7 @@ import torch from torch import nn -from ..registry import RECOGNIZERS +from ..builder import RECOGNIZERS from .base import BaseRecognizer diff --git a/mmaction/models/recognizers/recognizer3d.py b/mmaction/models/recognizers/recognizer3d.py index a4b420eee6..26e4668147 100644 --- a/mmaction/models/recognizers/recognizer3d.py +++ b/mmaction/models/recognizers/recognizer3d.py @@ -1,7 +1,7 @@ import torch from torch import nn -from ..registry import RECOGNIZERS +from ..builder import RECOGNIZERS from .base import BaseRecognizer diff --git a/mmaction/models/registry.py b/mmaction/models/registry.py deleted file mode 100644 index 61dc40c331..0000000000 --- a/mmaction/models/registry.py +++ /dev/null @@ -1,8 +0,0 @@ -from mmcv.utils import Registry - -BACKBONES = Registry('backbone') -NECKS = Registry('neck') -HEADS = Registry('head') -RECOGNIZERS = Registry('recognizer') -LOSSES = Registry('loss') -LOCALIZERS = Registry('localizer') diff --git a/tests/test_runtime/test_train.py b/tests/test_runtime/test_train.py index 6490cd454d..28d16cc1ed 100644 --- a/tests/test_runtime/test_train.py +++ b/tests/test_runtime/test_train.py @@ -9,7 +9,7 @@ from torch.utils.data import Dataset from mmaction.apis import train_model -from mmaction.datasets.registry import DATASETS +from mmaction.datasets import DATASETS @DATASETS.register_module() From 88de1b57c8c04a4a44b4e510c56e3baf13528456 Mon Sep 17 00:00:00 2001 From: Kenny Date: Fri, 30 Apr 2021 21:41:35 +0800 Subject: [PATCH 077/414] add unittest --- .../test_loadings/test_pose_loading.py | 84 +++++++++++++++++-- 1 file changed, 77 insertions(+), 7 deletions(-) diff --git a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py index 9c5df56456..75c4bac011 100644 --- a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py +++ b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py @@ -1,12 +1,15 @@ -import copy as cp -from collections import defaultdict +import copy as cp # isort: skip +from collections import defaultdict # isort: skip -import numpy as np -from mmcv import dump -from numpy.testing import assert_array_almost_equal, assert_array_equal +import numpy as np # isort: skip +from mmcv import dump # isort: skip +from numpy.testing import assert_array_almost_equal # isort: skip +from numpy.testing import assert_array_equal # isort: skip -from mmaction.datasets.pipelines import (LoadKineticsPose, PoseDecode, - UniformSampleFrames) +from mmaction.datasets.pipelines import GeneratePoseTarget # isort: skip +from mmaction.datasets.pipelines import LoadKineticsPose # isort: skip +from mmaction.datasets.pipelines import PoseDecode # isort: skip +from mmaction.datasets.pipelines import UniformSampleFrames # isort: skip class TestPoseLoading: @@ -123,6 +126,12 @@ def get_mode(arr): inp = cp.deepcopy(results) load_kinetics_pose = LoadKineticsPose( squeeze=True, max_person=100, source='openpose') + + assert str(load_kinetics_pose) == 
('LoadKineticsPose(io_backend=disk, ' + 'squeeze=True, max_person=100, ' + "keypoint_weight={'face': 1, " + "'torso': 2, 'limb': 3}, " + 'source=openpose, kwargs={})') return_results = load_kinetics_pose(inp) assert return_results['kp'].shape[:-1] == \ return_results['kpscore'].shape @@ -173,3 +182,64 @@ def get_mode(arr): assert num_person <= 2 assert np.max(return_results['kp']) <= 1 assert num_frame == len(set(frame_inds[anno_inds])) + + def test_generate_pose_target(): + img_shape = (64, 64) + kp = np.array([[[[24, 24], [40, 40], [24, 40]]]]) + kpscore = np.array([[[1., 1., 1.]]]) + kp = np.concatenate([kp] * 8, axis=1) + kpscore = np.concatenate([kpscore] * 8, axis=1) + results = dict( + img_shape=img_shape, kp=kp, kpscore=kpscore, modality='Pose') + + generate_pose_target = GeneratePoseTarget( + sigma=1, with_kp=True, left=(0, ), right=(1, ), skeletons=()) + assert str(generate_pose_target) == ('GeneratePoseTarget(sigma=1, ' + 'use_score=True, with_kp=True, ' + 'with_limb=False, skeletons=(), ' + 'double=False, left=(0,), ' + 'right=(1,))') + return_results = generate_pose_target(results) + assert return_results['imgs'].shape == (8, 64, 64, 3) + assert_array_almost_equal(return_results['imgs'][0], + return_results['imgs'][1]) + + generate_pose_target = GeneratePoseTarget( + sigma=1, + with_kp=False, + with_limb=True, + left=(0, ), + right=(1, ), + skeletons=((0, 1), (1, 2), (0, 2))) + return_results = generate_pose_target(results) + assert return_results['imgs'].shape == (8, 64, 64, 3) + assert_array_almost_equal(return_results['imgs'][0], + return_results['imgs'][1]) + + generate_pose_target = GeneratePoseTarget( + sigma=1, + with_kp=True, + with_limb=True, + left=(0, ), + right=(1, ), + skeletons=((0, 1), (1, 2), (0, 2))) + return_results = generate_pose_target(results) + assert return_results['imgs'].shape == (8, 64, 64, 6) + assert_array_almost_equal(return_results['imgs'][0], + return_results['imgs'][1]) + + generate_pose_target = GeneratePoseTarget( + sigma=1, + with_kp=True, + with_limb=True, + double=True, + left=(0, ), + right=(1, ), + skeletons=((0, 1), (1, 2), (0, 2))) + return_results = generate_pose_target(results) + imgs = return_results['imgs'] + assert imgs.shape == (16, 64, 64, 6) + assert_array_almost_equal(imgs[0], imgs[1]) + assert_array_almost_equal(imgs[:8, 2], imgs[8:, 2, :, ::-1]) + assert_array_almost_equal(imgs[:8, 0], imgs[8:, 1, :, ::-1]) + assert_array_almost_equal(imgs[:8, 1], imgs[8:, 0, :, ::-1]) From 910f9e89a054645b32e45244ac1b0958e01c1794 Mon Sep 17 00:00:00 2001 From: Kenny Date: Fri, 30 Apr 2021 21:42:33 +0800 Subject: [PATCH 078/414] remove isort skip --- .../test_loadings/test_pose_loading.py | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py index 75c4bac011..6b246039b0 100644 --- a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py +++ b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py @@ -1,15 +1,12 @@ -import copy as cp # isort: skip -from collections import defaultdict # isort: skip +import copy as cp +from collections import defaultdict -import numpy as np # isort: skip -from mmcv import dump # isort: skip -from numpy.testing import assert_array_almost_equal # isort: skip -from numpy.testing import assert_array_equal # isort: skip +import numpy as np +from mmcv import dump +from numpy.testing import assert_array_almost_equal, 
assert_array_equal -from mmaction.datasets.pipelines import GeneratePoseTarget # isort: skip -from mmaction.datasets.pipelines import LoadKineticsPose # isort: skip -from mmaction.datasets.pipelines import PoseDecode # isort: skip -from mmaction.datasets.pipelines import UniformSampleFrames # isort: skip +from mmaction.datasets.pipelines import (GeneratePoseTarget, LoadKineticsPose, + PoseDecode, UniformSampleFrames) class TestPoseLoading: @@ -183,7 +180,7 @@ def get_mode(arr): assert np.max(return_results['kp']) <= 1 assert num_frame == len(set(frame_inds[anno_inds])) - def test_generate_pose_target(): + def test_generate_pose_target(self): img_shape = (64, 64) kp = np.array([[[[24, 24], [40, 40], [24, 40]]]]) kpscore = np.array([[[1., 1., 1.]]]) From cd46b40109c596faae58aae7ac3a3616d7ee69e4 Mon Sep 17 00:00:00 2001 From: Kenny Date: Fri, 30 Apr 2021 21:45:45 +0800 Subject: [PATCH 079/414] remove evaluate in pose_dataset --- mmaction/datasets/pose_dataset.py | 66 ------------------------------- 1 file changed, 66 deletions(-) diff --git a/mmaction/datasets/pose_dataset.py b/mmaction/datasets/pose_dataset.py index b6d5f42e6e..2663c81532 100644 --- a/mmaction/datasets/pose_dataset.py +++ b/mmaction/datasets/pose_dataset.py @@ -2,9 +2,7 @@ import mmcv import numpy as np -from mmcv.utils import print_log -from ..core import mean_class_accuracy, top_k_accuracy from ..utils import get_root_logger from .base import BaseDataset from .registry import DATASETS @@ -98,67 +96,3 @@ def load_pkl_annotations(self): if 'filename' in item: item['filename'] = osp.join(self.data_prefix, item['filename']) return data - - def evaluate(self, - results, - metrics='top_k_accuracy', - topk=(1, 5), - logger=None, - **kwargs): - """Evaluation in rawframe dataset. - - Args: - results (list): Output results. - metrics (str | sequence[str]): Metrics to be performed. - Defaults: 'top_k_accuracy'. - logger (obj): Training logger. Defaults: None. - topk (tuple[int]): K value for top_k_accuracy metric. - Defaults: (1, 5). - logger (logging.Logger | None): Logger for recording. - Default: None. - - Return: - dict: Evaluation results dict. - """ - if not isinstance(results, list): - raise TypeError(f'results must be a list, but got {type(results)}') - assert len(results) == len(self), ( - f'The length of results is not equal to the dataset len: ' - f'{len(results)} != {len(self)}') - - if not isinstance(topk, (int, tuple)): - raise TypeError( - f'topk must be int or tuple of int, but got {type(topk)}') - - metrics = metrics if isinstance(metrics, (list, tuple)) else [metrics] - allowed_metrics = ['top_k_accuracy', 'mean_class_accuracy'] - - for metric in metrics: - if metric not in allowed_metrics: - raise KeyError(f'metric {metric} is not supported') - - eval_results = {} - gt_labels = [ann['label'] for ann in self.video_infos] - - for metric in metrics: - msg = f'Evaluating {metric}...' 
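# The method removed above duplicated evaluation logic already available
# from the base dataset class; the metric helpers it wrapped remain
# importable on their own. A small sketch with made-up scores and labels:
import numpy as np

from mmaction.core import mean_class_accuracy, top_k_accuracy

# Two clips, three classes; both predictions hit the ground truth.
scores = [np.array([0.1, 0.7, 0.2]), np.array([0.6, 0.3, 0.1])]
labels = [1, 0]

top1, top2 = top_k_accuracy(scores, labels, topk=(1, 2))  # -> 1.0, 1.0
mean_acc = mean_class_accuracy(scores, labels)  # -> 1.0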
- if logger is None: - msg = '\n' + msg - print_log(msg, logger=logger) - - if metric == 'top_k_accuracy': - top_k_acc = top_k_accuracy(results, gt_labels, topk) - log_msg = [] - for k, acc in zip(topk, top_k_acc): - eval_results[f'top{k}_acc'] = acc - log_msg.append(f'\ntop{k}_acc\t{acc:.4f}') - log_msg = ''.join(log_msg) - print_log(log_msg, logger=logger) - - if metric == 'mean_class_accuracy': - mean_acc = mean_class_accuracy(results, gt_labels) - eval_results['mean_class_accuracy'] = mean_acc - log_msg = f'\nmean_acc\t{mean_acc:.4f}' - print_log(log_msg, logger=logger) - - return eval_results From 009723d1379392b0f32517c6300410020b369d0f Mon Sep 17 00:00:00 2001 From: Kenny Date: Fri, 30 Apr 2021 21:52:53 +0800 Subject: [PATCH 080/414] update urls --- configs/skeleton/posec3d/README.md | 12 ++++++------ demo/visualize_heatmap_volume.ipynb | 16 ++++++++-------- tools/data/skeleton/download_annotations.sh | 4 ++-- 3 files changed, 16 insertions(+), 16 deletions(-) diff --git a/configs/skeleton/posec3d/README.md b/configs/skeleton/posec3d/README.md index 0a6e402045..c99e109dcd 100644 --- a/configs/skeleton/posec3d/README.md +++ b/configs/skeleton/posec3d/README.md @@ -55,24 +55,24 @@ |config |pseudo heatmap | gpus | backbone | Mean Top-1 | ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:-:| -|[slowonly_r50_u48_240e_gym_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint.py) |keypoint |8 x 2| SlowOnly-R50 |93.7 | [ckpt](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint-b07a98a0.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint.log) | [json](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint.json) | -|[slowonly_r50_u48_240e_gym_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb.py) |limb |8 x 2| SlowOnly-R50 |94.0 | [ckpt](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb-c0d7b482.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb.log) | [json](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb.json) | +|[slowonly_r50_u48_240e_gym_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint.py) |keypoint |8 x 2| SlowOnly-R50 |93.7 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint-b07a98a0.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint.json) | +|[slowonly_r50_u48_240e_gym_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb.py) |limb |8 x 2| SlowOnly-R50 |94.0 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb-c0d7b482.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb.log) | 
[json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb.json) | |Fusion | || |94.3 | | | | ### NTU60_XSub | config | pseudo heatmap | gpus | backbone | Top-1 | ckpt | log | json | | :----------------------------------------------------------- | :------------: | :---: | :----------: | :---: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [slowonly_r50_u48_240e_ntu60_xsub_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint.py) | keypoint | 8 x 2 | SlowOnly-R50 | 93.7 | [ckpt](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint-f3adabf1.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint.log) | [json](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint.json) | -| [slowonly_r50_u48_240e_ntu60_xsub_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb.py) | limb | 8 x 2 | SlowOnly-R50 | 93.4 | [ckpt](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb-1d69006a.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb.log) | [json](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb.json) | +| [slowonly_r50_u48_240e_ntu60_xsub_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint.py) | keypoint | 8 x 2 | SlowOnly-R50 | 93.7 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint-f3adabf1.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint.json) | +| [slowonly_r50_u48_240e_ntu60_xsub_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb.py) | limb | 8 x 2 | SlowOnly-R50 | 93.4 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb-1d69006a.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb.json) | | Fusion | | | | 94.1 | | | | ### NTU120_XSub | config | pseudo heatmap | gpus | backbone | Top-1 | ckpt | log | json | | :----------------------------------------------------------- | :------------: | :---: | :----------: | :---: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| 
[slowonly_r50_u48_240e_ntu120_xsub_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py) | keypoint | 8 x 2 | SlowOnly-R50 | 86.3 | [ckpt](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint-6736b03f.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint.log) | [json](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint.json) | -| [slowonly_r50_u48_240e_ntu120_xsub_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb.py) | limb | 8 x 2 | SlowOnly-R50 | 85.7 | [ckpt](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb-803c2317.pth?) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.log) | [json](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.json) | +| [slowonly_r50_u48_240e_ntu120_xsub_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py) | keypoint | 8 x 2 | SlowOnly-R50 | 86.3 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint-6736b03f.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint.json) | +| [slowonly_r50_u48_240e_ntu120_xsub_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb.py) | limb | 8 x 2 | SlowOnly-R50 | 85.7 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb-803c2317.pth?) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.json) | | Fusion | | | | 86.9 | | | | Notes: diff --git a/demo/visualize_heatmap_volume.ipynb b/demo/visualize_heatmap_volume.ipynb index 44823ae70f..cb7c33a330 100644 --- a/demo/visualize_heatmap_volume.ipynb +++ b/demo/visualize_heatmap_volume.ipynb @@ -181,9 +181,9 @@ "name": "stdout", "output_type": "stream", "text": [ - "--2021-04-25 22:18:53-- https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/posec3d/gym_samples.tar\n", - "Resolving openmmlab.oss-cn-hangzhou.aliyuncs.com (openmmlab.oss-cn-hangzhou.aliyuncs.com)... 124.160.145.22\n", - "Connecting to openmmlab.oss-cn-hangzhou.aliyuncs.com (openmmlab.oss-cn-hangzhou.aliyuncs.com)|124.160.145.22|:443... connected.\n", + "--2021-04-25 22:18:53-- https://download.openmmlab.com/mmaction/posec3d/gym_samples.tar\n", + "Resolving download.openmmlab.com (download.openmmlab.com)... 124.160.145.22\n", + "Connecting to download.openmmlab.com (download.openmmlab.com)|124.160.145.22|:443... 
connected.\n", "HTTP request sent, awaiting response... 200 OK\n", "Length: 36300800 (35M) [application/x-tar]\n", "Saving to: ‘gym_samples.tar’\n", @@ -197,7 +197,7 @@ ], "source": [ "# download sample videos of GYM\n", - "!wget https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/posec3d/gym_samples.tar\n", + "!wget https://download.openmmlab.com/mmaction/posec3d/gym_samples.tar\n", "!tar -xf gym_samples.tar\n", "!rm gym_samples.tar" ] @@ -317,9 +317,9 @@ "name": "stdout", "output_type": "stream", "text": [ - "--2021-04-25 22:21:16-- https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/posec3d/ntu_samples.tar\n", - "Resolving openmmlab.oss-cn-hangzhou.aliyuncs.com (openmmlab.oss-cn-hangzhou.aliyuncs.com)... 124.160.145.22\n", - "Connecting to openmmlab.oss-cn-hangzhou.aliyuncs.com (openmmlab.oss-cn-hangzhou.aliyuncs.com)|124.160.145.22|:443... connected.\n", + "--2021-04-25 22:21:16-- https://download.openmmlab.com/mmaction/posec3d/ntu_samples.tar\n", + "Resolving download.openmmlab.com (download.openmmlab.com)... 124.160.145.22\n", + "Connecting to download.openmmlab.com (download.openmmlab.com)|124.160.145.22|:443... connected.\n", "HTTP request sent, awaiting response... 200 OK\n", "Length: 121753600 (116M) [application/x-tar]\n", "Saving to: ‘ntu_samples.tar’\n", @@ -333,7 +333,7 @@ ], "source": [ "# download sample videos of NTU-60\n", - "!wget https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/posec3d/ntu_samples.tar\n", + "!wget https://download.openmmlab.com/mmaction/posec3d/ntu_samples.tar\n", "!tar -xf ntu_samples.tar\n", "!rm ntu_samples.tar" ] diff --git a/tools/data/skeleton/download_annotations.sh b/tools/data/skeleton/download_annotations.sh index e48c5af7d3..de2d3a2861 100644 --- a/tools/data/skeleton/download_annotations.sh +++ b/tools/data/skeleton/download_annotations.sh @@ -15,8 +15,8 @@ if [[ ! 
-d "${DATA_DIR}" ]]; then mkdir -p ${DATA_DIR} fi -wget https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/posec3d/${DATASET}_train.pkl -wget https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmaction/posec3d/${DATASET}_val.pkl +wget https://download.openmmlab.com/mmaction/posec3d/${DATASET}_train.pkl +wget https://download.openmmlab.com/mmaction/posec3d/${DATASET}_val.pkl mv ${DATASET}_train.pkl ${DATA_DIR} mv ${DATASET}_val.pkl ${DATA_DIR} From cd0d9483f1908fc97f0b393aafe72cba003f7033 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Fri, 30 Apr 2021 21:58:12 +0800 Subject: [PATCH 081/414] add changelog (#844) --- docs/changelog.md | 28 ++++++++++++++++++++++++---- 1 file changed, 24 insertions(+), 4 deletions(-) diff --git a/docs/changelog.md b/docs/changelog.md index 3a738a0516..ab7e757a74 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -1,11 +1,16 @@ ## Changelog -### Master +### 0.14.0 (30/04/2021) **Highlights** +- Support TRN +- Support Diving48 + **New Features** +- Support TRN ([#755](https://github.com/open-mmlab/mmaction2/pull/755)) +- Support Diving48 ([#835](https://github.com/open-mmlab/mmaction2/pull/835)) - Support Webcam Demo for Spatio-temporal Action Detection Models ([#795](https://github.com/open-mmlab/mmaction2/pull/795)) **Improvements** @@ -13,15 +18,30 @@ - Add softmax option for pytorch2onnx tool ([#781](https://github.com/open-mmlab/mmaction2/pull/781)) - Support TRN ([#755](https://github.com/open-mmlab/mmaction2/pull/755)) - Test with onnx models and TensorRT engines ([#758](https://github.com/open-mmlab/mmaction2/pull/758)) - -**Improvements** +- Speed up AVA Testing ([#784](https://github.com/open-mmlab/mmaction2/pull/784)) +- Add `self.with_neck` attribute ([#796](https://github.com/open-mmlab/mmaction2/pull/796)) +- Update installation document ([#798](https://github.com/open-mmlab/mmaction2/pull/798)) +- Use a random master port ([#809](https://github.com/open-mmlab/mmaction2/pull/8098)) +- Update AVA processing data document ([#801](https://github.com/open-mmlab/mmaction2/pull/801)) +- Refactor spatio-temporal augmentation ([#782](https://github.com/open-mmlab/mmaction2/pull/782)) +- Add QR code in CN README ([#812](https://github.com/open-mmlab/mmaction2/pull/812)) +- Add Alternative way to download Kinetics ([#817](https://github.com/open-mmlab/mmaction2/pull/817), [#822](https://github.com/open-mmlab/mmaction2/pull/822)) +- Refactor Sampler ([#790](https://github.com/open-mmlab/mmaction2/pull/790)) +- Use EvalHook in MMCV with backward compatibility ([#793](https://github.com/open-mmlab/mmaction2/pull/793)) +- Use MMCV Model Registry ([#843](https://github.com/open-mmlab/mmaction2/pull/843)) **Bug and Typo Fixes** -- Fix a bug in pytorch2onnx.py when `num_classes <= 4` ([#800](https://github.com/open-mmlab/mmaction2/pull/800)) +- Fix a bug in pytorch2onnx.py when `num_classes <= 4` ([#800](https://github.com/open-mmlab/mmaction2/pull/800), [#824](https://github.com/open-mmlab/mmaction2/pull/824)) +- Fix `demo_spatiotemporal_det.py` error ([#803](https://github.com/open-mmlab/mmaction2/pull/803), [#805](https://github.com/open-mmlab/mmaction2/pull/805)) +- Fix loading config bugs when resume ([#820](https://github.com/open-mmlab/mmaction2/pull/820)) +- Make HMDB51 annotation generation more robust ([#811](https://github.com/open-mmlab/mmaction2/pull/811)) **ModelZoo** +- Update checkpoint for 256 height in something-V2 ([#789](https://github.com/open-mmlab/mmaction2/pull/789)) +- Support Diving48 
([#835](https://github.com/open-mmlab/mmaction2/pull/835)) + ### 0.13.0 (31/03/2021) **Highlights** From 939bbaa1ddacef340012eb148960132e090f55a8 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Fri, 30 Apr 2021 22:30:54 +0800 Subject: [PATCH 082/414] add some CN document (#845) --- README.md | 2 +- README_zh-CN.md | 2 +- configs/recognition/tpn/README_zh-CN.md | 73 +++++++++++++++++ configs/recognition_audio/resnet/README.md | 3 +- .../recognition_audio/resnet/README_zh-CN.md | 80 +++++++++++++++++++ 5 files changed, 156 insertions(+), 4 deletions(-) create mode 100644 configs/recognition/tpn/README_zh-CN.md create mode 100644 configs/recognition_audio/resnet/README_zh-CN.md diff --git a/README.md b/README.md index 941f14d826..5c9db8a084 100644 --- a/README.md +++ b/README.md @@ -62,7 +62,7 @@ v0.13.0 was released in 31/03/2021. Please refer to [changelog.md](docs/changelo | Model |input| io backend | batch size x gpus | MMAction2 (s/iter) | MMAction (s/iter) | Temporal-Shift-Module (s/iter) | PySlowFast (s/iter) | | :--- | :---------------:|:---------------:| :---------------:| :---------------: | :--------------------: | :----------------------------: | :-----------------: | | [TSN](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py)| 256p rawframes |Memcached| 32x8|**[0.32](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_rawframes_memcahed_32x8.zip)** | [0.38](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction/tsn_256p_rawframes_memcached_32x8.zip)| [0.42](https://download.openmmlab.com/mmaction/benchmark/recognition/temporal_shift_module/tsn_256p_rawframes_memcached_32x8.zip)| x | -| [TSN](/configs/recognition/tsn/tsn_r50_video_1x1x3_100e_kinetics400_rgb.py)| 256p dense-encoded video |Disk| 32x8|**[0.61](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_fast_videos_disk_32x8.zip)**| x | x | TODO | +| [TSN](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py)| 256p dense-encoded video |Disk| 32x8|**[0.61](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_fast_videos_disk_32x8.zip)**| x | x | TODO | |[I3D heavy](/configs/recognition/i3d/i3d_r50_video_heavy_8x8x1_100e_kinetics400_rgb.py)|256p videos|Disk |8x8| **[0.34](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/i3d_heavy_256p_videos_disk_8x8.zip)** | x | x | [0.44](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_i3d_r50_8x8_video.log) | | [I3D](/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py)|256p rawframes|Memcached|8x8| **[0.43](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/i3d_256p_rawframes_memcahed_8x8.zip)** | [0.56](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction/i3d_256p_rawframes_memcached_8x8.zip) | x | x | | [TSM](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |256p rawframes|Memcached| 8x8|**[0.31](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsm_256p_rawframes_memcahed_8x8.zip)** | x | [0.41](https://download.openmmlab.com/mmaction/benchmark/recognition/temporal_shift_module/tsm_256p_rawframes_memcached_8x8.zip) | x | diff --git a/README_zh-CN.md b/README_zh-CN.md index 511dfb6869..31bd029e70 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -56,7 +56,7 @@ v0.13.0 版本已于 2021 年 3 月 31 日发布,可通过查阅 [更新日志 | Model |input| io backend | batch size x gpus | MMAction2 (s/iter) | MMAction (s/iter) | 
Temporal-Shift-Module (s/iter) | PySlowFast (s/iter) | | :--- | :---------------:|:---------------:| :---------------:| :---------------: | :--------------------: | :----------------------------: | :-----------------: | | [TSN](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py)| 256p rawframes |Memcached| 32x8|**[0.32](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_rawframes_memcahed_32x8.zip)** | [0.38](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction/tsn_256p_rawframes_memcached_32x8.zip)| [0.42](https://download.openmmlab.com/mmaction/benchmark/recognition/temporal_shift_module/tsn_256p_rawframes_memcached_32x8.zip)| x | -| [TSN](/configs/recognition/tsn/tsn_r50_video_1x1x3_100e_kinetics400_rgb.py)| 256p dense-encoded video |Disk| 32x8|**[0.61](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_fast_videos_disk_32x8.zip)**| x | x | TODO | +| [TSN](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py)| 256p dense-encoded video |Disk| 32x8|**[0.61](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_fast_videos_disk_32x8.zip)**| x | x | TODO | |[I3D heavy](/configs/recognition/i3d/i3d_r50_video_heavy_8x8x1_100e_kinetics400_rgb.py)|256p videos|Disk |8x8| **[0.34](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/i3d_heavy_256p_videos_disk_8x8.zip)** | x | x | [0.44](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_i3d_r50_8x8_video.log) | | [I3D](/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py)|256p rawframes|Memcached|8x8| **[0.43](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/i3d_256p_rawframes_memcahed_8x8.zip)** | [0.56](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction/i3d_256p_rawframes_memcached_8x8.zip) | x | x | | [TSM](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |256p rawframes|Memcached| 8x8|**[0.31](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsm_256p_rawframes_memcahed_8x8.zip)** | x | [0.41](https://download.openmmlab.com/mmaction/benchmark/recognition/temporal_shift_module/tsm_256p_rawframes_memcached_8x8.zip) | x | diff --git a/configs/recognition/tpn/README_zh-CN.md b/configs/recognition/tpn/README_zh-CN.md new file mode 100644 index 0000000000..04ba85fe1f --- /dev/null +++ b/configs/recognition/tpn/README_zh-CN.md @@ -0,0 +1,73 @@ +# TPN + +## 简介 + + + +```BibTeX +@inproceedings{yang2020tpn, + title={Temporal Pyramid Network for Action Recognition}, + author={Yang, Ceyuan and Xu, Yinghao and Shi, Jianping and Dai, Bo and Zhou, Bolei}, + booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition (CVPR)}, + year={2020}, +} +``` + +## 模型库 + +### Kinetics-400 + +|配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M)| ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[tpn_slowonly_r50_8x8x1_150e_kinetics_rgb](/configs/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb.py)|短边 320|8x4| ResNet50 | ImageNet | 73.10 | 91.03 | x | x | x | 6916 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb_20200910-b796d7a0.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/20200910_134330.log) | 
[json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/20200910_134330.log.json) | +|[tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb](/configs/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.py)|短边 320|8x4| ResNet50 | ImageNet | 76.20 | 92.44 | [75.49](https://github.com/decisionforce/TPN/blob/master/MODELZOO.md) | [92.05](https://github.com/decisionforce/TPN/blob/master/MODELZOO.md) | x | 6916 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb_20200923-52629684.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/20200923_151919.log) | [json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/20200923_151919.log.json) | + +### Something-Something V1 + +|配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率| top5 准确率 | GPU 显存占用 (M) | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[tpn_tsm_r50_1x1x8_150e_sthv1_rgb](/configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py)|高 100|8x6| ResNet50 | TSM | 50.80 | 79.05 | 8828 |[ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/tpn_tsm_r50_1x1x8_150e_sthv1_rgb_20210311-28de4cd5.pth) |[log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/20210311_162636.log)|[json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/20210311_162636.log.json)| + +注: + +1. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 + 依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。 + 如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。 +2. 
The **inference_time** is got by this [benchmark script](/tools/analysis/benchmark.py), where we use the sampling frames strategy of the test setting and only care about the model inference time, not including the IO time and pre-processing time. For each setting, we use 1 gpu and set batch size (videos per gpu) to 1 to calculate the inference time. -3. The values in columns named after "reference" are the results got by training on the original repo, using the same model settings. -For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). +For more details on data preparation, you can refer to ``Prepare audio`` in [Data Preparation](/docs/data_preparation.md). ## Train diff --git a/configs/recognition_audio/resnet/README_zh-CN.md b/configs/recognition_audio/resnet/README_zh-CN.md new file mode 100644 index 0000000000..1d8ce01316 --- /dev/null +++ b/configs/recognition_audio/resnet/README_zh-CN.md @@ -0,0 +1,80 @@ +# ResNet for Audio + +## 简介 + + + +```BibTeX +@article{xiao2020audiovisual, + title={Audiovisual SlowFast Networks for Video Recognition}, + author={Xiao, Fanyi and Lee, Yong Jae and Grauman, Kristen and Malik, Jitendra and Feichtenhofer, Christoph}, + journal={arXiv preprint arXiv:2001.08740}, + year={2020} +} +``` + +## 模型库 + +### Kinetics-400 + +|配置文件 | n_fft | GPU 数量 | 主干网络 |预训练| top1 acc/delta| top5 acc/delta | 推理时间 (video/s) | GPU 显存占用 (M)| ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[tsn_r18_64x1x1_100e_kinetics400_audio_feature](/configs/recognition_audio/resnet/tsn_r18_64x1x1_100e_kinetics400_audio_feature.py)|1024|8| ResNet18 | None |19.7|35.75|x|1897|[ckpt](https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/tsn_r18_64x1x1_100e_kinetics400_audio_feature_20201012-bf34df6c.pth)|[log](https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/20201010_144630.log)|[json](https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/20201010_144630.log.json)| +|[tsn_r18_64x1x1_100e_kinetics400_audio_feature](/configs/recognition_audio/resnet/tsn_r18_64x1x1_100e_kinetics400_audio_feature.py) + [tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py)|1024|8| ResNet(18+50) | None |71.50(+0.39)|90.18(+0.14)|x|x|x|x|x| + +注: + +1. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 + 依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。 + 如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。 +2. 
这里的 **推理时间** 是根据 [基准测试脚本](/tools/analysis/benchmark.py) 获得的,采用测试时的采帧策略,且只考虑模型的推理时间,
+   并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。
+
+对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的准备音频部分。
+
+## 如何训练
+
+用户可以使用以下指令进行模型训练。
+
+```shell
+python tools/train.py ${CONFIG_FILE} [optional arguments]
+```
+
+例如:以一个确定性的训练方式,辅以定期的验证过程进行 ResNet 模型在 Kinetics400 音频数据集上的训练。
+
+```shell
+python tools/train.py configs/recognition_audio/resnet/tsn_r50_64x1x1_100e_kinetics400_audio_feature.py \
+    --work-dir work_dirs/tsn_r50_64x1x1_100e_kinetics400_audio_feature \
+    --validate --seed 0 --deterministic
+```
+
+更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。
+
+## 如何测试
+
+用户可以使用以下指令进行模型测试。
+
+```shell
+python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments]
+```
+
+例如:在 Kinetics400 音频数据集上测试 ResNet 模型,并将结果导出为一个 json 文件。
+
+```shell
+python tools/test.py configs/recognition_audio/resnet/tsn_r50_64x1x1_100e_kinetics400_audio_feature.py \
+    checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy mean_class_accuracy \
+    --out result.json
+```
+
+更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。
+
+## 融合
+
+对于多模态融合,用户可以使用这个 [脚本](/tools/analysis/report_accuracy.py),其命令大致为:
+
+```shell
+python tools/analysis/report_accuracy.py --scores ${AUDIO_RESULT_PKL} ${VISUAL_RESULT_PKL} --datalist data/kinetics400/kinetics400_val_list_rawframes.txt --coefficient 1 1
+```
+
++ AUDIO_RESULT_PKL: `tools/test.py` 脚本通过 `--out` 选项存储的输出文件。
++ VISUAL_RESULT_PKL: `tools/test.py` 脚本通过 `--out` 选项存储的输出文件。

From 91e0714a4e4b08b27d666e7358514abf934ec6da Mon Sep 17 00:00:00 2001
From: Jintao Lin <528557675@qq.com>
Date: Fri, 30 Apr 2021 22:32:37 +0800
Subject: [PATCH 083/414] Bump version to v0.14.0 (#846)

* Bump version to v0.14.0

* Update README_zh-CN.md
---
 README.md           | 3 ++-
 README_zh-CN.md     | 3 ++-
 mmaction/version.py | 2 +-
 3 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/README.md b/README.md
index 5c9db8a084..59854b80b9 100644
--- a/README.md
+++ b/README.md
@@ -55,7 +55,7 @@ The master branch works with **PyTorch 1.3+**.

 ## Changelog

-v0.13.0 was released in 31/03/2021. Please refer to [changelog.md](docs/changelog.md) for details and release history.
+v0.14.0 was released in 30/04/2021. Please refer to [changelog.md](docs/changelog.md) for details and release history.

## Benchmark @@ -147,6 +147,7 @@ Supported datasets for Action Recognition: - ✅ [Jester](/tools/data/jester/README.md) \[ [Homepage](https://20bn.com/datasets/jester/v1) \] (ICCV'2019) - ✅ [GYM](/tools/data/gym/README.md) \[ [Homepage](https://sdolivia.github.io/FineGym/) \] (CVPR'2020) - ✅ [ActivityNet](/tools/data/activitynet/README.md) \[ [Homepage](http://activity-net.org/) \] (CVPR'2015) +- ✅ [Diving48](/tools/data/diving48/README.md) \[ [Homepage](http://www.svcl.ucsd.edu/projects/resound/dataset.html) \] (ECCV'2018) diff --git a/README_zh-CN.md b/README_zh-CN.md index 31bd029e70..9f88beb63e 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -49,7 +49,7 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLa ## 更新记录 -v0.13.0 版本已于 2021 年 3 月 31 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史 +v0.14.0 版本已于 2021 年 4 月 30 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史 ## 基准测试 @@ -139,6 +139,7 @@ v0.13.0 版本已于 2021 年 3 月 31 日发布,可通过查阅 [更新日志 - ✅ [Jester](/tools/data/jester/README_zh-CN.md) \[ [主页](https://20bn.com/datasets/jester/v1) \] (ICCV'2019) - ✅ [GYM](/tools/data/gym/README_zh-CN.md) \[ [主页](https://sdolivia.github.io/FineGym/) \] (CVPR'2020) - ✅ [ActivityNet](/tools/data/activitynet/README_zh-CN.md) \[ [主页](http://activity-net.org/) \] (CVPR'2015) +- ✅ [Diving48](/tools/data/diving48/README_zh-CN.md) \[ [主页](http://www.svcl.ucsd.edu/projects/resound/dataset.html) \] (ECCV'2018) diff --git a/mmaction/version.py b/mmaction/version.py index e090d9f31a..906660529d 100644 --- a/mmaction/version.py +++ b/mmaction/version.py @@ -1,6 +1,6 @@ # Copyright (c) Open-MMLab. All rights reserved. -__version__ = '0.13.0' +__version__ = '0.14.0' def parse_version_info(version_str): From 85d094f9370c6ad500f1346c06a39d9b441a0396 Mon Sep 17 00:00:00 2001 From: Kenny Date: Sat, 1 May 2021 13:24:39 +0800 Subject: [PATCH 084/414] use builder instead of registry --- mmaction/datasets/pipelines/pose_loading.py | 2 +- mmaction/datasets/pose_dataset.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mmaction/datasets/pipelines/pose_loading.py b/mmaction/datasets/pipelines/pose_loading.py index afe847df86..aa7e95b012 100644 --- a/mmaction/datasets/pipelines/pose_loading.py +++ b/mmaction/datasets/pipelines/pose_loading.py @@ -5,7 +5,7 @@ from mmcv.fileio import FileClient from scipy.stats import mode -from ..registry import PIPELINES +from ..builder import PIPELINES from .augmentations import Flip diff --git a/mmaction/datasets/pose_dataset.py b/mmaction/datasets/pose_dataset.py index 2663c81532..af505862ca 100644 --- a/mmaction/datasets/pose_dataset.py +++ b/mmaction/datasets/pose_dataset.py @@ -5,7 +5,7 @@ from ..utils import get_root_logger from .base import BaseDataset -from .registry import DATASETS +from .builder import DATASETS @DATASETS.register_module() From c6eec3670b270b39d6644524577805bfbcfb35dc Mon Sep 17 00:00:00 2001 From: Kenny Date: Sat, 1 May 2021 13:28:20 +0800 Subject: [PATCH 085/414] resolve comments --- mmaction/datasets/pipelines/augmentations.py | 7 +++---- .../test_pipelines/test_augmentations/test_misc.py | 8 ++++---- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/mmaction/datasets/pipelines/augmentations.py b/mmaction/datasets/pipelines/augmentations.py index 38a4e1b5a5..68ff89be1b 100644 --- a/mmaction/datasets/pipelines/augmentations.py +++ b/mmaction/datasets/pipelines/augmentations.py @@ -9,11 +9,11 @@ from ..builder import PIPELINES -def combine_quadruple(a, b): +def _combine_quadruple(a, b): return (a[0] + a[2] * b[0], a[1] + a[3] * b[1], 
a[2] * b[2], a[3] * b[3]) -def flip_quadruple(a): +def _flip_quadruple(a): return (1 - a[0] - a[2], a[1], a[2], a[3]) @@ -142,7 +142,7 @@ def __call__(self, results): crop_quadruple = results.get('crop_quadruple', (0., 0., 1., 1.)) new_crop_quadruple = (min_x / w, min_y / h, (max_x - min_x) / w, (max_y - min_y) / h) - crop_quadruple = combine_quadruple(crop_quadruple, new_crop_quadruple) + crop_quadruple = _combine_quadruple(crop_quadruple, new_crop_quadruple) results['crop_quadruple'] = crop_quadruple return results @@ -1219,7 +1219,6 @@ class Flip: Default: [2, 4, 6, 8, 10, 12, 14, 16]. (COCO-17P keypoints) lazy (bool): Determine whether to apply lazy operation. Default: False. """ - # Only horizontal flip is useful _directions = ['horizontal', 'vertical'] def __init__(self, diff --git a/tests/test_data/test_pipelines/test_augmentations/test_misc.py b/tests/test_data/test_pipelines/test_augmentations/test_misc.py index b639229c54..4c1db1c087 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_misc.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_misc.py @@ -1,5 +1,5 @@ -from mmaction.datasets.pipelines.augmentations import (combine_quadruple, - flip_quadruple) +from mmaction.datasets.pipelines.augmentations import (_combine_quadruple, + _flip_quadruple) class TestQuadrupleOps: @@ -7,10 +7,10 @@ class TestQuadrupleOps: def test_combine_quadruple(self): a = (0.1, 0.1, 0.5, 0.5) b = (0.3, 0.3, 0.7, 0.7) - res = combine_quadruple(a, b) + res = _combine_quadruple(a, b) assert res == (0.25, 0.25, 0.35, 0.35) def test_flip_quadruple(self): a = (0.1, 0.1, 0.5, 0.5) - res = flip_quadruple(a) + res = _flip_quadruple(a) assert res == (0.4, 0.1, 0.5, 0.5) From 8d4cfbf71d60de5764d1891a6757cd77417db94b Mon Sep 17 00:00:00 2001 From: Kenny Date: Sat, 1 May 2021 13:34:52 +0800 Subject: [PATCH 086/414] resolve comments --- mmaction/datasets/pipelines/augmentations.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/mmaction/datasets/pipelines/augmentations.py b/mmaction/datasets/pipelines/augmentations.py index 68ff89be1b..6195c95d98 100644 --- a/mmaction/datasets/pipelines/augmentations.py +++ b/mmaction/datasets/pipelines/augmentations.py @@ -95,19 +95,16 @@ def __call__(self, results): img_shape = results['img_shape'] h, w = img_shape kp = results['kp'] - min_x, min_y, max_x, max_y = np.Inf, np.Inf, -np.Inf, -np.Inf # Make NaN zero kp[np.isnan(kp)] = 0. 
kp_x = kp[..., 0]
         kp_y = kp[..., 1]

-        # There is at least one legal keypoint
-        if np.sum(kp_x != 0) or np.sum(kp_y != 0):
-            min_x = min(min(kp_x[kp_x != 0]), min_x)
-            min_y = min(min(kp_y[kp_y != 0]), min_y)
-            max_x = max(max(kp_x[kp_x != 0]), max_x)
-            max_y = max(max(kp_y[kp_y != 0]), max_y)
+        min_x = np.min(kp_x[kp_x != 0], initial=np.Inf)
+        min_y = np.min(kp_y[kp_y != 0], initial=np.Inf)
+        max_x = np.max(kp_x[kp_x != 0], initial=-np.Inf)
+        max_y = np.max(kp_y[kp_y != 0], initial=-np.Inf)

         # The compact area is too small
         if max_x - min_x < self.threshold or max_y - min_y < self.threshold:

From a76d727325e748fc36bf35d6132bc97518d2df34 Mon Sep 17 00:00:00 2001
From: Kenny
Date: Sat, 1 May 2021 15:21:58 +0800
Subject: [PATCH 087/414] add test pose dataset

---
 tests/data/annotations/sample.pkl             | Bin 0 -> 284438 bytes
 tests/test_data/test_datasets/base.py         |  1 +
 .../test_datasets/test_pose_dataset.py        | 51 ++++++++++++++++++
 3 files changed, 52 insertions(+)
 create mode 100644 tests/data/annotations/sample.pkl
 create mode 100644 tests/test_data/test_datasets/test_pose_dataset.py

diff --git a/tests/data/annotations/sample.pkl b/tests/data/annotations/sample.pkl
new file mode 100644
index 0000000000000000000000000000000000000000..ee61c7125247ab7d622d9ef6528ce01e88af99c5
GIT binary patch
literal 284438

[... 284438 bytes of base85-encoded binary data omitted ...]
zRt%fIX92MpH^vm3ar<a|JT@Nuuz>V}W|?OHc<)^GQ^{J{q=Y z9?#LR)$@3dI@g4`2(y@Li@C^k_A@)z+fN;MrN^k`hIoom$&KZyo9r8-l8)nn)k8)l z2kny~fS4oM7!5H?W2f_IxVH_82g#o4zSw@uu8d!5NvgL%%IENAT_mXRt`%Qob~|2X znZ$g^`%8+kATuZt-Z{d}6$$*3$Xz3w*=u5?3y`r=$cKt2^_Bun zx5=zCS@YY!Xme_g1g;CE9$~L4$HbW>j*0hu+NOvniqRe$Fb>*W(b)#hmL+F$BrFsA zS&)whL`~nK&Bj@@?$?{wHQejKwz41!Tv=2P&KJn^8ZVzKWya6KOt6mZ)bDe6mMct9 zrPCuX4qGuG%Wvy2?O$B%vN=-E;{YG23?u25P$8^k-;qY(*vJMG*?8BkXmMkX0$Qw)=SD|&gbP28-{Ya+{tf*T=%xiAAuGi!i8zmy$pX451g^q$4Pl+B5Qcjy z!qyMwydr_g1@f1+WTV(S%F%-*Bg9RSl<^jcO!;j`8QB-0s2OZfb;tZbTz=()taM8b z)RQ8MgOp8DY&{gw_$TlTkMQ}zJ9Tkh0nFxX>H+QHJluu$vxK0(Kx%~JwCB5G@m~(* znI0}FF|^>kCM3@LJli>g4mPkAg+h$8ACZpXbf65U(?R(S7Kcy%a~$};Q(|;^x%YiI z$L7+TZ^&SADu>kVwMvW@$=0Su9*&2g-?jqx!vP~;p9l#My$E=)Ae+z5m>DGuf=aLO zLA@IY+CG1-6zh^a4$g~Cl(|J#hNnu%s;s>Clj3N?($Wj;kI@aJ=gFWuSBb=aXt=jt zE`Q*DB^kyF*_w7Mt;kv^2=sjX=`a)xoK_Gp8jX*t##Q=LEr|D4_nJ$f{W^2KM&`rF;N}b>wHAD!0xL**hbE-w^nQ_d=R~vpivC;RE$~xFcWlTtgHvw0VA`I$Ru?yM9zs2JS z7_vGwHXw#B$v3^6Z2(VoFisW(a|!fXX^8Cz9D!E1Ru@JB1TbL7>iuprw8568sD&!% zkHrOoikN47igI*%JCb!sE-!0`Q2kkvZYf!W>f{C(O3@GMg7z+hqL-_Wd`Xu@;*eV^ zahReA4av4BDEm}@WS@E;=TMQyf?Ujult~XrlDQxDD7sVR%+-)CEWE5g)S`=I{ zNKkk=KM#(H4thfow%-@>wFv2-g7^y13(ZScfT>yC-R~(qM zoL>+iYP>BQkoESy2Ip&l=QYTTU~r@tLuHl@IIt#HNEvm#ES&`pvhvyfZVj^Ua)2vD zCm5QD&D@f6fQTV`)=8xT`E|9|d$w)~s*x4&uFZfjl;(;Xp!%|NJSWxU4v}277xv!Q zbx9DsY)kMXTAD%NK%XKRSQ-Q-+cN!KQ#LBj5IQ1uQM2N#vCWiCDWC z-h$bnObD(pw_&p03kL%;A(?%r1UR^=9{j9ONnC3_E_`qhI4LFZ@YmYE6bwNI1t#>u z-6TxZM}r_GU}j%x5L_%pktJdiRdF?Zx{*v!>T*(h@V=5w`YDNPhRR9&PSon{S!711 zEGd8`l_BEKp)>el^9AdzeFoqqrB;Q&FN0w>KS@l%UMy_ z)3Y+cgk&d%C5cqj;V!O^s7QCPfllH~_BUzit@<48_(df3I3V)3s6cM&pp%n)=ZHyU zF;;9VhhzG@WrabSTZHDUNY*03qArPN z$w(HJ5htKxq(MXv=e2K=by^aD>*Z_-u^V8u1A?eQRwAKluR%%GiE$X?T%4(-auXlx zEwQ#Dj8*yGv&IR7CF|R5FM_8 zQKDMnYPWe=EiHs?OUlIQ4Wgu+RfEC@JwaV?V*3XoSV|f=@LXF(8r%IMRwK}2)(7H;qx@fB!HfWdKZ2-gj)e1PQ)WsxFw?Et#$4yv! 
zJ$77ERDxSki8)E+l>_Zm#8yV4q6U`JDvJAE9j!NxFU?h%!2aqaYWvS4^F_X+` zc0{`pH7|0?cO&;JB`X7AlccCj)&#Nvi&9pEkfwX4(h!Pkhu3t0;oa<1t?1SxQN!!{ zMOz*Wq!^Pm6(Hg6{P_WApYTy}4;-~D?&on4v8kR#N4 z_@tIU8KO&kluWn`Mf-mJ6|wjV@U!Mw?L>p57Cx9pxX`wXos%wYGR>EXv#;>;Rdt#?}wIn+nE0O zK2%Boc3;;3kc5oBpJ;_dQXy!@Qd)qS=5G-dbX666Z8embT6#3QGy6kBy70`ZfQMF) zso#hmz0NFfPgg)R;yKTYWU3T)IVIM&DwV~l6x_0DQ;uqtf*MAM;X$3a9Kqg07e{#1 zxdJlNxdnb^!?J&}b9Awq*pj{=Q;MO?{D5$N3E;IOj5HPE)79EfB6p!$*t#8*g+Q`I zQ-uJ{6Gi%Yq%hV8aF5dj$@@O?WqN;ldqU)Ay0MjR)ylDD7b_}Bui=(xq9)7A1R23J zJtB-kr8YllN9G&*w`y30uh5Ek}J4V?jp$ zEGS|EPE>mQ#E?5=^?chDWUr7?MR=$R0(S8?sVLL~7p+nu_^Y4b4G^mnEG~R;AX7Ns zZrVQ-L8FUxIw*@te91wiQHH^)0^w9#KSdD}u~Z)o1q8wSc{ygIyR#f)Nm~-Us-sa# zP?6jgM>$$IxQ6QB*GL0t;gt3{5sE!F%~6R&T6=A=B`*ONMQF@8~lvIr}NP7)I(BqagMECe_gmF?lc zSPXG31&CgL*Z}xRE&Ajm)l|uMe-uei6_f3;hbF*a_;p*dG6+JvtpPSno~)pIC}|$2 zzkn&GmwQ&h?4f;@m9+vAWp#TPo-7SKv9>a#kcTh}DCy3ssmN&ZpY16Da8xBBqc%28 zgB^y?@&Z>CQWdi0eb%<7k>4SRUd@K+TW@!1v{!X3h^DlnS7(&4C#xK64yP}X4;m7Xli3>4VELyP>deU#()S6R{)lYd_0S9 zUyqJ`PlsARkh;n&s#}5=vkLkoN323%25_i_D!goT-Jue6l}AQ~)t4jma?IuwVM|CT z0x)v0WhLG!$ORa%@l8vWr$zyhwEJl}0nD3j2$nV6sfx*(bjwhHYQPR5&J#Tn!TVE&fBUQtdJuOJo)S=*z{g7m9 zd66cUhG05~MIjhU{JkvG*!7bwi>@O%w)YdSVS~-ns|sHtgp`-zmMx}PWWr~w6RV3_ zg4%x3P?|(?_0W!_6ADok8(^#~2ud6(tYBI#0{DS4SWK)YwZ2~(qZzpYMi&xrLN&4@ z9xEdDyfQ1%DG!0i!SyZD*dK^N4vi4+Cc(=w3LJ)5QerjNoFw7Ml@&yNItOmi9$#RQ zsghGuZ*+%yGT6ifU;V!va2F`ke}yDgouTI8ldiQX!rbFzOm$Og9_Y=j1a?u*t!BNQ4tBd#3l!n~Box`Di5DwM5zqmK&N58*jQ3;p z@ZIX}0?WkiY(pZK{IU|h(~Lm#Y0yLXbfMXY>T)uWUwNOvkF2@YUpH`r^(s?roJoj!%Oc}etx zfJyH$kSNtiDB7!u0&GdJ4Gdf1AHj~1t3`nfH(-VbGBgNO%Esrb*^X?A0<^%Zh(0Py zka-uOKAEr@!!lUF|Yi-2H1RD?Gp(Sf{zyg^G!^ABDuN32EPVlf|XM!e$OIu z03cshgWMFMJG~DDz)%s3LQ0D!5KC5*s7_PP&QX%sQ;%2#4gowH(=AF%bS7*91o&uE z8mtC{Bvnt>gUd|DkC#jF?DkLEQVShWvGy2Ij20zoDrQB&rAJCd%x?->PiOS9Yb+$b zLUf)O%knZ1Eo+KN*l1NEmIU=b)#~<-{#hci$`{DAAPaaxnuOjs&i+#*o39J%h|p&zET3FPrrK7 zFRwfJ$}4W2o~aY=c5lYlCQsaQ##SX9RnPs-Z{yxKnpjl7i?eSHl%H=mQK;^Tiwp*5 z`JK|a*Pg-PqzC49X2*@4A_CRKzI170Cya~~Kar^*@)WEupKWF-h~zx+7ZEQaj>UyT zp{+C7S(700Y5}I3)R4E7Yz4!)3V5@pr#H81?$F$rIU#c)<_64}kT)P-KKAAJ_GEYD zF2&7>hZ4^senDJ%c;IlJ;rhaTWmS_iC$r^nli=Qv{s~+QcmlBCPeXhL^eo`hcs=dT z*9>L3iMYLECpF(8l9kU1h{B-z3aIDW&+N>zpE_f=rZ?`7NA$)6<&pXJL%p#eZtN4i zv0x~3Sy#A1cpT^c^CBGB=Rh@hAiY~Q7jw%~gVCuMZQTR6j1Xf3<#Z9zFx1I2B`uoq z@_ra)QC&MU|Cm>wLupX^Wg2Jg)4=q#ck2U+p_dvMpa|W=F_xo5RK)$HC2Hnljvb(f zUr+@O6v4N*I;xP*Wd*&Qce4d9+8lQu%j?}&@|L}~$}P-lsI8v)C~_^hj2Mtka5sAm zMpQq+Tz(t%zLd;#dWhG|mOP4Nd;O;JK~uO_DoWhWgV8ZMbz$kThwNu|9=4x4W5Q4d zlUuL2_Uhjpckn?+bRKbi#Kj{!i$-5;bsmk2w~WZ6usAMu(<&};))T7ks$Wh!|Js2m zQ*J-yq!Oy`F}E{R-LHm2)%85oSsD-YhN^o!E|gGp<)}AQ-4kvFS}yZDrL){VtcqXX zYHnvmywrrM`@T#oy`ky)gU|Jcmg^58*B>ITHvn9JfVa^B+(rgy8ySMFKfqdV@U;H0 zXd?rj^#?TT4_NlIQ9;T21CEUhCN?ruSbq?(-tb=i!Mgeba`lJY>JO;ZA2e$?C{}N{ ztNu_{Bg0jV4n{RH)YRAkrACL28aq7HSb?C{74B(WVV%Yf-ZWN-rm;gXjTK;N>@Z5} z4wf`_SfsH+AB`2@Xsi%MV+AT2I~dXEutVz#EHpZJ(8$0*qr?A<9pYznD4+g7JtG71 z^oQOVE3nSkp>xIxlCyz;I2(*;vr%zs)*pIi{b6M`7%67`!Cy8Q&t;rZT8Ka!+{j4l z*@#c;+14K$WrGnotHxrz7Z6-eJ+02R7wV5leXETjj*JgUPuFbS-{oDAR`8UdGe}kU+>-V%D zj~Bi9))^2 zxq2*5>5*>Gqg|ZGc4QvQU3qM$BWQ2#<6E9_i-W@5;%*&Ae#Elnx^u+qROifQO^>gVNUNSedZ?-lzxfPlfDh2LL5718V z2@6x{(4RaNdQesVb$xtGb7)u3jpdS0+Q+n~v_g}g>q2j8xeUM)+L}cd{8&-wF0{D2 zb^9l!9LsU9b4@ulvd7C}piOVo*R(HapU*q$4Z6u9_kv`UMY*6 zi|doBTML|l8pvL*4=aWm*&`b8L@jT;H>itzj*9moz;5SCqRaGBzV~W<71NiBjR)eQmvHwFl*QE>58>-I01-S`7+ z>J-ivR=l`@uWIkv3P5C|tdP>WaZ-JzeGwz-Bilc2Pp&3c@h+UuZ!fT|J1vxlNAvcm z7}W1o5XL}r~fPkxL15)dwUkF??>pe~0Kw zi2Bu1V!p~G4 z)zAbl>TDpV-FzGtuw8z-x`t0c3`PhDN06P5E&R6uzx`Vl9R)D?!S`UuF~~t_l+x7S 
zJInb655Cc!QEV=CtE)h%4l81uEC{EISE@rRi%5{vTiOd6Fvr4-1)B$g3c|L0=aoWj zkVAJot}I(ws$*oPt+_aurD#bhWqQ$lYPaW}jM}s4I6DyX+!mWSr6^hh9x4Vz+?_sE zrOWl)kPC(;K(>{!ka!f5)jpm7T7k&Sy=$_Wtla*{yv*wOh4yP`=Yz=xDY3!oXQ--Sd$4b_F%S-HWisYUVS_T)U97 z^mPrNNeI0ZtHn>Su={p3UzSHy3DUth3}TT! zKbNU&tKDh+rpJE%@xbv8Mgfwz(&9315*^jb*qUD-3tOklZyp#2zKLJP>Kn`>j$umJ zKCZEFGhmjMf&MNtmJ{21G2@W-INsBnYWsGLn%#jaGwK|ijemsQ_(SnX1HeG@VjsVn zH^gb}tJoG7A2mg2vr?W4ELEwS>XSGr4lZY}1B^+Jfuyj)tC79K+rMaf-CHaVR)!-t zSA^%=zo}-1b&Dc(I&ATd?lhv8fQ9Aku!*;c7XZm^dhhA-)Kj?e$?a{d)#? z+7t$1UaW6yUsA1oRaW`(6*s`4unaD{Yb|*%jy&#XMJ0TuR6<_|YU44yC})TLEUNch z#rG%csDN3z{Q59^dN!Pww+SDBp>G=EfU%NqH7YIE#=Fd_aMWt>-fHd3E$h?D&#F-RHQWvxN9*K}H~j{kk1K zJs_aN4m8-|(YC4qg1fU-8@T~ewPl|2F4Sa{GNsrf%nyGtw+fa(+<_usy>3BT7=bS! zUXX=b!p@fcd$ZVb z;`Ds=27=8^V?4J-`K&UGInTV!!XaGw9tb@AZ9tB%0jt~(V=X)?s6AQWYoyGB$hF|g z;g{D^Hma0?a-7E8hX?Pp3K>rLXqr**e7N{R3nJLsms*3Lw7%J3hDyE^+r=S&z9WVQ z$7Xb4VBrj#>$x4Iw!TSrV?Lx#zgwh|cA?D}Q+XTrMo+R6(C_sOsjC-MK7`WQJVJrP{mWu5Q zODX0sNZVKrguk#$jL^*9Ju?w{@;!euqBaj zkJeV@qpIX4@XiY()TMf(nieh_`JTaGn+G>;ZXZy(&~?4&E#LdlIKe@#30in@c_lm8 zjM1b@+yNmu_y8wDcx4qVIuG9O*HjDL1Ur+!m%@uNKZ!yRqSlyDg{6#ZUVNX-dLr&R zsl9Ds2zvJe#O3sW1j0VwQ21GxloYxRcOUR}6-R>bC2YWe6AoZ(95BX~ z$Jl@Y>E3&=4K@x~h#(VeV}lV%Jn}`DoRL5XB!L71C8EhuWRPIzX?A93JsW0c!_3Cn zG@EAQ%+BuYZ2DK%JLlbZ?|n}~c)#C=4(Ijl)3Lg`y1J^m+V5ALu>t-dyxCxE>>@@} z-G^egZBPDi*=_&AdqMXR?=e%BRTK2iGrN!aG<#y?tnOojk*)6IgOTm-6D5)<%U7mf za&`Afzh;a)wcXeaYp0)aK=%h1%wN!6a8OZOKjrhtGrrFLUn9>(qPStLCsFt5NbtNb zwN=YA5%GeEkrH}QsJ5nf)~ApvJm>G+?(^OuRk-Or3%eU4(;Je8Ma9k^nW(nzRbdYc zPgHA9^=qQad+J{k72;F+V!m#i}Lxy3$CK@*k`!&(vVc4&UMi9e)O*D-d{%hP|VzN-LA;s|UMDvQtfAt$) z4F5f7jxiY1Z=f;k_i&?){@6hyj^Q!GO*;l-hZ%eH$4zGZ(HlRRAxLlhaHEjk*kJ}D z{c*z$MkdA%Gau=X8*Wb0A3M2uN&RKGnMrT#Fq4!1xM3zJ!{UaSr3{Z7W~?$XZn)`6 zZ|tBM%dnWqOk2j|2aR0%WBQF=27eEl!Ay)9ZWJ>fJ7^x$A2ZQhX4tQO6PjVaCz{nv z{%e@Q&BVCj<~I{#hZ*BcjN5`~&SW=MZP8?>FTvZg;E{N0Bc8#pQ=9b+{klb4pS}cB zoBj;_x5+-38pX`n(WgQwnUSCn%c%_@aq<>j`|X;+d3npCfU|l zBlV@(8snrU%@kHjLqBcNbSV>TjR8}p*?KdkEgCRof~n1z^6O;AOug|_7&bMZrZ#VC zzP@uKr!;}uqN&psEu1#}3Jja_;+%^4?rbDPF;l8 zk(ZW2pJiNIDpj2Z>nhdOebp$<%W^6tW*l=v(V0?tz|v;Ie*Gn$mysGyAK3uNNC+?E zLSErMs?VJ-P|O8{8z9rOq#6vI0O_`(WTebc06T(31oVfoQaQWMq=&j^aP*LJfd!5p zGPO}tYT&7v=_&}@S9SKIsstLX>}{xwMny&{Ax)%Fq_BGwQ05yAstVrp9x0lrreAVM z4heM4*mI`_8V^*XcUGfIX$w9_6U{fqy7s}LksRxp%!vev2b5&1Do47bopir|LYJ{f z=C4$jb#@3_sxhDx4MdEBeDF^fWrYcM#n6Y{&i!;9AES#1uO?^Kxe}mqfu={Um4PRL zD%2hiQ41eOoB2)7HxBM;piu)z6@dp#1O27d<{PU~N9!JmioVDgYM1J#VY}hzygD>4 z+ox-=Bqu~cO{$D59OeBGW%nn-<-pl(>St+HA+5)td0>cP_N*?*oGpBs7U0Y5k~VuJ4S# z+z#hUO%*y1YiVayWOQlAJV5rk2EAV)oT7$D&as}WOS2Nj`T-X}>$XT8=<$>)FxZ2~i(8O;Hzur8UKobp10L!tY zbX_lCo(lBnMkYz$hPq!0gnzNz++g74$;Y5TKNYFv(P|W&Z;3qD^11abGV^nIQ6GJc zU+J+~m?L9YU@I8w210jzaYS^yRq`V$4*NmA<0(0p)OLPZm62Ii;A{>q3{%yZRd>sK zCCYLF`#@ea6CtHn5vUn?IJ~CX3>BpZfOQndC-cxZ!z#ilYHd&;UuFUccq6&-v93KH zSbRpoV|sCfpJ#Y}oVZFa!zUw2;622f5Lo#89UkFCj22|9)osjd3<^WNE4iJD@yIMj-XoWe>xu`Z(2pna3I_gZE=oG1xA8X4fN1l5 zwPj$oO?T7<&Zt0Q8LIIHg#NfIGI*65y}P|P{dWy%#jUKI$TCO5T{%!ZmcT4ODsdzw|6gN|7rn|PIx5O18juh z*IxwV7PInv7nrm^(qka2DT>!=sW<0Pu-uh!_V(ZxN0>YAX^cWQX& zvO=4(ql)zy408=hMwq~n?XL{i@v-wIhZHtr-Mn9!QMN=0ACIbH+gMgZVgjhK<7H?hR*h- zv&zp#HC3SVcpW&mGz^mpg7&z%z(c2fz^WCF4Y@J!dx}nFg zCiJsO2_W5GQ!wFm_HpMLrw<{@h+Q#y${afEksL7f`TjzM7vhA^;Na&(vnot&$EH{2 z&>CIMdvScVDZO)2@Fsv-h+A>Yq zwQhuxJvoh(ECZ^rBGQd6DS*ByXwxBe$qM%7lEErRW1BbCRl)*E3{pb> zW(Me=u2_;YF;j*ITzu6g&+0NgH47`{XoNx}Fe>2efg1p)J$J^$xd7oRtgW6Jn z%W4p*7;5|O$=>8#%tG|$J`V33dl^fjs_c$FYj1*oT^XkwMFS3W+M=elg}lJV+s2lI zvAL75RgS?)*kh$`)!%}31}7)E1e$nrS)p&kk{sU|8{Diw3sw~RA_LN5sZ4ZCi0hx{ 
z?!pk`Al2k6P%Byo)v9aOP1H4RX&ft9k$Pl^6>Q=oy30(`G4h}}`lO(nN_QXTg^#^t zeAIvhnhdYvI6sVL)I-;xhiQgsFh#N*Afz_*NDHtZCYg8xV4M@QC>u&>CV@d_5f3>u z@UEc4h|TE2(l#A76viHcXgde!4$FbS^r+GK3NtU1?#%U7rlJaZZS+;6(=ss(k2z7_;-}U zTo6z-o}7`ocWWG5)AwwE_CS362tHcJ)vgY`yC(n=Fno#-;n=!zEh|JW7UWxl4k}02 z6k-Iya9IHG0Nj$pP-=$PKIx!F>D}{=K85S?Y?FMt2!8r!Ou`N2o@LI+r8n9OnbwqX zL1khn%U8{oKu*|(IlwqBF(<7eW5xQYMR0L!Mhx(!%l0Ci=0d;x(K=6rn7 z4g;12g7muJ!;dOo66>>(^5_K01_VteW$C#`MQ=qH&Mw6%QC%9R@je;|lhA-ZP&xCE z-Bz9Npy;FNVn=@?Q3>bKQMk;i^T-T~z}=*J!SU1|cH%z7EIzw$WK0M|*SJ;+Av>36!s^gGGGkQrCLG_-}!3|8}uW;{3VQ&-TvIzLG zwoq}S-i;J`&J7@&5p#~fq0LkQ8;apVrjZiC4K5Q&% zg-zZIx^H^V8~N)r3VjX*XLjH6sV{O?_b&}bwz|JQ7}@TQmdM)6;AX#OjO_XDG1Khh z4?E)f=N~ca%wvCaK{3t#4WCCoGTt`ZUDj+S;Z~ZCcExRcu(~R zR%24t*h)3#PmO6)W3AL!A2pUmjp0yZ4%ArqG1?>|T1!S{lQZ z#x$j|Ga0lbX>3K({>Hq}-fZ%vzEjL~aQ(b!1znLsqw4vk?0Lxp@AbA`q- zp|L|~3=eu8sjRD=r!c3I+H){PxiQj1L8#R5SlyBUBG;Th6 zoqBBQQKr|b^VE`u9=z6sr{+8L-KpbFy>{xdYyEX-sC%{4sfVt0(4Uwd|N3>ysY_1H zalc-;*8QfoxBJVWKuGQH6bkg$yccx;y7#=11A6Gx--uM`)ZZ+re%HIEQ-4c%q%}sT zzP~*^?^|V^wN46H_N`|EZt4$*^nltX-Jbe6Nt~e8OX1hOE?2k_63?x8R*T`)Gi>Gr#DSj zDn=l~`EF_#0G#t@wU#O1fgWVT_KVMebV-^_#dG6pnAxhRR zZ%F{~hJv8JqX}H;z!-xnEA_FkVY!}sFVYGc8~O0;K<>P%upyULz<57W?NxxOECMqQ z4P+8loPdN15HQNmRR@;c9Ta$`np5%^i6^W5N*3e#Qq}#vpt@-D-!4@5?|3ih{$1~R zBVR{w`#9d2-M{BkU*xRr?+-?{x_>Yj+3x=R5?Qms|G=*qBO4L);&1!3gU&eO@XO9S z;k<99zwLkM^T?aN&OT-2twix~gPweIHtM{EdvEFy? z$_C$qXX|=*ZiDCQ`p?hB@3(VeuKz4t?@qAy{9A;3&${*Q5_?a$MY#V&TkkG%;yJe7 zon`;|wch<^|Eab5cYQ{!|8MWe3B#XH5Q)B(1 z{xf6azXwl?)iD##h}FNwPlb(#`cH%P?z*@BEZE?AuZd^6diU&GKFKw{pC3G?)$H&m z3p6|b!9XJm*kYg&77PX&@xj!AMz}CVppiRF7HH%VQv@1O#TEmNEMsc|jl5%P0*y>$ z>jRDSWa>a8TiIfu5x#6qpb^k)Wnkz@r>%)>ET&o8rBNBT^}? z+v0ob^HGhMZ)hN8f0Mo6$+4&uBQ`R`$pVdRak4-obDTQRNF=8SG{VZs0*w%JvOuqJ zbFy$F^qeBl$V0anXauH{1sXx>)PY9KI$59*zfK)!gtL8tMpipnppoHjO`wtUP912Z zz*7Vo0r3_CjRbjUpb;w%1{y*0V4zp@+!yYZL=O$u;%Vo&`kujKT`9aCe-9q$N|AQ& zJH^|*?-X_Szf<%*`8$Q;lfP?mdH>%Oo)3EuKlU~G!LJa!*JAgHe^UrQ>|G1yC;qLE zeD(fKk^SI%vg2QSPp<|0!~RZo2^o-&6+-zPtZCgfoOt|Dp(I{-yVV?jL#2 z8~NQH5zfDgR75!cx}^Fy-n9tl-wKcX{+I~oe|(sNfZQ-em!z9y-SqFTur7D_=_5pU z{6V%y7qu^HIR{0gCM^jWdD;3R%V#b`!0G3>ogACqKXkjw26c=^R$6=kf{| z0z2tdKsva%sKdjIB`mDS|M=v*F>WHhuFbrZmq9N6GO_f^C!u68!l2_iCjvI#&2aeF zI`0WvA?~w}ZI5o`M0ba}20$Spbap}}7Au6MFS#kY#nr0oMpyH@*uJJUpL@vzYRIU9 z%#h5JAa!l@c?LOV7_X*7S>Vg2>ng*QXPH>W;hQyF43B-#QUF%bs*pm@<`#J~Pr}Q~ zUBvlK`NWr?r0;~70CgK5d0(nnQ98hMZ|SUdOGb1{l1jIuI94AoF-&+LgA(@_20$_m zX*g>(h1^ZiUqr(XR=ct@Zu4vz&*~WU27C%7SG}t|%QfCrwos#WZGo2pftNT{t0Y-o z@aRpp4$dUai}IozsG|@MxatFQ290B@v%+zX?cxBB-UFhtfJoEH=A|j9Rsn1g#b$Gw zBqaOL7PKD@d*ARpDnV0Bc3%gX+dNZoWcOU_>Q-}BP9Yv`rYu-etjfUQvG&PvoM1_d z`GumI(zHlj2Ie-Fg^*Ie;-=K++oL?TH>tqDqgqxvPdlPABTi$@;&9R7w0BT5o)Gloc8}V1|lUph|Tk5s*dCTcHmT zmpG)VY!T4V%xIKry}6^mu^6)$Gsz_I7&!>XxcOG&=a5q8l5*(6ccte{F&K0Nk2nl! 
zlAPO`SIlm*j7*b=_C#1)xnmUWcr+VcXr7NMxXBdbCXo;81c#PFVa58koaN=Y(Abz2 z9glCB8#rOF0rs$pe$#=VAfde_X!z@4Q1}9aZ{BcyP4vhXz@dk#(L2Kz_ggjXNT`}N zcQQo(DSy5flxz)QQMJyqVp?nBpx~pt+xqB{(}VgM1`4CNe)(svRo&bD;$tb)3NG3Y z7w!yrxlfP}NLLp;+Tzr4=aMc*Hb+oNo(=4XxCeC-XP5S+ty9Om*k+3|9jiY29!UOZ z`Ov1EOP}maVH@+zufvkYp=SI z+L?`%MpxFBnYga~k-Cfoi8cT^LU5mZY@_tV z#VfBkAZqFZsCz4Pa+JyG;y6Whhdb2YUr!BykQJ~EhG%uGJYy{a0H0)sdlR0KO0R-?Cv z&t%6WpYQyn04I9_IN3*QuOT&wZeri+bj|*8_k?;9dYS>i1+65w+6~o?8*X0P`eJMM z)(7hzOIi;>bQig%?xl-eSbf;Ruhy5w9Fyywrcu+rXfA&M1&Y(2qYGWiym!Zq0_U~& z;(2|?QfLm7$_wcXbIiw8KCiRcg&vOt0|6uhLS( zW7rcDjgUHw1Wv;lwFkq;wUf`vNpyxpih4S(XxA6`yq;I)?Rp@oE%5OXXJF5cqgw{) zL9Bjs!jo*J&Gq)aKz-)1_qMMc^NS1+>c}>)ElBeu zD)hzRyd9o=tmQV2lgAu7=OjelQ60~E$hEsW%pEf21sS1eMoFa_waOXP*>UB^GBbJe z^{r!DpX?c+L@I-RiycSjy>aDCliw(->&_VHZ#i<|cA5tvw zRKF5m^~dUQQ*}Z6l-5C|WeWFIJ`a#DLn4&fn)nSc44p>30UJ)KdnAWaLCItKE{a9{ z;c!Uqz=#%Q$%z+wzJZJH6FQ>^QzA_BS+6|nY|G+s>TEY(6okvtW%T%t?mq4W*adOZ z%nJ7njP+T|JfMo^*m14V8|#5}_wc%vnF8GO{RKYbKaIq-U1^|&s_?bJI9-(A>WpNq z9y7if>z)c^hfdH18NSKN&83Mh=w1$gDLn|aIljEQ)G5aWHJwlg2xSX26TN#2=W?2B zCk`AVirGe$7y&6@qd~sI9Es~R#>QVwxpv`wS(ah*9HY7$;DUmEzOXU|(xV$1D)*~= zp)l@wcUcOW0YM(X(mAI4+>ONPyYsdi+Lw>r-_T%#@!VLAu8%v3!~vAYKorV)Mdy^x zQTV`76QLPJQ^~$p;f_SWp*W{!HaQTsYmneL9-3&T8+r>`8=MB64xsGbJDv7%0g0wC z;6f*W0jy=%7E8 zWvO_+tgeZ=vVogb^X}@so#RUrvOHPMsTZ9&bP(3f3)!}kJb-yKa${#bhNfi>#5}`* zKJ4^kd*8YR3iHv3K03DEVtEt=%<2b6L-ULi;~%T?S_6?;J?`utXMT5d?4kX6E^kUM zpMaobiK*}aFaNVTSGUh-f4n#*xUn_5KJH6_PDTN$Atwgn(O4-l^p8ReW3_o5I^ph- zV`B#(sisJpq#w}lL;dzN=-+9%<;$Q0a3XEN+Y@vrPA>1*IkQa)@V*)@nO$Y8_-omZ z#AfGCyY-3sD_CkYB5cGLR+o3q3lPPceoNf&a$%iJ=cU9Z;9Kvgz7b2$g7$7T>iKGp zsc2cS2e;r`!GGuW@vT`Ug&qU69)sM{VAC%aboGP;W4)BsXH9sVAE6o5PdWaNFLJ>} zJtrw!X*v1~j}N!$qv^m{&#OSjdt%%F^*DpW;g1_1ZfJe1X5c{(!WOnVcp!J$#K& zdI6Bqp)jy*xzmOt`ZBtv)%|xNB_vDy<02*e_udP-|G|6S$Ui0^?6X~)+5L|`^+nF= z{-?pnR`)*-Mz*{Er9{>SZGYm|jQPJ&tl17c^1G*=69zs9%U4lA}NSh4+C{QBic`SUxq{n~$vNW#zm?(f|0pL>V!^P}%t*!>^z z<@dsPZNrZJ#%sOCYU8G9y{2b*w4;8H8aVFjT?lGn}PKjevKP?O?3D^(TuC# zbgS23Yoc-1-qZ45ZFm)b$IY(B4XeI5ZE`cJ+B|BQ3DmgBQywgtHErCiY22J?+>EI< zU#bn2YV)LVlcZi3-L>&iZD>>*81)(yO*9_rH4>_gfa34e2U1R*R-5kBhf*dUNC~r< zK|`5wqnO@9DVI;1XyP*NjJV&JWzcSA&`?FpP!p9wYZNhqb|`};CvVHZWYCagsKLlk z!;ql{AVW<&*43wGdIN*D8-oTKlbKx%wXm4Nl43HuiHzTRqlvd`95K`aVyMZ(pp8SH zIYXb>!lq&72?NFmLyQgvO$>T%3I+`a`s@Yz%mh;N-}fG>|L@cJ4{G=amHC6J`$1iO zuV#K-eFUiq^s3x@mFj)^^Pzh3KHd0aI`Baicb^`6sJ?org1S!^-KTusqQZGo&v7T= z9!>K2p{H6`JgytAb;9GC;9C1TuH~(@xwZawTxXldpe7!M3a#w87Is|cI<9B!)vpde z9#v~dQxn?#i&(&MEdR4C?f=VrLHB=q&zt`T_3HnBB8k<1+D@_hPxr1@|Luh5|6#2D zGo`^{j*{QFD9aANh0Y$GpNE?m zM=2g7y!l$^B>EjY?3Xd$SMgEf{zlCEO(E>yNayuXkCFR?C(AROMrj3KdvEOdM{lSo zSo|V`TY@X>9E`yYR&S>carGS`I3mKc3tmBht#QG~t_yEm*lAacT}M_Itl{b+6~2cK z;(!p%g=Eo|bg4_~_(}PxD-UDGR(NE7nO=|0rak8AHV|Lc6%xO0>nAO()w7>hr?;Go z#@XU?zZWw9(3;uOJYuhZl-o>8egY$+u~*#i6q{0|tq!@|hubH#LUy*IG|zrd}@)13X>m+fw{P zpG9$&tcicfYSyoYV3u(j(m#s=u&xE@bs zH`{i7h2}wbY_87%MjQI$I=i*V6R5*JWVc8Xc}2@m*|c5PAq;VA;??Ew`${w6u-31| zq_BS~ekIR}Eufvr%GgU@O3q4lJ^G7QAR=7Z1U>h!$hdAOrERD_)Y-9fD$gj>${>Rx zv|m$wGNdga1o!WE0DZ>}QGi{l7%-Uo{$?38bsLgcwC2j zL+9J27HA14`CELbeN}8PZ3xYQb@Ty8b~Zl~UMdYLL8=jWrSlD!Vvqi!OfXqPKUUo&)A1z^J3d-;oDvzRJK}!h<(aE5 zjdJu%%E!Vz-TizALU=JmfU8rUzg;LfMoArEyJo?5^M==<7353@(?IH1Xje>spVWs2 zT^Fj6YdQg0-AUPuVWtdr2n%*Y;8!_)FitK+yrcc8xW*JfCE3V9!(L=LCVRoU=L6N} z1WaG=*!m~<)>$xg_+h)iouz@iL~iwMFGm3eamp$92O zXlIM8{+Q68tqRNyiHe4V1Tf^Qx0VQElGj~9`SJD+r7yGrtK1X%!Oot=huXT(=sz6Q zQvq?l8ASerWB4?Bqg!fY(mfOR`rTEs8w29`r$Z=gyT8uV{dDx3YUHMZO~ik9n|@ym zxRWP*65$HUZv-9*S`Yo{A~q=xFT?&b70e_Ic=E{nU^f$Va) 
z5LYiku=QoO!k8Cars~>2WCT{N?2IlB7@A6^z?-2DS=ipZwA|Wmq?bpR*?vS%A9(T$UcUSLep}RARBm>=eqh$qHN6NN)2UU13I)8QLnrjs4Ua~=e;WCK$ z)1WO0FN9@E7Oxx5mAZXd-j^E` z^}UER9d!}CDA_Dq%vNagL+-c@A{Cx1S9Xw9MB&kJ`SDuY@{HCjuTCKbbImB{p%2IG z*4yU-${GSMtZe*%JwxAra6sX!)n1(_@bS(!+ue5s6M+MHcVREK_g>K5!F%5PKkAc> z%X7X-AY(MR8*22h)x(+xU~pnVj$=(R;q8K1!xl!-koHyI=zAbm}emi&Cd0wO0NZUW>ZaKHdz5C(Z{oXfi)RTMN z+~4MI*7mj8rrMF_R+FWDvxm%mqm5$j3~dN)0&)4*PutvV;d1}hyJu@QY&K~&X0~Cu z^=h_Tc3C!2xn*khOSxIH=jk=^wbik=$^A@jVC-1zQgUa~n7rmbq-;N8;}M3cvF(U0 zM{G7?qY+z-*j~iOA~qGVp{NZzW9yKchS)IVR-t#3kVciY;bd(p+5K?ft_k{)g1aB} zUeMjmd!m8l?vVjlD`YXLuk9jhs3t`A+h&}X#%}pg|omW(EQAp zzWAng@#Q%!66z^rN% z!*6iOFSW($g$|kcD_Gp%w6PQS;AKDC!g%~7xY*q)B56wp3j2I;*e4Qd&!-%>c|dY0 z)R9Np@n=}swD${EGgg@UcsKo4FueQIpRvDqraH7x|6jzCG^dTu<3N`aeUILwFveTQ z;cnXBY@gL0UEAumadMcKr`nX(YzG%>QU^nGLhx!kx6Y|{3?&akvv&10{rMoy)_!Uf zWnb66q;qm>cI(i>@;r;%v+nKH#{Y2N{Cenk*V;LKrbVCrXy*&m$#bIhtLPMtZttzL z;k$QX@OKZiD@<+Y{*cilimxupYk4)`!DOeYWLGV&Axp?Un2$KPRf_0nWJKfP+uJ;QSw6dftzU z9$=sNKoc?T+*9YBac=kXe$F{%dMWVEA%` zk&dtUJGZ-^H_|ctPZoCfj}N>35XqQ!_E)Z-ZlT@M;&$4_y3Ro z%4;^jALPrt;#I0=HSJwbcbygrgisxX%AocaHEeC2xVrYQue~em_!=9lY@B_KRrZW> zCNWlFC5*2-KWEsj=3%VHx?9$Xf;Wt?XSWbBRnLX0t?EAFYfja7-a%E|K0= z$y^^(jF{5H6c^@-*({XJDwzvq?^?MjBy_#&Ti8GVg-RPPzEB3%X}?+mUXU`R|=QZDF?)VQ60Y zJGXm?_ZXUgEj}cgpZV49*Zm4K|3*o6Xh}8<%@6aXp!rGA{1_K*d_HUizR{i&`@OJn zx|u+Rwdw`cRqey8AGeOJ=C+TuP@K}T*Jy`8XAe7~*l*rmwtjZ*OB^HU9AI0~Ig_zT ztZ%;Bu`v8!#3c7*_9<*BF6vY=w2zKG=n=NWogJzlv}aPLe%Kxsm){y)TJ6?4pSNIy z|NAVEMpx0@TEJpGxt2KN%1C)n^xUMG z_Ay?uZt0Ud2Rfuluh@{1DA-8Z8y(-W5TDiBwey)abINZzQt+P6;T7YFpGS)>iyLHK zCA-_N+PtH2vLd6E^9ugC=_v*-B^cM^-3lGoS1`8^!awp z*R-x=;cw&lK`TGotyY|Cc&l{@ZT#V3&vkutC+vNASz;pcCaKALaV_n4M-R?-_YVWA zD_h@c*>I0O*}f>W`SkB3D9?6|>BIv3M*B1jMXUQwh!(j1t%8t;doSpIJCLv7`p-{2 z_w3UT-Tx~GcfTX9Z*w-Y``y9FS>5kN9(cedvyM6atH+kr?MRuj+borS+HXAnSj%syhEpt+0;$X8mtmX&pPz z@qe}Ax>j4q&b0nltF9;d(->D@*UIboTPv{py=06lvd4X5jC;VC=;5OFVNv_5sJ&Ix z9x7r_TR*(ipK_E z=rgF>?~|!D!nNMOrilvw=4TPT_3HmK9Gi{)|3Wy|`>(bS$iL%O0=4iitp&zy1!`el zY*p%i<8}kJh_ALENMT=XMNkX=hFcR%Jmo!ZPcSYXoVW>^%DSLed^mniI|Yg3?^>id z{RhSa{^QF_!Gl z{fI*@)MykeOXBbZ;Su@VywF_G+cLwpBjIzD^2djp7|!n>=Nt+69*VmfR)&Nx#d9}w z_Kypit}B`>+K`*=IoMi0*{`|0 z=*7+@Vb(r+hiyZLhId-DF_c*(W(%2M@6aB-vvoyCAqoEGQ%cj++Da6Swq|~;I*k!V zshuL+ZKO7GvBy~vs;GUcxfF(>VxvvlE)+FxCSrd(TFzMe#V*}-5OlKlX!6J0O!wZ_ z*>TGQX{W=gYoi78Z6?}o6F<0T&u{8`>jXZ&Hmu8KTC!qF_5&IzGD4FqY^z@@w#yRPXP6J0fEgm(aF`n0G0W zEz58UulorkH?I%Hyp5z2jB--nu8Zzf)p68=QS_dn8acDM!=pa#-i2%0YNqTX+e17sp*Gi#u0`W|0b88Eb5xZM)ZX&Zd%! 
zs_Cd>zySAwHVbCrTDx_fda9aNtd+I)=Gc3)zuCE%AS{i~dT^4{4Q~Ho;zGO`r^VkF zR}8-9BA^R9pD4BvdZP`L+fPE2z^P|Lbt?Dws|({~&TVCi0E;BB+z?X|+7!Q3U{$~s z*4Q3f$<^%x+lnQUmD+O~arnU%04vwLq0u#*cRRRC^=t=9Gz++-tIxrrRU zwbw_VZPyovuAQ*f42Sivl1?04xwP!2HZy?D_e2+wfJ<5NkaMuR-xKzhbK1+@u=(xQ zo}pTWY}dkv-2wpu`Q0$5Ii?G3ETugOia~zS^}Nw;L-r(#o&%ZxjNTR&H->`(`rbgO z4V@pDiS5r7O(-zI(}iIV;OOb9(1JV6D3)OY-D?=>(2l>-ZjmX|#}@%NY69Ei?kVVE zVjWygHE0Pi0-!!FZoj*vOn9@;h;v`1VZ`Dfcqc_T4S!^yR8ZUPfx${UKCEXn-f)`S zFS3n4sPoZs@1jHy$W*0*_(kXRlGo5cAWDtf2}lkpr_LWIz&Q;}kU>J9u!1j54Fc=B zJR%U19UAO{RYBg^yD*Jlb#ia$=x>T{ofWvnpyOs{09;>e-qecb5jJcvH}{289f!}p z-ZGY(Z*h=s^GfysCd#)0)@yi!fPzF#L0i^S^RsCRTb6rRQx%eGs1`F&+MijDLn2h{ zXvktybhpYWK>B(jEI6)PWyj*4i>Q9YNMg+lOH!8fM+Pm}FG7W?3gi*^&?H{*y zY=3ravdlzL!fwn7+PEk(4V+BNn}ME)h??h#qH94}oM|Dkf9G-SCc0E%K9rLSQ@!Q5fL7Q=`;QOB=`Kr2AS z!tgdG2!*2GV2IfIp$9}|sBRd@K&~6A(M+~d&-$u<4S1;;19*<&vS#E*}=f* ztumGG58`6o8C}mPXn({aY;iU(FQf?q8QYgTPBt>3d#x4bXJ(xlWf@TyE4zo<`^J$c zv3CY<0af-1)4UtYq_PMg)(975xT$k-F=H_!MME((#C!rnFn#sp)A^n9`NYKQrYXzHi*0+}VDHNb?y+C?oNCh$ikF z@gwD9pC;~HA8MiJc@f58pYQM7?gie#VeiXK<-+*VsNyd2D|8gylJ4S?ZkUc@fiFcz z@%u#;mkz8$I2wY{o%{~SdyNS;_Zd5f6DM%KRE%K43?@`{ zVG0w*Fe#o4!DNUdLl7B4$Phl(E__4OSPK{9WK8W^Hw1_wI;_vVgut+NWm}5~Yw=)+ z25Uo^`jpF{f^V3LkAyD8aJ{m&l%}QlETv{CBTHFW%DGaom13+ESFI}tKvEjjD``rh zQp%E2a+IQ?6c42^C`CUh>q*H@igHqFlQNo3+p6m$3Q5^VN;*=yk#dbe)n7^_rcnKn zEc6NyQgD!xf)o&>Xdp!Zy*vC(WhY4iy_*}R()l-yf>Q)81-AfL%v+qAh! zxy67Qr*2!eD$^*fX>DIj)6-1T^d)0mYI8#_k!-in5Bg(jH^u-iR8l+Ygw)r4)CPeIt0`#ZOLhd07Dd-cNZoss8Hqztv`CDVr+y8fz7 zUHeUN!u%#oZ(>!xwy4c=Ew&J+e#`o6@wM_=cCEP?jO|&2q;;*aiUkKbu*i)o# z)gv^w-dXIdbe1`5oCS`w+R_$wF0r76{b$&IhTUfhe?!O{)}Fa=g#}AkudrNM5#s|b zSC;X)%X>lhZtr;`f8H~St0I+Ayr-nP*L!RfR|}3z8bC-3Lo#O{5<3YsUPa6bg0x1;>8-po6~i?Td~~ z!;Xi2n}710cTb;xN})|X-@CtvZ>QFT>5=$#+DQ3xdQFlZjY!D+n7?zok9!9)-@UN= zM0_|W=+U{8(4$|i;r;)=sL_&XoByAaqBN~bLKLQS4HP zR1e#;k;Je)SCTyMJvM9`;=^SzY`-1^$V5o|Z>Ew!Gb4P>8I*G%r-5+Bd*_oezBcKb zPgzB`%<`5hwxld$j-q> zeEFdB&N$|jgD*{M>5V>*eBIaCuZ?^o5(Qt>lc>8X5*+4BZBO=QM0`ucNC_P-RAcj7 zJ_R=aC4c93f8854&pDT9Wi&F)mNYPWf$dw^lZ7SOuR>4~Cxzmy(732%Tv$>IOa?_J zDKts3$++Mot+oDNh)%|>qtiaRwvpK6e5{qLf^SyCVACgsdy&OH07zL@pDaivMH9A`gBw9yW=JdX|xa`o3!Ar zt+>NTAq^91V}vmigv~KO5ux8G^YbrzFX;ZJ_q>r~duH)(MJluSuas23?Oo5}zbZWP z!?9WX9@iHl&vDAw``&n;S?D!#|0be`1mPIVQThyuBjWvqMf|_0ltE_yT6^?~ zP|&KOX#8kcqb7t}!7mq{lyHAp(M3I2eZENX@1?Q|O%FwbR3Z8IT_&8Zrjp+Kq%FRd zYAj~rLr35%URL}KC0y1djg6LBFkTNem|7)yjk+jWy(D5o8KA@T@xH7rG38B*mg+Iu z5ek_vmo`|I6iAiGlHoZT4|=Fqs4wP5$>-9$6#e#%;ekR;Pt)&5H8)oKM8`%$OBGeA z_L(@hbx^fe6nX(ek@&1PZ&3M3ai&(5>gc9yXf`oV;ICd9HV*WOig-0n>HGUIEBt`X z;#68IW5?28@L?+A!?O7KzUr*d-R@nKc5Q!$<*h!IEid(%I%3wxxCb(;V_mg(v~l#cbORAJ zKU)-D^oydsDYtA*L4*>S%6onFqoQ(KTrDW+6v_QBRq6AUW7E`WPgU=W^U47|H=dfq z*H_nszVF%6pht>Q8@^s&omF&lcksi>jznv&t3K53{xWC=%JMsfX8bj8H&A=e8#x)d z=woJPcK?P?eUY=ee{(Rh)&1SU$aeQ{mB^aI`8~g8jJ$|VE*#D!ryn`z=rd={`oV!I z`uW>FkG$;b?B5-EB@zXP)03$CcOt=SzSJA6zZ(&6h!`oMjY2ht^Y?rT+W7na&h7qz zchJV4@4T@4_aoD0NrS^#zelgdeDFH`|7t7x$^Hgkw55NcV4CV*D4!<(7iy`=|J4@z zQ~j;2`6vH7?#^zq(72ns$wIyE?}mrRo#IUvs!tS5_P0K1F!|s548ml8>$3^N|Aty` zs(*Vorx}KYLK!&OzwvVq!$QFmPyH{Hi&Our&rS^gnb@G4p8IQ^ShT3)bzxpJ|WPis`h)f?v!$RYyRfdJePp}LN^}1*r9`1GAI4oSBgqiAZeM)9af9q2;Q~j+^ z*i8L5RO?&%SG$><{BM0b?9_kjlRaDd+k5h7s&IWOXlwrVo*bGyTzlKt(%*5nnp+C> zpGKPOckd~ssl(&vm$oL9`}nPU=lQ3tdDmy7w)D3?F}0A=xrnc=#an{>? 
z6MVhhCry*y);mRN+wdlTds`1l-P?UqtoOFw6vc1%p;&&q54qyoeF!1n)DIrAQs3^Aa@V)}CZT=1&s#$dHufHJ91IUX_Q=*?iYEodZ|j{><+uAL zg5HKFjU%??c&Ai+8{VYo+wi0<_@5ZyJGZeo~L{4&N+d-pRw7 z=AArgk@^nbY0$cDZ$?1d{QROA5N*So&CxbLOmw&Td9pU|-G%^_(>r-mSM^TbX@EBMp@8eYOU5Z!8sowSB^2z?jQS-_F?OHrdPW29Z#Hs&%w0NnU>YZBN$=-WR za&39Whfa8^fAwMa;eS()Jo!8H%Uk*vy636?g;sj%f2p(H(!17aPxUwT-BZ20{~$Cu zG*dRZ!Q=k^@v zyVZHm_p`<7%;`8`N5!dlT1V}sV_Xx-9s3J|Eyn^EI~jX~GpwVJmz0#Ibc0l%50#cH zubji9al9t3Xnlm!>u{~CcT#4PrPONY0Hvhrd^ip)J81|U?ohauH?wx!%gBRrW};H0 zNzR5)UoLeHFtn2$r}^XZO9yvvaP!<|PVI}XHtbV904@uyCOxb4)TK1=tF(MNRd6}3 z%YQ!IraEX@e^D|d@b8QxfYI3(DCR7oJB6jnxzGJX%k^s ztkUl)*>fF)InAltZs%LYvX#c!qvcZTE!>g_s@xHp{R?2gS%F_IKKcVH3eF6MN;-OH zs$i+9dmpJr?+F7oXme5LGX+s31#?5Rx+vzs(8HI52@uG*vG?}AeaXr{5!Dsx?#CJECejs$<5rwgtoMjt4SDtJ(Mx)hwiN|GT9 zm@4&lN3B0qZU~7(sJsRgQxzBt2$@tO=f+{!t%2vShjYYmo3X01+5Ovmr)zTJLe3~- z_8M>N-_(kB7|}e{*}W4E_1Be@213E0p^JCoc~@MX_>nLPP^w1g zA51rg&?9ah%(xAv&v7ob<>wFV96jdB*BnwP{PGxJ%{Cd#jm3uSVm2$^(bLY1GJDhy z42WW);>rTwB?TMb1VxwS?dy_4r@?bxN~^8#OY)?l(Dp@+H{!nG4F$6ks-_GSgtbas zF*eza+Xv;3niscEsJpLAs)r1EB@0ws5@t0UoS^Sq%>RA+g7#%O94zTpP2q8H`FwbRN8irZiajY%P%~ErWb8diznS`Kf_d-XY_cNml7$nV}0fS<7 z&d@0k>abafyXlWiQ=&i_wM%AIGJM%H!>3!IwUWP#FS>#=90>4fVY*3g#wTsm2#8kN z9D@#83SZ9e#{Si(i_Mpk6`O6<42jVaDOZhQNI*uDp!n*}8SS~n5sr4t1IX$~e7iHc z7?jeIhNwOmL_jrHR^PF1@7n2>xtdiq^ukO*;ncRO5KpExmiqfSX)wWNy3R6&-&iK1 zH2uh~Wa|owqoUyNH2ZRg`}6F30xa`_ghyNNs#nj~VsTT~*Mmeo;JWD;2U!3(VU{x! zUfxph62OeMF<*)PpAy%D!iWTcR)HBjGewloD$Oc@3MU94$jYh}*tCv&)uW^W;mjeiO0(ds6b11F?+D=v1)WR$1Yr3jPgs; z(D|G)**T*EMk~wImDMcM(cADKKL~S8YYG;PA15u=ToopgB|_)iQ5!Obs=aAEF0{zZ zjEg>2xy~84_PU!)Bd5pUnUuA*owm5aCT$iJ}>UP$`MLjlo6B*Sq;QeEr6jCYb_o7S#PiEK&In3e=qiO z>N7a7UN&S%ecJ3%AIgQJMnhrodh6tl;O?U3Hm@*IVNNbx%_or`uKs-uP3u&aGcpHGmteU^#+3zGZ>Rz&Zy?&+2VD%((Nv=^vVljoPUbsG10gu# zlkBk(lxsbw$K&u&W1mx?ljQ|Da$Q26V^ep27>vGAkGAv@>st0sBMV3aBzgxcZ<7UIxqaayRO5jjpxOXd zL(wTUfyH{55!;+G))p+9`KB7l3hV;mK@9>fiu;XjQAo_K1=$dCO}tLCPPMW^oJ_7g+An$50Y+vKzFuF@ zLfEz#{nqz~3)iQ7Jy$-UW)Az zNN|E`x3MWdUSsPXBz)k72WA=PrO|_!F$muMTC=b5sUZZNjF@(pF$nUJGX^~eQ;WRi zivhabPBuq}wH)5cr2>;*=^(ne%oN;;PZ-Qhth2!HQN9_8SU?Sv4|-drB)&Or9u7t+*ydZ)yg=p46-V@!Oa<+)59Rfj!8bH94!psbURVPuZ841K=(P{p%ZwocRJ(ll7Bd=M@W zi#67;yk=q_FZr~Xeq%Y5twjJPhbNInR<$iHc!Pf2N;+{ACODk3B@!4Sga_&55k=6RXJtTQk5D5nL!o0YOmR>t2Y?(`LO*^n{CYb1;ha# zFATaq$_6$l4Ln^_s=BGn6k}+me+nnGPxV>jCH^)sV=FGKlt~-a1C-zZf=P2Mg|-`d_Ocv@SxZA^WHYz8 ztiMg`G8bI&K$qoZ%&k>Rh=WeDzi}5Ar7NoYCJ`1Tnpw7-Hhrsu?J8he( z2JX70Z8?2P@|Xj6!~g^#3rih@2No^ zgR9dL-5pab_yW|&EK?Q?y1(3~oE^{UpDi+|ik2R2L!lmmt25$uUH!~tvcnmzf!p^+ zt7GgDwGB39U+a9bb7X6mqT(?GGH3+ABy2S0hQe1$eZ8S^IZo%S&d=Is*Q3^SbXBz( zVb@%wOXB-&*pH6e-M$ix?yhz6tigC`FziKgjMTmM*>3SYq5WOdwB9^GCcV>agkwp) zmwXmF9Pf72Z?!&D(t$+|qq35ZG8Ps#lL4K6tTUg8!|gIN%E&7&L9nb$Z1gpkz!h;K z_Vu`C_tLgwl*&}AM*@BE*x)D)f?OZZ&&~JF54hW%RZ3MqfT@2`fq%Uf|2ZOMhO8i+ z{}4BrF0yerPthK|heogt88i)n%%oq3GTwdc1!RdQ&PnxK_TP!(m!X>Jv=xqA|0_SNcpCf*6H?mrJTEoJV% zC~De&>Aj%)N8a;B{w@t|-$}!n-GAj%U*xRrzaETib^py^WV`!sOJsfg@Zb40W8@&t zAItH>QxE#l83(lHoImf}bBgDwKlXWKrmwS~8krS|;`m`tqVB(s1Z`j9_~9S&ZuuV~ zq7pGuLWc;|#}EIbPoa+fPyWvB{%3FM=-JmU?EaU?^DQak^x=2)IlL=`3(D~uR_hz# z;x@RqbCc@0Lzi>L?syyOV9LROwC`uH&w!pmJiB&A>I}`9j5GUY!_5?XUGXv1xD`lK z<}^@FgX1(KW*N-VH!XM59yhIQ(|k6KV$<3+tyedVd7~P0NX1Ar%|O$#GmSM_UZxFY z8bmU1WU9z;kPROrymb>jQ!#bgG;RCo-9HJ4!>m6o5dW{<3%Y;iJ#XZQo^knak;=II z_mb+*y~oDoe+Z5oGd3=#85c9>{7y`ha9EC$jupx5Um2nROv}br>^C>^(0@Q+dC*0~ z9hnq1;Y>RO#)d<{Y$8c8cTr}^OsFJc$d7q>dk}fkYOcGuu5qVkaC%6!6%#>tk+yU} zEX+Y63%6wUJ715wUl+Fr91@Gws1hleZY)!f5JI`DJD0@5flx)IYYV9>B}VPaHANo_ z_R|XB*Mrrb9gRLUlHod{{n$Vh4*7hET-jPGNa%NbA(B44ShaDxJpBi8e-S~59*n*f 
zJ=>>T=KN&5E%MwGe~0^bX-*DOX?fIi;)PdRo0nAGzlhN=X8*H{#{cqO(EZ=u^F~fE z60^_vq42cYIo-_e|M7G7X%ix6b*J$lJNwiLk*)6Zh&*LNWV^dvM4o^e>@fY3tGn;= zYsScL@NiP7!LjoWpL5ptPP}x%DHj)N@NS<+e$&@EOQw(fR-_7Y(37gWeWdv9NP!$Y zfgJ1*5x*2MQbNBYRFi`peG0UDkH2%fJ9!7%tyrjZVb4LbOX^qF^~(^a!DnS2=pO|1ImzOfh}@lQrwsb4ddBVpk;>S8u%z16du;4JBslViV`H~GNXDC;cKbm7 z*yI0wt7)xRk{5=<_BY$7bgpg1|5v-kuk0bP5!m20vB4^g1lA&$c#nx{3Y&_UAve0> zwI3gQ?_T%=EW%SkBjPdrp6u+-DSf=mzq`s~_w{P${hW74%FWli2E#3b)#qXwevZn+ zmFxSr-y2pv@obEHIiBp0h4GU;_V+R?diYx>SADouUO(&nw0(Go`}-T>=~O@I{DcSg zARj?NRx;?g2Je4yIHEs3=!AL^84qp+N%Y_)(LE2Mr;DBq6}y%dWxIn+COhPf*YN-c z!s&Sq|8O|KbY_)H1#W89f48enc7d$gb_q>rmGHkOio^Z^Y<2gD$de{Sw!0sX$Rp-|b@m@4 zFQ4#Z#>j1aDireagPCU^_M>lpbI$pfCbO}p&m&8HowLjIk!6u8$V*SE?k6L~9g$*; zynHGmR)`oWp_M{4dHJ+Yfuf)BcW!qtZzwwF$J{x6HZrY|H0aCWIvwr}!j(a|EC`nb z;dUUL3-tTF?)810*V4w3``Y_;`oFHP_RAyhajZR#u*V_wJc6Ew&GRsM9vsi(;c);w zj&~EjZM8q_xH>*Ak57t~KCw@nmIsIPXmITrJ1)D;>+@>g*n^a;repQhczL{Z`~ti< z@EHd=^N{A)QA~_eFYknX@+_`>btWr?n@WZ%c`9sGa#l$RlfOz9E4i#>w6>ntN^051 zaV6#KC7y$PSJKi#PPf2%4RT*7Z4z5-m?w{aB-KT75!-z7fCTm|Ike`uLLND2&JW~C zfL>CJAUCG=z1H*3)|YbO@jPjmp7!NwCjMG6_)hEXO~o3!m{x~{bQ(OTVRINRhgotM z9*1FZ7!Ze%a2NuIp>LS-hIwun=7!;I7}$mxZJNo3DQuX!rWtFPs&@B|`G&51uFSW6 zyccvo?>%qifu33Ug-B&q&M2wA=v~jsUlSgAY;0D(VODN_i=6?AbvadU<#GeWaMUJJ z*v9a!kTPuwv(0H?2;X$Wz`>pQ#Zk^{AsIfa{q>MuT|g2Rn`ntI38BL9+9@M)?#6J<0G1l5x?NTBwNUxIyN8id4a9z|9%hYQm#A(FzNYQlM z8E=x`FNFA&dM<2EU*?=>`)JvD#Igk_SXp0hYVR0gGlnZqhXDVD;887>kH%H=K3h`U z+w&U3F&8+z?tZ6OY)ENhV_jA`EASoFL6PLU74hUtu^kR5+);hFh$bw*8hKCbwxY%7*T zU^L^&)rFS?ybN``LGw9!g5Xx5Ja&Op5y;mTXs;_`8%zq}jmQ@Z2#u_<0eH;(+bNEA#) zPonO@k>JmLshNzK5%CuyMoQ>^3e`-;ET4j$wfvpiZF>hfdlHk;iA?Vr+dJ>X+gvai z$z~*@k*r2C8_90ecMYdUqQP5+(?3vp0Sd+=S&zJWqrQG)ko`yoBw3JtHYBg)$n8-u zBgu;71ibW zh@`KO-n)MLH(&M5zUeb#oNWm=;<+KKS(lo5xwgk?L~<|<6GIG?OiaVa;AZeK`6MgT za5IApO~ca+WosJFreSXK*nM-7{-OGOd-Y4{%UCQMI0_?~>4 ztt0NISLD>NJ~j6<6$6wzOgvD-1`$Q294g<*3=L(8`WU0!j`p!fDZs)ZdHOgcd{Q5i z)NEOCOPHj523e(d$|nVv^v-#t(pWn!df*Ae(x$VXS%r8=WS4}h;BbX$f6 z3ulPIGc9a?KkiZt{R=79nyM-{->oLx|B(Cstk(3NvDbb-8F>3b3c?*bd$c}4pBR_- z9g8)gO==y{a&_l!)Rk?kFJ9?BYLUIG%iEW}zo~=;+_kiiw_~=gZJ)$65$pA4=XAHg z9cJi1AhvS|HmrSK70ye^ym<`0y}F_x$=Wj8o-AH3-I%+$I<$RJ^_ikmTjr2aXwpD| z0SE2UEv->tbcv;=8Mf8cD0nT!qIF20J0X;JLtj-yRd02^7UCux>KkpzM;BFx1-gF& zBDTBV3KE1+9bQP#x4jp1zvDe`WVaAS-#tOrGqd|$pZX$ab-yFRIzNnvttn zxpC=HeREo>uxc4nNUY;u?bI)svVnY zELg*Zr98h8>67YvuwBV{CAWpWN zb|`D$GjYt<0mhqJ)`=ZE$gmap@yY>)m#osGi`ta;)~Fl95(XReuxn%AQ+D{*w=o7b z!W-hw>KEf)O{RD+Mle(f*Tt2$vC`g9?ZP%e*2}B8WyN()n1JXde8bGRRoaSbQ3zRv zMa~-i91iTrauM~@A-Z@q%zrG;PsI9f-CEuHK#Nr8nQHg4vTvw9=x|`0IO)+a)5QV3 zO1d+zTo(LtoWgjq`eDZ%pyVO1wNHz$UCWf#SPD;#+q+M+j2E}ZE8C-N&~4v0#I>a# zvn0hus6S(w=Fhu3+%vx%+=Q+E5*$Kdvy}39=NqxG?_{wEcH<+}MbQ%M!sZ)eCB91P z;7BYUn5PHXBz-ZyvyHH}knLbpo{EcXUETUf96es$!h4;C*(_|#+6vczFIa-tGbHEW z*n*|P;9&4nzNq?UDdn!}=)$^?a$o@-rG*Zm9p>gKb}wq5sr@0_AjG&c*bz*jzJvIp zYWJ}#fj=0FYjAgRy;7Z6L{fKKMkWJI(D-oJ-jR0y1e~vTKH2JC9ObEXK`HMN?*-kTd(Rtrzp`e3ppPrO z)Th}yPl%k=y(}Vknh@FQULKJUlcfOn~ z;Z?rPUqAcL=HFMwq~~+@>iG0PjLFMv60V6~4~>*R57%RIZA607b^gxnUhf_2%mrA$ z8{*580i*AZ@qgojyjqkuH5XXx5yv^feqONV2K)KJbl9D4xsw;H1$=uqPPIeF82>`g zSc~~4(>2y2zR)(-8prmS*0I(+hO6jrjQy*fKi4jvYd6kmHF3nW;1H+2oi{~#{o2R+ zUf$sR@dc29vG-Tiuz* zSy5eiyw$iyqlrt5n8m~e7mOm%Rjgu=8c5DXHG@- z%cQ>orc?51mF&`#Jl225MY0!i6KXCi z9{E>gppZp`^##KZKoEIWXNs^2kw>OsNkjy2IyR$ab}jX(2CCpICoB9C%d-GOZ@1-? 
z{4RX*bt&ujW<(X$=1QLGHCS+8!slOWFKZo=3Kfq zW-xVnWkeX3VV*=|X6ez&^CH}hJgc!k91{%n-(UxetnENm+>V1vq%H<*O{IcYWv%qr z+yWG)EcfG}6;7t@E7Clz?yx*}p(l7J>ac3l>+Jj>Yb1{^VY%u~hRAA*q}Yuht`al2 z?T~rwoiT}!MuaLQ<>O^ZIS1O6UZl&;Rc?Y2vH2pXW;MH$J578s2|}*nlq13-lp%#f z1R>++VUz{-qeI^?(zs9Ym}rBU_rNfD9@_wMSG<2Ho~KJn zPg!0>oj&eomI5#B)lLG?1nkc8oczXMtH>I_?8SlUs@&iRB%afsb-N`-r zHji4ttI7h%=?ku57aaDC*%>3f?gJU^LCX+qt*n#XZ*CN*Yd?kbf`kH;1iJ{puocg9 zonTRBn9eE;Q_(fZB}QOG#4;|??c9W2iU7}m*_D^C8aI#M>K4XD@Z@ur&}Rl z)7BZ7qd70}q4&ji#^6 zNeLDD9Xg2*sl4f;LMkMpc56*aF950Bwgt)>&j| z=?2L^iVNWUVf;ZN8cY^D4?E(h?i}F@fCo6fzh;Z{M&WHHYLsw!=Po<7Ne?R+9b+C% z2pGr^EMN#aECI+pR6=H&ZR;Ez#3@AxcAW}srp+saN6FGjgO;0gx4}~6I%FrnA09`r z52uR(sbo^pmVMlYds6{{hJz32`IY*5v>XYMRw32V2%g71-bn5@U=Z<(l5Ghf8_jeX zjB8zB+ogy`&ZysE_BSAXk16Ha61n2lmh)h&t94b^E_;| zJCk!v%6!5MMm)@=YJf6t41-~o0zYtnTgbL?+^U3~>1CNfY5xl3rnE=sBA&oiJcK49 z&hq4C_MCQ;UwI&P5WzR;#lu=nM<(q{HXYZIwuSYi<+z7hW77tTnif}q1&v{&h|gCM zqtGeh!YX1MUKVjyx`e;NSO<%-FPvVs{yuR&g21RSK`FLh3RU z0sFirg1HP?F7Gn5nIf^BE&Oc`TS;L%O*>|W*L6+mF{B@GbC^NUf{aS1G-&OvUAcYT zcICgacFX7!R|Ps$f*j;9ELY*XY+aVGjmJpI!=~Omldi*Q)YKGn0(Rv*^O|q*BW#M5YieQd*wmC#ywC{;^VftkIHgoZiR|s4HLjY5W83e~+w!_rOz`{7-PjC{r02Du<_~A2tl|jgx3$c-=8#GEg_vFyV^5Uak{73MSdHe&n3LZ3pDdgp z8^uFXU372*Yvt+2g1sfAFuFJN8>=t|vfOj2tqS6)}W??gUPwobpR(6M==FP-I@G$Hq{2IG$5f4hQfGj@X$Q(Jrw)R^WIerC3iu@unqggq>bW|=`QeI zXTO>5^0vN(8A94!4p*yHVrul5%-Zb?9ORS(v$tEmGo2>WX50ksR`9$Y1BwcM%Hd9f zuD;|se5R%x3^}cUrsDH?F7N!^fz3K~n+lBJb#OPaZ7nTe@V=qFwqxHYXBla?j;}%H=YHBJJ}5ZaS89Gq=dB$kFfOMp@O|V0eSY;I<&JnN1RS28xbaVA0^Ng)1WY9boSV$U@8Db$& zC;~V4SY#3*=3W|vls#7nYYBn+bwyXKk;J?V30&9TBC#|v33P@yFlI%>%@=Id+Nn(u zS(A7WFF~k1GPcXzJa>f=5eE^~X~WYf5fXS&pYDwHChYF;Dpue}Xg*K6%$mjTWzXgU zLKu=k7C1+}xe0dc=aQQvB1KBejL9I&!Ybw`&wHccR{K3^iq@ z94Oz%Ys}8MQ9f$7#aP6*C*$nLgOP|?dspF;Us{q3jeDZjM31nI%;ixD9cqSd%JMuz zzH$=Mfy3B+a-;U4#^nu_sGZG=KSa;tStt~lMVUT+E}j%$XdVmm6rw2b3n-EPe_jX~ zIwt+Wt{!`mqh)Iaxa7^Sn|CH3*PBbapDzc{buzg>qo20s!!izN!fJMxz(R}4Swclx zr_b~N9bun!vryJ$dLO*N_}&Zxp2L@>6JH`aS=M)U>&0NdNyg%iSg(R>+RS)j)QCnI z4=Ieo8<41u_auOjorO!-xS1^t6hiq*6uzw?&&2qS4f*eqwuL_=Z7J?#wAaYw{lXtn z)HJ_JP;U(16Y<$9VidkF;+!gC9R66u>FHASC!F;xo{#cw#=<_01B(5M4-IW9PBfAJ zPbn^5NNLj#ix;Vqma1H(@Mo%UF(s+{*bhXw6hhH*myt`Csz0YlQ~NLY+Z_IqR8#wC zma4zfMORT(OV#8uD(8!oWP1OXng?CFPZub62up(tW&CqDFkhs6nW~lhwLqojy!^tX zMahkIZmt{L;Gxdx(&YMpYXr3hyHqd8FICkGmBIq+7b&o}{5q8_RoQCAn&k#Ox8bp7 z<(DlWH$Tu9GL?Zfi=`^RS{o^Q?eOSYelY@6b6m|=BFEBv4PpVR$Ik%I zT+ygmhBALFQ#3MN!?g|9H!850jB*~KSAoN{K2_rLq=mT8xxrz7t>MWu{GkoczaeFR z5z>}ooEyYHQY{AY-&w66lct0C@5vWGULM3#7{sISrHbB*_m12_))gisg}7IAG9Xg{ zcCw0iF*r6X74rc5(3U!)zR|UEuHI01EMnOi3|3P(lfY?wW!U^}L_8=~05x1^2L2;= zh~pfX6ZI_=qKgDtO^Ny!9!mzqO@(o6X>)Y)uBaKm>iY@r*6v%+;o1Hv@u}X0i=)$` zPr^B2fD|SnV(B~{G%C3}l2cK1fjOYeu}v5N(6Ram{K_27Kxd>d1Jzs!|IDC=)03yHIPtR3xnAIip*yKiNK*s zPvaeyM7@*S1CCF|6*P9{$Dqs& z)@WFCeR5$OexgCjQ2wb6(tjXr3;#&kQv3wtvjz|97ygN&nsuTv{7l4|Rf|Uy{#nEa ztB7&<7ZC|wAAFfM$M`vCIgn^X7a(%_M6$gag#%m)(h_c1Np&}N8Y&~$*C0h(?mVp}W1DQ{o$U(MX9yv#o%^;;qrDXt305||gfHrU-5<(#ZB_ldU;T0$aiy)<@ zS0?Y5P{A>1AYWv5PSFH7w%S1v5;IV>L&1JIk3bJVQl@feGC}UC{FM_X5D%A9j`+L5 zF4`iO050PKXv?T5CVfw#l})sebgvK}zSE2)_}d~Z?W4`_dy&7U6MZ1toNdAJa}5+R>{``V^+5F z&6L2a^Frt7 z8b#+~Q6uPAdX&6p=@gC*Sw3{H?Fn|DuCgrxJNa5O>USmYm*+6gxWRzK9eNr6uFsn7 zW`A;$kg781M21B>pYrKbN;rnWqp|l68&$`WwuR$J4*@zIPf;D{bV5Bb3MbYR<8YD@ zQ@;C7&T_kkw2ipC?YgNCj=T4&!Nzw_rr384IU%=eNUl~mMKx^KkUq|M-z9?W8e+Np zt|6yVr0H`Sf1AU*Ni}`GKQp{X*QA?<{5nJD-2pJQvAaxPk>}mkF3dia>J{!$*)qhX zdIz1Vo%brBlS599=1ZnO7|@h=!mC8tDnO$a<*#S-h$xltqj^$#LQ5-ON$cX+>~DsBTZh3rY&e-k)VcFMSFo7iE$A07$cOzG8FlS z36PCWI}@CR;5ta3yNz*RcgCK&9u5-0T(=}RKY=^26Eo&i_zzqQ=C;8Q510~+PyyuG8cxaa~skZ6NG18 
znbyp)1!JE5k^_$Qyf1fLt!Rno5oCQeAOfJnZo^XnWoFxhX$P%=vfSQtuD;n4S>R#i zPin8KSEQzxoqaBX_@^UQzUdLN{rqSoop*b5A_1Id5qZc+>WnVu=V9()@rgnYjY7ui zxi$*lPudnfKzfMn7(Pf*9sB-JJuwRB)f40JVG$Rm?E52}<;3Qe+Xvn~aqQ4RW3Ro@ z*!TGq*NM%!O5p-k$lF#g6v4!1%jL!9izw1;>&f5da51T7TieWViLOb-=00$5cXD?3 zX2JGvZy!yCUpXwNe6zA4lCTaJXIQQTdbOeCQfgjMb^B%>rf@6g z^c0=Q44u#9BJpwg@gvVkGmHYvJP(_@?@#9bv$ER%xE?v*+z>8R_cDwwv+nIh%B~)y z)e`7ORf~^sh1I%}wEPizlg%VheT1{)zCgCohhZZ`tfKk@ZE7DDCM&#$xyvkL@{mX5 zBV~4y!q`AOnIy~~4j9TJIm_s(=j{@v(_&3enCT=6Bldc4;J4DmSUU>6@wnlKgbMhp zEQw0Xi7$Q}784CC+!!5O=mVw^-A{w?*SI;pyD)_<>aN2`sdzGg)JZV`?zniugnJX& zBz+pg=4;UqO~E;a5fcKB97zDYq|J8J{P@(U2W(Yw>yy~H5}cLR0^y^LuhD++3)R?T z5?Ta9GAm zBSlR&R}mXSB;tT7ViaN#<0@ht3L;+4{%t=)15P;US^Oa`*M_N1zIM>KzT;c3xT&vW zs#jB7{98(!mKA@bN}M<3Dus`!!jCBtrg|3Uzuyz#ClHF3`zg5;Q@w^FjiQh9w>k7D z)hN1pX1G?@{G6JCRM+!_^UQ70C$V-!8;u)-z)Hmylw1Qn%;a=zab*rz%pgVPXJ>AT z9*HjLRx89AmuQ%cOhub@VSrwU?>5k0=sHq@sYzRkzs$Y%^{T~d-(a3e0*lyCamgA{iGN3WI^F}))LlhaHI1W`b6W)nRyYOV}Lt1 zMZ=TcLY}ZQ0Oe_C`svBALO;Ua2PUPZgua9MmzG8{QY;s~1@42t(22}5%u>-~(Z}M` zG3LkongLVl(FiZ#ZP9_J6;F9aAvEb7c+*eXI}9dm3qwd-ipSCZrpBrv)-T*dQB$uf zVq>^j#Ph3&Q5Y&>k1ApunngT?5&Atw=q;S|ERKS=*a#gs^tK^)-`6tq#^JvA&#e>} z$57hz{^Gr=#Cb!mQn*bOS}93U|Jy~-9;8LfjVG6m&^sv7r_xQD!(F8MRJYCyck7zT z)YJ%lizghPIqyjHmzz!sz^8kS-i08ZisHUWjiO%|ygq(2_#^{P95q2YcLAN{Ccok^ zoP9(yX0g@ou)SYEKI*}bnHFR07E(tnb~of*TT$xKf?CXHz|E+9_0$#v;hIw2F9c6)_G)5#ODPUE?@QBjtKcoz!&0kT7KOggz#7 zji-!AxnGnc<#NU0J{4J{oZK@e=xlA#{$!DI6Gh~!PU3HKxSy1-YSylx5{s0ZV5Y8W zuZ-W`CnGg;Y39g1T>+5_i9DD;Mnyrc_+g~#vG11Y`(#!W(7y(nAvphuJ#aj16F3k@(GT(n5o=Ij4m{0vSKD8V#ccX zsc^|KZ`ImRy~{uz{@iSy;5F*^8|FusH*$5$q0a1F6`zaa>vp89=;aQIR_t9VJ4!8G zc-&3a;A672*|2iLgAuSI?9*bGmdhFW zRQZu<9~}(oY{RIK6h+~TlCFny!5+DMu}7d{)%z>CN~a$)mp8l)xRltay?Bw6oR#{$ z@f`HSuClin!Sh|o8N%kyQ;txmJ;?{{QuOyO%pbIXkVi%8KVy4Lzm;$^otSQiT~2Xv(}5;rqgaSU#FNX zbz>lI@_;3@fzE~krc1g}biR6ba+DIB4!ut8pH@$d!t{D#9G);@y8g`ItY>i( zv&ki~kyEb-(fz$9KYWEPK~GX#e2vnkUlup366XyAE0w}bRoF@ieu8E#n5fn( zIr8c9gS;P3p(dx)?Pp)nwjpq+q=Kpr&j1(p-bb5F!zN~uP}yfP^V z_#&_4FT$xm9YmBnd#8~?^vYzoEst2ATrN#ekK(S}k`hBnbrE@|_;s|7CvebYUYcr@ z@&pOyIt`2TxfvAC+vi?P+7_0OwiN$>PhG=-`h|9iYKV=Yqn;RrrS-%(EHh%tmX>qY z^Uf1sHe*`HkDf5%zR_U?B;!$I$22$HdP6+vrosJA;cxr@VU5%n`2t@qtKeDc^4lSYR%koZ9i<76+Y_+1oF46XOJP8c!1 zTxTuxq7`DpZSZHbP8c;gyhK6D>((jHTv^ZG=CFZObLEbiVWSGpx|1GkXzd+3Ih*s; z?C`Qxd&R09GxnSzLxv3cHUC-CM89vMq($8E3tadAg*&>yoEf4rTa-!8;ZeV-_+AgbhuwLCwiS_C+;#tgpX0@lz8hp9VQcAN58L>w6@l#LtrveabJQNNno+tGT z_u+|tN9>pMNG?3ZMfxsXd}l0VK*^LZ%T&>sQg;MlEg#j!L!DU%R9Vz~BF(tJnQ z*`9WPFf7!iPI9RSe* zPI?x*a6h#v9}O6FTgy#13~U?izi{&=#l?McPW?e~zbbLwkgF8FpbDR-L;#URFo+4u6R?CEku&Y`HthtiT! 
zl=Hf>+o1CdU}cJRIU8_+$#rPd=*-gMV6Nl2ZmK@0;bnIUm62hf5jId*+IFUJXK@TQHW}Bvv3QwYBCHJd_TCndCkw59k~ZlZldwU}t%v|4YG zrW5U3jkLPV4TCL{tUWJp@5O;BbDOT+^Q6K{upK=mp80$7J4 zUgs%@S#k_w4H70?4uJ;w#*I=dQ;`U+M=1xg9D>s_tK)*vGB#V*4KP{5F$t+$x@0b5Rcp`NgTQ3!|pVTMV0^HJl=@k$OH);Z0DsVcpxyYUx&m zhr`)xxR+H!D+z3w{f(<}_7c6$I%T4e@5kXsB~^cbJ!DSJ^;Ruk)( z{-&vKk)7^kZEY>181W?70G|^GERlDI>}tFcolh$_C)n(HFS>s^OB+VcbN<_^x*=0Z1uNY*U<~@3M=6wtxQUMRC)-Xk7w-T z;ge0rLcuc0W3lUij&Mgt(+Ud+=V?%=z-!U@gffz$S6|mBE^U*49PS}xxgXyc+mPR{ zgO1m}+wysyCPzB_cbOUV?5Bjlvq$$B6Axx= 0.9) + assert item['valid@0.9'] / item['total_frames'] >= 0.2 + + dataset = PoseDataset( + ann_file=ann_file, + pipeline=[], + valid_ratio=0.3, + box_thre='0.7', + data_prefix=data_prefix) + assert len(dataset) == 87 + for item in dataset: + assert item['filename'].startswith(data_prefix) + assert np.all(item['box_score'][item['anno_inds']] >= 0.7) + assert item['valid@0.7'] / item['total_frames'] >= 0.3 + + with pytest.raises(AssertionError): + dataset = PoseDataset( + ann_file=ann_file, + pipeline=[], + valid_ratio=0.2, + box_thre='0.55', + data_prefix=data_prefix) From c0477d36aa67421123ace37d67eb0f6803350fb6 Mon Sep 17 00:00:00 2001 From: Kenny Date: Sat, 1 May 2021 16:14:12 +0800 Subject: [PATCH 088/414] fix bug --- tests/test_data/test_datasets/test_pose_dataset.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tests/test_data/test_datasets/test_pose_dataset.py b/tests/test_data/test_datasets/test_pose_dataset.py index 419289698b..f739ef9cd0 100644 --- a/tests/test_data/test_datasets/test_pose_dataset.py +++ b/tests/test_data/test_datasets/test_pose_dataset.py @@ -1,10 +1,11 @@ -import numpy as np -import pytest +import numpy as np # isort: skip +import pytest # isort: skip -from mmaction.datasets import PoseDataset +from mmaction.datasets import PoseDataset # isort: skip +from .base import BaseTestDataset # isort: skip -def TestPoseDataset(BaseTestDataset): +class TestPoseDataset(BaseTestDataset): def test_pose_dataset(self): ann_file = self.pose_ann_file From fa48dee0e36b3886c366fab204cc17fcdea75215 Mon Sep 17 00:00:00 2001 From: Kenny Date: Sat, 1 May 2021 16:14:34 +0800 Subject: [PATCH 089/414] remove isort skip --- tests/test_data/test_datasets/test_pose_dataset.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/test_data/test_datasets/test_pose_dataset.py b/tests/test_data/test_datasets/test_pose_dataset.py index f739ef9cd0..a1ac573851 100644 --- a/tests/test_data/test_datasets/test_pose_dataset.py +++ b/tests/test_data/test_datasets/test_pose_dataset.py @@ -1,8 +1,8 @@ -import numpy as np # isort: skip -import pytest # isort: skip +import numpy as np +import pytest -from mmaction.datasets import PoseDataset # isort: skip -from .base import BaseTestDataset # isort: skip +from mmaction.datasets import PoseDataset +from .base import BaseTestDataset class TestPoseDataset(BaseTestDataset): From 17824f33d788f8bbf772c5a03334257faeb45563 Mon Sep 17 00:00:00 2001 From: Kenny Date: Sat, 1 May 2021 22:12:42 +0800 Subject: [PATCH 090/414] resolve comments --- mmaction/datasets/pipelines/augmentations.py | 15 ++++++------ mmaction/datasets/pipelines/pose_loading.py | 14 +++++------ mmaction/datasets/pose_dataset.py | 24 +++++++++---------- .../test_loadings/test_pose_loading.py | 4 ++-- 4 files changed, 28 insertions(+), 29 deletions(-) diff --git a/mmaction/datasets/pipelines/augmentations.py b/mmaction/datasets/pipelines/augmentations.py index 6195c95d98..821b14f4f1 100644 --- 
From 17824f33d788f8bbf772c5a03334257faeb45563 Mon Sep 17 00:00:00 2001
From: Kenny
Date: Sat, 1 May 2021 22:12:42 +0800
Subject: [PATCH 090/414] resolve comments

---
 mmaction/datasets/pipelines/augmentations.py | 15 ++++++------
 mmaction/datasets/pipelines/pose_loading.py | 14 +++++------
 mmaction/datasets/pose_dataset.py | 24 +++++++++----------
 .../test_loadings/test_pose_loading.py | 4 ++--
 4 files changed, 28 insertions(+), 29 deletions(-)

diff --git a/mmaction/datasets/pipelines/augmentations.py b/mmaction/datasets/pipelines/augmentations.py
index 6195c95d98..821b14f4f1 100644
--- a/mmaction/datasets/pipelines/augmentations.py
+++ b/mmaction/datasets/pipelines/augmentations.py
@@ -1211,9 +1211,9 @@ class Flip:
 flip_label_map (Dict[int, int] | None): Transform the label of the
 flipped image with the specific label. Default: None.
 left (list[int]): Indexes of left keypoints, used to flip keypoints.
- Default: [1, 3, 5, 7, 9, 11, 13, 15]. (COCO-17P keypoints)
+ Default: None.
 right (list[int]): Indexes of right keypoints, used to flip keypoints.
- Default: [2, 4, 6, 8, 10, 12, 14, 16]. (COCO-17P keypoints)
+ Default: None.
 lazy (bool): Determine whether to apply lazy operation. Default: False.
 """
 _directions = ['horizontal', 'vertical']
@@ -1222,8 +1222,8 @@ def __init__(self,
 flip_ratio=0.5,
 direction='horizontal',
 flip_label_map=None,
- left=[1, 3, 5, 7, 9, 11, 13, 15],
- right=[2, 4, 6, 8, 10, 12, 14, 16],
+ left=None,
+ right=None,
 lazy=False):
 if direction not in self._directions:
 raise ValueError(f'Direction {direction} is not supported. '
@@ -1248,9 +1248,10 @@ def _flip_kps(self, kps, kpscores, img_width):
 kp_x = kps[..., 0]
 kp_x[kp_x != 0] = img_width - kp_x[kp_x != 0]
 new_order = list(range(kps.shape[2]))
- for left, right in zip(self.left, self.right):
- new_order[left] = right
- new_order[right] = left
+ if self.left is not None and self.right is not None:
+ for left, right in zip(self.left, self.right):
+ new_order[left] = right
+ new_order[right] = left
 kps = kps[:, :, new_order]
 if kpscores is not None:
 kpscores = kpscores[:, :, new_order]
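
With `left`/`right` now defaulting to None, the left-right channel swap above becomes opt-in: callers that flip keypoints horizontally must pass the index pairs themselves. A small self-contained sketch of what `_flip_kps` does when the pairs are supplied (the COCO-17 pairs below are the old defaults; the person x frame x joint x coordinate shape is assumed):

import numpy as np

left = [1, 3, 5, 7, 9, 11, 13, 15]    # COCO-17 left-side joints
right = [2, 4, 6, 8, 10, 12, 14, 16]  # matching right-side joints

kps = np.random.rand(2, 8, 17, 2) * 224  # person x frame x joint x (x, y)
img_width = 224

# Mirror x coordinates; zeros mark padded/missing joints and stay zero.
kp_x = kps[..., 0]
kp_x[kp_x != 0] = img_width - kp_x[kp_x != 0]

# Swap left/right joint channels so a mirrored left wrist is labeled right.
new_order = list(range(kps.shape[2]))
for l, r in zip(left, right):
    new_order[l] = r
    new_order[r] = l
kps = kps[:, :, new_order]

If the pairs are omitted, only the coordinate mirroring runs and the joint labels keep their original sides.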
diff --git a/mmaction/datasets/pipelines/pose_loading.py b/mmaction/datasets/pipelines/pose_loading.py
index aa7e95b012..8ae48ee52e 100644
--- a/mmaction/datasets/pipelines/pose_loading.py
+++ b/mmaction/datasets/pipelines/pose_loading.py
@@ -146,7 +146,7 @@ class PoseDecode(object):
 Default: 1.
 drop_prob (float): The probability for dropping one keypoint for each
 frame. Default: 1 / 16.
- manipulate_joints (tuple[int]): The joint indexes that may be dropped.
+ droppable_joints (tuple[int]): The joint indexes that may be dropped.
 Default: (7, 8, 9, 10, 13, 14, 15, 16). (limb joints)
 """

@@ -154,11 +154,11 @@ def __init__(self,
 random_drop=False,
 random_seed=1,
 drop_prob=1. / 16.,
- manipulate_joints=(7, 8, 9, 10, 13, 14, 15, 16)):
+ droppable_joints=(7, 8, 9, 10, 13, 14, 15, 16)):
 self.random_drop = random_drop
 self.random_seed = random_seed
 self.drop_prob = drop_prob
- self.manipulate_joints = manipulate_joints
+ self.droppable_joints = droppable_joints

 # inplace
 def _drop_kpscore(self, kpscores):
@@ -173,7 +173,7 @@ def _drop_kpscore(self, kpscores):
 lt = kpscore.shape[0]
 for tidx in range(lt):
 if np.random.random() < self.drop_prob:
- jidx = np.random.choice(self.manipulate_joints)
+ jidx = np.random.choice(self.droppable_joints)
 kpscore[tidx, jidx] = 0.

 def _load_kp(self, kp, frame_inds):
@@ -227,7 +227,7 @@ def __repr__(self):
 f'random_drop={self.random_drop}, '
 f'random_seed={self.random_seed}, '
 f'drop_prob={self.drop_prob}, '
- f'manipulate_joints={self.manipulate_joints})')
+ f'droppable_joints={self.droppable_joints})')
 return repr_str


@@ -253,8 +253,6 @@ class LoadKineticsPose:
 kwargs (dict, optional): Arguments for FileClient.
 """

- # squeeze (Remove those frames that w/o. keypoints)
- # kp2keep (The list of keypoint ids to keep)
 def __init__(self,
 io_backend='disk',
 squeeze=True,
@@ -266,7 +264,7 @@ def __init__(self,
 self.io_backend = io_backend
 self.squeeze = squeeze
 self.max_person = max_person
- self.keypoint_weight = keypoint_weight
+ self.keypoint_weight = cp.deepcopy(keypoint_weight)
 self.source = source

 if source == 'openpose':
diff --git a/mmaction/datasets/pose_dataset.py b/mmaction/datasets/pose_dataset.py
index af505862ca..a3c12dfc05 100644
--- a/mmaction/datasets/pose_dataset.py
+++ b/mmaction/datasets/pose_dataset.py
@@ -26,8 +26,8 @@ class PoseDataset(BaseDataset):
 For a video with n frames, it is a valid training sample only if n *
 valid_ratio frames have human pose. None means not applicable (only
 applicable to Kinetics Pose). Default: None.
- box_thre (str | None): The threshold for human proposals. Only boxes
- with confidence score larger than `box_thre` is kept. None means
+ box_thr (str | None): The threshold for human proposals. Only boxes
+ with confidence score larger than `box_thr` is kept. None means
 not applicable (only applicable to Kinetics Pose [ours]). Allowed
 choices are '0.5', '0.6', '0.7', '0.8', '0.9'. Default: None.
 class_prob (dict | None): The per class sampling probability. If not
@@ -40,7 +40,7 @@ def __init__(self,
 ann_file,
 pipeline,
 valid_ratio=None,
- box_thre=None,
+ box_thr=None,
 class_prob=None,
 **kwargs):
 modality = 'Pose'
@@ -48,32 +48,32 @@ def __init__(self,
 super().__init__(
 ann_file, pipeline, start_index=0, modality=modality, **kwargs)

- # box_thre, which should be a string
- self.box_thre = box_thre
- if self.box_thre is not None:
- assert box_thre in ['0.5', '0.6', '0.7', '0.8', '0.9']
+ # box_thr, which should be a string
+ self.box_thr = box_thr
+ if self.box_thr is not None:
+ assert box_thr in ['0.5', '0.6', '0.7', '0.8', '0.9']

 # Thresholding Training Examples
 self.valid_ratio = valid_ratio
 if self.valid_ratio is not None:
 assert isinstance(self.valid_ratio, float)
- if self.box_thre is None:
+ if self.box_thr is None:
 self.video_infos = self.video_infos = [
 x for x in self.video_infos
 if x['valid_frames'] / x['total_frames'] >= valid_ratio
 ]
 else:
- key = f'valid@{self.box_thre}'
+ key = f'valid@{self.box_thr}'
 self.video_infos = [
 x for x in self.video_infos
 if x[key] / x['total_frames'] >= valid_ratio
 ]
- if self.box_thre != '0.5':
- box_thre = float(self.box_thre)
+ if self.box_thr != '0.5':
+ box_thr = float(self.box_thr)
 for item in self.video_infos:
 inds = [
 i for i, score in enumerate(item['box_score'])
- if score >= box_thre
+ if score >= box_thr
 ]
 item['anno_inds'] = np.array(inds)
diff --git a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py
index 6b246039b0..6a981e14b9 100644
--- a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py
+++ b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py
@@ -77,7 +77,7 @@ def test_pose_decode(self):
 assert str(pose_decode) == ('PoseDecode(random_drop=False, '
 'random_seed=1, '
 'drop_prob=0.0625, '
- 'manipulate_joints=(7, 8, 9, 10, '
+ 'droppable_joints=(7, 8, 9, 10, '
 '13, 14, 15, 16))')
 decode_results = pose_decode(results)
 assert_array_almost_equal(decode_results['kp'], kp[:, frame_inds])
@@ -92,7 +92,7 @@ def test_pose_decode(self):
 results = dict(kp=kp, kpscore=kpscore, frame_inds=frame_inds)

 pose_decode = PoseDecode(
- random_drop=True, drop_prob=1, manipulate_joints=(7, ))
+ random_drop=True, drop_prob=1, droppable_joints=(7, ))
decode_results = pose_decode(results) assert_array_almost_equal(decode_results['kpscore'][..., 7], 0) From 5942ea2246915ed75ddba3a52d5faf9f63afcaaf Mon Sep 17 00:00:00 2001 From: Kenny Date: Sat, 1 May 2021 22:33:36 +0800 Subject: [PATCH 091/414] rename kp as keypoint --- demo/visualize_heatmap_volume.ipynb | 2 +- mmaction/datasets/pipelines/augmentations.py | 85 ++++++++++--------- mmaction/datasets/pipelines/pose_loading.py | 39 +++++---- .../test_augmentations/test_crop.py | 6 +- .../test_augmentations/test_flip.py | 4 +- .../test_augmentations/test_transform.py | 4 +- .../test_loadings/test_pose_loading.py | 51 +++++------ 7 files changed, 100 insertions(+), 91 deletions(-) diff --git a/demo/visualize_heatmap_volume.ipynb b/demo/visualize_heatmap_volume.ipynb index cb7c33a330..87f26a7d2b 100644 --- a/demo/visualize_heatmap_volume.ipynb +++ b/demo/visualize_heatmap_volume.ipynb @@ -94,7 +94,7 @@ " \n", " assert len(frames) == anno['total_frames']\n", " # The shape is N x T x K x 3\n", - " kps = np.concatenate([anno['kp'], anno['kpscore'][..., None]], axis=-1)\n", + " kps = np.concatenate([anno['keypoint'], anno['keypoint_score'][..., None]], axis=-1)\n", " kps[..., :2] *= ratio\n", " # Convert to T x N x K x 3\n", " kps = kps.transpose([1, 0, 2, 3])\n", diff --git a/mmaction/datasets/pipelines/augmentations.py b/mmaction/datasets/pipelines/augmentations.py index 821b14f4f1..69fed07bdc 100644 --- a/mmaction/datasets/pipelines/augmentations.py +++ b/mmaction/datasets/pipelines/augmentations.py @@ -94,7 +94,7 @@ def __init__(self, def __call__(self, results): img_shape = results['img_shape'] h, w = img_shape - kp = results['kp'] + kp = results['keypoint'] # Make NaN zero kp[np.isnan(kp)] = 0. @@ -535,9 +535,10 @@ def __repr__(self): class RandomCrop: """Vanilla square random crop that specifics the output size. - Required keys in results are "img_shape", "kp" (optional), "imgs" - (optional), added or modified keys are "kp", "imgs", "lazy"; Required keys - in "lazy" are "flip", "crop_bbox", added or modified key is "crop_bbox". + Required keys in results are "img_shape", "keypoint" (optional), "imgs" + (optional), added or modified keys are "keypoint", "imgs", "lazy"; Required + keys in "lazy" are "flip", "crop_bbox", added or modified key is + "crop_bbox". Args: size (int): The output size of the images. @@ -596,7 +597,7 @@ def __call__(self, results): to the next transform in pipeline. """ _init_lazy_if_proper(results, self.lazy) - if 'kp' in results: + if 'keypoint' in results: assert not self.lazy, ('Keypoint Augmentations are not compatible ' 'with lazy == True') @@ -638,8 +639,9 @@ def __call__(self, results): results['img_shape'] = (new_h, new_w) if not self.lazy: - if 'kp' in results: - results['kp'] = self._crop_kps(results['kp'], crop_bbox) + if 'keypoint' in results: + results['keypoint'] = self._crop_kps(results['keypoint'], + crop_bbox) if 'imgs' in results: results['imgs'] = self._crop_imgs(results['imgs'], crop_bbox) else: @@ -677,9 +679,9 @@ class RandomResizedCrop(RandomCrop): """Random crop that specifics the area and height-weight ratio range. Required keys in results are "img_shape", "crop_bbox", "imgs" (optional), - "kp" (optional), added or modified keys are "imgs", "kp", "crop_bbox" and - "lazy"; Required keys in "lazy" are "flip", "crop_bbox", added or modified - key is "crop_bbox". + "keypoint" (optional), added or modified keys are "imgs", "keypoint", + "crop_bbox" and "lazy"; Required keys in "lazy" are "flip", "crop_bbox", + added or modified key is "crop_bbox". 
Args: area_range (Tuple[float]): The candidate area scales range of @@ -762,7 +764,7 @@ def __call__(self, results): to the next transform in pipeline. """ _init_lazy_if_proper(results, self.lazy) - if 'kp' in results: + if 'keypoint' in results: assert not self.lazy, ('Keypoint Augmentations are not compatible ' 'with lazy == True') @@ -796,8 +798,9 @@ def __call__(self, results): results['img_shape'] = (new_h, new_w) if not self.lazy: - if 'kp' in results: - results['kp'] = self._crop_kps(results['kp'], crop_bbox) + if 'keypoint' in results: + results['keypoint'] = self._crop_kps(results['keypoint'], + crop_bbox) if 'imgs' in results: results['imgs'] = self._crop_imgs(results['imgs'], crop_bbox) else: @@ -840,9 +843,9 @@ class MultiScaleCrop(RandomCrop): level of w and h is controlled to be smaller than a certain value to prevent too large or small aspect ratio. - Required keys are "img_shape", "imgs" (optional), "kp" (optional), added or - modified keys are "imgs", "crop_bbox", "img_shape", "lazy" and "scales". - Required keys in "lazy" are "crop_bbox", added or modified key is + Required keys are "img_shape", "imgs" (optional), "keypoint" (optional), + added or modified keys are "imgs", "crop_bbox", "img_shape", "lazy" and + "scales". Required keys in "lazy" are "crop_bbox", added or modified key is "crop_bbox". Args: @@ -896,7 +899,7 @@ def __call__(self, results): to the next transform in pipeline. """ _init_lazy_if_proper(results, self.lazy) - if 'kp' in results: + if 'keypoint' in results: assert not self.lazy, ('Keypoint Augmentations are not compatible ' 'with lazy == True') @@ -972,8 +975,9 @@ def __call__(self, results): new_crop_quadruple, dtype=np.float32) if not self.lazy: - if 'kp' in results: - results['kp'] = self._crop_kps(results['kp'], crop_bbox) + if 'keypoint' in results: + results['keypoint'] = self._crop_kps(results['keypoint'], + crop_bbox) if 'imgs' in results: results['imgs'] = self._crop_imgs(results['imgs'], crop_bbox) else: @@ -1013,7 +1017,7 @@ def __repr__(self): class Resize: """Resize images to a specific size. - Required keys are "img_shape", "modality", "imgs" (optional), "kp" + Required keys are "img_shape", "modality", "imgs" (optional), "keypoint" (optional), added or modified keys are "imgs", "img_shape", "keep_ratio", "scale_factor", "lazy", "resize_size". Required keys in "lazy" is None, added or modified key is "interpolation". @@ -1085,7 +1089,7 @@ def __call__(self, results): """ _init_lazy_if_proper(results, self.lazy) - if 'kp' in results: + if 'keypoint' in results: assert not self.lazy, ('Keypoint Augmentations are not compatible ' 'with lazy == True') @@ -1109,9 +1113,9 @@ def __call__(self, results): if 'imgs' in results: results['imgs'] = self._resize_imgs(results['imgs'], new_w, new_h) - if 'kp' in results: - results['kp'] = self._resize_kps(results['kp'], - self.scale_factor) + if 'keypoint' in results: + results['keypoint'] = self._resize_kps(results['keypoint'], + self.scale_factor) else: lazyop = results['lazy'] if lazyop['flip']: @@ -1197,8 +1201,8 @@ class Flip: Reverse the order of elements in the given imgs with a specific direction. The shape of the imgs is preserved, but the elements are reordered. - Required keys are "img_shape", "modality", "imgs" (optional), "kp" - (optional), added or modified keys are "imgs", "kp", "lazy" and + Required keys are "img_shape", "modality", "imgs" (optional), "keypoint" + (optional), added or modified keys are "imgs", "keypoint", "lazy" and "flip_direction". 
Required keys in "lazy" is None, added or modified key are "flip" and "flip_direction". The Flip augmentation should be placed after any cropping / reshaping augmentations, to make sure crop_quadruple @@ -1277,7 +1281,7 @@ def __call__(self, results): to the next transform in pipeline. """ _init_lazy_if_proper(results, self.lazy) - if 'kp' in results: + if 'keypoint' in results: assert not self.lazy, ('Keypoint Augmentations are not compatible ' 'with lazy == True') assert self.direction == 'horizontal', ( @@ -1303,13 +1307,13 @@ def __call__(self, results): if 'imgs' in results: results['imgs'] = self._flip_imgs(results['imgs'], modality) - if 'kp' in results: - kp = results['kp'] - kpscore = results.get('kpscore', None) + if 'keypoint' in results: + kp = results['keypoint'] + kpscore = results.get('keypoint_score', None) kp, kpscore = self._flip_kps(kp, kpscore, img_width) - results['kp'] = kp - if 'kpscore' in results: - results['kpscore'] = kpscore + results['keypoint'] = kp + if 'keypoint_score' in results: + results['keypoint_score'] = kpscore else: lazyop = results['lazy'] if lazyop['flip']: @@ -1608,10 +1612,10 @@ def __repr__(self): class CenterCrop(RandomCrop): """Crop the center area from images. - Required keys are "img_shape", "imgs" (optional), "kp" (optional), added or - modified keys are "imgs", "kp", "crop_bbox", "lazy" and "img_shape". - Required keys in "lazy" is "crop_bbox", added or modified key is - "crop_bbox". + Required keys are "img_shape", "imgs" (optional), "keypoint" (optional), + added or modified keys are "imgs", "keypoint", "crop_bbox", "lazy" and + "img_shape". Required keys in "lazy" is "crop_bbox", added or modified key + is "crop_bbox". Args: crop_size (int | tuple[int]): (w, h) of crop size. @@ -1633,7 +1637,7 @@ def __call__(self, results): to the next transform in pipeline. """ _init_lazy_if_proper(results, self.lazy) - if 'kp' in results: + if 'keypoint' in results: assert not self.lazy, ('Keypoint Augmentations are not compatible ' 'with lazy == True') @@ -1670,8 +1674,9 @@ def __call__(self, results): new_crop_quadruple, dtype=np.float32) if not self.lazy: - if 'kp' in results: - results['kp'] = self._crop_kps(results['kp'], crop_bbox) + if 'keypoint' in results: + results['keypoint'] = self._crop_kps(results['keypoint'], + crop_bbox) if 'imgs' in results: results['imgs'] = self._crop_imgs(results['imgs'], crop_bbox) else: diff --git a/mmaction/datasets/pipelines/pose_loading.py b/mmaction/datasets/pipelines/pose_loading.py index 8ae48ee52e..d7311db143 100644 --- a/mmaction/datasets/pipelines/pose_loading.py +++ b/mmaction/datasets/pipelines/pose_loading.py @@ -135,13 +135,14 @@ def __repr__(self): class PoseDecode(object): """Load and decode pose with given indices. - Required keys are "kp", "frame_inds" (optional), "kpscore" (optional), - added or modified keys are "kp", "kpscore" (if applicable). + Required keys are "keypoint", "frame_inds" (optional), "keypoint_score" + (optional), added or modified keys are "keypoint", "keypoint_score" + (if applicable). Args: random_drop (bool): Whether to randomly drop keypoints. The following args are applicable only when `random_crop == True`. When set as - True, "kpscore" is a mandatory key. Default: False. + True, "keypoint_score" is a mandatory key. Default: False. random_seed (int): Random seed used for randomly dropping keypoints. Default: 1. 
drop_prob (float): The probability for dropping one keypoint for each @@ -199,7 +200,7 @@ def _load_kpscore(self, kpscore, frame_inds): def __call__(self, results): if self.random_drop: np.random.seed(self.random_seed) - assert 'kpscore' in results, 'for simplicity' + assert 'keypoint_score' in results, 'for simplicity' if 'frame_inds' not in results: results['frame_inds'] = np.arange(results['total_frames']) @@ -210,15 +211,17 @@ def __call__(self, results): offset = results.get('offset', 0) frame_inds = results['frame_inds'] + offset - if 'kpscore' in results: - kpscore = results['kpscore'] + if 'keypoint_score' in results: + kpscore = results['keypoint_score'] if self.random_drop: self._drop_kpscore(kpscore) - results['kpscore'] = kpscore[:, frame_inds].astype(np.float32) + results['keypoint_score'] = kpscore[:, + frame_inds].astype(np.float32) - if 'kp' in results: - results['kp'] = results['kp'][:, frame_inds].astype(np.float32) + if 'keypoint' in results: + results['keypoint'] = results['keypoint'][:, frame_inds].astype( + np.float32) return results @@ -236,8 +239,8 @@ class LoadKineticsPose: """Load Kinetics Pose given filename (The format should be pickle) Required keys are "filename", "total_frames", "img_shape", "frame_inds", - "anno_inds" (for mmpose source, optional), added or modified keys are "kp", - "kpscore". + "anno_inds" (for mmpose source, optional), added or modified keys are + "keypoint", "keypoint_score". Args: io_backend (str): IO backend where frames are stored. Default: 'disk'. @@ -364,8 +367,8 @@ def mapinds(inds): new_kp[:np_frame, i] = new_kp[inds, i] results['num_person'] = self.max_person - results['kp'] = new_kp[:self.max_person] - results['kpscore'] = new_kpscore[:self.max_person] + results['keypoint'] = new_kp[:self.max_person] + results['keypoint_score'] = new_kpscore[:self.max_person] return results def __repr__(self): @@ -383,8 +386,8 @@ def __repr__(self): class GeneratePoseTarget: """Generate pseudo heatmaps based on joint coordinates and confidence. - Required keys are "kp", "img_shape", "kpscore" (optional), added or - modified keys are "imgs", + Required keys are "keypoint", "img_shape", "keypoint_score" (optional), + added or modified keys are "imgs". Args: sigma (float): The sigma of the generated gaussian map. Default: 0.6. @@ -613,11 +616,11 @@ def gen_an_aug(self, results): list[np.ndarray]: The generated pseudo heatmaps. 
""" - all_kps = results['kp'] + all_kps = results['keypoint'] kp_shape = all_kps.shape - if 'kpscore' in results: - all_kpscores = results['kpscore'] + if 'keypoint_score' in results: + all_kpscores = results['keypoint_score'] else: all_kpscores = np.ones(kp_shape[:-1], dtype=np.float32) diff --git a/tests/test_data/test_pipelines/test_augmentations/test_crop.py b/tests/test_data/test_pipelines/test_augmentations/test_crop.py index d63d902f50..d9365fd03a 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_crop.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_crop.py @@ -213,20 +213,20 @@ def test_center_crop(self): imgs = list(np.random.rand(2, 240, 320, 3)) results = dict(imgs=imgs) kp = np.array([[160, 120], [160, 120]]).reshape([1, 1, 2, 2]) - results['kp'] = kp + results['keypoint'] = kp results['gt_bboxes'] = np.array([[0, 0, 320, 240]]) results['proposals'] = np.array([[0, 0, 320, 240]]) center_crop = CenterCrop(crop_size=224) center_crop_results = center_crop(results) - target_keys = ['imgs', 'crop_bbox', 'img_shape', 'kp'] + target_keys = ['imgs', 'crop_bbox', 'img_shape', 'keypoint'] assert assert_dict_has_keys(center_crop_results, target_keys) assert check_crop(imgs, center_crop_results['imgs'], center_crop_results['crop_bbox']) assert np.all( center_crop_results['crop_bbox'] == np.array([48, 8, 272, 232])) assert center_crop_results['img_shape'] == (224, 224) - assert np.all(center_crop_results['kp'] == 112) + assert np.all(center_crop_results['keypoint'] == 112) assert repr(center_crop) == (f'{center_crop.__class__.__name__}' f'(crop_size={(224, 224)}, lazy={False})') diff --git a/tests/test_data/test_pipelines/test_augmentations/test_flip.py b/tests/test_data/test_pipelines/test_augmentations/test_flip.py index c1cda273cc..c2f3ab892b 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_flip.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_flip.py @@ -110,7 +110,7 @@ def test_flip(self): img_shape=(64, 64)) flip = Flip(flip_ratio=1, direction='horizontal', left=[0], right=[1]) flip_results = flip(results) - assert_array_almost_equal(flip_results['kp'][0, 0], + assert_array_almost_equal(flip_results['keypoint'][0, 0], np.array([[1, 63], [63, 1]])) results = dict( @@ -119,7 +119,7 @@ def test_flip(self): img_shape=(64, 64)) flip = Flip(flip_ratio=1, direction='horizontal', left=[], right=[]) flip_results = flip(results) - assert_array_almost_equal(flip_results['kp'][0, 0], + assert_array_almost_equal(flip_results['keypoint'][0, 0], np.array([[63, 1], [1, 63]])) with pytest.raises(AssertionError): diff --git a/tests/test_data/test_pipelines/test_augmentations/test_transform.py b/tests/test_data/test_pipelines/test_augmentations/test_transform.py index 9b94a922c2..59fd054e31 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_transform.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_transform.py @@ -71,7 +71,7 @@ def test_resize(self): assert np.all(resize_results['scale_factor'] == np.array( [.5, 1. 
/ 3.], dtype=np.float32)) assert resize_results['img_shape'] == (80, 160) - kp = resize_results['kp'][0, 0, 0] + kp = resize_results['keypoint'][0, 0, 0] assert_array_almost_equal(kp, np.array([30, 20])) # scale with -1 to indicate np.inf @@ -153,7 +153,7 @@ def test_pose_compact(self): fake_kp = np.zeros([1, 4, 2, 2]) fake_kp[:, :, 0] = [10, 10] fake_kp[:, :, 1] = [90, 90] - results['kp'] = fake_kp + results['keypoint'] = fake_kp pose_compact = PoseCompact( padding=0, threshold=0, hw_ratio=None, allow_imgpad=False) diff --git a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py index 6a981e14b9..77f54062c7 100644 --- a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py +++ b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py @@ -80,21 +80,22 @@ def test_pose_decode(self): 'droppable_joints=(7, 8, 9, 10, ' '13, 14, 15, 16))') decode_results = pose_decode(results) - assert_array_almost_equal(decode_results['kp'], kp[:, frame_inds]) - assert_array_almost_equal(decode_results['kpscore'], + assert_array_almost_equal(decode_results['keypoint'], kp[:, + frame_inds]) + assert_array_almost_equal(decode_results['keypoint_score'], kpscore[:, frame_inds]) results = dict(kp=kp, kpscore=kpscore, total_frames=16) pose_decode = PoseDecode() decode_results = pose_decode(results) - assert_array_almost_equal(decode_results['kp'], kp) - assert_array_almost_equal(decode_results['kpscore'], kpscore) + assert_array_almost_equal(decode_results['keypoint'], kp) + assert_array_almost_equal(decode_results['keypoint_score'], kpscore) results = dict(kp=kp, kpscore=kpscore, frame_inds=frame_inds) pose_decode = PoseDecode( random_drop=True, drop_prob=1, droppable_joints=(7, )) decode_results = pose_decode(results) - assert_array_almost_equal(decode_results['kpscore'][..., 7], 0) + assert_array_almost_equal(decode_results['keypoint_score'][..., 7], 0) def test_load_kinetics_pose(self): @@ -130,26 +131,26 @@ def get_mode(arr): "'torso': 2, 'limb': 3}, " 'source=openpose, kwargs={})') return_results = load_kinetics_pose(inp) - assert return_results['kp'].shape[:-1] == \ - return_results['kpscore'].shape + assert return_results['keypoint'].shape[:-1] == \ + return_results['keypoint_score'].shape - num_person = return_results['kp'].shape[0] - num_frame = return_results['kp'].shape[1] + num_person = return_results['keypoint'].shape[0] + num_frame = return_results['keypoint'].shape[1] assert num_person == get_mode(frame_inds)[1] - assert np.max(return_results['kp']) > 1 + assert np.max(return_results['keypoint']) > 1 assert num_frame == len(set(frame_inds)) inp = cp.deepcopy(results) load_kinetics_pose = LoadKineticsPose( squeeze=False, max_person=100, source='openpose') return_results = load_kinetics_pose(inp) - assert return_results['kp'].shape[:-1] == \ - return_results['kpscore'].shape + assert return_results['keypoint'].shape[:-1] == \ + return_results['keypoint_score'].shape - num_person = return_results['kp'].shape[0] - num_frame = return_results['kp'].shape[1] + num_person = return_results['keypoint'].shape[0] + num_frame = return_results['keypoint'].shape[1] assert num_person == get_mode(frame_inds)[1] - assert np.max(return_results['kp']) > 1 + assert np.max(return_results['keypoint']) > 1 assert num_frame == total_frames inp = cp.deepcopy(results) @@ -157,13 +158,13 @@ def get_mode(arr): load_kinetics_pose = LoadKineticsPose( squeeze=True, max_person=100, source='mmpose') return_results = 
load_kinetics_pose(inp) - assert return_results['kp'].shape[:-1] == \ - return_results['kpscore'].shape + assert return_results['keypoint'].shape[:-1] == \ + return_results['keypoint_score'].shape - num_person = return_results['kp'].shape[0] - num_frame = return_results['kp'].shape[1] + num_person = return_results['keypoint'].shape[0] + num_frame = return_results['keypoint'].shape[1] assert num_person == get_mode(frame_inds[anno_inds])[1] - assert np.max(return_results['kp']) <= 1 + assert np.max(return_results['keypoint']) <= 1 assert num_frame == len(set(frame_inds[anno_inds])) inp = cp.deepcopy(results) @@ -171,13 +172,13 @@ def get_mode(arr): load_kinetics_pose = LoadKineticsPose( squeeze=True, max_person=2, source='mmpose') return_results = load_kinetics_pose(inp) - assert return_results['kp'].shape[:-1] == \ - return_results['kpscore'].shape + assert return_results['keypoint'].shape[:-1] == \ + return_results['keypoint_score'].shape - num_person = return_results['kp'].shape[0] - num_frame = return_results['kp'].shape[1] + num_person = return_results['keypoint'].shape[0] + num_frame = return_results['keypoint'].shape[1] assert num_person <= 2 - assert np.max(return_results['kp']) <= 1 + assert np.max(return_results['keypoint']) <= 1 assert num_frame == len(set(frame_inds[anno_inds])) def test_generate_pose_target(self): From 23bbb52c962e91b84af6bb25eda4a2d6e9b62b97 Mon Sep 17 00:00:00 2001 From: Kenny Date: Sat, 1 May 2021 22:37:02 +0800 Subject: [PATCH 092/414] resolve comments --- mmaction/datasets/pipelines/pose_loading.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/mmaction/datasets/pipelines/pose_loading.py b/mmaction/datasets/pipelines/pose_loading.py index d7311db143..be77d1c612 100644 --- a/mmaction/datasets/pipelines/pose_loading.py +++ b/mmaction/datasets/pipelines/pose_loading.py @@ -132,7 +132,7 @@ def __repr__(self): @PIPELINES.register_module() -class PoseDecode(object): +class PoseDecode: """Load and decode pose with given indices. 
Required keys are "keypoint", "frame_inds" (optional), "keypoint_score" @@ -359,9 +359,10 @@ def mapinds(inds): np_frame = num_person_frame[i] val = new_kpscore[:np_frame, i] - val = np.sum(val[:, kpgrp['face']], 1) * weight['face'] + \ - np.sum(val[:, kpgrp['torso']], 1) * weight['torso'] + \ - np.sum(val[:, kpgrp['limb']], 1) * weight['limb'] + val = ( + np.sum(val[:, kpgrp['face']], 1) * weight['face'] + + np.sum(val[:, kpgrp['torso']], 1) * weight['torso'] + + np.sum(val[:, kpgrp['limb']], 1) * weight['limb']) inds = sorted(range(np_frame), key=lambda x: -val[x]) new_kpscore[:np_frame, i] = new_kpscore[inds, i] new_kp[:np_frame, i] = new_kp[inds, i] From 413e98a027f37cdf17b61206c38ecf6a0f4d94da Mon Sep 17 00:00:00 2001 From: Kenny Date: Sat, 1 May 2021 22:54:58 +0800 Subject: [PATCH 093/414] fix unittest --- tests/test_data/test_datasets/test_pose_dataset.py | 8 ++++---- .../test_pipelines/test_augmentations/test_flip.py | 6 +++--- .../test_augmentations/test_transform.py | 2 +- .../test_loadings/test_pose_loading.py | 13 +++++++++---- 4 files changed, 17 insertions(+), 12 deletions(-) diff --git a/tests/test_data/test_datasets/test_pose_dataset.py b/tests/test_data/test_datasets/test_pose_dataset.py index a1ac573851..782619e013 100644 --- a/tests/test_data/test_datasets/test_pose_dataset.py +++ b/tests/test_data/test_datasets/test_pose_dataset.py @@ -13,7 +13,7 @@ def test_pose_dataset(self): dataset = PoseDataset( ann_file=ann_file, pipeline=[], - box_thre='0.5', + box_thr='0.5', data_prefix=data_prefix) assert len(dataset) == 100 item = dataset[0] @@ -23,7 +23,7 @@ def test_pose_dataset(self): ann_file=ann_file, pipeline=[], valid_ratio=0.2, - box_thre='0.9', + box_thr='0.9', data_prefix=data_prefix) assert len(dataset) == 84 for item in dataset: @@ -35,7 +35,7 @@ def test_pose_dataset(self): ann_file=ann_file, pipeline=[], valid_ratio=0.3, - box_thre='0.7', + box_thr='0.7', data_prefix=data_prefix) assert len(dataset) == 87 for item in dataset: @@ -48,5 +48,5 @@ def test_pose_dataset(self): ann_file=ann_file, pipeline=[], valid_ratio=0.2, - box_thre='0.55', + box_thr='0.55', data_prefix=data_prefix) diff --git a/tests/test_data/test_pipelines/test_augmentations/test_flip.py b/tests/test_data/test_pipelines/test_augmentations/test_flip.py index c2f3ab892b..e3fbcf6b4f 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_flip.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_flip.py @@ -105,7 +105,7 @@ def test_flip(self): # flip the keypoints results = dict( - kp=np.array([[1, 1], [63, 63]]).reshape([1, 1, 2, 2]), + keypoint=np.array([[1, 1], [63, 63]]).reshape([1, 1, 2, 2]), modality='Pose', img_shape=(64, 64)) flip = Flip(flip_ratio=1, direction='horizontal', left=[0], right=[1]) @@ -114,7 +114,7 @@ def test_flip(self): np.array([[1, 63], [63, 1]])) results = dict( - kp=np.array([[1, 1], [63, 63]]).reshape([1, 1, 2, 2]), + keypoint=np.array([[1, 1], [63, 63]]).reshape([1, 1, 2, 2]), modality='Pose', img_shape=(64, 64)) flip = Flip(flip_ratio=1, direction='horizontal', left=[], right=[]) @@ -124,7 +124,7 @@ def test_flip(self): with pytest.raises(AssertionError): results = dict( - kp=np.array([[1, 1], [63, 63]]).reshape([1, 1, 2, 2]), + keypoint=np.array([[1, 1], [63, 63]]).reshape([1, 1, 2, 2]), modality='Pose', img_shape=(64, 64)) flip = Flip(flip_ratio=1, direction='vertical', left=[], right=[]) diff --git a/tests/test_data/test_pipelines/test_augmentations/test_transform.py b/tests/test_data/test_pipelines/test_augmentations/test_transform.py index 
59fd054e31..c05b499f07 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_transform.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_transform.py @@ -64,7 +64,7 @@ def test_resize(self): # test resize for flow images imgs = list(np.random.rand(2, 240, 320)) kp = np.array([60, 60]).reshape([1, 1, 1, 2]) - results = dict(imgs=imgs, kp=kp, modality='Flow') + results = dict(imgs=imgs, keypoint=kp, modality='Flow') resize = Resize(scale=(160, 80), keep_ratio=False) resize_results = resize(results) assert assert_dict_has_keys(resize_results, target_keys) diff --git a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py index 77f54062c7..8a890db2e3 100644 --- a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py +++ b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py @@ -72,7 +72,8 @@ def test_pose_decode(self): kp = np.random.random([1, 16, 17, 2]) kpscore = np.random.random([1, 16, 17]) frame_inds = np.array([2, 4, 6, 8, 10]) - results = dict(kp=kp, kpscore=kpscore, frame_inds=frame_inds) + results = dict( + keypoint=kp, keypoint_score=kpscore, frame_inds=frame_inds) pose_decode = PoseDecode() assert str(pose_decode) == ('PoseDecode(random_drop=False, ' 'random_seed=1, ' @@ -85,13 +86,14 @@ def test_pose_decode(self): assert_array_almost_equal(decode_results['keypoint_score'], kpscore[:, frame_inds]) - results = dict(kp=kp, kpscore=kpscore, total_frames=16) + results = dict(keypoint=kp, keypoint_score=kpscore, total_frames=16) pose_decode = PoseDecode() decode_results = pose_decode(results) assert_array_almost_equal(decode_results['keypoint'], kp) assert_array_almost_equal(decode_results['keypoint_score'], kpscore) - results = dict(kp=kp, kpscore=kpscore, frame_inds=frame_inds) + results = dict( + keypoint=kp, keypoint_score=kpscore, frame_inds=frame_inds) pose_decode = PoseDecode( random_drop=True, drop_prob=1, droppable_joints=(7, )) decode_results = pose_decode(results) @@ -188,7 +190,10 @@ def test_generate_pose_target(self): kp = np.concatenate([kp] * 8, axis=1) kpscore = np.concatenate([kpscore] * 8, axis=1) results = dict( - img_shape=img_shape, kp=kp, kpscore=kpscore, modality='Pose') + img_shape=img_shape, + keypoint=kp, + keypoint_score=kpscore, + modality='Pose') generate_pose_target = GeneratePoseTarget( sigma=1, with_kp=True, left=(0, ), right=(1, ), skeletons=()) From 6d8ee7d81a393129e1408e9ea6ad6b50bacf812a Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Thu, 6 May 2021 11:42:01 +0800 Subject: [PATCH 094/414] Upper mmcv_minimum_version --- mmaction/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmaction/__init__.py b/mmaction/__init__.py index 7a516d40fc..56da91fbbd 100644 --- a/mmaction/__init__.py +++ b/mmaction/__init__.py @@ -3,7 +3,7 @@ from .version import __version__ -mmcv_minimum_version = '1.2.6' +mmcv_minimum_version = '1.3.1' mmcv_maximum_version = '1.4.0' mmcv_version = digit_version(mmcv.__version__) From 4d084f458596d668b87fb65fb263e1df986f74a1 Mon Sep 17 00:00:00 2001 From: Kenny Date: Fri, 7 May 2021 12:27:06 +0800 Subject: [PATCH 095/414] resolve comments --- mmaction/datasets/pipelines/augmentations.py | 3 +++ tools/data/skeleton/README.md | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/mmaction/datasets/pipelines/augmentations.py b/mmaction/datasets/pipelines/augmentations.py index 69fed07bdc..15d1492004 100644 --- 
a/mmaction/datasets/pipelines/augmentations.py +++ b/mmaction/datasets/pipelines/augmentations.py @@ -59,6 +59,9 @@ class PoseCompact: example, if 'padding == 0.25', then the expanded box has unchanged center, and 1.25x width and height. + +Required keys in results are "img_shape", "keypoint", added or modified keys +are "img_shape", "keypoint", "crop_quadruple". + Args: padding (float): The padding size. Default: 0.25. threshold (int): The threshold for the tight bounding box. If the width diff --git a/tools/data/skeleton/README.md b/tools/data/skeleton/README.md index c941c61964..9b2a5aa4b1 100644 --- a/tools/data/skeleton/README.md +++ b/tools/data/skeleton/README.md @@ -15,7 +15,7 @@ ## Introduction -We release the skeleton annotations used in [Revisiting Skeleton-based Action Recognition](). By default, we use [Faster-RCNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person.py) with ResNet50 backbone for human detection and [HRNet-w32](https://github.com/open-mmlab/mmpose/blob/master/configs/top_down/hrnet/coco/hrnet_w32_coco_256x192.py) for single person pose estimation. For FineGYM, we use Ground-Truth bounding boxes for the athlete instead of detection bounding boxes. Currently, we release the skeleton annotations for FineGYM and NTURGB-D Xsub split. Other annotations will be soo released. +We release the skeleton annotations used in [Revisiting Skeleton-based Action Recognition](https://arxiv.org/abs/2104.13586). By default, we use [Faster-RCNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person.py) with ResNet50 backbone for human detection and [HRNet-w32](https://github.com/open-mmlab/mmpose/blob/master/configs/top_down/hrnet/coco/hrnet_w32_coco_256x192.py) for single person pose estimation. For FineGYM, we use Ground-Truth bounding boxes for the athlete instead of detection bounding boxes. Currently, we release the skeleton annotations for FineGYM and NTURGB-D Xsub split. Other annotations will soon be released. 
## Prepare Annotations From 1e0f51ea2013b675d961e8817c0b804347e3078b Mon Sep 17 00:00:00 2001 From: Kenny Date: Fri, 7 May 2021 13:29:31 +0800 Subject: [PATCH 096/414] more unittest --- .../test_datasets/test_pose_dataset.py | 9 ++ .../test_augmentations/test_crop.py | 6 + .../test_augmentations/test_transform.py | 6 + .../test_loadings/test_pose_loading.py | 111 ++++++++++++++++++ tests/test_models/test_backbones.py | 18 +++ 5 files changed, 150 insertions(+) diff --git a/tests/test_data/test_datasets/test_pose_dataset.py b/tests/test_data/test_datasets/test_pose_dataset.py index 782619e013..de9904a8b7 100644 --- a/tests/test_data/test_datasets/test_pose_dataset.py +++ b/tests/test_data/test_datasets/test_pose_dataset.py @@ -43,6 +43,15 @@ def test_pose_dataset(self): assert np.all(item['box_score'][item['anno_inds']] >= 0.7) assert item['valid@0.7'] / item['total_frames'] >= 0.3 + class_prob = {i: 1 for i in range(400)} + dataset = PoseDataset( + ann_file=ann_file, + pipeline=[], + valid_ratio=0.3, + box_thre='0.7', + data_prefix=data_prefix, + class_prob=class_prob) + with pytest.raises(AssertionError): dataset = PoseDataset( ann_file=ann_file, diff --git a/tests/test_data/test_pipelines/test_augmentations/test_crop.py b/tests/test_data/test_pipelines/test_augmentations/test_crop.py index d9365fd03a..872e4bb5b5 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_crop.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_crop.py @@ -29,6 +29,8 @@ def test_random_crop(self): random_crop = RandomCrop(size=224) results['gt_bboxes'] = np.array([[0, 0, 340, 224]]) results['proposals'] = np.array([[0, 0, 340, 224]]) + kp = np.array([[160, 120], [160, 120]]).reshape([1, 1, 2, 2]) + results['keypoint'] = kp random_crop_result = random_crop(results) assert assert_dict_has_keys(random_crop_result, target_keys) assert check_crop(imgs, random_crop_result['imgs'], @@ -76,6 +78,8 @@ def test_random_resized_crop(self): results = dict(imgs=imgs) results['gt_bboxes'] = np.array([[0, 0, 340, 256]]) results['proposals'] = np.array([[0, 0, 340, 256]]) + kp = np.array([[160, 120], [160, 120]]).reshape([1, 1, 2, 2]) + results['keypoint'] = kp with pytest.raises(AssertionError): # area_range[0] > area_range[1], which is wrong @@ -143,6 +147,8 @@ def test_multi_scale_crop(self): results = dict(imgs=imgs) results['gt_bboxes'] = np.array([[0, 0, 340, 256]]) results['proposals'] = np.array([[0, 0, 340, 256]]) + kp = np.array([[160, 120], [160, 120]]).reshape([1, 1, 2, 2]) + results['keypoint'] = kp config = dict( input_size=224, scales=(1, 0.8), diff --git a/tests/test_data/test_pipelines/test_augmentations/test_transform.py b/tests/test_data/test_pipelines/test_augmentations/test_transform.py index c05b499f07..78fc9c8ec6 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_transform.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_transform.py @@ -181,3 +181,9 @@ def test_pose_compact(self): inp = copy.deepcopy(results) ret = pose_compact(inp) assert ret['img_shape'] == (100, 100) + + pose_compact = PoseCompact( + padding=0, threshold=0, hw_ratio=0.75, allow_imgpad=True) + inp = copy.deepcopy(results) + ret = pose_compact(inp) + assert ret['img_shape'] == (80, 106) diff --git a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py index 8a890db2e3..f0432fb528 100644 --- a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py +++ 
b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py @@ -2,6 +2,7 @@ from collections import defaultdict import numpy as np +import pytest from mmcv import dump from numpy.testing import assert_array_almost_equal, assert_array_equal @@ -45,6 +46,15 @@ def test_uniform_sample_frames(self): assert_array_equal(sampling_results['frame_inds'], np.array([0, 1, 2, 3, 4, 5, 6, 0])) + results = dict(total_frames=7, start_index=0) + sampling = UniformSampleFrames( + clip_len=8, num_clips=8, test_mode=True, seed=0) + sampling_results = sampling(results) + assert sampling_results['clip_len'] == 8 + assert sampling_results['frame_interval'] is None + assert sampling_results['num_clips'] == 8 + assert len(sampling_results['frame_inds']) == 64 + results = dict(total_frames=64, start_index=0) sampling = UniformSampleFrames( clip_len=8, num_clips=4, test_mode=True, seed=0) @@ -68,6 +78,24 @@ def test_uniform_sample_frames(self): assert sampling_results['num_clips'] == 1 assert len(sampling_results['frame_inds']) == 8 + results = dict(total_frames=7, start_index=0) + sampling = UniformSampleFrames( + clip_len=8, num_clips=1, test_mode=False, seed=0) + sampling_results = sampling(results) + assert sampling_results['clip_len'] == 8 + assert sampling_results['frame_interval'] is None + assert sampling_results['num_clips'] == 1 + assert len(sampling_results['frame_inds']) == 8 + + results = dict(total_frames=15, start_index=0) + sampling = UniformSampleFrames( + clip_len=8, num_clips=1, test_mode=False, seed=0) + sampling_results = sampling(results) + assert sampling_results['clip_len'] == 8 + assert sampling_results['frame_interval'] is None + assert sampling_results['num_clips'] == 1 + assert len(sampling_results['frame_inds']) == 8 + def test_pose_decode(self): kp = np.random.random([1, 16, 17, 2]) kpscore = np.random.random([1, 16, 17]) @@ -124,6 +152,10 @@ def get_mode(arr): frame_inds=frame_inds) inp = cp.deepcopy(results) + + with pytest.raises(NotImplementedError): + LoadKineticsPose(squeeze=True, max_person=100, source='xxx') + load_kinetics_pose = LoadKineticsPose( squeeze=True, max_person=100, source='openpose') @@ -207,6 +239,15 @@ def test_generate_pose_target(self): assert_array_almost_equal(return_results['imgs'][0], return_results['imgs'][1]) + results = dict(img_shape=img_shape, keypoint=kp, modality='Pose') + + generate_pose_target = GeneratePoseTarget( + sigma=1, with_kp=True, left=(0, ), right=(1, ), skeletons=()) + return_results = generate_pose_target(results) + assert return_results['imgs'].shape == (8, 64, 64, 3) + assert_array_almost_equal(return_results['imgs'][0], + return_results['imgs'][1]) + generate_pose_target = GeneratePoseTarget( sigma=1, with_kp=False, @@ -246,3 +287,73 @@ def test_generate_pose_target(self): assert_array_almost_equal(imgs[:8, 2], imgs[8:, 2, :, ::-1]) assert_array_almost_equal(imgs[:8, 0], imgs[8:, 1, :, ::-1]) assert_array_almost_equal(imgs[:8, 1], imgs[8:, 0, :, ::-1]) + + img_shape = (64, 64) + kp = np.array([[[[24, 24], [40, 40], [24, 40]]]]) + kpscore = np.array([[[0., 0., 0.]]]) + kp = np.concatenate([kp] * 8, axis=1) + kpscore = np.concatenate([kpscore] * 8, axis=1) + results = dict( + img_shape=img_shape, + keypoint=kp, + keypoint_score=kpscore, + modality='Pose') + generate_pose_target = GeneratePoseTarget( + sigma=1, with_kp=True, left=(0, ), right=(1, ), skeletons=()) + return_results = generate_pose_target(results) + assert_array_almost_equal(return_results['imgs'], 0) + + img_shape = (64, 64) + kp = np.array([[[[24, 24], [40, 40], 
[24, 40]]]]) + kpscore = np.array([[[0., 0., 0.]]]) + kp = np.concatenate([kp] * 8, axis=1) + kpscore = np.concatenate([kpscore] * 8, axis=1) + results = dict( + img_shape=img_shape, + keypoint=kp, + keypoint_score=kpscore, + modality='Pose') + generate_pose_target = GeneratePoseTarget( + sigma=1, + with_kp=False, + with_limb=True, + left=(0, ), + right=(1, ), + skeletons=((0, 1), (1, 2), (0, 2))) + return_results = generate_pose_target(results) + assert_array_almost_equal(return_results['imgs'], 0) + + img_shape = (64, 64) + kp = np.array([[[[124, 124], [140, 140], [124, 140]]]]) + kpscore = np.array([[[0., 0., 0.]]]) + kp = np.concatenate([kp] * 8, axis=1) + kpscore = np.concatenate([kpscore] * 8, axis=1) + results = dict( + img_shape=img_shape, + keypoint=kp, + keypoint_score=kpscore, + modality='Pose') + generate_pose_target = GeneratePoseTarget( + sigma=1, with_kp=True, left=(0, ), right=(1, ), skeletons=()) + return_results = generate_pose_target(results) + assert_array_almost_equal(return_results['imgs'], 0) + + img_shape = (64, 64) + kp = np.array([[[[124, 124], [140, 140], [124, 140]]]]) + kpscore = np.array([[[0., 0., 0.]]]) + kp = np.concatenate([kp] * 8, axis=1) + kpscore = np.concatenate([kpscore] * 8, axis=1) + results = dict( + img_shape=img_shape, + keypoint=kp, + keypoint_score=kpscore, + modality='Pose') + generate_pose_target = GeneratePoseTarget( + sigma=1, + with_kp=False, + with_limb=True, + left=(0, ), + right=(1, ), + skeletons=((0, 1), (1, 2), (0, 2))) + return_results = generate_pose_target(results) + assert_array_almost_equal(return_results['imgs'], 0) diff --git a/tests/test_models/test_backbones.py b/tests/test_models/test_backbones.py index 24d5b6f4fd..065eb2bd86 100644 --- a/tests/test_models/test_backbones.py +++ b/tests/test_models/test_backbones.py @@ -490,6 +490,24 @@ def test_slowonly_backbone(): so_50.init_weights() so_50.train() + # test SlowOnly for PoseC3D + so_50 = ResNet3dSlowOnly( + depth=50, + pretrained=None, + in_channels=17, + base_channels=32, + num_stages=3, + out_indices=(2, ), + stage_blocks=(4, 6, 3), + conv1_stride_s=1, + pool1_stride_s=1, + inflate=(0, 1, 1), + spatial_strides=(2, 2, 2), + temporal_strides=(1, 1, 2), + dilations=(1, 1, 1)) + so_50.init_weights() + so_50.train() + # SlowOnly inference test input_shape = (1, 3, 8, 64, 64) imgs = generate_backbone_demo_inputs(input_shape) From a6aaf8310b7f81ef837403b64c0fd6ae3eca424b Mon Sep 17 00:00:00 2001 From: Kenny Date: Fri, 7 May 2021 15:08:39 +0800 Subject: [PATCH 097/414] fix bug in unittest --- tests/test_data/test_datasets/test_pose_dataset.py | 2 +- tests/test_models/test_backbones.py | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/test_data/test_datasets/test_pose_dataset.py b/tests/test_data/test_datasets/test_pose_dataset.py index de9904a8b7..2aa5707b97 100644 --- a/tests/test_data/test_datasets/test_pose_dataset.py +++ b/tests/test_data/test_datasets/test_pose_dataset.py @@ -48,7 +48,7 @@ def test_pose_dataset(self): ann_file=ann_file, pipeline=[], valid_ratio=0.3, - box_thre='0.7', + box_thr='0.7', data_prefix=data_prefix, class_prob=class_prob) diff --git a/tests/test_models/test_backbones.py b/tests/test_models/test_backbones.py index 065eb2bd86..9357546fd7 100644 --- a/tests/test_models/test_backbones.py +++ b/tests/test_models/test_backbones.py @@ -485,11 +485,6 @@ def test_slowonly_backbone(): # SlowOnly should contain no lateral connection ResNet3dSlowOnly(50, None, lateral=True) - # test SlowOnly with normal config - so_50 = 
ResNet3dSlowOnly(50, None) - so_50.init_weights() - so_50.train() - # test SlowOnly for PoseC3D so_50 = ResNet3dSlowOnly( depth=50, @@ -508,6 +503,11 @@ def test_slowonly_backbone(): so_50.init_weights() so_50.train() + # test SlowOnly with normal config + so_50 = ResNet3dSlowOnly(50, None) + so_50.init_weights() + so_50.train() + # SlowOnly inference test input_shape = (1, 3, 8, 64, 64) imgs = generate_backbone_demo_inputs(input_shape) From 9c2cdd95a5519259404532f7cd1fd91cc6f9f045 Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Thu, 13 May 2021 23:16:02 +0800 Subject: [PATCH 098/414] [Feature] Jester (#864) * resolve comments * update changelog * add config * update README --- configs/recognition/slowonly/README.md | 6 ++ configs/recognition/slowonly/README_zh-CN.md | 6 ++ ...net_pretrained_r50_8x8x1_64e_jester_rgb.py | 97 +++++++++++++++++++ configs/recognition/tsm/README.md | 6 ++ configs/recognition/tsm/README_zh-CN.md | 6 ++ .../tsm/tsm_r50_1x1x8_50e_jester_rgb.py | 91 +++++++++++++++++ 6 files changed, 212 insertions(+) create mode 100644 configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.py create mode 100644 configs/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb.py diff --git a/configs/recognition/slowonly/README.md b/configs/recognition/slowonly/README.md index 9d297eb4b9..2154a64722 100644 --- a/configs/recognition/slowonly/README.md +++ b/configs/recognition/slowonly/README.md @@ -71,6 +71,12 @@ In data benchmark, we compare two different data preprocessing methods: (1) Resi | [slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow](/configs/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow.py) | short-side 256 | 8x2 | ResNet50 | Kinetics | 80.3 | 71.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111-66ecdb3c.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.json) | | 1: 1 Fusion | | | | | 83.7 | 74.8 | | | | +### Jester + +| config | resolution | gpus | backbone | pretrain | top1 acc | ckpt | log | json | +| :----------------------------------------------------------- | :--------: | :--: | :------: | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 97.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb-b56a5389.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.log) | 
[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.json) | + Notes: 1. The **gpus** indicates the number of gpu we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. diff --git a/configs/recognition/slowonly/README_zh-CN.md b/configs/recognition/slowonly/README_zh-CN.md index 61b0130199..10b8464a61 100644 --- a/configs/recognition/slowonly/README_zh-CN.md +++ b/configs/recognition/slowonly/README_zh-CN.md @@ -71,6 +71,12 @@ | [slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow](/configs/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow.py) | 短边 256 | 8x2 | ResNet50 | Kinetics | 80.3 | 71.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111-66ecdb3c.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.json) | | 1: 1 融合 | | | | | 83.7 | 74.8 | | | | +### Jester + +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | ckpt | log | json | +| :----------------------------------------------------------- | :----: | :------: | :------: | :------: | :---------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 97.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb-b56a5389.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.json) | + 注: 1. 
这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 diff --git a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.py b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.py new file mode 100644 index 0000000000..d2517e8391 --- /dev/null +++ b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.py @@ -0,0 +1,97 @@ +_base_ = [ + '../../_base_/models/slowonly_r50.py', '../../_base_/default_runtime.py' +] + +# model settings +model = dict(cls_head=dict(num_classes=27)) + +# dataset settings +dataset_type = 'RawframeDataset' +data_root = 'data/jester/rawframes' +data_root_val = 'data/jester/rawframes' +ann_file_train = 'data/jester/jester_train_list_rawframes.txt' +ann_file_val = 'data/jester/jester_val_list_rawframes.txt' +ann_file_test = 'data/jester/jester_val_list_rawframes.txt' +jester_flip_label_map = {0: 1, 1: 0, 6: 7, 7: 6} +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) +train_pipeline = [ + dict(type='SampleFrames', clip_len=8, frame_interval=4, num_clips=1), + dict(type='RawFrameDecode'), + dict(type='RandomResizedCrop'), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5, flip_label_map=jester_flip_label_map), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=8, + frame_interval=4, + num_clips=1, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=256), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=8, + frame_interval=4, + num_clips=10, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='ThreeCrop', crop_size=256), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=8, + workers_per_gpu=4, + val_dataloader=dict(videos_per_gpu=1), + test_dataloader=dict(videos_per_gpu=1), + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + filename_tmpl='{:05}.jpg', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + filename_tmpl='{:05}.jpg', + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + filename_tmpl='{:05}.jpg', + pipeline=test_pipeline)) +evaluation = dict( + interval=5, metrics=['top_k_accuracy', 'mean_class_accuracy']) + +# optimizer +optimizer = dict( + type='SGD', lr=0.1, momentum=0.9, + weight_decay=0.0001) # this lr is used for 8 gpus +optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2)) +# learning policy +lr_config = dict(policy='CosineAnnealing', min_lr=0, by_epoch=False) +total_epochs = 64 + +# runtime settings +checkpoint_config = dict(interval=4) +work_dir = './work_dirs/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb' +find_unused_parameters = False diff --git 
a/configs/recognition/tsm/README.md b/configs/recognition/tsm/README.md index 32bf6656b5..9a3d92a10b 100644 --- a/configs/recognition/tsm/README.md +++ b/configs/recognition/tsm/README.md @@ -78,6 +78,12 @@ | [tsm_r50_mixup_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 46.35 / 48.49 | 75.07 / 76.88 | +0.77 / +0.79 | +0.05 / +0.70 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb-9eca48e5.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.json) | | [tsm_r50_cutmix_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 45.92 / 47.46 | 75.23 / 76.71 | +0.34 / -0.24 | +0.21 / +0.59 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb-34934615.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.json) | +### Jester + +| config | resolution | gpus | backbone | pretrain | top1 acc (efficient/accurate) | ckpt | log | json | +| ------------------------------------------------------------ | :--------: | :--: | :------: | :------: | :---------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [tsm_r50_1x1x8_50e_jester_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 96.5 / 97.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb-c799267e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb.json) | + Notes: 1. The **gpus** indicates the number of gpu we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. 
diff --git a/configs/recognition/tsm/README_zh-CN.md b/configs/recognition/tsm/README_zh-CN.md index 7e692684f9..ff46670c00 100644 --- a/configs/recognition/tsm/README_zh-CN.md +++ b/configs/recognition/tsm/README_zh-CN.md @@ -76,6 +76,12 @@ | [tsm_r50_mixup_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 46.35 / 48.49 | 75.07 / 76.88 | +0.77 / +0.79 | +0.05 / +0.70 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb-9eca48e5.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.json) | | [tsm_r50_cutmix_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 45.92 / 47.46 | 75.23 / 76.71 | +0.34 / -0.24 | +0.21 / +0.59 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb-34934615.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.json) | +### Jester + +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 (efficient/accurate) | ckpt | log | json | +| ------------------------------------------------------------ | :----: | :------: | :------: | :------: | :------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [tsm_r50_1x1x8_50e_jester_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 96.5 / 97.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb-c799267e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb.json) | + 注: 1. 
这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 diff --git a/configs/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb.py b/configs/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb.py new file mode 100644 index 0000000000..2893df0bd1 --- /dev/null +++ b/configs/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb.py @@ -0,0 +1,91 @@ +_base_ = [ + '../../_base_/models/tsm_r50.py', '../../_base_/schedules/sgd_tsm_50e.py', + '../../_base_/default_runtime.py' +] + +# model settings +model = dict(cls_head=dict(num_classes=27)) + +# dataset settings +dataset_type = 'RawframeDataset' +data_root = 'data/jester/rawframes' +data_root_val = 'data/jester/rawframes' +ann_file_train = 'data/jester/jester_train_list_rawframes.txt' +ann_file_val = 'data/jester/jester_val_list_rawframes.txt' +ann_file_test = 'data/jester/jester_val_list_rawframes.txt' +jester_flip_label_map = {0: 1, 1: 0, 6: 7, 7: 6} +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) +train_pipeline = [ + dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8), + dict(type='RawFrameDecode'), + dict(type='RandomResizedCrop'), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5, flip_label_map=jester_flip_label_map), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=8, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=256), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=8, + twice_sample=True, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='ThreeCrop', crop_size=256), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=8, + workers_per_gpu=4, + val_dataloader=dict(videos_per_gpu=1), + test_dataloader=dict(videos_per_gpu=1), + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + filename_tmpl='{:05}.jpg', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + filename_tmpl='{:05}.jpg', + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + filename_tmpl='{:05}.jpg', + pipeline=test_pipeline)) +evaluation = dict( + interval=2, metrics=['top_k_accuracy', 'mean_class_accuracy']) + +# optimizer +optimizer = dict(weight_decay=0.0005) + +# runtime settings +work_dir = './work_dirs/tsm_r50_1x1x8_50e_jester_rgb/' From 006952dec7b52f33b9366ee31fe81380db25f1a0 Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Mon, 17 May 2021 13:24:05 +0800 Subject: [PATCH 099/414] [Feature] Support MMCli (#867) * resolve comments * update changelog * support mmcli --- MANIFEST.in | 2 ++ requirements/mminstall.txt | 1 + setup.py | 1 + 3 files changed, 4 
insertions(+) create mode 100644 MANIFEST.in create mode 100644 requirements/mminstall.txt diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000000..035a5aa931 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include mmcls/configs *.py *.yml +recursive-include mmcls/tools *.sh *.py diff --git a/requirements/mminstall.txt b/requirements/mminstall.txt new file mode 100644 index 0000000000..7651fd8f8c --- /dev/null +++ b/requirements/mminstall.txt @@ -0,0 +1 @@ +mmcv-full>=1.3.1 diff --git a/setup.py b/setup.py index 2fa1e55bf5..6477cb0454 100644 --- a/setup.py +++ b/setup.py @@ -105,6 +105,7 @@ def gen_packages_items(): maintainer_email='openmmlab@gmail.com', packages=find_packages(exclude=('configs', 'tools', 'demo')), keywords='computer vision, video understanding', + include_package_data=True, classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: Apache Software License', From 6a01f2b48338edea0c12458275a56e50b41b208a Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Mon, 17 May 2021 19:54:00 +0800 Subject: [PATCH 100/414] Update README.md (#871) Fix typo --- configs/recognition/trn/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/configs/recognition/trn/README.md b/configs/recognition/trn/README.md index 0d39abe6ef..2034d08e1c 100644 --- a/configs/recognition/trn/README.md +++ b/configs/recognition/trn/README.md @@ -19,13 +19,13 @@ |config | resolution | gpus | backbone| pretrain | top1 acc (efficient/accurate)| top5 acc (efficient/accurate)| gpu_mem(M) | ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[trn_r50_1x1x8_50e_sthv1_rgb](configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 31.62 / 33.88 |60.01 / 62.12| 11010 | [ckpt](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/trn_r50_1x1x8_50e_sthv1_rgb_20210401-163704a8.pth) | [log](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log)| [json](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log.json)| +|[trn_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 31.62 / 33.88 |60.01 / 62.12| 11010 | [ckpt](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/trn_r50_1x1x8_50e_sthv1_rgb_20210401-163704a8.pth) | [log](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log)| [json](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log.json)| ### Something-Something V2 |config | resolution | gpus | backbone| pretrain | top1 acc (efficient/accurate)| top5 acc (efficient/accurate)| gpu_mem(M) | ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[trn_r50_1x1x8_50e_sthv2_rgb](configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 45.14 / 47.96 |73.21 / 75.97 | 11010 | [ckpt](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/trn_r50_1x1x8_50e_sthv2_rgb_20210401-773eca7b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210326_103951.log)| [json](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210326_103951.log.json)| 
+|[trn_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 45.14 / 47.96 |73.21 / 75.97 | 11010 | [ckpt](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/trn_r50_1x1x8_50e_sthv2_rgb_20210401-773eca7b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210326_103951.log)| [json](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210326_103951.log.json)| Notes: From 8601406323566afa407a302cbed45a88b097a57c Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Mon, 17 May 2021 20:41:44 +0800 Subject: [PATCH 101/414] Update README.md (#872) Fix Typo --- configs/recognition/tsm/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/configs/recognition/tsm/README.md b/configs/recognition/tsm/README.md index 9a3d92a10b..6703975a92 100644 --- a/configs/recognition/tsm/README.md +++ b/configs/recognition/tsm/README.md @@ -33,7 +33,7 @@ |[tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |340x256|8| ResNet50| ImageNet |70.24|89.56|[70.36](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|[89.49](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|74.0 (8x1 frames)| 7079 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20200607-af7fb746.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log.json)| |[tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet |70.59|89.52|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/tsm_r50_256p_1x1x8_50e_kinetics400_rgb_20200726-020785e2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log.json)| |[tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py](/configs/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet |70.48|89.40|x|x|x|7076|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219-bf96e6cc.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.json)| -|[tsm_r50_video_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet 
|70.25|89.66|[70.36](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|[89.49](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|74.0 (8x1 frames)| 7077 | [ckpt]( https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_1x1x8_100e_kinetics400_rgb_20200702-a77f4328.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log.json)| +|[tsm_r50_video_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet |70.25|89.66|[70.36](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|[89.49](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|74.0 (8x1 frames)| 7077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_1x1x8_100e_kinetics400_rgb_20200702-a77f4328.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log.json)| |[tsm_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py) |340x256|8x4| ResNet50 | ImageNet|72.9|90.44|[72.22](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#dense-sample)|[90.37](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#dense-sample)|11.5 (8x10 frames)| 7079 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/tsm_r50_dense_1x1x8_100e_kinetics400_rgb_20200626-91a54551.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20200626_213415.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20200626_213415.log.json)| |[tsm_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py) |short-side 256|8| ResNet50 | ImageNet|73.38|91.02|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb_20200727-e1e0c785.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/20200725_032043.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/20200725_032043.log.json)| |[tsm_r50_1x1x16_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py) |340x256|8| ResNet50| ImageNet 
|72.09|90.37|[70.67](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_16f.sh)|[89.98](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_16f.sh)|47.0 (16x1 frames)| 10404 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/tsm_r50_340x256_1x1x16_50e_kinetics400_rgb_20201011-2f27f229.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log.json)| From 2cf6c2e0200b3343f128b139ea32c75e6fd9ba1e Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Tue, 18 May 2021 15:37:49 +0800 Subject: [PATCH 102/414] [MMCli] Add Metafile (#870) * resolve comments * update changelog * metafile batch1 * metafile batch2 * metafile batch3 * update README * update README * metafile batch4 * metafile batch1 * metafile batch2 * metafile batch3 * update README * update README * metafile batch4 * add modelzoo * update * fix bug * fix bug in metafile --- MANIFEST.in | 5 +- configs/detection/ava/metafile.yml | 195 ++++ configs/detection/lfb/metafile.yml | 69 ++ configs/localization/bmn/metafile.yml | 78 ++ configs/localization/bsn/metafile.yml | 90 ++ configs/localization/ssn/metafile.yml | 30 + configs/recognition/c3d/metafile.yml | 30 + configs/recognition/csn/metafile.yml | 54 + configs/recognition/i3d/metafile.yml | 246 +++++ configs/recognition/omnisource/README.md | 32 +- .../recognition/omnisource/README_zh-CN.md | 36 +- configs/recognition/omnisource/metafile.yml | 362 +++++++ configs/recognition/r2plus1d/metafile.yml | 102 ++ configs/recognition/slowfast/metafile.yml | 198 ++++ configs/recognition/slowonly/metafile.yml | 415 ++++++++ configs/recognition/tanet/metafile.yml | 30 + configs/recognition/tin/metafile.yml | 75 ++ configs/recognition/tpn/metafile.yml | 77 ++ configs/recognition/trn/metafile.yml | 56 ++ configs/recognition/tsm/metafile.yml | 687 +++++++++++++ configs/recognition/tsn/metafile.yml | 942 ++++++++++++++++++ configs/recognition/x3d/metafile.yml | 44 + configs/recognition_audio/resnet/metafile.yml | 30 + model_zoo.yml | 21 + 24 files changed, 3868 insertions(+), 36 deletions(-) create mode 100644 configs/detection/ava/metafile.yml create mode 100644 configs/detection/lfb/metafile.yml create mode 100644 configs/localization/bmn/metafile.yml create mode 100644 configs/localization/bsn/metafile.yml create mode 100644 configs/localization/ssn/metafile.yml create mode 100644 configs/recognition/c3d/metafile.yml create mode 100644 configs/recognition/csn/metafile.yml create mode 100644 configs/recognition/i3d/metafile.yml create mode 100644 configs/recognition/omnisource/metafile.yml create mode 100644 configs/recognition/r2plus1d/metafile.yml create mode 100644 configs/recognition/slowfast/metafile.yml create mode 100644 configs/recognition/slowonly/metafile.yml create mode 100644 configs/recognition/tanet/metafile.yml create mode 100644 configs/recognition/tin/metafile.yml create mode 100644 configs/recognition/tpn/metafile.yml create mode 100644 configs/recognition/trn/metafile.yml create mode 100644 configs/recognition/tsm/metafile.yml create mode 100644 configs/recognition/tsn/metafile.yml create mode 100644 configs/recognition/x3d/metafile.yml create mode 
100644 configs/recognition_audio/resnet/metafile.yml create mode 100644 model_zoo.yml diff --git a/MANIFEST.in b/MANIFEST.in index 035a5aa931..a452def31a 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,2 +1,3 @@ -recursive-include mmcls/configs *.py *.yml -recursive-include mmcls/tools *.sh *.py +include mmaction/model_zoo.yml +recursive-include mmaction/configs *.py *.yml +recursive-include mmaction/tools *.sh *.py diff --git a/configs/detection/ava/metafile.yml b/configs/detection/ava/metafile.yml new file mode 100644 index 0000000000..520076cf6f --- /dev/null +++ b/configs/detection/ava/metafile.yml @@ -0,0 +1,195 @@ +Collections: +- Metadata: + Training Data: null + Name: AVA + README: configs/detection/ava/README.md +Models: +- Config: configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py + In Collection: AVA + Metadata: + Architecture: ResNet50 + Epochs: 20 + Input: 4x16 + Pretrained: Kinetics-400 + Resolution: short-side 256 + Training BatchSize / GPU: 16 + Training Data: AVA v2.1 + gpus: 8 + Modality: RGB + Name: slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb + Results: + - Dataset: AVA v2.1 + Metrics: + mAP: 20.1 + Task: Spatial Temporal Action Detection + Training Json Log: https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201127.json + Training Log: https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201127.log + Weights: https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217-40061d5f.pth +- Config: configs/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb.py + In Collection: AVA + Metadata: + Architecture: ResNet50 + Epochs: 20 + Input: 4x16 + Pretrained: OmniSource + Resolution: short-side 256 + Training BatchSize / GPU: 16 + Training Data: AVA v2.1 + gpus: 8 + Modality: RGB + Name: slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb + Results: + - Dataset: AVA v2.1 + Metrics: + mAP: 21.8 + Task: Spatial Temporal Action Detection + Training Json Log: https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb_20201127.json + Training Log: https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb_20201127.log + Weights: https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb_20201217-0c6d2e98.pth +- Config: configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb.py + In Collection: AVA + Metadata: + Architecture: ResNet50 + Epochs: 10 + Input: 4x16 + Pretrained: Kinetics-400 + Resolution: short-side 256 + Training BatchSize / GPU: 12 + Training Data: AVA v2.1 + gpus: 8 + Modality: RGB + Name: slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb + Results: + - Dataset: AVA v2.1 + Metrics: + mAP: 21.75 + Task: Spatial Temporal Action Detection + Training Json Log: https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb/20210316_122517.log.json + Training Log: 
https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb/20210316_122517.log + Weights: https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb_20210316-959829ec.pth +- Config: configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb.py + In Collection: AVA + Metadata: + Architecture: ResNet50 + Epochs: 10 + Input: 8x8 + Pretrained: Kinetics-400 + Resolution: short-side 256 + Training BatchSize / GPU: 6 + Training Data: AVA v2.1 + gpus: 16 + Modality: RGB + Name: slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb + Results: + - Dataset: AVA v2.1 + Metrics: + mAP: 23.79 + Task: Spatial Temporal Action Detection + Training Json Log: https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb/20210316_122517.log.json + Training Log: https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb/20210316_122517.log + Weights: https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb_20210316-5742e4dd.pth +- Config: configs/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb.py + In Collection: AVA + Metadata: + Architecture: ResNet101 + Epochs: 20 + Input: 8x8 + Pretrained: Kinetics-400 + Resolution: short-side 256 + Training BatchSize / GPU: 6 + Training Data: AVA v2.1 + gpus: 16 + Modality: RGB + Name: slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb + Results: + - Dataset: AVA v2.1 + Metrics: + mAP: 24.6 + Task: Spatial Temporal Action Detection + Training Json Log: https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb_20201127.json + Training Log: https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb_20201127.log + Weights: https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb_20201217-1c9b4117.pth +- Config: configs/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb.py + In Collection: AVA + Metadata: + Architecture: ResNet101 + Epochs: 20 + Input: 8x8 + Pretrained: OmniSource + Resolution: short-side 256 + Training BatchSize / GPU: 6 + Training Data: AVA v2.1 + gpus: 16 + Modality: RGB + Name: slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb + Results: + - Dataset: AVA v2.1 + Metrics: + mAP: 25.9 + Task: Spatial Temporal Action Detection + Training Json Log: https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201127.json + Training Log: https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201127.log + Weights: https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201217-16378594.pth +- Config: configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py + In Collection: AVA + Metadata: + Architecture: ResNet50 + Epochs: 20 + 
Input: 32x2 + Pretrained: Kinetics-400 + Resolution: short-side 256 + Training BatchSize / GPU: 9 + Training Data: AVA v2.1 + gpus: 16 + Modality: RGB + Name: slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb + Results: + - Dataset: AVA v2.1 + Metrics: + mAP: 24.4 + Task: Spatial Temporal Action Detection + Training Json Log: https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217.json + Training Log: https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217.log + Weights: https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217-6e7c704d.pth +- Config: configs/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py + In Collection: AVA + Metadata: + Architecture: ResNet50 + Epochs: 20 + Input: 32x2 + Pretrained: Kinetics-400 + Resolution: short-side 256 + Training BatchSize / GPU: 9 + Training Data: AVA v2.1 + gpus: 16 + Modality: RGB + Name: slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb + Results: + - Dataset: AVA v2.1 + Metrics: + mAP: 25.4 + Task: Spatial Temporal Action Detection + Training Json Log: https://download.openmmlab.com/mmaction/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201222.json + Training Log: https://download.openmmlab.com/mmaction/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201222.log + Weights: https://download.openmmlab.com/mmaction/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201222-f4d209c9.pth +- Config: configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py + In Collection: AVA + Metadata: + Architecture: ResNet50 + Epochs: 20 + Input: 32x2 + Pretrained: Kinetics-400 + Resolution: short-side 256 + Training BatchSize / GPU: 5 + Training Data: AVA v2.1 + gpus: 16 + Modality: RGB + Name: slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb + Results: + - Dataset: AVA v2.1 + Metrics: + mAP: 25.5 + Task: Spatial Temporal Action Detection + Training Json Log: https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217.json + Training Log: https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217.log + Weights: https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217-ae225e97.pth diff --git a/configs/detection/lfb/metafile.yml b/configs/detection/lfb/metafile.yml new file mode 100644 index 0000000000..6e9a09f18f --- /dev/null +++ b/configs/detection/lfb/metafile.yml @@ -0,0 +1,69 @@ +Collections: +- Metadata: + Training Data: null + Name: LFB + README: configs/detection/lfb/README.md +Models: +- Config: configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py + In Collection: LFB + Metadata: + Architecture: ResNet50 + Epochs: 20 + Input: 4x16 + Pretrained: Kinetics-400 + 
Resolution: short-side 256 + Training BatchSize / GPU: 12 + Training Data: AVA v2.1 + gpus: 8 + Modality: RGB + Name: lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py + Results: + - Dataset: AVA v2.1 + Metrics: + mAP: 24.11 + Task: Spatial Temporal Action Detection + Training Json Log: https://download.openmmlab.com/mmaction/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210224_125052.log.json + Training Log: https://download.openmmlab.com/mmaction/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210224_125052.log + Weights: https://download.openmmlab.com/mmaction/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb_20210224-2ae136d9.pth +- Config: configs/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py + In Collection: LFB + Metadata: + Architecture: ResNet50 + Epochs: 20 + Input: 4x16 + Pretrained: Kinetics-400 + Resolution: short-side 256 + Training BatchSize / GPU: 12 + Training Data: AVA v2.1 + gpus: 8 + Modality: RGB + Name: lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py + Results: + - Dataset: AVA v2.1 + Metrics: + mAP: 20.17 + Task: Spatial Temporal Action Detection + Training Json Log: https://download.openmmlab.com/mmaction/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log.json + Training Log: https://download.openmmlab.com/mmaction/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log + Weights: https://download.openmmlab.com/mmaction/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb_20210301-19c330b7.pth +- Config: configs/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py + In Collection: LFB + Metadata: + Architecture: ResNet50 + Epochs: 20 + Input: 4x16 + Pretrained: Kinetics-400 + Resolution: short-side 256 + Training BatchSize / GPU: 12 + Training Data: AVA v2.1 + gpus: 8 + Modality: RGB + Name: lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py + Results: + - Dataset: AVA v2.1 + Metrics: + mAP: 22.15 + Task: Spatial Temporal Action Detection + Training Json Log: https://download.openmmlab.com/mmaction/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log.json + Training Log: https://download.openmmlab.com/mmaction/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log + Weights: https://download.openmmlab.com/mmaction/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb_20210301-37efcd15.pth diff --git a/configs/localization/bmn/metafile.yml b/configs/localization/bmn/metafile.yml new file mode 100644 index 0000000000..ac60106a46 --- /dev/null +++ b/configs/localization/bmn/metafile.yml @@ -0,0 +1,78 @@ +Collections: +- Metadata: + Training Data: null + Name: BMN + README: configs/localization/bmn/README.md +Models: +- Config: configs/localization/bmn/bmn_400x100_2x8_9e_activitynet_feature.py + In Collection: BMN + Metadata: + Epochs: 9 + Training BatchSize / GPU: 8 + Training Data: ActivityNet v1.3 + feature: cuhk_mean_100 + gpus: 2 + Name: bmn_400x100_9e_2x8_activitynet_feature (cuhk_mean_100) + Results: + - Dataset: ActivityNet v1.3 + Metrics: + AP@0.5: 42.47 + AP@0.75: 31.31 + AP@0.95: 9.92 + AR@100: 75.28 + AUC: 67.22 + 
mAP: 30.34 + Task: Temporal Action Localization + Training Json Log: https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_9e_activitynet_feature/bmn_400x100_9e_activitynet_feature.log.json + Training Log: https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_9e_activitynet_feature/bmn_400x100_9e_activitynet_feature.log + Weights: https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_9e_activitynet_feature/bmn_400x100_9e_activitynet_feature_20200619-42a3b111.pth + gpu_mem(M): '5420' + iter time(s): '3.27' +- Config: configs/localization/bmn/bmn_400x100_2x8_9e_activitynet_feature.py + In Collection: BMN + Metadata: + Epochs: 9 + Training BatchSize / GPU: 8 + Training Data: ActivityNet v1.3 + feature: mmaction_video + gpus: 2 + Name: bmn_400x100_9e_2x8_activitynet_feature (mmaction_video) + Results: + - Dataset: ActivityNet v1.3 + Metrics: + AP@0.5: 42.62 + AP@0.75: 31.56 + AP@0.95: 10.86 + AR@100: 75.43 + AUC: 67.22 + mAP: 30.77 + Task: Temporal Action Localization + Training Json Log: https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_video/bmn_400x100_2x8_9e_mmaction_video_20200809.json + Training Log: https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_video/bmn_400x100_2x8_9e_mmaction_video_20200809.log + Weights: https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_video/bmn_400x100_2x8_9e_mmaction_video_20200809-c9fd14d2.pth + gpu_mem(M): '5420' + iter time(s): '3.27' +- Config: configs/localization/bmn/bmn_400x100_2x8_9e_activitynet_feature.py + In Collection: BMN + Metadata: + Epochs: 9 + Training BatchSize / GPU: 8 + Training Data: ActivityNet v1.3 + feature: mmaction_clip + gpus: 2 + Name: bmn_400x100_9e_2x8_activitynet_feature (mmaction_clip) + Results: + - Dataset: ActivityNet v1.3 + Metrics: + AP@0.5: 43.08 + AP@0.75: 32.19 + AP@0.95: 10.73 + AR@100: 75.35 + AUC: 67.38 + mAP: 31.15 + Task: Temporal Action Localization + Training Json Log: https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809.json + Training Log: https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809.log + Weights: https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809-10d803ce.pth + gpu_mem(M): '5420' + iter time(s): '3.27' diff --git a/configs/localization/bsn/metafile.yml b/configs/localization/bsn/metafile.yml new file mode 100644 index 0000000000..bb64bda3b5 --- /dev/null +++ b/configs/localization/bsn/metafile.yml @@ -0,0 +1,90 @@ +Collections: +- Metadata: + Training Data: null + Name: BSN + README: configs/localization/bsn/README.md +Models: +- Config: + - configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py + - configs/localization/bsn/bsn_pgm_400x100_activitynet_feature.py + - configs/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature.py + In Collection: BSN + Metadata: + Training Data: ActivityNet v1.3 + feature: cuhk_mean_100 + gpus: 1 + pretrain: None + Name: bsn_400x100_1x16_20e_activitynet_feature (cuhk_mean_100) + Results: + - Dataset: ActivityNet v1.3 + Metrics: + AR@100: 74.66 + AUC: 66.45 + Task: Temporal Action Localization + Training Json Log: + - 
https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature/bsn_tem_400x100_1x16_20e_activitynet_feature.log.json + - https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature/bsn_pem_400x100_1x16_20e_activitynet_feature.log.json + Training Log: + - https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature/bsn_tem_400x100_1x16_20e_activitynet_feature.log + - https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature/bsn_pem_400x100_1x16_20e_activitynet_feature.log + Weights: + - https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature/bsn_tem_400x100_1x16_20e_activitynet_feature_20200619-cd6accc3.pth + - https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature/bsn_pem_400x100_1x16_20e_activitynet_feature_20210203-1c27763d.pth + gpu_mem(M): 41(TEM)+25(PEM) + iter time(s): 0.074(TEM)+0.036(PEM) +- Config: + - configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py + - configs/localization/bsn/bsn_pgm_400x100_activitynet_feature.py + - configs/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature.py + In Collection: BSN + Metadata: + Training Data: ActivityNet v1.3 + feature: mmaction_video + gpus: 1 + pretrain: None + Name: bsn_400x100_1x16_20e_activitynet_feature (mmaction_video) + Results: + - Dataset: ActivityNet v1.3 + Metrics: + AR@100: 74.93 + AUC: 66.74 + Task: Temporal Action Localization + Training Json Log: + - https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_video/bsn_tem_400x100_1x16_20e_mmaction_video_20200809.json + - https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_video/bsn_pem_400x100_1x16_20e_mmaction_video_20200809.json + Training Log: + - https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_video/bsn_tem_400x100_1x16_20e_mmaction_video_20200809.log + - https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_video/bsn_pem_400x100_1x16_20e_mmaction_video_20200809.log + Weights: + - https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_video/bsn_tem_400x100_1x16_20e_mmaction_video_20200809-ad6ec626.pth + - https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_video/bsn_pem_400x100_1x16_20e_mmaction_video_20200809-aa861b26.pth + gpu_mem(M): 41(TEM)+25(PEM) + iter time(s): 0.074(TEM)+0.036(PEM) +- Config: + - configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py + - configs/localization/bsn/bsn_pgm_400x100_activitynet_feature.py + - configs/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature.py + In Collection: BSN + Metadata: + Training Data: ActivityNet v1.3 + feature: mmaction_clip + gpus: 1 + pretrain: None + Name: bsn_400x100_1x16_20e_activitynet_feature (mmaction_clip) + Results: + - Dataset: ActivityNet v1.3 + Metrics: + AR@100: 75.19 + AUC: 66.81 + Task: Temporal Action Localization + Training Json Log: + - https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_clip/bsn_tem_400x100_1x16_20e_mmaction_clip_20200809.json + - https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_clip/bsn_pem_400x100_1x16_20e_mmaction_clip_20200809.json + Training Log: + - 
https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_clip/bsn_tem_400x100_1x16_20e_mmaction_clip_20200809.log + - https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_clip/bsn_pem_400x100_1x16_20e_mmaction_clip_20200809.log + Weights: + - https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_clip/bsn_tem_400x100_1x16_20e_mmaction_clip_20200809-0a563554.pth + - https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_clip/bsn_pem_400x100_1x16_20e_mmaction_clip_20200809-e32f61e6.pth + gpu_mem(M): 41(TEM)+25(PEM) + iter time(s): 0.074(TEM)+0.036(PEM) diff --git a/configs/localization/ssn/metafile.yml b/configs/localization/ssn/metafile.yml new file mode 100644 index 0000000000..9cf416053a --- /dev/null +++ b/configs/localization/ssn/metafile.yml @@ -0,0 +1,30 @@ +Collections: +- Metadata: + Training Data: null + Name: SSN + README: configs/localization/ssn/README.md +Models: +- Config: configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py + In Collection: SSN + Metadata: + Architecture: ResNet50 + Training Data: THUMOS 14 + gpus: 8 + pretrain: ImageNet + Name: ssn_r50_450e_thumos14_rgb + Results: + - Dataset: THUMOS 14 + Metrics: + mAP@0.3: 29.37 + mAP@0.4: 22.15 + mAP@0.5: 15.69 + Task: Temporal Action Localization + Training Json Log: https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/20201005_144656.log.json + Training Log: https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/20201005_144656.log + Weights: https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/ssn_r50_450e_thumos14_rgb_20201012-1920ab16.pth + gpu_mem(M): '6352' + reference mAP@0.3: '[27.61](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)' + reference mAP@0.4: '[21.28](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)' + reference mAP@0.5: '[14.57](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)' + reference ckpt: '[ckpt](https://download.openmmlab.com/mmaction/localization/ssn/mmaction_reference/ssn_r50_450e_thumos14_rgb_ref/ssn_r50_450e_thumos14_rgb_ref_20201014-b6f48f68.pth)' + reference json: '[json](https://download.openmmlab.com/mmaction/localization/ssn/mmaction_reference/ssn_r50_450e_thumos14_rgb_ref/20201008_103258.log.json)' diff --git a/configs/recognition/c3d/metafile.yml b/configs/recognition/c3d/metafile.yml new file mode 100644 index 0000000000..f0af255121 --- /dev/null +++ b/configs/recognition/c3d/metafile.yml @@ -0,0 +1,30 @@ +Collections: +- Metadata: + Training Data: null + Name: C3D + README: configs/recognition/c3d/README.md +Models: +- Config: configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb.py + In Collection: C3D + Metadata: + Architecture: c3d + Epochs: 45 + Parameters: 78409573 + Training BatchSize / GPU: 30 + Training Data: UCF101 + gpus: 8 + pretrain: sports1m + resolution: 128x171 + Modality: RGB + Name: c3d_sports1m_16x1x1_45e_ucf101_rgb.py + Results: + - Dataset: UCF101 + Metrics: + top1 acc: 83.27 + top5 acc: 95.9 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/20201021_140429.log.json + Training Log:
https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/20201021_140429.log + Weights: https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/c3d_sports1m_16x1x1_45e_ucf101_rgb_20201021-26655025.pth + gpu_mem(M): '6053' + inference_time(video/s): x diff --git a/configs/recognition/csn/metafile.yml b/configs/recognition/csn/metafile.yml new file mode 100644 index 0000000000..4c1cea7bf9 --- /dev/null +++ b/configs/recognition/csn/metafile.yml @@ -0,0 +1,54 @@ +Collections: +- Metadata: + Training Data: null + Name: CSN + README: configs/recognition/csn/README.md +Models: +- Config: configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py + In Collection: CSN + Metadata: + Architecture: ResNet152 + Epochs: 58 + Parameters: 29703568 + Training BatchSize / GPU: 3 + Training Data: Kinetics-400 + gpus: 32 + pretrain: IG65M + resolution: short-side 320 + Modality: RGB + Name: ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 80.14 + top5 acc: 94.93 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/20200728_031952.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/20200728_031952.log + Weights: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20200803-fc66ce8d.pth + gpu_mem(M): '8517' + inference_time(video/s): x +- Config: configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py + In Collection: CSN + Metadata: + Architecture: ResNet152 + Epochs: 58 + Parameters: 29703568 + Training BatchSize / GPU: 3 + Training Data: Kinetics-400 + gpus: 32 + pretrain: IG65M + resolution: short-side 320 + Modality: RGB + Name: ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 82.76 + top5 acc: 95.68 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log + Weights: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb_20200812-9037a758.pth + gpu_mem(M): '8516' + inference_time(video/s): x diff --git a/configs/recognition/i3d/metafile.yml b/configs/recognition/i3d/metafile.yml new file mode 100644 index 0000000000..8f2f63c730 --- /dev/null +++ b/configs/recognition/i3d/metafile.yml @@ -0,0 +1,246 @@ +Collections: +- Metadata: + Training Data: null + Name: I3D + README: configs/recognition/i3d/README.md +Models: +- Config: configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py + In Collection: I3D + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 28043472 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 8 + pretrain: ImageNet + resolution: 340x256 + Modality: RGB + Name: i3d_r50_32x2x1_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 72.68 + top5 acc:
90.78 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/20200614_060456.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/20200614_060456.log + Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/i3d_r50_32x2x1_100e_kinetics400_rgb_20200614-c25ef9a4.pth + gpu_mem(M): '5170' + inference_time(video/s): 1.7 (320x3 frames) +- Config: configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py + In Collection: I3D + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 28043472 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 8 + pretrain: ImageNet + resolution: short-side 256 + Modality: RGB + Name: i3d_r50_32x2x1_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 73.27 + top5 acc: 90.92 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/20200725_031555.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/20200725_031555.log + Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth + gpu_mem(M): '5170' + inference_time(video/s): x +- Config: configs/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb.py + In Collection: I3D + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 28043472 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 8 + pretrain: ImageNet + resolution: short-side 256p + Modality: RGB + Name: i3d_r50_video_32x2x1_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 72.85 + top5 acc: 90.75 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/20200706_143014.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/20200706_143014.log + Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/i3d_r50_video_32x2x1_100e_kinetics400_rgb_20200826-e31c6f52.pth + gpu_mem(M): '5170' + inference_time(video/s): x +- Config: configs/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb.py + In Collection: I3D + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 28043472 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 16 + pretrain: ImageNet + resolution: 340x256 + Modality: RGB + Name: i3d_r50_dense_32x2x1_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 72.77 + top5 acc: 90.57 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/20200616_230011.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/20200616_230011.log + Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/i3d_r50_dense_32x2x1_100e_kinetics400_rgb_20200616-2bbb4361.pth + gpu_mem(M): '5170' + inference_time(video/s): 1.7 (320x3 frames) +- Config: configs/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb.py + In Collection: I3D + Metadata: 
+ Architecture: ResNet50 + Epochs: 100 + Parameters: 28043472 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 8 + pretrain: ImageNet + resolution: short-side 256 + Modality: RGB + Name: i3d_r50_dense_32x2x1_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 73.48 + top5 acc: 91.0 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/20200725_031604.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/20200725_031604.log + Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb_20200725-24eb54cc.pth + gpu_mem(M): '5170' + inference_time(video/s): x +- Config: configs/recognition/i3d/i3d_r50_lazy_32x2x1_100e_kinetics400_rgb.py + In Collection: I3D + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 28043472 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 8 + pretrain: ImageNet + resolution: 340x256 + Modality: RGB + Name: i3d_r50_lazy_32x2x1_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 72.32 + top5 acc: 90.72 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/20200612_233836.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/20200612_233836.log + Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/i3d_r50_fast_32x2x1_100e_kinetics400_rgb_20200612-000e4d2a.pth + gpu_mem(M): '5170' + inference_time(video/s): 1.8 (320x3 frames) +- Config: configs/recognition/i3d/i3d_r50_lazy_32x2x1_100e_kinetics400_rgb.py + In Collection: I3D + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 28043472 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 8 + pretrain: ImageNet + resolution: short-side 256 + Modality: RGB + Name: i3d_r50_lazy_32x2x1_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 73.24 + top5 acc: 90.99 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/20200725_031457.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/20200725_031457.log + Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb_20200817-4e90d1d5.pth + gpu_mem(M): '5170' + inference_time(video/s): x +- Config: configs/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb.py + In Collection: I3D + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 35397840 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 32 + pretrain: ImageNet + resolution: short-side 256p + Modality: RGB + Name: i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 74.71 + top5 acc: 91.81 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034054.log.json + Training Log:
https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034054.log + Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb_20200813-6e6aef1b.pth + gpu_mem(M): '6438' + inference_time(video/s): x +- Config: configs/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb.py + In Collection: I3D + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 31723728 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 32 + pretrain: ImageNet + resolution: short-side 256p + Modality: RGB + Name: i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 73.37 + top5 acc: 91.26 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034909.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034909.log + Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb_20200815-17f84aa2.pth + gpu_mem(M): '4944' + inference_time(video/s): x +- Config: configs/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb.py + In Collection: I3D + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 35397840 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 32 + pretrain: ImageNet + resolution: short-side 256p + Modality: RGB + Name: i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 73.92 + top5 acc: 91.59 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/20200814_044208.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/20200814_044208.log + Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb_20200814-7c30d5bb.pth + gpu_mem(M): '4832' + inference_time(video/s): x diff --git a/configs/recognition/omnisource/README.md b/configs/recognition/omnisource/README.md index 281b3818d8..ac682536d0 100644 --- a/configs/recognition/omnisource/README.md +++ b/configs/recognition/omnisource/README.md @@ -27,25 +27,25 @@ We benchmark the OmniSource framework on the released subset, results are listed ### TSN-8seg-ResNet50 -| Setting | Top-1 | Top-5 | ckpt | json | log | -| :----------: | :---: | :---: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| Baseline | 77.4 | 93.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030-b4eaf92b.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030.json) | 
[log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030.log) | -| +GG-img | 78.0 | 93.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030-23966b4b.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030.log) | -| +[GG-IG]-img | 78.6 | 93.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030-66f5e046.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030.log) | -| +IG-vid | 80.6 | 95.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030-011f984d.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030.log) | -| +KRaw | 78.6 | 93.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030-59f5d064.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030.log) | -| OmniSource | 81.3 | 94.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030-0f56ef51.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030.log) | +| Model | Modality | Pretrained | Backbone | Input | Resolution | top1 acc | top5 acc | ckpt | json | log | +| :----------------------------------------------------------: | -------- | ---------- | -------- | ----- | -------------- | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| 
[tsn_r50_1x1x8_100e_minikinetics_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_rgb.py) | RGB | ImageNet | ResNet50 | 8seg | short-side 320 | 77.4 | 93.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030-b4eaf92b.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030.log) | +| [tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb.py) | RGB | ImageNet | ResNet50 | 8seg | short-side 320 | 78.0 | 93.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030-23966b4b.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030.log) | +| [tsn_r50_1x1x8_100e_minikinetics_webimage_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb.py) | RGB | ImageNet | ResNet50 | 8seg | short-side 320 | 78.6 | 93.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030-66f5e046.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030.log) | +| [tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb.py) | RGB | ImageNet | ResNet50 | 8seg | short-side 320 | 80.6 | 95.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030-011f984d.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030.log) | +| [tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb.py) | RGB | ImageNet | ResNet50 | 8seg | short-side 320 | 78.6 | 93.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030-59f5d064.pth) | 
[json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030.log) | +| [tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb.py) | RGB | ImageNet | ResNet50 | 8seg | short-side 320 | 81.3 | 94.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030-0f56ef51.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030.log) | ### SlowOnly-8x8-ResNet50 -| Setting | Top-1 | Top-5 | ckpt | json | log | -| :----------: | :---: | :---: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| Baseline | 78.6 | 93.9 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030-168eb098.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030.log) | -| +GG-img | 80.8 | 95.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030-7da6dfc3.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030.log) | -| +[GG-IG]-img | 81.3 | 95.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030-c36616e9.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030.log) | -| +IG-vid | 82.4 | 95.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030-e2890e8d.pth) | 
[json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030.log) | -| +KRaw | 80.3 | 94.5 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030-62974bac.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030.log) | -| OmniSource | 82.9 | 95.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030-284cfd3b.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030.log) | +| Model | Modality | Pretrained | Backbone | Input | Resolution | top1 acc | top5 acc | ckpt | json | log | +| :----------------------------------------------------------: | -------- | ---------- | -------- | ----- | -------------- | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [slowonly_r50_8x8x1_256e_minikinetics_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 78.6 | 93.9 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030-168eb098.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030.log) | +| [slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 80.8 | 95.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030-7da6dfc3.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030.json) | 
[log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030.log) | +| [slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 81.3 | 95.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030-c36616e9.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030.log) | +| [slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 82.4 | 95.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030-e2890e8d.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030.log) | +| [slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 80.3 | 94.5 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030-62974bac.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030.log) | +| [slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 82.9 | 95.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030-284cfd3b.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030.log) | We also list the benchmark reported in the original paper, which was run on Kinetics-400, for comparison:
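For reference, a checkpoint from the tables above can be loaded through MMAction2's high-level Python API. The snippet below is a minimal illustrative sketch, not part of this patch: the demo video and label-map paths are placeholders, and the exact signature of `inference_recognizer` differs slightly between MMAction2 releases (older versions require the label file argument).

```python
# Minimal sketch: run the Mini-Kinetics OmniSource SlowOnly checkpoint listed above.
from mmaction.apis import init_recognizer, inference_recognizer

config_file = ('configs/recognition/omnisource/'
               'slowonly_r50_8x8x1_256e_minikinetics/'
               'slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb.py')
# Checkpoint URL copied from the table; init_recognizer can also load from a URL.
checkpoint = ('https://download.openmmlab.com/mmaction/recognition/omnisource/'
              'slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/'
              'slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030-284cfd3b.pth')

model = init_recognizer(config_file, checkpoint, device='cuda:0')
# 'demo/demo.mp4' and 'mini_kinetics_label_map.txt' are assumed placeholder paths.
results = inference_recognizer(model, 'demo/demo.mp4', 'mini_kinetics_label_map.txt')
for label, score in results:
    print(f'{label}: {score:.4f}')
```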
diff --git a/configs/recognition/omnisource/README_zh-CN.md b/configs/recognition/omnisource/README_zh-CN.md index 104efaa8d4..4341870945 100644 --- a/configs/recognition/omnisource/README_zh-CN.md +++ b/configs/recognition/omnisource/README_zh-CN.md @@ -16,8 +16,8 @@ MMAction2 当前公开了 4 个 OmniSource 框架训练的模型,包含 2D 架 | :------: | :--: | :------: | :-------: | :--: | :------------: | :-----------------------------------------: | :------------------------------------------: | :----------------------------------------------------------: | | TSN | RGB | ImageNet | ResNet50 | 3seg | 340x256 | 70.6 / 73.6 (+ 3.0) | 89.4 / 91.0 (+ 1.6) | [Baseline](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_imagenet_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-54192355.pth) | | TSN | RGB | IG-1B | ResNet50 | 3seg | short-side 320 | 73.1 / 75.7 (+ 2.6) | 90.4 / 91.9 (+ 1.5) | [Baseline](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_without_omni_1x1x3_kinetics400_rgb_20200926-c133dd49.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-2863fed0.pth) | -| SlowOnly | RGB | Scratch | ResNet50 | 4x16 | short-side 320 | 72.9 / 76.8 (+ 3.9) | 90.9 / 92.5 (+ 1.6) | [Baseline](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r50_omni_4x16x1_kinetics400_rgb_20200926-51b1f7ea.pth) | -| SlowOnly | RGB | Scratch | ResNet101 | 8x8 | short-side 320 | 76.5 / 80.4 (+ 3.9) | 92.7 / 94.4 (+ 1.7) | [Baseline](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_without_omni_8x8x1_kinetics400_rgb_20200926-0c730aef.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_omni_8x8x1_kinetics400_rgb_20200926-b5dbb701.pth) | +| SlowOnly | RGB | None | ResNet50 | 4x16 | short-side 320 | 72.9 / 76.8 (+ 3.9) | 90.9 / 92.5 (+ 1.6) | [Baseline](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r50_omni_4x16x1_kinetics400_rgb_20200926-51b1f7ea.pth) | +| SlowOnly | RGB | None | ResNet101 | 8x8 | short-side 320 | 76.5 / 80.4 (+ 3.9) | 92.7 / 94.4 (+ 1.7) | [Baseline](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_without_omni_8x8x1_kinetics400_rgb_20200926-0c730aef.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_omni_8x8x1_kinetics400_rgb_20200926-b5dbb701.pth) | ## Mini-Kinetics 上的基准测试 @@ -27,25 +27,25 @@ MMAction2 在公开的数据集上进行了 OmniSource 框架的基准测试, ### TSN-8seg-ResNet50 -| Setting | Top-1 | Top-5 | ckpt | json | log | -| :----------: | :---: | :---: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| Baseline | 77.4 | 93.6 | 
[ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030-b4eaf92b.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030.log) | -| +GG-img | 78.0 | 93.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030-23966b4b.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030.log) | -| +[GG-IG]-img | 78.6 | 93.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030-66f5e046.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030.log) | -| +IG-vid | 80.6 | 95.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030-011f984d.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030.log) | -| +KRaw | 78.6 | 93.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030-59f5d064.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030.log) | -| OmniSource | 81.3 | 94.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030-0f56ef51.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030.log) | +| 模型 | 模态 | 预训练 | 主干网络 | 输入 | 分辨率 | Top-1 准确率 | Top-5 准确率 | ckpt | json | log | +| :----------------------------------------------------------: | :--: | :------: | 
:------: | :--: | :------------: | :----------: | :----------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [tsn_r50_1x1x8_100e_minikinetics_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 77.4 | 93.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030-b4eaf92b.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030.log) | +| [tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 78.0 | 93.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030-23966b4b.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030.log) | +| [tsn_r50_1x1x8_100e_minikinetics_webimage_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 78.6 | 93.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030-66f5e046.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030.log) | +| [tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 80.6 | 95.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030-011f984d.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030.log) | +| [tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 78.6 | 93.2 | 
[ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030-59f5d064.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030.log) | +| [tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 81.3 | 94.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030-0f56ef51.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030.log) | ### SlowOnly-8x8-ResNet50 -| Setting | Top-1 | Top-5 | ckpt | json | log | -| :----------: | :---: | :---: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| Baseline | 78.6 | 93.9 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030-168eb098.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030.log) | -| +GG-img | 80.8 | 95.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030-7da6dfc3.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030.log) | -| +[GG-IG]-img | 81.3 | 95.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030-c36616e9.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030.log) | -| +IG-vid | 82.4 | 95.6 | 
[ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030-e2890e8d.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030.log) | -| +KRaw | 80.3 | 94.5 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030-62974bac.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030.log) | -| OmniSource | 82.9 | 95.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030-284cfd3b.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030.log) | +| 模型 | 模态 | 预训练 | 主干网络 | 输入 | 分辨率 | Top-1 准确率 | Top-5 准确率 | ckpt | json | log | +| :----------------------------------------------------------: | :--: | :----: | :------: | :--: | :------------: | :----------: | :----------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [slowonly_r50_8x8x1_256e_minikinetics_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 78.6 | 93.9 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030-168eb098.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030.log) | +| [slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 80.8 | 95.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030-7da6dfc3.pth) | 
[json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030.log) | +| [slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 81.3 | 95.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030-c36616e9.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030.log) | +| [slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 82.4 | 95.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030-e2890e8d.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030.log) | +| [slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 80.3 | 94.5 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030-62974bac.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030.log) | +| [slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 82.9 | 95.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030-284cfd3b.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030.json) |
[log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030.log) | 下表列出了原论文中在 Kinetics-400 上进行基准测试的结果供参考: diff --git a/configs/recognition/omnisource/metafile.yml b/configs/recognition/omnisource/metafile.yml new file mode 100644 index 0000000000..e8b6cf9758 --- /dev/null +++ b/configs/recognition/omnisource/metafile.yml @@ -0,0 +1,362 @@ +Collections: +- Metadata: + Training Data: null + Name: OmniSource + README: configs/recognition/omnisource/README.md +Models: +- Config: configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_rgb.py + In Collection: OmniSource + Metadata: + Architecture: ResNet50 + Epochs: 100 + Input: 3seg + Modality: RGB + Parameters: 23917832 + Pretrained: ImageNet + Resolution: short-side 320 + Training BatchSize / GPU: 12 + Training Data: MiniKinetics + Modality: RGB + Name: tsn_r50_1x1x8_100e_minikinetics_rgb + Results: + - Dataset: MiniKinetics + Metrics: + top1 acc: 77.4 + top5 acc: 93.6 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030.json + Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030.log + Weights: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030-b4eaf92b.pth +- Config: configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb.py + In Collection: OmniSource + Metadata: + Architecture: ResNet50 + Epochs: 100 + Input: 3seg + Modality: RGB + Parameters: 23917832 + Pretrained: ImageNet + Resolution: short-side 320 + Training BatchSize / GPU: 12 + Training Data: MiniKinetics + Modality: RGB + Name: tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb + Results: + - Dataset: MiniKinetics + Metrics: + top1 acc: 78.0 + top5 acc: 93.6 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030.json + Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030.log + Weights: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030-23966b4b.pth +- Config: configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb.py + In Collection: OmniSource + Metadata: + Architecture: ResNet50 + Epochs: 100 + Input: 3seg + Modality: RGB + Parameters: 23917832 + Pretrained: ImageNet + Resolution: short-side 320 + Training BatchSize / GPU: 12 + Training Data: MiniKinetics + Modality: RGB + Name: tsn_r50_1x1x8_100e_minikinetics_webimage_rgb + Results: + - Dataset: MiniKinetics + Metrics: + top1 acc: 78.6 + top5 acc: 93.6 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030.json + Training Log: 
https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030.log + Weights: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030-66f5e046.pth +- Config: configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb.py + In Collection: OmniSource + Metadata: + Architecture: ResNet50 + Epochs: 100 + Input: 3seg + Modality: RGB + Parameters: 23917832 + Pretrained: ImageNet + Resolution: short-side 320 + Training BatchSize / GPU: 12 + Training Data: MiniKinetics + Modality: RGB + Name: tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb + Results: + - Dataset: MiniKinetics + Metrics: + top1 acc: 80.6 + top5 acc: 95.0 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030.json + Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030.log + Weights: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030-011f984d.pth +- Config: configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb.py + In Collection: OmniSource + Metadata: + Architecture: ResNet50 + Epochs: 100 + Input: 3seg + Modality: RGB + Parameters: 23917832 + Pretrained: ImageNet + Resolution: short-side 320 + Training BatchSize / GPU: 12 + Training Data: MiniKinetics + Modality: RGB + Name: tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb + Results: + - Dataset: MiniKinetics + Metrics: + top1 acc: 78.6 + top5 acc: 93.2 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030.json + Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030.log + Weights: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030-59f5d064.pth +- Config: configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb.py + In Collection: OmniSource + Metadata: + Architecture: ResNet50 + Epochs: 100 + Input: 3seg + Modality: RGB + Parameters: 23917832 + Pretrained: ImageNet + Resolution: short-side 320 + Training BatchSize / GPU: 12 + Training Data: MiniKinetics + Modality: RGB + Name: tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb + Results: + - Dataset: MiniKinetics + Metrics: + top1 acc: 81.3 + top5 acc: 94.8 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030.json + Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030.log + Weights: 
https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030-0f56ef51.pth +- Config: configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_rgb.py + In Collection: OmniSource + Metadata: + Architecture: ResNet50 + Epochs: 256 + Input: 8x8 + Modality: RGB + Parameters: 32044296 + Pretrained: None + Resolution: short-side 320 + Training BatchSize / GPU: 12 + Training Data: MiniKinetics + Modality: RGB + Name: slowonly_r50_8x8x1_256e_minikinetics_rgb + Results: + - Dataset: MiniKinetics + Metrics: + top1 acc: 78.6 + top5 acc: 93.9 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030.json + Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030.log + Weights: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030-168eb098.pth +- Config: configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb.py + In Collection: OmniSource + Metadata: + Architecture: ResNet50 + Epochs: 256 + Input: 8x8 + Modality: RGB + Parameters: 32044296 + Pretrained: None + Resolution: short-side 320 + Training BatchSize / GPU: 12 + Training Data: MiniKinetics + Modality: RGB + Name: slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb + Results: + - Dataset: MiniKinetics + Metrics: + top1 acc: 80.8 + top5 acc: 95.0 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030.json + Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030.log + Weights: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030-7da6dfc3.pth +- Config: configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb.py + In Collection: OmniSource + Metadata: + Architecture: ResNet50 + Epochs: 256 + Input: 8x8 + Modality: RGB + Parameters: 32044296 + Pretrained: None + Resolution: short-side 320 + Training BatchSize / GPU: 12 + Training Data: MiniKinetics + Modality: RGB + Name: slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb + Results: + - Dataset: MiniKinetics + Metrics: + top1 acc: 81.3 + top5 acc: 95.2 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030.json + Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030.log + Weights: 
https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030-c36616e9.pth +- Config: configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb.py + In Collection: OmniSource + Metadata: + Architecture: ResNet50 + Epochs: 256 + Input: 8x8 + Modality: RGB + Parameters: 32044296 + Pretrained: None + Resolution: short-side 320 + Training BatchSize / GPU: 12 + Training Data: MiniKinetics + Modality: RGB + Name: slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb + Results: + - Dataset: MiniKinetics + Metrics: + top1 acc: 82.4 + top5 acc: 95.6 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030.json + Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030.log + Weights: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030-e2890e8d.pth +- Config: configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb.py + In Collection: OmniSource + Metadata: + Architecture: ResNet50 + Epochs: 256 + Input: 8x8 + Modality: RGB + Parameters: 32044296 + Pretrained: None + Resolution: short-side 320 + Training BatchSize / GPU: 12 + Training Data: MiniKinetics + Modality: RGB + Name: slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb + Results: + - Dataset: MiniKinetics + Metrics: + top1 acc: 80.3 + top5 acc: 94.5 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030.json + Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030.log + Weights: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030-62974bac.pth +- Config: configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb.py + In Collection: OmniSource + Metadata: + Architecture: ResNet50 + Epochs: 256 + Input: 8x8 + Modality: RGB + Parameters: 32044296 + Pretrained: None + Resolution: short-side 320 + Training BatchSize / GPU: 12 + Training Data: MiniKinetics + Modality: RGB + Name: slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb + Results: + - Dataset: MiniKinetics + Metrics: + top1 acc: 82.9 + top5 acc: 95.8 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030.json + Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030.log + Weights:
https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030-284cfd3b.pth +- Config: configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py + In Collection: OmniSource + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 24327632 + Training BatchSize / GPU: 32 + Training Data: Kinetics-400 + pretrain: ImageNet + resolution: 340x256 + Modality: RGB + Name: tsn_omnisource_r50_1x1x3_100e_kinetics_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 73.6 + top5 acc: 91.0 + Task: Action Recognition + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_imagenet_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-54192355.pth + gpu_mem(M): '8344' + inference_time(video/s): x +- Config: configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py + In Collection: OmniSource + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 24327632 + Training BatchSize / GPU: 32 + Training Data: Kinetics-400 + pretrain: IG-1B + resolution: short-side 320 + Modality: RGB + Name: tsn_IG1B_pretrained_omnisource_r50_1x1x3_100e_kinetics_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 75.7 + top5 acc: 91.9 + Task: Action Recognition + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-2863fed0.pth + gpu_mem(M): '8344' + inference_time(video/s): x +- Config: configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py + In Collection: OmniSource + Metadata: + Architecture: ResNet50 + Epochs: 256 + Parameters: 32454096 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + pretrain: None + resolution: short-side 320 + Modality: RGB + Name: slowonly_r50_omnisource_4x16x1_256e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 76.8 + top5 acc: 92.5 + Task: Action Recognition + Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r50_omni_4x16x1_kinetics400_rgb_20200926-51b1f7ea.pth +- Config: configs/recognition/slowonly/slowonly_r101_8x8x1_196e_kinetics400_rgb.py + In Collection: OmniSource + Metadata: + Architecture: ResNet101 + Epochs: 196 + Parameters: 60359120 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + pretrain: None + resolution: short-side 320 + Modality: RGB + Name: slowonly_r101_omnisource_8x8x1_196e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 80.4 + top5 acc: 94.4 + Task: Action Recognition + Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_omni_8x8x1_kinetics400_rgb_20200926-b5dbb701.pth diff --git a/configs/recognition/r2plus1d/metafile.yml b/configs/recognition/r2plus1d/metafile.yml new file mode 100644 index 0000000000..d6f7fdcb81 --- /dev/null +++ b/configs/recognition/r2plus1d/metafile.yml @@ -0,0 +1,102 @@ +Collections: +- Metadata: + Training Data: null + Name: R2Plus1D + README: configs/recognition/r2plus1d/README.md +Models: +- Config: configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py + In Collection: R2Plus1D + Metadata: + Architecture: ResNet34 + Epochs: 180 + Parameters: 63759281 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 32 + pretrain: None + resolution: short-side 256 + Modality: RGB + Name: r2plus1d_r34_8x8x1_180e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 67.3 + top5 acc: 87.65 + 
Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb/20200728_021421.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb/20200728_021421.log + Weights: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb_20200729-aa94765e.pth + gpu_mem(M): '5019' + inference_time(video/s): x +- Config: configs/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb.py + In Collection: R2Plus1D + Metadata: + Architecture: ResNet34 + Epochs: 180 + Parameters: 63759281 + Training BatchSize / GPU: 16 + Training Data: Kinetics-400 + gpus: 8 + pretrain: None + resolution: short-side 256 + Modality: RGB + Name: r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 67.3 + top5 acc: 87.8 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb/20200724_201360.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb/20200724_201360.log + Weights: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb_20200826-ab35a529.pth + gpu_mem(M): '5019' + inference_time(video/s): x +- Config: configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py + In Collection: R2Plus1D + Metadata: + Architecture: ResNet34 + Epochs: 180 + Parameters: 63759281 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 16 + pretrain: None + resolution: short-side 320 + Modality: RGB + Name: r2plus1d_r34_8x8x1_180e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 68.68 + top5 acc: 88.36 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_8x8_69.58_88.36.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r21d_8x8.log + Weights: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_8x8x1_180e_kinetics400_rgb_20200618-3fce5629.pth + gpu_mem(M): '5019' + inference_time(video/s): 1.6 (80x3 frames) +- Config: configs/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb.py + In Collection: R2Plus1D + Metadata: + Architecture: ResNet34 + Epochs: 180 + Parameters: 63759281 + Training BatchSize / GPU: 6 + Training Data: Kinetics-400 + gpus: 16 + pretrain: None + resolution: short-side 320 + Modality: RGB + Name: r2plus1d_r34_32x2x1_180e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 74.6 + top5 acc: 91.59 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r2plus1d_r34_32x2_74.6_91.6.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r21d_32x2.log + Weights: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r2plus1d_r34_32x2x1_180e_kinetics400_rgb_20200618-63462eb3.pth + gpu_mem(M): '12975' +
inference_time(video/s): 0.5 (320x3 frames) diff --git a/configs/recognition/slowfast/metafile.yml b/configs/recognition/slowfast/metafile.yml new file mode 100644 index 0000000000..dc32dac835 --- /dev/null +++ b/configs/recognition/slowfast/metafile.yml @@ -0,0 +1,198 @@ +Collections: +- Metadata: + Training Data: null + Name: SlowFast + README: configs/recognition/slowfast/README.md +Models: +- Config: configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py + In Collection: SlowFast + Metadata: + Architecture: ResNet50 + Epochs: 256 + Parameters: 34479288 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 32 + pretrain: None + resolution: short-side 256 + Modality: RGB + Name: slowfast_r50_4x16x1_256e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 74.75 + top5 acc: 91.73 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb_20200728-145f1097.pth + gpu_mem(M): '6203' + inference_time(video/s): x +- Config: configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py + In Collection: SlowFast + Metadata: + Architecture: ResNet50 + Epochs: 256 + Parameters: 34479288 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 8 + pretrain: None + resolution: short-side 256 + Modality: RGB + Name: slowfast_r50_video_4x16x1_256e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 74.34 + top5 acc: 91.58 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/slowfast_r50_video_4x16x1_256e_kinetics400_rgb_20200826-f85b90c5.pth + gpu_mem(M): '6203' + inference_time(video/s): x +- Config: configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py + In Collection: SlowFast + Metadata: + Architecture: ResNet50 + Epochs: 256 + Parameters: 34479288 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 24 + pretrain: None + resolution: short-side 320 + Modality: RGB + Name: slowfast_r50_4x16x1_256e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 75.64 + top5 acc: 92.3 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/20200704_232901.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/20200704_232901.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_256e_kinetics400_rgb_20200704-bcde7ed7.pth + gpu_mem(M): '6203' + inference_time(video/s): 1.6 ((32+4)x10x3 frames) +- Config: configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py + In Collection: SlowFast + 
Metadata: + Architecture: ResNet50 + Epochs: 256 + Parameters: 34565560 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 32 + pretrain: None + resolution: short-side 256 + Modality: RGB + Name: slowfast_r50_8x8x1_256e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 75.61 + top5 acc: 92.34 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb_20200810-863812c2.pth + gpu_mem(M): '9062' + inference_time(video/s): x +- Config: configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py + In Collection: SlowFast + Metadata: + Architecture: ResNet50 + Epochs: 256 + Parameters: 34565560 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 24 + pretrain: None + resolution: short-side 320 + Modality: RGB + Name: slowfast_r50_8x8x1_256e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 76.94 + top5 acc: 92.8 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/slowfast_r50_8x8x1_256e_kinetics400_rgb_20200716-73547d2b.pth + gpu_mem(M): '9062' + inference_time(video/s): 1.3 ((32+8)x10x3 frames) +- Config: configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py + In Collection: SlowFast + Metadata: + Architecture: ResNet101 + ResNet50 + Epochs: 256 + Parameters: 62384312 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 8 + pretrain: None + resolution: short-side 256 + Modality: RGB + Name: slowfast_r101_r50_4x16x1_256e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 76.69 + top5 acc: 93.07 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/slowfast_r101_4x16x1_256e_kinetics400_rgb_20210218-d8b58813.pth + gpu_mem(M): '16628' + inference_time(video/s): '' +- Config: configs/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb.py + In Collection: SlowFast + Metadata: + Architecture: ResNet101 + Epochs: 256 + Parameters: 62912312 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 32 + pretrain: None + resolution: short-side 256 + Modality: RGB + Name: slowfast_r101_8x8x1_256e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 77.9 + top5 acc: 93.51 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log.json + Training Log: 
https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/slowfast_r101_8x8x1_256e_kinetics400_rgb_20210218-0dd54025.pth + gpu_mem(M): '25994' + inference_time(video/s): '' +- Config: configs/recognition/slowfast/slowfast_r152_r50_4x16x1_256e_kinetics400_rgb.py + In Collection: SlowFast + Metadata: + Architecture: ResNet152 + ResNet50 + Epochs: 256 + Parameters: 84843704 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 8 + pretrain: None + resolution: short-side 256 + Modality: RGB + Name: slowfast_r152_r50_4x16x1_256e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 77.13 + top5 acc: 93.2 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/slowfast_r152_4x16x1_256e_kinetics400_rgb_20210122-bdeb6b87.pth + gpu_mem(M): '10077' + inference_time(video/s): '' diff --git a/configs/recognition/slowonly/metafile.yml b/configs/recognition/slowonly/metafile.yml new file mode 100644 index 0000000000..d2ca9732b8 --- /dev/null +++ b/configs/recognition/slowonly/metafile.yml @@ -0,0 +1,415 @@ +Collections: +- Metadata: + Training Data: null + Name: SlowOnly + README: configs/recognition/slowonly/README.md +Models: +- Config: configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py + In Collection: SlowOnly + Metadata: + Architecture: ResNet50 + Epochs: 256 + Parameters: 32454096 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + pretrain: None + resolution: short-side 320 + Modality: RGB + Name: slowonly_r50_omnisource_4x16x1_256e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 76.8 + top5 acc: 92.5 + Task: Action Recognition + Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r50_omni_4x16x1_kinetics400_rgb_20200926-51b1f7ea.pth +- Config: configs/recognition/slowonly/slowonly_r101_8x8x1_196e_kinetics400_rgb.py + In Collection: SlowOnly + Metadata: + Architecture: ResNet101 + Epochs: 196 + Parameters: 60359120 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + pretrain: None + resolution: short-side 320 + Modality: RGB + Name: slowonly_r101_8x8x1_196e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 76.5 + top5 acc: 92.7 + Task: Action Recognition + Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_without_omni_8x8x1_kinetics400_rgb_20200926-0c730aef.pth +- Config: configs/recognition/slowonly/slowonly_r101_8x8x1_196e_kinetics400_rgb.py + In Collection: SlowOnly + Metadata: + Architecture: ResNet101 + Epochs: 196 + Parameters: 60359120 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + pretrain: None + resolution: short-side 320 + Modality: RGB + Name: slowonly_r101_omnisource_8x8x1_196e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 80.4 + top5 acc: 94.4 + Task: Action Recognition + Weights: 
https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_omni_8x8x1_kinetics400_rgb_20200926-b5dbb701.pth +- Config: configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py + In Collection: SlowOnly + Metadata: + Architecture: ResNet50 + Epochs: 256 + Parameters: 32454096 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 32 + pretrain: None + resolution: short-side 256 + Modality: RGB + Name: slowonly_r50_4x16x1_256e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 72.76 + top5 acc: 90.51 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb_20200820-bea7701f.pth + gpu_mem(M): '3168' + inference_time(video/s): x +- Config: configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py + In Collection: SlowOnly + Metadata: + Architecture: ResNet50 + Epochs: 256 + Parameters: 32454096 + Training BatchSize / GPU: 24 + Training Data: Kinetics-400 + gpus: 16 + pretrain: None + resolution: short-side 320 + Modality: RGB + Name: slowonly_r50_video_4x16x1_256e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 72.9 + top5 acc: 90.82 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014-c9cdc656.pth + gpu_mem(M): '8472' + inference_time(video/s): x +- Config: configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py + In Collection: SlowOnly + Metadata: + Architecture: ResNet50 + Epochs: 256 + Parameters: 32454096 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 32 + pretrain: None + resolution: short-side 256 + Modality: RGB + Name: slowonly_r50_8x8x1_256e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 74.42 + top5 acc: 91.49 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/20200817_003320.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/20200817_003320.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb_20200820-75851a7d.pth + gpu_mem(M): '5820' + inference_time(video/s): x +- Config: configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py + In Collection: SlowOnly + Metadata: + Architecture: ResNet50 + Epochs: 256 + Parameters: 32454096 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 16 + pretrain: None + 
resolution: short-side 320 + Modality: RGB + Name: slowonly_r50_4x16x1_256e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 73.02 + top5 acc: 90.77 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16_73.02_90.77.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/so_4x16.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth + gpu_mem(M): '3168' + inference_time(video/s): 4.0 (40x3 frames) +- Config: configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py + In Collection: SlowOnly + Metadata: + Architecture: ResNet50 + Epochs: 256 + Parameters: 32454096 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 24 + pretrain: None + resolution: short-side 320 + Modality: RGB + Name: slowonly_r50_8x8x1_256e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 74.93 + top5 acc: 91.92 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/slowonly_r50_8x8_74.93_91.92.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/so_8x8.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/slowonly_r50_8x8x1_256e_kinetics400_rgb_20200703-a79c555a.pth + gpu_mem(M): '5820' + inference_time(video/s): 2.3 (80x3 frames) +- Config: configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb.py + In Collection: SlowOnly + Metadata: + Architecture: ResNet50 + Epochs: 150 + Parameters: 32454096 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 16 + pretrain: ImageNet + resolution: short-side 320 + Modality: RGB + Name: slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 73.39 + top5 acc: 91.12 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912-1e8fc736.pth + gpu_mem(M): '3168' + inference_time(video/s): x +- Config: configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb.py + In Collection: SlowOnly + Metadata: + Architecture: ResNet50 + Epochs: 150 + Parameters: 32454096 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 32 + pretrain: ImageNet + resolution: short-side 320 + Modality: RGB + Name: slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 75.55 + top5 acc: 92.04 + Task: Action Recognition + Training Json Log: 
https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912-3f9ce182.pth + gpu_mem(M): '5820' + inference_time(video/s): x +- Config: configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb.py + In Collection: SlowOnly + Metadata: + Architecture: ResNet50 + Epochs: 150 + Parameters: 39808464 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 16 + pretrain: ImageNet + resolution: short-side 320 + Modality: RGB + Name: slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 74.54 + top5 acc: 91.73 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/20210305_152630.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/20210305_152630.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb_20210308-0d6e5a69.pth + gpu_mem(M): '4435' + inference_time(video/s): x +- Config: configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb.py + In Collection: SlowOnly + Metadata: + Architecture: ResNet50 + Epochs: 150 + Parameters: 39808464 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 32 + pretrain: ImageNet + resolution: short-side 320 + Modality: RGB + Name: slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 76.07 + top5 acc: 92.42 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/20210308_212250.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/20210308_212250.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb_20210308-e8dd9e82.pth + gpu_mem(M): '8895' + inference_time(video/s): x +- Config: configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow.py + In Collection: SlowOnly + Metadata: + Architecture: ResNet50 + Epochs: 256 + Parameters: 32450960 + Training BatchSize / GPU: 24 + Training Data: Kinetics-400 + gpus: 16 + pretrain: ImageNet + resolution: short-side 320 + Modality: Flow + Name: slowonly_r50_4x16x1_256e_kinetics400_flow + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 61.79 + top5 acc: 83.62 + Task: Action Recognition + Training Json Log: 
https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_61.8_83.6.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_61.8_83.6.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_20200704-decb8568.pth + gpu_mem(M): '8450' + inference_time(video/s): x +- Config: configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow.py + In Collection: SlowOnly + Metadata: + Architecture: ResNet50 + Epochs: 196 + Parameters: 32450960 + Training BatchSize / GPU: 12 + Training Data: Kinetics-400 + gpus: 32 + pretrain: ImageNet + resolution: short-side 320 + Modality: Flow + Name: slowonly_r50_8x8x1_196e_kinetics400_flow + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 65.76 + top5 acc: 86.25 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_196e_kinetics400_flow_65.8_86.3.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_196e_kinetics400_flow_65.8_86.3.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_256e_kinetics400_flow_20200704-6b384243.pth + gpu_mem(M): '8455' + inference_time(video/s): x +- Config: configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb.py + In Collection: SlowOnly + Metadata: + Architecture: ResNet50 + Epochs: 256 + Parameters: 32863896 + Training BatchSize / GPU: 12 + Training Data: Kinetics-600 + gpus: 32 + pretrain: None + resolution: short-side 256 + Modality: RGB + Name: slowonly_r50_video_8x8x1_256e_kinetics600_rgb + Results: + - Dataset: Kinetics-600 + Metrics: + top1 acc: 77.5 + top5 acc: 93.7 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb/slowonly_r50_video_8x8x1_256e_kinetics600_rgb_20201015.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb/slowonly_r50_video_8x8x1_256e_kinetics600_rgb_20201015.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb/slowonly_r50_video_8x8x1_256e_kinetics600_rgb_20201015-81e5153e.pth +- Config: configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb.py + In Collection: SlowOnly + Metadata: + Architecture: ResNet50 + Epochs: 256 + Parameters: 33068796 + Training BatchSize / GPU: 12 + Training Data: Kinetics-700 + gpus: 32 + pretrain: None + resolution: short-side 256 + Modality: RGB + Name: slowonly_r50_video_8x8x1_256e_kinetics700_rgb + Results: + - Dataset: Kinetics-700 + Metrics: + top1 acc: 65.0 + top5 acc: 86.1 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb/slowonly_r50_video_8x8x1_256e_kinetics700_rgb_20201015.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb/slowonly_r50_video_8x8x1_256e_kinetics700_rgb_20201015.log + Weights: 
https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb/slowonly_r50_video_8x8x1_256e_kinetics700_rgb_20201015-9250f662.pth +- Config: configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb.py + In Collection: SlowOnly + Metadata: + Architecture: ResNet50 + Epochs: 120 + Parameters: 32454096 + Training BatchSize / GPU: 24 + Training Data: GYM99 + gpus: 16 + pretrain: ImageNet + resolution: short-side 256 + Modality: RGB + Name: slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb + Results: + - Dataset: GYM99 + Metrics: + mean class acc: 70.2 + top1 acc: 79.3 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111-a9c34b54.pth +- Config: configs/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow.py + In Collection: SlowOnly + Metadata: + Architecture: ResNet50 + Epochs: 120 + Parameters: 32450960 + Training BatchSize / GPU: 24 + Training Data: GYM99 + gpus: 16 + pretrain: Kinetics + resolution: short-side 256 + Modality: Flow + Name: slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow + Results: + - Dataset: GYM99 + Metrics: + mean class acc: 71.0 + top1 acc: 80.3 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111-66ecdb3c.pth diff --git a/configs/recognition/tanet/metafile.yml b/configs/recognition/tanet/metafile.yml new file mode 100644 index 0000000000..7f991f2ffc --- /dev/null +++ b/configs/recognition/tanet/metafile.yml @@ -0,0 +1,30 @@ +Collections: +- Metadata: + Training Data: null + Name: TANet + README: configs/recognition/tanet/README.md +Models: +- Config: configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb.py + In Collection: TANet + Metadata: + Architecture: TANet + Epochs: 100 + Parameters: 25590320 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 8 + pretrain: ImageNet + resolution: short-side 320 + Modality: RGB + Name: tanet_r50_dense_1x1x8_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 76.28 + top5 acc: 92.6 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.json + Training Log: 
https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.log + Weights: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219-032c8e94.pth + gpu_mem(M): '7124' + inference_time(video/s): x diff --git a/configs/recognition/tin/metafile.yml b/configs/recognition/tin/metafile.yml new file mode 100644 index 0000000000..e765a40046 --- /dev/null +++ b/configs/recognition/tin/metafile.yml @@ -0,0 +1,75 @@ +Collections: +- Metadata: + Training Data: null + Name: TIN + README: configs/recognition/tin/README.md +Models: +- Config: configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py + In Collection: TIN + Metadata: + Architecture: ResNet50 + Epochs: 40 + Parameters: 23895566 + Training BatchSize / GPU: 6 + Training Data: SthV1 + gpus: 32 + pretrain: ImageNet + resolution: height 100 + Modality: RGB + Name: tin_r50_1x1x8_40e_sthv1_rgb + Results: + - Dataset: SthV1 + Metrics: + top1 acc: 44.25 + top5 acc: 73.94 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb/20200729_034132.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb/20200729_034132.log + Weights: https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb/tin_r50_1x1x8_40e_sthv1_rgb_20200729-4a33db86.pth + gpu_mem(M): '6181' +- Config: configs/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb.py + In Collection: TIN + Metadata: + Architecture: ResNet50 + Epochs: 40 + Parameters: 23895566 + Training BatchSize / GPU: 6 + Training Data: SthV2 + gpus: 32 + pretrain: ImageNet + resolution: height 240 + Modality: RGB + Name: tin_r50_1x1x8_40e_sthv2_rgb + Results: + - Dataset: SthV2 + Metrics: + top1 acc: 56.7 + top5 acc: 83.62 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb/20200912_225451.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb/20200912_225451.log + Weights: https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb/tin_r50_1x1x8_40e_sthv2_rgb_20200912-b27a7337.pth + gpu_mem(M): '6185' +- Config: configs/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb.py + In Collection: TIN + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 24358640 + Training BatchSize / GPU: 6 + Training Data: Kinetics-400 + gpus: 32 + pretrain: TSM-Kinetics400 + resolution: short-side 256 + Modality: RGB + Name: tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 70.89 + top5 acc: 89.89 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/20200809_142447.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/20200809_142447.log + Weights: https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb_20200810-4a146a70.pth + gpu_mem(M): '6187' diff --git a/configs/recognition/tpn/metafile.yml b/configs/recognition/tpn/metafile.yml new file mode 100644 index 0000000000..ca0655ff49 --- /dev/null +++ 
b/configs/recognition/tpn/metafile.yml @@ -0,0 +1,77 @@ +Collections: +- Metadata: + Training Data: null + Name: TPN + README: configs/recognition/tpn/README.md +Models: +- Config: configs/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb.py + In Collection: TPN + Metadata: + Architecture: ResNet50 + Epochs: 150 + Parameters: 91498336 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 32 + pretrain: ImageNet + resolution: short-side 320 + Modality: RGB + Name: tpn_slowonly_r50_8x8x1_150e_kinetics_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 73.1 + top5 acc: 91.03 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/20200910_134330.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/20200910_134330.log + Weights: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb_20200910-b796d7a0.pth + gpu_mem(M): '6916' + inference_time(video/s): x +- Config: configs/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.py + In Collection: TPN + Metadata: + Architecture: ResNet50 + Epochs: 150 + Parameters: 91498336 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 32 + pretrain: ImageNet + resolution: short-side 320 + Modality: RGB + Name: tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 76.2 + top5 acc: 92.44 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/20200923_151919.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/20200923_151919.log + Weights: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb_20200923-52629684.pth + gpu_mem(M): '6916' + inference_time(video/s): x +- Config: configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py + In Collection: TPN + Metadata: + Architecture: ResNet50 + Epochs: 150 + Parameters: 82445724 + Training BatchSize / GPU: 8 + Training Data: SthV1 + gpus: 48 + pretrain: TSM + resolution: height 100 + Modality: RGB + Name: tpn_tsm_r50_1x1x8_150e_sthv1_rgb + Results: + - Dataset: SthV1 + Metrics: + top1 acc: 50.8 + top5 acc: 79.05 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/20210311_162636.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/20210311_162636.log + Weights: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/tpn_tsm_r50_1x1x8_150e_sthv1_rgb_20210311-28de4cd5.pth + gpu_mem(M): '8828' diff --git a/configs/recognition/trn/metafile.yml b/configs/recognition/trn/metafile.yml new file mode 100644 index 0000000000..c4e5586939 --- /dev/null +++ b/configs/recognition/trn/metafile.yml @@ -0,0 +1,56 @@ +Collections: +- Metadata: + Training Data: null + Name: TRN + README: configs/recognition/trn/README.md +Models: +- Config: configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py + In Collection: TRN + Metadata: + Architecture: ResNet50 + Epochs: 50 
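(Aside: several entries pair a Name with a Config whose filename stem differs; the omnisource variants do this deliberately, but it can also be a copy-paste slip. A minimal sketch of a hand-editing aid, assuming PyYAML; it only warns, since config reuse can be intentional.)
import os.path as osp
import yaml
def warn_name_config_drift(metafile):
    # Flag entries whose Name does not match the Config filename stem.
    with open(metafile) as f:
        meta = yaml.safe_load(f)
    for model in meta.get('Models', []):
        stem = osp.splitext(osp.basename(model['Config']))[0]
        if model['Name'] != stem:
            print(f"{metafile}: Name '{model['Name']}' vs Config stem '{stem}'")
warn_name_config_drift('configs/recognition/trn/metafile.yml')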
+ Parameters: 26641154 + Training BatchSize / GPU: 16 + Training Data: SthV1 + gpus: 8 + pretrain: ImageNet + resolution: height 100 + Modality: RGB + Name: trn_r50_1x1x8_50e_sthv1_rgb + Results: + - Dataset: SthV1 + Metrics: + top1 acc (accurate): 33.88 + top1 acc (efficient): 31.62 + top5 acc (accurate): 62.12 + top5 acc (efficient): 60.01 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log + Weights: https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/trn_r50_1x1x8_50e_sthv1_rgb_20210401-163704a8.pth + gpu_mem(M): '11010' +- Config: configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py + In Collection: TRN + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 26641154 + Training BatchSize / GPU: 16 + Training Data: SthV2 + gpus: 8 + pretrain: ImageNet + resolution: height 100 + Modality: RGB + Name: trn_r50_1x1x8_50e_sthv2_rgb + Results: + - Dataset: SthV2 + Metrics: + top1 acc (accurate): 47.96 + top1 acc (efficient): 45.14 + top5 acc (accurate): 75.97 + top5 acc (efficient): 73.21 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210326_103951.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210326_103951.log + Weights: https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/trn_r50_1x1x8_50e_sthv2_rgb_20210401-773eca7b.pth + gpu_mem(M): '11010' diff --git a/configs/recognition/tsm/metafile.yml b/configs/recognition/tsm/metafile.yml new file mode 100644 index 0000000000..6c099238aa --- /dev/null +++ b/configs/recognition/tsm/metafile.yml @@ -0,0 +1,687 @@ +Collections: +- Metadata: + Training Data: null + Name: TSM + README: configs/recognition/tsm/README.md +Models: +- Config: configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 24327632 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 8 + pretrain: ImageNet + resolution: 340x256 + Modality: RGB + Name: tsm_r50_1x1x8_50e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 70.24 + top5 acc: 89.56 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20200607-af7fb746.pth + gpu_mem(M): '7079' + inference_time(video/s): 74.0 (8x1 frames) +- Config: configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 24327632 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 8 + pretrain: ImageNet + resolution: short-side 256 + Modality: RGB + Name: tsm_r50_1x1x8_50e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 70.59 + top5 acc: 89.52 + Task: Action Recognition + Training Json Log: 
https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/tsm_r50_256p_1x1x8_50e_kinetics400_rgb_20200726-020785e2.pth + gpu_mem(M): '7079' + inference_time(video/s): x +- Config: configs/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 24327632 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 8 + pretrain: ImageNet + resolution: short-side 256 + Modality: RGB + Name: tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 70.48 + top5 acc: 89.4 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219-bf96e6cc.pth + gpu_mem(M): '7076' + inference_time(video/s): x +- Config: configs/recognition/tsm/tsm_r50_video_1x1x8_50e_kinetics400_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 24327632 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 8 + pretrain: ImageNet + resolution: short-side 256 + Modality: RGB + Name: tsm_r50_video_1x1x8_50e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 70.25 + top5 acc: 89.66 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_1x1x8_100e_kinetics400_rgb_20200702-a77f4328.pth + gpu_mem(M): '7077' + inference_time(video/s): 74.0 (8x1 frames) +- Config: configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 24327632 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 32 + pretrain: ImageNet + resolution: 340x256 + Modality: RGB + Name: tsm_r50_dense_1x1x8_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 72.9 + top5 acc: 90.44 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20200626_213415.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20200626_213415.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/tsm_r50_dense_1x1x8_100e_kinetics400_rgb_20200626-91a54551.pth + gpu_mem(M): '7079' + inference_time(video/s): 11.5
(8x10 frames) +- Config: configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 24327632 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 8 + pretrain: ImageNet + resolution: short-side 256 + Modality: RGB + Name: tsm_r50_dense_1x1x8_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 73.38 + top5 acc: 91.02 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/20200725_032043.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/20200725_032043.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb_20200727-e1e0c785.pth + gpu_mem(M): '7079' + inference_time(video/s): x +- Config: configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 24327632 + Training BatchSize / GPU: 6 + Training Data: Kinetics-400 + gpus: 8 + pretrain: ImageNet + resolution: 340x256 + Modality: RGB + Name: tsm_r50_1x1x16_50e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 72.09 + top5 acc: 90.37 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/tsm_r50_340x256_1x1x16_50e_kinetics400_rgb_20201011-2f27f229.pth + gpu_mem(M): '10404' + inference_time(video/s): 47.0 (16x1 frames) +- Config: configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 24327632 + Training BatchSize / GPU: 6 + Training Data: Kinetics-400 + gpus: 32 + pretrain: ImageNet + resolution: short-side 256 + Modality: RGB + Name: tsm_r50_1x1x16_50e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 71.89 + top5 acc: 90.73 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/tsm_r50_256p_1x1x16_50e_kinetics400_rgb_20201010-85645c2a.pth + gpu_mem(M): '10398' + inference_time(video/s): x +- Config: configs/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 31682000 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 32 + pretrain: ImageNet + resolution: short-side 320 + Modality: RGB + Name: tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 72.03 + top5 acc: 90.25 + Task: Action Recognition + Training Json Log: 
https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb_20200724-f00f1336.pth + gpu_mem(M): '8931' + inference_time(video/s): x +- Config: configs/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 28007888 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 32 + pretrain: ImageNet + resolution: short-side 320 + Modality: RGB + Name: tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 70.7 + top5 acc: 89.9 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb_20200816-b93fd297.pth + gpu_mem(M): '10125' + inference_time(video/s): x +- Config: configs/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 31682000 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 32 + pretrain: ImageNet + resolution: short-side 320 + Modality: RGB + Name: tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 71.6 + top5 acc: 90.34 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb_20200724-d8ad84d2.pth + gpu_mem(M): '8358' + inference_time(video/s): x +- Config: configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py + In Collection: TSM + Metadata: + Architecture: MobileNetV2 + Epochs: 100 + Parameters: 2736272 + Training BatchSize / GPU: 8 + Training Data: Kinetics-400 + gpus: 8 + pretrain: ImageNet + resolution: short-side 320 + Modality: RGB + Name: tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 68.46 + top5 acc: 88.64 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/tsm_mobilenetv2_dense_320p_1x1x8_100e_kinetics400_rgb_20210202-61135809.pth + gpu_mem(M): '3385' + 
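(Aside: each entry carries the same columns that the README model-zoo tables in this series display, so a table row can be generated instead of typed. A sketch under the assumption of this column order; the sample dict copies the MobileNetV2 entry above.)
def to_readme_row(model):
    # Render one metafile entry as a Markdown model-zoo table row.
    result = model['Results'][0]
    metrics = result['Metrics']
    return '| {} | {} | {} | {} | [ckpt]({}) |'.format(
        model['Name'], model['Metadata']['resolution'],
        metrics['top1 acc'], metrics['top5 acc'], model['Weights'])
print(to_readme_row({
    'Name': 'tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb',
    'Metadata': {'resolution': 'short-side 320'},
    'Results': [{'Metrics': {'top1 acc': 68.46, 'top5 acc': 88.64}}],
    'Weights': 'https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/tsm_mobilenetv2_dense_320p_1x1x8_100e_kinetics400_rgb_20210202-61135809.pth',
}))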
inference_time(video/s): x +- Config: configs/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 23606384 + Training BatchSize / GPU: 8 + Training Data: Diving48 + gpus: 8 + pretrain: ImageNet + Modality: RGB + Name: tsm_r50_video_1x1x8_50e_diving48_rgb + Results: + - Dataset: Diving48 + Metrics: + top1 acc: 75.99 + top5 acc: 97.16 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/tsm_r50_video_1x1x8_50e_diving48_rgb_20210426-aba5aa3d.pth + gpu_mem(M): '7070' +- Config: configs/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 23606384 + Training BatchSize / GPU: 4 + Training Data: Diving48 + gpus: 8 + pretrain: ImageNet + Modality: RGB + Name: tsm_r50_video_1x1x16_50e_diving48_rgb + Results: + - Dataset: Diving48 + Metrics: + top1 acc: 81.62 + top5 acc: 97.66 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/tsm_r50_video_1x1x16_50e_diving48_rgb_20210426-aa9631c0.pth + gpu_mem(M): '7070' +- Config: configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 23864558 + Training BatchSize / GPU: 8 + Training Data: SthV1 + gpus: 8 + pretrain: ImageNet + resolution: height 100 + Modality: RGB + Name: tsm_r50_1x1x8_50e_sthv1_rgb + Results: + - Dataset: SthV1 + Metrics: + top1 acc (accurate): 47.7 + top1 acc (efficient): 45.58 + top5 acc (accurate): 76.12 + top5 acc (efficient): 75.02 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/20210203_150227.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/20210203_150227.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/tsm_r50_1x1x8_50e_sthv1_rgb_20210203-01dce462.pth + gpu_mem(M): '7077' + reference top1 acc (efficient/accurate): '[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' + reference top5 acc (efficient/accurate): '[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' +- Config: configs/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 23864558 + Training BatchSize / GPU: 8 + Training Data: SthV1 + gpus: 8 + pretrain: ImageNet + resolution: height 100 + Modality: RGB + Name: tsm_r50_flip_1x1x8_50e_sthv1_rgb + Results: + - Dataset: SthV1 + Metrics: + top1 acc (accurate): 48.51 + top1 acc (efficient): 47.1 + top5 acc (accurate): 77.56 + top5 acc (efficient): 76.02 + Task: Action 
Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/20210203_145829.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/20210203_145829.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/tsm_r50_flip_1x1x8_50e_sthv1_rgb_20210203-12596f16.pth + gpu_mem(M): '7077' + reference top1 acc (efficient/accurate): '[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' + reference top5 acc (efficient/accurate): '[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' +- Config: configs/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 23864558 + Training BatchSize / GPU: 8 + Training Data: SthV1 + gpus: 8 + pretrain: ImageNet + resolution: height 100 + Modality: RGB + Name: tsm_r50_randaugment_1x1x8_50e_sthv1_rgb + Results: + - Dataset: SthV1 + Metrics: + top1 acc (accurate): 48.9 + top1 acc (efficient): 47.16 + top5 acc (accurate): 77.92 + top5 acc (efficient): 76.07 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb_20210324-481268d9.pth + gpu_mem(M): '7077' + reference top1 acc (efficient/accurate): '[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' + reference top5 acc (efficient/accurate): '[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' +- Config: configs/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 23864558 + Training BatchSize / GPU: 8 + Training Data: SthV1 + gpus: 8 + pretrain: ImageNet + resolution: height 100 + Modality: RGB + Name: tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb + Results: + - Dataset: SthV1 + Metrics: + top1 acc (accurate): 50.31 + top1 acc (efficient): 47.85 + top5 acc (accurate): 78.18 + top5 acc (efficient): 76.78 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb_20210324-76937692.pth + gpu_mem(M): '7077' + reference top1 acc (efficient/accurate): '[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' + reference top5 acc (efficient/accurate): '[74.34 / 
76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' +- Config: configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 23864558 + Training BatchSize / GPU: 6 + Training Data: SthV1 + gpus: 8 + pretrain: ImageNet + resolution: height 100 + Modality: RGB + Name: tsm_r50_1x1x16_50e_sthv1_rgb + Results: + - Dataset: SthV1 + Metrics: + top1 acc (accurate): 49.28 + top1 acc (efficient): 47.62 + top5 acc (accurate): 77.82 + top5 acc (efficient): 76.63 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/20201010_221240.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/20201010_221240.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb_20201010-17fa49f6.pth + gpu_mem(M): '10390' + reference top1 acc (efficient/accurate): '[47.05 / 48.61](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' + reference top5 acc (efficient/accurate): '[76.40 / 77.96](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' +- Config: configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet101 + Epochs: 50 + Parameters: 42856686 + Training BatchSize / GPU: 8 + Training Data: SthV1 + gpus: 8 + pretrain: ImageNet + resolution: height 100 + Modality: RGB + Name: tsm_r101_1x1x8_50e_sthv1_rgb + Results: + - Dataset: SthV1 + Metrics: + top1 acc (accurate): 48.43 + top1 acc (efficient): 45.72 + top5 acc (accurate): 76.72 + top5 acc (efficient): 74.67 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/20201010_224055.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/20201010_224055.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb_20201010-43fedf2e.pth + gpu_mem(M): '9800' + reference top1 acc (efficient/accurate): '[46.64 / 48.13](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' + reference top5 acc (efficient/accurate): '[75.40 / 77.31](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' +- Config: configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 23864558 + Training BatchSize / GPU: 6 + Training Data: SthV2 + gpus: 8 + pretrain: ImageNet + resolution: height 240 + Modality: RGB + Name: tsm_r50_1x1x8_50e_sthv2_rgb + Results: + - Dataset: SthV2 + Metrics: + top1 acc (accurate): 61.12 + top1 acc (efficient): 57.86 + top5 acc (accurate): 86.26 + top5 acc (efficient): 84.67 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20200912_140737.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20200912_140737.log + Weights:
https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/tsm_r50_1x1x8_50e_sthv2_rgb_20200912-033c4ac6.pth + gpu_mem(M): '7069' + reference top1 acc (efficient/accurate): '[57.98 / 60.69](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' + reference top5 acc (efficient/accurate): '[84.57 / 86.28](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' +- Config: configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 23864558 + Training BatchSize / GPU: 6 + Training Data: SthV2 + gpus: 8 + pretrain: ImageNet + resolution: height 256 + Modality: RGB + Name: tsm_r50_1x1x8_50e_sthv2_rgb + Results: + - Dataset: SthV2 + Metrics: + top1 acc (accurate): 63.84 + top1 acc (efficient): 60.79 + top5 acc (accurate): 88.3 + top5 acc (efficient): 86.6 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210401_143656.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210401_143656.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/tsm_r50_256h_1x1x8_50e_sthv2_rgb_20210401-df97f3e1.pth + gpu_mem(M): '7069' + reference top1 acc (efficient/accurate): '[xx / 61.2](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' + reference top5 acc (efficient/accurate): '[xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' +- Config: configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 23864558 + Training BatchSize / GPU: 6 + Training Data: SthV2 + gpus: 8 + pretrain: ImageNet + resolution: height 240 + Modality: RGB + Name: tsm_r50_1x1x16_50e_sthv2_rgb + Results: + - Dataset: SthV2 + Metrics: + top1 acc (accurate): 62.04 + top1 acc (efficient): 59.93 + top5 acc (accurate): 87.35 + top5 acc (efficient): 86.1 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20201010_224215.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20201010_224215.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/tsm_r50_1x1x16_50e_sthv2_rgb_20201010-16469c6f.pth + gpu_mem(M): '10400' + reference top1 acc (efficient/accurate): '[58.90 / 60.98](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' + reference top5 acc (efficient/accurate): '[85.29 / 86.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' +- Config: configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 23864558 + Training BatchSize / GPU: 6 + Training Data: SthV2 + gpus: 8 + pretrain: ImageNet + resolution: height 256 + Modality: RGB + Name: tsm_r50_1x1x16_50e_sthv2_rgb + Results: + - Dataset: SthV2 + Metrics: + top1 acc (accurate): 63.19 + top1 acc (efficient): 61.06 + top5 acc (accurate): 87.93 + top5 acc (efficient): 86.66 + Task: Action Recognition + Training Json Log:
https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20210331_134458.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20210331_134458.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/tsm_r50_256h_1x1x16_50e_sthv2_rgb_20210331-0a45549c.pth + gpu_mem(M): '10400' + reference top1 acc (efficient/accurate): '[xx / 63.1](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' + reference top5 acc (efficient/accurate): '[xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' +- Config: configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet101 + Epochs: 50 + Parameters: 42856686 + Training BatchSize / GPU: 8 + Training Data: SthV2 + gpus: 8 + pretrain: ImageNet + resolution: height 240 + Modality: RGB + Name: tsm_r101_1x1x8_50e_sthv2_rgb + Results: + - Dataset: SthV2 + Metrics: + top1 acc (accurate): 61.51 + top1 acc (efficient): 58.59 + top5 acc (accurate): 86.9 + top5 acc (efficient): 85.07 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20201010_224100.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20201010_224100.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/tsm_r101_1x1x8_50e_sthv2_rgb_20201010-98cdedb8.pth + gpu_mem(M): '9784' + reference top1 acc (efficient/accurate): '[58.89 / 61.36](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' + reference top5 acc (efficient/accurate): '[85.14 / 87.00](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' +- Config: configs/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 23864558 + Training BatchSize / GPU: 8 + Training Data: SthV1 + gpus: 8 + pretrain: ImageNet + resolution: height 100 + Modality: RGB + Name: tsm_r50_mixup_1x1x8_50e_sthv1_rgb + Results: + - Dataset: SthV1 + Metrics: + top1 acc (accurate): 48.49 + top1 acc (efficient): 46.35 + top5 acc (accurate): 76.88 + top5 acc (efficient): 75.07 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb-9eca48e5.pth + delta top1 acc (efficient/accurate): +0.77 / +0.79 + delta top5 acc (efficient/accurate): +0.05 / +0.70 +- Config: configs/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 23864558 + Training BatchSize / GPU: 8 + Training Data: SthV1 + gpus: 8 + pretrain: ImageNet + resolution: height 100 + Modality: RGB + Name: tsm_r50_cutmix_1x1x8_50e_sthv1_rgb + Results: + - Dataset: SthV1 + Metrics: + top1 acc (accurate): 47.46 + top1 acc (efficient): 45.92 + top5 acc (accurate): 76.71 + top5 acc 
(efficient): 75.23 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb-34934615.pth + delta top1 acc (efficient/accurate): +0.34 / -0.24 + delta top5 acc (efficient/accurate): +0.21 / +0.59 diff --git a/configs/recognition/tsn/metafile.yml b/configs/recognition/tsn/metafile.yml new file mode 100644 index 0000000000..0c0a00aac1 --- /dev/null +++ b/configs/recognition/tsn/metafile.yml @@ -0,0 +1,942 @@ +Collections: +- Metadata: + Training Data: null + Name: TSN + README: configs/recognition/tsn/README.md +Models: +- Config: configs/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 75 + Parameters: 23714981 + Training BatchSize / GPU: 32 + Training Data: UCF101 + gpus: 8 + pretrain: ImageNet + Modality: RGB + Name: tsn_r50_1x1x3_75e_ucf101_rgb + Results: + - Dataset: UCF101 + Metrics: + top1 acc: 83.03 + top5 acc: 96.78 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb/tsn_r50_1x1x3_75e_ucf101_rgb_20201023.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb/tsn_r50_1x1x3_75e_ucf101_rgb_20201023.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb/tsn_r50_1x1x3_75e_ucf101_rgb_20201023-d85ab600.pth + config: '[tsn_r50_1x1x3_75e_ucf101_rgb](/configs/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb.py)' + gpu_mem(M): '8332' +- Config: configs/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 23606384 + Training BatchSize / GPU: 8 + Training Data: Diving48 + gpus: 8 + pretrain: ImageNet + Modality: RGB + Name: tsn_r50_video_1x1x8_100e_diving48_rgb + Results: + - Dataset: Diving48 + Metrics: + top1 acc: 71.27 + top5 acc: 95.74 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/20210426_014138.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/20210426_014138.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/tsn_r50_video_1x1x8_100e_diving48_rgb_20210426-6dde0185.pth + gpu_mem(M): '5699' +- Config: configs/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 23606384 + Training BatchSize / GPU: 4 + Training Data: Diving48 + gpus: 8 + pretrain: ImageNet + Modality: RGB + Name: tsn_r50_video_1x1x16_100e_diving48_rgb + Results: + - Dataset: Diving48 + Metrics: + top1 acc: 76.75 + top5 acc: 96.95 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/20210426_014103.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/20210426_014103.log + Weights: 
https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/tsn_r50_video_1x1x16_100e_diving48_rgb_20210426-63c5f2f7.pth + gpu_mem(M): '5705' +- Config: configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 23612531 + Training BatchSize / GPU: 32 + Training Data: HMDB51 + gpus: 8 + pretrain: ImageNet + Modality: RGB + Name: tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb + Results: + - Dataset: HMDB51 + Metrics: + top1 acc: 48.95 + top5 acc: 80.19 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb/20201025_231108.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb/20201025_231108.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb_20201123-ce6c27ed.pth + gpu_mem(M): '21535' +- Config: configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 23612531 + Training BatchSize / GPU: 32 + Training Data: HMDB51 + gpus: 8 + pretrain: Kinetics400 + Modality: RGB + Name: tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb + Results: + - Dataset: HMDB51 + Metrics: + top1 acc: 56.08 + top5 acc: 84.31 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb/20201108_190805.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb/20201108_190805.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb_20201123-7f84701b.pth + gpu_mem(M): '21535' +- Config: configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 23612531 + Training Data: HMDB51 + gpus: 8 + pretrain: Moments + Modality: RGB + Name: tsn_r50_1x1x8_50e_hmdb51_mit_rgb + Results: + - Dataset: HMDB51 + Metrics: + top1 acc: 54.25 + top5 acc: 83.86 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb/20201112_170135.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb/20201112_170135.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb/tsn_r50_1x1x8_50e_hmdb51_mit_rgb_20201123-01526d41.pth + gpu_mem(M): '21535' +- Config: configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 24327632 + Training BatchSize / GPU: 32 + Training Data: Kinetics-400 + gpus: 8 + pretrain: ImageNet + resolution: 340x256 + Modality: RGB + Name: tsn_r50_1x1x3_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 70.6 + top5 acc: 89.26 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log + Weights: 
https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth + gpu_mem(M): '8344' + inference_time(video/s): 4.3 (25x10 frames) +- Config: configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 24327632 + Training BatchSize / GPU: 32 + Training Data: Kinetics-400 + gpus: 8 + pretrain: ImageNet + resolution: short-side 256 + Modality: RGB + Name: tsn_r50_1x1x3_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 70.42 + top5 acc: 89.03 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/tsn_r50_256p_1x1x3_100e_kinetics400_rgb_20200725-22592236.pth + gpu_mem(M): '8343' + inference_time(video/s): x +- Config: configs/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 24327632 + Training BatchSize / GPU: 16 + Training Data: Kinetics-400 + gpus: 24 + pretrain: ImageNet + resolution: 340x256 + Modality: RGB + Name: tsn_r50_dense_1x1x5_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 70.18 + top5 acc: 89.1 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb/20200627_105310.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb/20200627_105310.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb/tsn_r50_dense_1x1x5_100e_kinetics400_rgb_20200627-a063165f.pth + gpu_mem(M): '7028' + inference_time(video/s): 12.7 (8x10 frames) +- Config: configs/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 24327632 + Training BatchSize / GPU: 32 + Training Data: Kinetics-400 + gpus: 16 + pretrain: ImageNet + resolution: short-side 320 + Modality: RGB + Name: tsn_r50_320p_1x1x3_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 70.91 + top5 acc: 89.51 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_320p_1x1x3_100e_kinetics400_rgb_20200702-cc665e2a.pth + gpu_mem(M): '8344' + inference_time(video/s): 10.7 (25x3 frames) +- Config: configs/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 110 + Parameters: 24327632 + Training BatchSize / GPU: 32 + Training Data: Kinetics-400 + gpus: 16 + pretrain: ImageNet + resolution: short-side 320 + Modality: Flow + Name: tsn_r50_320p_1x1x3_110e_kinetics400_flow + Results: + -
Dataset: Kinetics-400 + Metrics: + top1 acc: 55.7 + top5 acc: 79.85 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow/tsn_r50_f3_kinetics400_flow_shortedge_55.7_79.9.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow/tsn_r50_f3_kinetics400_flow_shortedge_55.7_79.9.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow/tsn_r50_320p_1x1x3_110e_kinetics400_flow_20200705-3036bab6.pth + gpu_mem(M): '8471' + inference_time(video/s): x +- Config: configs/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 24327632 + Training BatchSize / GPU: 12 + Training Data: Kinetics-400 + gpus: 8 + pretrain: ImageNet + resolution: short-side 256 + Modality: RGB + Name: tsn_r50_1x1x8_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 71.8 + top5 acc: 90.17 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x8_100e_kinetics400_rgb/20200815_173413.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x8_100e_kinetics400_rgb/20200815_173413.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x8_100e_kinetics400_rgb/tsn_r50_256p_1x1x8_100e_kinetics400_rgb_20200817-883baf16.pth + gpu_mem(M): '8343' + inference_time(video/s): x +- Config: configs/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 24327632 + Training BatchSize / GPU: 12 + Training Data: Kinetics-400 + gpus: 24 + pretrain: ImageNet + resolution: short-side 320 + Modality: RGB + Name: tsn_r50_320p_1x1x8_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 72.41 + top5 acc: 90.55 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb/tsn_r50_f8_kinetics400_shortedge_72.4_90.6.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb/tsn_r50_f8_kinetics400_shortedge_72.4_90.6.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb/tsn_r50_320p_1x1x8_100e_kinetics400_rgb_20200702-ef80e3d7.pth + gpu_mem(M): '8344' + inference_time(video/s): 11.1 (25x3 frames) +- Config: configs/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 110 + Parameters: 24327632 + Training BatchSize / GPU: 12 + Training Data: Kinetics-400 + gpus: 32 + pretrain: ImageNet + resolution: short-side 320 + Modality: Flow + Name: tsn_r50_320p_1x1x8_110e_kinetics400_flow + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 57.76 + top5 acc: 80.99 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow/tsn_r50_f8_kinetics400_flow_shortedge_57.8_81.0.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow/tsn_r50_f8_kinetics400_flow_shortedge_57.8_81.0.log + Weights: 
https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow/tsn_r50_320p_1x1x8_110e_kinetics400_flow_20200705-1f39486b.pth + gpu_mem(M): '8473' + inference_time(video/s): x +- Config: configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 24327632 + Training BatchSize / GPU: 32 + Training Data: Kinetics-400 + gpus: 8 + pretrain: ImageNet + resolution: short-side 320 + Modality: RGB + Name: tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 71.11 + top5 acc: 90.04 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014-5ae1ee79.pth + gpu_mem(M): '8343' + inference_time(video/s): x +- Config: configs/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 24327632 + Training BatchSize / GPU: 12 + Training Data: Kinetics-400 + gpus: 8 + pretrain: ImageNet + resolution: 340x256 + Modality: RGB + Name: tsn_r50_dense_1x1x8_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 70.77 + top5 acc: 89.3 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/20200606_003901.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/20200606_003901.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_dense_1x1x8_100e_kinetics400_rgb_20200606-e925e6e3.pth + gpu_mem(M): '8344' + inference_time(video/s): 12.2 (8x10 frames) +- Config: configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 24327632 + Training BatchSize / GPU: 32 + Training Data: Kinetics-400 + gpus: 8 + pretrain: ImageNet + resolution: short-side 256 + Modality: RGB + Name: tsn_r50_video_1x1x8_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 71.79 + top5 acc: 90.25 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_1x1x8_100e_kinetics400_rgb_20200702-568cde33.pth + gpu_mem(M): '21558' + inference_time(video/s): x +- Config: configs/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 24327632 + Training BatchSize / GPU: 32 + Training Data: Kinetics-400 + gpus: 8 + pretrain: 
ImageNet + resolution: short-side 256 + Modality: RGB + Name: tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 70.4 + top5 acc: 89.12 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_dense_100e_kinetics400_rgb.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_dense_100e_kinetics400_rgb.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb_20200703-0f19175f.pth + gpu_mem(M): '21553' + inference_time(video/s): x +- Config: configs/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNeXt101-32x4d [[MMCls](https://github.com/open-mmlab/mmclassification/tree/master/configs/resnext)] + Epochs: 100 + Parameters: 42948304 + Training BatchSize / GPU: 16 + Training Data: Kinetics-400 + gpus: 16 + pretrain: ImageNet + resolution: short-side 320 + Modality: RGB + Name: tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 73.43 + top5 acc: 91.01 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb-16a8b561.pth +- Config: configs/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.py + In Collection: TSN + Metadata: + Architecture: DenseNet161 [[TorchVision](https://github.com/pytorch/vision/)] + Epochs: 100 + Parameters: 27355600 + Training BatchSize / GPU: 12 + Training Data: Kinetics-400 + gpus: 16 + pretrain: ImageNet + resolution: short-side 320 + Modality: RGB + Name: tsn_dense161_320p_1x1x3_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 72.78 + top5 acc: 90.75 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb-cbe85332.pth +- Config: configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 24327632 + Training BatchSize / GPU: 32 + Training Data: Kinetics-400 + pretrain: ImageNet + resolution: 340x256 + Modality: RGB + Name: tsn_omnisource_r50_1x1x3_100e_kinetics_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 73.6 + top5 acc: 91.0 + Task: Action Recognition + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_imagenet_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-54192355.pth +
gpu_mem(M): '8344' + inference_time(video/s): x +- Config: configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 24327632 + Training BatchSize / GPU: 32 + Training Data: Kinetics-400 + pretrain: IG-1B + resolution: short-side 320 + Modality: RGB + Name: tsn_IG1B_pretrained_r50_1x1x3_100e_kinetics_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 73.1 + top5 acc: 90.4 + Task: Action Recognition + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_without_omni_1x1x3_kinetics400_rgb_20200926-c133dd49.pth + gpu_mem(M): '8344' + inference_time(video/s): x +- Config: configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 24327632 + Training BatchSize / GPU: 32 + Training Data: Kinetics-400 + pretrain: IG-1B + resolution: short-side 320 + Modality: RGB + Name: tsn_IG1B_pretrained_omnisource_r50_1x1x3_100e_kinetics_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 75.7 + top5 acc: 91.9 + Task: Action Recognition + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-2863fed0.pth + gpu_mem(M): '8344' + inference_time(video/s): x +- Config: configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 24737432 + Training BatchSize / GPU: 12 + Training Data: Kinetics-600 + gpus: 16 + pretrain: ImageNet + resolution: short-side 256 + Modality: RGB + Name: tsn_r50_video_1x1x8_100e_kinetics600_rgb + Results: + - Dataset: Kinetics-600 + Metrics: + top1 acc: 74.8 + top5 acc: 92.3 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb/tsn_r50_video_1x1x8_100e_kinetics600_rgb_20201015.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb/tsn_r50_video_1x1x8_100e_kinetics600_rgb_20201015.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb/tsn_r50_video_1x1x8_100e_kinetics600_rgb_20201015-4db3c461.pth + gpu_mem(M): '8344' + inference_time(video/s): 11.1 (25x3 frames) +- Config: configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 24942332 + Training BatchSize / GPU: 12 + Training Data: Kinetics-700 + gpus: 16 + pretrain: ImageNet + resolution: short-side 256 + Modality: RGB + Name: tsn_r50_video_1x1x8_100e_kinetics700_rgb + Results: + - Dataset: Kinetics-700 + Metrics: + top1 acc: 61.7 + top5 acc: 83.6 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb/tsn_r50_video_1x1x8_100e_kinetics700_rgb_20201015.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb/tsn_r50_video_1x1x8_100e_kinetics700_rgb_20201015.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb/tsn_r50_video_1x1x8_100e_kinetics700_rgb_20201015-e381a6c7.pth + gpu_mem(M): '8344' + inference_time(video/s): 11.1 (25x3 frames) +- Config: configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb.py + In Collection: TSN 
+ Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 23864558 + Training BatchSize / GPU: 16 + Training Data: SthV1 + gpus: 8 + pretrain: ImageNet + resolution: height 100 + Modality: RGB + Name: tsn_r50_1x1x8_50e_sthv1_rgb + Results: + - Dataset: SthV1 + Metrics: + top1 acc: 18.55 + top5 acc: 44.8 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_r50_f8_sthv1_18.1_45.0.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_sthv1.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_r50_1x1x8_50e_sthv1_rgb_20200618-061b9195.pth + gpu_mem(M): '10978' +- Config: configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 23864558 + Training BatchSize / GPU: 4 + Training Data: SthV1 + gpus: 8 + pretrain: ImageNet + resolution: height 100 + Modality: RGB + Name: tsn_r50_1x1x16_50e_sthv1_rgb + Results: + - Dataset: SthV1 + Metrics: + top1 acc: 15.77 + top5 acc: 39.85 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/20200614_211932.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/20200614_211932.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/tsn_r50_1x1x16_50e_sthv1_rgb_20200614-7e2fe4f1.pth + gpu_mem(M): '5691' +- Config: configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 23864558 + Training BatchSize / GPU: 16 + Training Data: SthV2 + gpus: 8 + pretrain: ImageNet + resolution: height 240 + Modality: RGB + Name: tsn_r50_1x1x8_50e_sthv2_rgb + Results: + - Dataset: SthV2 + Metrics: + top1 acc: 32.97 + top5 acc: 63.62 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20200915_114139.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20200915_114139.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/tsn_r50_1x1x8_50e_sthv2_rgb_20200915-f3b381a5.pth + gpu_mem(M): '10966' +- Config: configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 23864558 + Training BatchSize / GPU: 4 + Training Data: SthV2 + gpus: 8 + pretrain: ImageNet + resolution: height 240 + Modality: RGB + Name: tsn_r50_1x1x16_50e_sthv2_rgb + Results: + - Dataset: SthV2 + Metrics: + top1 acc: 27.21 + top5 acc: 55.84 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20200917_105855.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20200917_105855.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/tsn_r50_1x1x16_50e_sthv2_rgb_20200917-80bc3611.pth + gpu_mem(M): '8337' +- Config: configs/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 100 + Parameters: 24202643 + Training BatchSize / GPU: 16 + Training Data: MiT + gpus: 16 + pretrain: ImageNet + resolution: 
short-side 256 + Modality: RGB + Name: tsn_r50_1x1x6_100e_mit_rgb + Results: + - Dataset: MiT + Metrics: + top1 acc: 26.84 + top5 acc: 51.6 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb/tsn_r50_f6_mit_26.8_51.6.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb/tsn_mit.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb/tsn_r50_1x1x6_100e_mit_rgb_20200618-d512ab1b.pth + gpu_mem(M): '8339' +- Config: configs/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet101 + Epochs: 50 + Parameters: 43141497 + Training BatchSize / GPU: 16 + Training Data: MMiT + gpus: 16 + pretrain: ImageNet + resolution: short-side 256 + Modality: RGB + Name: tsn_r101_1x1x5_50e_mmit_rgb + Results: + - Dataset: MMiT + Metrics: + mAP: 61.09 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb/tsn_r101_f6_mmit_61.1.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb/tsn_mmit.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb/tsn_r101_1x1x5_50e_mmit_rgb_20200618-642f450d.pth + gpu_mem(M): '10467' +- Config: configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 23917832 + Training BatchSize / GPU: 8 + Training Data: ActivityNet v1.3 + gpus: 8 + pretrain: Kinetics400 + resolution: short-side 320 + Modality: RGB + Name: tsn_r50_320p_1x1x8_50e_activitynet_video_rgb + Results: + - Dataset: ActivityNet v1.3 + Metrics: + top1 acc: 73.93 + top5 acc: 93.44 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb/20210228_223327.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb/20210228_223327.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb_20210301-7f8da0c6.pth + gpu_mem(M): '5692' +- Config: configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 50 + Parameters: 23917832 + Training BatchSize / GPU: 8 + Training Data: ActivityNet v1.3 + gpus: 8 + pretrain: Kinetics400 + resolution: short-side 320 + Modality: RGB + Name: tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb + Results: + - Dataset: ActivityNet v1.3 + Metrics: + top1 acc: 76.9 + top5 acc: 94.47 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb/20210217_181313.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb/20210217_181313.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb_20210301-c0f04a7e.pth + gpu_mem(M): '5692' +- Config: configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 150 + Parameters: 23939784 + Training BatchSize / GPU: 8 + 
Training Data: ActivityNet v1.3 + gpus: 16 + pretrain: Kinetics400 + resolution: 340x256 + Modality: Flow + Name: tsn_r50_320p_1x1x8_150e_activitynet_video_flow + Results: + - Dataset: ActivityNet v1.3 + Metrics: + top1 acc: 57.51 + top5 acc: 83.02 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow/tsn_r50_320p_1x1x8_150e_activitynet_video_flow_20200804.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow/tsn_r50_320p_1x1x8_150e_activitynet_video_flow_20200804.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow/tsn_r50_320p_1x1x8_150e_activitynet_video_flow_20200804-13313f52.pth + gpu_mem(M): '5780' +- Config: configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow.py + In Collection: TSN + Metadata: + Architecture: ResNet50 + Epochs: 150 + Parameters: 23939784 + Training BatchSize / GPU: 8 + Training Data: ActivityNet v1.3 + gpus: 16 + pretrain: Kinetics400 + resolution: 340x256 + Modality: Flow + Name: tsn_r50_320p_1x1x8_150e_activitynet_clip_flow + Results: + - Dataset: ActivityNet v1.3 + Metrics: + top1 acc: 59.51 + top5 acc: 82.69 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow_20200804.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow_20200804.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow_20200804-8622cf38.pth + gpu_mem(M): '5780' +- Config: configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_action_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet18 + Epochs: 100 + Parameters: 11555619 + Training BatchSize / GPU: 32 + Training Data: HVU + gpus: 16 + pretrain: ImageNet + resolution: short-side 256 + Modality: RGB + Name: tsn_r18_1x1x8_100e_hvu_action_rgb + Results: + - Dataset: HVU + Metrics: + action mAP: 57.5 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/action/tsn_r18_1x1x8_100e_hvu_action_rgb_20201027.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/action/tsn_r18_1x1x8_100e_hvu_action_rgb_20201027.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/action/tsn_r18_1x1x8_100e_hvu_action_rgb_20201027-011b282b.pth + tag category: action +- Config: configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_scene_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet18 + Epochs: 100 + Parameters: 11303736 + Training BatchSize / GPU: 32 + Training Data: HVU + gpus: 8 + pretrain: ImageNet + resolution: short-side 256 + Modality: RGB + Name: tsn_r18_1x1x8_100e_hvu_scene_rgb + Results: + - Dataset: HVU + Metrics: + scene mAP: 55.2 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/scene/tsn_r18_1x1x8_100e_hvu_scene_rgb_20201027.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/scene/tsn_r18_1x1x8_100e_hvu_scene_rgb_20201027.log + Weights: 
https://download.openmmlab.com/mmaction/recognition/tsn/hvu/scene/tsn_r18_1x1x8_100e_hvu_scene_rgb_20201027-00e5748d.pth + tag category: scene +- Config: configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_object_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet18 + Epochs: 100 + Parameters: 12037326 + Training BatchSize / GPU: 32 + Training Data: HVU + gpus: 8 + pretrain: ImageNet + resolution: short-side 256 + Modality: RGB + Name: tsn_r18_1x1x8_100e_hvu_object_rgb + Results: + - Dataset: HVU + Metrics: + object mAP: 45.7 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/object/tsn_r18_1x1x8_100e_hvu_object_rgb_20201027.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/object/tsn_r18_1x1x8_100e_hvu_object_rgb_20201027.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/object/tsn_r18_1x1x8_100e_hvu_object_rgb_20201102-24a22f30.pth + tag category: object +- Config: configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_event_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet18 + Epochs: 100 + Parameters: 11211909 + Training BatchSize / GPU: 32 + Training Data: HVU + gpus: 8 + pretrain: ImageNet + resolution: short-side 256 + Modality: RGB + Name: tsn_r18_1x1x8_100e_hvu_event_rgb + Results: + - Dataset: HVU + Metrics: + event mAP: 63.7 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/event/tsn_r18_1x1x8_100e_hvu_event_rgb_20201027.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/event/tsn_r18_1x1x8_100e_hvu_event_rgb_20201027.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/event/tsn_r18_1x1x8_100e_hvu_event_rgb_20201027-dea8cd71.pth + tag category: event +- Config: configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_concept_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet18 + Epochs: 100 + Parameters: 12037326 + Training BatchSize / GPU: 32 + Training Data: HVU + gpus: 8 + pretrain: ImageNet + resolution: short-side 256 + Modality: RGB + Name: tsn_r18_1x1x8_100e_hvu_concept_rgb + Results: + - Dataset: HVU + Metrics: + concept mAP: 47.5 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/concept/tsn_r18_1x1x8_100e_hvu_concept_rgb_20201027.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/concept/tsn_r18_1x1x8_100e_hvu_concept_rgb_20201027.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/concept/tsn_r18_1x1x8_100e_hvu_concept_rgb_20201027-fc1dd8e3.pth + tag category: concept +- Config: configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_attribute_rgb.py + In Collection: TSN + Metadata: + Architecture: ResNet18 + Epochs: 100 + Parameters: 11236533 + Training BatchSize / GPU: 32 + Training Data: HVU + gpus: 8 + pretrain: ImageNet + resolution: short-side 256 + Modality: RGB + Name: tsn_r18_1x1x8_100e_hvu_attribute_rgb + Results: + - Dataset: HVU + Metrics: + attribute mAP: 46.1 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/attribute/tsn_r18_1x1x8_100e_hvu_attribute_rgb_20201027.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/attribute/tsn_r18_1x1x8_100e_hvu_attribute_rgb_20201027.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/attribute/tsn_r18_1x1x8_100e_hvu_attribute_rgb_20201027-0b3b49d2.pth + tag 
category: attribute diff --git a/configs/recognition/x3d/metafile.yml b/configs/recognition/x3d/metafile.yml new file mode 100644 index 0000000000..2e4bde7889 --- /dev/null +++ b/configs/recognition/x3d/metafile.yml @@ -0,0 +1,44 @@ +Collections: +- Metadata: + Training Data: null + Name: X3D + README: configs/recognition/x3d/README.md +Models: +- Config: configs/recognition/x3d/x3d_s_13x6x1_facebook_kinetics400_rgb.py + In Collection: X3D + Metadata: + Architecture: X3D_S + Parameters: 3794322 + Training BatchSize / GPU: 1 + Training Data: Kinetics-400 + resolution: short-side 320 + Modality: RGB + Name: x3d_s_13x6x1_facebook_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 10-view: 72.7 + top1 30-view: 73.2 + Task: Action Recognition + Weights: https://download.openmmlab.com/mmaction/recognition/x3d/facebook/x3d_s_facebook_13x6x1_kinetics400_rgb_20201027-623825a0.pth + reference top1 10-view: 73.1 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] + reference top1 30-view: 73.5 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] +- Config: configs/recognition/x3d/x3d_m_16x5x1_facebook_kinetics400_rgb.py + In Collection: X3D + Metadata: + Architecture: X3D_M + Parameters: 3794322 + Training BatchSize / GPU: 1 + Training Data: Kinetics-400 + resolution: short-side 320 + Modality: RGB + Name: x3d_m_16x5x1_facebook_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 10-view: 75.0 + top1 30-view: 75.6 + Task: Action Recognition + Weights: https://download.openmmlab.com/mmaction/recognition/x3d/facebook/x3d_m_facebook_16x5x1_kinetics400_rgb_20201027-3f42382a.pth + reference top1 10-view: 75.1 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] + reference top1 30-view: 76.2 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] diff --git a/configs/recognition_audio/resnet/metafile.yml b/configs/recognition_audio/resnet/metafile.yml new file mode 100644 index 0000000000..3d139fd062 --- /dev/null +++ b/configs/recognition_audio/resnet/metafile.yml @@ -0,0 +1,30 @@ +Collections: +- Metadata: + Training Data: null + Name: Audio + README: configs/recognition_audio/resnet/README.md +Models: +- Config: configs/recognition_audio/resnet/tsn_r18_64x1x1_100e_kinetics400_audio_feature.py + In Collection: Audio + Metadata: + Architecture: ResNet18 + Training Data: Kinetics-400 + gpus: 8 + n_fft: '1024' + pretrain: None + Modality: Audio + Name: tsn_r18_64x1x1_100e_kinetics400_audio_feature + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 19.7 + top1 acc [w. RGB]: 71.5 + top1 acc delta [w. RGB]: 0.39 + top5 acc: 35.75 + top5 acc [w. RGB]: 90.18 + top5 acc delta [w. 
RGB]: 0.14 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/20201010_144630.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/20201010_144630.log + Weights: https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/tsn_r18_64x1x1_100e_kinetics400_audio_feature_20201012-bf34df6c.pth + gpu_mem(M): '1897' diff --git a/model_zoo.yml b/model_zoo.yml new file mode 100644 index 0000000000..21af3c4394 --- /dev/null +++ b/model_zoo.yml @@ -0,0 +1,21 @@ +Import: +- configs/localization/bmn/metafile.yml +- configs/localization/bsn/metafile.yml +- configs/localization/ssn/metafile.yml +- configs/recognition/csn/metafile.yml +- configs/recognition/i3d/metafile.yml +- configs/recognition/omnisource/metafile.yml +- configs/recognition/r2plus1d/metafile.yml +- configs/recognition/slowfast/metafile.yml +- configs/recognition/slowonly/metafile.yml +- configs/recognition/tin/metafile.yml +- configs/recognition/tpn/metafile.yml +- configs/recognition/tsm/metafile.yml +- configs/recognition/tsn/metafile.yml +- configs/recognition/c3d/metafile.yml +- configs/recognition/tanet/metafile.yml +- configs/recognition/x3d/metafile.yml +- configs/recognition/trn/metafile.yml +- configs/detection/ava/metafile.yml +- configs/detection/lfb/metafile.yml +- configs/recognition_audio/resnet/metafile.yml From 1056d29c1e96b54f38dd76229e8132a893276d75 Mon Sep 17 00:00:00 2001 From: Rejnald Lleshi <46654505+rlleshi@users.noreply.github.com> Date: Thu, 20 May 2021 04:44:09 +0200 Subject: [PATCH 103/414] add metric_options to docs (#873) * add metric_options to docs * add metric_options to docs --- docs/tutorials/1_config.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/tutorials/1_config.md b/docs/tutorials/1_config.md index 569e55176c..67deb42b46 100644 --- a/docs/tutorials/1_config.md +++ b/docs/tutorials/1_config.md @@ -414,6 +414,7 @@ which is convenient to conduct various experiments. 
evaluation = dict( # Config of evaluation during training interval=5, # Interval to perform evaluation metrics=['top_k_accuracy', 'mean_class_accuracy'], # Metrics to be performed + metric_options=dict(top_k_accuracy=dict(topk=(1, 3))), # Set top-k accuracy to 1 and 3 save_best='top_k_accuracy') # set `top_k_accuracy` as key indicator to save best checkpoint log_config = dict( # Config to register logger hook interval=20, # Interval to print the log From b99250e75dcdb31f78133d25b27ac7381fdc57f1 Mon Sep 17 00:00:00 2001 From: irving Date: Fri, 21 May 2021 00:52:52 +0800 Subject: [PATCH 104/414] FIrst commit --- demo/demo_spatiotemporal_det.py | 15 ++++++++++++--- demo/webcam_demo_spatiotemporal_det.py | 9 +++++++++ 2 files changed, 21 insertions(+), 3 deletions(-) diff --git a/demo/demo_spatiotemporal_det.py b/demo/demo_spatiotemporal_det.py index f56cca2f9e..72f151ba16 100644 --- a/demo/demo_spatiotemporal_det.py +++ b/demo/demo_spatiotemporal_det.py @@ -281,9 +281,6 @@ def main(): num_frame = len(frame_paths) h, w, _ = original_frames[0].shape - # Load label_map - label_map = load_label_map(args.label_map) - # resize frames to shortside 256 new_w, new_h = mmcv.rescale_size((w, h), (256, np.Inf)) frames = [mmcv.imresize(img, (new_w, new_h)) for img in original_frames] @@ -300,6 +297,18 @@ def main(): timestamps = np.arange(window_size // 2, num_frame + 1 - window_size // 2, args.predict_stepsize) + # Load label_map + label_map = load_label_map(args.label_map) + try: + if config['data']['train']['custom_classes'] is not None: + label_map = { + id: label_map[cls] + for id, cls in enumerate(config['data']['train'] + ['custom_classes']) + } + except KeyError: + pass + # Get Human detection results center_frames = [frame_paths[ind - 1] for ind in timestamps] human_detections = detection_inference(args, center_frames) diff --git a/demo/webcam_demo_spatiotemporal_det.py b/demo/webcam_demo_spatiotemporal_det.py index fddb2d5d8b..e0e6e47419 100644 --- a/demo/webcam_demo_spatiotemporal_det.py +++ b/demo/webcam_demo_spatiotemporal_det.py @@ -290,6 +290,15 @@ def __init__(self, config, checkpoint, device, score_thr, label_map_path): lines = f.readlines() lines = [x.strip().split(': ') for x in lines] self.label_map = {int(x[0]): x[1] for x in lines} + try: + if config['data']['train']['custom_classes'] is not None: + self.label_map = { + id: self.label_map[cls] + for id, cls in enumerate(config['data']['train'] + ['custom_classes']) + } + except KeyError: + pass def predict(self, task): """Spatio-temporval Action Detection model inference.""" From b1a5b82433e080358f7a099284500f6d689933ce Mon Sep 17 00:00:00 2001 From: irvingzhang0512 Date: Fri, 21 May 2021 09:54:44 +0800 Subject: [PATCH 105/414] fix bug --- demo/demo_spatiotemporal_det.py | 2 +- demo/webcam_demo_spatiotemporal_det.py | 13 ++++++++++++- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/demo/demo_spatiotemporal_det.py b/demo/demo_spatiotemporal_det.py index 72f151ba16..3550bcc1b6 100644 --- a/demo/demo_spatiotemporal_det.py +++ b/demo/demo_spatiotemporal_det.py @@ -302,7 +302,7 @@ def main(): try: if config['data']['train']['custom_classes'] is not None: label_map = { - id: label_map[cls] + id + 1: label_map[cls] for id, cls in enumerate(config['data']['train'] ['custom_classes']) } diff --git a/demo/webcam_demo_spatiotemporal_det.py b/demo/webcam_demo_spatiotemporal_det.py index e0e6e47419..4e908ab18a 100644 --- a/demo/webcam_demo_spatiotemporal_det.py +++ b/demo/webcam_demo_spatiotemporal_det.py @@ -293,10 +293,21 @@ def 
__init__(self, config, checkpoint, device, score_thr, label_map_path): try: if config['data']['train']['custom_classes'] is not None: self.label_map = { - id: self.label_map[cls] + id + 1: self.label_map[cls] for id, cls in enumerate(config['data']['train'] ['custom_classes']) } + print(self.label_map) + print(self.label_map) + print(self.label_map) + print(self.label_map) + print(self.label_map) + print(self.label_map) + print(self.label_map) + print(self.label_map) + print(self.label_map) + print(self.label_map) + print(self.label_map) except KeyError: pass From 9d9a08fb1f8cf7f7fe6e836508808ee29694456c Mon Sep 17 00:00:00 2001 From: irvingzhang0512 Date: Fri, 21 May 2021 09:59:05 +0800 Subject: [PATCH 106/414] remove useless codes --- demo/webcam_demo_spatiotemporal_det.py | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/demo/webcam_demo_spatiotemporal_det.py b/demo/webcam_demo_spatiotemporal_det.py index 4e908ab18a..0626a209a7 100644 --- a/demo/webcam_demo_spatiotemporal_det.py +++ b/demo/webcam_demo_spatiotemporal_det.py @@ -297,17 +297,6 @@ def __init__(self, config, checkpoint, device, score_thr, label_map_path): for id, cls in enumerate(config['data']['train'] ['custom_classes']) } - print(self.label_map) - print(self.label_map) - print(self.label_map) - print(self.label_map) - print(self.label_map) - print(self.label_map) - print(self.label_map) - print(self.label_map) - print(self.label_map) - print(self.label_map) - print(self.label_map) except KeyError: pass From 14955d3a3200dc6a843baf86d719ca2803201d86 Mon Sep 17 00:00:00 2001 From: irvingzhang0512 Date: Fri, 21 May 2021 17:05:31 +0800 Subject: [PATCH 107/414] [Fix] Fix 806 (#878) * remove extract frames by opencv * typo --- tools/data/ava/README.md | 8 +------- tools/data/ava/README_zh-CN.md | 10 ++-------- tools/data/ava/extract_rgb_frames_opencv.sh | 7 ------- 3 files changed, 3 insertions(+), 22 deletions(-) delete mode 100644 tools/data/ava/extract_rgb_frames_opencv.sh diff --git a/tools/data/ava/README.md b/tools/data/ava/README.md index 5ec84284fa..a416eb2632 100644 --- a/tools/data/ava/README.md +++ b/tools/data/ava/README.md @@ -80,13 +80,7 @@ If you only want to play with RGB frames (since extracting optical flow can be t bash extract_rgb_frames.sh ``` -If you didn't install denseflow, you can still extract RGB frames using OpenCV by the following script, but it will keep the original size of the images. - -```shell -bash extract_rgb_frames_opencv.sh -``` - -Or using ffmpeg to extract RGB frames by the following script. +If you didn't install denseflow, you can still extract RGB frames using ffmpeg by the following script. 
```shell bash extract_rgb_frames_ffmpeg.sh diff --git a/tools/data/ava/README_zh-CN.md b/tools/data/ava/README_zh-CN.md index 9cfda9aaf4..5a7b96da88 100644 --- a/tools/data/ava/README_zh-CN.md +++ b/tools/data/ava/README_zh-CN.md @@ -66,19 +66,13 @@ mkdir /mnt/SSD/ava_extracted/ ln -s /mnt/SSD/ava_extracted/ ../data/ava/rawframes/ ``` -如果用户只使用 RGB 帧(由于光流提取非常耗时),可以考虑执行以下脚本,仅用 denseflow 提取 RGB 帧: +如果用户只使用 RGB 帧(由于光流提取非常耗时),可执行以下脚本使用 denseflow 提取 RGB 帧: ```shell bash extract_rgb_frames.sh ``` -如果用户未安装 denseflow,以下脚本可以使用 OpenCV 进行 RGB 帧的提取,但视频原分辨率大小会被保留: - -```shell -bash extract_rgb_frames_opencv.sh -``` - -或执行以下脚本,使用 ffmpeg 提取 RGB 帧: +如果用户未安装 denseflow,可执行以下脚本使用 ffmpeg 提取 RGB 帧: ```shell bash extract_rgb_frames_ffmpeg.sh diff --git a/tools/data/ava/extract_rgb_frames_opencv.sh b/tools/data/ava/extract_rgb_frames_opencv.sh deleted file mode 100644 index 6cf1281558..0000000000 --- a/tools/data/ava/extract_rgb_frames_opencv.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env bash - -cd ../ -python build_rawframes.py ../../data/ava/videos_15min/ ../../data/ava/rawframes/ --task rgb --level 1 --use-opencv --mixed-ext -echo "Genearte raw frames (RGB only)" - -cd ava/ From 9e425a743bfe6afaed709f487300eb3fc6e2f967 Mon Sep 17 00:00:00 2001 From: irving Date: Sat, 22 May 2021 19:26:28 +0800 Subject: [PATCH 108/414] first commit --- configs/recognition/tsn/README.md | 2 + ...sformer_320p_1x1x3_100e_kinetics400_rgb.py | 100 ++++++++++++++++++ mmaction/models/recognizers/base.py | 21 +++- mmaction/models/recognizers/recognizer2d.py | 4 +- requirements/optional.txt | 1 + .../test_recognizers/test_recognizer2d.py | 22 ++++ 6 files changed, 144 insertions(+), 6 deletions(-) create mode 100644 configs/recognition/tsn/custom_backbones/tsn_swin_transformer_320p_1x1x3_100e_kinetics400_rgb.py diff --git a/configs/recognition/tsn/README.md b/configs/recognition/tsn/README.md index 3466febc01..745b20cb12 100644 --- a/configs/recognition/tsn/README.md +++ b/configs/recognition/tsn/README.md @@ -67,11 +67,13 @@ It's possible and convenient to use a 3rd-party backbone for TSN under the frame - [x] Backbones from [MMClassification](https://github.com/open-mmlab/mmclassification/) - [x] Backbones from [TorchVision](https://github.com/pytorch/vision/) +- [x] Backbones from [pytorch-image-models(timm)](https://github.com/rwightman/pytorch-image-models) | config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | ckpt | log | json | | :----------------------------------------------------------- | :------------: | :--: | :----------------------------------------------------------: | :------: | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | | [tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | 8x2 | ResNeXt101-32x4d [[MMCls](https://github.com/open-mmlab/mmclassification/tree/master/configs/resnext)] | ImageNet | 73.43 | 91.01 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb-16a8b561.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.json) | | 
[tsn_dense161_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | 8x2 | DenseNet161 [[TorchVision](https://github.com/pytorch/vision/)] | ImageNet | 72.78 | 90.75 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb-cbe85332.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.json) | +| [tsn_swin_transformer_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | 8x2 | Swin Transformer Base [[timm](https://github.com/rwightman/pytorch-image-models)] | ImageNet | | | [ckpt]() | [log]() | [json]() | ### Kinetics-400 Data Benchmark (8-gpus, ResNet50, ImageNet pretrain; 3 segments) diff --git a/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_320p_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_320p_1x1x3_100e_kinetics400_rgb.py new file mode 100644 index 0000000000..0d389c330b --- /dev/null +++ b/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_320p_1x1x3_100e_kinetics400_rgb.py @@ -0,0 +1,100 @@ +_base_ = [ + '../../../_base_/schedules/sgd_100e.py', + '../../../_base_/default_runtime.py' +] + +# model settings +model = dict( + type='Recognizer2D', + backbone=dict(type='timm.swin_base_patch4_window7_224', pretrained=True), + cls_head=dict( + type='TSNHead', + num_classes=400, + in_channels=1024, + spatial_type='avg', + consensus=dict(type='AvgConsensus', dim=1), + dropout_ratio=0.4, + init_std=0.01), + # model training and testing settings + train_cfg=None, + test_cfg=dict(average_clips=None)) + +# dataset settings +dataset_type = 'RawframeDataset' +data_root = 'data/kinetics400/rawframes_train_320p' +data_root_val = 'data/kinetics400/rawframes_val_320p' +ann_file_train = 'data/kinetics400/kinetics400_train_list_rawframes_320p.txt' +ann_file_val = 'data/kinetics400/kinetics400_val_list_rawframes_320p.txt' +ann_file_test = 'data/kinetics400/kinetics400_val_list_rawframes_320p.txt' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) +train_pipeline = [ + dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=3), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='RandomResizedCrop'), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=3, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Flip', flip_ratio=0), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +]
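+# Backbone note: a `type` starting with `timm.` is resolved by stripping the +# prefix and calling `timm.create_model(name, num_classes=0)` (classifier +# disabled); features are then taken from `backbone.forward_features`, and +# Swin-Base exposes 1024-d features, matching `in_channels=1024` in the +# TSNHead above. Since `swin_base_patch4_window7_224` is trained for fixed +# 224x224 inputs, the val pipeline above and the test pipeline below crop to +# 224 as well.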
+test_pipeline = [ + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=25, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 224)), + dict(type='ThreeCrop', crop_size=224), + dict(type='Flip', flip_ratio=0), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=12, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + pipeline=test_pipeline)) + +# runtime settings +work_dir = './work_dirs/tsn_swin_transformer_320p_1x1x3_100e_kinetics400_rgb/' +optimizer = dict( + type='SGD', + lr=0.00375, # this lr is used for 8 gpus + momentum=0.9, + weight_decay=0.0001) diff --git a/mmaction/models/recognizers/base.py b/mmaction/models/recognizers/base.py index 281aa547e1..eea7f8ed8c 100644 --- a/mmaction/models/recognizers/base.py +++ b/mmaction/models/recognizers/base.py @@ -58,6 +58,17 @@ def __init__(self, self.backbone.classifier = nn.Identity() self.backbone.fc = nn.Identity() self.backbone_from = 'torchvision' + elif backbone['type'].startswith('timm.'): + try: + import timm + except (ImportError, ModuleNotFoundError): + raise ImportError('Please install timm to use this ' + 'backbone.') + backbone_type = backbone.pop('type')[5:] + # disable the classifier + backbone['num_classes'] = 0 + self.backbone = timm.create_model(backbone_type, **backbone) + self.backbone_from = 'timm' else: self.backbone = builder.build_backbone(backbone) @@ -100,11 +111,11 @@ def init_weights(self): """Initialize the model network weights.""" if self.backbone_from in ['mmcls', 'mmaction2']: self.backbone.init_weights() - elif self.backbone_from == 'torchvision': + elif self.backbone_from in ['torchvision', 'timm']: warnings.warn('We do not initialize weights for backbones in ' - 'torchvision, since the weights for backbones in ' - 'torchvision are initialized in their __init__ ' - 'functions. 
') + f'{self.backbone_from}, since the weights for ' + f'backbones in {self.backbone_from} are initialized ' + 'in their __init__ functions.') else: raise NotImplementedError('Unsupported backbone source ' f'{self.backbone_from}!') @@ -126,6 +137,8 @@ def extract_feat(self, imgs): if (hasattr(self.backbone, 'features') and self.backbone_from == 'torchvision'): x = self.backbone.features(imgs) + elif self.backbone_from == 'timm': + x = self.backbone.forward_features(imgs) else: x = self.backbone(imgs) return x diff --git a/mmaction/models/recognizers/recognizer2d.py b/mmaction/models/recognizers/recognizer2d.py index 16f6349be8..f7fb93ac7a 100644 --- a/mmaction/models/recognizers/recognizer2d.py +++ b/mmaction/models/recognizers/recognizer2d.py @@ -19,7 +19,7 @@ def forward_train(self, imgs, labels, **kwargs): x = self.extract_feat(imgs) - if self.backbone_from == 'torchvision': + if self.backbone_from in ['torchvision', 'timm']: if len(x.shape) == 4 and (x.shape[2] > 1 or x.shape[3] > 1): # apply adaptive avg pooling x = nn.AdaptiveAvgPool2d(1)(x) @@ -53,7 +53,7 @@ def _do_test(self, imgs): x = self.extract_feat(imgs) - if self.backbone_from == 'torchvision': + if self.backbone_from in ['torchvision', 'timm']: if len(x.shape) == 4 and (x.shape[2] > 1 or x.shape[3] > 1): # apply adaptive avg pooling x = nn.AdaptiveAvgPool2d(1)(x) diff --git a/requirements/optional.txt b/requirements/optional.txt index 839d3acc87..3177ef6221 100644 --- a/requirements/optional.txt +++ b/requirements/optional.txt @@ -7,3 +7,4 @@ moviepy onnx onnxruntime PyTurboJPEG +timm diff --git a/tests/test_models/test_recognizers/test_recognizer2d.py b/tests/test_models/test_recognizers/test_recognizer2d.py index 927f046273..8d4cf23744 100644 --- a/tests/test_models/test_recognizers/test_recognizer2d.py +++ b/tests/test_models/test_recognizers/test_recognizer2d.py @@ -97,6 +97,28 @@ def test_tsn(): for one_img in img_list: recognizer(one_img, None, return_loss=False) + # test timm backbones + timm_backbone = dict(type='timm.efficientnet_b0', pretrained=False) + config.model['backbone'] = timm_backbone + config.model['cls_head']['in_channels'] = 1280 + + recognizer = build_recognizer(config.model) + + input_shape = (1, 3, 3, 32, 32) + demo_inputs = generate_recognizer_demo_inputs(input_shape) + + imgs = demo_inputs['imgs'] + gt_labels = demo_inputs['gt_labels'] + + losses = recognizer(imgs, gt_labels) + assert isinstance(losses, dict) + + # Test forward test + with torch.no_grad(): + img_list = [img[None, :] for img in imgs] + for one_img in img_list: + recognizer(one_img, None, return_loss=False) + def test_tsm(): config = get_recognizer_cfg('tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py') From 2e20c5593e5d0084536b07723920ea8fa46b5fb0 Mon Sep 17 00:00:00 2001 From: Kenny Date: Sat, 22 May 2021 21:19:57 +0800 Subject: [PATCH 109/414] remove arguments of PoseDecode --- mmaction/datasets/pipelines/pose_loading.py | 53 ++----------------- .../test_loadings/test_pose_loading.py | 13 +---- 2 files changed, 4 insertions(+), 62 deletions(-) diff --git a/mmaction/datasets/pipelines/pose_loading.py b/mmaction/datasets/pipelines/pose_loading.py index be77d1c612..5aba0f20ee 100644 --- a/mmaction/datasets/pipelines/pose_loading.py +++ b/mmaction/datasets/pipelines/pose_loading.py @@ -136,47 +136,10 @@ class PoseDecode: """Load and decode pose with given indices. Required keys are "keypoint", "frame_inds" (optional), "keypoint_score"
- - Args: - random_drop (bool): Whether to randomly drop keypoints. The following - args are applicable only when `random_crop == True`. When set as - True, "keypoint_score" is a mandatory key. Default: False. - random_seed (int): Random seed used for randomly dropping keypoints. - Default: 1. - drop_prob (float): The probability for dropping one keypoint for each - frame. Default: 1 / 16. - droppable_joints (tuple[int]): The joint indexes that may be dropped. - Default: (7, 8, 9, 10, 13, 14, 15, 16). (limb joints) + (optional), added or modified keys are "keypoint", "keypoint_score" (if + applicable). """ - def __init__(self, - random_drop=False, - random_seed=1, - drop_prob=1. / 16., - droppable_joints=(7, 8, 9, 10, 13, 14, 15, 16)): - self.random_drop = random_drop - self.random_seed = random_seed - self.drop_prob = drop_prob - self.droppable_joints = droppable_joints - - # inplace - def _drop_kpscore(self, kpscores): - """Randomly drop keypoints by setting the corresponding keypoint scores - as 0. - - Args: - kpscores (np.ndarray): The confidence scores of keypoints. - """ - - for kpscore in kpscores: - lt = kpscore.shape[0] - for tidx in range(lt): - if np.random.random() < self.drop_prob: - jidx = np.random.choice(self.droppable_joints) - kpscore[tidx, jidx] = 0. - def _load_kp(self, kp, frame_inds): """Load keypoints given frame indices. @@ -198,9 +161,6 @@ def _load_kpscore(self, kpscore, frame_inds): return [x[frame_inds].astype(np.float32) for x in kpscore] def __call__(self, results): - if self.random_drop: - np.random.seed(self.random_seed) - assert 'keypoint_score' in results, 'for simplicity' if 'frame_inds' not in results: results['frame_inds'] = np.arange(results['total_frames']) @@ -213,9 +173,6 @@ def __call__(self, results): if 'keypoint_score' in results: kpscore = results['keypoint_score'] - if self.random_drop: - self._drop_kpscore(kpscore) - results['keypoint_score'] = kpscore[:, frame_inds].astype(np.float32) @@ -226,11 +183,7 @@ def __call__(self, results): return results def __repr__(self): - repr_str = (f'{self.__class__.__name__}(' - f'random_drop={self.random_drop}, ' - f'random_seed={self.random_seed}, ' - f'drop_prob={self.drop_prob}, ' - f'droppable_joints={self.droppable_joints})') + repr_str = (f'{self.__class__.__name__}()') return repr_str diff --git a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py index f0432fb528..d32df6e604 100644 --- a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py +++ b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py @@ -103,11 +103,7 @@ def test_pose_decode(self): results = dict( keypoint=kp, keypoint_score=kpscore, frame_inds=frame_inds) pose_decode = PoseDecode() - assert str(pose_decode) == ('PoseDecode(random_drop=False, ' - 'random_seed=1, ' - 'drop_prob=0.0625, ' - 'droppable_joints=(7, 8, 9, 10, ' - '13, 14, 15, 16))') + assert str(pose_decode) == ('PoseDecode()') decode_results = pose_decode(results) assert_array_almost_equal(decode_results['keypoint'], kp[:, frame_inds]) @@ -120,13 +116,6 @@ def test_pose_decode(self): assert_array_almost_equal(decode_results['keypoint'], kp) assert_array_almost_equal(decode_results['keypoint_score'], kpscore) - results = dict( - keypoint=kp, keypoint_score=kpscore, frame_inds=frame_inds) - pose_decode = PoseDecode( - random_drop=True, drop_prob=1, droppable_joints=(7, )) - decode_results = pose_decode(results) - 
assert_array_almost_equal(decode_results['keypoint_score'][..., 7], 0)
-
    def test_load_kinetics_pose(self):
        def get_mode(arr):
From 867b022fcf497510cbc2d48f2b6de1078990f42c Mon Sep 17 00:00:00 2001
From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com>
Date: Sun, 23 May 2021 10:54:19 +0800
Subject: [PATCH 110/414] [Fix] Update train.py to fix resume (#877)
* Update train.py
* fix lint
---
 tools/train.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/tools/train.py b/tools/train.py
index 20d075e7ec..44e34db4ca 100644
--- a/tools/train.py
+++ b/tools/train.py
@@ -8,7 +8,7 @@ import mmcv
import torch
from mmcv import Config, DictAction
-from mmcv.runner import init_dist, set_random_seed
+from mmcv.runner import get_dist_info, init_dist, set_random_seed
from mmcv.utils import get_git_hash
from mmaction import __version__
@@ -108,6 +108,8 @@ def main():
    else:
        distributed = True
        init_dist(args.launcher, **cfg.dist_params)
+        _, world_size = get_dist_info()
+        cfg.gpu_ids = range(world_size)
    # The flag is used to determine whether it is omnisource training
    cfg.setdefault('omnisource', False)
From 90e50e0f73931971c8f1b51c8a8c1b351ca61664 Mon Sep 17 00:00:00 2001
From: SCZwangxiao
Date: Tue, 25 May 2021 17:51:52 +0800
Subject: [PATCH 111/414] Correct dictionary ‘eval_results’ key name for
 metric mmit_mean_average_precision
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
 mmaction/datasets/base.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/mmaction/datasets/base.py b/mmaction/datasets/base.py
index bbed8addec..62fe34f214 100644
--- a/mmaction/datasets/base.py
+++ b/mmaction/datasets/base.py
@@ -229,10 +229,12 @@ def evaluate(self,
            ]
            if metric == 'mean_average_precision':
                mAP = mean_average_precision(results, gt_labels)
+                eval_results['mean_average_precision'] = mAP
+                log_msg = f'\nmean_average_precision\t{mAP:.4f}'
            elif metric == 'mmit_mean_average_precision':
                mAP = mmit_mean_average_precision(results, gt_labels)
-                eval_results['mean_average_precision'] = mAP
-                log_msg = f'\nmean_average_precision\t{mAP:.4f}'
+                eval_results['mmit_mean_average_precision'] = mAP
+                log_msg = f'\nmmit_mean_average_precision\t{mAP:.4f}'
            print_log(log_msg, logger=logger)
            continue
From cbe2cad62a9638677e996f49e3325a81d776926d Mon Sep 17 00:00:00 2001
From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com>
Date: Fri, 28 May 2021 11:40:13 +0800
Subject: [PATCH 112/414] Update pose_loading.py
---
 mmaction/datasets/pipelines/pose_loading.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/mmaction/datasets/pipelines/pose_loading.py b/mmaction/datasets/pipelines/pose_loading.py
index 5aba0f20ee..49be61dac2 100644
--- a/mmaction/datasets/pipelines/pose_loading.py
+++ b/mmaction/datasets/pipelines/pose_loading.py
@@ -21,7 +21,7 @@ class UniformSampleFrames:
        num_clips (int): Number of clips to be sampled. Default: 1.
        test_mode (bool): Store True when building test or validation dataset.
            Default: False.
-        seed (int): The random seed used during test time.
+        seed (int): The random seed used during test time. Default: 255.
""" def __init__(self, clip_len, num_clips=1, test_mode=False, seed=255): From 012c44c1a00884f76ec81681be2521b38f996b54 Mon Sep 17 00:00:00 2001 From: Kenny Date: Fri, 28 May 2021 11:56:45 +0800 Subject: [PATCH 113/414] resolve comments --- mmaction/datasets/pipelines/augmentations.py | 18 +++++++++--------- .../test_augmentations/test_flip.py | 9 ++++++--- 2 files changed, 15 insertions(+), 12 deletions(-) diff --git a/mmaction/datasets/pipelines/augmentations.py b/mmaction/datasets/pipelines/augmentations.py index 15d1492004..839fb115aa 100644 --- a/mmaction/datasets/pipelines/augmentations.py +++ b/mmaction/datasets/pipelines/augmentations.py @@ -1217,10 +1217,10 @@ class Flip: "horizontal" | "vertical". Default: "horizontal". flip_label_map (Dict[int, int] | None): Transform the label of the flipped image with the specific label. Default: None. - left (list[int]): Indexes of left keypoints, used to flip keypoints. - Default: None. - right (list[ind]): Indexes of right keypoints, used to flip keypoints. + left_kp (list[int]): Indexes of left keypoints, used to flip keypoints. Default: None. + right_kp (list[ind]): Indexes of right keypoints, used to flip + keypoints. Default: None. lazy (bool): Determine whether to apply lazy operation. Default: False. """ _directions = ['horizontal', 'vertical'] @@ -1229,8 +1229,8 @@ def __init__(self, flip_ratio=0.5, direction='horizontal', flip_label_map=None, - left=None, - right=None, + left_kp=None, + right_kp=None, lazy=False): if direction not in self._directions: raise ValueError(f'Direction {direction} is not supported. ' @@ -1238,8 +1238,8 @@ def __init__(self, self.flip_ratio = flip_ratio self.direction = direction self.flip_label_map = flip_label_map - self.left = left - self.right = right + self.left_kp = left_kp + self.right_kp = right_kp self.lazy = lazy def _flip_imgs(self, imgs, modality): @@ -1255,8 +1255,8 @@ def _flip_kps(self, kps, kpscores, img_width): kp_x = kps[..., 0] kp_x[kp_x != 0] = img_width - kp_x[kp_x != 0] new_order = list(range(kps.shape[2])) - if self.left is not None and self.right is not None: - for left, right in zip(self.left, self.right): + if self.left_kp is not None and self.right_kp is not None: + for left, right in zip(self.left_kp, self.right_kp): new_order[left] = right new_order[right] = left kps = kps[:, :, new_order] diff --git a/tests/test_data/test_pipelines/test_augmentations/test_flip.py b/tests/test_data/test_pipelines/test_augmentations/test_flip.py index e3fbcf6b4f..ee5d9f5ed0 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_flip.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_flip.py @@ -108,7 +108,8 @@ def test_flip(self): keypoint=np.array([[1, 1], [63, 63]]).reshape([1, 1, 2, 2]), modality='Pose', img_shape=(64, 64)) - flip = Flip(flip_ratio=1, direction='horizontal', left=[0], right=[1]) + flip = Flip( + flip_ratio=1, direction='horizontal', left_kp=[0], right_kp=[1]) flip_results = flip(results) assert_array_almost_equal(flip_results['keypoint'][0, 0], np.array([[1, 63], [63, 1]])) @@ -117,7 +118,8 @@ def test_flip(self): keypoint=np.array([[1, 1], [63, 63]]).reshape([1, 1, 2, 2]), modality='Pose', img_shape=(64, 64)) - flip = Flip(flip_ratio=1, direction='horizontal', left=[], right=[]) + flip = Flip( + flip_ratio=1, direction='horizontal', left_kp=[], right_kp=[]) flip_results = flip(results) assert_array_almost_equal(flip_results['keypoint'][0, 0], np.array([[63, 1], [1, 63]])) @@ -127,5 +129,6 @@ def test_flip(self): keypoint=np.array([[1, 1], [63, 
63]]).reshape([1, 1, 2, 2]),
            modality='Pose',
            img_shape=(64, 64))
-        flip = Flip(flip_ratio=1, direction='vertical', left=[], right=[])
+        flip = Flip(
+            flip_ratio=1, direction='vertical', left_kp=[], right_kp=[])
        flip_results = flip(results)
From af9cfcd11bb2503716ff17cac5e9921806aa22e7 Mon Sep 17 00:00:00 2001
From: Kenny
Date: Fri, 28 May 2021 12:07:56 +0800
Subject: [PATCH 114/414] Docstring for explanation.
---
 mmaction/datasets/pipelines/pose_loading.py | 6 ++++++
 1 file changed, 6 insertions(+)
diff --git a/mmaction/datasets/pipelines/pose_loading.py b/mmaction/datasets/pipelines/pose_loading.py
index 49be61dac2..108b876822 100644
--- a/mmaction/datasets/pipelines/pose_loading.py
+++ b/mmaction/datasets/pipelines/pose_loading.py
@@ -13,6 +13,12 @@ class UniformSampleFrames:
    """Uniformly sample frames from the video.
+    To sample an n-frame clip from the video, UniformSampleFrames basically
+    divides the video into n segments of equal length and randomly samples one
+    frame from each segment. To make the testing results reproducible, a
+    random seed is set during testing to make the sampling results
+    deterministic.
+
    Required keys are "total_frames", "start_index" , added or modified keys
    are "frame_inds", "clip_len", "frame_interval" and "num_clips".
From 2092371bd0b555ea2cfbace4291e9f6f7295c900 Mon Sep 17 00:00:00 2001
From: Kenny
Date: Fri, 28 May 2021 12:10:58 +0800
Subject: [PATCH 115/414] update gen pose target
---
 mmaction/datasets/pipelines/pose_loading.py | 19 +++++------
 .../test_loadings/test_pose_loading.py      | 32 +++++++++----------
 2 files changed, 26 insertions(+), 25 deletions(-)
diff --git a/mmaction/datasets/pipelines/pose_loading.py b/mmaction/datasets/pipelines/pose_loading.py
index 108b876822..9c19e25427 100644
--- a/mmaction/datasets/pipelines/pose_loading.py
+++ b/mmaction/datasets/pipelines/pose_loading.py
@@ -363,10 +363,10 @@ class GeneratePoseTarget:
            which is the definition of COCO-17p skeletons.
        double (bool): Output both original heatmaps and flipped heatmaps.
            Default: False.
-        left (tuple[int]): Indexes of left keypoints, which is used when
+        left_kp (tuple[int]): Indexes of left keypoints, which is used when
            flipping heatmaps. Default: (1, 3, 5, 7, 9, 11, 13, 15),
            which is left keypoints in COCO-17p.
-        right (tuple[int]): Indexes of right keypoints, which is used when
+        right_kp (tuple[int]): Indexes of right keypoints, which is used when
            flipping heatmaps. Default: (2, 4, 6, 8, 10, 12, 14, 16),
            which is right keypoints in COCO-17p.
""" @@ -380,8 +380,8 @@ def __init__(self, (7, 9), (0, 6), (6, 8), (8, 10), (5, 11), (11, 13), (13, 15), (6, 12), (12, 14), (14, 16), (11, 12)), double=False, - left=(1, 3, 5, 7, 9, 11, 13, 15), - right=(2, 4, 6, 8, 10, 12, 14, 16)): + left_kp=(1, 3, 5, 7, 9, 11, 13, 15), + right_kp=(2, 4, 6, 8, 10, 12, 14, 16)): self.sigma = sigma self.use_score = use_score @@ -395,8 +395,8 @@ def __init__(self, assert self.with_kp or self.with_limb, ( 'At least one of "with_limb" ' 'and "with_kp" should be set as True.') - self.left = left - self.right = right + self.left_kp = left_kp + self.right_kp = right_kp self.skeletons = skeletons def generate_a_heatmap(self, img_h, img_w, centers, sigma, max_values): @@ -607,7 +607,8 @@ def __call__(self, results): results['imgs'] = np.stack(self.gen_an_aug(results)) else: results_ = cp.deepcopy(results) - flip = Flip(flip_ratio=1, left=self.left, right=self.right) + flip = Flip( + flip_ratio=1, left_kp=self.left_kp, right_kp=self.right_kp) results_ = flip(results_) results['imgs'] = np.concatenate( [self.gen_an_aug(results), @@ -622,6 +623,6 @@ def __repr__(self): f'with_limb={self.with_limb}, ' f'skeletons={self.skeletons}, ' f'double={self.double}, ' - f'left={self.left}, ' - f'right={self.right})') + f'left_kp={self.left_kp}, ' + f'right_kp={self.right_kp})') return repr_str diff --git a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py index d32df6e604..1b1555335e 100644 --- a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py +++ b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py @@ -217,12 +217,12 @@ def test_generate_pose_target(self): modality='Pose') generate_pose_target = GeneratePoseTarget( - sigma=1, with_kp=True, left=(0, ), right=(1, ), skeletons=()) + sigma=1, with_kp=True, left_kp=(0, ), right_kp=(1, ), skeletons=()) assert str(generate_pose_target) == ('GeneratePoseTarget(sigma=1, ' 'use_score=True, with_kp=True, ' 'with_limb=False, skeletons=(), ' - 'double=False, left=(0,), ' - 'right=(1,))') + 'double=False, left_kp=(0,), ' + 'right_kp=(1,))') return_results = generate_pose_target(results) assert return_results['imgs'].shape == (8, 64, 64, 3) assert_array_almost_equal(return_results['imgs'][0], @@ -231,7 +231,7 @@ def test_generate_pose_target(self): results = dict(img_shape=img_shape, keypoint=kp, modality='Pose') generate_pose_target = GeneratePoseTarget( - sigma=1, with_kp=True, left=(0, ), right=(1, ), skeletons=()) + sigma=1, with_kp=True, left_kp=(0, ), right_kp=(1, ), skeletons=()) return_results = generate_pose_target(results) assert return_results['imgs'].shape == (8, 64, 64, 3) assert_array_almost_equal(return_results['imgs'][0], @@ -241,8 +241,8 @@ def test_generate_pose_target(self): sigma=1, with_kp=False, with_limb=True, - left=(0, ), - right=(1, ), + left_kp=(0, ), + right_kp=(1, ), skeletons=((0, 1), (1, 2), (0, 2))) return_results = generate_pose_target(results) assert return_results['imgs'].shape == (8, 64, 64, 3) @@ -253,8 +253,8 @@ def test_generate_pose_target(self): sigma=1, with_kp=True, with_limb=True, - left=(0, ), - right=(1, ), + left_kp=(0, ), + right_kp=(1, ), skeletons=((0, 1), (1, 2), (0, 2))) return_results = generate_pose_target(results) assert return_results['imgs'].shape == (8, 64, 64, 6) @@ -266,8 +266,8 @@ def test_generate_pose_target(self): with_kp=True, with_limb=True, double=True, - left=(0, ), - right=(1, ), + left_kp=(0, ), + right_kp=(1, ), skeletons=((0, 1), (1, 2), (0, 2))) 
return_results = generate_pose_target(results) imgs = return_results['imgs'] @@ -288,7 +288,7 @@ def test_generate_pose_target(self): keypoint_score=kpscore, modality='Pose') generate_pose_target = GeneratePoseTarget( - sigma=1, with_kp=True, left=(0, ), right=(1, ), skeletons=()) + sigma=1, with_kp=True, left_kp=(0, ), right_kp=(1, ), skeletons=()) return_results = generate_pose_target(results) assert_array_almost_equal(return_results['imgs'], 0) @@ -306,8 +306,8 @@ def test_generate_pose_target(self): sigma=1, with_kp=False, with_limb=True, - left=(0, ), - right=(1, ), + left_kp=(0, ), + right_kp=(1, ), skeletons=((0, 1), (1, 2), (0, 2))) return_results = generate_pose_target(results) assert_array_almost_equal(return_results['imgs'], 0) @@ -323,7 +323,7 @@ def test_generate_pose_target(self): keypoint_score=kpscore, modality='Pose') generate_pose_target = GeneratePoseTarget( - sigma=1, with_kp=True, left=(0, ), right=(1, ), skeletons=()) + sigma=1, with_kp=True, left_kp=(0, ), right_kp=(1, ), skeletons=()) return_results = generate_pose_target(results) assert_array_almost_equal(return_results['imgs'], 0) @@ -341,8 +341,8 @@ def test_generate_pose_target(self): sigma=1, with_kp=False, with_limb=True, - left=(0, ), - right=(1, ), + left_kp=(0, ), + right_kp=(1, ), skeletons=((0, 1), (1, 2), (0, 2))) return_results = generate_pose_target(results) assert_array_almost_equal(return_results['imgs'], 0) From 1e1e98add5d49f222f9701190f0fc518d3e4d855 Mon Sep 17 00:00:00 2001 From: Kenny Date: Sun, 30 May 2021 13:05:30 +0800 Subject: [PATCH 116/414] fix config --- .../posec3d/slowonly_r50_u48_240e_gym_keypoint.py | 10 +++++----- .../skeleton/posec3d/slowonly_r50_u48_240e_gym_limb.py | 10 +++++----- .../slowonly_r50_u48_240e_ntu120_xsub_keypoint.py | 10 +++++----- .../posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb.py | 10 +++++----- .../slowonly_r50_u48_240e_ntu60_xsub_keypoint.py | 10 +++++----- .../posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb.py | 10 +++++----- 6 files changed, 30 insertions(+), 30 deletions(-) diff --git a/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint.py b/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint.py index 7824643547..8ce6fbcb31 100644 --- a/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint.py +++ b/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint.py @@ -27,8 +27,8 @@ dataset_type = 'PoseDataset' ann_file_train = 'data/posec3d/gym_train.pkl' ann_file_val = 'data/posec3d/gym_val.pkl' -left = [1, 3, 5, 7, 9, 11, 13, 15] -right = [2, 4, 6, 8, 10, 12, 14, 16] +left_kp = [1, 3, 5, 7, 9, 11, 13, 15] +right_kp = [2, 4, 6, 8, 10, 12, 14, 16] train_pipeline = [ dict(type='UniformSampleFrames', clip_len=48), dict(type='PoseDecode'), @@ -36,7 +36,7 @@ dict(type='Resize', scale=(-1, 64)), dict(type='RandomResizedCrop', area_range=(0.56, 1.0)), dict(type='Resize', scale=(56, 56), keep_ratio=False), - dict(type='Flip', flip_ratio=0.5, left=left, right=right), + dict(type='Flip', flip_ratio=0.5, left_kp=left_kp, right_kp=right_kp), dict( type='GeneratePoseTarget', sigma=0.6, @@ -77,8 +77,8 @@ with_kp=True, with_limb=False, double=True, - left=left, - right=right), + left_kp=left_kp, + right_kp=right_kp), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), dict(type='ToTensor', keys=['imgs']) diff --git a/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb.py b/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb.py index b0d8eeda49..c0c9295e02 100644 --- 
a/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb.py +++ b/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb.py @@ -27,8 +27,8 @@ dataset_type = 'PoseDataset' ann_file_train = 'data/posec3d/gym_train.pkl' ann_file_val = 'data/posec3d/gym_val.pkl' -left = [1, 3, 5, 7, 9, 11, 13, 15] -right = [2, 4, 6, 8, 10, 12, 14, 16] +left_kp = [1, 3, 5, 7, 9, 11, 13, 15] +right_kp = [2, 4, 6, 8, 10, 12, 14, 16] skeletons = [[0, 5], [0, 6], [5, 7], [7, 9], [6, 8], [8, 10], [5, 11], [11, 13], [13, 15], [6, 12], [12, 14], [14, 16], [0, 1], [0, 2], [1, 3], [2, 4], [11, 12]] @@ -39,7 +39,7 @@ dict(type='Resize', scale=(-1, 64)), dict(type='RandomResizedCrop', area_range=(0.56, 1.0)), dict(type='Resize', scale=(56, 56), keep_ratio=False), - dict(type='Flip', flip_ratio=0.5, left=left, right=right), + dict(type='Flip', flip_ratio=0.5, left_kp=left_kp, right_kp=right_kp), dict( type='GeneratePoseTarget', sigma=0.6, @@ -83,8 +83,8 @@ with_limb=True, skeletons=skeletons, double=True, - left=left, - right=right), + left_kp=left_kp, + right_kp=right_kp), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), dict(type='ToTensor', keys=['imgs']) diff --git a/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py b/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py index 80627341e2..a9a3aa5f04 100644 --- a/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py +++ b/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py @@ -27,8 +27,8 @@ dataset_type = 'PoseDataset' ann_file_train = 'data/posec3d/ntu120_xsub_train.pkl' ann_file_val = 'data/posec3d/ntu120_xsub_val.pkl' -left = [1, 3, 5, 7, 9, 11, 13, 15] -right = [2, 4, 6, 8, 10, 12, 14, 16] +left_kp = [1, 3, 5, 7, 9, 11, 13, 15] +right_kp = [2, 4, 6, 8, 10, 12, 14, 16] train_pipeline = [ dict(type='UniformSampleFrames', clip_len=48), dict(type='PoseDecode'), @@ -36,7 +36,7 @@ dict(type='Resize', scale=(-1, 64)), dict(type='RandomResizedCrop', area_range=(0.56, 1.0)), dict(type='Resize', scale=(56, 56), keep_ratio=False), - dict(type='Flip', flip_ratio=0.5, left=left, right=right), + dict(type='Flip', flip_ratio=0.5, left_kp=left_kp, right_kp=right_kp), dict( type='GeneratePoseTarget', sigma=0.6, @@ -77,8 +77,8 @@ with_kp=True, with_limb=False, double=True, - left=left, - right=right), + left_kp=left_kp, + right_kp=right_kp), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), dict(type='ToTensor', keys=['imgs']) diff --git a/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb.py b/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb.py index ddb0aa0ecb..31cb0da388 100644 --- a/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb.py +++ b/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb.py @@ -27,8 +27,8 @@ dataset_type = 'PoseDataset' ann_file_train = 'data/posec3d/ntu60_xsub_train.pkl' ann_file_val = 'data/posec3d/ntu60_xsub_val.pkl' -left = [1, 3, 5, 7, 9, 11, 13, 15] -right = [2, 4, 6, 8, 10, 12, 14, 16] +left_kp = [1, 3, 5, 7, 9, 11, 13, 15] +right_kp = [2, 4, 6, 8, 10, 12, 14, 16] skeletons = [[0, 5], [0, 6], [5, 7], [7, 9], [6, 8], [8, 10], [5, 11], [11, 13], [13, 15], [6, 12], [12, 14], [14, 16], [0, 1], [0, 2], [1, 3], [2, 4], [11, 12]] @@ -39,7 +39,7 @@ dict(type='Resize', scale=(-1, 64)), dict(type='RandomResizedCrop', area_range=(0.56, 1.0)), dict(type='Resize', scale=(56, 56), keep_ratio=False), - dict(type='Flip', 
flip_ratio=0.5, left=left, right=right), + dict(type='Flip', flip_ratio=0.5, left_kp=left_kp, right_kp=right_kp), dict( type='GeneratePoseTarget', sigma=0.6, @@ -83,8 +83,8 @@ with_limb=True, skeletons=skeletons, double=True, - left=left, - right=right), + left_kp=left_kp, + right_kp=right_kp), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), dict(type='ToTensor', keys=['imgs']) diff --git a/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint.py b/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint.py index a5fd0f2516..3863ef7159 100644 --- a/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint.py +++ b/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint.py @@ -27,8 +27,8 @@ dataset_type = 'PoseDataset' ann_file_train = 'data/posec3d/ntu60_xsub_train.pkl' ann_file_val = 'data/posec3d/ntu60_xsub_val.pkl' -left = [1, 3, 5, 7, 9, 11, 13, 15] -right = [2, 4, 6, 8, 10, 12, 14, 16] +left_kp = [1, 3, 5, 7, 9, 11, 13, 15] +right_kp = [2, 4, 6, 8, 10, 12, 14, 16] train_pipeline = [ dict(type='UniformSampleFrames', clip_len=48), dict(type='PoseDecode'), @@ -36,7 +36,7 @@ dict(type='Resize', scale=(-1, 64)), dict(type='RandomResizedCrop', area_range=(0.56, 1.0)), dict(type='Resize', scale=(56, 56), keep_ratio=False), - dict(type='Flip', flip_ratio=0.5, left=left, right=right), + dict(type='Flip', flip_ratio=0.5, left_kp=left_kp, right_kp=right_kp), dict( type='GeneratePoseTarget', sigma=0.6, @@ -77,8 +77,8 @@ with_kp=True, with_limb=False, double=True, - left=left, - right=right), + left_kp=left_kp, + right_kp=right_kp), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), dict(type='ToTensor', keys=['imgs']) diff --git a/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb.py b/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb.py index 1847aa561e..2c3f47682f 100644 --- a/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb.py +++ b/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb.py @@ -27,8 +27,8 @@ dataset_type = 'PoseDataset' ann_file_train = 'data/posec3d/ntu60_xsub_train.pkl' ann_file_val = 'data/posec3d/ntu60_xsub_val.pkl' -left = [1, 3, 5, 7, 9, 11, 13, 15] -right = [2, 4, 6, 8, 10, 12, 14, 16] +left_kp = [1, 3, 5, 7, 9, 11, 13, 15] +right_kp = [2, 4, 6, 8, 10, 12, 14, 16] skeletons = [[0, 5], [0, 6], [5, 7], [7, 9], [6, 8], [8, 10], [5, 11], [11, 13], [13, 15], [6, 12], [12, 14], [14, 16], [0, 1], [0, 2], [1, 3], [2, 4], [11, 12]] @@ -39,7 +39,7 @@ dict(type='Resize', scale=(-1, 64)), dict(type='RandomResizedCrop', area_range=(0.56, 1.0)), dict(type='Resize', scale=(56, 56), keep_ratio=False), - dict(type='Flip', flip_ratio=0.5, left=left, right=right), + dict(type='Flip', flip_ratio=0.5, left_kp=left_kp, right_kp=right_kp), dict( type='GeneratePoseTarget', sigma=0.6, @@ -83,8 +83,8 @@ with_limb=True, skeletons=skeletons, double=True, - left=left, - right=right), + left_kp=left_kp, + right_kp=right_kp), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), dict(type='ToTensor', keys=['imgs']) From 323084160175d3505ebbe92d069af0fc2133b522 Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Mon, 31 May 2021 21:43:36 +0800 Subject: [PATCH 117/414] [Feature] Feature extraction (#856) * support feature extraction * add feature extraction config * update * fix bug * fix bug * 
update * handle slowfast * fix bug * fix bug * fix bug * fix bug * add documentation * resolve comments * update README * support feature extraction * add feature extraction config * update * fix bug * fix bug * update * handle slowfast * fix bug * fix bug * fix bug * fix bug * add documentation * resolve comments * update README * move feature_extraction to test_cfg --- ..._r50_clip_feature_extraction_4x16x1_rgb.py | 45 ++++ ...n_r50_clip_feature_extraction_1x1x3_rgb.py | 41 ++++ docs/feature_extraction.md | 70 ++++++ mmaction/models/recognizers/base.py | 24 +- mmaction/models/recognizers/recognizer2d.py | 19 +- mmaction/models/recognizers/recognizer3d.py | 45 +++- tools/clip_feature_extraction.py | 228 ++++++++++++++++++ tools/dist_clip_feature_extraction.sh | 12 + 8 files changed, 470 insertions(+), 14 deletions(-) create mode 100644 configs/recognition/slowonly/slowonly_r50_clip_feature_extraction_4x16x1_rgb.py create mode 100644 configs/recognition/tsn/tsn_r50_clip_feature_extraction_1x1x3_rgb.py create mode 100644 docs/feature_extraction.md create mode 100644 tools/clip_feature_extraction.py create mode 100644 tools/dist_clip_feature_extraction.sh diff --git a/configs/recognition/slowonly/slowonly_r50_clip_feature_extraction_4x16x1_rgb.py b/configs/recognition/slowonly/slowonly_r50_clip_feature_extraction_4x16x1_rgb.py new file mode 100644 index 0000000000..90d8087f83 --- /dev/null +++ b/configs/recognition/slowonly/slowonly_r50_clip_feature_extraction_4x16x1_rgb.py @@ -0,0 +1,45 @@ +model = dict( + type='Recognizer3D', + backbone=dict( + type='ResNet3dSlowOnly', + depth=50, + pretrained=None, + lateral=False, + conv1_kernel=(1, 7, 7), + conv1_stride_t=1, + pool1_stride_t=1, + inflate=(0, 0, 1, 1), + norm_eval=False), + train_cfg=None, + test_cfg=dict(feature_extraction=True)) + +# dataset settings +dataset_type = 'VideoDataset' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) +test_pipeline = [ + dict(type='DecordInit'), + dict( + type='SampleFrames', + clip_len=4, + frame_interval=16, + num_clips=10, + test_mode=True), + dict(type='DecordDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=256), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=1, + workers_per_gpu=2, + test=dict( + type=dataset_type, + ann_file=None, + data_prefix=None, + pipeline=test_pipeline)) + +dist_params = dict(backend='nccl') diff --git a/configs/recognition/tsn/tsn_r50_clip_feature_extraction_1x1x3_rgb.py b/configs/recognition/tsn/tsn_r50_clip_feature_extraction_1x1x3_rgb.py new file mode 100644 index 0000000000..fec95d8c2f --- /dev/null +++ b/configs/recognition/tsn/tsn_r50_clip_feature_extraction_1x1x3_rgb.py @@ -0,0 +1,41 @@ +# model settings +model = dict( + type='Recognizer2D', + backbone=dict( + type='ResNet', + pretrained='torchvision://resnet50', + depth=50, + norm_eval=False), + train_cfg=None, + test_cfg=dict(feature_extraction=True)) + +# dataset settings +dataset_type = 'VideoDataset' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) +test_pipeline = [ + dict(type='DecordInit', num_threads=1), + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=25, + test_mode=True), + dict(type='DecordDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', 
crop_size=256),
+    dict(type='Normalize', **img_norm_cfg),
+    dict(type='FormatShape', input_format='NCHW'),
+    dict(type='Collect', keys=['imgs'], meta_keys=[]),
+    dict(type='ToTensor', keys=['imgs'])
+]
+data = dict(
+    videos_per_gpu=1,
+    workers_per_gpu=2,
+    test=dict(
+        type=dataset_type,
+        ann_file=None,
+        data_prefix=None,
+        pipeline=test_pipeline))
+
+dist_params = dict(backend='nccl')
diff --git a/docs/feature_extraction.md b/docs/feature_extraction.md
new file mode 100644
index 0000000000..6c4dd1b1fe
--- /dev/null
+++ b/docs/feature_extraction.md
@@ -0,0 +1,70 @@
+# Feature Extraction
+
+We provide easy-to-use scripts for feature extraction.
+
+## Clip-level Feature Extraction
+
+Clip-level feature extraction extracts a deep feature from a video clip, which usually lasts several to tens of seconds. The extracted feature is an n-dim vector for each clip. When performing multi-view feature extraction, e.g. n clips x m crops, the extracted feature will be the average of the n * m views.
+
+Before applying clip-level feature extraction, you need to prepare a video list (which includes all videos that you want to extract features from). For example, the video list for videos in UCF101 will look like:
+
+```
+ApplyEyeMakeup/v_ApplyEyeMakeup_g01_c01.avi
+ApplyEyeMakeup/v_ApplyEyeMakeup_g01_c02.avi
+ApplyEyeMakeup/v_ApplyEyeMakeup_g01_c03.avi
+ApplyEyeMakeup/v_ApplyEyeMakeup_g01_c04.avi
+ApplyEyeMakeup/v_ApplyEyeMakeup_g01_c05.avi
+...
+YoYo/v_YoYo_g25_c01.avi
+YoYo/v_YoYo_g25_c02.avi
+YoYo/v_YoYo_g25_c03.avi
+YoYo/v_YoYo_g25_c04.avi
+YoYo/v_YoYo_g25_c05.avi
+```
+
+Assume the root of UCF101 videos is `data/ucf101/videos` and the name of the video list is `ucf101.txt`, to extract clip-level features of UCF101 videos with Kinetics-400 pretrained TSN, you can use the following script:
+
+```shell
+python tools/clip_feature_extraction.py \
+configs/recognition/tsn/tsn_r50_clip_feature_extraction_1x1x3_rgb.py \
+https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_320p_1x1x3_100e_kinetics400_rgb_20200702-cc665e2a.pth \
+--video-list ucf101.txt \
+--video-root data/ucf101/videos \
+--out ucf101_feature.pkl
+```
+
+and the extracted features will be stored in `ucf101_feature.pkl`.
+
+You can also use distributed clip-level feature extraction. Below is an example for a node with 8 GPUs.
+
+```shell
+bash tools/dist_clip_feature_extraction.sh \
+configs/recognition/tsn/tsn_r50_clip_feature_extraction_1x1x3_rgb.py \
+https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_320p_1x1x3_100e_kinetics400_rgb_20200702-cc665e2a.pth \
+8 \
+--video-list ucf101.txt \
+--video-root data/ucf101/videos \
+--out ucf101_feature.pkl
+```
+
+To extract clip-level features of UCF101 videos with Kinetics-400 pretrained SlowOnly, you can use the following script:
+
+```shell
+python tools/clip_feature_extraction.py \
+configs/recognition/slowonly/slowonly_r50_clip_feature_extraction_4x16x1_rgb.py \
+https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014-c9cdc656.pth \
+--video-list ucf101.txt \
+--video-root data/ucf101/videos \
+--out ucf101_feature.pkl
+```
+
+The two config files demonstrate what a minimal config file for feature extraction looks like.
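+What makes these two configs "minimal" is that they keep only the backbone and turn on the feature extraction flag: there is no `cls_head` and no training branch. A sketch of the relevant fragment (mirroring the TSN config above; the full config also sets `norm_eval=False`):
+
+```python
+model = dict(
+    type='Recognizer2D',
+    backbone=dict(
+        type='ResNet',
+        pretrained='torchvision://resnet50',
+        depth=50),
+    # no cls_head: pooled backbone features are returned directly
+    train_cfg=None,
+    test_cfg=dict(feature_extraction=True))
+```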
You can also use other existing config files for feature extraction, as long as they use videos rather than raw frames for training and testing: + +```shell +python tools/clip_feature_extraction.py \ +configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py \ +https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014-c9cdc656.pth \ +--video-list ucf101.txt \ +--video-root data/ucf101/videos \ +--out ucf101_feature.pkl +``` diff --git a/mmaction/models/recognizers/base.py b/mmaction/models/recognizers/base.py index 281aa547e1..bcbde8468a 100644 --- a/mmaction/models/recognizers/base.py +++ b/mmaction/models/recognizers/base.py @@ -22,14 +22,16 @@ class BaseRecognizer(nn.Module, metaclass=ABCMeta): Args: backbone (dict): Backbone modules to extract feature. - cls_head (dict): Classification head to process feature. + cls_head (dict | None): Classification head to process feature. + Default: None. + neck (dict | None): Neck for feature fusion. Default: None. train_cfg (dict | None): Config for training. Default: None. test_cfg (dict | None): Config for testing. Default: None. """ def __init__(self, backbone, - cls_head, + cls_head=None, neck=None, train_cfg=None, test_cfg=None): @@ -63,7 +65,8 @@ def __init__(self, if neck is not None: self.neck = builder.build_neck(neck) - self.cls_head = builder.build_head(cls_head) + + self.cls_head = builder.build_head(cls_head) if cls_head else None self.train_cfg = train_cfg self.test_cfg = test_cfg @@ -80,6 +83,11 @@ def __init__(self, self.max_testing_views = test_cfg['max_testing_views'] assert isinstance(self.max_testing_views, int) + if test_cfg is not None and 'feature_extraction' in test_cfg: + self.feature_extraction = test_cfg['feature_extraction'] + else: + self.feature_extraction = False + # mini-batch blending, e.g. mixup, cutmix, etc. 
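        # e.g. train_cfg=dict(blending=dict(type='MixupBlending',
        #                                   num_classes=400, alpha=.2))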
self.blending = None if train_cfg is not None and 'blending' in train_cfg: @@ -93,9 +101,14 @@ def __init__(self, @property def with_neck(self): - """bool: whether the detector has a neck""" + """bool: whether the recognizer has a neck""" return hasattr(self, 'neck') and self.neck is not None + @property + def with_cls_head(self): + """bool: whether the recognizer has a cls_head""" + return hasattr(self, 'cls_head') and self.cls_head is not None + def init_weights(self): """Initialize the model network weights.""" if self.backbone_from in ['mmcls', 'mmaction2']: @@ -109,7 +122,8 @@ def init_weights(self): raise NotImplementedError('Unsupported backbone source ' f'{self.backbone_from}!') - self.cls_head.init_weights() + if self.with_cls_head: + self.cls_head.init_weights() if self.with_neck: self.neck.init_weights() diff --git a/mmaction/models/recognizers/recognizer2d.py b/mmaction/models/recognizers/recognizer2d.py index 16f6349be8..d3444845f6 100644 --- a/mmaction/models/recognizers/recognizer2d.py +++ b/mmaction/models/recognizers/recognizer2d.py @@ -11,6 +11,8 @@ class Recognizer2D(BaseRecognizer): def forward_train(self, imgs, labels, **kwargs): """Defines the computation performed at every call when training.""" + + assert self.with_cls_head batches = imgs.shape[0] imgs = imgs.reshape((-1, ) + imgs.shape[2:]) num_segs = imgs.shape[0] // batches @@ -70,6 +72,16 @@ def _do_test(self, imgs): x = x.squeeze(2) num_segs = 1 + if self.feature_extraction: + # perform spatial pooling + avg_pool = nn.AdaptiveAvgPool2d(1) + x = avg_pool(x) + # squeeze dimensions + x = x.reshape((batches, num_segs, -1)) + # temporal average pooling + x = x.mean(axis=1) + return x + # When using `TSNHead` or `TPNHead`, shape is [batch_size, num_classes] # When using `TSMHead`, shape is [batch_size * num_crops, num_classes] # `num_crops` is calculated by: @@ -77,13 +89,14 @@ def _do_test(self, imgs): # 2) `num_sample_positions` in `DenseSampleFrames` # 3) `ThreeCrop/TenCrop/MultiGroupCrop` in `test_pipeline` # 4) `num_clips` in `SampleFrames` or its subclass if `clip_len != 1` + + # should have cls_head if not extracting features cls_score = self.cls_head(x, num_segs) assert cls_score.size()[0] % batches == 0 # calculate num_crops automatically cls_score = self.average_clip(cls_score, cls_score.size()[0] // batches) - return cls_score def _do_fcn_test(self, imgs): @@ -128,6 +141,8 @@ def forward_test(self, imgs): testing.""" if self.test_cfg.get('fcn_test', False): # If specified, spatially fully-convolutional testing is performed + assert not self.feature_extraction + assert self.with_cls_head return self._do_fcn_test(imgs).cpu().numpy() return self._do_test(imgs).cpu().numpy() @@ -142,6 +157,7 @@ def forward_dummy(self, imgs, softmax=False): Returns: Tensor: Class score. 
""" + assert self.with_cls_head batches = imgs.shape[0] imgs = imgs.reshape((-1, ) + imgs.shape[2:]) num_segs = imgs.shape[0] // batches @@ -165,4 +181,5 @@ def forward_dummy(self, imgs, softmax=False): def forward_gradcam(self, imgs): """Defines the computation performed at every call when using gradcam utils.""" + assert self.with_cls_head return self._do_test(imgs) diff --git a/mmaction/models/recognizers/recognizer3d.py b/mmaction/models/recognizers/recognizer3d.py index 26e4668147..35eafaaf47 100644 --- a/mmaction/models/recognizers/recognizer3d.py +++ b/mmaction/models/recognizers/recognizer3d.py @@ -11,6 +11,8 @@ class Recognizer3D(BaseRecognizer): def forward_train(self, imgs, labels, **kwargs): """Defines the computation performed at every call when training.""" + + assert self.with_cls_head imgs = imgs.reshape((-1, ) + imgs.shape[2:]) losses = dict() @@ -29,6 +31,7 @@ def forward_train(self, imgs, labels, **kwargs): def _do_test(self, imgs): """Defines the computation performed at every call when evaluation, testing and gradcam.""" + batches = imgs.shape[0] num_segs = imgs.shape[1] imgs = imgs.reshape((-1, ) + imgs.shape[2:]) @@ -38,22 +41,46 @@ def _do_test(self, imgs): 'max_testing_views is only compatible ' 'with batch_size == 1') view_ptr = 0 - cls_scores = [] + feats = [] while view_ptr < total_views: batch_imgs = imgs[view_ptr:view_ptr + self.max_testing_views] x = self.extract_feat(batch_imgs) if self.with_neck: x, _ = self.neck(x) - cls_score = self.cls_head(x) - cls_scores.append(cls_score) + feats.append(x) view_ptr += self.max_testing_views - cls_score = torch.cat(cls_scores) + # should consider the case that feat is a tuple + if isinstance(feats[0], tuple): + len_tuple = len(feats[0]) + feat = [ + torch.cat([x[i] for x in feats]) for i in range(len_tuple) + ] + feat = tuple(feat) + else: + feat = torch.cat(feats) else: - x = self.extract_feat(imgs) + feat = self.extract_feat(imgs) if self.with_neck: - x, _ = self.neck(x) - cls_score = self.cls_head(x) - + feat, _ = self.neck(feat) + + if self.feature_extraction: + # perform spatio-temporal pooling + avg_pool = nn.AdaptiveAvgPool3d(1) + if isinstance(feat, tuple): + feat = [avg_pool(x) for x in feat] + # concat them + feat = torch.cat(feat, axis=1) + else: + feat = avg_pool(feat) + # squeeze dimensions + feat = feat.reshape((batches, num_segs, -1)) + # temporal average pooling + feat = feat.mean(axis=1) + return feat + + # should have cls_head if not extracting features + assert self.with_cls_head + cls_score = self.cls_head(feat) cls_score = self.average_clip(cls_score, num_segs) return cls_score @@ -73,6 +100,7 @@ def forward_dummy(self, imgs, softmax=False): Returns: Tensor: Class score. 
""" + assert self.with_cls_head imgs = imgs.reshape((-1, ) + imgs.shape[2:]) x = self.extract_feat(imgs) @@ -87,4 +115,5 @@ def forward_dummy(self, imgs, softmax=False): def forward_gradcam(self, imgs): """Defines the computation performed at every call when using gradcam utils.""" + assert self.with_cls_head return self._do_test(imgs) diff --git a/tools/clip_feature_extraction.py b/tools/clip_feature_extraction.py new file mode 100644 index 0000000000..90724a9ccf --- /dev/null +++ b/tools/clip_feature_extraction.py @@ -0,0 +1,228 @@ +import argparse +import os +import os.path as osp +import warnings +from datetime import datetime + +import mmcv +import numpy as np +import torch +import torch.distributed as dist +from mmcv import Config, DictAction +from mmcv.cnn import fuse_conv_bn +from mmcv.fileio.io import file_handlers +from mmcv.parallel import MMDataParallel, MMDistributedDataParallel +from mmcv.runner import get_dist_info, init_dist, load_checkpoint +from mmcv.runner.fp16_utils import wrap_fp16_model + +from mmaction.apis import multi_gpu_test, single_gpu_test +from mmaction.datasets import build_dataloader, build_dataset +from mmaction.models import build_model +from mmaction.utils import register_module_hooks + + +def parse_args(): + parser = argparse.ArgumentParser( + description='MMAction2 clip-level feature extraction') + parser.add_argument('config', help='test config file path') + parser.add_argument('checkpoint', help='checkpoint file') + parser.add_argument('--video-list', help='video file list') + parser.add_argument('--video-root', help='video root directory') + parser.add_argument( + '--out', + default=None, + help='output result file in pkl/yaml/json format') + parser.add_argument( + '--fuse-conv-bn', + action='store_true', + help='Whether to fuse conv and bn, this will slightly increase' + 'the inference speed') + parser.add_argument( + '--gpu-collect', + action='store_true', + help='whether to use gpu to collect results') + parser.add_argument( + '--tmpdir', + help='tmp directory used for collecting results from multiple ' + 'workers, available when gpu-collect is not specified') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + default={}, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. 
For example, '
+        "'--cfg-options model.backbone.depth=18 model.backbone.with_cp=True'")
+    parser.add_argument(
+        '--launcher',
+        choices=['none', 'pytorch', 'slurm', 'mpi'],
+        default='none',
+        help='job launcher')
+    parser.add_argument('--local_rank', type=int, default=0)
+    args = parser.parse_args()
+    if 'LOCAL_RANK' not in os.environ:
+        os.environ['LOCAL_RANK'] = str(args.local_rank)
+
+    return args
+
+
+def turn_off_pretrained(cfg):
+    # recursively find all pretrained in the model config,
+    # and set them None to avoid redundant pretrain steps for testing
+    if 'pretrained' in cfg:
+        cfg.pretrained = None
+
+    # recursively turn off pretrained value
+    for sub_cfg in cfg.values():
+        if isinstance(sub_cfg, dict):
+            turn_off_pretrained(sub_cfg)
+
+
+def text2tensor(text, size=256):
+    nums = [ord(x) for x in text]
+    assert len(nums) < size
+    nums.extend([0] * (size - len(nums)))
+    nums = np.array(nums, dtype=np.uint8)
+    return torch.from_numpy(nums)
+
+
+def tensor2text(tensor):
+    # 0 is the padding value and cannot occur in the encoded text
+    chars = [chr(x) for x in tensor if x != 0]
+    return ''.join(chars)
+
+
+def inference_pytorch(args, cfg, distributed, data_loader):
+    """Get predictions from PyTorch models."""
+    # remove redundant pretrain steps for testing
+    turn_off_pretrained(cfg.model)
+
+    # build the model and load checkpoint
+    model = build_model(
+        cfg.model, train_cfg=None, test_cfg=cfg.get('test_cfg'))
+
+    if len(cfg.module_hooks) > 0:
+        register_module_hooks(model, cfg.module_hooks)
+
+    fp16_cfg = cfg.get('fp16', None)
+    if fp16_cfg is not None:
+        wrap_fp16_model(model)
+    load_checkpoint(model, args.checkpoint, map_location='cpu')
+
+    if args.fuse_conv_bn:
+        model = fuse_conv_bn(model)
+
+    if not distributed:
+        model = MMDataParallel(model, device_ids=[0])
+        outputs = single_gpu_test(model, data_loader)
+    else:
+        model = MMDistributedDataParallel(
+            model.cuda(),
+            device_ids=[torch.cuda.current_device()],
+            broadcast_buffers=False)
+        outputs = multi_gpu_test(model, data_loader, args.tmpdir,
+                                 args.gpu_collect)
+
+    return outputs
+
+
+def main():
+    args = parse_args()
+
+    cfg = Config.fromfile(args.config)
+
+    cfg.merge_from_dict(args.cfg_options)
+
+    if cfg.model['test_cfg'] is None:
+        cfg.model['test_cfg'] = dict(feature_extraction=True)
+    else:
+        cfg.model['test_cfg']['feature_extraction'] = True
+
+    # Load output_config from cfg
+    output_config = cfg.get('output_config', {})
+    if args.out:
+        # Overwrite output_config from args.out
+        output_config = Config._merge_a_into_b(
+            dict(out=args.out), output_config)
+
+    assert output_config, 'Please specify output filename with --out.'
+
+    dataset_type = cfg.data.test.type
+    if output_config.get('out', None):
+        if 'output_format' in output_config:
+            # ugly workaround to make recognition and localization the same
+            warnings.warn(
+                'Skip checking `output_format` in localization task.')
+        else:
+            out = output_config['out']
+            # make sure the dirname of the output path exists
+            mmcv.mkdir_or_exist(osp.dirname(out))
+            _, suffix = osp.splitext(out)
+            assert dataset_type == 'VideoDataset'
+
+            assert suffix[1:] in file_handlers, (
+                'The format of the output '
+                'file should be json, pickle or yaml')
+
+    # set cudnn benchmark
+    if cfg.get('cudnn_benchmark', False):
+        torch.backends.cudnn.benchmark = True
+    cfg.data.test.test_mode = True
+    cfg.data.test.data_prefix = args.video_root
+
+    # init distributed env first, since logger depends on the dist info.
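+    # NOTE on the logic below: rank 0 writes a temporary annotation list
+    # built from --video-list, then broadcasts its filename to the other
+    # ranks as a fixed-size uint8 tensor (see text2tensor / tensor2text
+    # above), so every rank builds its dataloader from the same file.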
+    if args.launcher == 'none':
+        distributed = False
+    else:
+        distributed = True
+        init_dist(args.launcher, **cfg.dist_params)
+
+    rank, _ = get_dist_info()
+
+    size = 256
+    fname_tensor = torch.zeros(size, dtype=torch.uint8).cuda()
+    if rank == 0:
+        videos = open(args.video_list).readlines()
+        videos = [x.strip() for x in videos]
+
+        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
+        fake_anno = f'fake_anno_{timestamp}.txt'
+        with open(fake_anno, 'w') as fout:
+            lines = [x + ' 0' for x in videos]
+            fout.write('\n'.join(lines))
+        fname_tensor = text2tensor(fake_anno, size).cuda()
+
+    if distributed:
+        dist.broadcast(fname_tensor.cuda(), src=0)
+
+    fname = tensor2text(fname_tensor)
+    cfg.data.test.ann_file = fname
+
+    # The flag is used to register module's hooks
+    cfg.setdefault('module_hooks', [])
+
+    # build the dataloader
+    dataset = build_dataset(cfg.data.test, dict(test_mode=True))
+    dataloader_setting = dict(
+        videos_per_gpu=cfg.data.get('videos_per_gpu', 1),
+        workers_per_gpu=cfg.data.get('workers_per_gpu', 1),
+        dist=distributed,
+        shuffle=False)
+
+    dataloader_setting = dict(dataloader_setting,
+                              **cfg.data.get('test_dataloader', {}))
+    data_loader = build_dataloader(dataset, **dataloader_setting)
+
+    outputs = inference_pytorch(args, cfg, distributed, data_loader)
+
+    if rank == 0:
+        if output_config.get('out', None):
+            out = output_config['out']
+            print(f'\nwriting results to {out}')
+            dataset.dump_results(outputs, **output_config)
+        # remove the temporary file
+        os.remove(fake_anno)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/tools/dist_clip_feature_extraction.sh b/tools/dist_clip_feature_extraction.sh
new file mode 100644
index 0000000000..06ad2b0ffe
--- /dev/null
+++ b/tools/dist_clip_feature_extraction.sh
@@ -0,0 +1,12 @@
+#!/usr/bin/env bash
+
+CONFIG=$1
+CHECKPOINT=$2
+GPUS=$3
+PORT=${PORT:-29500}
+
+# Arguments starting from the fourth one are captured by ${@:4}
+PYTHONPATH="$(dirname $0)/..":$PYTHONPATH \
+python -m torch.distributed.launch --nproc_per_node=$GPUS --master_port=$PORT \
+    $(dirname "$0")/clip_feature_extraction.py $CONFIG $CHECKPOINT \
+    --launcher pytorch ${@:4}
From 7ea0e01d94fc6b3665c3df4386af570047d98396 Mon Sep 17 00:00:00 2001
From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com>
Date: Mon, 31 May 2021 23:03:37 +0800
Subject: [PATCH 118/414] [Feature] Implement ACRN and Focal Loss (#891)
* implement acrn and focal_loss
* add batch1 ckpt
* add acrn README
* fix unittest
* fix unittest
* add acrn ava2.1
* add some docstrings
* update links
* add some docstrings
* add unittest
* fix unittest
* update config
* fix
---
 configs/detection/acrn/README.md              |  81 +++++++++
 ...etrained_r50_8x8x1_cosine_10e_ava22_rgb.py | 169 ++++++++++++++++++
 ...pretrained_r50_8x8x1_cosine_10e_ava_rgb.py | 169 ++++++++++++++++++
 configs/detection/ava/README.md               |  12 +-
 configs/detection/ava/README_zh-CN.md         |  12 +-
 ...etrained_r50_8x8x1_cosine_10e_ava22_rgb.py | 167 +++++++++++++++++
 ...etrained_r50_8x8x1_cosine_10e_ava22_rgb.py | 168 +++++++++++++++++
 ...etrained_r50_8x8x1_cosine_10e_ava22_rgb.py | 168 +++++++++++++++++
 mmaction/datasets/ava_dataset.py              |  15 +-
 mmaction/models/__init__.py                   |   8 +-
 mmaction/models/heads/__init__.py             |   3 +-
 mmaction/models/heads/bbox_head.py            |  18 +-
 mmaction/models/heads/fbo_head.py             |   2 +-
 mmaction/models/heads/lfb_infer_head.py       |   2 +-
 mmaction/models/heads/misc_head.py            | 131 ++++++++++++++
 mmaction/models/heads/roi_head.py             |  21 ++-
 .../roi_extractors/single_straight3d.py       |  23 ++-
 tests/test_models/test_head.py                |  28 ++-
tests/test_models/test_roi_extractor.py | 26 +-- 19 files changed, 1190 insertions(+), 33 deletions(-) create mode 100644 configs/detection/acrn/README.md create mode 100644 configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py create mode 100644 configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.py create mode 100644 configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py create mode 100644 configs/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py create mode 100644 configs/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py create mode 100644 mmaction/models/heads/misc_head.py diff --git a/configs/detection/acrn/README.md b/configs/detection/acrn/README.md new file mode 100644 index 0000000000..014e6fc710 --- /dev/null +++ b/configs/detection/acrn/README.md @@ -0,0 +1,81 @@ +# ACRN + +## Introduction + + + +```BibTeX +@inproceedings{gu2018ava, + title={Ava: A video dataset of spatio-temporally localized atomic visual actions}, + author={Gu, Chunhui and Sun, Chen and Ross, David A and Vondrick, Carl and Pantofaru, Caroline and Li, Yeqing and Vijayanarasimhan, Sudheendra and Toderici, George and Ricco, Susanna and Sukthankar, Rahul and others}, + booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition}, + pages={6047--6056}, + year={2018} +} +``` + + + +```BibTeX +@inproceedings{sun2018actor, + title={Actor-centric relation network}, + author={Sun, Chen and Shrivastava, Abhinav and Vondrick, Carl and Murphy, Kevin and Sukthankar, Rahul and Schmid, Cordelia}, + booktitle={Proceedings of the European Conference on Computer Vision (ECCV)}, + pages={318--334}, + year={2018} +} +``` + +## Model Zoo + +### AVA2.1 + +| Model | Modality | Pretrained | Backbone | Input | gpus | mAP | log | json | ckpt | +| :----------------------------------------------------------: | :------: | :----------: | :------: | :---: | :--: | :--: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb](/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 27.1 | [log](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb-49b07bf2.pth) | + +### AVA2.2 + +| Model | Modality | Pretrained | Backbone | Input | gpus | mAP | log | json | ckpt | +| :----------------------------------------------------------: | :------: | :----------: | :------: | :---: | :--: | :--: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| 
[slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb](/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 27.8 | [log](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-2be32625.pth) |
+
+- Notes:
+
+1. The **gpus** indicates the number of GPUs we used to get the checkpoint.
+   According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU,
+   e.g., lr=0.01 for 4 GPUs x 2 video/gpu and lr=0.08 for 16 GPUs x 4 video/gpu. A worked example for this config is given at the end of this README.
+
+For more details on data preparation, you can refer to AVA in [Data Preparation](/docs/data_preparation.md).
+
+## Train
+
+You can use the following command to train a model.
+
+```shell
+python tools/train.py ${CONFIG_FILE} [optional arguments]
+```
+
+Example: train ACRN with SlowFast backbone on AVA with periodic validation.
+
+```shell
+python tools/train.py configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py --validate
+```
+
+For more details and optional arguments, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting).
+
+## Test
+
+You can use the following command to test a model.
+
+```shell
+python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments]
+```
+
+Example: test ACRN with SlowFast backbone on AVA and dump the result to a csv file.
+
+```shell
+python tools/test.py configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py checkpoints/SOME_CHECKPOINT.pth --eval mAP --out results.csv
+```
+
+For more details and optional arguments, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset).
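+As a worked example of the linear scaling rule from the note above: the reference setting of this config is 8 GPUs x 6 videos per GPU with lr=0.075 (see the config file), so the learning rate simply scales with the total batch size. The helper below is a hypothetical sketch, not part of the codebase:
+
+```python
+def scale_lr(gpus, videos_per_gpu, base_lr=0.075, base_batch=8 * 6):
+    """Scale the learning rate linearly with the total batch size."""
+    return base_lr * (gpus * videos_per_gpu) / base_batch
+
+print(scale_lr(4, 6))   # 0.0375 for 4 GPUs x 6 videos per GPU
+print(scale_lr(16, 6))  # 0.15 for 16 GPUs x 6 videos per GPU
+```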
diff --git a/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py b/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py new file mode 100644 index 0000000000..7bd1ea779d --- /dev/null +++ b/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py @@ -0,0 +1,169 @@ +model = dict( + type='FastRCNN', + backbone=dict( + type='ResNet3dSlowFast', + pretrained=None, + resample_rate=4, + speed_ratio=4, + channel_ratio=8, + slow_pathway=dict( + type='resnet3d', + depth=50, + pretrained=None, + lateral=True, + fusion_kernel=7, + conv1_kernel=(1, 7, 7), + dilations=(1, 1, 1, 1), + conv1_stride_t=1, + pool1_stride_t=1, + inflate=(0, 0, 1, 1), + spatial_strides=(1, 2, 2, 1)), + fast_pathway=dict( + type='resnet3d', + depth=50, + pretrained=None, + lateral=False, + base_channels=8, + conv1_kernel=(5, 7, 7), + conv1_stride_t=1, + pool1_stride_t=1, + spatial_strides=(1, 2, 2, 1))), + roi_head=dict( + type='AVARoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor3D', + roi_layer_type='RoIAlign', + output_size=8, + with_temporal_pool=True, + temporal_pool_mode='max'), + shared_head=dict(type='ACRNHead', in_channels=4608, out_channels=2304), + bbox_head=dict( + type='BBoxHeadAVA', + dropout_ratio=0.5, + in_channels=2304, + num_classes=81, + multilabel=True)), + train_cfg=dict( + rcnn=dict( + assigner=dict( + type='MaxIoUAssignerAVA', + pos_iou_thr=0.9, + neg_iou_thr=0.9, + min_pos_iou=0.9), + sampler=dict( + type='RandomSampler', + num=32, + pos_fraction=1, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=1.0, + debug=False)), + test_cfg=dict(rcnn=dict(action_thr=0.002))) + +dataset_type = 'AVADataset' +data_root = 'data/ava/rawframes' +anno_root = 'data/ava/annotations' + +ann_file_train = f'{anno_root}/ava_train_v2.2.csv' +ann_file_val = f'{anno_root}/ava_val_v2.2.csv' + +exclude_file_train = f'{anno_root}/ava_train_excluded_timestamps_v2.2.csv' +exclude_file_val = f'{anno_root}/ava_val_excluded_timestamps_v2.2.csv' + +label_file = f'{anno_root}/ava_action_list_v2.2.pbtxt' + +proposal_file_train = (f'{anno_root}/ava_dense_proposals_train.FAIR.' + 'recall_93.9.pkl') +proposal_file_val = f'{anno_root}/ava_dense_proposals_val.FAIR.recall_93.9.pkl' + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) + +train_pipeline = [ + dict(type='SampleAVAFrames', clip_len=32, frame_interval=2), + dict(type='RawFrameDecode'), + dict(type='RandomRescale', scale_range=(256, 320)), + dict(type='RandomCrop', size=256), + dict(type='Flip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW', collapse=True), + dict(type='Rename', mapping=dict(imgs='img')), + dict(type='ToTensor', keys=['img', 'proposals', 'gt_bboxes', 'gt_labels']), + dict( + type='ToDataContainer', + fields=[ + dict(key=['proposals', 'gt_bboxes', 'gt_labels'], stack=False) + ]), + dict( + type='Collect', + keys=['img', 'proposals', 'gt_bboxes', 'gt_labels'], + meta_keys=['scores', 'entity_ids']) +] +# The testing is w/o. 
any cropping / flipping +val_pipeline = [ + dict(type='SampleAVAFrames', clip_len=32, frame_interval=2), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW', collapse=True), + dict(type='Rename', mapping=dict(imgs='img')), + dict(type='ToTensor', keys=['img', 'proposals']), + dict(type='ToDataContainer', fields=[dict(key='proposals', stack=False)]), + dict( + type='Collect', + keys=['img', 'proposals'], + meta_keys=['scores', 'img_shape'], + nested=True) +] + +data = dict( + videos_per_gpu=6, + workers_per_gpu=4, + val_dataloader=dict(videos_per_gpu=1), + test_dataloader=dict(videos_per_gpu=1), + train=dict( + type=dataset_type, + ann_file=ann_file_train, + exclude_file=exclude_file_train, + pipeline=train_pipeline, + label_file=label_file, + proposal_file=proposal_file_train, + person_det_score_thr=0.9, + data_prefix=data_root), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + exclude_file=exclude_file_val, + pipeline=val_pipeline, + label_file=label_file, + proposal_file=proposal_file_val, + person_det_score_thr=0.9, + data_prefix=data_root)) +data['test'] = data['val'] +# optimizer +optimizer = dict(type='SGD', lr=0.075, momentum=0.9, weight_decay=0.00001) +# this lr is used for 8 gpus +optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2)) +# learning policy +lr_config = dict( + policy='CosineAnnealing', + by_epoch=False, + min_lr=0, + warmup='linear', + warmup_by_epoch=True, + warmup_iters=2, + warmup_ratio=0.1) +total_epochs = 10 +checkpoint_config = dict(interval=1) +workflow = [('train', 1)] +evaluation = dict(interval=1) +log_config = dict( + interval=20, hooks=[ + dict(type='TextLoggerHook'), + ]) +dist_params = dict(backend='nccl') +log_level = 'INFO' +work_dir = './work_dirs/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb' # noqa: E501 +load_from = 'https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/slowfast_r50_8x8x1_256e_kinetics400_rgb_20200716-73547d2b.pth' # noqa: E501 +resume_from = None +find_unused_parameters = False diff --git a/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.py b/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.py new file mode 100644 index 0000000000..50806ddacb --- /dev/null +++ b/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.py @@ -0,0 +1,169 @@ +model = dict( + type='FastRCNN', + backbone=dict( + type='ResNet3dSlowFast', + pretrained=None, + resample_rate=4, + speed_ratio=4, + channel_ratio=8, + slow_pathway=dict( + type='resnet3d', + depth=50, + pretrained=None, + lateral=True, + fusion_kernel=7, + conv1_kernel=(1, 7, 7), + dilations=(1, 1, 1, 1), + conv1_stride_t=1, + pool1_stride_t=1, + inflate=(0, 0, 1, 1), + spatial_strides=(1, 2, 2, 1)), + fast_pathway=dict( + type='resnet3d', + depth=50, + pretrained=None, + lateral=False, + base_channels=8, + conv1_kernel=(5, 7, 7), + conv1_stride_t=1, + pool1_stride_t=1, + spatial_strides=(1, 2, 2, 1))), + roi_head=dict( + type='AVARoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor3D', + roi_layer_type='RoIAlign', + output_size=8, + with_temporal_pool=True, + temporal_pool_mode='max'), + shared_head=dict(type='ACRNHead', in_channels=4608, out_channels=2304), + bbox_head=dict( + type='BBoxHeadAVA', + dropout_ratio=0.5, + in_channels=2304, + num_classes=81, + multilabel=True)), + 
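+    # NOTE: the ACRNHead above takes in_channels=4608 = 2 x 2304, which
+    # assumes the 2304-d SlowFast RoI feature (2048 slow + 256 fast channels)
+    # is concatenated with a same-width global context feature before being
+    # reduced to out_channels=2304 for the bbox head.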
train_cfg=dict( + rcnn=dict( + assigner=dict( + type='MaxIoUAssignerAVA', + pos_iou_thr=0.9, + neg_iou_thr=0.9, + min_pos_iou=0.9), + sampler=dict( + type='RandomSampler', + num=32, + pos_fraction=1, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=1.0, + debug=False)), + test_cfg=dict(rcnn=dict(action_thr=0.002))) + +dataset_type = 'AVADataset' +data_root = 'data/ava/rawframes' +anno_root = 'data/ava/annotations' + +ann_file_train = f'{anno_root}/ava_train_v2.1.csv' +ann_file_val = f'{anno_root}/ava_val_v2.1.csv' + +exclude_file_train = f'{anno_root}/ava_train_excluded_timestamps_v2.1.csv' +exclude_file_val = f'{anno_root}/ava_val_excluded_timestamps_v2.1.csv' + +label_file = f'{anno_root}/ava_action_list_v2.1.pbtxt' + +proposal_file_train = (f'{anno_root}/ava_dense_proposals_train.FAIR.' + 'recall_93.9.pkl') +proposal_file_val = f'{anno_root}/ava_dense_proposals_val.FAIR.recall_93.9.pkl' + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) + +train_pipeline = [ + dict(type='SampleAVAFrames', clip_len=32, frame_interval=2), + dict(type='RawFrameDecode'), + dict(type='RandomRescale', scale_range=(256, 320)), + dict(type='RandomCrop', size=256), + dict(type='Flip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW', collapse=True), + dict(type='Rename', mapping=dict(imgs='img')), + dict(type='ToTensor', keys=['img', 'proposals', 'gt_bboxes', 'gt_labels']), + dict( + type='ToDataContainer', + fields=[ + dict(key=['proposals', 'gt_bboxes', 'gt_labels'], stack=False) + ]), + dict( + type='Collect', + keys=['img', 'proposals', 'gt_bboxes', 'gt_labels'], + meta_keys=['scores', 'entity_ids']) +] +# The testing is w/o. any cropping / flipping +val_pipeline = [ + dict(type='SampleAVAFrames', clip_len=32, frame_interval=2), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW', collapse=True), + dict(type='Rename', mapping=dict(imgs='img')), + dict(type='ToTensor', keys=['img', 'proposals']), + dict(type='ToDataContainer', fields=[dict(key='proposals', stack=False)]), + dict( + type='Collect', + keys=['img', 'proposals'], + meta_keys=['scores', 'img_shape'], + nested=True) +] + +data = dict( + videos_per_gpu=6, + workers_per_gpu=4, + val_dataloader=dict(videos_per_gpu=1), + test_dataloader=dict(videos_per_gpu=1), + train=dict( + type=dataset_type, + ann_file=ann_file_train, + exclude_file=exclude_file_train, + pipeline=train_pipeline, + label_file=label_file, + proposal_file=proposal_file_train, + person_det_score_thr=0.9, + data_prefix=data_root), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + exclude_file=exclude_file_val, + pipeline=val_pipeline, + label_file=label_file, + proposal_file=proposal_file_val, + person_det_score_thr=0.9, + data_prefix=data_root)) +data['test'] = data['val'] +# optimizer +optimizer = dict(type='SGD', lr=0.075, momentum=0.9, weight_decay=0.00001) +# this lr is used for 8 gpus +optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2)) +# learning policy +lr_config = dict( + policy='CosineAnnealing', + by_epoch=False, + min_lr=0, + warmup='linear', + warmup_by_epoch=True, + warmup_iters=2, + warmup_ratio=0.1) +total_epochs = 10 +checkpoint_config = dict(interval=1) +workflow = [('train', 1)] +evaluation = dict(interval=1) +log_config = dict( + interval=20, hooks=[ + dict(type='TextLoggerHook'), + ]) +dist_params = dict(backend='nccl') 
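+# How the ACRN channel sizes in this config fit together: the SlowFast-R50
+# backbone outputs 2048 (slow pathway) + 256 (fast pathway) = 2304 channels.
+# ACRNHead tiles the pooled 2304-d RoI feature over the global feature map
+# and concatenates the two, hence shared_head in_channels = 2 * 2304 = 4608,
+# which it reduces back to out_channels = 2304 to match bbox_head.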
+log_level = 'INFO' +work_dir = './work_dirs/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb' # noqa: E501 +load_from = 'https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/slowfast_r50_8x8x1_256e_kinetics400_rgb_20200716-73547d2b.pth' # noqa: E501 +resume_from = None +find_unused_parameters = False diff --git a/configs/detection/ava/README.md b/configs/detection/ava/README.md index ea44eb66c6..6b03ff07ad 100644 --- a/configs/detection/ava/README.md +++ b/configs/detection/ava/README.md @@ -57,6 +57,14 @@ | [slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8x2 | short-side 256 | 25.4 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201222.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201222.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201222-f4d209c9.pth) | | [slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb](/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8x2 | short-side 256 | 25.5 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217-ae225e97.pth) | +### AVA2.2 + +| Model | Modality | Pretrained | Backbone | Input | gpus | mAP | log | json | ckpt | +| :----------------------------------------------------------: | :------: | :----------: | :------: | :---: | :--: | :--: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb](/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 26.1 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-b987b516.pth) | +| [slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb](/configs/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py) | RGB | Kinetics-400 | ResNet50 | 
32x2 | 8 | 26.4 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-874e0845.pth) | +| [slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb](/configs/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 26.8 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-345618cd.pth) | + - Notes: 1. The **gpus** indicates the number of gpu we used to get the checkpoint. @@ -77,7 +85,7 @@ python tools/train.py ${CONFIG_FILE} [optional arguments] Example: train SlowOnly model on AVA with periodic validation. ```shell -python tools/train.py configs/detection/AVA/slowonly_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py --validate +python tools/train.py configs/detection/ava/slowonly_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py --validate ``` For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting) . @@ -115,7 +123,7 @@ python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] Example: test SlowOnly model on AVA and dump the result to a csv file. ```shell -python tools/test.py configs/detection/AVA/slowonly_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py checkpoints/SOME_CHECKPOINT.pth --eval mAP --out results.csv +python tools/test.py configs/detection/ava/slowonly_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py checkpoints/SOME_CHECKPOINT.pth --eval mAP --out results.csv ``` For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset) . 
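The ACRN configs added at the top of this patch wire the new head in as `shared_head=dict(type='ACRNHead', in_channels=4608, out_channels=2304)`. As a rough, self-contained sketch of what that head computes (the full implementation lands in `mmaction/models/heads/misc_head.py` later in this patch): the pooled actor RoI feature is tiled over the global feature map, concatenated with it, and fused by a 1x1 and a 3x3 convolution. The module below is a simplified stand-in, using plain `nn.Conv3d` layers without the `ConvModule` norm/activation wrappers, not the actual head:

```python
import torch
import torch.nn as nn


class ACRNSketch(nn.Module):
    """Simplified tile-and-concatenate sketch of ACRNHead."""

    def __init__(self, in_channels, out_channels):
        super().__init__()
        self.max_pool = nn.AdaptiveMaxPool3d(1)
        self.conv1 = nn.Conv3d(in_channels, out_channels, 1, bias=False)
        self.conv2 = nn.Conv3d(
            out_channels, out_channels, (1, 3, 3), padding=(0, 1, 1),
            bias=False)

    def forward(self, roi_feat, global_feat, rois):
        # collapse each RoI feature to a single vector ...
        x = self.max_pool(roi_feat)
        # ... and tile it over the spatial extent of the global feature
        h, w = global_feat.shape[-2:]
        x_tile = x.repeat(1, 1, 1, h, w)
        # rois[:, 0] holds the batch index of each proposal
        roi_gfeat = global_feat[rois[:, 0].long()]
        # actor-centric relation: actor feature + scene context, fused
        new_feat = torch.cat([x_tile, roi_gfeat], dim=1)
        return self.conv2(self.conv1(new_feat))


# shapes as in the new unit test: 4 proposals over a batch of 2 clips
roi_feat = torch.randn(4, 16, 1, 7, 7)
global_feat = torch.randn(2, 16, 1, 16, 16)
rois = torch.tensor([[0., 1., 1., 6., 6.], [1., 2., 2., 7., 7.],
                     [0., 2., 2., 9., 9.], [1., 2., 0., 10., 9.]])
out = ACRNSketch(32, 16)(roi_feat, global_feat, rois)
assert out.shape == (4, 16, 1, 16, 16)
```

With the 2304-channel SlowFast-R50 features used in the detection configs, the concatenation step is what produces the `in_channels=4608` seen above.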
diff --git a/configs/detection/ava/README_zh-CN.md b/configs/detection/ava/README_zh-CN.md index 79e0b14a10..aa1e896677 100644 --- a/configs/detection/ava/README_zh-CN.md +++ b/configs/detection/ava/README_zh-CN.md @@ -57,6 +57,14 @@ | [slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8x2 | 短边 256 | 25.4 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201222.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201222.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201222-f4d209c9.pth) | | [slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb](/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8x2 | 短边 256 | 25.5 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217-ae225e97.pth) | +### AVA2.2 + +| 配置文件 | 模态 | 预训练 | 主干网络 | 输入 | GPU 数量 | mAP | log | json | ckpt | +| :----------------------------------------------------------: | :--: | :----------: | :------: | :--: | :------: | :--: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb](/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 26.1 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-b987b516.pth) | +| [slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb](/configs/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 26.4 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log) | 
[json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-874e0845.pth) | +| [slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb](/configs/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 26.8 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-345618cd.pth) | + 注: 1. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 @@ -77,7 +85,7 @@ python tools/train.py ${CONFIG_FILE} [optional arguments] 例如:在 AVA 数据集上训练 SlowOnly,并定期验证。 ```shell -python tools/train.py configs/detection/AVA/slowonly_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py --validate +python tools/train.py configs/detection/ava/slowonly_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py --validate ``` 更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 @@ -115,7 +123,7 @@ python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] 例如:在 AVA 上测试 SlowOnly 模型,并将结果存为 csv 文件。 ```shell -python tools/test.py configs/detection/AVA/slowonly_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py checkpoints/SOME_CHECKPOINT.pth --eval mAP --out results.csv +python tools/test.py configs/detection/ava/slowonly_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py checkpoints/SOME_CHECKPOINT.pth --eval mAP --out results.csv ``` 更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py b/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py new file mode 100644 index 0000000000..f7898d0b61 --- /dev/null +++ b/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py @@ -0,0 +1,167 @@ +model = dict( + type='FastRCNN', + backbone=dict( + type='ResNet3dSlowFast', + pretrained=None, + resample_rate=4, + speed_ratio=4, + channel_ratio=8, + slow_pathway=dict( + type='resnet3d', + depth=50, + pretrained=None, + lateral=True, + fusion_kernel=7, + conv1_kernel=(1, 7, 7), + dilations=(1, 1, 1, 1), + conv1_stride_t=1, + pool1_stride_t=1, + inflate=(0, 0, 1, 1), + spatial_strides=(1, 2, 2, 1)), + fast_pathway=dict( + type='resnet3d', + depth=50, + pretrained=None, + lateral=False, + base_channels=8, + conv1_kernel=(5, 7, 7), + conv1_stride_t=1, + pool1_stride_t=1, + spatial_strides=(1, 2, 2, 1))), + roi_head=dict( + type='AVARoIHead', + 
bbox_roi_extractor=dict( + type='SingleRoIExtractor3D', + roi_layer_type='RoIAlign', + output_size=8, + with_temporal_pool=True), + bbox_head=dict( + type='BBoxHeadAVA', + dropout_ratio=0.5, + in_channels=2304, + num_classes=81, + multilabel=True)), + train_cfg=dict( + rcnn=dict( + assigner=dict( + type='MaxIoUAssignerAVA', + pos_iou_thr=0.9, + neg_iou_thr=0.9, + min_pos_iou=0.9), + sampler=dict( + type='RandomSampler', + num=32, + pos_fraction=1, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=1.0, + debug=False)), + test_cfg=dict(rcnn=dict(action_thr=0.002))) + +dataset_type = 'AVADataset' +data_root = 'data/ava/rawframes' +anno_root = 'data/ava/annotations' + +ann_file_train = f'{anno_root}/ava_train_v2.2.csv' +ann_file_val = f'{anno_root}/ava_val_v2.2.csv' + +exclude_file_train = f'{anno_root}/ava_train_excluded_timestamps_v2.2.csv' +exclude_file_val = f'{anno_root}/ava_val_excluded_timestamps_v2.2.csv' + +label_file = f'{anno_root}/ava_action_list_v2.2.pbtxt' + +proposal_file_train = (f'{anno_root}/ava_dense_proposals_train.FAIR.' + 'recall_93.9.pkl') +proposal_file_val = f'{anno_root}/ava_dense_proposals_val.FAIR.recall_93.9.pkl' + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) + +train_pipeline = [ + dict(type='SampleAVAFrames', clip_len=32, frame_interval=2), + dict(type='RawFrameDecode'), + dict(type='RandomRescale', scale_range=(256, 320)), + dict(type='RandomCrop', size=256), + dict(type='Flip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW', collapse=True), + dict(type='Rename', mapping=dict(imgs='img')), + dict(type='ToTensor', keys=['img', 'proposals', 'gt_bboxes', 'gt_labels']), + dict( + type='ToDataContainer', + fields=[ + dict(key=['proposals', 'gt_bboxes', 'gt_labels'], stack=False) + ]), + dict( + type='Collect', + keys=['img', 'proposals', 'gt_bboxes', 'gt_labels'], + meta_keys=['scores', 'entity_ids']) +] +# The testing is w/o. 
any cropping / flipping +val_pipeline = [ + dict(type='SampleAVAFrames', clip_len=32, frame_interval=2), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW', collapse=True), + dict(type='Rename', mapping=dict(imgs='img')), + dict(type='ToTensor', keys=['img', 'proposals']), + dict(type='ToDataContainer', fields=[dict(key='proposals', stack=False)]), + dict( + type='Collect', + keys=['img', 'proposals'], + meta_keys=['scores', 'img_shape'], + nested=True) +] + +data = dict( + videos_per_gpu=6, + workers_per_gpu=4, + val_dataloader=dict(videos_per_gpu=1), + test_dataloader=dict(videos_per_gpu=1), + train=dict( + type=dataset_type, + ann_file=ann_file_train, + exclude_file=exclude_file_train, + pipeline=train_pipeline, + label_file=label_file, + proposal_file=proposal_file_train, + person_det_score_thr=0.9, + data_prefix=data_root), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + exclude_file=exclude_file_val, + pipeline=val_pipeline, + label_file=label_file, + proposal_file=proposal_file_val, + person_det_score_thr=0.9, + data_prefix=data_root)) +data['test'] = data['val'] +# optimizer +optimizer = dict(type='SGD', lr=0.075, momentum=0.9, weight_decay=0.00001) +# this lr is used for 8 gpus +optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2)) +# learning policy +lr_config = dict( + policy='CosineAnnealing', + by_epoch=False, + min_lr=0, + warmup='linear', + warmup_by_epoch=True, + warmup_iters=2, + warmup_ratio=0.1) +total_epochs = 10 +checkpoint_config = dict(interval=1) +workflow = [('train', 1)] +evaluation = dict(interval=1) +log_config = dict( + interval=20, hooks=[ + dict(type='TextLoggerHook'), + ]) +dist_params = dict(backend='nccl') +log_level = 'INFO' +work_dir = './work_dirs/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb' # noqa: E501 +load_from = 'https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/slowfast_r50_8x8x1_256e_kinetics400_rgb_20200716-73547d2b.pth' # noqa: E501 +resume_from = None +find_unused_parameters = False diff --git a/configs/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py b/configs/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py new file mode 100644 index 0000000000..b59e0008d8 --- /dev/null +++ b/configs/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py @@ -0,0 +1,168 @@ +model = dict( + type='FastRCNN', + backbone=dict( + type='ResNet3dSlowFast', + pretrained=None, + resample_rate=4, + speed_ratio=4, + channel_ratio=8, + slow_pathway=dict( + type='resnet3d', + depth=50, + pretrained=None, + lateral=True, + fusion_kernel=7, + conv1_kernel=(1, 7, 7), + dilations=(1, 1, 1, 1), + conv1_stride_t=1, + pool1_stride_t=1, + inflate=(0, 0, 1, 1), + spatial_strides=(1, 2, 2, 1)), + fast_pathway=dict( + type='resnet3d', + depth=50, + pretrained=None, + lateral=False, + base_channels=8, + conv1_kernel=(5, 7, 7), + conv1_stride_t=1, + pool1_stride_t=1, + spatial_strides=(1, 2, 2, 1))), + roi_head=dict( + type='AVARoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor3D', + roi_layer_type='RoIAlign', + output_size=8, + with_temporal_pool=True, + temporal_pool_mode='max'), + bbox_head=dict( + type='BBoxHeadAVA', + dropout_ratio=0.5, + in_channels=2304, + num_classes=81, + multilabel=True)), + 
train_cfg=dict( + rcnn=dict( + assigner=dict( + type='MaxIoUAssignerAVA', + pos_iou_thr=0.9, + neg_iou_thr=0.9, + min_pos_iou=0.9), + sampler=dict( + type='RandomSampler', + num=32, + pos_fraction=1, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=1.0, + debug=False)), + test_cfg=dict(rcnn=dict(action_thr=0.002))) + +dataset_type = 'AVADataset' +data_root = 'data/ava/rawframes' +anno_root = 'data/ava/annotations' + +ann_file_train = f'{anno_root}/ava_train_v2.2.csv' +ann_file_val = f'{anno_root}/ava_val_v2.2.csv' + +exclude_file_train = f'{anno_root}/ava_train_excluded_timestamps_v2.2.csv' +exclude_file_val = f'{anno_root}/ava_val_excluded_timestamps_v2.2.csv' + +label_file = f'{anno_root}/ava_action_list_v2.2.pbtxt' + +proposal_file_train = (f'{anno_root}/ava_dense_proposals_train.FAIR.' + 'recall_93.9.pkl') +proposal_file_val = f'{anno_root}/ava_dense_proposals_val.FAIR.recall_93.9.pkl' + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) + +train_pipeline = [ + dict(type='SampleAVAFrames', clip_len=32, frame_interval=2), + dict(type='RawFrameDecode'), + dict(type='RandomRescale', scale_range=(256, 320)), + dict(type='RandomCrop', size=256), + dict(type='Flip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW', collapse=True), + dict(type='Rename', mapping=dict(imgs='img')), + dict(type='ToTensor', keys=['img', 'proposals', 'gt_bboxes', 'gt_labels']), + dict( + type='ToDataContainer', + fields=[ + dict(key=['proposals', 'gt_bboxes', 'gt_labels'], stack=False) + ]), + dict( + type='Collect', + keys=['img', 'proposals', 'gt_bboxes', 'gt_labels'], + meta_keys=['scores', 'entity_ids']) +] +# The testing is w/o. any cropping / flipping +val_pipeline = [ + dict(type='SampleAVAFrames', clip_len=32, frame_interval=2), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW', collapse=True), + dict(type='Rename', mapping=dict(imgs='img')), + dict(type='ToTensor', keys=['img', 'proposals']), + dict(type='ToDataContainer', fields=[dict(key='proposals', stack=False)]), + dict( + type='Collect', + keys=['img', 'proposals'], + meta_keys=['scores', 'img_shape'], + nested=True) +] + +data = dict( + videos_per_gpu=6, + workers_per_gpu=4, + val_dataloader=dict(videos_per_gpu=1), + test_dataloader=dict(videos_per_gpu=1), + train=dict( + type=dataset_type, + ann_file=ann_file_train, + exclude_file=exclude_file_train, + pipeline=train_pipeline, + label_file=label_file, + proposal_file=proposal_file_train, + person_det_score_thr=0.9, + data_prefix=data_root), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + exclude_file=exclude_file_val, + pipeline=val_pipeline, + label_file=label_file, + proposal_file=proposal_file_val, + person_det_score_thr=0.9, + data_prefix=data_root)) +data['test'] = data['val'] +# optimizer +optimizer = dict(type='SGD', lr=0.075, momentum=0.9, weight_decay=0.00001) +# this lr is used for 8 gpus +optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2)) +# learning policy +lr_config = dict( + policy='CosineAnnealing', + by_epoch=False, + min_lr=0, + warmup='linear', + warmup_by_epoch=True, + warmup_iters=2, + warmup_ratio=0.1) +total_epochs = 10 +checkpoint_config = dict(interval=1) +workflow = [('train', 1)] +evaluation = dict(interval=1) +log_config = dict( + interval=20, hooks=[ + dict(type='TextLoggerHook'), + ]) +dist_params = dict(backend='nccl') 
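+# As the file name indicates, this variant is meant to train BBoxHeadAVA with
+# focal re-weighting (focal_alpha=3, focal_gamma=1). The modulation applied in
+# bbox_head.py (see the hunk later in this patch) is, in effect:
+#     loss = F.binary_cross_entropy_with_logits(cls_score, labels,
+#                                               reduction='none')
+#     pt = torch.exp(-loss)
+#     loss = (focal_alpha * (1 - pt) ** focal_gamma * loss).mean()
+# With the defaults (focal_alpha=1, focal_gamma=0) this reduces to plain
+# BCEWithLogits.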
+log_level = 'INFO' +work_dir = './work_dirs/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb' # noqa: E501 +load_from = 'https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/slowfast_r50_8x8x1_256e_kinetics400_rgb_20200716-73547d2b.pth' # noqa: E501 +resume_from = None +find_unused_parameters = False diff --git a/configs/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py b/configs/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py new file mode 100644 index 0000000000..e77496a39a --- /dev/null +++ b/configs/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py @@ -0,0 +1,168 @@ +model = dict( + type='FastRCNN', + backbone=dict( + type='ResNet3dSlowFast', + pretrained=None, + resample_rate=4, + speed_ratio=4, + channel_ratio=8, + slow_pathway=dict( + type='resnet3d', + depth=50, + pretrained=None, + lateral=True, + fusion_kernel=7, + conv1_kernel=(1, 7, 7), + dilations=(1, 1, 1, 1), + conv1_stride_t=1, + pool1_stride_t=1, + inflate=(0, 0, 1, 1), + spatial_strides=(1, 2, 2, 1)), + fast_pathway=dict( + type='resnet3d', + depth=50, + pretrained=None, + lateral=False, + base_channels=8, + conv1_kernel=(5, 7, 7), + conv1_stride_t=1, + pool1_stride_t=1, + spatial_strides=(1, 2, 2, 1))), + roi_head=dict( + type='AVARoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor3D', + roi_layer_type='RoIAlign', + output_size=8, + with_temporal_pool=True, + temporal_pool_mode='max'), + bbox_head=dict( + type='BBoxHeadAVA', + dropout_ratio=0.5, + in_channels=2304, + num_classes=81, + multilabel=True)), + train_cfg=dict( + rcnn=dict( + assigner=dict( + type='MaxIoUAssignerAVA', + pos_iou_thr=0.9, + neg_iou_thr=0.9, + min_pos_iou=0.9), + sampler=dict( + type='RandomSampler', + num=32, + pos_fraction=1, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=1.0, + debug=False)), + test_cfg=dict(rcnn=dict(action_thr=0.002))) + +dataset_type = 'AVADataset' +data_root = 'data/ava/rawframes' +anno_root = 'data/ava/annotations' + +ann_file_train = f'{anno_root}/ava_train_v2.2.csv' +ann_file_val = f'{anno_root}/ava_val_v2.2.csv' + +exclude_file_train = f'{anno_root}/ava_train_excluded_timestamps_v2.2.csv' +exclude_file_val = f'{anno_root}/ava_val_excluded_timestamps_v2.2.csv' + +label_file = f'{anno_root}/ava_action_list_v2.2.pbtxt' + +proposal_file_train = (f'{anno_root}/ava_dense_proposals_train.FAIR.' + 'recall_93.9.pkl') +proposal_file_val = f'{anno_root}/ava_dense_proposals_val.FAIR.recall_93.9.pkl' + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) + +train_pipeline = [ + dict(type='SampleAVAFrames', clip_len=32, frame_interval=2), + dict(type='RawFrameDecode'), + dict(type='RandomRescale', scale_range=(256, 320)), + dict(type='RandomCrop', size=256), + dict(type='Flip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW', collapse=True), + dict(type='Rename', mapping=dict(imgs='img')), + dict(type='ToTensor', keys=['img', 'proposals', 'gt_bboxes', 'gt_labels']), + dict( + type='ToDataContainer', + fields=[ + dict(key=['proposals', 'gt_bboxes', 'gt_labels'], stack=False) + ]), + dict( + type='Collect', + keys=['img', 'proposals', 'gt_bboxes', 'gt_labels'], + meta_keys=['scores', 'entity_ids']) +] +# The testing is w/o. 
any cropping / flipping +val_pipeline = [ + dict(type='SampleAVAFrames', clip_len=32, frame_interval=2), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW', collapse=True), + dict(type='Rename', mapping=dict(imgs='img')), + dict(type='ToTensor', keys=['img', 'proposals']), + dict(type='ToDataContainer', fields=[dict(key='proposals', stack=False)]), + dict( + type='Collect', + keys=['img', 'proposals'], + meta_keys=['scores', 'img_shape'], + nested=True) +] + +data = dict( + videos_per_gpu=6, + workers_per_gpu=4, + val_dataloader=dict(videos_per_gpu=1), + test_dataloader=dict(videos_per_gpu=1), + train=dict( + type=dataset_type, + ann_file=ann_file_train, + exclude_file=exclude_file_train, + pipeline=train_pipeline, + label_file=label_file, + proposal_file=proposal_file_train, + person_det_score_thr=0.9, + data_prefix=data_root), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + exclude_file=exclude_file_val, + pipeline=val_pipeline, + label_file=label_file, + proposal_file=proposal_file_val, + person_det_score_thr=0.9, + data_prefix=data_root)) +data['test'] = data['val'] +# optimizer +optimizer = dict(type='SGD', lr=0.075, momentum=0.9, weight_decay=0.00001) +# this lr is used for 8 gpus +optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2)) +# learning policy +lr_config = dict( + policy='CosineAnnealing', + by_epoch=False, + min_lr=0, + warmup='linear', + warmup_by_epoch=True, + warmup_iters=2, + warmup_ratio=0.1) +total_epochs = 10 +checkpoint_config = dict(interval=1) +workflow = [('train', 1)] +evaluation = dict(interval=1) +log_config = dict( + interval=20, hooks=[ + dict(type='TextLoggerHook'), + ]) +dist_params = dict(backend='nccl') +log_level = 'INFO' +work_dir = './work_dirs/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb' # noqa: E501 +load_from = 'https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/slowfast_r50_8x8x1_256e_kinetics400_rgb_20200716-73547d2b.pth' # noqa: E501 +resume_from = None +find_unused_parameters = False diff --git a/mmaction/datasets/ava_dataset.py b/mmaction/datasets/ava_dataset.py index 111a5e146f..4a314cd96f 100644 --- a/mmaction/datasets/ava_dataset.py +++ b/mmaction/datasets/ava_dataset.py @@ -150,6 +150,16 @@ def __init__(self, self.video_infos = [self.video_infos[i] for i in valid_indexes] def parse_img_record(self, img_records): + """Merge image records of the same entity at the same time. + + Args: + img_records (list[dict]): List of img_records (lines in AVA + annotations). 
+ + Returns: + tuple(list): A tuple consists of lists of bboxes, action labels and + entity_ids + """ bboxes, labels, entity_ids = [], [], [] while len(img_records) > 0: img_record = img_records[0] @@ -184,6 +194,7 @@ def parse_img_record(self, img_records): return bboxes, labels, entity_ids def filter_exclude_file(self): + """Filter out records in the exclude_file.""" valid_indexes = [] if self.exclude_file is None: valid_indexes = list(range(len(self.video_infos))) @@ -201,6 +212,7 @@ def filter_exclude_file(self): return valid_indexes def load_annotations(self): + """Load AVA annotations.""" video_infos = [] records_dict_by_img = defaultdict(list) with open(self.ann_file, 'r') as fin: @@ -326,6 +338,7 @@ def prepare_test_frames(self, idx): return self.pipeline(results) def dump_results(self, results, out): + """Dump predictions into a csv file.""" assert out.endswith('csv') results2csv(self, results, out, self.custom_classes) @@ -334,7 +347,7 @@ def evaluate(self, metrics=('mAP', ), metric_options=None, logger=None): - # need to create a temp result file + """Evaluate the prediction results and report mAP.""" assert len(metrics) == 1 and metrics[0] == 'mAP', ( 'For evaluation on AVADataset, you need to use metrics "mAP" ' 'See https://github.com/open-mmlab/mmaction2/pull/567 ' diff --git a/mmaction/models/__init__.py b/mmaction/models/__init__.py index ac7b4ce27e..010661fb32 100644 --- a/mmaction/models/__init__.py +++ b/mmaction/models/__init__.py @@ -7,9 +7,9 @@ build_localizer, build_loss, build_model, build_neck, build_recognizer) from .common import LFB, TAM, Conv2plus1d, ConvAudio -from .heads import (AudioTSNHead, AVARoIHead, BaseHead, BBoxHeadAVA, FBOHead, - I3DHead, LFBInferHead, SlowFastHead, TPNHead, TRNHead, - TSMHead, TSNHead, X3DHead) +from .heads import (ACRNHead, AudioTSNHead, AVARoIHead, BaseHead, BBoxHeadAVA, + FBOHead, I3DHead, LFBInferHead, SlowFastHead, TPNHead, + TRNHead, TSMHead, TSNHead, X3DHead) from .localizers import BMN, PEM, TEM from .losses import (BCELossWithLogits, BinaryLogisticRegressionLoss, BMNLoss, CrossEntropyLoss, HVULoss, NLLLoss, OHEMHingeLoss, @@ -32,5 +32,5 @@ 'AudioTSNHead', 'X3D', 'X3DHead', 'ResNet3dLayer', 'DETECTORS', 'SingleRoIExtractor3D', 'BBoxHeadAVA', 'ResNetAudio', 'build_detector', 'ConvAudio', 'AVARoIHead', 'MobileNetV2', 'MobileNetV2TSM', 'TANet', 'LFB', - 'FBOHead', 'LFBInferHead', 'TRNHead', 'NECKS' + 'FBOHead', 'LFBInferHead', 'TRNHead', 'NECKS', 'ACRNHead' ] diff --git a/mmaction/models/heads/__init__.py b/mmaction/models/heads/__init__.py index ada62589a2..4684f3f28c 100644 --- a/mmaction/models/heads/__init__.py +++ b/mmaction/models/heads/__init__.py @@ -4,6 +4,7 @@ from .fbo_head import FBOHead from .i3d_head import I3DHead from .lfb_infer_head import LFBInferHead +from .misc_head import ACRNHead from .roi_head import AVARoIHead from .slowfast_head import SlowFastHead from .ssn_head import SSNHead @@ -16,5 +17,5 @@ __all__ = [ 'TSNHead', 'I3DHead', 'BaseHead', 'TSMHead', 'SlowFastHead', 'SSNHead', 'TPNHead', 'AudioTSNHead', 'X3DHead', 'BBoxHeadAVA', 'AVARoIHead', - 'FBOHead', 'LFBInferHead', 'TRNHead' + 'FBOHead', 'LFBInferHead', 'TRNHead', 'ACRNHead' ] diff --git a/mmaction/models/heads/bbox_head.py b/mmaction/models/heads/bbox_head.py index 28e5bef243..cd2cc52622 100644 --- a/mmaction/models/heads/bbox_head.py +++ b/mmaction/models/heads/bbox_head.py @@ -21,6 +21,12 @@ class BBoxHeadAVA(nn.Module): spatial_pool_type (str): The spatial pool type. Choices are 'avg' or 'max'. Default: 'max'. 
in_channels (int): The number of input channels. Default: 2048. + focal_alpha (float): The hyper-parameter alpha for Focal Loss. + When alpha == 1 and gamma == 0, Focal Loss degenerates to + BCELossWithLogits. Default: 1. + focal_gamma (float): The hyper-parameter gamma for Focal Loss. + When alpha == 1 and gamma == 0, Focal Loss degenerates to + BCELossWithLogits. Default: 0. num_classes (int): The number of classes. Default: 81. dropout_ratio (float): A float in [0, 1], indicates the dropout_ratio. Default: 0. @@ -38,6 +44,8 @@ def __init__( spatial_pool_type='max', in_channels=2048, # The first class is reserved, to classify bbox as pos / neg + focal_gamma=0., + focal_alpha=1., num_classes=81, dropout_ratio=0, dropout_before_pool=True, @@ -58,6 +66,9 @@ def __init__( self.multilabel = multilabel + self.focal_gamma = focal_gamma + self.focal_alpha = focal_alpha + if topk is None: self.topk = () elif isinstance(topk, int): @@ -172,7 +183,12 @@ def loss(self, labels = labels[pos_inds] bce_loss = F.binary_cross_entropy_with_logits - losses['loss_action_cls'] = bce_loss(cls_score, labels) + + loss = bce_loss(cls_score, labels, reduction='none') + pt = torch.exp(-loss) + F_loss = self.focal_alpha * (1 - pt)**self.focal_gamma * loss + losses['loss_action_cls'] = torch.mean(F_loss) + recall_thr, prec_thr, recall_k, prec_k = self.multilabel_accuracy( cls_score, labels, thr=0.5) losses['recall@thr=0.5'] = recall_thr diff --git a/mmaction/models/heads/fbo_head.py b/mmaction/models/heads/fbo_head.py index fca609e7dc..7aeec0ca86 100644 --- a/mmaction/models/heads/fbo_head.py +++ b/mmaction/models/heads/fbo_head.py @@ -379,7 +379,7 @@ def sample_lfb(self, rois, img_metas): lt_feat = lt_feat.permute(0, 2, 1).contiguous() return lt_feat.unsqueeze(-1).unsqueeze(-1) - def forward(self, x, rois, img_metas): + def forward(self, x, rois, img_metas, **kwargs): # [N, C, 1, 1, 1] st_feat = self.temporal_pool(x) st_feat = self.spatial_pool(st_feat) diff --git a/mmaction/models/heads/lfb_infer_head.py b/mmaction/models/heads/lfb_infer_head.py index 2e26c346ce..1111b180c5 100644 --- a/mmaction/models/heads/lfb_infer_head.py +++ b/mmaction/models/heads/lfb_infer_head.py @@ -68,7 +68,7 @@ def init_weights(self, pretrained=None): # LFBInferHead has no parameters to be initialized. pass - def forward(self, x, rois, img_metas): + def forward(self, x, rois, img_metas, **kwargs): # [N, C, 1, 1, 1] features = self.temporal_pool(x) features = self.spatial_pool(features) diff --git a/mmaction/models/heads/misc_head.py b/mmaction/models/heads/misc_head.py new file mode 100644 index 0000000000..72cdaab547 --- /dev/null +++ b/mmaction/models/heads/misc_head.py @@ -0,0 +1,131 @@ +import torch +import torch.nn as nn +from mmcv.cnn import ConvModule, constant_init, kaiming_init +from mmcv.utils import _BatchNorm + +try: + from mmdet.models.builder import SHARED_HEADS as MMDET_SHARED_HEADS + mmdet_imported = True +except (ImportError, ModuleNotFoundError): + mmdet_imported = False + +# Note: All these heads take 5D Tensors as input (N, C, T, H, W) + + +class ACRNHead(nn.Module): + """ACRN Head: Tile + 1x1 convolution + 3x3 convolution. + + This module is proposed in + `Actor-Centric Relation Network + `_ + + Args: + in_channels (int): The input channel. + out_channels (int): The output channel. + stride (int): The spatial stride. + num_convs (int): The number of 3x3 convolutions in ACRNHead. + conv_cfg (dict): Config for norm layers. Default: dict(type='Conv'). + norm_cfg (dict): + Config for norm layers. 
required keys are `type` and + `requires_grad`. Default: dict(type='BN2d', requires_grad=True). + act_cfg (dict): Config for activate layers. + Default: dict(type='ReLU', inplace=True). + """ + + def __init__(self, + in_channels, + out_channels, + stride=1, + num_convs=1, + conv_cfg=dict(type='Conv3d'), + norm_cfg=dict(type='BN3d', requires_grad=True), + act_cfg=dict(type='ReLU', inplace=True)): + + super().__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.stride = stride + self.num_convs = num_convs + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.act_cfg = act_cfg + self.max_pool = nn.AdaptiveMaxPool3d(1) + + self.conv1 = ConvModule( + in_channels, + out_channels, + kernel_size=1, + stride=1, + padding=0, + bias=False, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg) + + assert num_convs >= 1 + self.conv2 = ConvModule( + out_channels, + out_channels, + kernel_size=(1, 3, 3), + stride=(1, stride, stride), + padding=(0, 1, 1), + bias=False, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg) + + convs = [] + for i in range(num_convs - 1): + conv = ConvModule( + out_channels, + out_channels, + kernel_size=(1, 3, 3), + padding=(0, 1, 1), + bias=False, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg) + convs.append(conv) + self.convs = nn.ModuleList(convs) + + def init_weights(self, **kwargs): + """Weight Initialization for ACRNHead.""" + for m in self.modules(): + if isinstance(m, nn.Conv3d): + kaiming_init(m) + elif isinstance(m, _BatchNorm): + constant_init(m, 1) + + def forward(self, x, feat, rois, **kwargs): + """Defines the computation performed at every call. + + Args: + x (torch.Tensor): The extracted RoI feature. + feat (torch.Tensor): The context feature. + rois (torch.Tensor): The regions of interest. + + Returns: + torch.Tensor: The RoI features that have interacted with context + feature. + """ + # We use max pooling by default + x = self.max_pool(x) + + h, w = feat.shape[-2:] + x_tile = x.repeat(1, 1, 1, h, w) + + roi_inds = rois[:, 0].type(torch.long) + roi_gfeat = feat[roi_inds] + + new_feat = torch.cat([x_tile, roi_gfeat], dim=1) + new_feat = self.conv1(new_feat) + new_feat = self.conv2(new_feat) + + for conv in self.convs: + new_feat = conv(new_feat) + + return new_feat + + +if mmdet_imported: + MMDET_SHARED_HEADS.register_module()(ACRNHead) diff --git a/mmaction/models/heads/roi_head.py b/mmaction/models/heads/roi_head.py index 456d3f1096..9c167d46f8 100644 --- a/mmaction/models/heads/roi_head.py +++ b/mmaction/models/heads/roi_head.py @@ -17,9 +17,25 @@ class AVARoIHead(StandardRoIHead): def _bbox_forward(self, x, rois, img_metas): - bbox_feat = self.bbox_roi_extractor(x, rois) + """Defines the computation performed to get bbox predictions. + + Args: + x (torch.Tensor): The input tensor. + rois (torch.Tensor): The regions of interest. + img_metas (list): The meta info of images + + Returns: + dict: bbox predictions with features and classification scores. + """ + bbox_feat, global_feat = self.bbox_roi_extractor(x, rois) + if self.with_shared_head: - bbox_feat = self.shared_head(bbox_feat, rois, img_metas) + bbox_feat = self.shared_head( + bbox_feat, + feat=global_feat, + rois=rois, + img_metas=img_metas) + cls_score, bbox_pred = self.bbox_head(bbox_feat) bbox_results = dict( @@ -49,6 +65,7 @@ def simple_test(self, img_metas, proposals=None, rescale=False): + """Defines the computation performed for simple testing.""" assert self.with_bbox, 'Bbox head must be implemented.' 
if isinstance(x, tuple): diff --git a/mmaction/models/roi_extractors/single_straight3d.py b/mmaction/models/roi_extractors/single_straight3d.py index 2761bc4c16..b0ce5266e0 100644 --- a/mmaction/models/roi_extractors/single_straight3d.py +++ b/mmaction/models/roi_extractors/single_straight3d.py @@ -1,5 +1,6 @@ import torch import torch.nn as nn +import torch.nn.functional as F from mmaction.utils import import_module_error_class @@ -55,6 +56,7 @@ def __init__(self, pool_mode='avg', aligned=True, with_temporal_pool=True, + temporal_pool_mode='avg', with_global=False): super().__init__() self.roi_layer_type = roi_layer_type @@ -68,6 +70,8 @@ def __init__(self, self.aligned = aligned self.with_temporal_pool = with_temporal_pool + self.temporal_pool_mode = temporal_pool_mode + self.with_global = with_global if self.roi_layer_type == 'RoIPool': @@ -88,11 +92,22 @@ def init_weights(self): def forward(self, feat, rois): if not isinstance(feat, tuple): feat = (feat, ) + if len(feat) >= 2: - assert self.with_temporal_pool + maxT = max([x.shape[2] for x in feat]) + max_shape = (maxT, ) + feat[0].shape[3:] + # resize each feat to the largest shape (w. nearest) + feat = [F.interpolate(x, max_shape).contiguous() for x in feat] + if self.with_temporal_pool: - feat = [torch.mean(x, 2, keepdim=True) for x in feat] - feat = torch.cat(feat, axis=1) + if self.temporal_pool_mode == 'avg': + feat = [torch.mean(x, 2, keepdim=True) for x in feat] + elif self.temporal_pool_mode == 'max': + feat = [torch.max(x, 2, keepdim=True)[0] for x in feat] + else: + raise NotImplementedError + + feat = torch.cat(feat, axis=1).contiguous() roi_feats = [] for t in range(feat.size(2)): @@ -106,7 +121,7 @@ def forward(self, feat, rois): roi_feat = roi_feat.contiguous() roi_feats.append(roi_feat) - return torch.stack(roi_feats, dim=2) + return torch.stack(roi_feats, dim=2), feat if mmdet_imported: diff --git a/tests/test_models/test_head.py b/tests/test_models/test_head.py index 1856265266..d75608cfb4 100644 --- a/tests/test_models/test_head.py +++ b/tests/test_models/test_head.py @@ -8,9 +8,9 @@ import torch.nn as nn import mmaction -from mmaction.models import (AudioTSNHead, BBoxHeadAVA, FBOHead, I3DHead, - LFBInferHead, SlowFastHead, TPNHead, TRNHead, - TSMHead, TSNHead, X3DHead) +from mmaction.models import (ACRNHead, AudioTSNHead, BBoxHeadAVA, FBOHead, + I3DHead, LFBInferHead, SlowFastHead, TPNHead, + TRNHead, TSMHead, TSNHead, X3DHead) from .base import generate_backbone_demo_inputs @@ -476,3 +476,25 @@ def test_tpn_head(): assert isinstance(tpn_head.avg_pool2d, nn.AvgPool3d) assert tpn_head.avg_pool2d.kernel_size == (1, 7, 7) assert cls_scores.shape == torch.Size([2, 4]) + + +def test_acrn_head(): + roi_feat = torch.randn(4, 16, 1, 7, 7) + feat = torch.randn(2, 16, 1, 16, 16) + rois = torch.Tensor([[0, 2.2268, 0.5926, 10.6142, 8.0029], + [0, 2.2577, 0.1519, 11.6451, 8.9282], + [1, 1.9874, 1.0000, 11.1585, 8.2840], + [1, 3.3338, 3.7166, 8.4174, 11.2785]]) + + acrn_head = ACRNHead(32, 16) + acrn_head.init_weights() + new_feat = acrn_head(roi_feat, feat, rois) + assert new_feat.shape == (4, 16, 1, 16, 16) + + acrn_head = ACRNHead(32, 16, stride=2) + new_feat = acrn_head(roi_feat, feat, rois) + assert new_feat.shape == (4, 16, 1, 8, 8) + + acrn_head = ACRNHead(32, 16, stride=2, num_convs=2) + new_feat = acrn_head(roi_feat, feat, rois) + assert new_feat.shape == (4, 16, 1, 8, 8) diff --git a/tests/test_models/test_roi_extractor.py b/tests/test_models/test_roi_extractor.py index 414b7fdaa9..25853287bf 100644 --- 
a/tests/test_models/test_roi_extractor.py +++ b/tests/test_models/test_roi_extractor.py @@ -1,4 +1,3 @@ -import pytest import torch from mmaction.models import SingleRoIExtractor3D @@ -16,12 +15,14 @@ def test_single_roi_extractor3d(): feat = torch.randn([4, 64, 8, 16, 16]) rois = torch.tensor([[0., 1., 1., 6., 6.], [1., 2., 2., 7., 7.], [3., 2., 2., 9., 9.], [2., 2., 0., 10., 9.]]) - extracted = roi_extractor(feat, rois) - assert extracted.shape == (4, 64, 1, 8, 8) + roi_feat, feat = roi_extractor(feat, rois) + assert roi_feat.shape == (4, 64, 1, 8, 8) + assert feat.shape == (4, 64, 1, 16, 16) feat = (torch.randn([4, 64, 8, 16, 16]), torch.randn([4, 32, 16, 16, 16])) - extracted = roi_extractor(feat, rois) - assert extracted.shape == (4, 96, 1, 8, 8) + roi_feat, feat = roi_extractor(feat, rois) + assert roi_feat.shape == (4, 96, 1, 8, 8) + assert feat.shape == (4, 96, 1, 16, 16) feat = torch.randn([4, 64, 8, 16, 16]) roi_extractor = SingleRoIExtractor3D( @@ -32,12 +33,14 @@ def test_single_roi_extractor3d(): pool_mode='avg', aligned=True, with_temporal_pool=False) - extracted = roi_extractor(feat, rois) - assert extracted.shape == (4, 64, 8, 8, 8) + roi_feat, feat = roi_extractor(feat, rois) + assert roi_feat.shape == (4, 64, 8, 8, 8) + assert feat.shape == (4, 64, 8, 16, 16) feat = (torch.randn([4, 64, 8, 16, 16]), torch.randn([4, 32, 16, 16, 16])) - with pytest.raises(AssertionError): - extracted = roi_extractor(feat, rois) + roi_feat, feat = roi_extractor(feat, rois) + assert roi_feat.shape == (4, 96, 16, 8, 8) + assert feat.shape == (4, 96, 16, 16, 16) feat = torch.randn([4, 64, 8, 16, 16]) roi_extractor = SingleRoIExtractor3D( @@ -49,5 +52,6 @@ def test_single_roi_extractor3d(): aligned=True, with_temporal_pool=True, with_global=True) - extracted = roi_extractor(feat, rois) - assert extracted.shape == (4, 128, 1, 8, 8) + roi_feat, feat = roi_extractor(feat, rois) + assert roi_feat.shape == (4, 128, 1, 8, 8) + assert feat.shape == (4, 64, 1, 16, 16) From 62dfc3955b98b2b2ff4ba3ef7fd8005d1684cae5 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Mon, 31 May 2021 23:11:45 +0800 Subject: [PATCH 119/414] add changelog for v0.15.0 (#894) * add changelog for v0.15.0 * update --- docs/changelog.md | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/docs/changelog.md b/docs/changelog.md index ab7e757a74..67fccc19a5 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -1,5 +1,37 @@ ## Changelog +### 0.15.0 (31/05/2021) + +**Highlights** + +- Support PoseC3D +- Support ACRN +- Support MIM + +**New Features** + +- Support PoseC3D ([#786](https://github.com/open-mmlab/mmaction2/pull/786), [#890](https://github.com/open-mmlab/mmaction2/pull/890)) +- Support MIM ([#870](https://github.com/open-mmlab/mmaction2/pull/870)) +- Support ACRN and Focal Loss ([#891](https://github.com/open-mmlab/mmaction2/pull/891)) +- Support Jester dataset ([#864](https://github.com/open-mmlab/mmaction2/pull/864)) + +**Improvements** + +- Add `metric_options` for evaluation to docs ([#873](https://github.com/open-mmlab/mmaction2/pull/873)) +- Support creating a new label map based on custom classes for demos about spatio temporal demo ([#879](https://github.com/open-mmlab/mmaction2/pull/879)) +- Improve document about AVA dataset preparation ([#878](https://github.com/open-mmlab/mmaction2/pull/878)) +- Provide a script to extract clip-level feature ([#856](https://github.com/open-mmlab/mmaction2/pull/856)) + +**Bug and Typo Fixes** + +- Fix issues about resume 
([#877](https://github.com/open-mmlab/mmaction2/pull/877), [#878](https://github.com/open-mmlab/mmaction2/pull/878)) +- Correct the key name of `eval_results` dictionary for metric 'mmit_mean_average_precision' ([#885](https://github.com/open-mmlab/mmaction2/pull/885)) + +**ModelZoo** + +- Support Jester dataset ([#864](https://github.com/open-mmlab/mmaction2/pull/864)) +- Support ACRN and Focal Loss ([#891](https://github.com/open-mmlab/mmaction2/pull/891)) + ### 0.14.0 (30/04/2021) **Highlights** From 529f4543cb49e7b5b61972653935369bc9c92e70 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Mon, 31 May 2021 23:12:00 +0800 Subject: [PATCH 120/414] Bump version to v0.15.0 (#895) --- README.md | 2 +- README_zh-CN.md | 2 +- mmaction/version.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 59854b80b9..0b280ec6bb 100644 --- a/README.md +++ b/README.md @@ -55,7 +55,7 @@ The master branch works with **PyTorch 1.3+**. ## Changelog -v0.14.0 was released in 30/04/2021. Please refer to [changelog.md](docs/changelog.md) for details and release history. +v0.15.0 was released in 31/05/2021. Please refer to [changelog.md](docs/changelog.md) for details and release history. ## Benchmark diff --git a/README_zh-CN.md b/README_zh-CN.md index 9f88beb63e..1fff923c3e 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -49,7 +49,7 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLa ## 更新记录 -v0.14.0 版本已于 2021 年 4 月 30 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史 +v0.15.0 版本已于 2021 年 5 月 31 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史 ## 基准测试 diff --git a/mmaction/version.py b/mmaction/version.py index 906660529d..32ea1c548d 100644 --- a/mmaction/version.py +++ b/mmaction/version.py @@ -1,6 +1,6 @@ # Copyright (c) Open-MMLab. All rights reserved. 
-__version__ = '0.14.0' +__version__ = '0.15.0' def parse_version_info(version_str): From b19c58d5cf8274906d7ab1b03f8c27291000ac23 Mon Sep 17 00:00:00 2001 From: wangruohui <12756472+wangruohui@users.noreply.github.com> Date: Thu, 3 Jun 2021 11:46:58 +0800 Subject: [PATCH 121/414] update nms config in demo/faster_rccn --- demo/faster_rcnn_r50_fpn_2x_coco.py | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/demo/faster_rcnn_r50_fpn_2x_coco.py b/demo/faster_rcnn_r50_fpn_2x_coco.py index 326258e935..124902d7f7 100644 --- a/demo/faster_rcnn_r50_fpn_2x_coco.py +++ b/demo/faster_rcnn_r50_fpn_2x_coco.py @@ -73,11 +73,9 @@ pos_weight=-1, debug=False), rpn_proposal=dict( - nms_across_levels=False, nms_pre=2000, - nms_post=1000, - max_num=1000, - nms_thr=0.7, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), min_bbox_size=0), rcnn=dict( assigner=dict( @@ -97,16 +95,15 @@ debug=False)), test_cfg=dict( rpn=dict( - nms_across_levels=False, nms_pre=1000, - nms_post=1000, - max_num=1000, - nms_thr=0.7, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), min_bbox_size=0), rcnn=dict( score_thr=0.05, nms=dict(type='nms', iou_threshold=0.5), - max_per_img=100))) + max_per_img=100) + )) # dataset config dataset_type = 'CocoDataset' From 3e5b432fd6a1d339513c72bd41b093f656eb953a Mon Sep 17 00:00:00 2001 From: wangruohui <12756472+wangruohui@users.noreply.github.com> Date: Thu, 3 Jun 2021 12:26:52 +0800 Subject: [PATCH 122/414] fix lint --- demo/faster_rcnn_r50_fpn_2x_coco.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/demo/faster_rcnn_r50_fpn_2x_coco.py b/demo/faster_rcnn_r50_fpn_2x_coco.py index 124902d7f7..33fc564507 100644 --- a/demo/faster_rcnn_r50_fpn_2x_coco.py +++ b/demo/faster_rcnn_r50_fpn_2x_coco.py @@ -102,8 +102,7 @@ rcnn=dict( score_thr=0.05, nms=dict(type='nms', iou_threshold=0.5), - max_per_img=100) - )) + max_per_img=100))) # dataset config dataset_type = 'CocoDataset' From 9d5cabc02c735de957b9e75a0471b3917d51f18b Mon Sep 17 00:00:00 2001 From: irvingzhang0512 Date: Mon, 7 Jun 2021 09:44:05 +0800 Subject: [PATCH 123/414] add tool --- tools/check_videos.py | 95 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 95 insertions(+) create mode 100644 tools/check_videos.py diff --git a/tools/check_videos.py b/tools/check_videos.py new file mode 100644 index 0000000000..556be1fce1 --- /dev/null +++ b/tools/check_videos.py @@ -0,0 +1,95 @@ +import argparse +import os +import warnings + +from mmcv import Config, DictAction +from tqdm import tqdm + +from mmaction.datasets import build_dataset + + +def parse_args(): + parser = argparse.ArgumentParser(description='MMAction2 check datasets') + parser.add_argument('config', help='test config file path') + parser.add_argument( + '--options', + nargs='+', + action=DictAction, + default={}, + help='custom options for evaluation, the key-value pair in xxx=yyy ' + 'format will be kwargs for dataset.evaluate() function (deprecate), ' + 'change to --eval-options instead.') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + default={}, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. 
For example, ' + "'--cfg-options model.backbone.depth=18 model.backbone.with_cp=True'") + parser.add_argument( + '--output-file', + default='corrupted-video.txt', + help='Output file path which keeps corrupted/missing video file paths') + parser.add_argument('--split', default='train') + parser.add_argument( + '--decoder', default='decord', help='Video decoder type') + parser.add_argument( + '--remove-corrupted-videos', + action='store_true', + help='Whether to delete all corrupted videos') + args = parser.parse_args() + + if args.options and args.eval_options: + raise ValueError( + '--options and --eval-options cannot be both ' + 'specified, --options is deprecated in favor of --eval-options') + if args.options: + warnings.warn('--options is deprecated in favor of --eval-options') + args.eval_options = args.options + return args + + +if __name__ == '__main__': + args = parse_args() + + assert args.split in ['train', 'val', 'test'] + + decoder_to_pipeline = dict( + decord='DecordInit', + opencv='OpenCVInit', + pyav='PyAVInit', + ) + assert args.decoder in decoder_to_pipeline + + cfg = Config.fromfile(args.config) + cfg.merge_from_dict(args.cfg_options) + + dataset_type = cfg.data[args.split].type + assert dataset_type == 'VideoDataset' + + # modify video decord pipeline + cfg.data[args.split].pipeline = [ + dict(type=decoder_to_pipeline[args.decoder]) + ] + dataset = build_dataset(cfg.data[args.split], + dict(test_mode=(args.split != 'train'))) + + writer = open(args.output_file, 'w') + cnt = 0 + for i in tqdm(range(len(dataset))): + try: + dataset[i] + except: # noqa + writer.write(dataset.video_infos[i]['filename'] + '\n') + cnt += 1 + + print(f'Checked {len(dataset)} videos, {cnt} is/are corrupted/missing. ') + writer.close() + + if args.remove_corrupted_videos: + print('Start deleting corrupted videos') + with open(args.output_file, 'r') as f: + for line in f: + if os.path.exists(line.strip()): + os.remove(line.strip()) From a6d919ce2f8b1ecc3c0ae61f681831fe5c478b6b Mon Sep 17 00:00:00 2001 From: irvingzhang0512 Date: Mon, 7 Jun 2021 10:13:26 +0800 Subject: [PATCH 124/414] update docs --- docs/useful_tools.md | 8 ++++++++ docs_zh_CN/useful_tools.md | 8 ++++++++ 2 files changed, 16 insertions(+) diff --git a/docs/useful_tools.md b/docs/useful_tools.md index 0e83116dc6..2b04fedcb8 100644 --- a/docs/useful_tools.md +++ b/docs/useful_tools.md @@ -154,3 +154,11 @@ python tools/analysis/eval_metric.py ${CONFIG_FILE} ${RESULT_FILE} [--eval ${EVA ```shell python tools/print_config.py ${CONFIG} [-h] [--options ${OPTIONS [OPTIONS...]}] ``` + +### Check videos + +`tools/check_videos.py` uses specified video encoder to iterate all samples that are specified by the input configuration file, looks for invalid videos (corrupted or missing), and saves the corresponding file path to the output file. Please note that after deleting invalid videos, users need to regenerate the video file list. 
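+
+One way to regenerate a cleaned file list afterwards is to filter the old
+one against the output file. This is only a sketch: the file names below
+are illustrative, and the paths written to the output file are the full
+`filename` values, so adjust them to match the entries in your list (e.g.
+strip the `data_prefix`).
+
+```python
+with open('invalid-video.txt') as f:
+    invalid = {line.strip() for line in f}
+
+with open('train_list_videos.txt') as src, \
+        open('train_list_videos_clean.txt', 'w') as dst:
+    for line in src:
+        # each line is '<path> <label>'; keep entries whose video survived
+        if line.split()[0] not in invalid:
+            dst.write(line)
+```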
From 911d3e13468aef239678b404a64bbc3495eec992 Mon Sep 17 00:00:00 2001
From: irvingzhang0512
Date: Mon, 7 Jun 2021 10:15:27 +0800
Subject: [PATCH 125/414] polish script

---
 tools/check_videos.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/tools/check_videos.py b/tools/check_videos.py
index 556be1fce1..a15e0fbc27 100644
--- a/tools/check_videos.py
+++ b/tools/check_videos.py
@@ -84,12 +84,15 @@ def parse_args():
             writer.write(dataset.video_infos[i]['filename'] + '\n')
             cnt += 1
 
-    print(f'Checked {len(dataset)} videos, {cnt} is/are corrupted/missing. ')
+    print(f'Checked {len(dataset)} videos, {cnt} is/are corrupted/missing.')
     writer.close()
 
     if args.remove_corrupted_videos:
         print('Start deleting corrupted videos')
+        cnt = 0
         with open(args.output_file, 'r') as f:
             for line in f:
                 if os.path.exists(line.strip()):
                     os.remove(line.strip())
+                    cnt += 1
+        print(f'Deleted {cnt} corrupted videos.')

From dd6054629f01e1b61c1f68bd6f2f54417a6185e6 Mon Sep 17 00:00:00 2001
From: irvingzhang0512
Date: Mon, 7 Jun 2021 10:20:34 +0800
Subject: [PATCH 126/414] polish

---
 tools/check_videos.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tools/check_videos.py b/tools/check_videos.py
index a15e0fbc27..0ab8d823d0 100644
--- a/tools/check_videos.py
+++ b/tools/check_videos.py
@@ -29,7 +29,7 @@ def parse_args():
         "'--cfg-options model.backbone.depth=18 model.backbone.with_cp=True'")
     parser.add_argument(
         '--output-file',
-        default='corrupted-video.txt',
+        default='invalid-video.txt',
        help='Output file path which keeps corrupted/missing video file paths')
     parser.add_argument('--split', default='train')
     parser.add_argument(

From 2c98e8b889641db76dd7cbefc0981e23e42bd161 Mon Sep 17 00:00:00 2001
From: irvingzhang0512
Date: Mon, 7 Jun 2021 10:24:54 +0800
Subject: [PATCH 127/414] polish

---
 tools/check_videos.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/tools/check_videos.py b/tools/check_videos.py
index 0ab8d823d0..246761eeb5 100644
--- a/tools/check_videos.py
+++ b/tools/check_videos.py
@@ -68,7 +68,7 @@ def parse_args():
     dataset_type = cfg.data[args.split].type
     assert dataset_type == 'VideoDataset'
 
-    # modify video decord pipeline
+    # Only video decoder is needed for the data pipeline
     cfg.data[args.split].pipeline = [
         dict(type=decoder_to_pipeline[args.decoder])
     ]

From ceeebf8db6e896ac965a0ed1e1ce98c0853b1f7d Mon Sep 17 00:00:00 2001
From: irvingzhang0512
Date: Mon, 7 Jun 2021 10:48:53 +0800
Subject: [PATCH 128/414] polish

---
 tools/check_videos.py | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/tools/check_videos.py b/tools/check_videos.py
index 246761eeb5..b4e1cd9e8d 100644
--- a/tools/check_videos.py
+++ b/tools/check_videos.py
@@ -31,9 +31,14 @@ def parse_args():
         '--output-file',
         default='invalid-video.txt',
         help='Output file path which keeps corrupted/missing video file paths')
-    parser.add_argument('--split', default='train')
     parser.add_argument(
-        '--decoder', default='decord', help='Video decoder type')
+        '--split',
+        default='train',
+        help='Dataset split, should be one of [train, val, test]')
+    parser.add_argument(
+        '--decoder',
+        default='decord',
+        help='Video decoder type, should be one of [decord, opencv, pyav]')
     parser.add_argument(
         '--remove-corrupted-videos',
         action='store_true',

From f692e60ddabaf2df0bd4be680b99085c0be059c0 Mon Sep 17 00:00:00 2001
From: irvingzhang0512
Date: Mon, 7 Jun 2021 10:59:12 +0800
Subject: [PATCH 129/414] update swin transformer res

---
 configs/recognition/tsn/README.md             |  4 +--
 ..._video_320p_1x1x3_100e_kinetics400_rgb.py} | 36 +++++++++++--------
 2 files changed, 23 insertions(+), 17 deletions(-)
 rename configs/recognition/tsn/custom_backbones/{tsn_swin_transformer_320p_1x1x3_100e_kinetics400_rgb.py => tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py} (74%)

diff --git a/configs/recognition/tsn/README.md b/configs/recognition/tsn/README.md
index 745b20cb12..a1c3b8e81c 100644
--- a/configs/recognition/tsn/README.md
+++ b/configs/recognition/tsn/README.md
@@ -72,8 +72,8 @@ It's possible and convenient to use a 3rd-party backbone for TSN under the frame
 | config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | ckpt | log | json |
 | :----------------------------------------------------------- | :------------: | :--: | :----------------------------------------------------------: | :------: | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: |
 | [tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | 8x2 | ResNeXt101-32x4d [[MMCls](https://github.com/open-mmlab/mmclassification/tree/master/configs/resnext)] | ImageNet | 73.43 | 91.01 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb-16a8b561.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.json) |
-| [tsn_dense161_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | 8x2 | ResNeXt101-32x4d [[TorchVision](https://github.com/pytorch/vision/)] | ImageNet | 72.78 | 90.75 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb-cbe85332.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.json) |
-| [tsn_swin_transformer_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | 8x2 | Swin Transformer Base [[timm](https://github.com/rwightman/pytorch-image-models)] | ImageNet | | | [ckpt]() | [log]() | [json]() |
+| [tsn_dense161_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | 8x2 | Densenet-161 [[TorchVision](https://github.com/pytorch/vision/)] | ImageNet | 72.78 | 90.75 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb-cbe85332.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.json) |
+| [tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | 8 | Swin Transformer Base [[timm](https://github.com/rwightman/pytorch-image-models)] | ImageNet | 77.31 | 92.88 | [ckpt]() | [log]() | [json]() |

 ### Kinetics-400 Data Benchmark (8-gpus, ResNet50, ImageNet pretrain; 3 segments)

diff --git a/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_320p_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py
similarity index 74%
rename from configs/recognition/tsn/custom_backbones/tsn_swin_transformer_320p_1x1x3_100e_kinetics400_rgb.py
rename to configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py
index 0d389c330b..fd310324bc 100644
--- a/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_320p_1x1x3_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py
@@ -20,18 +20,18 @@
     test_cfg=dict(average_clips=None))
 
 # dataset settings
-dataset_type = 'RawframeDataset'
-data_root = 'data/kinetics400/rawframes_train_320p'
-data_root_val = 'data/kinetics400/rawframes_val_320p'
-ann_file_train = 'data/kinetics400/kinetics400_train_list_rawframes_320p.txt'
-ann_file_val = 'data/kinetics400/kinetics400_val_list_rawframes_320p.txt'
-ann_file_test = 'data/kinetics400/kinetics400_val_list_rawframes_320p.txt'
+dataset_type = 'VideoDataset'
+data_root = 'data/kinetics400/videos_train'
+data_root_val = 'data/kinetics400/videos_val'
+ann_file_train = 'data/kinetics400/kinetics400_train_list_videos.txt'
+ann_file_val = 'data/kinetics400/kinetics400_val_list_videos.txt'
+ann_file_test = 'data/kinetics400/kinetics400_val_list_videos.txt'
 img_norm_cfg = dict(
     mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False)
 train_pipeline = [
+    dict(type='DecordInit'),
     dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=3),
-    dict(type='RawFrameDecode'),
-    dict(type='Resize', scale=(-1, 256)),
+    dict(type='DecordDecode'),
     dict(type='RandomResizedCrop'),
     dict(type='Resize', scale=(224, 224), keep_ratio=False),
     dict(type='Flip', flip_ratio=0.5),
@@ -41,15 +41,16 @@
     dict(type='ToTensor', keys=['imgs', 'label'])
 ]
 val_pipeline = [
+    dict(type='DecordInit'),
     dict(
         type='SampleFrames',
         clip_len=1,
         frame_interval=1,
         num_clips=3,
         test_mode=True),
-    dict(type='RawFrameDecode'),
+    dict(type='DecordDecode'),
     dict(type='Resize', scale=(-1, 256)),
-    dict(type='CenterCrop', crop_size=256),
+    dict(type='CenterCrop', crop_size=224),
     dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
@@ -57,15 +58,16 @@
     dict(type='ToTensor', keys=['imgs'])
 ]
 test_pipeline = [
+    dict(type='DecordInit'),
     dict(
         type='SampleFrames',
         clip_len=1,
         frame_interval=1,
         num_clips=25,
         test_mode=True),
-    dict(type='RawFrameDecode'),
+    dict(type='DecordDecode'),
     dict(type='Resize', scale=(-1, 256)),
-    dict(type='ThreeCrop', crop_size=256),
+    dict(type='TenCrop', crop_size=224),
     dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
@@ -73,8 +75,9 @@
     dict(type='ToTensor', keys=['imgs'])
 ]
 data = dict(
-    videos_per_gpu=12,
+    videos_per_gpu=24,
     workers_per_gpu=4,
+    test_dataloader=dict(videos_per_gpu=4),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
@@ -90,11 +93,14 @@
         ann_file=ann_file_test,
         data_prefix=data_root_val,
         pipeline=test_pipeline))

+evaluation = dict(
+    interval=1, metrics=['top_k_accuracy', 'mean_class_accuracy'])
+
 # runtime settings
-work_dir = './work_dirs/tsn_swin_transformer_320p_1x1x3_100e_kinetics400_rgb/'
+work_dir = './work_dirs/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/'
 optimizer = dict(
     type='SGD',
-    lr=0.00375,  # this lr is used for 8 gpus
+    lr=0.0075,  # this lr is used for 8 gpus
     momentum=0.9,
     weight_decay=0.0001)

From 96ade26c85ed416e01cfaf7fe19802d93283ec01 Mon Sep 17 00:00:00 2001
From: irvingzhang0512
Date: Mon, 7 Jun 2021 11:02:54 +0800
Subject: [PATCH 130/414] fix a bug

---
 tools/data/kinetics/generate_videos_filelist.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tools/data/kinetics/generate_videos_filelist.sh b/tools/data/kinetics/generate_videos_filelist.sh
index 14d84444ba..16db70cfb0 100644
--- a/tools/data/kinetics/generate_videos_filelist.sh
+++ b/tools/data/kinetics/generate_videos_filelist.sh
@@ -14,4 +14,4 @@ echo "Train filelist for video generated."
 PYTHONPATH=. python tools/data/build_file_list.py ${DATASET} data/${DATASET}/videos_val/ --level 2 --format videos --num-split 1 --subset val --shuffle
 echo "Val filelist for video generated."
 
-cd tools/data/${DATASET}/
+cd tools/data/kinetics/

From e443f033f4409655a58c3f9d678e11cf2af07c46 Mon Sep 17 00:00:00 2001
From: irvingzhang0512
Date: Mon, 7 Jun 2021 11:25:20 +0800
Subject: [PATCH 131/414] update cn docs

---
 configs/recognition/tsn/README_zh-CN.md | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/configs/recognition/tsn/README_zh-CN.md b/configs/recognition/tsn/README_zh-CN.md
index f6e9c08c61..404d58f004 100644
--- a/configs/recognition/tsn/README_zh-CN.md
+++ b/configs/recognition/tsn/README_zh-CN.md
@@ -66,10 +66,14 @@
 用户可在 MMAction2 的框架中使用第三方的主干网络训练 TSN,例如:
 
 - [x] MMClassification 中的主干网络
+- [x] TorchVision 中的主干网络
+- [x] pytorch-image-models(timm) 中的主干网络
 
 | 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | ckpt | log | json |
 | :----------------------------------------------------------: | :------------: | :--: | :----------------------------------------------------------: | :------: | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: |
 | [tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.py) | 短边 320 | 8x2 | ResNeXt101-32x4d [[MMCls](https://github.com/open-mmlab/mmclassification/tree/master/configs/resnext)] | ImageNet | 73.43 | 91.01 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb-16a8b561.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.json) |
+| [tsn_dense161_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.py) | 短边 320 | 8x2 | Densenet-161 [[TorchVision](https://github.com/pytorch/vision/)] | ImageNet | 72.78 | 90.75 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb-cbe85332.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.json) |
+| [tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py) | 短边 320 | 8 | Swin Transformer Base [[timm](https://github.com/rwightman/pytorch-image-models)] | ImageNet | 77.31 | 92.88 | [ckpt]() | [log]() | [json]() |

 ### Kinetics-400 数据基准测试 (8 块 GPU, ResNet50, ImageNet 预训练; 3 个视频段)

From 25b2fcc95a32d5e45e950446f2320d38e12409d8 Mon Sep 17 00:00:00 2001
From: irvingzhang0512
Date: Mon, 7 Jun 2021 11:30:37 +0800
Subject: [PATCH 132/414] update changelog

---
 docs/changelog.md | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

diff --git a/docs/changelog.md b/docs/changelog.md
index 67fccc19a5..f3d58d3bfc 100644
--- a/docs/changelog.md
+++ b/docs/changelog.md
@@ -1,5 +1,23 @@
 ## Changelog
 
+### Master
+
+**Highlights**
+
+- Support using backbones from pytorch-image-models(timm)
+
+**New Features**
+
+- Support using backbones from pytorch-image-models(timm) for TSN ([#880](https://github.com/open-mmlab/mmaction2/pull/880))
+
+**Improvements**
+
+**Bug and Typo Fixes**
+
+**ModelZoo**
+
+- Add TSN with Swin Transformer backbone as an example for using pytorch-image-models(timm) backbones ([#880](https://github.com/open-mmlab/mmaction2/pull/880))
+
 ### 0.15.0 (31/05/2021)

 **Highlights**

From 9fa4be9bd275e81de77697ee3d921fa00f0db035 Mon Sep 17 00:00:00 2001
From: irvingzhang0512
Date: Mon, 7 Jun 2021 15:22:56 +0800
Subject: [PATCH 133/414] fix lint

---
 ...n_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py
index fd310324bc..815ef38d0f 100644
--- a/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py
@@ -96,9 +96,8 @@
 evaluation = dict(
     interval=1, metrics=['top_k_accuracy', 'mean_class_accuracy'])
 
-
 # runtime settings
-work_dir = './work_dirs/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/'
+work_dir = './work_dirs/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/'  # noqa
 optimizer = dict(
     type='SGD',
     lr=0.0075,  # this lr is used for 8 gpus

From 408b2f69e4e977a8f3cadbe6029b7b3aced73926 Mon Sep 17 00:00:00 2001
From: irvingzhang0512
Date: Mon, 7 Jun 2021 15:56:51 +0800
Subject: [PATCH 134/414] check first/last frames and 3 random frames

---
 tools/check_videos.py | 38 ++++++++++++++++++++++++++++++++++-----
 1 file changed, 33 insertions(+), 5 deletions(-)

diff --git a/tools/check_videos.py b/tools/check_videos.py
index b4e1cd9e8d..0d4e2acb46 100644
--- a/tools/check_videos.py
+++ b/tools/check_videos.py
@@ -1,11 +1,12 @@
 import argparse
 import os
+import random
 import warnings
 
 from mmcv import Config, DictAction
 from tqdm import tqdm
 
-from mmaction.datasets import build_dataset
+from mmaction.datasets import PIPELINES, build_dataset
 
 
 def parse_args():
@@ -55,15 +56,40 @@ def parse_args():
     return args
 
 
+@PIPELINES.register_module()
+class RandomSampleFrames:
+
+    def __call__(self, results):
+        """Select frames to verify.
+
+        Required key is "total_frames", added or modified key is "frame_inds".
+        Args:
+            results (dict): The resulting dict to be modified and passed
+                to the next transform in pipeline.
+        """
+        assert results['total_frames'] > 0
+
+        # first and last elements
+        results['frame_inds'] = [0, results['total_frames'] - 1]
+
+        # choose 3 random frames
+        if results['total_frames'] > 2:
+            for _ in range(3):
+                results['frame_inds'].append(
+                    random.randint(1, results['total_frames'] - 2))
+
+        return results
+
+
 if __name__ == '__main__':
     args = parse_args()
 
     assert args.split in ['train', 'val', 'test']
 
     decoder_to_pipeline = dict(
-        decord='DecordInit',
-        opencv='OpenCVInit',
-        pyav='PyAVInit',
+        decord='Decord',
+        opencv='OpenCV',
+        pyav='PyAV',
     )
     assert args.decoder in decoder_to_pipeline
 
@@ -75,7 +101,9 @@ def parse_args():
 
     # Only video decoder is needed for the data pipeline
     cfg.data[args.split].pipeline = [
-        dict(type=decoder_to_pipeline[args.decoder])
+        dict(type=decoder_to_pipeline[args.decoder] + 'Init'),
+        dict(type='RandomSampleFrames'),
+        dict(type=decoder_to_pipeline[args.decoder] + 'Decode')
     ]
     dataset = build_dataset(cfg.data[args.split],
                             dict(test_mode=(args.split != 'train')))

From 2b83840823fc4d3b0f664a823789076059a0cbcb Mon Sep 17 00:00:00 2001
From: irvingzhang0512
Date: Mon, 7 Jun 2021 17:12:04 +0800
Subject: [PATCH 135/414] pool

---
 tools/check_videos.py | 58 ++++++++++++++++++++++++++++++++-----------------
 1 file changed, 41 insertions(+), 17 deletions(-)

diff --git a/tools/check_videos.py b/tools/check_videos.py
index 0d4e2acb46..5027d60b6a 100644
--- a/tools/check_videos.py
+++ b/tools/check_videos.py
@@ -1,8 +1,10 @@
 import argparse
 import os
-import random
 import warnings
+from functools import partial
+from multiprocessing import Manager, Pool
 
+import numpy as np
 from mmcv import Config, DictAction
 from tqdm import tqdm
 
@@ -40,6 +42,11 @@ def parse_args():
         '--decoder',
         default='decord',
         help='Video decoder type, should be one of [decord, opencv, pyav]')
+    parser.add_argument(
+        '--num-processes',
+        type=int,
+        default=5,
+        help='Number of processes to check videos')
     parser.add_argument(
         '--remove-corrupted-videos',
         action='store_true',
@@ -70,17 +77,28 @@ def __call__(self, results):
         assert results['total_frames'] > 0
 
         # first and last elements
-        results['frame_inds'] = [0, results['total_frames'] - 1]
+        results['frame_inds'] = np.array([0, results['total_frames'] - 1])
 
         # choose 3 random frames
         if results['total_frames'] > 2:
-            for _ in range(3):
-                results['frame_inds'].append(
-                    random.randint(1, results['total_frames'] - 2))
+            results['frame_inds'] = np.concatenate([
+                results['frame_inds'],
+                np.random.randint(1, results['total_frames'] - 1, 3)
+            ])
 
         return results
 
 
+def _do_check_videos(lock, dataset, idx):
+    try:
+        dataset[idx]
+    except:  # noqa
+        # save invalid video path to output file
+        lock.acquire()
+        # writer.write(dataset.video_infos[idx]['filename'] + '\n')
+        lock.release()
+
+
 if __name__ == '__main__':
@@ -101,19 +119,25 @@
     dataset = build_dataset(cfg.data[args.split],
                             dict(test_mode=(args.split != 'train')))
 
+    # prepare for checking
     writer = open(args.output_file, 'w')
-    cnt = 0
-    for i in tqdm(range(len(dataset))):
-        try:
-            dataset[i]
-        except:  # noqa
-            # save invalid video path to output file
-            writer.write(dataset.video_infos[i]['filename'] + '\n')
-            cnt += 1
-
-    print(f'Checked {len(dataset)} videos, {cnt} is/are corrupted/missing.')
+    pool = Pool(args.num_processes)
+    lock = Manager().Lock()
+    # worker_fn = partial(_do_check_videos, writer, lock, dataset)
+    worker_fn = partial(_do_check_videos, lock, dataset)
+    ids = range(len(dataset))
+
+    # start checking
+    for _ in tqdm(pool.imap_unordered(worker_fn, ids), total=len(ids)):
+        pass
+    pool.close()
+    pool.join()
+
+    # print results
     writer.close()
+    with open(args.output_file, 'r') as f:
+        print(f'Checked {len(dataset)} videos, '
+              f'{len(f.readlines())} is/are corrupted/missing.')
 
     if args.remove_corrupted_videos:
         print('Start deleting corrupted videos')

From 1ece7ee1ff750499d898c1c5677245a872924b4e Mon Sep 17 00:00:00 2001
From: irvingzhang0512
Date: Mon, 7 Jun 2021 17:37:28 +0800
Subject: [PATCH 136/414] polish

---
 tools/check_videos.py | 24 +++++++++++++-----------
 1 file changed, 13 insertions(+), 11 deletions(-)

diff --git a/tools/check_videos.py b/tools/check_videos.py
index 5027d60b6a..c50942e4c1 100644
--- a/tools/check_videos.py
+++ b/tools/check_videos.py
@@ -89,13 +89,14 @@ def __call__(self, results):
         return results
 
 
-def _do_check_videos(lock, dataset, idx):
+def _do_check_videos(lock, dataset, output_file, idx):
     try:
         dataset[idx]
     except:  # noqa
         # save invalid video path to output file
         lock.acquire()
-        # writer.write(dataset.video_infos[idx]['filename'] + '\n')
+        with open(output_file, 'a') as f:
+            f.write(dataset.video_infos[idx]['filename'] + '\n')
         lock.release()
 
 
@@ -104,13 +105,14 @@ def _do_check_videos(lock, dataset, idx):
 
     assert args.split in ['train', 'val', 'test']
 
-    decoder_to_pipeline = dict(
+    decoder_to_pipeline_prefix = dict(
         decord='Decord',
         opencv='OpenCV',
         pyav='PyAV',
     )
-    assert args.decoder in decoder_to_pipeline
+    assert args.decoder in decoder_to_pipeline_prefix
 
+    # read config file
     cfg = Config.fromfile(args.config)
     cfg.merge_from_dict(args.cfg_options)
 
@@ -118,19 +120,20 @@ def _do_check_videos(lock, dataset, idx):
     dataset_type = cfg.data[args.split].type
     assert dataset_type == 'VideoDataset'
     cfg.data[args.split].pipeline = [
-        dict(type=decoder_to_pipeline[args.decoder] + 'Init'),
+        dict(type=decoder_to_pipeline_prefix[args.decoder] + 'Init'),
         dict(type='RandomSampleFrames'),
-        dict(type=decoder_to_pipeline[args.decoder] + 'Decode')
+        dict(type=decoder_to_pipeline_prefix[args.decoder] + 'Decode')
     ]
     dataset = build_dataset(cfg.data[args.split],
                             dict(test_mode=(args.split != 'train')))
 
     # prepare for checking
-    writer = open(args.output_file, 'w')
+    if os.path.exists(args.output_file):
+        # remove existing output file
+        os.remove(args.output_file)
     pool = Pool(args.num_processes)
     lock = Manager().Lock()
-    worker_fn = partial(_do_check_videos, lock, dataset)
+    worker_fn = partial(_do_check_videos, lock, dataset, args.output_file)
     ids = range(len(dataset))
 
     # start checking
@@ -140,9 +143,8 @@ def _do_check_videos(lock, dataset, idx):
         pass
     pool.close()
     pool.join()
 
-    # print results
-    writer.close()
+    # print results and release resources
     with open(args.output_file, 'r') as f:
         print(f'Checked {len(dataset)} videos, '
               f'{len(f.readlines())} is/are corrupted/missing.')

From 19b4d1e254c4f5f67ca610c844e59840133b066f Mon Sep 17 00:00:00 2001
From: irvingzhang0512
Date: Mon, 7 Jun 2021 18:03:06 +0800
Subject: [PATCH 137/414] update docs

---
 docs/useful_tools.md       | 2 +-
 docs_zh_CN/useful_tools.md | 2 +-
 tools/check_videos.py      | 5 +++--
 3 files changed, 5 insertions(+), 4 deletions(-)

diff --git a/docs/useful_tools.md b/docs/useful_tools.md
index 2b04fedcb8..1dbc5e71b8 100644
--- a/docs/useful_tools.md
+++ b/docs/useful_tools.md
@@ -160,5 +160,5 @@
 `tools/check_videos.py` uses the specified video decoder to iterate over all samples specified by the input configuration file, looks for invalid videos (corrupted or missing), and saves the corresponding file paths to the output file. Please note that after deleting invalid videos, users need to regenerate the video file list.
 
 ```shell
-python tools/check_videos.py ${CONFIG} [-h] [--options OPTIONS [OPTIONS ...]] [--cfg-options CFG_OPTIONS [CFG_OPTIONS ...]] [--output-file OUTPUT_FILE] [--split SPLIT] [--decoder DECODER] [--remove-corrupted-videos]
+python tools/check_videos.py ${CONFIG} [-h] [--options OPTIONS [OPTIONS ...]] [--cfg-options CFG_OPTIONS [CFG_OPTIONS ...]] [--output-file OUTPUT_FILE] [--split SPLIT] [--decoder DECODER] [--num-processes NUM_PROCESSES] [--remove-corrupted-videos]
 ```
diff --git a/docs_zh_CN/useful_tools.md b/docs_zh_CN/useful_tools.md
index 2eba29dcef..13ed9d51f2 100644
--- a/docs_zh_CN/useful_tools.md
+++ b/docs_zh_CN/useful_tools.md
@@ -157,5 +157,5 @@
 `tools/check_videos.py` 脚本利用指定视频解码器,遍历指定配置文件视频数据集中所有样本,寻找无效视频文件(文件破损或者文件不存在),并将无效文件路径保存到输出文件中。请注意,删除无效视频文件后,需要重新生成视频文件列表。
 
 ```shell
-python tools/check_videos.py ${CONFIG} [-h] [--options OPTIONS [OPTIONS ...]] [--cfg-options CFG_OPTIONS [CFG_OPTIONS ...]] [--output-file OUTPUT_FILE] [--split SPLIT] [--decoder DECODER] [--remove-corrupted-videos]
+python tools/check_videos.py ${CONFIG} [-h] [--options OPTIONS [OPTIONS ...]] [--cfg-options CFG_OPTIONS [CFG_OPTIONS ...]] [--output-file OUTPUT_FILE] [--split SPLIT] [--decoder DECODER] [--num-processes NUM_PROCESSES] [--remove-corrupted-videos]
 ```
diff --git a/tools/check_videos.py b/tools/check_videos.py
index c50942e4c1..fcd7b8d30f 100644
--- a/tools/check_videos.py
+++ b/tools/check_videos.py
@@ -69,14 +69,15 @@ class RandomSampleFrames:
     def __call__(self, results):
         """Select frames to verify.
 
-        Required key is "total_frames", added or modified key is "frame_inds".
+        Select the first, last and three random frames. Required key is
+        "total_frames", added or modified key is "frame_inds".
         Args:
             results (dict): The resulting dict to be modified and passed
                 to the next transform in pipeline.
        """
         assert results['total_frames'] > 0
 
-        # first and last elements
+        # first and last frames
         results['frame_inds'] = np.array([0, results['total_frames'] - 1])
 
         # choose 3 random frames

From cf5ef0f522b79ec2619d0ebf7521f63734a62376 Mon Sep 17 00:00:00 2001
From: irvingzhang0512
Date: Mon, 7 Jun 2021 18:10:53 +0800
Subject: [PATCH 138/414] update changelog

---
 docs/changelog.md | 14 ++++++++++++++
 1 file changed, 14 insertions(+)

diff --git a/docs/changelog.md b/docs/changelog.md
index 67fccc19a5..923b30a77b 100644
--- a/docs/changelog.md
+++ b/docs/changelog.md
@@ -1,5 +1,19 @@
 ## Changelog
 
+### Master
+
+**Highlights**
+
+**New Features**
+
+**Improvements**
+
+- Add a tool to find invalid videos ([#907](https://github.com/open-mmlab/mmaction2/pull/907))
+
+**Bug and Typo Fixes**
+
+**ModelZoo**
+
 ### 0.15.0 (31/05/2021)

 **Highlights**

From 2d5b3c6487e5d5005aec16b61f7f83ec76b80e5e Mon Sep 17 00:00:00 2001
From: Miguel Méndez
Date: Mon, 7 Jun 2021 13:22:36 +0200
Subject: [PATCH 139/414] Update ResNet3d docstring with missing args (#910)

---
 mmaction/models/backbones/resnet3d.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/mmaction/models/backbones/resnet3d.py b/mmaction/models/backbones/resnet3d.py
index 83a64801f7..063b21d959 100644
--- a/mmaction/models/backbones/resnet3d.py
+++ b/mmaction/models/backbones/resnet3d.py
@@ -346,8 +346,12 @@ class ResNet3d(nn.Module):
             Default: ``(1, 1, 1, 1)``.
         conv1_kernel (Sequence[int]): Kernel size of the first conv layer.
             Default: ``(5, 7, 7)``.
+        conv1_stride_s (int): Spatial stride of the first conv layer.
+            Default: 2.
         conv1_stride_t (int): Temporal stride of the first conv layer.
             Default: 2.
+        pool1_stride_s (int): Spatial stride of the first pooling layer.
+            Default: 2.
         pool1_stride_t (int): Temporal stride of the first pooling layer.
             Default: 2.
         with_pool2 (bool): Whether to use pool2. Default: True.
From dd56557636205321430b93468917a9e71ff76e8e Mon Sep 17 00:00:00 2001
From: Jintao Lin <528557675@qq.com>
Date: Tue, 8 Jun 2021 14:48:42 +0800
Subject: [PATCH 140/414] indicate spectrogram_type (#909)

---
 tools/data/build_audio_features.py | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/tools/data/build_audio_features.py b/tools/data/build_audio_features.py
index c2b7927a0d..f388cc0036 100644
--- a/tools/data/build_audio_features.py
+++ b/tools/data/build_audio_features.py
@@ -49,7 +49,8 @@ def __init__(self,
                  sample_rate=16000,
                  num_mels=80,
                  fft_size=1280,
-                 hop_size=320):
+                 hop_size=320,
+                 spectrogram_type='lws'):
         self.frame_rate = frame_rate
         self.sample_rate = sample_rate
         self.silence_threshold = SILENCE_THRESHOLD
@@ -66,6 +67,8 @@ def __init__(self,
         self.allow_clipping_in_normalization = ALLOW_CLIPPING_IN_NORMALIZATION
         self.log_scale_min = LOG_SCALE_MIN
         self.norm_audio = NORM_AUDIO
+        self.spectrogram_type = spectrogram_type
+        assert spectrogram_type in ['lws', 'librosa']
 
     def load_wav(self, path):
         """Load an audio file into numpy array."""
@@ -248,9 +251,9 @@ def read_audio(self, audio_path):
         return wav
 
     def audio_to_spectrogram(self, wav):
-        if self.melspectrogram:
+        if self.spectrogram_type == 'lws':
             spectrogram = self.melspectrogram(wav).astype(np.float32).T
-        else:
+        elif self.spectrogram_type == 'librosa':
             spectrogram = self.generate_spectrogram_magphase(wav)
         return spectrogram

From df24a04ea8f690813564b9cd7f000adb3eaca405 Mon Sep 17 00:00:00 2001
From: Jintao Lin <528557675@qq.com>
Date: Tue, 8 Jun 2021 15:11:48 +0800
Subject: [PATCH 141/414] fix import name (#912)

---
 mmaction/models/__init__.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/mmaction/models/__init__.py b/mmaction/models/__init__.py
index 010661fb32..d612642376 100644
--- a/mmaction/models/__init__.py
+++ b/mmaction/models/__init__.py
@@ -15,13 +15,13 @@
                      CrossEntropyLoss, HVULoss, NLLLoss, OHEMHingeLoss,
                      SSNLoss)
 from .necks import TPN
-from .recognizers import (AudioRecognizer, BaseRecognizer, recognizer2d,
-                          recognizer3d)
+from .recognizers import (AudioRecognizer, BaseRecognizer, Recognizer2D,
+                          Recognizer3D)
 from .roi_extractors import SingleRoIExtractor3D
 
 __all__ = [
     'BACKBONES', 'HEADS', 'RECOGNIZERS', 'build_recognizer', 'build_head',
-    'build_backbone', 'recognizer2d', 'recognizer3d', 'C3D', 'ResNet',
+    'build_backbone', 'Recognizer2D', 'Recognizer3D', 'C3D', 'ResNet',
     'ResNet3d', 'ResNet2Plus1d', 'I3DHead', 'TSNHead', 'TSMHead', 'BaseHead',
     'BaseRecognizer', 'LOSSES', 'CrossEntropyLoss', 'NLLLoss', 'HVULoss',
     'ResNetTSM', 'ResNet3dSlowFast', 'SlowFastHead', 'Conv2plus1d',

From 9afe256d36f64e9a327fc1e7c890a223c489bffc Mon Sep 17 00:00:00 2001
From: Jintao Lin <528557675@qq.com>
Date: Tue, 8 Jun 2021 21:41:02 +0800
Subject: [PATCH 142/414] update README (#914)

---
 README.md                              |  2 +
 README_zh-CN.md                        |  2 +
 configs/detection/acrn/README_zh-CN.md | 81 ++++++++++++++++++++++++++
 configs/detection/ava/README_zh-CN.md  |  2 +-
 4 files changed, 86 insertions(+), 1 deletion(-)
 create mode 100644 configs/detection/acrn/README_zh-CN.md

diff --git a/README.md b/README.md
index 0b280ec6bb..c13e9fa93a 100644
--- a/README.md
+++ b/README.md
@@ -96,6 +96,7 @@ Supported methods for Action Recognition:
 - ✅ [MultiModality: Audio](configs/recognition_audio/resnet/README.md) (ArXiv'2020)
 - ✅ [TANet](configs/recognition/tanet/README.md) (ArXiv'2020)
 - ✅ [TRN](configs/recognition/trn/README.md) (ECCV'2018)
+- ✅ [PoseC3D](configs/skeleton/posec3d/README.md) (ArXiv'2021)
 
@@ -115,6 +116,7 @@ Supported methods for Spatial Temporal Action Detection:
 (click to collapse)
 
+- ✅ [ACRN](configs/detection/acrn/README.md) (ECCV'2018)
 - ✅ [SlowOnly+Fast R-CNN](configs/detection/ava/README.md) (ICCV'2019)
 - ✅ [SlowFast+Fast R-CNN](configs/detection/ava/README.md) (ICCV'2019)
 - ✅ [Long-Term Feature Bank](configs/detection/lfb/README.md) (CVPR'2019)
diff --git a/README_zh-CN.md b/README_zh-CN.md
index 1fff923c3e..a04ba8bcf0 100644
--- a/README_zh-CN.md
+++ b/README_zh-CN.md
@@ -90,6 +90,7 @@ v0.15.0 版本已于 2021 年 5 月 31 日发布,可通过查阅 [更新日志
 - ✅ [MultiModality: Audio](/configs/recognition_audio/resnet/README_zh-CN.md) (ArXiv'2020)
 - ✅ [TANet](/configs/recognition/tanet/README_zh-CN.md) (ArXiv'2020)
 - ✅ [TRN](/configs/recognition/trn/README_zh-CN.md) (ECCV'2018)
+- ✅ [PoseC3D](configs/skeleton/posec3d/README.md) (ArXiv'2021)
 
@@ -109,6 +110,7 @@ v0.15.0 版本已于 2021 年 5 月 31 日发布,可通过查阅 [更新日志
 (点击收起)
 
+- ✅ [ACRN](configs/detection/acrn/README_zh-CN.md) (ECCV'2018)
 - ✅ [SlowOnly+Fast R-CNN](/configs/detection/ava/README_zh-CN.md) (ICCV'2019)
 - ✅ [SlowFast+Fast R-CNN](/configs/detection/ava/README_zh-CN.md) (ICCV'2019)
 - ✅ [Long-Term Feature Bank](/configs/detection/lfb/README_zh-CN.md) (CVPR'2019)
diff --git a/configs/detection/acrn/README_zh-CN.md b/configs/detection/acrn/README_zh-CN.md
new file mode 100644
index 0000000000..3ec59cc495
--- /dev/null
+++ b/configs/detection/acrn/README_zh-CN.md
@@ -0,0 +1,81 @@
+# ACRN
+
+## 简介
+
+```BibTeX
+@inproceedings{gu2018ava,
+  title={Ava: A video dataset of spatio-temporally localized atomic visual actions},
+  author={Gu, Chunhui and Sun, Chen and Ross, David A and Vondrick, Carl and Pantofaru, Caroline and Li, Yeqing and Vijayanarasimhan, Sudheendra and Toderici, George and Ricco, Susanna and Sukthankar, Rahul and others},
+  booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition},
+  pages={6047--6056},
+  year={2018}
+}
+```
+
+```BibTeX
+@inproceedings{sun2018actor,
+  title={Actor-centric relation network},
+  author={Sun, Chen and Shrivastava, Abhinav and Vondrick, Carl and Murphy, Kevin and Sukthankar, Rahul and Schmid, Cordelia},
+  booktitle={Proceedings of the European Conference on Computer Vision (ECCV)},
+  pages={318--334},
+  year={2018}
+}
+```
+
+## 模型库
+
+### AVA2.1
+
+| 配置文件 | 模态 | 预训练 | 主干网络 | 输入 | GPU 数量 | mAP | log | json | ckpt |
+| :----------------------------------------------------------: | :------: | :----------: | :------: | :---: | :--: | :--: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: |
+| [slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb](/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 27.1 | [log](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb-49b07bf2.pth) |
+
+### AVA2.2
+
+| 配置文件 | 模态 | 预训练 | 主干网络 | 输入 | GPU 数量 | mAP | log | json | ckpt |
+| :----------------------------------------------------------: | :------: | :----------: | :------: | :---: | :--: | :--: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: |
+| [slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb](/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 27.8 | [log](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-2be32625.pth) |
+
+- 注:
+
+1. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。
+   依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。
+   如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。
+
+对于数据集准备的细节,用户可参考 [数据准备](/docs_zh_CN/data_preparation.md)。
+
+## 如何训练
+
+用户可以使用以下指令进行模型训练。
+
+```shell
+python tools/train.py ${CONFIG_FILE} [optional arguments]
+```
+
+例如:在 AVA 数据集上训练 ACRN 辅以 SlowFast 主干网络,并定期验证。
+
+```shell
+python tools/train.py configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py --validate
+```
+
+更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。
+
+## 如何测试
+
+用户可以使用以下指令进行模型测试。
+
+```shell
+python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments]
+```
+
+例如:在 AVA 上测试 ACRN 辅以 SlowFast 主干网络,并将结果存为 csv 文件。
+
+```shell
+python tools/test.py configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py checkpoints/SOME_CHECKPOINT.pth --eval mAP --out results.csv
+```
+
+更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。
diff --git a/configs/detection/ava/README_zh-CN.md b/configs/detection/ava/README_zh-CN.md
index aa1e896677..f6e9547c04 100644
--- a/configs/detection/ava/README_zh-CN.md
+++ b/configs/detection/ava/README_zh-CN.md
@@ -72,7 +72,7 @@
    如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。
 2. **Context** 表示同时使用 RoI 特征与全局特征进行分类,可带来约 1% mAP 的提升。
 
-对于数据集准备的细节,用户可参考 [数据准备](/docs_zh-CN/data_preparation.md)。
+对于数据集准备的细节,用户可参考 [数据准备](/docs_zh_CN/data_preparation.md)。
 
 ## 如何训练

From f8b595ad904e105e8b58aaaf83498e9421fa5c71 Mon Sep 17 00:00:00 2001
From: Rejnald Lleshi <46654505+rlleshi@users.noreply.github.com>
Date: Wed, 9 Jun 2021 07:21:22 +0200
Subject: [PATCH 143/414] 862 add json output video demo (#906)

* add json output for video demo
* add json output for long_video_demo
* add json output video demo
---
 demo/README.md          | 11 ++++--
 demo/long_video_demo.py | 78 ++++++++++++++++++++++++++++++-----------
 2 files changed, 66 insertions(+), 23 deletions(-)

diff --git a/demo/README.md b/demo/README.md
index ea0334618b..ce3c6fdbc4 100644
--- a/demo/README.md
+++ b/demo/README.md
@@ -282,7 +282,7 @@ python demo/long_video_demo.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${VIDEO_FILE} $
 Optional arguments:
 
-- `OUT_FILE`: Path to the output video file.
+- `OUT_FILE`: Path to the output file, which can be either a video or a json file.
 - `INPUT_STEP`: Input step for sampling frames, which can help to get more sparse input. If not specified, it will be set to 1.
 - `DEVICE_TYPE`: Type of device to run the demo. Allowed values are cuda device like `cuda:0` or `cpu`. If not specified, it will be set to `cuda:0`.
 - `THRESHOLD`: Threshold of prediction score for action recognition. Only label with score higher than the threshold will be shown. If not specified, it will be set to 0.01.
@@ -325,7 +325,14 @@ or use checkpoint url from `configs/` to directly load corresponding checkpoint,
 
    ```shell
    python demo/long_video_demo.py configs/recognition/i3d/i3d_r50_video_inference_32x2x1_100e_kinetics400_rgb.py \
-     checkpoints/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth PATH_TO_LONG_VIDEO demo/label_map_k400.txt PATH_TO_SAVED_VIDEO \
+     checkpoints/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth PATH_TO_LONG_VIDEO demo/label_map_k400.txt PATH_TO_SAVED_VIDEO
+   ```
+
+5. Predict different labels in a long video by using an I3D model on gpu and save the results as a `json` file
+
+   ```shell
+   python demo/long_video_demo.py configs/recognition/i3d/i3d_r50_video_inference_32x2x1_100e_kinetics400_rgb.py \
+     checkpoints/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth PATH_TO_LONG_VIDEO demo/label_map_k400.txt ./results.json
    ```
 
 ## SpatioTemporal Action Detection Webcam Demo
diff --git a/demo/long_video_demo.py b/demo/long_video_demo.py
index 34bae7c2e3..f0e21fc2e4 100644
--- a/demo/long_video_demo.py
+++ b/demo/long_video_demo.py
@@ -1,4 +1,5 @@
 import argparse
+import json
 import random
 from collections import deque
 from operator import itemgetter
@@ -32,7 +33,7 @@ def parse_args():
     parser.add_argument('checkpoint', help='checkpoint file/url')
     parser.add_argument('video_path', help='video file/url')
     parser.add_argument('label', help='label file')
-    parser.add_argument('out_file', help='output filename')
+    parser.add_argument('out_file', help='output result file in video/json')
     parser.add_argument(
         '--input-step',
         type=int,
@@ -58,6 +59,47 @@ def parse_args():
     return args
 
 
+def show_results_video(result_queue, text_info, thr, msg, frame, video_writer):
+    if len(result_queue) != 0:
+        text_info = {}
+        results = result_queue.popleft()
+        for i, result in enumerate(results):
+            selected_label, score = result
+            if score < thr:
+                break
+            location = (0, 40 + i * 20)
+            text = selected_label + ': ' + str(round(score, 2))
+            text_info[location] = text
+            cv2.putText(frame, text, location, FONTFACE, FONTSCALE, FONTCOLOR,
+                        THICKNESS, LINETYPE)
+    elif len(text_info):
+        for location, text in text_info.items():
+            cv2.putText(frame, text, location, FONTFACE, FONTSCALE, FONTCOLOR,
+                        THICKNESS, LINETYPE)
+    else:
+        cv2.putText(frame, msg, (0, 40), FONTFACE, FONTSCALE, MSGCOLOR,
+                    THICKNESS, LINETYPE)
+    video_writer.write(frame)
+    return text_info
+
+
+def get_results_json(result_queue, text_info, thr, msg, ind, out_json):
+    if len(result_queue) != 0:
+        text_info = {}
+        results = result_queue.popleft()
+        for i, result in enumerate(results):
+            selected_label, score = result
+            if score < thr:
+                break
+            text_info[i + 1] = selected_label + ': ' + str(round(score, 2))
+        out_json[ind] = text_info
+    elif len(text_info):
+        out_json[ind] = text_info
+    else:
+        out_json[ind] = msg
+    return text_info, out_json
+
+
 def show_results(model, data, label, args):
     frame_queue = deque(maxlen=args.sample_length)
     result_queue = deque(maxlen=1)
@@ -70,11 +112,13 @@ def show_results(model, data, label, args):
     msg = 'Preparing action recognition ...'
     text_info = {}
+    out_json = {}
 
     fourcc = cv2.VideoWriter_fourcc(*'mp4v')
     frame_size = (frame_width, frame_height)
 
     ind = 0
-    video_writer = cv2.VideoWriter(args.out_file, fourcc, fps, frame_size)
+    video_writer = None if args.out_file.endswith('.json') \
+        else cv2.VideoWriter(args.out_file, fourcc, fps, frame_size)
     prog_bar = mmcv.ProgressBar(num_frames)
     backup_frames = []
 
@@ -108,28 +152,20 @@ def show_results(model, data, label, args):
             results = scores_sorted[:num_selected_labels]
             result_queue.append(results)
 
-        if len(result_queue) != 0:
-            text_info = {}
-            results = result_queue.popleft()
-            for i, result in enumerate(results):
-                selected_label, score = result
-                if score < args.threshold:
-                    break
-                location = (0, 40 + i * 20)
-                text = selected_label + ': ' + str(round(score, 2))
-                text_info[location] = text
-                cv2.putText(frame, text, location, FONTFACE, FONTSCALE,
-                            FONTCOLOR, THICKNESS, LINETYPE)
-        elif len(text_info):
-            for location, text in text_info.items():
-                cv2.putText(frame, text, location, FONTFACE, FONTSCALE,
-                            FONTCOLOR, THICKNESS, LINETYPE)
+        if args.out_file.endswith('.json'):
+            text_info, out_json = get_results_json(result_queue, text_info,
+                                                   args.threshold, msg, ind,
+                                                   out_json)
         else:
-            cv2.putText(frame, msg, (0, 40), FONTFACE, FONTSCALE, MSGCOLOR,
-                        THICKNESS, LINETYPE)
-        video_writer.write(frame)
+            text_info = show_results_video(result_queue, text_info,
+                                           args.threshold, msg, frame,
+                                           video_writer)
 
     cap.release()
     cv2.destroyAllWindows()
+    if args.out_file.endswith('.json'):
+        with open(args.out_file, 'w') as js:
+            json.dump(out_json, js)
 
 
 def inference(model, data, args, frame_queue):
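For reference, the JSON file written by this new code path maps each frame index to either the preparation message or a dict of ranked predictions (see `get_results_json` above). A small sketch of reading it back, assuming the `results.json` name used in the docs example:

```python
# Minimal sketch for consuming the output of long_video_demo.py when
# OUT_FILE ends with '.json'. Keys become strings after the JSON round-trip.
import json

with open('results.json') as f:
    preds = json.load(f)

for frame_ind in sorted(preds, key=int)[:5]:
    # value: 'Preparing action recognition ...' for the earliest frames,
    # afterwards a dict like {'1': 'label: 0.87', '2': 'label: 0.05', ...}
    print(frame_ind, preds[frame_ind])
```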
From aebd7b9be6f58064340c9d85648520548ac1e1b2 Mon Sep 17 00:00:00 2001
From: dreamerlin <528557675@qq.com>
Date: Wed, 9 Jun 2021 16:17:47 +0800
Subject: [PATCH 144/414] rename lr -> scheduler

---
 .../tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb.py | 1 -
 mmaction/core/__init__.py                                 | 2 +-
 mmaction/core/{lr => scheduler}/__init__.py               | 0
 mmaction/core/{lr => scheduler}/tin_lr_hook.py            | 2 +-
 4 files changed, 2 insertions(+), 3 deletions(-)
 rename mmaction/core/{lr => scheduler}/__init__.py (100%)
 rename mmaction/core/{lr => scheduler}/tin_lr_hook.py (96%)

diff --git a/configs/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb.py b/configs/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb.py
index 2b41c55018..b41f018eec 100644
--- a/configs/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb.py
+++ b/configs/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb.py
@@ -90,5 +90,4 @@
 
 # runtime settings
 work_dir = './work_dirs/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/'
-# load_from = 'modelzoo/tsm_r50_1x1x8_50e_kinetics400_rgb_20200607-af7fb746.pth'  # noqa: E501
 load_from = 'https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20200607-af7fb746.pth'  # noqa: E501
diff --git a/mmaction/core/__init__.py b/mmaction/core/__init__.py
index 6842299583..f5f617cdf2 100644
--- a/mmaction/core/__init__.py
+++ b/mmaction/core/__init__.py
@@ -1,6 +1,6 @@
 from .bbox import *  # noqa: F401, F403
 from .evaluation import *  # noqa: F401, F403
 from .hooks import *  # noqa: F401, F403
-from .lr import *  # noqa: F401, F403
 from .optimizer import *  # noqa: F401, F403
 from .runner import *  # noqa: F401, F403
+from .scheduler import *  # noqa: F401, F403
diff --git a/mmaction/core/lr/__init__.py b/mmaction/core/scheduler/__init__.py
similarity index 100%
rename from mmaction/core/lr/__init__.py
rename to mmaction/core/scheduler/__init__.py
diff --git a/mmaction/core/lr/tin_lr_hook.py b/mmaction/core/scheduler/tin_lr_hook.py
similarity index 96%
rename from mmaction/core/lr/tin_lr_hook.py
rename to mmaction/core/scheduler/tin_lr_hook.py
index 4fededd585..215694de3c 100644
--- a/mmaction/core/lr/tin_lr_hook.py
+++ b/mmaction/core/scheduler/tin_lr_hook.py
@@ -7,7 +7,7 @@ class TINLrUpdaterHook(LrUpdaterHook):
 
     def __init__(self, min_lr, **kwargs):
         self.min_lr = min_lr
-        super(TINLrUpdaterHook, self).__init__(**kwargs)
+        super().__init__(**kwargs)
 
     def get_warmup_lr(self, cur_iters):
         if self.warmup == 'linear':

From f8e10ed63434be9456b97214ea98cfb2aa29900c Mon Sep 17 00:00:00 2001
From: dreamerlin <528557675@qq.com>
Date: Wed, 9 Jun 2021 16:19:22 +0800
Subject: [PATCH 145/414] rename lr -> scheduler

---
 mmaction/core/scheduler/__init__.py                       | 2 +-
 mmaction/core/scheduler/{tin_lr_hook.py => lr_updater.py} | 0
 2 files changed, 1 insertion(+), 1 deletion(-)
 rename mmaction/core/scheduler/{tin_lr_hook.py => lr_updater.py} (100%)

diff --git a/mmaction/core/scheduler/__init__.py b/mmaction/core/scheduler/__init__.py
index f2a29754b1..9ac369faf8 100644
--- a/mmaction/core/scheduler/__init__.py
+++ b/mmaction/core/scheduler/__init__.py
@@ -1,3 +1,3 @@
-from .tin_lr_hook import TINLrUpdaterHook
+from .lr_updater import TINLrUpdaterHook
 
 __all__ = ['TINLrUpdaterHook']
diff --git a/mmaction/core/scheduler/tin_lr_hook.py b/mmaction/core/scheduler/lr_updater.py
similarity index 100%
rename from mmaction/core/scheduler/tin_lr_hook.py
rename to mmaction/core/scheduler/lr_updater.py

From 1c96c18349bc96cca08baf9c2c36b4667a1f4e61 Mon Sep 17 00:00:00 2001
From: Jintao Lin <528557675@qq.com>
Date: Wed, 9 Jun 2021 23:36:55 +0800
Subject: [PATCH 146/414] Add MIM description (#918)

---
 docs/install.md       | 38 ++++++++++++++++++++++++++------------
 docs_zh_CN/install.md | 38 ++++++++++++++++++++++++++------------
 2 files changed, 52 insertions(+), 24 deletions(-)

diff --git a/docs/install.md b/docs/install.md
index 985d45792b..a8a091040a 100644
--- a/docs/install.md
+++ b/docs/install.md
@@ -5,6 +5,7 @@ We provide some tips for MMAction2 installation in this file.
 
 - [Requirements](#requirements)
+- [Prepare Environment](#prepare-environment)
 - [Install MMAction2](#install-mmaction2)
 - [Install with CPU only](#install-with-cpu-only)
 - [Another option: Docker Image](#another-option--docker-image)
@@ -46,7 +47,7 @@ conda install -y jpeg libtiff
 
 **Note**: You need to run `pip uninstall mmcv` first if you have mmcv installed. If mmcv and mmcv-full are both installed, there will be `ModuleNotFoundError`.
 
-### Install MMAction2
+## Prepare environment
 
 a. Create a conda virtual environment and activate it.
 
@@ -80,7 +81,20 @@ conda install pytorch=1.3.1 cudatoolkit=9.2 torchvision=0.4.2 -c pytorch
 
 If you build PyTorch from source instead of installing the prebuilt package, you can use more CUDA versions such as 9.0.
 
-c. Install mmcv, we recommend you to install the pre-built mmcv as below.
+## Install MMAction2
+
+We recommend you to install MMAction2 with [MIM](https://github.com/open-mmlab/mim).
+
+```shell
+pip install openmim
+mim install mmaction2
+```
+
+MIM can automatically install OpenMMLab projects and their requirements.
+
+Or, you can install MMAction2 manually:
+
+a. Install mmcv, we recommend you to install the pre-built mmcv as below.
 
 ```shell
 pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/{cu_version}/{torch_version}/index.html
@@ -113,14 +127,14 @@ pip install mmcv-full
 
 **Important:** You need to run `pip uninstall mmcv` first if you have mmcv installed. If mmcv and mmcv-full are both installed, there will be `ModuleNotFoundError`.
 
-d. Clone the MMAction2 repository.
+b. Clone the MMAction2 repository.
 
 ```shell
 git clone https://github.com/open-mmlab/mmaction2.git
 cd mmaction2
 ```
 
-e. Install build requirements and then install MMAction2.
+c. Install build requirements and then install MMAction2.
 
 ```shell
 pip install -r requirements/build.txt
@@ -133,7 +147,7 @@ If you build MMAction2 on macOS, replace the last command with
 CC=clang CXX=clang++ CFLAGS='-stdlib=libc++' pip install -e .
 ```
 
-f. Install mmdetection for spatial temporal detection tasks.
+d. Install mmdetection for spatial temporal detection tasks.
 
 This part is **optional** if you're not going to do spatial temporal detection.
 
@@ -141,8 +155,8 @@ See [here](https://github.com/open-mmlab/mmdetection#installation) to install mmdetection.
 
 Note:
 
-1. The git commit id will be written to the version number with step d, e.g. 0.6.0+2e7045c. The version will also be saved in trained models.
-   It is recommended that you run step d each time you pull some updates from github. If C++/CUDA codes are modified, then this step is compulsory.
+1. The git commit id will be written to the version number with step b, e.g. 0.6.0+2e7045c. The version will also be saved in trained models.
+   It is recommended that you run step b each time you pull some updates from github. If C++/CUDA codes are modified, then this step is compulsory.
 
 2. Following the above instructions, MMAction2 is installed on `dev` mode, any local modifications made to the code will take effect without the need to reinstall it (unless you submit some commits and want to update the version number).
 
@@ -156,13 +170,13 @@
    or specify desired extras when calling `pip` (e.g. `pip install -v -e .[optional]`, valid keys for the `[optional]` field are `all`, `tests`, `build`, and `optional`) like `pip install -v -e .[tests,build]`.
 
-### Install with CPU only
+## Install with CPU only
 
 The code can be built for CPU only environment (where CUDA isn't available).
 
 In CPU mode you can run the demo/demo.py for example.
 
-### Another option: Docker Image
+## Another option: Docker Image
 
 We provide a [Dockerfile](/docker/Dockerfile) to build an image.
 
@@ -179,7 +193,7 @@ Run it with command:
 docker run --gpus all --shm-size=8g -it -v {DATA_DIR}:/mmaction2/data mmaction2
 ```
 
-### A from-scratch setup script
+## A from-scratch setup script
 
 Here is a full script for setting up MMAction2 with conda and link the dataset path (supposing that your Kinetics-400 dataset path is $KINETICS400_ROOT).
 
@@ -203,7 +217,7 @@ mkdir data
 ln -s $KINETICS400_ROOT data
 ```
 
-### Developing with multiple MMAction2 versions
+## Developing with multiple MMAction2 versions
 
 The train and test scripts already modify the `PYTHONPATH` to ensure the scripts use the MMAction2 in the current directory.
 
@@ -213,7 +227,7 @@ To use the default MMAction2 installed in the environment rather than that you a
 PYTHONPATH="$(dirname $0)/..":$PYTHONPATH
 ```
 
-### Verification
+## Verification
 
 To verify whether MMAction2 and the required environment are installed correctly, we can run sample python codes to initialize a recognizer and inference a demo video:
diff --git a/docs_zh_CN/install.md b/docs_zh_CN/install.md
index 17992071ad..3a129106b5 100644
--- a/docs_zh_CN/install.md
+++ b/docs_zh_CN/install.md
@@ -5,6 +5,7 @@
 
 - [安装依赖包](#安装依赖包)
+- [准备环境](#准备环境)
 - [MMAction2 的安装步骤](#MMAction2-的安装步骤)
 - [CPU 环境下的安装步骤](#CPU-环境下的安装步骤)
 - [利用 Docker 镜像安装 MMAction2](#利用-Docker-镜像安装-MMAction2)
@@ -46,7 +47,7 @@ conda install -y jpeg libtiff
 
 **注意**:用户需要首先运行 `pip uninstall mmcv` 命令,以确保 mmcv 被成功安装。 如果 mmcv 和 mmcv-full 同时被安装, 会报 `ModuleNotFoundError` 的错误。
 
-### MMAction2 的安装步骤
+## 准备环境
 
 a. 创建并激活 conda 虚拟环境,如:
 
@@ -80,7 +81,20 @@ conda install pytorch=1.3.1 cudatoolkit=9.2 torchvision=0.4.2 -c pytorch
 
 如果 PyTorch 是由源码进行编译安装(而非直接下载预编译好的安装包),则可以使用更多的 CUDA 版本(如 9.0 版本)。
 
-c. 安装 mmcv。MMAction2 推荐用户使用如下的命令安装预编译好的 mmcv。
+## MMAction2 的安装步骤
+
+这里推荐用户使用 [MIM](https://github.com/open-mmlab/mim) 安装 MMAction2。
+
+```shell
+pip install openmim
+mim install mmaction2
+```
+
+MIM 可以自动安装 OpenMMLab 项目及其依赖。
+
+或者,用户也可以通过以下步骤手动安装 MMAction2。
+
+a. 安装 mmcv。MMAction2 推荐用户使用如下的命令安装预编译好的 mmcv。
 
 ```shell
 pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/{cu_version}/{torch_version}/index.html
@@ -113,14 +127,14 @@ pip install mmcv-full
 
 **注意**:如果 mmcv 已经被安装,用户需要使用 `pip uninstall mmcv` 命令进行卸载。如果 mmcv 和 mmcv-full 同时被安装, 会报 `ModuleNotFoundError` 的错误。
 
-d. 克隆 MMAction2 库。
+b. 克隆 MMAction2 库。
 
 ```shell
 git clone https://github.com/open-mmlab/mmaction2.git
 cd mmaction2
 ```
 
-e. 安装依赖包和 MMAction2。
+c. 安装依赖包和 MMAction2。
 
 ```shell
 pip install -r requirements/build.txt
@@ -133,7 +147,7 @@
 pip install -v -e .  # or "python setup.py develop"
 CC=clang CXX=clang++ CFLAGS='-stdlib=libc++' pip install -e .
 ```
 
-f. 安装 mmdetection 以支持时空检测任务。
+d. 安装 mmdetection 以支持时空检测任务。
 
 如果用户不想做时空检测相关任务,这部分步骤可以选择跳过。
 
@@ -141,8 +155,8 @@
 
 注意:
 
-1. 在步骤 b 中,git commit 的 id 将会被写到版本号中,如 0.6.0+2e7045c。这个版本号也会被保存到训练好的模型中。
-   这里推荐用户每次在步骤 d 中对本地代码和 github 上的源码进行同步。如果 C++/CUDA 代码被修改,就必须进行这一步骤。
+1. 在步骤 b 中,git commit 的 id 将会被写到版本号中,如 0.6.0+2e7045c。这个版本号也会被保存到训练好的模型中。
+   这里推荐用户每次在步骤 b 中对本地代码和 github 上的源码进行同步。如果 C++/CUDA 代码被修改,就必须进行这一步骤。
 
 2. 根据上述步骤,MMAction2 就会以 `dev` 模式被安装,任何本地的代码修改都会立刻生效,不需要再重新安装一遍(除非用户提交了 commits,并且想更新版本号)。
 
@@ -154,13 +168,13 @@
 要想使用一些可选的依赖包,如 `decord`,用户需要通过 `pip install -r requirements/optional.txt` 进行安装, 或者通过调用 `pip`(如 `pip install -v -e .[optional]`,这里的 `[optional]` 可替换为 `all`,`tests`,`build` 或 `optional`) 指定安装对应的依赖包,如 `pip install -v -e .[tests,build]`。
 
-### CPU 环境下的安装步骤
+## CPU 环境下的安装步骤
 
 MMAction2 可以在只有 CPU 的环境下安装(即无法使用 GPU 的环境)。
 
 在 CPU 模式下,用户可以运行 `demo/demo.py` 的代码。
 
-### 利用 Docker 镜像安装 MMAction2
+## 利用 Docker 镜像安装 MMAction2
 
 MMAction2 提供一个 [Dockerfile](/docker/Dockerfile) 用户创建 docker 镜像。
 
@@ -177,7 +191,7 @@ docker build -f ./docker/Dockerfile --rm -t mmaction2 .
 
 docker run --gpus all --shm-size=8g -it -v {DATA_DIR}:/mmaction2/data mmaction2
 ```
 
-### 源码安装 MMAction2
+## 源码安装 MMAction2
 
 这里提供了 conda 下安装 MMAction2 并链接数据集路径的完整脚本(假设 Kinetics-400 数据的路径在 $KINETICS400_ROOT)。
 
@@ -201,7 +215,7 @@ mkdir data
 ln -s $KINETICS400_ROOT data
 ```
 
-### 在多个 MMAction2 版本下进行开发
+## 在多个 MMAction2 版本下进行开发
 
 MMAction2 的训练和测试脚本已经修改了 `PYTHONPATH` 变量,以确保其能够运行当前目录下的 MMAction2。
 
@@ -211,7 +225,7 @@
 PYTHONPATH="$(dirname $0)/..":$PYTHONPATH
 ```
 
-### 安装验证
+## 安装验证
 
 为了验证 MMAction2 和所需的依赖包是否已经安装成功, 用户可以运行以下的 python 代码,以测试其是否能成功地初始化动作识别器,并进行演示视频的推理:
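One practical note on the installation flow above: after `mim install mmaction2` or a `pip install -v -e .` dev install, two copies of the package may end up on the path. A quick, generic check (not specific to this patch) of which one Python actually imports:

```python
# Print the installed version and the location it was imported from,
# to confirm whether the dev checkout or the installed copy is in use.
import mmaction

print(mmaction.__version__, mmaction.__file__)
```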
From 0cc94a1e2d3b3e6f91a1a940ada6335f702d099e Mon Sep 17 00:00:00 2001
From: Jintao Lin <528557675@qq.com>
Date: Thu, 10 Jun 2021 11:51:12 +0800
Subject: [PATCH 147/414] [Improvement] Polish readthedocs (#919)

---
 docs/install.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/install.md b/docs/install.md
index a8a091040a..4866884dee 100644
--- a/docs/install.md
+++ b/docs/install.md
@@ -1,4 +1,4 @@
-## Installation
+# Installation
 
 We provide some tips for MMAction2 installation in this file.
 
@@ -15,7 +15,7 @@ We provide some tips for MMAction2 installation in this file.
 
-### Requirements
+## Requirements
 
 - Linux (Windows is not officially supported)
 - Python 3.6+

From 4621a7f5a16ecbdfc5ac1fe98cd21236c28d8893 Mon Sep 17 00:00:00 2001
From: Kenny
Date: Thu, 10 Jun 2021 14:12:40 +0800
Subject: [PATCH 148/414] update model links

---
 configs/recognition/tsn/README.md       | 4 ++--
 configs/recognition/tsn/README_zh-CN.md | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/configs/recognition/tsn/README.md b/configs/recognition/tsn/README.md
index a1c3b8e81c..0edd90630f 100644
--- a/configs/recognition/tsn/README.md
+++ b/configs/recognition/tsn/README.md
@@ -67,13 +67,13 @@ It's possible and convenient to use a 3rd-party backbone for TSN under the frame
 
 - [x] Backbones from [MMClassification](https://github.com/open-mmlab/mmclassification/)
 - [x] Backbones from [TorchVision](https://github.com/pytorch/vision/)
-- [x] Backbones from [pytorch-image-models(timm)](https://github.com/rwightman/pytorch-image-models)
+- [x] Backbones from [TIMM (pytorch-image-models)](https://github.com/rwightman/pytorch-image-models)
 
 | config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | ckpt | log | json |
 | :----------------------------------------------------------- | :------------: | :--: | :----------------------------------------------------------: | :------: | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: |
 | [tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | 8x2 | ResNeXt101-32x4d [[MMCls](https://github.com/open-mmlab/mmclassification/tree/master/configs/resnext)] | ImageNet | 73.43 | 91.01 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb-16a8b561.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.json) |
 | [tsn_dense161_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | 8x2 | Densenet-161 [[TorchVision](https://github.com/pytorch/vision/)] | ImageNet | 72.78 | 90.75 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb-cbe85332.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.json) |
-| [tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | 8 | Swin Transformer Base [[timm](https://github.com/rwightman/pytorch-image-models)] | ImageNet | 77.31 | 92.88 | [ckpt]() | [log]() | [json]() |
+| [tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | 8 | Swin Transformer Base [[timm](https://github.com/rwightman/pytorch-image-models)] | ImageNet | 77.51 | 92.92 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb-805380f6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.json) |
 
 ### Kinetics-400 Data Benchmark (8-gpus, ResNet50, ImageNet pretrain; 3 segments)
diff --git a/configs/recognition/tsn/README_zh-CN.md b/configs/recognition/tsn/README_zh-CN.md
index f6e9c08c61..63fe85e4c7 100644
--- a/configs/recognition/tsn/README_zh-CN.md
+++ b/configs/recognition/tsn/README_zh-CN.md
@@ -73,7 +73,7 @@
 | :----------------------------------------------------------: | :------------: | :--: | :----------------------------------------------------------: | :------: | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: |
 | [tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.py) | 短边 320 | 8x2 | ResNeXt101-32x4d [[MMCls](https://github.com/open-mmlab/mmclassification/tree/master/configs/resnext)] | ImageNet | 73.43 | 91.01 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb-16a8b561.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.json) |
[tsn_dense161_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.py) | 短边 320 | 8x2 | Densenet-161 [[TorchVision](https://github.com/pytorch/vision/)] | ImageNet | 72.78 | 90.75 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb-cbe85332.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.json) |
-| [tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py) | 短边 320 | 8 | Swin Transformer Base [[timm](https://github.com/rwightman/pytorch-image-models)] | ImageNet | 77.31 | 92.88 | [ckpt]() | [log]() | [json]() |
+| [tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py) | 短边 320 | 8 | Swin Transformer Base [[timm](https://github.com/rwightman/pytorch-image-models)] | ImageNet | 77.51 | 92.92 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb-805380f6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.json) |

### Kinetics-400 数据基准测试 (8 块 GPU, ResNet50, ImageNet 预训练; 3 个视频段)

在数据基准测试中,比较:

From c6f57e915e0cd59e30b8ecf98b687402088d3d60 Mon Sep 17 00:00:00 2001
From: Kenny
Date: Thu, 10 Jun 2021 14:45:39 +0800
Subject: [PATCH 149/414] no longer run torch1.3.0 in CI

---
 .github/workflows/build.yml | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 83763ac1c7..924653e894 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -84,11 +84,8 @@ jobs:
     strategy:
       matrix:
         python-version: [3.7]
-        torch: [1.3.0, 1.5.0+cu101, 1.6.0+cu101, 1.7.0+cu101, 1.8.0+cu101]
+        torch: [1.5.0+cu101, 1.6.0+cu101, 1.7.0+cu101, 1.8.0+cu101]
         include:
-          - torch: 1.3.0
-            torchvision: 0.4.1
-            mmcv: 1.3.0+cu101
           - torch: 1.5.0+cu101
             torchvision: 0.6.0+cu101
             mmcv: 1.5.0+cu101
From dae2f4bcfeaa74882d488cc450029378424bd755 Mon Sep 17 00:00:00 2001
From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com>
Date: Thu, 10 Jun 2021 17:01:06 +0800
Subject: [PATCH 150/414] [Improvement] Add links to download Kinetics400
 validation (#920)

---
 configs/recognition/csn/README.md                | 1 +
 configs/recognition/csn/README_zh-CN.md          | 1 +
 configs/recognition/i3d/README.md                | 4 ++--
 configs/recognition/i3d/README_zh-CN.md          | 1 +
 configs/recognition/omnisource/README.md         | 2 ++
 configs/recognition/omnisource/README_zh-CN.md   | 2 ++
 configs/recognition/r2plus1d/README.md           | 4 ++--
 configs/recognition/r2plus1d/README_zh-CN.md     | 1 +
configs/recognition/slowfast/README.md | 4 ++-- configs/recognition/slowfast/README_zh-CN.md | 1 + configs/recognition/slowonly/README.md | 4 ++-- configs/recognition/slowonly/README_zh-CN.md | 1 + configs/recognition/tanet/README.md | 4 +++- configs/recognition/tanet/README_zh-CN.md | 1 + configs/recognition/tin/README.md | 1 + configs/recognition/tin/README_zh-CN.md | 1 + configs/recognition/tpn/README.md | 1 + configs/recognition/tpn/README_zh-CN.md | 1 + configs/recognition/tsm/README.md | 1 + configs/recognition/tsm/README_zh-CN.md | 1 + configs/recognition/tsn/README.md | 1 + configs/recognition/tsn/README_zh-CN.md | 1 + configs/recognition/x3d/README.md | 1 + configs/recognition/x3d/README_zh-CN.md | 1 + configs/recognition_audio/resnet/README.md | 4 ++-- configs/recognition_audio/resnet/README_zh-CN.md | 1 + 26 files changed, 35 insertions(+), 11 deletions(-) diff --git a/configs/recognition/csn/README.md b/configs/recognition/csn/README.md index 593e8c023a..2c8322fcc1 100644 --- a/configs/recognition/csn/README.md +++ b/configs/recognition/csn/README.md @@ -43,6 +43,7 @@ Notes: e.g., lr=0.01 for 4 GPUs x 2 video/gpu and lr=0.08 for 16 GPUs x 4 video/gpu. 2. The **inference_time** is got by this [benchmark script](/tools/analysis/benchmark.py), where we use the sampling frames strategy of the test setting and only care about the model inference time, not including the IO time and pre-processing time. For each setting, we use 1 gpu and set batch size (videos per gpu) to 1 to calculate the inference time. +3. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). diff --git a/configs/recognition/csn/README_zh-CN.md b/configs/recognition/csn/README_zh-CN.md index 78aa1bc6b2..9a4fda15ed 100644 --- a/configs/recognition/csn/README_zh-CN.md +++ b/configs/recognition/csn/README_zh-CN.md @@ -43,6 +43,7 @@ doi = {10.1109/ICCV.2019.00565} 如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。 2. 这里的 **推理时间** 是根据 [基准测试脚本](/tools/analysis/benchmark.py) 获得的,采用测试时的采帧策略,且只考虑模型的推理时间, 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 +3. 我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。 diff --git a/configs/recognition/i3d/README.md b/configs/recognition/i3d/README.md index e477ba2af4..d6fac8dd8e 100644 --- a/configs/recognition/i3d/README.md +++ b/configs/recognition/i3d/README.md @@ -48,8 +48,8 @@ Notes: 1. The **gpus** indicates the number of gpu we used to get the checkpoint. 
It is noteworthy that the configs we provide are used for 8 gpus as default. According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, e.g., lr=0.01 for 4 GPUs x 2 video/gpu and lr=0.08 for 16 GPUs x 4 video/gpu. -2. The **inference_time** is got by this [benchmark script](/tools/analysis/benchmark.py), where we use the sampling frames strategy of the test setting and only care about the model inference time, - not including the IO time and pre-processing time. For each setting, we use 1 gpu and set batch size (videos per gpu) to 1 to calculate the inference time. +2. The **inference_time** is got by this [benchmark script](/tools/analysis/benchmark.py), where we use the sampling frames strategy of the test setting and only care about the model inference time, not including the IO time and pre-processing time. For each setting, we use 1 gpu and set batch size (videos per gpu) to 1 to calculate the inference time. +3. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). diff --git a/configs/recognition/i3d/README_zh-CN.md b/configs/recognition/i3d/README_zh-CN.md index d63a404df0..ac10732615 100644 --- a/configs/recognition/i3d/README_zh-CN.md +++ b/configs/recognition/i3d/README_zh-CN.md @@ -50,6 +50,7 @@ 如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。 2. 这里的 **推理时间** 是根据 [基准测试脚本](/tools/analysis/benchmark.py) 获得的,采用测试时的采帧策略,且只考虑模型的推理时间, 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 +3. 
我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。

对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。

diff --git a/configs/recognition/omnisource/README.md b/configs/recognition/omnisource/README.md
index ac682536d0..1860af906c 100644
--- a/configs/recognition/omnisource/README.md
+++ b/configs/recognition/omnisource/README.md
@@ -19,6 +19,8 @@ We currently released 4 models trained with OmniSource framework, including both
 | SlowOnly | RGB | Scratch | ResNet50 | 4x16 | short-side 320 | 72.9 / 76.8 (+ 3.9) | 90.9 / 92.5 (+ 1.6) | [Baseline](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r50_omni_4x16x1_kinetics400_rgb_20200926-51b1f7ea.pth) |
 | SlowOnly | RGB | Scratch | ResNet101 | 8x8 | short-side 320 | 76.5 / 80.4 (+ 3.9) | 92.7 / 94.4 (+ 1.7) | [Baseline](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_without_omni_8x8x1_kinetics400_rgb_20200926-0c730aef.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_omni_8x8x1_kinetics400_rgb_20200926-b5dbb701.pth) |
+
+1. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available.
+
 ## Benchmark on Mini-Kinetics

 We release a subset of the web dataset used in the OmniSource paper. Specifically, we release the web data in the 200 classes of [Mini-Kinetics](https://arxiv.org/pdf/1712.04851.pdf). The statistics of those datasets are detailed in [preparing_omnisource](/tools/data/omnisource/README.md). To obtain those data, you need to fill in a [data request form](https://docs.google.com/forms/d/e/1FAIpQLSd8_GlmHzG8FcDbW-OEu__G7qLgOSYZpH-i5vYVJcu7wcb_TQ/viewform?usp=sf_link). After we receive your request, the download link of these data will be sent to you. For more details on the released OmniSource web dataset, please refer to [preparing_omnisource](/tools/data/omnisource/README.md).
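The note above gives the exact line format of the released `kinetics_val_list.txt` ('video_id, num_frames, label_index'). As a quick illustration (not part of this patch), a minimal Python sketch for loading that list could look as follows; it assumes the three fields are whitespace-separated, as in standard MMAction2 rawframe file lists:

```python
import os.path as osp


def load_kinetics_val_list(path):
    """Parse lines of 'video_id num_frames label_index' into tuples."""
    annotations = []
    with open(path) as f:
        for line in f:
            # assumption: fields are whitespace-separated; adjust the
            # split if the delimiter in the released file differs
            video_id, num_frames, label_index = line.split()
            annotations.append((video_id, int(num_frames), int(label_index)))
    return annotations


if __name__ == '__main__':
    ann_file = 'kinetics_val_list.txt'  # the data list linked above
    if osp.exists(ann_file):
        anns = load_kinetics_val_list(ann_file)
        print(f'{len(anns)} videos')  # the notes above say 19796
```
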
diff --git a/configs/recognition/omnisource/README_zh-CN.md b/configs/recognition/omnisource/README_zh-CN.md index 4341870945..7e9f1da5f8 100644 --- a/configs/recognition/omnisource/README_zh-CN.md +++ b/configs/recognition/omnisource/README_zh-CN.md @@ -19,6 +19,8 @@ MMAction2 当前公开了 4 个 OmniSource 框架训练的模型,包含 2D 架 | SlowOnly | RGB | None | ResNet50 | 4x16 | short-side 320 | 72.9 / 76.8 (+ 3.9) | 90.9 / 92.5 (+ 1.6) | [Baseline](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r50_omni_4x16x1_kinetics400_rgb_20200926-51b1f7ea.pth) | | SlowOnly | RGB | None | ResNet101 | 8x8 | short-side 320 | 76.5 / 80.4 (+ 3.9) | 92.7 / 94.4 (+ 1.7) | [Baseline](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_without_omni_8x8x1_kinetics400_rgb_20200926-0c730aef.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_omni_8x8x1_kinetics400_rgb_20200926-b5dbb701.pth) | +1. 我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 + ## Mini-Kinetics 上的基准测试 OmniSource 项目当前公开了所采集网络数据的一个子集,涉及 [Mini-Kinetics](https://arxiv.org/pdf/1712.04851.pdf) 中的 200 个动作类别。[OmniSource 数据集准备](/tools/data/omnisource/README_zh-CN.md) 中记录了这些数据集的详细统计信息。用户可以通过填写 [申请表](https://docs.google.com/forms/d/e/1FAIpQLSd8_GlmHzG8FcDbW-OEu__G7qLgOSYZpH-i5vYVJcu7wcb_TQ/viewform?usp=sf_link) 获取这些数据,在完成填写后,数据下载链接会被发送至用户邮箱。更多关于 OmniSource 网络数据集的信息请参照 [OmniSource 数据集准备](/tools/data/omnisource/README_zh-CN.md)。 diff --git a/configs/recognition/r2plus1d/README.md b/configs/recognition/r2plus1d/README.md index 14c6a3b22e..bb2cf2ea50 100644 --- a/configs/recognition/r2plus1d/README.md +++ b/configs/recognition/r2plus1d/README.md @@ -30,8 +30,8 @@ Notes: 1. The **gpus** indicates the number of gpu we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, e.g., lr=0.01 for 4 GPUs x 2 video/gpu and lr=0.08 for 16 GPUs x 4 video/gpu. -2. The **inference_time** is got by this [benchmark script](/tools/analysis/benchmark.py), where we use the sampling frames strategy of the test setting and only care about the model inference time, - not including the IO time and pre-processing time. For each setting, we use 1 gpu and set batch size (videos per gpu) to 1 to calculate the inference time. +2. The **inference_time** is got by this [benchmark script](/tools/analysis/benchmark.py), where we use the sampling frames strategy of the test setting and only care about the model inference time, not including the IO time and pre-processing time. For each setting, we use 1 gpu and set batch size (videos per gpu) to 1 to calculate the inference time. +3. The validation set of Kinetics400 we used consists of 19796 videos. 
These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). diff --git a/configs/recognition/r2plus1d/README_zh-CN.md b/configs/recognition/r2plus1d/README_zh-CN.md index b3814af0b2..d720508dc0 100644 --- a/configs/recognition/r2plus1d/README_zh-CN.md +++ b/configs/recognition/r2plus1d/README_zh-CN.md @@ -32,6 +32,7 @@ 如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。 2. 这里的 **推理时间** 是根据 [基准测试脚本](/tools/analysis/benchmark.py) 获得的,采用测试时的采帧策略,且只考虑模型的推理时间, 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 +3. 我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。 diff --git a/configs/recognition/slowfast/README.md b/configs/recognition/slowfast/README.md index 4ea16f92de..8091b57598 100644 --- a/configs/recognition/slowfast/README.md +++ b/configs/recognition/slowfast/README.md @@ -34,8 +34,8 @@ Notes: 1. The **gpus** indicates the number of gpu we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, e.g., lr=0.01 for 4 GPUs x 2 video/gpu and lr=0.08 for 16 GPUs x 4 video/gpu. -2. The **inference_time** is got by this [benchmark script](/tools/analysis/benchmark.py), where we use the sampling frames strategy of the test setting and only care about the model inference time, - not including the IO time and pre-processing time. For each setting, we use 1 gpu and set batch size (videos per gpu) to 1 to calculate the inference time. +2. The **inference_time** is got by this [benchmark script](/tools/analysis/benchmark.py), where we use the sampling frames strategy of the test setting and only care about the model inference time, not including the IO time and pre-processing time. For each setting, we use 1 gpu and set batch size (videos per gpu) to 1 to calculate the inference time. +3. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. 
For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). diff --git a/configs/recognition/slowfast/README_zh-CN.md b/configs/recognition/slowfast/README_zh-CN.md index f53fac0232..452ecc8ec2 100644 --- a/configs/recognition/slowfast/README_zh-CN.md +++ b/configs/recognition/slowfast/README_zh-CN.md @@ -36,6 +36,7 @@ 如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。 2. 这里的 **推理时间** 是根据 [基准测试脚本](/tools/analysis/benchmark.py) 获得的,采用测试时的采帧策略,且只考虑模型的推理时间, 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 +3. 我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。 diff --git a/configs/recognition/slowonly/README.md b/configs/recognition/slowonly/README.md index 2154a64722..e3aabd615c 100644 --- a/configs/recognition/slowonly/README.md +++ b/configs/recognition/slowonly/README.md @@ -82,8 +82,8 @@ Notes: 1. The **gpus** indicates the number of gpu we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, e.g., lr=0.01 for 4 GPUs x 2 video/gpu and lr=0.08 for 16 GPUs x 4 video/gpu. -2. The **inference_time** is got by this [benchmark script](/tools/analysis/benchmark.py), where we use the sampling frames strategy of the test setting and only care about the model inference time, - not including the IO time and pre-processing time. For each setting, we use 1 gpu and set batch size (videos per gpu) to 1 to calculate the inference time. +2. The **inference_time** is got by this [benchmark script](/tools/analysis/benchmark.py), where we use the sampling frames strategy of the test setting and only care about the model inference time, not including the IO time and pre-processing time. For each setting, we use 1 gpu and set batch size (videos per gpu) to 1 to calculate the inference time. +3. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). diff --git a/configs/recognition/slowonly/README_zh-CN.md b/configs/recognition/slowonly/README_zh-CN.md index 10b8464a61..33a645bc94 100644 --- a/configs/recognition/slowonly/README_zh-CN.md +++ b/configs/recognition/slowonly/README_zh-CN.md @@ -84,6 +84,7 @@ 如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。 2. 
这里的 **推理时间** 是根据 [基准测试脚本](/tools/analysis/benchmark.py) 获得的,采用测试时的采帧策略,且只考虑模型的推理时间, 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 +3. 我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。 diff --git a/configs/recognition/tanet/README.md b/configs/recognition/tanet/README.md index 85a8a81036..38a23d6785 100644 --- a/configs/recognition/tanet/README.md +++ b/configs/recognition/tanet/README.md @@ -26,7 +26,9 @@ Notes: 1. The **gpus** indicates the number of gpu we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, e.g., lr=0.01 for 8 GPUs x 8 videos/gpu and lr=0.04 for 16 GPUs x 16 videos/gpu. -2. The values in columns named after "reference" are the results got by testing on our dataset, using the checkpoints provided by the author with same model settings. The checkpoints for reference repo can be downloaded [here](https://drive.google.com/drive/folders/1sFfmP3yrfc7IzRshEELOby7-aEoymIFL?usp=sharing). +2. The **inference_time** is got by this [benchmark script](/tools/analysis/benchmark.py), where we use the sampling frames strategy of the test setting and only care about the model inference time, not including the IO time and pre-processing time. For each setting, we use 1 gpu and set batch size (videos per gpu) to 1 to calculate the inference time. +3. The values in columns named after "reference" are the results got by testing on our dataset, using the checkpoints provided by the author with same model settings. The checkpoints for reference repo can be downloaded [here](https://drive.google.com/drive/folders/1sFfmP3yrfc7IzRshEELOby7-aEoymIFL?usp=sharing). +4. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). diff --git a/configs/recognition/tanet/README_zh-CN.md b/configs/recognition/tanet/README_zh-CN.md index 8fe466da27..eebff061f4 100644 --- a/configs/recognition/tanet/README_zh-CN.md +++ b/configs/recognition/tanet/README_zh-CN.md @@ -29,6 +29,7 @@ 2. 这里的 **推理时间** 是根据 [基准测试脚本](/tools/analysis/benchmark.py) 获得的,采用测试时的采帧策略,且只考虑模型的推理时间, 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 3. 参考代码的结果是通过使用相同的模型配置在原来的代码库上训练得到的。对应的模型权重文件可从 [这里](https://drive.google.com/drive/folders/1sFfmP3yrfc7IzRshEELOby7-aEoymIFL?usp=sharing) 下载。 +4. 
我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。 diff --git a/configs/recognition/tin/README.md b/configs/recognition/tin/README.md index 44e56a2952..adf49412d8 100644 --- a/configs/recognition/tin/README.md +++ b/configs/recognition/tin/README.md @@ -45,6 +45,7 @@ Notes: 3. The **inference_time** is got by this [benchmark script](/tools/analysis/benchmark.py), where we use the sampling frames strategy of the test setting and only care about the model inference time, not including the IO time and pre-processing time. For each setting, we use 1 gpu and set batch size (videos per gpu) to 1 to calculate the inference time. 4. The values in columns named after "reference" are the results got by training on the original repo, using the same model settings. +5. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. For more details on data preparation, you can refer to Kinetics400, Something-Something V1 and Something-Something V2 in [Data Preparation](/docs/data_preparation.md). diff --git a/configs/recognition/tin/README_zh-CN.md b/configs/recognition/tin/README_zh-CN.md index a047d3ca75..2747fa6c94 100644 --- a/configs/recognition/tin/README_zh-CN.md +++ b/configs/recognition/tin/README_zh-CN.md @@ -44,6 +44,7 @@ 3. 这里的 **推理时间** 是根据 [基准测试脚本](/tools/analysis/benchmark.py) 获得的,采用测试时的采帧策略,且只考虑模型的推理时间, 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 4. 参考代码的结果是通过使用相同的模型配置在原来的代码库上训练得到的。 +5. 我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400, Something-Something V1 and Something-Something V2 部分。 diff --git a/configs/recognition/tpn/README.md b/configs/recognition/tpn/README.md index 83715cefb6..c7973f381a 100644 --- a/configs/recognition/tpn/README.md +++ b/configs/recognition/tpn/README.md @@ -36,6 +36,7 @@ Notes: 2. The **inference_time** is got by this [benchmark script](/tools/analysis/benchmark.py), where we use the sampling frames strategy of the test setting and only care about the model inference time, not including the IO time and pre-processing time. For each setting, we use 1 gpu and set batch size (videos per gpu) to 1 to calculate the inference time. 3. 
The values in columns named after "reference" are the results got by testing the checkpoint released on the original repo and codes, using the same dataset with ours. +4. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. For more details on data preparation, you can refer to Kinetics400, Something-Something V1 and Something-Something V2 in [Data Preparation](/docs/data_preparation.md). diff --git a/configs/recognition/tpn/README_zh-CN.md b/configs/recognition/tpn/README_zh-CN.md index 04ba85fe1f..2e32d36948 100644 --- a/configs/recognition/tpn/README_zh-CN.md +++ b/configs/recognition/tpn/README_zh-CN.md @@ -36,6 +36,7 @@ 2. 这里的 **推理时间** 是根据 [基准测试脚本](/tools/analysis/benchmark.py) 获得的,采用测试时的采帧策略,且只考虑模型的推理时间, 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 3. 参考代码的结果是通过使用相同的模型配置在原来的代码库上训练得到的。 +4. 我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 ## 如何训练 diff --git a/configs/recognition/tsm/README.md b/configs/recognition/tsm/README.md index 6703975a92..af5d1eb183 100644 --- a/configs/recognition/tsm/README.md +++ b/configs/recognition/tsm/README.md @@ -117,6 +117,7 @@ test_pipeline = [ ``` 5. When applying Mixup and CutMix, we use the hyper parameter `alpha=0.2`. +6. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. For more details on data preparation, you can refer to Kinetics400, Something-Something V1 and Something-Something V2 in [Data Preparation](/docs/data_preparation.md). diff --git a/configs/recognition/tsm/README_zh-CN.md b/configs/recognition/tsm/README_zh-CN.md index ff46670c00..51de257f79 100644 --- a/configs/recognition/tsm/README_zh-CN.md +++ b/configs/recognition/tsm/README_zh-CN.md @@ -115,6 +115,7 @@ test_pipeline = [ ``` 5. 当采用 Mixup 和 CutMix 的数据增强时,使用超参 `alpha=0.2`。 +6. 
我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400, Something-Something V1 and Something-Something V2 部分。 diff --git a/configs/recognition/tsn/README.md b/configs/recognition/tsn/README.md index 3466febc01..ae80adf472 100644 --- a/configs/recognition/tsn/README.md +++ b/configs/recognition/tsn/README.md @@ -178,6 +178,7 @@ Notes: 2. The **inference_time** is got by this [benchmark script](/tools/analysis/benchmark.py), where we use the sampling frames strategy of the test setting and only care about the model inference time, not including the IO time and pre-processing time. For each setting, we use 1 gpu and set batch size (videos per gpu) to 1 to calculate the inference time. 3. The values in columns named after "reference" are the results got by training on the original repo, using the same model settings. +4. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. For more details on data preparation, you can refer to diff --git a/configs/recognition/tsn/README_zh-CN.md b/configs/recognition/tsn/README_zh-CN.md index f6e9c08c61..fecafe3a64 100644 --- a/configs/recognition/tsn/README_zh-CN.md +++ b/configs/recognition/tsn/README_zh-CN.md @@ -178,6 +178,7 @@ HATNet 是在每个独立的任务(对应每个 tag 类别)上进行训练 2. 这里的 **推理时间** 是根据 [基准测试脚本](/tools/analysis/benchmark.py) 获得的,采用测试时的采帧策略,且只考虑模型的推理时间, 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 3. 参考代码的结果是通过使用相同的模型配置在原来的代码库上训练得到的。 +4. 我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 对于数据集准备的细节,用户可参考: diff --git a/configs/recognition/x3d/README.md b/configs/recognition/x3d/README.md index 6d99dc0dce..42509e6a09 100644 --- a/configs/recognition/x3d/README.md +++ b/configs/recognition/x3d/README.md @@ -29,6 +29,7 @@ Notes: 1. The values in columns named after "reference" are the results got by testing the checkpoint released on the original repo and codes, using the same dataset with ours. +2. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). 
The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). diff --git a/configs/recognition/x3d/README_zh-CN.md b/configs/recognition/x3d/README_zh-CN.md index c2e47e549b..3b09e5276b 100644 --- a/configs/recognition/x3d/README_zh-CN.md +++ b/configs/recognition/x3d/README_zh-CN.md @@ -29,6 +29,7 @@ 注: 1. 参考代码的结果是通过使用相同的数据和原来的代码库所提供的模型进行测试得到的。 +2. 我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分 diff --git a/configs/recognition_audio/resnet/README.md b/configs/recognition_audio/resnet/README.md index 92c985ac19..9acdb80591 100644 --- a/configs/recognition_audio/resnet/README.md +++ b/configs/recognition_audio/resnet/README.md @@ -27,8 +27,8 @@ Notes: 1. The **gpus** indicates the number of gpus we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, e.g., lr=0.01 for 4 GPUs x 2 video/gpu and lr=0.08 for 16 GPUs x 4 video/gpu. -2. The **inference_time** is got by this [benchmark script](/tools/analysis/benchmark.py), where we use the sampling frames strategy of the test setting and only care about the model inference time, - not including the IO time and pre-processing time. For each setting, we use 1 gpu and set batch size (videos per gpu) to 1 to calculate the inference time. +2. The **inference_time** is got by this [benchmark script](/tools/analysis/benchmark.py), where we use the sampling frames strategy of the test setting and only care about the model inference time, not including the IO time and pre-processing time. For each setting, we use 1 gpu and set batch size (videos per gpu) to 1 to calculate the inference time. +3. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. For more details on data preparation, you can refer to ``Prepare audio`` in [Data Preparation](/docs/data_preparation.md). 
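The notes in these READMEs repeatedly invoke the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), with lr=0.01 at 4 GPUs x 2 videos/gpu and lr=0.08 at 16 GPUs x 4 videos/gpu. Below is a minimal sketch (not part of this patch) of that rule, assuming the learning rate scales linearly with the total batch size from the reference point of 8 videos implied by those two examples:

```python
def scaled_lr(num_gpus, videos_per_gpu, base_lr=0.01, base_batch_size=8):
    """Linearly scale the learning rate with the total batch size.

    The reference point (lr=0.01 at 4 GPUs x 2 videos/gpu, i.e. a total
    batch size of 8) is taken from the examples in the notes above.
    """
    return base_lr * (num_gpus * videos_per_gpu) / base_batch_size


# the two examples quoted in the notes
assert scaled_lr(4, 2) == 0.01
assert scaled_lr(16, 4) == 0.08
```
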
diff --git a/configs/recognition_audio/resnet/README_zh-CN.md b/configs/recognition_audio/resnet/README_zh-CN.md index 1d8ce01316..bf1188ff46 100644 --- a/configs/recognition_audio/resnet/README_zh-CN.md +++ b/configs/recognition_audio/resnet/README_zh-CN.md @@ -29,6 +29,7 @@ 如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。 2. 这里的 **推理时间** 是根据 [基准测试脚本](/tools/analysis/benchmark.py) 获得的,采用测试时的采帧策略,且只考虑模型的推理时间, 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 +3. 我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的准备音频部分。 From 3f5af8e7d71b79c9fc55070f2ed4ecc418be1a5d Mon Sep 17 00:00:00 2001 From: dreamerlin <528557675@qq.com> Date: Thu, 10 Jun 2021 17:06:46 +0800 Subject: [PATCH 151/414] update model --- docs/api.rst | 2 +- docs_zh_CN/api.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/api.rst b/docs/api.rst index 364003b59b..4f2bcab21e 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -21,7 +21,7 @@ evaluation lr ^^ -.. automodule:: mmaction.core.lr +.. automodule:: mmaction.core.scheduler :members: mmaction.localization diff --git a/docs_zh_CN/api.rst b/docs_zh_CN/api.rst index 364003b59b..4f2bcab21e 100644 --- a/docs_zh_CN/api.rst +++ b/docs_zh_CN/api.rst @@ -21,7 +21,7 @@ evaluation lr ^^ -.. automodule:: mmaction.core.lr +.. automodule:: mmaction.core.scheduler :members: mmaction.localization From fd5531d4ece5dfc03c7da5bed9519fd46ca0585d Mon Sep 17 00:00:00 2001 From: Kenny Date: Thu, 10 Jun 2021 19:39:12 +0800 Subject: [PATCH 152/414] upload README --- configs/recognition/tsn/README.md | 2 ++ configs/recognition/tsn/README_zh-CN.md | 2 ++ 2 files changed, 4 insertions(+) diff --git a/configs/recognition/tsn/README.md b/configs/recognition/tsn/README.md index 0edd90630f..9f1f6388cc 100644 --- a/configs/recognition/tsn/README.md +++ b/configs/recognition/tsn/README.md @@ -75,6 +75,8 @@ It's possible and convenient to use a 3rd-party backbone for TSN under the frame | [tsn_dense161_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | 8x2 | Densenet-161 [[TorchVision](https://github.com/pytorch/vision/)] | ImageNet | 72.78 | 90.75 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb-cbe85332.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.json) | | [tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | 8 | Swin Transformer Base [[timm](https://github.com/rwightman/pytorch-image-models)] | ImageNet | 77.51 | 92.92 | 
[ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb-805380f6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.json) |
+
+1. Note that some backbones in TIMM are not supported due to multiple reasons. Please refer to [PR #880](https://github.com/open-mmlab/mmaction2/pull/880) for details.
+
 ### Kinetics-400 Data Benchmark (8-gpus, ResNet50, ImageNet pretrain; 3 segments)

 In data benchmark, we compare:
diff --git a/configs/recognition/tsn/README_zh-CN.md b/configs/recognition/tsn/README_zh-CN.md
index 63fe85e4c7..40c14b28a3 100644
--- a/configs/recognition/tsn/README_zh-CN.md
+++ b/configs/recognition/tsn/README_zh-CN.md
@@ -75,6 +75,8 @@
 | [tsn_dense161_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.py) | 短边 320 | 8x2 | Densenet-161 [[TorchVision](https://github.com/pytorch/vision/)] | ImageNet | 72.78 | 90.75 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb-cbe85332.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.json) |
 | [tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py) | 短边 320 | 8 | Swin Transformer Base [[timm](https://github.com/rwightman/pytorch-image-models)] | ImageNet | 77.51 | 92.92 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb-805380f6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.json) |
+
+1. 
由于多种原因,TIMM 中的一些模型未能收到支持,详情请参考 [PR #880](https://github.com/open-mmlab/mmaction2/pull/880)。 + ### Kinetics-400 数据基准测试 (8 块 GPU, ResNet50, ImageNet 预训练; 3 个视频段) 在数据基准测试中,比较: From 3488f9032e80d2b7187eb66bf35a1e776f838008 Mon Sep 17 00:00:00 2001 From: irvingzhang0512 Date: Sat, 12 Jun 2021 19:23:08 +0800 Subject: [PATCH 153/414] Support number counting for flow-wise filename template (#922) --- ...sn_r50_320p_1x1x3_110e_kinetics400_flow.py | 4 ++ ...sn_r50_320p_1x1x8_110e_kinetics400_flow.py | 4 ++ ...0_320p_1x1x8_150e_activitynet_clip_flow.py | 1 + ..._320p_1x1x8_150e_activitynet_video_flow.py | 1 + demo/demo_gradcam.py | 1 - mmaction/apis/inference.py | 18 ++++- tests/test_runtime/test_inference.py | 67 ++++++++++++++++++- 7 files changed, 92 insertions(+), 4 deletions(-) diff --git a/configs/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow.py b/configs/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow.py index 6e31858989..ac2a41f5c6 100644 --- a/configs/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow.py +++ b/configs/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow.py @@ -1,5 +1,9 @@ _base_ = ['../../_base_/models/tsn_r50.py', '../../_base_/default_runtime.py'] +# model settings +# ``in_channels`` should be 2 * clip_len +model = dict(backbone=dict(in_channels=10)) + # dataset settings dataset_type = 'RawframeDataset' data_root = 'data/kinetics400/rawframes_train_320p' diff --git a/configs/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow.py b/configs/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow.py index 395a312b17..24b84659ed 100644 --- a/configs/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow.py +++ b/configs/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow.py @@ -1,5 +1,9 @@ _base_ = ['../../_base_/models/tsn_r50.py', '../../_base_/default_runtime.py'] +# model settings +# ``in_channels`` should be 2 * clip_len +model = dict(backbone=dict(in_channels=10)) + # dataset settings dataset_type = 'RawframeDataset' data_root = 'data/kinetics400/rawframes_train_320p' diff --git a/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow.py b/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow.py index 486ede41fb..9dd380e517 100644 --- a/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow.py +++ b/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow.py @@ -1,6 +1,7 @@ _base_ = ['../../_base_/models/tsn_r50.py', '../../_base_/default_runtime.py'] # model settings +# ``in_channels`` should be 2 * clip_len model = dict( backbone=dict(in_channels=10), cls_head=dict(num_classes=200, dropout_ratio=0.8)) diff --git a/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow.py b/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow.py index 8aa2e1f786..823cf2d39c 100644 --- a/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow.py +++ b/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow.py @@ -1,6 +1,7 @@ _base_ = ['../../_base_/models/tsn_r50.py', '../../_base_/default_runtime.py'] # model settings +# ``in_channels`` should be 2 * clip_len model = dict( backbone=dict(in_channels=10), cls_head=dict(num_classes=200, dropout_ratio=0.8)) diff --git a/demo/demo_gradcam.py b/demo/demo_gradcam.py index 0c6860728d..d24cc9dca3 100644 --- a/demo/demo_gradcam.py +++ b/demo/demo_gradcam.py @@ -87,7 +87,6 @@ def build_inputs(model, video_path, use_frames=False): data = dict( frame_dir=video_path, 
total_frames=len(os.listdir(video_path)),
-            # assuming files in ``video_path`` are all named with ``filename_tmpl``  # noqa: E501
             label=-1,
             start_index=start_index,
             filename_tmpl=filename_tmpl,
diff --git a/mmaction/apis/inference.py b/mmaction/apis/inference.py
index 008e4e3d31..e31685b7f5 100644
--- a/mmaction/apis/inference.py
+++ b/mmaction/apis/inference.py
@@ -1,5 +1,6 @@
 import os
 import os.path as osp
+import re
 from operator import itemgetter

 import mmcv
@@ -106,10 +107,23 @@ def inference_recognizer(model,
     filename_tmpl = cfg.data.test.get('filename_tmpl', 'img_{:05}.jpg')
     modality = cfg.data.test.get('modality', 'RGB')
     start_index = cfg.data.test.get('start_index', 1)
+
+    # count the number of frames that match the format of `filename_tmpl`
+    # RGB pattern example: img_{:05}.jpg -> ^img_\d+.jpg$
+    # Flow pattern example: {}_{:05d}.jpg -> ^x_\d+.jpg$
+    pattern = f'^{filename_tmpl}$'
+    if modality == 'Flow':
+        pattern = pattern.replace('{}', 'x')
+    pattern = pattern.replace(
+        pattern[pattern.find('{'):pattern.find('}') + 1], '\\d+')
+    total_frames = len(
+        list(
+            filter(lambda x: re.match(pattern, x) is not None,
+                   os.listdir(video_path))))
+
     data = dict(
         frame_dir=video_path,
-        total_frames=len(os.listdir(video_path)),
-        # assuming files in ``video_path`` are all named with ``filename_tmpl``  # noqa: E501
+        total_frames=total_frames,
         label=-1,
         start_index=start_index,
         filename_tmpl=filename_tmpl,
diff --git a/tests/test_runtime/test_inference.py b/tests/test_runtime/test_inference.py
index e0f158c7fe..f378b75d5f 100644
--- a/tests/test_runtime/test_inference.py
+++ b/tests/test_runtime/test_inference.py
@@ -8,8 +8,10 @@

 video_config_file = 'configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py'  # noqa: E501
 frame_config_file = 'configs/recognition/tsn/tsn_r50_inference_1x1x3_100e_kinetics400_rgb.py'  # noqa: E501
+flow_frame_config_file = 'configs/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow.py'  # noqa: E501
 label_path = 'demo/label_map_k400.txt'
 video_path = 'demo/demo.mp4'
+frames_path = 'tests/data/imgs'


 def test_init_recognizer():
@@ -43,7 +45,7 @@ def test_init_recognizer():
     assert model.cfg.model.backbone.pretrained is None


-def test_inference_recognizer():
+def test_video_inference_recognizer():
     if torch.cuda.is_available():
         device = 'cuda:0'
     else:
@@ -114,3 +116,66 @@ def test_inference_recognizer():
     assert feat['backbone'][0].size() == (1, 2048, 4, 8, 8)
     assert feat['backbone'][1].size() == (1, 256, 32, 8, 8)
     assert feat['cls_head'].size() == (1, 400)
+
+
+def test_frames_inference_recognizer():
+    if torch.cuda.is_available():
+        device = 'cuda:0'
+    else:
+        device = 'cpu'
+    rgb_model = init_recognizer(
+        frame_config_file, None, device, use_frames=True)
+    flow_model = init_recognizer(
+        flow_frame_config_file, None, device, use_frames=True)
+
+    with pytest.raises(RuntimeError):
+        # video path doesn't exist
+        inference_recognizer(rgb_model, 'missing_path', label_path)
+
+    with pytest.raises(RuntimeError):
+        # ``video_path`` should be consistent with the ``use_frames``
+        inference_recognizer(
+            flow_model, frames_path, label_path, use_frames=False)
+
+    for ops in rgb_model.cfg.data.test.pipeline:
+        if ops['type'] in ('TenCrop', 'ThreeCrop'):
+            # Use CenterCrop to reduce memory in order to pass CI
+            ops['type'] = 'CenterCrop'
+            ops['crop_size'] = 224
+    for ops in flow_model.cfg.data.test.pipeline:
+        if ops['type'] in ('TenCrop', 'ThreeCrop'):
+            # Use CenterCrop to reduce memory in order to pass CI
+            ops['type'] = 'CenterCrop'
ops['crop_size'] = 224 + + top5_label = inference_recognizer( + rgb_model, frames_path, label_path, use_frames=True) + scores = [item[1] for item in top5_label] + assert len(top5_label) == 5 + assert scores == sorted(scores, reverse=True) + + _, feat = inference_recognizer( + flow_model, + frames_path, + label_path, + outputs=('backbone', 'cls_head'), + as_tensor=False, + use_frames=True) + assert isinstance(feat, dict) + assert 'backbone' in feat and 'cls_head' in feat + assert isinstance(feat['backbone'], np.ndarray) + assert isinstance(feat['cls_head'], np.ndarray) + assert feat['backbone'].shape == (25, 2048, 7, 7) + assert feat['cls_head'].shape == (1, 400) + + _, feat = inference_recognizer( + rgb_model, + frames_path, + label_path, + use_frames=True, + outputs=('backbone.layer3', 'backbone.layer3.1.conv1')) + assert 'backbone.layer3.1.conv1' in feat and 'backbone.layer3' in feat + assert isinstance(feat['backbone.layer3.1.conv1'], torch.Tensor) + assert isinstance(feat['backbone.layer3'], torch.Tensor) + assert feat['backbone.layer3'].size() == (25, 1024, 14, 14) + assert feat['backbone.layer3.1.conv1'].size() == (25, 256, 14, 14) From e1e6147037263398fc745b9bd003b544e7acf0e1 Mon Sep 17 00:00:00 2001 From: dreamerlin <528557675@qq.com> Date: Sat, 12 Jun 2021 22:35:52 +0800 Subject: [PATCH 154/414] Fix docstring for 3d flate --- mmaction/models/backbones/resnet3d.py | 6 +++--- mmaction/models/backbones/resnet3d_csn.py | 2 +- mmaction/models/backbones/resnet3d_slowfast.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/mmaction/models/backbones/resnet3d.py b/mmaction/models/backbones/resnet3d.py index 063b21d959..2f0eea8d72 100644 --- a/mmaction/models/backbones/resnet3d.py +++ b/mmaction/models/backbones/resnet3d.py @@ -362,7 +362,7 @@ class ResNet3d(nn.Module): not freezing any parameters. Default: -1. inflate (Sequence[int]): Inflate Dims of each block. Default: (1, 1, 1, 1). - inflate_style (str): ``3x1x1`` or ``1x1x1``. which determines the + inflate_style (str): ``3x1x1`` or ``3x3x3``. which determines the kernel sizes and padding strides for conv1 and conv2 in each block. Default: '3x1x1'. conv_cfg (dict): Config for conv layers. required keys are ``type`` @@ -544,7 +544,7 @@ def make_res_layer(block, Default: ``pytorch``. inflate (int | Sequence[int]): Determine whether to inflate for each block. Default: 1. - inflate_style (str): ``3x1x1`` or ``1x1x1``. which determines + inflate_style (str): ``3x1x1`` or ``3x3x3``. which determines the kernel sizes and padding strides for conv1 and conv2 in each block. Default: '3x1x1'. non_local (int | Sequence[int]): Determine whether to apply @@ -879,7 +879,7 @@ class ResNet3dLayer(nn.Module): the first 1x1 conv layer. Default: 'pytorch'. all_frozen (bool): Frozen all modules in the layer. Default: False. inflate (int): Inflate Dims of each block. Default: 1. - inflate_style (str): ``3x1x1`` or ``1x1x1``. which determines the + inflate_style (str): ``3x1x1`` or ``3x3x3``. which determines the kernel sizes and padding strides for conv1 and conv2 in each block. Default: '3x1x1'. conv_cfg (dict): Config for conv layers. required keys are ``type`` diff --git a/mmaction/models/backbones/resnet3d_csn.py b/mmaction/models/backbones/resnet3d_csn.py index 4539dec01e..5d041d5450 100644 --- a/mmaction/models/backbones/resnet3d_csn.py +++ b/mmaction/models/backbones/resnet3d_csn.py @@ -84,7 +84,7 @@ class ResNet3dCSN(ResNet3d): norm_cfg (dict): Config for norm layers. required keys are `type` and `requires_grad`. 
Default: dict(type='BN3d', requires_grad=True, eps=1e-3).
-    inflate_style (str): `3x1x1` or `1x1x1`. which determines the kernel
+    inflate_style (str): `3x1x1` or `3x3x3`. which determines the kernel
         sizes and padding strides for conv1 and conv2 in each block.
         Default: '3x3x3'.
     bottleneck_mode (str): Determine which ways to factorize a 3D
diff --git a/mmaction/models/backbones/resnet3d_slowfast.py b/mmaction/models/backbones/resnet3d_slowfast.py
index 45e9d5a7da..be1ea1a2b2 100644
--- a/mmaction/models/backbones/resnet3d_slowfast.py
+++ b/mmaction/models/backbones/resnet3d_slowfast.py
@@ -120,7 +120,7 @@ def make_res_layer(self,
             Default: ``pytorch``.
         inflate (int | Sequence[int]): Determine whether to inflate for each
             block. Default: 1.
-        inflate_style (str): ``3x1x1`` or ``1x1x1``. which determines
+        inflate_style (str): ``3x1x1`` or ``3x3x3``. which determines
             the kernel sizes and padding strides for conv1 and conv2
             in each block. Default: ``3x1x1``.
         non_local (int | Sequence[int]): Determine whether to apply

From f1df7504a795dacd87b4bf1f71e5ee46e44a3ada Mon Sep 17 00:00:00 2001
From: Jintao Lin <528557675@qq.com>
Date: Sat, 12 Jun 2021 22:46:54 +0800
Subject: [PATCH 155/414] [Improvement] Support --cfg-options for demos (#911)

* support --cfg-options for demos

* provide description for tricks to modify configs through script arguments

* fix

* Update README.md
---
 demo/README.md                         | 28 +++++++++++++++++++++++---
 demo/demo.py                           | 18 +++++++++++++----
 demo/demo_gradcam.py                   | 17 ++++++++++++----
 demo/demo_spatiotemporal_det.py        | 13 +++++++++++-
 demo/long_video_demo.py                | 17 ++++++++++++++--
 demo/webcam_demo.py                    | 17 ++++++++++++++--
 demo/webcam_demo_spatiotemporal_det.py | 15 ++++++++++++--
 7 files changed, 107 insertions(+), 18 deletions(-)

diff --git a/demo/README.md b/demo/README.md
index ce3c6fdbc4..f51f15a598 100644
--- a/demo/README.md
+++ b/demo/README.md
@@ -1,7 +1,8 @@
 # Demo
 
-## Demo link
+## Outline
 
+- [Modify configs through script arguments](#modify-configs-through-script-arguments): Tricks to directly modify configs through script arguments.
 - [Video demo](#video-demo): A demo script to predict the recognition result using a single video.
 - [SpatioTemporal Action Detection Video Demo](#spatiotemporal-action-detection-video-demo): A demo script to predict the SpatioTemporal Action Detection result using a single video.
 - [Video GradCAM Demo](#video-gradcam-demo): A demo script to visualize GradCAM results using a single video.
@@ -9,6 +10,27 @@
 - [Long Video demo](#long-video-demo): a demo script to predict different labels using a single long video.
+- [SpatioTemporal Action Detection Webcam Demo](#spatiotemporal-action-detection-webcam-demo): A demo script to implement real-time spatio-temporal action detection from a web camera.
 
+## Modify configs through script arguments
+
+When running demos using our provided scripts, you may specify `--cfg-options` to in-place modify the config.
+
+- Update config keys of dict.
+
+  The config options can be specified following the order of the dict keys in the original config.
+  For example, `--cfg-options model.backbone.norm_eval=False` changes all the BN modules in model backbones to `train` mode.
+
+- Update keys inside a list of configs.
+
+  Some config dicts are composed as a list in your config. For example, the training pipeline `data.train.pipeline` is normally a list
+  e.g. `[dict(type='SampleFrames'), ...]`. 
If you want to change `'SampleFrames'` to `'DenseSampleFrames'` in the pipeline,
+  you may specify `--cfg-options data.train.pipeline.0.type=DenseSampleFrames`.
+
+- Update values of list/tuples.
+
+  If the value to be updated is a list or a tuple: for example, the config file normally sets `workflow=[('train', 1)]`. If you want to
+  change this key, you may specify `--cfg-options workflow="[(train,1),(val,1)]"`. Note that the quotation mark \" is necessary to
+  support list/tuple data types, and that **NO** white space is allowed inside the quotation marks in the specified value.
+
 ## Video demo
 
 We provide a demo script to predict the recognition result using a single video. In order to get prediction results in the range `[0, 1]`, make sure to set `model['test_cfg'] = dict(average_clips='prob')` in the config file.
 
@@ -267,8 +289,8 @@ or use checkpoint url from `configs/` to directly load corresponding checkpoint,
 
 **Note:** Considering the efficiency difference for users' hardware, some modifications might be done to suit the case. Users can change:
 
-1). `SampleFrames` step (especially the number of `clip_len` and `num_clips`) of `test_pipeline` in the config file.
-2). Change to the suitable Crop methods like `TenCrop`, `ThreeCrop`, `CenterCrop`, etc. in `test_pipeline` of the config file.
+1). `SampleFrames` step (especially the number of `clip_len` and `num_clips`) of `test_pipeline` in the config file, like `--cfg-options data.test.pipeline.0.num_clips=3`.
+2). Change to the suitable Crop methods like `TenCrop`, `ThreeCrop`, `CenterCrop`, etc. in `test_pipeline` of the config file, like `--cfg-options data.test.pipeline.4.type=CenterCrop`.
 3). Change the number of `--average-size`. The smaller, the faster.
 
 ## Long video demo
 
diff --git a/demo/demo.py b/demo/demo.py
index bc9adbe4db..32b34ea78c 100644
--- a/demo/demo.py
+++ b/demo/demo.py
@@ -3,6 +3,7 @@
 import os.path as osp
 
 import torch
+from mmcv import Config, DictAction
 
 from mmaction.apis import inference_recognizer, init_recognizer
 
@@ -13,6 +14,14 @@ def parse_args():
     parser.add_argument('checkpoint', help='checkpoint file/url')
     parser.add_argument('video', help='video file/url or rawframes directory')
     parser.add_argument('label', help='label file')
+    parser.add_argument(
+        '--cfg-options',
+        nargs='+',
+        action=DictAction,
+        default={},
+        help='override some settings in the used config, the key-value pair '
+        'in xxx=yyy format will be merged into config file. For example, '
+        "'--cfg-options model.backbone.depth=18 model.backbone.with_cp=True'")
     parser.add_argument(
         '--use-frames',
         default=False,
@@ -128,12 +137,13 @@ def main():
     args = parse_args()
     # assign the desired device.
     device = torch.device(args.device)
+
+    cfg = Config.fromfile(args.config)
+    cfg.merge_from_dict(args.cfg_options)
+
     # build the recognizer from a config file and checkpoint file/url
     model = init_recognizer(
-        args.config,
-        args.checkpoint,
-        device=device,
-        use_frames=args.use_frames)
+        cfg, args.checkpoint, device=device, use_frames=args.use_frames)
 
     # e.g. 
use ('backbone', ) to return backbone feature output_layer_names = None diff --git a/demo/demo_gradcam.py b/demo/demo_gradcam.py index d24cc9dca3..860cce4970 100644 --- a/demo/demo_gradcam.py +++ b/demo/demo_gradcam.py @@ -5,6 +5,7 @@ import mmcv import numpy as np import torch +from mmcv import Config, DictAction from mmcv.parallel import collate, scatter from mmaction.apis import init_recognizer @@ -32,6 +33,14 @@ def parse_args(): help='GradCAM target layer name') parser.add_argument('--out-filename', default=None, help='output filename') parser.add_argument('--fps', default=5, type=int) + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + default={}, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. For example, ' + "'--cfg-options model.backbone.depth=18 model.backbone.with_cp=True'") parser.add_argument( '--target-resolution', nargs=2, @@ -160,12 +169,12 @@ def main(): # assign the desired device. device = torch.device(args.device) + cfg = Config.fromfile(args.config) + cfg.merge_from_dict(args.cfg_options) + # build the recognizer from a config file and checkpoint file/url model = init_recognizer( - args.config, - args.checkpoint, - device=device, - use_frames=args.use_frames) + cfg, args.checkpoint, device=device, use_frames=args.use_frames) inputs = build_inputs(model, args.video, use_frames=args.use_frames) gradcam = GradCAM(model, args.target_layer_name) diff --git a/demo/demo_spatiotemporal_det.py b/demo/demo_spatiotemporal_det.py index 3550bcc1b6..80ea458ada 100644 --- a/demo/demo_spatiotemporal_det.py +++ b/demo/demo_spatiotemporal_det.py @@ -8,6 +8,7 @@ import mmcv import numpy as np import torch +from mmcv import DictAction from mmcv.runner import load_checkpoint from tqdm import tqdm @@ -171,6 +172,14 @@ def parse_args(): default=6, type=int, help='the fps of demo video output') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + default={}, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. For example, ' + "'--cfg-options model.backbone.depth=18 model.backbone.with_cp=True'") args = parser.parse_args() return args @@ -288,7 +297,9 @@ def main(): # Get clip_len, frame_interval and calculate center index of each clip config = mmcv.Config.fromfile(args.config) - val_pipeline = config['val_pipeline'] + config.merge_from_dict(args.cfg_options) + val_pipeline = config.data.val.pipeline + sampler = [x for x in val_pipeline if x['type'] == 'SampleAVAFrames'][0] clip_len, frame_interval = sampler['clip_len'], sampler['frame_interval'] window_size = clip_len * frame_interval diff --git a/demo/long_video_demo.py b/demo/long_video_demo.py index f0e21fc2e4..19620988a6 100644 --- a/demo/long_video_demo.py +++ b/demo/long_video_demo.py @@ -8,6 +8,7 @@ import mmcv import numpy as np import torch +from mmcv import Config, DictAction from mmcv.parallel import collate, scatter from mmaction.apis import init_recognizer @@ -55,6 +56,14 @@ def parse_args(): 'which you sample frames, which equals to ' 'clip_len x frame_interval), if set as 0, the ' 'prediction stride is 1')) + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + default={}, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. 
For example, ' + "'--cfg-options model.backbone.depth=18 model.backbone.with_cp=True'") args = parser.parse_args() return args @@ -201,7 +210,11 @@ def main(): args = parse_args() args.device = torch.device(args.device) - model = init_recognizer(args.config, args.checkpoint, device=args.device) + + cfg = Config.fromfile(args.config) + cfg.merge_from_dict(args.cfg_options) + + model = init_recognizer(cfg, args.checkpoint, device=args.device) data = dict(img_shape=None, modality='RGB', label=-1) with open(args.label, 'r') as f: label = [line.strip() for line in f] @@ -209,7 +222,7 @@ def main(): # prepare test pipeline from non-camera pipeline cfg = model.cfg sample_length = 0 - pipeline = cfg.test_pipeline + pipeline = cfg.data.test.pipeline pipeline_ = pipeline.copy() for step in pipeline: if 'SampleFrames' in step['type']: diff --git a/demo/webcam_demo.py b/demo/webcam_demo.py index 10444e4fed..7df09044b8 100644 --- a/demo/webcam_demo.py +++ b/demo/webcam_demo.py @@ -7,6 +7,7 @@ import cv2 import numpy as np import torch +from mmcv import Config, DictAction from mmcv.parallel import collate, scatter from mmaction.apis import init_recognizer @@ -54,6 +55,14 @@ def parse_args(): type=int, default=4, help='Set upper bound FPS value of model inference') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + default={}, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. For example, ' + "'--cfg-options model.backbone.depth=18 model.backbone.with_cp=True'") args = parser.parse_args() assert args.drawing_fps >= 0 and args.inference_fps >= 0, \ 'upper bound FPS value of drawing and inference should be set as ' \ @@ -168,7 +177,11 @@ def main(): inference_fps = args.inference_fps device = torch.device(args.device) - model = init_recognizer(args.config, args.checkpoint, device=device) + + cfg = Config.fromfile(args.config) + cfg.merge_from_dict(args.cfg_options) + + model = init_recognizer(cfg, args.checkpoint, device=device) camera = cv2.VideoCapture(args.camera_id) data = dict(img_shape=None, modality='RGB', label=-1) @@ -178,7 +191,7 @@ def main(): # prepare test pipeline from non-camera pipeline cfg = model.cfg sample_length = 0 - pipeline = cfg.test_pipeline + pipeline = cfg.data.test.pipeline pipeline_ = pipeline.copy() for step in pipeline: if 'SampleFrames' in step['type']: diff --git a/demo/webcam_demo_spatiotemporal_det.py b/demo/webcam_demo_spatiotemporal_det.py index 0626a209a7..e060214d62 100644 --- a/demo/webcam_demo_spatiotemporal_det.py +++ b/demo/webcam_demo_spatiotemporal_det.py @@ -16,6 +16,7 @@ import mmcv import numpy as np import torch +from mmcv import Config, DictAction from mmcv.runner import load_checkpoint from mmaction.models import build_detector @@ -118,6 +119,14 @@ def parse_args(): default=8, type=int, help='Number of draw frames per clip.') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + default={}, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. 
For example, ' + "'--cfg-options model.backbone.depth=18 model.backbone.with_cp=True'") args = parser.parse_args() return args @@ -345,7 +354,7 @@ def __init__(self, show=True, stdet_input_shortside=256): # stdet sampling strategy - val_pipeline = config['val_pipeline'] + val_pipeline = config.data.val.pipeline sampler = [x for x in val_pipeline if x['type'] == 'SampleAVAFrames'][0] clip_len, frame_interval = sampler['clip_len'], sampler[ @@ -769,7 +778,9 @@ def main(args): args.device, args.det_score_thr) # init action detector - config = mmcv.Config.fromfile(args.config) + config = Config.fromfile(args.config) + config.merge_from_dict(args.cfg_options) + try: # In our spatiotemporal detection demo, different actions should have # the same number of bboxes. From aa26715d5c6a0991af4ead5b5ea03c46dd65dcee Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Sun, 13 Jun 2021 19:52:48 +0800 Subject: [PATCH 156/414] install mim from source code (#926) --- docs/install.md | 2 +- docs_zh_CN/install.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/install.md b/docs/install.md index 4866884dee..9ba611d3bb 100644 --- a/docs/install.md +++ b/docs/install.md @@ -86,7 +86,7 @@ If you build PyTorch from source instead of installing the prebuilt package, you We recommend you to install MMAction2 with [MIM](https://github.com/open-mmlab/mim). ```shell -pip install openmim +pip install git+https://github.com/open-mmlab/mim.git mim install mmaction2 ``` diff --git a/docs_zh_CN/install.md b/docs_zh_CN/install.md index 3a129106b5..f5883403e5 100644 --- a/docs_zh_CN/install.md +++ b/docs_zh_CN/install.md @@ -86,7 +86,7 @@ conda install pytorch=1.3.1 cudatoolkit=9.2 torchvision=0.4.2 -c pytorch 这里推荐用户使用 [MIM](https://github.com/open-mmlab/mim) 安装 MMAction2。 ```shell -pip install openmim +pip install git+https://github.com/open-mmlab/mim.git mim install mmaction2 ``` From 1b397782e20030e4de3f8644f246f93e3f06c195 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Wed, 16 Jun 2021 16:18:07 +0800 Subject: [PATCH 157/414] [Docs] Add memcached-related FAQ (#931) * memcached faq * Update faq.md --- docs/faq.md | 42 ++++++++++++++++++++++++++++++++++-------- 1 file changed, 34 insertions(+), 8 deletions(-) diff --git a/docs/faq.md b/docs/faq.md index 0272a0e694..8e14cebfb5 100644 --- a/docs/faq.md +++ b/docs/faq.md @@ -1,6 +1,15 @@ # FAQ +## Outline + We list some common issues faced by many users and their corresponding solutions here. + +- [Installation](#installation) +- [Data](#data) +- [Training](#training) +- [Testing](#testing) +- [Deploying](#deploying) + Feel free to enrich the list if you find any frequent issues and have ways to help others to solve them. If the contents here do not cover your issue, please create an issue using the [provided templates](/.github/ISSUE_TEMPLATE/error-report.md) and make sure you fill in all required information in the template. @@ -8,8 +17,8 @@ If the contents here do not cover your issue, please create an issue using the [ - **"No module named 'mmcv.ops'"; "No module named 'mmcv._ext'"** - 1. Uninstall existing mmcv in the environment using `pip uninstall mmcv`. - 2. Install mmcv-full following the [installation instruction](https://mmcv.readthedocs.io/en/latest/#installation). + 1. Uninstall existing mmcv in the environment using `pip uninstall mmcv` + 2. 
Install mmcv-full following the [installation instruction](https://mmcv.readthedocs.io/en/latest/#installation) - **"OSError: MoviePy Error: creation of None failed because of the following error"** @@ -45,7 +54,7 @@ If the contents here do not cover your issue, please create an issue using the [ ## Training -- **How to just use trained recognizer models for backbone pre-training ?** +- **How to just use trained recognizer models for backbone pre-training?** Refer to [Use Pre-Trained Model](https://github.com/open-mmlab/mmaction2/blob/master/docs/tutorials/2_finetune.md#use-pre-trained-model), in order to use the pre-trained model for the whole network, the new config adds the link of pre-trained models in the `load_from`. @@ -53,7 +62,7 @@ If the contents here do not cover your issue, please create an issue using the [ And to use backbone for pre-training, you can change `pretrained` value in the backbone dict of config files to the checkpoint path / url. When training, the unexpected keys will be ignored. -- **How to visualize the training accuracy/loss curves in real-time ?** +- **How to visualize the training accuracy/loss curves in real-time?** Use `TensorboardLoggerHook` in `log_config` like @@ -71,24 +80,41 @@ If the contents here do not cover your issue, please create an issue using the [ train_dataloader=dict(drop_last=True) ``` -- **How to fix stages of backbone when finetuning a model ?** +- **How to fix stages of backbone when finetuning a model?** You can refer to [`def _freeze_stages()`](https://github.com/open-mmlab/mmaction2/blob/0149a0e8c1e0380955db61680c0006626fd008e9/mmaction/models/backbones/x3d.py#L458) and [`frozen_stages`](https://github.com/open-mmlab/mmaction2/blob/0149a0e8c1e0380955db61680c0006626fd008e9/mmaction/models/backbones/x3d.py#L183-L184), reminding to set `find_unused_parameters = True` in config files for distributed training or testing. Actually, users can set `frozen_stages` to freeze stages in backbones except C3D model, since all backbones inheriting from `ResNet` and `ResNet3D` support the inner function `_freeze_stages()`. +- **How to set memcached setting in config files?** + + In MMAction2, you can pass memcached kwargs to `class DecordInit` for video dataset or `RawFrameDecode` for rawframes dataset. + For more details, you can refer to [`class FileClient`](https://github.com/open-mmlab/mmcv/blob/master/mmcv/fileio/file_client.py) in MMCV for more details. + + Here is an example to use memcached for rawframes dataset: + + ```python + mc_cfg = dict(server_list_cfg='server_list_cfg', client_cfg='client_cfg', sys_path='sys_path') + + train_pipeline = [ + ... + dict(type='RawFrameDecode', io_backend='memcached', **mc_cfg), + ... + ] + ``` + ## Testing -- **How to make predicted score normalized by softmax within [0, 1] ?** +- **How to make predicted score normalized by softmax within [0, 1]?** change this in the config, make `model['test_cfg'] = dict(average_clips='prob')`. -- **What if the model is too large and the GPU memory can not fit even only one testing sample ?** +- **What if the model is too large and the GPU memory can not fit even only one testing sample?** By default, the 3d models are tested with 10clips x 3crops, which are 30 views in total. For extremely large models, the GPU memory can not fit even only one testing sample (cuz there are 30 views). To handle this, you can set `max_testing_views=n` in `model['test_cfg']` of the config file. If so, n views will be used as a batch during forwarding to save GPU memory used. 
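As a concrete illustration of the two testing answers above, the fragment below sketches a combined `test_cfg` block. It is illustrative only; `max_testing_views=4` is an assumed example value, to be tuned to the available GPU memory:

```python
# Illustrative config fragment for the two FAQ answers above:
# `average_clips='prob'` softmax-normalizes clip scores into [0, 1], and
# `max_testing_views=4` (an assumed value) forwards at most 4 of the test
# views per batch to bound GPU memory usage.
model = dict(
    type='Recognizer3D',
    test_cfg=dict(
        average_clips='prob',
        max_testing_views=4))
```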
-- **How to show test results ?** +- **How to show test results?** During testing, we can use the command `--out xxx.json/pkl/yaml` to output result files for checking. The testing output has exactly the same order as the test dataset. Besides, we provide an analysis tool for evaluating a model using the output result files in [`tools/analysis/eval_metric.py`](/tools/analysis/eval_metric.py) From bc3580a29cb86ef01c22f7281c4056a61915d3a2 Mon Sep 17 00:00:00 2001 From: Rejnald Lleshi <46654505+rlleshi@users.noreply.github.com> Date: Thu, 17 Jun 2021 04:38:25 +0200 Subject: [PATCH 158/414] Add topk acc for testing to docs (#933) * add topk acc for testing to docs * Update 1_config.md Co-authored-by: Jintao Lin <528557675@qq.com> --- docs/tutorials/1_config.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/tutorials/1_config.md b/docs/tutorials/1_config.md index 67deb42b46..af4e27139e 100644 --- a/docs/tutorials/1_config.md +++ b/docs/tutorials/1_config.md @@ -414,8 +414,10 @@ which is convenient to conduct various experiments. evaluation = dict( # Config of evaluation during training interval=5, # Interval to perform evaluation metrics=['top_k_accuracy', 'mean_class_accuracy'], # Metrics to be performed - metric_options=dict(top_k_accuracy=dict(topk=(1, 3))), # Set top-k accuracy to 1 and 3 + metric_options=dict(top_k_accuracy=dict(topk=(1, 3))), # Set top-k accuracy to 1 and 3 during validation save_best='top_k_accuracy') # set `top_k_accuracy` as key indicator to save best checkpoint + eval_config = dict( + metric_options=dict(top_k_accuracy=dict(topk=(1, 3)))) # Set top-k accuracy to 1 and 3 during testing. You can also use `--eval top_k_accuracy` to assign evaluation metrics log_config = dict( # Config to register logger hook interval=20, # Interval to print the log hooks=[ # Hooks to be implemented during training From e91ee61c566e36f8afa4a938ec37bc5eb0e10077 Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Thu, 17 Jun 2021 13:33:26 +0800 Subject: [PATCH 159/414] remove annos (#936) --- README.md | 10 ++++++++++ tools/data/skeleton/README.md | 8 +++++--- tools/data/skeleton/download_annotations.sh | 4 ++-- 3 files changed, 17 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index c13e9fa93a..754622889a 100644 --- a/README.md +++ b/README.md @@ -150,6 +150,7 @@ Supported datasets for Action Recognition: - ✅ [GYM](/tools/data/gym/README.md) \[ [Homepage](https://sdolivia.github.io/FineGym/) \] (CVPR'2020) - ✅ [ActivityNet](/tools/data/activitynet/README.md) \[ [Homepage](http://activity-net.org/) \] (CVPR'2015) - ✅ [Diving48](/tools/data/diving48/README.md) \[ [Homepage](http://www.svcl.ucsd.edu/projects/resound/dataset.html) \] (ECCV'2018) +- ✅ [OmniSource](/tools/data/omnisource/README.md) \[ [Homepage](https://kennymckormick.github.io/omnisource/) \] (ECCV'2020)
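For readers unfamiliar with the `top_k_accuracy` metric configured in the `1_config.md` change above, the sketch below is a simplified, self-contained re-implementation for illustration only; it is not the function shipped in `mmaction.core.evaluation`, which should be used in practice:

```python
import numpy as np


def top_k_accuracy(scores, labels, topk=(1, 3)):
    """Fraction of samples whose true label ranks among the k top scores."""
    res = []
    for k in topk:
        # indices of the k highest-scoring classes for every sample
        top_idx = np.argsort(scores, axis=1)[:, -k:]
        hits = [label in row for label, row in zip(labels, top_idx)]
        res.append(float(np.mean(hits)))
    return res


scores = np.array([[0.2, 0.7, 0.1],   # highest score: class 1
                   [0.5, 0.3, 0.2]])  # highest score: class 0
print(top_k_accuracy(scores, labels=[1, 2]))  # -> [0.5, 1.0]
```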
@@ -174,6 +175,15 @@ Supported datasets for Spatial Temporal Action Detection +Supported datasets for Skeleton-based Action Detection + +
+(click to collapse) + +- ✅ [PoseC3D-FineGYM](/tools/data/skeleton/README.md) \[ [Homepage](https://kennymckormick.github.io/posec3d/) \] (arXiv'2021) + +
+ Datasets marked with 🔲 are not fully supported yet, but related dataset preparation steps are provided. ## Installation diff --git a/tools/data/skeleton/README.md b/tools/data/skeleton/README.md index 9b2a5aa4b1..1ecf2796b1 100644 --- a/tools/data/skeleton/README.md +++ b/tools/data/skeleton/README.md @@ -19,12 +19,14 @@ We release the skeleton annotations used in [Revisiting Skeleton-based Action Re ## Prepare Annotations -Currently, we support three datasets: FineGYM, NTU60_XSub and NTU120_XSub. You can execute following scripts to prepare the annotations. +Currently, we support one dataset: FineGYM. You can execute following scripts to prepare the annotations. ```shell bash download_annotations.sh ${DATASET} ``` +PS: Due to [Conditions of Use](http://rose1.ntu.edu.sg/Datasets/actionRecognition.asp) of the NTURGB-D dataset, we can not directly release the annotations used in our experiments. We will prepare a script for pose annotation generation ASAP. Once accomplished, you can use this script to generate all pose annotations used in our experiments. + ## Visualization For skeleton data visualization, you need also to prepare the RGB videos. Please refer to [visualize_heatmap_volume](/demo/visualize_heatmap_volume.ipynb) for detailed process. Here we provide some visualization examples from NTU-60 and FineGYM. @@ -66,8 +68,8 @@ For skeleton data visualization, you need also to prepare the RGB videos. Please **TODO**: - [x] FineGYM -- [x] NTU60_XSub -- [x] NTU120_XSub +- [ ] NTU60_XSub +- [ ] NTU120_XSub - [ ] NTU60_XView - [ ] NTU120_XSet - [ ] Kinetics diff --git a/tools/data/skeleton/download_annotations.sh b/tools/data/skeleton/download_annotations.sh index de2d3a2861..d57efbceac 100644 --- a/tools/data/skeleton/download_annotations.sh +++ b/tools/data/skeleton/download_annotations.sh @@ -1,10 +1,10 @@ #!/usr/bin/env bash DATASET=$1 -if [ "$DATASET" == "gym" ] || [ "$1" == "ntu60_xsub" ] || [ "$1" == "ntu120_xsub" ]; then +if [ "$DATASET" == "gym" ]; then echo "We are processing $DATASET" else - echo "Bad Argument, we only support gym, ntu60_xsub, ntu120_xsub now." + echo "Bad Argument, we only support gym now." exit 0 fi From e67405a1bc16e7656ef771d9cabeba2bdc4a3bd2 Mon Sep 17 00:00:00 2001 From: WRH <12756472+wangruohui@users.noreply.github.com> Date: Thu, 17 Jun 2021 20:13:50 +0800 Subject: [PATCH 160/414] [Documentation] Add Chinese Tutorial (#941) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 使用 Colaboratory 创建 * move tutorial ipynb to demo * rename ipynb and add download link: * correct url --- README_zh-CN.md | 2 + demo/mmaction2_tutorial_zh-CN.ipynb | 1647 +++++++++++++++++++++++++++ 2 files changed, 1649 insertions(+) create mode 100644 demo/mmaction2_tutorial_zh-CN.ipynb diff --git a/README_zh-CN.md b/README_zh-CN.md index a04ba8bcf0..37aa4608e3 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -188,6 +188,8 @@ v0.15.0 版本已于 2021 年 5 月 31 日发布,可通过查阅 [更新日志 - [如何导出模型为 onnx 格式](/docs_zh_CN/tutorials/6_export_model.md) - [如何自定义模型运行参数](/docs_zh_CN/tutorials/7_customize_runtime.md) +MMAction2 也提供了相应的中文 Colab 教程,可以点击 [这里](https://colab.research.google.com/github/open-mmlab/mmaction2/blob/master/demo/mmaction2_tutorial_zh-CN.ipynb) 进行体验! 
+ ## 常见问题 请参考 [FAQ](/docs_zh_CN/faq.md) 了解其他用户的常见问题 diff --git a/demo/mmaction2_tutorial_zh-CN.ipynb b/demo/mmaction2_tutorial_zh-CN.ipynb new file mode 100644 index 0000000000..73d4702db6 --- /dev/null +++ b/demo/mmaction2_tutorial_zh-CN.ipynb @@ -0,0 +1,1647 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "view-in-github" + }, + "source": [ + "
\"Open" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "VcjSRFELVbNk" + }, + "source": [ + "# MMAction2 Tutorial\n", + "\n", + "- 用MMAction2的识别模型做一次推理\n", + "- 用新数据集训练一个新的识别模型\n", + "- 用MMAction2的时空检测模型做一次推理" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "7LqHGkGEVqpm" + }, + "source": [ + "## 安装 MMAction2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "JUFfYElIB3cJ", + "outputId": "cdf9ef1d-9e85-4a77-9e63-fc6f3ca13ae2" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch 1.8.1+cu101 \n", + "torchsummary 1.5.1 \n", + "torchtext 0.9.1 \n", + "torchvision 0.9.1+cu101 \n" + ] + } + ], + "source": [ + "# 查看环境中pytorch版本以便mmcv对应版本下载\n", + "!pip list | grep torch" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "thuLEJ7lByQv", + "outputId": "4035efd5-103e-4122-8107-a65777937ce7" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Looking in links: https://download.openmmlab.com/mmcv/dist/cu101/torch1.8.0/index.html\n", + "Collecting mmcv-full\n", + "\u001b[?25l Downloading https://download.openmmlab.com/mmcv/dist/cu101/torch1.8.0/mmcv_full-1.3.5-cp37-cp37m-manylinux1_x86_64.whl (31.2MB)\n", + "\u001b[K |████████████████████████████████| 31.2MB 96kB/s \n", + "\u001b[?25hRequirement already satisfied: pyyaml in /usr/local/lib/python3.7/dist-packages (from mmcv-full) (3.13)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from mmcv-full) (1.19.5)\n", + "Collecting addict\n", + " Downloading https://files.pythonhosted.org/packages/6a/00/b08f23b7d7e1e14ce01419a467b583edbb93c6cdb8654e54a9cc579cd61f/addict-2.4.0-py3-none-any.whl\n", + "Requirement already satisfied: Pillow in /usr/local/lib/python3.7/dist-packages (from mmcv-full) (7.1.2)\n", + "Requirement already satisfied: opencv-python>=3 in /usr/local/lib/python3.7/dist-packages (from mmcv-full) (4.1.2.30)\n", + "Collecting yapf\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/5f/0d/8814e79eb865eab42d95023b58b650d01dec6f8ea87fc9260978b1bf2167/yapf-0.31.0-py2.py3-none-any.whl (185kB)\n", + "\u001b[K |████████████████████████████████| 194kB 7.7MB/s \n", + "\u001b[?25hInstalling collected packages: addict, yapf, mmcv-full\n", + "Successfully installed addict-2.4.0 mmcv-full-1.3.5 yapf-0.31.0\n" + ] + } + ], + "source": [ + "# 安装mmcv-full,注意需要对应pytorch1.8和cuda10.1版本\n", + "!pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cu101/torch1.8.0/index.html" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "id": "qKiI1qelB6BT", + "outputId": "1d269eaa-814a-48f5-dfe9-f6952cf5e851" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Cloning into 'mmaction2'...\n", + "remote: Enumerating objects: 11360, done.\u001b[K\n", + "remote: Counting objects: 100% (1029/1029), done.\u001b[K\n", + "remote: Compressing objects: 100% (587/587), done.\u001b[K\n", + "remote: Total 11360 (delta 603), reused 721 (delta 436), pack-reused 10331\u001b[K\n", + "Receiving objects: 100% (11360/11360), 37.17 MiB | 14.99 MiB/s, done.\n", + "Resolving deltas: 100% (7930/7930), done.\n", + "/content/mmaction2\n", + "Branch 'fix_nms_config' set up to track remote branch 
'fix_nms_config' from 'origin'.\n", + "Switched to a new branch 'fix_nms_config'\n", + "Obtaining file:///content/mmaction2\n", + "Requirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.15.0) (3.2.2)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.15.0) (1.19.5)\n", + "Requirement already satisfied: opencv-contrib-python in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.15.0) (4.1.2.30)\n", + "Requirement already satisfied: Pillow in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.15.0) (7.1.2)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.15.0) (1.3.1)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.15.0) (0.10.0)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.15.0) (2.8.1)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.15.0) (2.4.7)\n", + "Requirement already satisfied: six in /usr/local/lib/python3.7/dist-packages (from cycler>=0.10->matplotlib->mmaction2==0.15.0) (1.15.0)\n", + "Installing collected packages: mmaction2\n", + " Running setup.py develop for mmaction2\n", + "Successfully installed mmaction2\n", + "Collecting av\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/66/ff/bacde7314c646a2bd2f240034809a10cc3f8b096751284d0828640fff3dd/av-8.0.3-cp37-cp37m-manylinux2010_x86_64.whl (37.2MB)\n", + "\u001b[K |████████████████████████████████| 37.2MB 81kB/s \n", + "\u001b[?25hCollecting decord>=0.4.1\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/64/5e/e2be6a3a3a46275059574d9c6a1d422aa6c7c3cbf6614939b8a3c3f8f2d5/decord-0.5.2-py3-none-manylinux2010_x86_64.whl (14.1MB)\n", + "\u001b[K |████████████████████████████████| 14.1MB 225kB/s \n", + "\u001b[?25hRequirement already satisfied: imgaug in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 3)) (0.2.9)\n", + "Requirement already satisfied: librosa in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 4)) (0.8.0)\n", + "Requirement already satisfied: lmdb in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 5)) (0.99)\n", + "Requirement already satisfied: moviepy in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 6)) (0.2.3.5)\n", + "Collecting onnx\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/3f/9b/54c950d3256e27f970a83cd0504efb183a24312702deed0179453316dbd0/onnx-1.9.0-cp37-cp37m-manylinux2010_x86_64.whl (12.2MB)\n", + "\u001b[K |████████████████████████████████| 12.2MB 26.1MB/s \n", + "\u001b[?25hCollecting onnxruntime\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/f9/76/3d0f8bb2776961c7335693df06eccf8d099e48fa6fb552c7546867192603/onnxruntime-1.8.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (4.5MB)\n", + "\u001b[K |████████████████████████████████| 4.5MB 35.9MB/s \n", + "\u001b[?25hCollecting PyTurboJPEG\n", + " Downloading https://files.pythonhosted.org/packages/07/70/8397de6c39476d2cc0fcee6082ade0225b3e67bc4466a0cf07486b0d0de4/PyTurboJPEG-1.5.0.tar.gz\n", + "Requirement already satisfied: numpy>=1.14.0 in 
/usr/local/lib/python3.7/dist-packages (from decord>=0.4.1->-r requirements/optional.txt (line 2)) (1.19.5)\n", + "Requirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 3)) (3.2.2)\n", + "Requirement already satisfied: imageio in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 3)) (2.4.1)\n", + "Requirement already satisfied: scikit-image>=0.11.0 in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 3)) (0.16.2)\n", + "Requirement already satisfied: Shapely in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 3)) (1.7.1)\n", + "Requirement already satisfied: six in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 3)) (1.15.0)\n", + "Requirement already satisfied: Pillow in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 3)) (7.1.2)\n", + "Requirement already satisfied: opencv-python in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 3)) (4.1.2.30)\n", + "Requirement already satisfied: scipy in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 3)) (1.4.1)\n", + "Requirement already satisfied: audioread>=2.0.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 4)) (2.1.9)\n", + "Requirement already satisfied: numba>=0.43.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 4)) (0.51.2)\n", + "Requirement already satisfied: resampy>=0.2.2 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 4)) (0.2.2)\n", + "Requirement already satisfied: pooch>=1.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 4)) (1.3.0)\n", + "Requirement already satisfied: joblib>=0.14 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 4)) (1.0.1)\n", + "Requirement already satisfied: decorator>=3.0.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 4)) (4.4.2)\n", + "Requirement already satisfied: soundfile>=0.9.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 4)) (0.10.3.post1)\n", + "Requirement already satisfied: scikit-learn!=0.19.0,>=0.14.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 4)) (0.22.2.post1)\n", + "Requirement already satisfied: tqdm<5.0,>=4.11.2 in /usr/local/lib/python3.7/dist-packages (from moviepy->-r requirements/optional.txt (line 6)) (4.41.1)\n", + "Requirement already satisfied: protobuf in /usr/local/lib/python3.7/dist-packages (from onnx->-r requirements/optional.txt (line 7)) (3.12.4)\n", + "Requirement already satisfied: typing-extensions>=3.6.2.1 in /usr/local/lib/python3.7/dist-packages (from onnx->-r requirements/optional.txt (line 7)) (3.7.4.3)\n", + "Requirement already satisfied: flatbuffers in /usr/local/lib/python3.7/dist-packages (from onnxruntime->-r requirements/optional.txt (line 8)) (1.12)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 3)) (2.8.1)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r 
requirements/optional.txt (line 3)) (0.10.0)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 3)) (1.3.1)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 3)) (2.4.7)\n", + "Requirement already satisfied: networkx>=2.0 in /usr/local/lib/python3.7/dist-packages (from scikit-image>=0.11.0->imgaug->-r requirements/optional.txt (line 3)) (2.5.1)\n", + "Requirement already satisfied: PyWavelets>=0.4.0 in /usr/local/lib/python3.7/dist-packages (from scikit-image>=0.11.0->imgaug->-r requirements/optional.txt (line 3)) (1.1.1)\n", + "Requirement already satisfied: setuptools in /usr/local/lib/python3.7/dist-packages (from numba>=0.43.0->librosa->-r requirements/optional.txt (line 4)) (57.0.0)\n", + "Requirement already satisfied: llvmlite<0.35,>=0.34.0.dev0 in /usr/local/lib/python3.7/dist-packages (from numba>=0.43.0->librosa->-r requirements/optional.txt (line 4)) (0.34.0)\n", + "Requirement already satisfied: packaging in /usr/local/lib/python3.7/dist-packages (from pooch>=1.0->librosa->-r requirements/optional.txt (line 4)) (20.9)\n", + "Requirement already satisfied: requests in /usr/local/lib/python3.7/dist-packages (from pooch>=1.0->librosa->-r requirements/optional.txt (line 4)) (2.23.0)\n", + "Requirement already satisfied: appdirs in /usr/local/lib/python3.7/dist-packages (from pooch>=1.0->librosa->-r requirements/optional.txt (line 4)) (1.4.4)\n", + "Requirement already satisfied: cffi>=1.0 in /usr/local/lib/python3.7/dist-packages (from soundfile>=0.9.0->librosa->-r requirements/optional.txt (line 4)) (1.14.5)\n", + "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests->pooch>=1.0->librosa->-r requirements/optional.txt (line 4)) (1.24.3)\n", + "Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests->pooch>=1.0->librosa->-r requirements/optional.txt (line 4)) (3.0.4)\n", + "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests->pooch>=1.0->librosa->-r requirements/optional.txt (line 4)) (2.10)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests->pooch>=1.0->librosa->-r requirements/optional.txt (line 4)) (2020.12.5)\n", + "Requirement already satisfied: pycparser in /usr/local/lib/python3.7/dist-packages (from cffi>=1.0->soundfile>=0.9.0->librosa->-r requirements/optional.txt (line 4)) (2.20)\n", + "Building wheels for collected packages: PyTurboJPEG\n", + " Building wheel for PyTurboJPEG (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n", + " Created wheel for PyTurboJPEG: filename=PyTurboJPEG-1.5.0-cp37-none-any.whl size=7478 sha256=ea928a968966ea04f37722e3866c986be613835f0598c242df7e44e6e9d6749b\n", + " Stored in directory: /root/.cache/pip/wheels/87/62/6a/834c085b372ce84e5f95addd832a860edd356711b9c7918424\n", + "Successfully built PyTurboJPEG\n", + "Installing collected packages: av, decord, onnx, onnxruntime, PyTurboJPEG\n", + "Successfully installed PyTurboJPEG-1.5.0 av-8.0.3 decord-0.5.2 onnx-1.9.0 onnxruntime-1.8.0\n" + ] + }, + { + "data": { + "application/vnd.colab-display-data+json": { + "pip_warning": { + "packages": [ + "numpy" + ] + } + } + }, + "metadata": { + "tags": [] + }, + "output_type": "display_data" + } + ], + "source": [ + "# 克隆mmaction2项目\n", + "# %cd /content/\n", + "# !rm -rf mmaction2\n", + "# !git clone https://github.com/open-mmlab/mmaction2.git\n", + "!git clone https://github.com/wangruohui/mmaction2.git\n", + "%cd /content/mmaction2\n", + "!git checkout fix_nms_config\n", + "\n", + "# 以可编辑的模式安装mmaction\n", + "!pip install -e .\n", + "\n", + "# 安装一些额外的依赖\n", + "!pip install -r requirements/optional.txt" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "No_zZAFpWC-a", + "outputId": "ff4558ab-30ca-42b3-bf4b-27116d0629f7" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "1.8.1+cu101 True\n", + "0.15.0\n", + "10.1\n", + "GCC 7.3\n" + ] + } + ], + "source": [ + "# 检查torch的安装以及gpu的使用\n", + "import torch, torchvision\n", + "print(torch.__version__, torch.cuda.is_available())\n", + "\n", + "# 检查MMAction2的安装\n", + "import mmaction\n", + "print(mmaction.__version__)\n", + "\n", + "# 检查mmcv的安装\n", + "from mmcv.ops import get_compiling_cuda_version, get_compiler_version\n", + "print(get_compiling_cuda_version())\n", + "print(get_compiler_version())" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "pXf7oV5DWdab" + }, + "source": [ + "## MMAction2识别模型的推理" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "64CW6d_AaT-Q", + "outputId": "8b1b0465-62a9-4a8b-b1a4-278a5f81945d" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "--2021-06-03 15:01:35-- https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth\n", + "Resolving download.openmmlab.com (download.openmmlab.com)... 47.88.36.78\n", + "Connecting to download.openmmlab.com (download.openmmlab.com)|47.88.36.78|:443... connected.\n", + "HTTP request sent, awaiting response... 
200 OK\n", + "Length: 97579339 (93M) [application/octet-stream]\n", + "Saving to: ‘checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth’\n", + "\n", + "checkpoints/tsn_r50 100%[===================>] 93.06M 11.1MB/s in 8.2s \n", + "\n", + "2021-06-03 15:01:44 (11.4 MB/s) - ‘checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth’ saved [97579339/97579339]\n", + "\n" + ] + } + ], + "source": [ + "# 创建checkpoints文件夹并下载tsn模型\n", + "!mkdir checkpoints\n", + "!wget -c https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \\\n", + " -O checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "HNZB7NoSabzj" + }, + "outputs": [], + "source": [ + "from mmaction.apis import inference_recognizer, init_recognizer\n", + "\n", + "# 选择tsn对应的配置文件\n", + "config = 'configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py'\n", + "# 加载上面下载的checkpoint文件\n", + "checkpoint = 'checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n", + "# 初始化模型\n", + "model = init_recognizer(config, checkpoint, device='cuda:0')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "rEMsBnpHapAn" + }, + "outputs": [], + "source": [ + "# 选择视频进行推理\n", + "video = 'demo/demo.mp4'\n", + "label = 'demo/label_map_k400.txt'\n", + "results = inference_recognizer(model, video, label)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "YqtUNVTQyLMJ" + }, + "outputs": [], + "source": [ + "# 查看视频\n", + "from IPython.display import HTML\n", + "from base64 import b64encode\n", + "mp4 = open(video,'rb').read()\n", + "data_url = \"data:video/mp4;base64,\" + b64encode(mp4).decode()\n", + "HTML(\"\"\"\n", + "\n", + "\"\"\" % data_url)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "NIyJXqfWathq" + }, + "outputs": [], + "source": [ + "# 查看推理Top-5结果\n", + "for result in results:\n", + " print(f'{result[0]}: ', result[1])" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "QuZG8kZ2fJ5d" + }, + "source": [ + "## 在自定义数据集上训练模型\n", + "训练新模型通常有三个步骤:\n", + "- 支持新数据集\n", + "- 修改配置文件\n", + "- 训练模型\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "kbVu0-D-1JT2" + }, + "source": [ + "### 支持新数据集\n", + "\n", + "这里我们给出将数据转换为已有数据集格式的示例。其他方法可以参考[doc](/docs/tutorials/new_dataset.md)\n", + "\n", + "用到的是一个从[Kinetics-400](https://deepmind.com/research/open-source/open-source-datasets/kinetics/)中获取的tiny数据集。包含30个训练视频,10个测试视频。" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "gjsUj9JzgUlJ", + "outputId": "7aa8f278-95c2-4073-8c93-2e197e12c6c2" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "rm: cannot remove 'kinetics400_tiny.zip*': No such file or directory\n", + "--2021-06-03 14:55:03-- https://download.openmmlab.com/mmaction/kinetics400_tiny.zip\n", + "Resolving download.openmmlab.com (download.openmmlab.com)... 47.88.36.78\n", + "Connecting to download.openmmlab.com (download.openmmlab.com)|47.88.36.78|:443... connected.\n", + "HTTP request sent, awaiting response... 
200 OK\n", + "Length: 18308682 (17M) [application/zip]\n", + "Saving to: ‘kinetics400_tiny.zip’\n", + "\n", + "kinetics400_tiny.zi 100%[===================>] 17.46M 10.5MB/s in 1.7s \n", + "\n", + "2021-06-03 14:55:07 (10.5 MB/s) - ‘kinetics400_tiny.zip’ saved [18308682/18308682]\n", + "\n" + ] + } + ], + "source": [ + "# 下载并解压数据集kinetics400_tiny\n", + "!rm kinetics400_tiny.zip*\n", + "!rm -rf kinetics400_tiny\n", + "!wget https://download.openmmlab.com/mmaction/kinetics400_tiny.zip\n", + "!unzip kinetics400_tiny.zip > /dev/null" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "AbZ-o7V6hNw4" + }, + "outputs": [], + "source": [ + "# 安装tree工具并检查数据集目录结构\n", + "!apt-get -q install tree\n", + "!tree kinetics400_tiny" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "fTdi6dI0hY3g" + }, + "outputs": [], + "source": [ + "# 查看标注文件格式\n", + "!cat kinetics400_tiny/kinetics_tiny_train_video.txt" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "0bq0mxmEi29H" + }, + "source": [ + "根据[`VideoDataset`](./datasets/video_dataset.py)中定义的格式,每一行表示样本视频的文件名和标签,用空格符分隔。\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "Ht_DGJA9jQar" + }, + "source": [ + "### 修改配置文件\n", + "\n", + "我们需要修改配置文件,同时会用到之前下载的checkpoint作为pre-trained模型。\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "LjCcmCKOjktc" + }, + "outputs": [], + "source": [ + "# 获得tsn对应的配置文件cfg\n", + "from mmcv import Config\n", + "cfg = Config.fromfile('./configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py')" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "tc8YhFFGjp3e" + }, + "source": [ + "我们在原本用于kinetics400-full数据集训练的tsn模型配置上进行修改,让模型可以在Kinetics400-tiny数据集上进行训练。\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "tlhu9byjjt-K", + "outputId": "a1c04b76-9305-497d-9a97-cef55491a7ab" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Config:\n", + "model = dict(\n", + " type='Recognizer2D',\n", + " backbone=dict(\n", + " type='ResNet',\n", + " pretrained='torchvision://resnet50',\n", + " depth=50,\n", + " norm_eval=False),\n", + " cls_head=dict(\n", + " type='TSNHead',\n", + " num_classes=2,\n", + " in_channels=2048,\n", + " spatial_type='avg',\n", + " consensus=dict(type='AvgConsensus', dim=1),\n", + " dropout_ratio=0.4,\n", + " init_std=0.01),\n", + " train_cfg=None,\n", + " test_cfg=dict(average_clips=None))\n", + "optimizer = dict(type='SGD', lr=7.8125e-05, momentum=0.9, weight_decay=0.0001)\n", + "optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2))\n", + "lr_config = dict(policy='step', step=[40, 80])\n", + "total_epochs = 30\n", + "checkpoint_config = dict(interval=10)\n", + "log_config = dict(interval=5, hooks=[dict(type='TextLoggerHook')])\n", + "dist_params = dict(backend='nccl')\n", + "log_level = 'INFO'\n", + "load_from = './checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n", + "resume_from = None\n", + "workflow = [('train', 1)]\n", + "dataset_type = 'VideoDataset'\n", + "data_root = 'kinetics400_tiny/train/'\n", + "data_root_val = 'kinetics400_tiny/val/'\n", + "ann_file_train = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n", + "ann_file_val = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", + "ann_file_test = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", + "img_norm_cfg = dict(\n", + " 
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False)\n", + "train_pipeline = [\n", + " dict(type='DecordInit'),\n", + " dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8),\n", + " dict(type='DecordDecode'),\n", + " dict(\n", + " type='MultiScaleCrop',\n", + " input_size=224,\n", + " scales=(1, 0.875, 0.75, 0.66),\n", + " random_crop=False,\n", + " max_wh_scale_gap=1),\n", + " dict(type='Resize', scale=(224, 224), keep_ratio=False),\n", + " dict(type='Flip', flip_ratio=0.5),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[123.675, 116.28, 103.53],\n", + " std=[58.395, 57.12, 57.375],\n", + " to_bgr=False),\n", + " dict(type='FormatShape', input_format='NCHW'),\n", + " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", + " dict(type='ToTensor', keys=['imgs', 'label'])\n", + "]\n", + "val_pipeline = [\n", + " dict(type='DecordInit'),\n", + " dict(\n", + " type='SampleFrames',\n", + " clip_len=1,\n", + " frame_interval=1,\n", + " num_clips=8,\n", + " test_mode=True),\n", + " dict(type='DecordDecode'),\n", + " dict(type='Resize', scale=(-1, 256)),\n", + " dict(type='CenterCrop', crop_size=224),\n", + " dict(type='Flip', flip_ratio=0),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[123.675, 116.28, 103.53],\n", + " std=[58.395, 57.12, 57.375],\n", + " to_bgr=False),\n", + " dict(type='FormatShape', input_format='NCHW'),\n", + " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", + " dict(type='ToTensor', keys=['imgs'])\n", + "]\n", + "test_pipeline = [\n", + " dict(type='DecordInit'),\n", + " dict(\n", + " type='SampleFrames',\n", + " clip_len=1,\n", + " frame_interval=1,\n", + " num_clips=25,\n", + " test_mode=True),\n", + " dict(type='DecordDecode'),\n", + " dict(type='Resize', scale=(-1, 256)),\n", + " dict(type='ThreeCrop', crop_size=256),\n", + " dict(type='Flip', flip_ratio=0),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[123.675, 116.28, 103.53],\n", + " std=[58.395, 57.12, 57.375],\n", + " to_bgr=False),\n", + " dict(type='FormatShape', input_format='NCHW'),\n", + " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", + " dict(type='ToTensor', keys=['imgs'])\n", + "]\n", + "data = dict(\n", + " videos_per_gpu=2,\n", + " workers_per_gpu=4,\n", + " train=dict(\n", + " type='VideoDataset',\n", + " ann_file='kinetics400_tiny/kinetics_tiny_train_video.txt',\n", + " data_prefix='kinetics400_tiny/train/',\n", + " pipeline=[\n", + " dict(type='DecordInit'),\n", + " dict(\n", + " type='SampleFrames', clip_len=1, frame_interval=1,\n", + " num_clips=8),\n", + " dict(type='DecordDecode'),\n", + " dict(\n", + " type='MultiScaleCrop',\n", + " input_size=224,\n", + " scales=(1, 0.875, 0.75, 0.66),\n", + " random_crop=False,\n", + " max_wh_scale_gap=1),\n", + " dict(type='Resize', scale=(224, 224), keep_ratio=False),\n", + " dict(type='Flip', flip_ratio=0.5),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[123.675, 116.28, 103.53],\n", + " std=[58.395, 57.12, 57.375],\n", + " to_bgr=False),\n", + " dict(type='FormatShape', input_format='NCHW'),\n", + " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", + " dict(type='ToTensor', keys=['imgs', 'label'])\n", + " ]),\n", + " val=dict(\n", + " type='VideoDataset',\n", + " ann_file='kinetics400_tiny/kinetics_tiny_val_video.txt',\n", + " data_prefix='kinetics400_tiny/val/',\n", + " pipeline=[\n", + " dict(type='DecordInit'),\n", + " dict(\n", + " type='SampleFrames',\n", + " clip_len=1,\n", + " frame_interval=1,\n", + " num_clips=8,\n", + " 
test_mode=True),\n",
+     "            dict(type='DecordDecode'),\n",
+     "            dict(type='Resize', scale=(-1, 256)),\n",
+     "            dict(type='CenterCrop', crop_size=224),\n",
+     "            dict(type='Flip', flip_ratio=0),\n",
+     "            dict(\n",
+     "                type='Normalize',\n",
+     "                mean=[123.675, 116.28, 103.53],\n",
+     "                std=[58.395, 57.12, 57.375],\n",
+     "                to_bgr=False),\n",
+     "            dict(type='FormatShape', input_format='NCHW'),\n",
+     "            dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n",
+     "            dict(type='ToTensor', keys=['imgs'])\n",
+     "        ]),\n",
+     "    test=dict(\n",
+     "        type='VideoDataset',\n",
+     "        ann_file='kinetics400_tiny/kinetics_tiny_val_video.txt',\n",
+     "        data_prefix='kinetics400_tiny/val/',\n",
+     "        pipeline=[\n",
+     "            dict(type='DecordInit'),\n",
+     "            dict(\n",
+     "                type='SampleFrames',\n",
+     "                clip_len=1,\n",
+     "                frame_interval=1,\n",
+     "                num_clips=25,\n",
+     "                test_mode=True),\n",
+     "            dict(type='DecordDecode'),\n",
+     "            dict(type='Resize', scale=(-1, 256)),\n",
+     "            dict(type='ThreeCrop', crop_size=256),\n",
+     "            dict(type='Flip', flip_ratio=0),\n",
+     "            dict(\n",
+     "                type='Normalize',\n",
+     "                mean=[123.675, 116.28, 103.53],\n",
+     "                std=[58.395, 57.12, 57.375],\n",
+     "                to_bgr=False),\n",
+     "            dict(type='FormatShape', input_format='NCHW'),\n",
+     "            dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n",
+     "            dict(type='ToTensor', keys=['imgs'])\n",
+     "        ]))\n",
+     "evaluation = dict(\n",
+     "    interval=5, metrics=['top_k_accuracy', 'mean_class_accuracy'])\n",
+     "work_dir = './tutorial_exps'\n",
+     "omnisource = False\n",
+     "seed = 0\n",
+     "gpu_ids = range(0, 1)\n",
+     "\n"
+    ]
+   }
+  ],
+  "source": [
+   "from mmcv.runner import set_random_seed\n",
+   "\n",
+   "# Modify the dataset type and file paths\n",
+   "cfg.dataset_type = 'VideoDataset'\n",
+   "cfg.data_root = 'kinetics400_tiny/train/'\n",
+   "cfg.data_root_val = 'kinetics400_tiny/val/'\n",
+   "cfg.ann_file_train = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n",
+   "cfg.ann_file_val = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n",
+   "cfg.ann_file_test = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n",
+   "\n",
+   "cfg.data.test.type = 'VideoDataset'\n",
+   "cfg.data.test.ann_file = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n",
+   "cfg.data.test.data_prefix = 'kinetics400_tiny/val/'\n",
+   "\n",
+   "cfg.data.train.type = 'VideoDataset'\n",
+   "cfg.data.train.ann_file = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n",
+   "cfg.data.train.data_prefix = 'kinetics400_tiny/train/'\n",
+   "\n",
+   "cfg.data.val.type = 'VideoDataset'\n",
+   "cfg.data.val.ann_file = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n",
+   "cfg.data.val.data_prefix = 'kinetics400_tiny/val/'\n",
+   "\n",
+   "# Confirm whether omnisource training is used\n",
+   "cfg.setdefault('omnisource', False)\n",
+   "# Set the number of classes of cls_head to 2\n",
+   "cfg.model.cls_head.num_classes = 2\n",
+   "# Use the pre-trained TSN model\n",
+   "cfg.load_from = './checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n",
+   "\n",
+   "# Set up the working directory\n",
+   "cfg.work_dir = './tutorial_exps'\n",
+   "\n",
+   "# Since we train on a single GPU, adjust the corresponding lr\n",
+   "cfg.data.videos_per_gpu = cfg.data.videos_per_gpu // 16\n",
+   "cfg.optimizer.lr = cfg.optimizer.lr / 8 / 16\n",
+   "cfg.total_epochs = 30\n",
+   "\n",
+   "# Increase the checkpoint interval to reduce storage consumption\n",
+   "cfg.checkpoint_config.interval = 10\n",
+   "# Set the logging interval to reduce the printing time\n",
+   "cfg.log_config.interval = 5\n",
+   "\n",
+   "# Fix the random seed to make results reproducible\n",
+   "cfg.seed = 0\n",
+   "set_random_seed(0, deterministic=False)\n",
+   "cfg.gpu_ids = range(1)\n",
+   "\n",
+   "# Print all config parameters\n",
+   "print(f'Config:\\n{cfg.pretty_text}')\n"
+  ]
+ },
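+ {
+  "cell_type": "markdown",
+  "metadata": {},
+  "source": [
+   "*(Editor's addition, not part of the original tutorial: a minimal sketch of the linear scaling rule applied in the cell above, assuming the base config was tuned for 8 GPUs with 32 videos each.)*\n"
+  ]
+ },
+ {
+  "cell_type": "code",
+  "execution_count": null,
+  "metadata": {},
+  "outputs": [],
+  "source": [
+   "# Editor's sketch: the base lr 0.01 is rescaled for 1 GPU x 2 videos per\n",
+   "# GPU, i.e. by (1 * 2) / (8 * 32) = 1 / 128, hence `lr / 8 / 16` above.\n",
+   "base_lr, base_batch, new_batch = 0.01, 8 * 32, 1 * 2\n",
+   "print(base_lr * new_batch / base_batch)  # 7.8125e-05, matching cfg.optimizer.lr\n"
+  ]
+ },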
+ {
+  "cell_type": "markdown",
+  "metadata": {
+   "id": "tES-qnZ3k38Z"
+  },
+  "source": [
+   "### Train the recognizer\n"
+  ]
+ },
+ {
+  "cell_type": "code",
+  "execution_count": null,
+  "metadata": {
+   "colab": {
+    "base_uri": "https://localhost:8080/",
+    "height": 1000,
+    "referenced_widgets": [
+     "81bfbdf1ec55451b8be8a68fd1b0cf18",
+     "4a9a4d1a6a554315a7d4362fd9ef0290",
+     "c992b295041a4908a6a0d4f62a542cca",
+     "57f2df1708fa455ea8a305b9100ad171",
+     "8c947d1afee142e4b6cd2e0e26f46d6f",
+     "adf3a16cdae740cf882999a25d53e8f7",
+     "e6b45b124776452a85136fc3e18502f6",
+     "974f4fceb03748f1b346b498df9828a3"
+    ]
+   },
+   "id": "dDBWkdDRk6oz",
+   "outputId": "574904cc-29fb-4b0a-ae2f-1dcba0248455"
+  },
+  "outputs": [
+   {
+    "name": "stdout",
+    "output_type": "stream",
+    "text": [
+     "Use load_from_torchvision loader\n"
+    ]
+   },
+   {
+    "name": "stderr",
+    "output_type": "stream",
+    "text": [
+     "Downloading: \"https://download.pytorch.org/models/resnet50-19c8e357.pth\" to /root/.cache/torch/hub/checkpoints/resnet50-19c8e357.pth\n"
+    ]
+   },
+   {
+    "data": {
+     "application/vnd.jupyter.widget-view+json": {
+      "model_id": "81bfbdf1ec55451b8be8a68fd1b0cf18",
+      "version_major": 2,
+      "version_minor": 0
+     },
+     "text/plain": [
+      "HBox(children=(FloatProgress(value=0.0, max=102502400.0), HTML(value='')))"
+     ]
+    },
+    "metadata": {
+     "tags": []
+    },
+    "output_type": "display_data"
+   },
+   {
+    "name": "stdout",
+    "output_type": "stream",
+    "text": [
+     "\n"
+    ]
+   },
+   {
+    "name": "stderr",
+    "output_type": "stream",
+    "text": [
+     "2021-06-03 15:02:48,410 - mmaction - INFO - These parameters in pretrained checkpoint are not loaded: {'fc.bias', 'fc.weight'}\n",
+     "/usr/local/lib/python3.7/dist-packages/torch/utils/data/dataloader.py:477: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n",
+     "  cpuset_checked))\n",
+     "2021-06-03 15:02:59,146 - mmaction - INFO - load checkpoint from ./checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth\n",
+     "2021-06-03 15:02:59,147 - mmaction - INFO - Use load_from_local loader\n",
+     "2021-06-03 15:02:59,233 - mmaction - WARNING - The model and loaded state dict do not match exactly\n",
+     "\n",
+     "size mismatch for cls_head.fc_cls.weight: copying a param with shape torch.Size([400, 2048]) from checkpoint, the shape in current model is torch.Size([2, 2048]).\n",
+     "size mismatch for cls_head.fc_cls.bias: copying a param with shape torch.Size([400]) from checkpoint, the shape in current model is torch.Size([2]).\n",
+     "2021-06-03 15:02:59,235 - mmaction - INFO - Start running, host: root@dd065c1a509c, work_dir: /content/mmaction2/tutorial_exps\n",
+     "2021-06-03 15:02:59,240 - mmaction - INFO - workflow: [('train', 1)], max: 30 epochs\n",
+     "/usr/local/lib/python3.7/dist-packages/mmcv/runner/hooks/evaluation.py:144: UserWarning: runner.meta is None. Creating an empty one.\n",
+     "  warnings.warn('runner.meta is None. 
Creating an empty one.')\n", + "2021-06-03 15:03:03,913 - mmaction - INFO - Epoch [1][5/15]\tlr: 7.813e-05, eta: 0:06:55, time: 0.933, data_time: 0.701, memory: 1654, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7604, loss: 0.7604, grad_norm: 14.8813\n", + "2021-06-03 15:03:04,822 - mmaction - INFO - Epoch [1][10/15]\tlr: 7.813e-05, eta: 0:04:05, time: 0.183, data_time: 0.006, memory: 1654, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6282, loss: 0.6282, grad_norm: 10.1833\n", + "2021-06-03 15:03:05,630 - mmaction - INFO - Epoch [1][15/15]\tlr: 7.813e-05, eta: 0:03:05, time: 0.162, data_time: 0.002, memory: 1654, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7165, loss: 0.7165, grad_norm: 10.8552\n", + "2021-06-03 15:03:09,840 - mmaction - INFO - Epoch [2][5/15]\tlr: 7.813e-05, eta: 0:03:45, time: 0.824, data_time: 0.620, memory: 1654, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6444, loss: 0.6444, grad_norm: 11.3933\n", + "2021-06-03 15:03:11,318 - mmaction - INFO - Epoch [2][10/15]\tlr: 7.813e-05, eta: 0:03:23, time: 0.296, data_time: 0.109, memory: 1654, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.7155, loss: 0.7155, grad_norm: 12.3879\n", + "2021-06-03 15:03:12,109 - mmaction - INFO - Epoch [2][15/15]\tlr: 7.813e-05, eta: 0:02:58, time: 0.158, data_time: 0.001, memory: 1654, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6797, loss: 0.6797, grad_norm: 10.9274\n", + "2021-06-03 15:03:16,265 - mmaction - INFO - Epoch [3][5/15]\tlr: 7.813e-05, eta: 0:03:19, time: 0.812, data_time: 0.613, memory: 1654, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7126, loss: 0.7126, grad_norm: 11.1647\n", + "2021-06-03 15:03:17,416 - mmaction - INFO - Epoch [3][10/15]\tlr: 7.813e-05, eta: 0:03:04, time: 0.229, data_time: 0.049, memory: 1654, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.6635, loss: 0.6635, grad_norm: 12.1194\n", + "2021-06-03 15:03:18,283 - mmaction - INFO - Epoch [3][15/15]\tlr: 7.813e-05, eta: 0:02:49, time: 0.176, data_time: 0.014, memory: 1654, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6978, loss: 0.6978, grad_norm: 10.3157\n", + "2021-06-03 15:03:22,394 - mmaction - INFO - Epoch [4][5/15]\tlr: 7.813e-05, eta: 0:03:03, time: 0.803, data_time: 0.595, memory: 1654, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6795, loss: 0.6795, grad_norm: 12.0900\n", + "2021-06-03 15:03:23,662 - mmaction - INFO - Epoch [4][10/15]\tlr: 7.813e-05, eta: 0:02:53, time: 0.253, data_time: 0.067, memory: 1654, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7414, loss: 0.7414, grad_norm: 12.6038\n", + "2021-06-03 15:03:24,541 - mmaction - INFO - Epoch [4][15/15]\tlr: 7.813e-05, eta: 0:02:42, time: 0.177, data_time: 0.010, memory: 1654, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6761, loss: 0.6761, grad_norm: 11.2109\n", + "2021-06-03 15:03:28,677 - mmaction - INFO - Epoch [5][5/15]\tlr: 7.813e-05, eta: 0:02:52, time: 0.809, data_time: 0.594, memory: 1654, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.6899, loss: 0.6899, grad_norm: 12.3528\n", + "2021-06-03 15:03:29,778 - mmaction - INFO - Epoch [5][10/15]\tlr: 7.813e-05, eta: 0:02:43, time: 0.220, data_time: 0.026, memory: 1654, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6337, loss: 0.6337, grad_norm: 12.3525\n", + "2021-06-03 15:03:30,887 - mmaction - INFO - Epoch [5][15/15]\tlr: 7.813e-05, eta: 0:02:36, time: 0.222, data_time: 0.058, memory: 1654, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6425, loss: 0.6425, grad_norm: 9.7286\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + 
"[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 5.7 task/s, elapsed: 2s, ETA: 0s" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2021-06-03 15:03:32,826 - mmaction - INFO - Evaluating top_k_accuracy ...\n", + "2021-06-03 15:03:32,828 - mmaction - INFO - \n", + "top1_acc\t0.8000\n", + "top5_acc\t1.0000\n", + "2021-06-03 15:03:32,831 - mmaction - INFO - Evaluating mean_class_accuracy ...\n", + "2021-06-03 15:03:32,836 - mmaction - INFO - \n", + "mean_acc\t0.8000\n", + "2021-06-03 15:03:33,250 - mmaction - INFO - Now best checkpoint is saved as best_top1_acc_epoch_5.pth.\n", + "2021-06-03 15:03:33,251 - mmaction - INFO - Best top1_acc is 0.8000 at 5 epoch.\n", + "2021-06-03 15:03:33,255 - mmaction - INFO - Epoch(val) [5][5]\ttop1_acc: 0.8000, top5_acc: 1.0000, mean_class_accuracy: 0.8000\n", + "2021-06-03 15:03:37,510 - mmaction - INFO - Epoch [6][5/15]\tlr: 7.813e-05, eta: 0:02:44, time: 0.848, data_time: 0.638, memory: 1654, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5897, loss: 0.5897, grad_norm: 11.0816\n", + "2021-06-03 15:03:38,830 - mmaction - INFO - Epoch [6][10/15]\tlr: 7.813e-05, eta: 0:02:38, time: 0.266, data_time: 0.094, memory: 1654, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6937, loss: 0.6937, grad_norm: 11.3882\n", + "2021-06-03 15:03:39,638 - mmaction - INFO - Epoch [6][15/15]\tlr: 7.813e-05, eta: 0:02:30, time: 0.162, data_time: 0.002, memory: 1654, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6607, loss: 0.6607, grad_norm: 11.6493\n", + "2021-06-03 15:03:43,948 - mmaction - INFO - Epoch [7][5/15]\tlr: 7.813e-05, eta: 0:02:36, time: 0.844, data_time: 0.643, memory: 1654, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6503, loss: 0.6503, grad_norm: 12.5117\n", + "2021-06-03 15:03:45,085 - mmaction - INFO - Epoch [7][10/15]\tlr: 7.813e-05, eta: 0:02:30, time: 0.228, data_time: 0.047, memory: 1654, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.6313, loss: 0.6313, grad_norm: 10.8442\n", + "2021-06-03 15:03:45,922 - mmaction - INFO - Epoch [7][15/15]\tlr: 7.813e-05, eta: 0:02:24, time: 0.167, data_time: 0.002, memory: 1654, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6310, loss: 0.6310, grad_norm: 10.5798\n", + "2021-06-03 15:03:50,322 - mmaction - INFO - Epoch [8][5/15]\tlr: 7.813e-05, eta: 0:02:28, time: 0.863, data_time: 0.662, memory: 1654, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6283, loss: 0.6283, grad_norm: 11.3411\n", + "2021-06-03 15:03:51,521 - mmaction - INFO - Epoch [8][10/15]\tlr: 7.813e-05, eta: 0:02:23, time: 0.240, data_time: 0.055, memory: 1654, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6765, loss: 0.6765, grad_norm: 11.1512\n", + "2021-06-03 15:03:52,331 - mmaction - INFO - Epoch [8][15/15]\tlr: 7.813e-05, eta: 0:02:17, time: 0.162, data_time: 0.001, memory: 1654, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.5961, loss: 0.5961, grad_norm: 11.1990\n", + "2021-06-03 15:03:56,661 - mmaction - INFO - Epoch [9][5/15]\tlr: 7.813e-05, eta: 0:02:21, time: 0.848, data_time: 0.645, memory: 1654, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6524, loss: 0.6524, grad_norm: 11.9008\n", + "2021-06-03 15:03:57,882 - mmaction - INFO - Epoch [9][10/15]\tlr: 7.813e-05, eta: 0:02:16, time: 0.244, data_time: 0.061, memory: 1654, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6937, loss: 0.6937, grad_norm: 13.0136\n", + "2021-06-03 15:03:58,697 - mmaction - INFO - Epoch [9][15/15]\tlr: 7.813e-05, eta: 0:02:11, time: 0.163, data_time: 0.001, memory: 1654, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.5511, loss: 
0.5511, grad_norm: 9.5135\n", + "2021-06-03 15:04:02,948 - mmaction - INFO - Epoch [10][5/15]\tlr: 7.813e-05, eta: 0:02:14, time: 0.831, data_time: 0.631, memory: 1654, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.5565, loss: 0.5565, grad_norm: 9.2178\n", + "2021-06-03 15:04:03,954 - mmaction - INFO - Epoch [10][10/15]\tlr: 7.813e-05, eta: 0:02:09, time: 0.202, data_time: 0.006, memory: 1654, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6199, loss: 0.6199, grad_norm: 10.8341\n", + "2021-06-03 15:04:04,855 - mmaction - INFO - Epoch [10][15/15]\tlr: 7.813e-05, eta: 0:02:05, time: 0.180, data_time: 0.011, memory: 1654, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.5853, loss: 0.5853, grad_norm: 10.9314\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 5.8 task/s, elapsed: 2s, ETA: 0s" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2021-06-03 15:04:06,763 - mmaction - INFO - Evaluating top_k_accuracy ...\n", + "2021-06-03 15:04:06,765 - mmaction - INFO - \n", + "top1_acc\t0.8000\n", + "top5_acc\t1.0000\n", + "2021-06-03 15:04:06,766 - mmaction - INFO - Evaluating mean_class_accuracy ...\n", + "2021-06-03 15:04:06,770 - mmaction - INFO - \n", + "mean_acc\t0.8000\n", + "2021-06-03 15:04:06,772 - mmaction - INFO - Saving checkpoint at 10 epochs\n", + "2021-06-03 15:04:07,188 - mmaction - INFO - Epoch(val) [10][5]\ttop1_acc: 0.8000, top5_acc: 1.0000, mean_class_accuracy: 0.8000\n", + "2021-06-03 15:04:11,319 - mmaction - INFO - Epoch [11][5/15]\tlr: 7.813e-05, eta: 0:02:06, time: 0.825, data_time: 0.620, memory: 1654, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5100, loss: 0.5100, grad_norm: 8.8945\n", + "2021-06-03 15:04:12,449 - mmaction - INFO - Epoch [11][10/15]\tlr: 7.813e-05, eta: 0:02:02, time: 0.226, data_time: 0.042, memory: 1654, top1_acc: 0.2000, top5_acc: 1.0000, loss_cls: 0.6959, loss: 0.6959, grad_norm: 13.3499\n", + "2021-06-03 15:04:13,350 - mmaction - INFO - Epoch [11][15/15]\tlr: 7.813e-05, eta: 0:01:58, time: 0.180, data_time: 0.014, memory: 1654, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.4929, loss: 0.4929, grad_norm: 8.5170\n", + "2021-06-03 15:04:17,700 - mmaction - INFO - Epoch [12][5/15]\tlr: 7.813e-05, eta: 0:02:00, time: 0.851, data_time: 0.649, memory: 1654, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6076, loss: 0.6076, grad_norm: 11.6095\n", + "2021-06-03 15:04:18,762 - mmaction - INFO - Epoch [12][10/15]\tlr: 7.813e-05, eta: 0:01:56, time: 0.213, data_time: 0.032, memory: 1654, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5356, loss: 0.5356, grad_norm: 9.7047\n", + "2021-06-03 15:04:19,608 - mmaction - INFO - Epoch [12][15/15]\tlr: 7.813e-05, eta: 0:01:52, time: 0.169, data_time: 0.002, memory: 1654, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6340, loss: 0.6340, grad_norm: 11.7714\n", + "2021-06-03 15:04:23,829 - mmaction - INFO - Epoch [13][5/15]\tlr: 7.813e-05, eta: 0:01:53, time: 0.825, data_time: 0.611, memory: 1654, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.5467, loss: 0.5467, grad_norm: 9.3259\n", + "2021-06-03 15:04:24,969 - mmaction - INFO - Epoch [13][10/15]\tlr: 7.813e-05, eta: 0:01:49, time: 0.230, data_time: 0.042, memory: 1654, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5878, loss: 0.5878, grad_norm: 11.7431\n", + "2021-06-03 15:04:25,994 - mmaction - INFO - Epoch [13][15/15]\tlr: 7.813e-05, eta: 0:01:46, time: 0.205, data_time: 0.038, memory: 1654, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5018, loss: 0.5018, 
grad_norm: 8.9612\n", + "2021-06-03 15:04:30,330 - mmaction - INFO - Epoch [14][5/15]\tlr: 7.813e-05, eta: 0:01:46, time: 0.850, data_time: 0.643, memory: 1654, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6634, loss: 0.6634, grad_norm: 12.9608\n", + "2021-06-03 15:04:31,497 - mmaction - INFO - Epoch [14][10/15]\tlr: 7.813e-05, eta: 0:01:43, time: 0.232, data_time: 0.048, memory: 1654, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.5646, loss: 0.5646, grad_norm: 10.2523\n", + "2021-06-03 15:04:32,322 - mmaction - INFO - Epoch [14][15/15]\tlr: 7.813e-05, eta: 0:01:39, time: 0.166, data_time: 0.004, memory: 1654, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6504, loss: 0.6504, grad_norm: 12.5382\n", + "2021-06-03 15:04:36,355 - mmaction - INFO - Epoch [15][5/15]\tlr: 7.813e-05, eta: 0:01:39, time: 0.789, data_time: 0.589, memory: 1654, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.5893, loss: 0.5893, grad_norm: 11.1704\n", + "2021-06-03 15:04:37,811 - mmaction - INFO - Epoch [15][10/15]\tlr: 7.813e-05, eta: 0:01:36, time: 0.291, data_time: 0.117, memory: 1654, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6413, loss: 0.6413, grad_norm: 12.5114\n", + "2021-06-03 15:04:38,647 - mmaction - INFO - Epoch [15][15/15]\tlr: 7.813e-05, eta: 0:01:33, time: 0.167, data_time: 0.001, memory: 1654, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.4747, loss: 0.4747, grad_norm: 8.3424\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 5.7 task/s, elapsed: 2s, ETA: 0s" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2021-06-03 15:04:40,575 - mmaction - INFO - Evaluating top_k_accuracy ...\n", + "2021-06-03 15:04:40,576 - mmaction - INFO - \n", + "top1_acc\t0.8000\n", + "top5_acc\t1.0000\n", + "2021-06-03 15:04:40,586 - mmaction - INFO - Evaluating mean_class_accuracy ...\n", + "2021-06-03 15:04:40,589 - mmaction - INFO - \n", + "mean_acc\t0.8000\n", + "2021-06-03 15:04:40,590 - mmaction - INFO - Epoch(val) [15][5]\ttop1_acc: 0.8000, top5_acc: 1.0000, mean_class_accuracy: 0.8000\n", + "2021-06-03 15:04:44,502 - mmaction - INFO - Epoch [16][5/15]\tlr: 7.813e-05, eta: 0:01:33, time: 0.780, data_time: 0.572, memory: 1654, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.4760, loss: 0.4760, grad_norm: 8.9694\n", + "2021-06-03 15:04:45,694 - mmaction - INFO - Epoch [16][10/15]\tlr: 7.813e-05, eta: 0:01:30, time: 0.237, data_time: 0.049, memory: 1654, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.5583, loss: 0.5583, grad_norm: 11.0941\n", + "2021-06-03 15:04:46,780 - mmaction - INFO - Epoch [16][15/15]\tlr: 7.813e-05, eta: 0:01:27, time: 0.219, data_time: 0.053, memory: 1654, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.5707, loss: 0.5707, grad_norm: 11.3002\n", + "2021-06-03 15:04:51,458 - mmaction - INFO - Epoch [17][5/15]\tlr: 7.813e-05, eta: 0:01:27, time: 0.918, data_time: 0.705, memory: 1654, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.5781, loss: 0.5781, grad_norm: 11.3368\n", + "2021-06-03 15:04:52,369 - mmaction - INFO - Epoch [17][10/15]\tlr: 7.813e-05, eta: 0:01:24, time: 0.181, data_time: 0.004, memory: 1654, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.5642, loss: 0.5642, grad_norm: 10.7471\n", + "2021-06-03 15:04:53,264 - mmaction - INFO - Epoch [17][15/15]\tlr: 7.813e-05, eta: 0:01:21, time: 0.180, data_time: 0.014, memory: 1654, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.4448, loss: 0.4448, grad_norm: 7.9083\n", + "2021-06-03 15:04:57,485 - mmaction - INFO - Epoch 
[18][5/15]\tlr: 7.813e-05, eta: 0:01:20, time: 0.827, data_time: 0.617, memory: 1654, top1_acc: 1.0000, top5_acc: 1.0000, loss_cls: 0.4346, loss: 0.4346, grad_norm: 8.5470\n", + "2021-06-03 15:04:58,807 - mmaction - INFO - Epoch [18][10/15]\tlr: 7.813e-05, eta: 0:01:17, time: 0.265, data_time: 0.077, memory: 1654, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.4648, loss: 0.4648, grad_norm: 8.6081\n", + "2021-06-03 15:04:59,651 - mmaction - INFO - Epoch [18][15/15]\tlr: 7.813e-05, eta: 0:01:14, time: 0.169, data_time: 0.002, memory: 1654, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6353, loss: 0.6353, grad_norm: 12.7139\n", + "2021-06-03 15:05:04,048 - mmaction - INFO - Epoch [19][5/15]\tlr: 7.813e-05, eta: 0:01:14, time: 0.860, data_time: 0.654, memory: 1654, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.5173, loss: 0.5173, grad_norm: 10.0505\n", + "2021-06-03 15:05:05,140 - mmaction - INFO - Epoch [19][10/15]\tlr: 7.813e-05, eta: 0:01:11, time: 0.220, data_time: 0.032, memory: 1654, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.4610, loss: 0.4610, grad_norm: 9.0271\n", + "2021-06-03 15:05:05,992 - mmaction - INFO - Epoch [19][15/15]\tlr: 7.813e-05, eta: 0:01:08, time: 0.170, data_time: 0.003, memory: 1654, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.4900, loss: 0.4900, grad_norm: 9.4134\n", + "2021-06-03 15:05:10,251 - mmaction - INFO - Epoch [20][5/15]\tlr: 7.813e-05, eta: 0:01:07, time: 0.832, data_time: 0.633, memory: 1654, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.4717, loss: 0.4717, grad_norm: 9.3263\n", + "2021-06-03 15:05:11,296 - mmaction - INFO - Epoch [20][10/15]\tlr: 7.813e-05, eta: 0:01:05, time: 0.210, data_time: 0.010, memory: 1654, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6269, loss: 0.6269, grad_norm: 12.3093\n", + "2021-06-03 15:05:12,249 - mmaction - INFO - Epoch [20][15/15]\tlr: 7.813e-05, eta: 0:01:02, time: 0.191, data_time: 0.022, memory: 1654, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6329, loss: 0.6329, grad_norm: 11.7156\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 5.8 task/s, elapsed: 2s, ETA: 0s" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2021-06-03 15:05:14,159 - mmaction - INFO - Evaluating top_k_accuracy ...\n", + "2021-06-03 15:05:14,161 - mmaction - INFO - \n", + "top1_acc\t1.0000\n", + "top5_acc\t1.0000\n", + "2021-06-03 15:05:14,166 - mmaction - INFO - Evaluating mean_class_accuracy ...\n", + "2021-06-03 15:05:14,168 - mmaction - INFO - \n", + "mean_acc\t1.0000\n", + "2021-06-03 15:05:14,599 - mmaction - INFO - Now best checkpoint is saved as best_top1_acc_epoch_20.pth.\n", + "2021-06-03 15:05:14,603 - mmaction - INFO - Best top1_acc is 1.0000 at 20 epoch.\n", + "2021-06-03 15:05:14,606 - mmaction - INFO - Saving checkpoint at 20 epochs\n", + "2021-06-03 15:05:15,008 - mmaction - INFO - Epoch(val) [20][5]\ttop1_acc: 1.0000, top5_acc: 1.0000, mean_class_accuracy: 1.0000\n", + "2021-06-03 15:05:19,127 - mmaction - INFO - Epoch [21][5/15]\tlr: 7.813e-05, eta: 0:01:01, time: 0.823, data_time: 0.618, memory: 1654, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.3904, loss: 0.3904, grad_norm: 7.6698\n", + "2021-06-03 15:05:20,196 - mmaction - INFO - Epoch [21][10/15]\tlr: 7.813e-05, eta: 0:00:58, time: 0.214, data_time: 0.024, memory: 1654, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.5884, loss: 0.5884, grad_norm: 11.4530\n", + "2021-06-03 15:05:21,218 - mmaction - INFO - Epoch [21][15/15]\tlr: 7.813e-05, eta: 0:00:56, 
time: 0.204, data_time: 0.032, memory: 1654, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5800, loss: 0.5800, grad_norm: 12.1364\n", + "2021-06-03 15:05:25,640 - mmaction - INFO - Epoch [22][5/15]\tlr: 7.813e-05, eta: 0:00:55, time: 0.864, data_time: 0.656, memory: 1654, top1_acc: 1.0000, top5_acc: 1.0000, loss_cls: 0.3669, loss: 0.3669, grad_norm: 7.3256\n", + "2021-06-03 15:05:26,903 - mmaction - INFO - Epoch [22][10/15]\tlr: 7.813e-05, eta: 0:00:52, time: 0.255, data_time: 0.063, memory: 1654, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5618, loss: 0.5618, grad_norm: 11.0834\n", + "2021-06-03 15:05:27,740 - mmaction - INFO - Epoch [22][15/15]\tlr: 7.813e-05, eta: 0:00:50, time: 0.167, data_time: 0.001, memory: 1654, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6190, loss: 0.6190, grad_norm: 12.5605\n", + "2021-06-03 15:05:32,036 - mmaction - INFO - Epoch [23][5/15]\tlr: 7.813e-05, eta: 0:00:48, time: 0.839, data_time: 0.631, memory: 1654, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.5490, loss: 0.5490, grad_norm: 11.1925\n", + "2021-06-03 15:05:33,384 - mmaction - INFO - Epoch [23][10/15]\tlr: 7.813e-05, eta: 0:00:46, time: 0.272, data_time: 0.081, memory: 1654, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.5988, loss: 0.5988, grad_norm: 12.0808\n", + "2021-06-03 15:05:34,222 - mmaction - INFO - Epoch [23][15/15]\tlr: 7.813e-05, eta: 0:00:43, time: 0.167, data_time: 0.001, memory: 1654, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6084, loss: 0.6084, grad_norm: 11.4491\n", + "2021-06-03 15:05:38,546 - mmaction - INFO - Epoch [24][5/15]\tlr: 7.813e-05, eta: 0:00:42, time: 0.845, data_time: 0.637, memory: 1654, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.5125, loss: 0.5125, grad_norm: 10.9388\n", + "2021-06-03 15:05:39,792 - mmaction - INFO - Epoch [24][10/15]\tlr: 7.813e-05, eta: 0:00:39, time: 0.251, data_time: 0.059, memory: 1654, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6036, loss: 0.6036, grad_norm: 12.3427\n", + "2021-06-03 15:05:40,640 - mmaction - INFO - Epoch [24][15/15]\tlr: 7.813e-05, eta: 0:00:37, time: 0.169, data_time: 0.001, memory: 1654, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.5052, loss: 0.5052, grad_norm: 10.0184\n", + "2021-06-03 15:05:44,885 - mmaction - INFO - Epoch [25][5/15]\tlr: 7.813e-05, eta: 0:00:35, time: 0.831, data_time: 0.623, memory: 1654, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.5324, loss: 0.5324, grad_norm: 10.9933\n", + "2021-06-03 15:05:46,302 - mmaction - INFO - Epoch [25][10/15]\tlr: 7.813e-05, eta: 0:00:33, time: 0.283, data_time: 0.097, memory: 1654, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6386, loss: 0.6386, grad_norm: 12.9881\n", + "2021-06-03 15:05:47,135 - mmaction - INFO - Epoch [25][15/15]\tlr: 7.813e-05, eta: 0:00:31, time: 0.166, data_time: 0.001, memory: 1654, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.4406, loss: 0.4406, grad_norm: 9.0257\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 5.8 task/s, elapsed: 2s, ETA: 0s" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2021-06-03 15:05:49,031 - mmaction - INFO - Evaluating top_k_accuracy ...\n", + "2021-06-03 15:05:49,033 - mmaction - INFO - \n", + "top1_acc\t0.8000\n", + "top5_acc\t1.0000\n", + "2021-06-03 15:05:49,039 - mmaction - INFO - Evaluating mean_class_accuracy ...\n", + "2021-06-03 15:05:49,040 - mmaction - INFO - \n", + "mean_acc\t0.8000\n", + "2021-06-03 15:05:49,042 - mmaction - INFO - Epoch(val) [25][5]\ttop1_acc: 0.8000, 
top5_acc: 1.0000, mean_class_accuracy: 0.8000\n", + "2021-06-03 15:05:53,064 - mmaction - INFO - Epoch [26][5/15]\tlr: 7.813e-05, eta: 0:00:29, time: 0.801, data_time: 0.590, memory: 1654, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.3512, loss: 0.3512, grad_norm: 7.0619\n", + "2021-06-03 15:05:54,188 - mmaction - INFO - Epoch [26][10/15]\tlr: 7.813e-05, eta: 0:00:27, time: 0.225, data_time: 0.030, memory: 1654, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.3328, loss: 0.3328, grad_norm: 7.1553\n", + "2021-06-03 15:05:55,139 - mmaction - INFO - Epoch [26][15/15]\tlr: 7.813e-05, eta: 0:00:25, time: 0.192, data_time: 0.018, memory: 1654, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.4698, loss: 0.4698, grad_norm: 9.4666\n", + "2021-06-03 15:05:59,226 - mmaction - INFO - Epoch [27][5/15]\tlr: 7.813e-05, eta: 0:00:23, time: 0.799, data_time: 0.593, memory: 1654, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.5434, loss: 0.5434, grad_norm: 10.9087\n", + "2021-06-03 15:06:00,493 - mmaction - INFO - Epoch [27][10/15]\tlr: 7.813e-05, eta: 0:00:21, time: 0.254, data_time: 0.067, memory: 1654, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.3672, loss: 0.3672, grad_norm: 7.5920\n", + "2021-06-03 15:06:01,451 - mmaction - INFO - Epoch [27][15/15]\tlr: 7.813e-05, eta: 0:00:18, time: 0.191, data_time: 0.014, memory: 1654, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.3633, loss: 0.3633, grad_norm: 7.8609\n", + "2021-06-03 15:06:05,792 - mmaction - INFO - Epoch [28][5/15]\tlr: 7.813e-05, eta: 0:00:16, time: 0.850, data_time: 0.645, memory: 1654, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6003, loss: 0.6003, grad_norm: 12.0149\n", + "2021-06-03 15:06:07,078 - mmaction - INFO - Epoch [28][10/15]\tlr: 7.813e-05, eta: 0:00:14, time: 0.257, data_time: 0.068, memory: 1654, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6538, loss: 0.6538, grad_norm: 13.2297\n", + "2021-06-03 15:06:07,941 - mmaction - INFO - Epoch [28][15/15]\tlr: 7.813e-05, eta: 0:00:12, time: 0.172, data_time: 0.003, memory: 1654, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.4151, loss: 0.4151, grad_norm: 8.6073\n", + "2021-06-03 15:06:12,212 - mmaction - INFO - Epoch [29][5/15]\tlr: 7.813e-05, eta: 0:00:10, time: 0.836, data_time: 0.629, memory: 1654, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.3997, loss: 0.3997, grad_norm: 8.2630\n", + "2021-06-03 15:06:13,414 - mmaction - INFO - Epoch [29][10/15]\tlr: 7.813e-05, eta: 0:00:08, time: 0.240, data_time: 0.050, memory: 1654, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.3257, loss: 0.3257, grad_norm: 6.8715\n", + "2021-06-03 15:06:14,279 - mmaction - INFO - Epoch [29][15/15]\tlr: 7.813e-05, eta: 0:00:06, time: 0.173, data_time: 0.002, memory: 1654, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5843, loss: 0.5843, grad_norm: 12.2261\n", + "2021-06-03 15:06:18,611 - mmaction - INFO - Epoch [30][5/15]\tlr: 7.813e-05, eta: 0:00:04, time: 0.849, data_time: 0.645, memory: 1654, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.4302, loss: 0.4302, grad_norm: 8.8877\n", + "2021-06-03 15:06:20,008 - mmaction - INFO - Epoch [30][10/15]\tlr: 7.813e-05, eta: 0:00:02, time: 0.280, data_time: 0.091, memory: 1654, top1_acc: 1.0000, top5_acc: 1.0000, loss_cls: 0.2355, loss: 0.2355, grad_norm: 5.3905\n", + "2021-06-03 15:06:20,850 - mmaction - INFO - Epoch [30][15/15]\tlr: 7.813e-05, eta: 0:00:00, time: 0.168, data_time: 0.001, memory: 1654, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.4508, loss: 0.4508, grad_norm: 9.6814\n" + ] + }, + { + "name": "stdout", + "output_type": 
"stream", + "text": [ + "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 5.8 task/s, elapsed: 2s, ETA: 0s" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2021-06-03 15:06:22,740 - mmaction - INFO - Evaluating top_k_accuracy ...\n", + "2021-06-03 15:06:22,742 - mmaction - INFO - \n", + "top1_acc\t1.0000\n", + "top5_acc\t1.0000\n", + "2021-06-03 15:06:22,746 - mmaction - INFO - Evaluating mean_class_accuracy ...\n", + "2021-06-03 15:06:22,747 - mmaction - INFO - \n", + "mean_acc\t1.0000\n", + "2021-06-03 15:06:22,756 - mmaction - INFO - Saving checkpoint at 30 epochs\n", + "2021-06-03 15:06:23,168 - mmaction - INFO - Epoch(val) [30][5]\ttop1_acc: 1.0000, top5_acc: 1.0000, mean_class_accuracy: 1.0000\n" + ] + } + ], + "source": [ + "import os.path as osp\n", + "\n", + "from mmaction.datasets import build_dataset\n", + "from mmaction.models import build_model\n", + "from mmaction.apis import train_model\n", + "\n", + "import mmcv\n", + "\n", + "# 构建数据集\n", + "datasets = [build_dataset(cfg.data.train)]\n", + "\n", + "# 构建动作识别模型\n", + "model = build_model(cfg.model, train_cfg=cfg.get('train_cfg'), test_cfg=cfg.get('test_cfg'))\n", + "\n", + "# 创建工作目录并训练模型\n", + "mmcv.mkdir_or_exist(osp.abspath(cfg.work_dir))\n", + "train_model(model, datasets, cfg, distributed=False, validate=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "ryVoSfZVmogw" + }, + "source": [ + "## 评价模型\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "eyY3hCMwyTct", + "outputId": "54c2d6ce-3f3e-45ed-b3d4-f628ba4263b0" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[ ] 0/10, elapsed: 0s, ETA:" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/usr/local/lib/python3.7/dist-packages/torch/utils/data/dataloader.py:477: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. 
+ {
+  "cell_type": "code",
+  "execution_count": null,
+  "metadata": {
+   "colab": {
+    "base_uri": "https://localhost:8080/"
+   },
+   "id": "eyY3hCMwyTct",
+   "outputId": "54c2d6ce-3f3e-45ed-b3d4-f628ba4263b0"
+  },
+  "outputs": [
+   {
+    "name": "stdout",
+    "output_type": "stream",
+    "text": [
+     "[ ] 0/10, elapsed: 0s, ETA:"
+    ]
+   },
+   {
+    "name": "stderr",
+    "output_type": "stream",
+    "text": [
+     "/usr/local/lib/python3.7/dist-packages/torch/utils/data/dataloader.py:477: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n",
+     "  cpuset_checked))\n"
+    ]
+   },
+   {
+    "name": "stdout",
+    "output_type": "stream",
+    "text": [
+     "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 2.2 task/s, elapsed: 4s, ETA: 0s\n",
+     "Evaluating top_k_accuracy ...\n",
+     "\n",
+     "top1_acc\t0.9000\n",
+     "top5_acc\t1.0000\n",
+     "\n",
+     "Evaluating mean_class_accuracy ...\n",
+     "\n",
+     "mean_acc\t0.9000\n",
+     "top1_acc: 0.9000\n",
+     "top5_acc: 1.0000\n",
+     "mean_class_accuracy: 0.9000\n"
+    ]
+   }
+  ],
+  "source": [
+   "from mmaction.apis import single_gpu_test\n",
+   "from mmaction.datasets import build_dataloader\n",
+   "from mmcv.parallel import MMDataParallel\n",
+   "\n",
+   "# Build the test dataset\n",
+   "dataset = build_dataset(cfg.data.test, dict(test_mode=True))\n",
+   "data_loader = build_dataloader(\n",
+   "        dataset,\n",
+   "        videos_per_gpu=1,\n",
+   "        workers_per_gpu=cfg.data.workers_per_gpu,\n",
+   "        dist=False,\n",
+   "        shuffle=False)\n",
+   "model = MMDataParallel(model, device_ids=[0])\n",
+   "outputs = single_gpu_test(model, data_loader)\n",
+   "\n",
+   "# Evaluate the trained recognizer on the test set\n",
+   "eval_config = cfg.evaluation\n",
+   "eval_config.pop('interval')\n",
+   "eval_res = dataset.evaluate(outputs, **eval_config)\n",
+   "for name, val in eval_res.items():\n",
+   "    print(f'{name}: {val:.04f}')"
+  ]
+ },
+ {
+  "cell_type": "markdown",
+  "metadata": {
+   "id": "8EbJVEEmrv0S"
+  },
+  "source": [
+   "## Spatio-temporal action detection\n",
+   "\n",
+   "Here we use mmdet to assist with the spatio-temporal action detection task, so it has to be installed under the root directory first."
+  ]
+ },
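+ {
+  "cell_type": "markdown",
+  "metadata": {},
+  "source": [
+   "*(Editor's note, inferred from the download logs further below: by default the demo fetches a Faster R-CNN model from mmdet for human detection and a SlowOnly R101 AVA model for the action detection itself.)*\n"
+  ]
+ },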
+ {
+  "cell_type": "code",
+  "execution_count": null,
+  "metadata": {
+   "colab": {
+    "base_uri": "https://localhost:8080/"
+   },
+   "id": "Yq5e5l-zEMpf",
+   "outputId": "178b2d61-d00c-4b93-847c-efc4b249ceaa"
+  },
+  "outputs": [
+   {
+    "name": "stdout",
+    "output_type": "stream",
+    "text": [
+     "/content\n",
+     "Cloning into 'mmdetection'...\n",
+     "remote: Enumerating objects: 18118, done.\u001b[K\n",
+     "remote: Counting objects: 100% (207/207), done.\u001b[K\n",
+     "remote: Compressing objects: 100% (163/163), done.\u001b[K\n",
+     "remote: Total 18118 (delta 87), reused 113 (delta 44), pack-reused 17911\u001b[K\n",
+     "Receiving objects: 100% (18118/18118), 21.50 MiB | 33.66 MiB/s, done.\n",
+     "Resolving deltas: 100% (12576/12576), done.\n",
+     "/content/mmdetection\n",
+     "Obtaining file:///content/mmdetection\n",
+     "Requirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from mmdet==2.13.0) (3.2.2)\n",
+     "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from mmdet==2.13.0) (1.19.5)\n",
+     "Requirement already satisfied: six in /usr/local/lib/python3.7/dist-packages (from mmdet==2.13.0) (1.15.0)\n",
+     "Collecting terminaltables\n",
+     "  Downloading https://files.pythonhosted.org/packages/9b/c4/4a21174f32f8a7e1104798c445dacdc1d4df86f2f26722767034e4de4bff/terminaltables-3.1.0.tar.gz\n",
+     "Requirement already satisfied: pycocotools in /usr/local/lib/python3.7/dist-packages (from mmdet==2.13.0) (2.0.2)\n",
+     "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmdet==2.13.0) (2.4.7)\n",
+     "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmdet==2.13.0) (0.10.0)\n",
+     "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmdet==2.13.0) (1.3.1)\n",
+     "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmdet==2.13.0) (2.8.1)\n",
+     "Requirement already satisfied: cython>=0.27.3 in /usr/local/lib/python3.7/dist-packages (from pycocotools->mmdet==2.13.0) (0.29.23)\n",
+     "Requirement already satisfied: setuptools>=18.0 in /usr/local/lib/python3.7/dist-packages (from pycocotools->mmdet==2.13.0) (57.0.0)\n",
+     "Building wheels for collected packages: terminaltables\n",
+     "  Building wheel for terminaltables (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
+     "  Created wheel for terminaltables: filename=terminaltables-3.1.0-cp37-none-any.whl size=15356 sha256=37a2b87aceff6ca4b32508fac67142e960106f99a33a0a1d2127aaaecd9fae0b\n",
+     "  Stored in directory: /root/.cache/pip/wheels/30/6b/50/6c75775b681fb36cdfac7f19799888ef9d8813aff9e379663e\n",
+     "Successfully built terminaltables\n",
+     "Installing collected packages: terminaltables, mmdet\n",
+     "  Running setup.py develop for mmdet\n",
+     "Successfully installed mmdet terminaltables-3.1.0\n",
+     "/content/mmaction2\n"
+    ]
+   }
+  ],
+  "source": [
+   "# Clone the mmdetection repository\n",
+   "%cd ..\n",
+   "!git clone https://github.com/open-mmlab/mmdetection.git\n",
+   "%cd mmdetection\n",
+   "\n",
+   "# Install mmdet in editable mode\n",
+   "!pip install -e .\n",
+   "%cd ../mmaction2"
+  ]
+ },
+ {
+  "cell_type": "markdown",
+  "metadata": {
+   "id": "dWLxybK6INRI"
+  },
+  "source": [
+   "Meanwhile, we need to upload a video to the mmaction2 directory"
+  ]
+ },
+ {
+  "cell_type": "code",
+  "execution_count": 1,
+  "metadata": {},
+  "outputs": [
+   {
+    "name": "stderr",
+    "output_type": "stream",
+    "text": [
+     "'wget' is not recognized as an internal or external command,\n",
+     "operable program or batch file.\n"
+    ]
+   }
+  ],
+  "source": [
+   "!wget https://download.openmmlab.com/mmaction/dataset/sample/1j20qq1JyX4.mp4 -O demo/1j20qq1JyX4.mp4"
+  ]
+ },
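+ {
+  "cell_type": "markdown",
+  "metadata": {},
+  "source": [
+   "*(Editor's addition, a sketch: the stderr above shows that `wget` is unavailable when the notebook is run on Windows; the same file can be fetched with the Python standard library instead.)*\n"
+  ]
+ },
+ {
+  "cell_type": "code",
+  "execution_count": null,
+  "metadata": {},
+  "outputs": [],
+  "source": [
+   "# Editor's sketch: wget-free fallback using only the standard library.\n",
+   "import urllib.request\n",
+   "urllib.request.urlretrieve(\n",
+   "    'https://download.openmmlab.com/mmaction/dataset/sample/1j20qq1JyX4.mp4',\n",
+   "    'demo/1j20qq1JyX4.mp4')\n"
+  ]
+ },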
+ {
+  "cell_type": "code",
+  "execution_count": null,
+  "metadata": {
+   "colab": {
+    "base_uri": "https://localhost:8080/"
+   },
+   "id": "AUw6xa1YrvZb",
+   "outputId": "566e2683-9158-4173-b821-b9d9a34cf893"
+  },
+  "outputs": [
+   {
+    "name": "stdout",
+    "output_type": "stream",
+    "text": [
+     "Imageio: 'ffmpeg-linux64-v3.3.1' was not found on your computer; downloading it now.\n",
+     "Try 1. Download from https://github.com/imageio/imageio-binaries/raw/master/ffmpeg/ffmpeg-linux64-v3.3.1 (43.8 MB)\n",
+     "Downloading: 45929032/45929032 bytes (100.0%)\n",
+     "  Done\n",
+     "File saved as /root/.imageio/ffmpeg/ffmpeg-linux64-v3.3.1.\n",
+     "Use load_from_http loader\n",
+     "Downloading: \"http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth\" to /root/.cache/torch/hub/checkpoints/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth\n",
+     "100% 160M/160M [00:17<00:00, 9.30MB/s]\n",
+     "Performing Human Detection for each frame\n",
+     "100% 217/217 [00:26<00:00, 8.24it/s]\n",
+     "Use load_from_http loader\n",
+     "Downloading: \"https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201217-16378594.pth\" to /root/.cache/torch/hub/checkpoints/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201217-16378594.pth\n",
+     "100% 228M/228M [00:24<00:00, 9.79MB/s]\n",
+     "Performing SpatioTemporal Action Detection for each clip\n",
+     "217it [00:23, 9.19it/s]\n",
+     "Performing visualization\n",
+     "[MoviePy] >>>> Building video demo/stdet_demo.mp4\n",
+     "[MoviePy] Writing video demo/stdet_demo.mp4\n",
+     "100% 434/434 [00:10<00:00, 39.93it/s]\n",
+     "[MoviePy] Done.\n",
+     "[MoviePy] >>>> Video ready: demo/stdet_demo.mp4 \n",
+     "\n"
+    ]
+   }
+  ],
+  "source": [
+   "# Run spatio-temporal action detection on the video\n",
+   "!python demo/demo_spatiotemporal_det.py --video demo/1j20qq1JyX4.mp4"
+  ]
+ },
+ {
+  "cell_type": "code",
+  "execution_count": null,
+  "metadata": {
+   "colab": {
+    "base_uri": "https://localhost:8080/",
+    "height": 341
+   },
+   "id": "oRabUF1TsE-v",
+   "outputId": "ff8cee1a-6715-4368-edf2-ce796fd946db"
+  },
+  "outputs": [
+   {
+    "data": {
+     "text/html": [
+      "\n",
+      "\n"
+     ],
+     "text/plain": [
+      ""
+     ]
+    },
+    "execution_count": 15,
+    "metadata": {
+     "tags": []
+    },
+    "output_type": "execute_result"
+   }
+  ],
+  "source": [
+   "# Check the output video\n",
+   "from IPython.display import HTML\n",
+   "from base64 import b64encode\n",
+   "mp4 = open('demo/stdet_demo.mp4','rb').read()\n",
+   "data_url = \"data:video/mp4;base64,\" + b64encode(mp4).decode()\n",
+   "HTML(\"\"\"\n",
+   "<video width=400 controls>\n",
+   "      <source src=\"%s\" type=\"video/mp4\">\n",
+   "</video>\n",
+   "\"\"\" % data_url)"
+  ]
+ }
+ ],
+ "metadata": {
+  "accelerator": "GPU",
+  "colab": {
+   "collapsed_sections": [],
+   "include_colab_link": true,
+   "name": "MMAction2 new.ipynb",
+   "provenance": [],
+   "toc_visible": true
+  },
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.8.10"
+  },
+  "widgets": {
+   "application/vnd.jupyter.widget-state+json": {
+    "4a9a4d1a6a554315a7d4362fd9ef0290": {
+     "model_module": "@jupyter-widgets/base",
+     "model_name": "LayoutModel",
+     "state": {
+      "_model_module": "@jupyter-widgets/base",
+      "_model_module_version": "1.2.0",
+      "_model_name": "LayoutModel",
+      "_view_count": null, +      
"_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "57f2df1708fa455ea8a305b9100ad171": { + "model_module": "@jupyter-widgets/controls", + "model_name": "HTMLModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HTMLModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HTMLView", + "description": "", + "description_tooltip": null, + "layout": "IPY_MODEL_974f4fceb03748f1b346b498df9828a3", + "placeholder": "​", + "style": "IPY_MODEL_e6b45b124776452a85136fc3e18502f6", + "value": " 97.8M/97.8M [00:45<00:00, 2.26MB/s]" + } + }, + "81bfbdf1ec55451b8be8a68fd1b0cf18": { + "model_module": "@jupyter-widgets/controls", + "model_name": "HBoxModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HBoxModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HBoxView", + "box_style": "", + "children": [ + "IPY_MODEL_c992b295041a4908a6a0d4f62a542cca", + "IPY_MODEL_57f2df1708fa455ea8a305b9100ad171" + ], + "layout": "IPY_MODEL_4a9a4d1a6a554315a7d4362fd9ef0290" + } + }, + "8c947d1afee142e4b6cd2e0e26f46d6f": { + "model_module": "@jupyter-widgets/controls", + "model_name": "ProgressStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "ProgressStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "bar_color": null, + "description_width": "initial" + } + }, + "974f4fceb03748f1b346b498df9828a3": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": 
null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "adf3a16cdae740cf882999a25d53e8f7": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "c992b295041a4908a6a0d4f62a542cca": { + "model_module": "@jupyter-widgets/controls", + "model_name": "FloatProgressModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "FloatProgressModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "ProgressView", + "bar_style": "success", + "description": "100%", + "description_tooltip": null, + "layout": "IPY_MODEL_adf3a16cdae740cf882999a25d53e8f7", + "max": 102502400, + "min": 0, + "orientation": "horizontal", + "style": "IPY_MODEL_8c947d1afee142e4b6cd2e0e26f46d6f", + "value": 102502400 + } + }, + "e6b45b124776452a85136fc3e18502f6": { + "model_module": "@jupyter-widgets/controls", + "model_name": "DescriptionStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "DescriptionStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "description_width": "" + } + } + } + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} \ No newline at end of file From 851b92bcd84288a8ee3c7366800c187424fc359a Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Thu, 17 Jun 2021 22:26:25 +0800 Subject: [PATCH 161/414] Fix link for CN tutorials (#942) --- demo/mmaction2_tutorial_zh-CN.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/demo/mmaction2_tutorial_zh-CN.ipynb b/demo/mmaction2_tutorial_zh-CN.ipynb index 73d4702db6..0d7bf32b08 100644 --- a/demo/mmaction2_tutorial_zh-CN.ipynb +++ b/demo/mmaction2_tutorial_zh-CN.ipynb @@ -1644,4 +1644,4 @@ }, "nbformat": 4, "nbformat_minor": 4 -} \ No newline at end of file +} From 0a6fde1abb8403f1f68b568f5b4694c6f828e27e Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Fri, 18 Jun 2021 16:42:03 +0800 Subject: [PATCH 162/414] [Improvement] Change ResNet3D default value for stem 
with BC (#939) * change ResNet3D default value for stem with BC * fix unittest --- configs/_base_/models/i3d_r50.py | 5 ++++ mmaction/models/backbones/resnet3d.py | 12 ++++----- .../test_common_modules/test_resnet3d.py | 26 +++++++++---------- 3 files changed, 24 insertions(+), 19 deletions(-) diff --git a/configs/_base_/models/i3d_r50.py b/configs/_base_/models/i3d_r50.py index 629ea148a7..fee08bc291 100644 --- a/configs/_base_/models/i3d_r50.py +++ b/configs/_base_/models/i3d_r50.py @@ -6,6 +6,9 @@ pretrained2d=True, pretrained='torchvision://resnet50', depth=50, + conv1_kernel=(5, 7, 7), + conv1_stride_t=2, + pool1_stride_t=2, conv_cfg=dict(type='Conv3d'), norm_eval=False, inflate=((1, 1, 1), (1, 0, 1, 0), (1, 0, 1, 0, 1, 0), (0, 1, 0)), @@ -20,3 +23,5 @@ # model training and testing settings train_cfg=None, test_cfg=dict(average_clips='prob')) + +# This setting refers to https://github.com/open-mmlab/mmaction/blob/master/mmaction/models/tenons/backbones/resnet_i3d.py#L329-L332 # noqa: E501 diff --git a/mmaction/models/backbones/resnet3d.py b/mmaction/models/backbones/resnet3d.py index 2f0eea8d72..4d03933e69 100644 --- a/mmaction/models/backbones/resnet3d.py +++ b/mmaction/models/backbones/resnet3d.py @@ -345,15 +345,15 @@ class ResNet3d(nn.Module): dilations (Sequence[int]): Dilation of each stage. Default: ``(1, 1, 1, 1)``. conv1_kernel (Sequence[int]): Kernel size of the first conv layer. - Default: ``(5, 7, 7)``. + Default: ``(3, 7, 7)``. conv1_stride_s (int): Spatial stride of the first conv layer. Default: 2. conv1_stride_t (int): Temporal stride of the first conv layer. - Default: 2. + Default: 1. pool1_stride_s (int): Spatial stride of the first pooling layer. Default: 2. pool1_stride_t (int): Temporal stride of the first pooling layer. - Default: 2. + Default: 1. with_pool2 (bool): Whether to use pool2. Default: True. style (str): `pytorch` or `caffe`. 
If set to "pytorch", the stride-two layer is the 3x3 conv layer, otherwise the stride-two layer is @@ -405,11 +405,11 @@ def __init__(self, spatial_strides=(1, 2, 2, 2), temporal_strides=(1, 1, 1, 1), dilations=(1, 1, 1, 1), - conv1_kernel=(5, 7, 7), + conv1_kernel=(3, 7, 7), conv1_stride_s=2, - conv1_stride_t=2, + conv1_stride_t=1, pool1_stride_s=2, - pool1_stride_t=2, + pool1_stride_t=1, with_pool2=True, style='pytorch', frozen_stages=-1, diff --git a/tests/test_models/test_common_modules/test_resnet3d.py b/tests/test_models/test_common_modules/test_resnet3d.py index 5bec36af62..1da93c55d3 100644 --- a/tests/test_models/test_common_modules/test_resnet3d.py +++ b/tests/test_models/test_common_modules/test_resnet3d.py @@ -190,10 +190,10 @@ def test_resnet3d_backbone(): resnet3d_34_frozen = resnet3d_34_frozen.cuda() imgs_gpu = imgs.cuda() feat = resnet3d_34_frozen(imgs_gpu) - assert feat.shape == torch.Size([1, 512, 1, 2, 2]) + assert feat.shape == torch.Size([1, 512, 3, 2, 2]) else: feat = resnet3d_34_frozen(imgs) - assert feat.shape == torch.Size([1, 512, 1, 2, 2]) + assert feat.shape == torch.Size([1, 512, 3, 2, 2]) # resnet3d with depth 50 inference input_shape = (1, 3, 6, 64, 64) @@ -204,10 +204,10 @@ def test_resnet3d_backbone(): resnet3d_50_frozen = resnet3d_50_frozen.cuda() imgs_gpu = imgs.cuda() feat = resnet3d_50_frozen(imgs_gpu) - assert feat.shape == torch.Size([1, 2048, 1, 2, 2]) + assert feat.shape == torch.Size([1, 2048, 3, 2, 2]) else: feat = resnet3d_50_frozen(imgs) - assert feat.shape == torch.Size([1, 2048, 1, 2, 2]) + assert feat.shape == torch.Size([1, 2048, 3, 2, 2]) # resnet3d with depth 50 in caffe style inference resnet3d_50_caffe = ResNet3d(50, None, pretrained2d=False, style='caffe') @@ -220,10 +220,10 @@ def test_resnet3d_backbone(): resnet3d_50_caffe = resnet3d_50_caffe.cuda() imgs_gpu = imgs.cuda() feat = resnet3d_50_caffe(imgs_gpu) - assert feat.shape == torch.Size([1, 2048, 1, 2, 2]) + assert feat.shape == torch.Size([1, 2048, 3, 2, 2]) else: feat = resnet3d_50_caffe(imgs) - assert feat.shape == torch.Size([1, 2048, 1, 2, 2]) + assert feat.shape == torch.Size([1, 2048, 3, 2, 2]) # resnet3d with depth 34 in caffe style inference resnet3d_34_caffe = ResNet3d(34, None, pretrained2d=False, style='caffe') @@ -235,10 +235,10 @@ def test_resnet3d_backbone(): resnet3d_34_caffe = resnet3d_34_caffe.cuda() imgs_gpu = imgs.cuda() feat = resnet3d_34_caffe(imgs_gpu) - assert feat.shape == torch.Size([1, 512, 1, 2, 2]) + assert feat.shape == torch.Size([1, 512, 3, 2, 2]) else: feat = resnet3d_34_caffe(imgs) - assert feat.shape == torch.Size([1, 512, 1, 2, 2]) + assert feat.shape == torch.Size([1, 512, 3, 2, 2]) # resnet3d with depth with 3x3x3 inflate_style inference resnet3d_50_1x1x1 = ResNet3d( @@ -251,10 +251,10 @@ def test_resnet3d_backbone(): resnet3d_50_1x1x1 = resnet3d_50_1x1x1.cuda() imgs_gpu = imgs.cuda() feat = resnet3d_50_1x1x1(imgs_gpu) - assert feat.shape == torch.Size([1, 2048, 1, 2, 2]) + assert feat.shape == torch.Size([1, 2048, 3, 2, 2]) else: feat = resnet3d_50_1x1x1(imgs) - assert feat.shape == torch.Size([1, 2048, 1, 2, 2]) + assert feat.shape == torch.Size([1, 2048, 3, 2, 2]) resnet3d_34_1x1x1 = ResNet3d( 34, None, pretrained2d=False, inflate_style='3x3x3') @@ -267,10 +267,10 @@ def test_resnet3d_backbone(): resnet3d_34_1x1x1 = resnet3d_34_1x1x1.cuda() imgs_gpu = imgs.cuda() feat = resnet3d_34_1x1x1(imgs_gpu) - assert feat.shape == torch.Size([1, 512, 1, 2, 2]) + assert feat.shape == torch.Size([1, 512, 3, 2, 2]) else: feat = resnet3d_34_1x1x1(imgs) - 
assert feat.shape == torch.Size([1, 512, 1, 2, 2])
+        assert feat.shape == torch.Size([1, 512, 3, 2, 2])
 
     # resnet3d with non-local module
     non_local_cfg = dict(
@@ -293,7 +293,7 @@ def test_resnet3d_backbone():
             assert hasattr(layer[i], 'non_local_block')
 
     feat = resnet3d_nonlocal(imgs)
-    assert feat.shape == torch.Size([1, 2048, 1, 2, 2])
+    assert feat.shape == torch.Size([1, 2048, 3, 2, 2])
 
 
 def test_resnet3d_layer():

From 830f2a447bb12215a0bdebdc11f7ae1b435d6359 Mon Sep 17 00:00:00 2001
From: Rejnald Lleshi <46654505+rlleshi@users.noreply.github.com>
Date: Tue, 22 Jun 2021 18:40:53 +0200
Subject: [PATCH 163/414] [Improvement] Add font color to args in long_video_demo (#947)

* [Improvement] Add font color to args in long_video_demo

* [Improvement] Add font color to args in long_video_demo

* [Improvement] Add font color to args in long_video_demo

* polish

* polish

Co-authored-by: dreamerlin <528557675@qq.com>
---
 demo/README.md          |  7 +++++--
 demo/long_video_demo.py | 36 +++++++++++++++++++++++++++---------
 2 files changed, 32 insertions(+), 11 deletions(-)

diff --git a/demo/README.md b/demo/README.md
index f51f15a598..71fa55e2b0 100644
--- a/demo/README.md
+++ b/demo/README.md
@@ -309,6 +309,8 @@ Optional arguments:
 - `DEVICE_TYPE`: Type of device to run the demo. Allowed values are cuda device like `cuda:0` or `cpu`. If not specified, it will be set to `cuda:0`.
 - `THRESHOLD`: Threshold of prediction score for action recognition. Only label with score higher than the threshold will be shown. If not specified, it will be set to 0.01.
 - `STRIDE`: By default, the demo generates a prediction for each single frame, which might cost lots of time. To speed up, you can set the argument `STRIDE` and then the demo will generate a prediction every `STRIDE x sample_length` frames (`sample_length` indicates the size of temporal window from which you sample frames, which equals to `clip_len x frame_interval`). For example, if the sample_length is 64 frames and you set `STRIDE` to 0.5, predictions will be generated every 32 frames. If set as 0, predictions will be generated for each frame. The desired value of `STRIDE` is (0, 1], while it also works for `STRIDE > 1` (the generated predictions will be too sparse). Default: 0.
+- `LABEL_COLOR`: Font color of the labels in (B, G, R). Default is white, that is (255, 255, 255).
+- `MSG_COLOR`: Font color of the messages in (B, G, R). Default is gray, that is (128, 128, 128).
 
 Examples:
 
@@ -343,11 +345,12 @@ or use checkpoint url from `configs/` to directly load corresponding checkpoint,
    demo/label_map_k400.txt PATH_TO_SAVED_VIDEO --input-step 3 --device cpu --threshold 0.2
    ```
 
-4. Predict different labels in a long video by using a I3D model on gpu, with input_step=1 and threshold=0.01 as default.
+4. Predict different labels in a long video by using an I3D model on gpu, with input_step=1, threshold=0.01 as default and print the labels in cyan.
 
    ```shell
    python demo/long_video_demo.py configs/recognition/i3d/i3d_r50_video_inference_32x2x1_100e_kinetics400_rgb.py \
-     checkpoints/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth PATH_TO_LONG_VIDEO demo/label_map_k400.txt PATH_TO_SAVED_VIDEO
+     checkpoints/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth PATH_TO_LONG_VIDEO demo/label_map_k400.txt PATH_TO_SAVED_VIDEO \
+     --label-color 255 255 0
    ```
Predict different labels in a long video by using a I3D model on gpu and save the results as a `json` file diff --git a/demo/long_video_demo.py b/demo/long_video_demo.py index 19620988a6..de253832c8 100644 --- a/demo/long_video_demo.py +++ b/demo/long_video_demo.py @@ -16,8 +16,6 @@ FONTFACE = cv2.FONT_HERSHEY_COMPLEX_SMALL FONTSCALE = 1 -FONTCOLOR = (255, 255, 255) # BGR, white -MSGCOLOR = (128, 128, 128) # BGR, gray THICKNESS = 1 LINETYPE = 1 @@ -64,11 +62,30 @@ def parse_args(): help='override some settings in the used config, the key-value pair ' 'in xxx=yyy format will be merged into config file. For example, ' "'--cfg-options model.backbone.depth=18 model.backbone.with_cp=True'") + parser.add_argument( + '--label-color', + nargs='+', + type=int, + default=(255, 255, 255), + help='font color (B, G, R) of the labels in output video') + parser.add_argument( + '--msg-color', + nargs='+', + type=int, + default=(128, 128, 128), + help='font color (B, G, R) of the messages in output video') args = parser.parse_args() return args -def show_results_video(result_queue, text_info, thr, msg, frame, video_writer): +def show_results_video(result_queue, + text_info, + thr, + msg, + frame, + video_writer, + label_color=(255, 255, 255), + msg_color=(128, 128, 128)): if len(result_queue) != 0: text_info = {} results = result_queue.popleft() @@ -79,14 +96,14 @@ def show_results_video(result_queue, text_info, thr, msg, frame, video_writer): location = (0, 40 + i * 20) text = selected_label + ': ' + str(round(score, 2)) text_info[location] = text - cv2.putText(frame, text, location, FONTFACE, FONTSCALE, FONTCOLOR, - THICKNESS, LINETYPE) + cv2.putText(frame, text, location, FONTFACE, FONTSCALE, + label_color, THICKNESS, LINETYPE) elif len(text_info): for location, text in text_info.items(): - cv2.putText(frame, text, location, FONTFACE, FONTSCALE, FONTCOLOR, - THICKNESS, LINETYPE) + cv2.putText(frame, text, location, FONTFACE, FONTSCALE, + label_color, THICKNESS, LINETYPE) else: - cv2.putText(frame, msg, (0, 40), FONTFACE, FONTSCALE, MSGCOLOR, + cv2.putText(frame, msg, (0, 40), FONTFACE, FONTSCALE, msg_color, THICKNESS, LINETYPE) video_writer.write(frame) return text_info @@ -168,7 +185,8 @@ def show_results(model, data, label, args): else: text_info = show_results_video(result_queue, text_info, args.threshold, msg, frame, - video_writer) + video_writer, args.label_color, + args.msg_color) cap.release() cv2.destroyAllWindows() From d92fcc6abf24a2807bbaae79c5bea7086f43fd1c Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Wed, 23 Jun 2021 12:15:55 +0800 Subject: [PATCH 164/414] Update .gitignore --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index b01921b39f..020cf31531 100644 --- a/.gitignore +++ b/.gitignore @@ -128,3 +128,5 @@ work_dirs/ !tests/data/**/*.pkl.json !tests/data/**/*.log.json !tests/data/**/*.pth +mmaction/configs/* +mmaction/tools/* From d4c3ba0798c37172c33b80b5e760711e2f496ce9 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Wed, 23 Jun 2021 20:42:42 +0800 Subject: [PATCH 165/414] [Docs] Update Docs about `load_from` (#953) * load from * CN and faq --- docs/faq.md | 5 +++++ docs/tutorials/2_finetune.md | 1 + docs_zh_CN/faq.md | 4 ++++ docs_zh_CN/tutorials/2_finetune.md | 1 + 4 files changed, 11 insertions(+) diff --git a/docs/faq.md b/docs/faq.md index 8e14cebfb5..0813b3be0b 100644 --- a/docs/faq.md +++ b/docs/faq.md @@ -104,6 +104,11 @@ If the contents here do not cover your 
issue, please create an issue using the [
 ]
 ```

+- **How to set `load_from` value in config files to finetune models?**
+
+  In MMAction2, we set `load_from=None` as default in `configs/_base_/default_runtime.py` and owing to [inheritance design](/docs/tutorials/1_config.md),
+  users can directly change it by setting `load_from` in their configs.
+
 ## Testing

 - **How to make predicted score normalized by softmax within [0, 1]?**
diff --git a/docs/tutorials/2_finetune.md b/docs/tutorials/2_finetune.md
index 8997b29030..1ec072ce58 100644
--- a/docs/tutorials/2_finetune.md
+++ b/docs/tutorials/2_finetune.md
@@ -91,6 +91,7 @@ checkpoint_config = dict(interval=5)
 ## Use Pre-Trained Model

 To use the pre-trained model for the whole network, the new config adds the link of pre-trained models in the `load_from`.
+We set `load_from=None` as default in `configs/_base_/default_runtime.py` and owing to [inheritance design](/docs/tutorials/1_config.md), users can directly change it by setting `load_from` in their configs.

 ```python
 # use the pre-trained model for the whole TSN network
diff --git a/docs_zh_CN/faq.md b/docs_zh_CN/faq.md
index ea4de0b945..b2fc15dd34 100644
--- a/docs_zh_CN/faq.md
+++ b/docs_zh_CN/faq.md
@@ -78,6 +78,10 @@
 实际上,除了少数模型,如 C3D 等,用户都能通过设置 `frozen_stages` 来冻结模型参数,因为大多数主干网络继承自 `ResNet` 和 `ResNet3D`,而这两个模型都支持 `_freeze_stages()` 方法。

+- **如何在配置文件中设置 `load_from` 参数以进行模型微调?**
+
+  MMAction2 在 `configs/_base_/default_runtime.py` 文件中将 `load_from=None` 设为默认。由于配置文件的可继承性,用户可直接在下游配置文件中设置 `load_from` 的值来进行更改。
+
 ## 测试

 - **如何将预测分值用 softmax 归一化到 [0, 1] 区间内?**
diff --git a/docs_zh_CN/tutorials/2_finetune.md b/docs_zh_CN/tutorials/2_finetune.md
index 69442b4353..0c4e7c09d2 100644
--- a/docs_zh_CN/tutorials/2_finetune.md
+++ b/docs_zh_CN/tutorials/2_finetune.md
@@ -85,6 +85,7 @@ checkpoint_config = dict(interval=5)
 ## 使用预训练模型

 若要将预训练模型用于整个网络(主干网络设置中的 `pretrained`,仅会在主干网络模型上加载预训练参数),可通过 `load_from` 指定模型文件路径或模型链接,实现预训练权重导入。
+MMAction2 在 `configs/_base_/default_runtime.py` 文件中将 `load_from=None` 设为默认。由于配置文件的可继承性,用户可直接在下游配置文件中设置 `load_from` 的值来进行更改。

 ```python
 # 将预训练模型用于整个 TSN 网络
From fbcbf409b9c52f2993ccb1a7b32a384c0bdaa299 Mon Sep 17 00:00:00 2001
From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com>
Date: Wed, 23 Jun 2021 21:07:25 +0800
Subject: [PATCH 166/414] [Improvement] Improve Metafiles (#956)

* update metafiles

* align keys

* Update .gitignore

* revert .gitignore

Co-authored-by: Jintao Lin <528557675@qq.com>
---
 .gitignore                                    |   2 +
 configs/detection/acrn/metafile.yml           |  46 +++
 configs/detection/ava/metafile.yml            | 103 ++++-
 configs/detection/lfb/metafile.yml            |  16 +-
 configs/localization/bmn/metafile.yml         |  22 +-
 configs/localization/bsn/metafile.yml         |  22 +-
 configs/localization/ssn/metafile.yml         |   9 +-
 configs/recognition/c3d/metafile.yml          |  14 +-
 configs/recognition/csn/metafile.yml          |  24 +-
 configs/recognition/i3d/metafile.yml          | 104 ++---
 configs/recognition/omnisource/metafile.yml   |  56 ++-
 configs/recognition/r2plus1d/metafile.yml     |  44 +-
 configs/recognition/slowfast/metafile.yml     |  84 ++--
 configs/recognition/slowonly/metafile.yml     | 185 +++++----
 configs/recognition/tanet/metafile.yml        |  14 +-
 configs/recognition/tin/metafile.yml          |  31 +-
 configs/recognition/tpn/metafile.yml          |  33 +-
 configs/recognition/trn/metafile.yml          |  22 +-
 configs/recognition/tsm/metafile.yml          | 275 ++++++-------
 configs/recognition/tsn/metafile.yml          | 384 ++++++++----------
 configs/recognition/x3d/metafile.yml          |  12 +-
 configs/recognition_audio/resnet/metafile.yml |   9 +-
 configs/skeleton/posec3d/metafile.yml         | 118 ++++++
model_zoo.yml | 2 + 24 files changed, 848 insertions(+), 783 deletions(-) create mode 100644 configs/detection/acrn/metafile.yml create mode 100644 configs/skeleton/posec3d/metafile.yml diff --git a/.gitignore b/.gitignore index 020cf31531..722a50e523 100644 --- a/.gitignore +++ b/.gitignore @@ -128,5 +128,7 @@ work_dirs/ !tests/data/**/*.pkl.json !tests/data/**/*.log.json !tests/data/**/*.pth + +# avoid soft links created by MIM mmaction/configs/* mmaction/tools/* diff --git a/configs/detection/acrn/metafile.yml b/configs/detection/acrn/metafile.yml new file mode 100644 index 0000000000..fe41d6db10 --- /dev/null +++ b/configs/detection/acrn/metafile.yml @@ -0,0 +1,46 @@ +Collections: +- Name: ACRN + README: configs/detection/acrn/README.md +Models: +- Config: configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.py + In Collection: ACRN + Metadata: + Architecture: ResNet50 + Batch Size: 6 + Epochs: 10 + Input: 32x2 + Modality: RGB + Parameters: 92232057 + Pretrained: Kinetics-400 + Training Data: AVA v2.1 + Training Resources: 8 GPUs + Name: slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb + Results: + - Dataset: AVA v2.1 + Metrics: + - mAP: 27.1 + Task: Spatial Temporal Action Detection + Training Json Log: https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.json + Training Log: https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.log + Weights: https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb-49b07bf2.pth +- Config: configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py + In Collection: ACRN + Metadata: + Architecture: ResNet50 + Batch Size: 6 + Epochs: 10 + Input: 32x2 + Modality: RGB + Parameters: 92232057 + Pretrained: Kinetics-400 + Training Data: AVA v2.2 + Training Resources: 8 GPUs + Name: slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb + Results: + - Dataset: AVA v2.2 + Metrics: + - mAP: 27.8 + Task: Spatial Temporal Action Detection + Training Json Log: https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json + Training Log: https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log + Weights: https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-2be32625.pth diff --git a/configs/detection/ava/metafile.yml b/configs/detection/ava/metafile.yml index 520076cf6f..abb89ce202 100644 --- a/configs/detection/ava/metafile.yml +++ b/configs/detection/ava/metafile.yml @@ -1,20 +1,18 @@ Collections: -- Metadata: - Training Data: null - Name: AVA +- Name: AVA README: configs/detection/ava/README.md Models: - Config: configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py In Collection: AVA Metadata: Architecture: ResNet50 + Batch Size: 16 Epochs: 20 Input: 4x16 Pretrained: Kinetics-400 Resolution: 
short-side 256 - Training BatchSize / GPU: 16 Training Data: AVA v2.1 - gpus: 8 + Training Resources: 8 GPUs Modality: RGB Name: slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb Results: @@ -29,13 +27,13 @@ Models: In Collection: AVA Metadata: Architecture: ResNet50 + Batch Size: 16 Epochs: 20 Input: 4x16 Pretrained: OmniSource Resolution: short-side 256 - Training BatchSize / GPU: 16 Training Data: AVA v2.1 - gpus: 8 + Training Resources: 8 GPUs Modality: RGB Name: slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb Results: @@ -50,13 +48,13 @@ Models: In Collection: AVA Metadata: Architecture: ResNet50 + Batch Size: 12 Epochs: 10 Input: 4x16 Pretrained: Kinetics-400 Resolution: short-side 256 - Training BatchSize / GPU: 12 Training Data: AVA v2.1 - gpus: 8 + Training Resources: 8 GPUs Modality: RGB Name: slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb Results: @@ -71,13 +69,13 @@ Models: In Collection: AVA Metadata: Architecture: ResNet50 + Batch Size: 6 Epochs: 10 Input: 8x8 Pretrained: Kinetics-400 Resolution: short-side 256 - Training BatchSize / GPU: 6 Training Data: AVA v2.1 - gpus: 16 + Training Resources: 16 GPUs Modality: RGB Name: slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb Results: @@ -92,13 +90,13 @@ Models: In Collection: AVA Metadata: Architecture: ResNet101 + Batch Size: 6 Epochs: 20 Input: 8x8 Pretrained: Kinetics-400 Resolution: short-side 256 - Training BatchSize / GPU: 6 Training Data: AVA v2.1 - gpus: 16 + Training Resources: 16 GPUs Modality: RGB Name: slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb Results: @@ -113,13 +111,13 @@ Models: In Collection: AVA Metadata: Architecture: ResNet101 + Batch Size: 6 Epochs: 20 Input: 8x8 Pretrained: OmniSource Resolution: short-side 256 - Training BatchSize / GPU: 6 Training Data: AVA v2.1 - gpus: 16 + Training Resources: 16 GPUs Modality: RGB Name: slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb Results: @@ -134,13 +132,13 @@ Models: In Collection: AVA Metadata: Architecture: ResNet50 + Batch Size: 9 Epochs: 20 Input: 32x2 Pretrained: Kinetics-400 Resolution: short-side 256 - Training BatchSize / GPU: 9 Training Data: AVA v2.1 - gpus: 16 + Training Resources: 16 GPUs Modality: RGB Name: slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb Results: @@ -155,13 +153,13 @@ Models: In Collection: AVA Metadata: Architecture: ResNet50 + Batch Size: 9 Epochs: 20 Input: 32x2 Pretrained: Kinetics-400 Resolution: short-side 256 - Training BatchSize / GPU: 9 Training Data: AVA v2.1 - gpus: 16 + Training Resources: 16 GPUs Modality: RGB Name: slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb Results: @@ -176,13 +174,13 @@ Models: In Collection: AVA Metadata: Architecture: ResNet50 + Batch Size: 5 Epochs: 20 Input: 32x2 Pretrained: Kinetics-400 Resolution: short-side 256 - Training BatchSize / GPU: 5 Training Data: AVA v2.1 - gpus: 16 + Training Resources: 16 GPUs Modality: RGB Name: slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb Results: @@ -193,3 +191,66 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217.json Training Log: https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217.log Weights: 
https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217-ae225e97.pth +- Config: configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py + In Collection: AVA + Metadata: + Architecture: ResNet50 + Batch Size: 6 + Epochs: 10 + Input: 32x2 + Pretrained: Kinetics-400 + Resolution: short-side 256 + Training Data: AVA v2.2 + Training Resources: 8 GPUs + Modality: RGB + Name: slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb + Results: + - Dataset: AVA v2.2 + Metrics: + mAP: 26.1 + Task: Spatial Temporal Action Detection + Training Json Log: https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json + Training Log: https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log + Weights: https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-b987b516.pth +- Config: configs/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py + In Collection: AVA + Metadata: + Architecture: ResNet50 + Batch Size: 6 + Epochs: 10 + Input: 32x2 + Pretrained: Kinetics-400 + Resolution: short-side 256 + Training Data: AVA v2.2 + Training Resources: 8 GPUs + Modality: RGB + Name: slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb + Results: + - Dataset: AVA v2.2 + Metrics: + mAP: 26.8 + Task: Spatial Temporal Action Detection + Training Json Log: https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json + Training Log: https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log + Weights: https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-345618cd.pth +- Config: configs/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py + In Collection: AVA + Metadata: + Architecture: ResNet50 + Batch Size: 6 + Epochs: 10 + Input: 32x2 + Pretrained: Kinetics-400 + Resolution: short-side 256 + Training Data: AVA v2.2 + Training Resources: 8 GPUs + Modality: RGB + Name: slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb + Results: + - Dataset: AVA v2.2 + Metrics: + mAP: 26.4 + Task: Spatial Temporal Action Detection + Training Json Log: https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json + Training Log: 
https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log + Weights: https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-874e0845.pth diff --git a/configs/detection/lfb/metafile.yml b/configs/detection/lfb/metafile.yml index 6e9a09f18f..e9b15fc94b 100644 --- a/configs/detection/lfb/metafile.yml +++ b/configs/detection/lfb/metafile.yml @@ -1,20 +1,18 @@ Collections: -- Metadata: - Training Data: null - Name: LFB +- Name: LFB README: configs/detection/lfb/README.md Models: - Config: configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py In Collection: LFB Metadata: Architecture: ResNet50 + Batch Size: 12 Epochs: 20 Input: 4x16 Pretrained: Kinetics-400 Resolution: short-side 256 - Training BatchSize / GPU: 12 Training Data: AVA v2.1 - gpus: 8 + Training Resources: 8 GPUs Modality: RGB Name: lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py Results: @@ -29,13 +27,13 @@ Models: In Collection: LFB Metadata: Architecture: ResNet50 + Batch Size: 12 Epochs: 20 Input: 4x16 Pretrained: Kinetics-400 Resolution: short-side 256 - Training BatchSize / GPU: 12 Training Data: AVA v2.1 - gpus: 8 + Training Resources: 8 GPUs Modality: RGB Name: lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py Results: @@ -50,13 +48,13 @@ Models: In Collection: LFB Metadata: Architecture: ResNet50 + Batch Size: 12 Epochs: 20 Input: 4x16 Pretrained: Kinetics-400 Resolution: short-side 256 - Training BatchSize / GPU: 12 Training Data: AVA v2.1 - gpus: 8 + Training Resources: 8 GPUs Modality: RGB Name: lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py Results: diff --git a/configs/localization/bmn/metafile.yml b/configs/localization/bmn/metafile.yml index ac60106a46..f2d52ca714 100644 --- a/configs/localization/bmn/metafile.yml +++ b/configs/localization/bmn/metafile.yml @@ -1,17 +1,15 @@ Collections: -- Metadata: - Training Data: null - Name: BMN +- Name: BMN README: configs/localization/bmn/README.md Models: - Config: configs/localization/bmn/bmn_400x100_2x8_9e_activitynet_feature.py In Collection: BMN Metadata: + Batch Size: 8 Epochs: 9 - Training BatchSize / GPU: 8 Training Data: ActivityNet v1.3 + Training Resources: 2 GPUs feature: cuhk_mean_100 - gpus: 2 Name: bmn_400x100_9e_2x8_activitynet_feature (cuhk_mean_100) Results: - Dataset: ActivityNet v1.3 @@ -26,16 +24,14 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_9e_activitynet_feature/bmn_400x100_9e_activitynet_feature.log.json Training Log: https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_9e_activitynet_feature/bmn_400x100_9e_activitynet_feature.log Weights: https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_9e_activitynet_feature/bmn_400x100_9e_activitynet_feature_20200619-42a3b111.pth - gpu_mem(M): '5420' - iter time(s): '3.27' - Config: configs/localization/bmn/bmn_400x100_2x8_9e_activitynet_feature.py In Collection: BMN Metadata: + Batch Size: 8 Epochs: 9 - Training BatchSize / GPU: 8 Training Data: ActivityNet v1.3 + Training Resources: 2 GPUs feature: mmaction_video - gpus: 2 Name: bmn_400x100_9e_2x8_activitynet_feature (mmaction_video) Results: - Dataset: ActivityNet v1.3 @@ -50,16 +46,14 @@ Models: Training Json Log: 
https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_video/bmn_400x100_2x8_9e_mmaction_video_20200809.json Training Log: https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_video/bmn_400x100_2x8_9e_mmaction_video_20200809.log Weights: https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_video/bmn_400x100_2x8_9e_mmaction_video_20200809-c9fd14d2.pth - gpu_mem(M): '5420' - iter time(s): '3.27' - Config: configs/localization/bmn/bmn_400x100_2x8_9e_activitynet_feature.py In Collection: BMN Metadata: + Batch Size: 8 Epochs: 9 - Training BatchSize / GPU: 8 Training Data: ActivityNet v1.3 + Training Resources: 2 GPUs feature: mmaction_clip - gpus: 2 Name: bmn_400x100_9e_2x8_activitynet_feature (mmaction_clip) Results: - Dataset: ActivityNet v1.3 @@ -74,5 +68,3 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809.json Training Log: https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809.log Weights: https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809-10d803ce.pth - gpu_mem(M): '5420' - iter time(s): '3.27' diff --git a/configs/localization/bsn/metafile.yml b/configs/localization/bsn/metafile.yml index bb64bda3b5..d3ccfbbb5d 100644 --- a/configs/localization/bsn/metafile.yml +++ b/configs/localization/bsn/metafile.yml @@ -1,7 +1,5 @@ Collections: -- Metadata: - Training Data: null - Name: BSN +- Name: BSN README: configs/localization/bsn/README.md Models: - Config: @@ -10,10 +8,10 @@ Models: - configs/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature.py In Collection: BSN Metadata: + Pretrained: None Training Data: ActivityNet v1.3 + Training Resources: 1 GPUs feature: cuhk_mean_100 - gpus: 1 - pretrain: None Name: bsn_400x100_1x16_20e_activitynet_feature (cuhk_mean_100) Results: - Dataset: ActivityNet v1.3 @@ -30,18 +28,16 @@ Models: Weights: - https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature/bsn_tem_400x100_1x16_20e_activitynet_feature_20200619-cd6accc3.pth - https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature/bsn_pem_400x100_1x16_20e_activitynet_feature_20210203-1c27763d.pth - gpu_mem(M): 41(TEM)+25(PEM) - iter time(s): 0.074(TEM)+0.036(PEM) - Config: - configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py - configs/localization/bsn/bsn_pgm_400x100_activitynet_feature.py - configs/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature.py In Collection: BSN Metadata: + Pretrained: None Training Data: ActivityNet v1.3 + Training Resources: 1 GPUs feature: mmaction_video - gpus: 1 - pretrain: None Name: bsn_400x100_1x16_20e_activitynet_feature (mmaction_video) Results: - Dataset: ActivityNet v1.3 @@ -58,18 +54,16 @@ Models: Weights: - https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_video/bsn_tem_400x100_1x16_20e_mmaction_video_20200809-ad6ec626.pth - https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_video/bsn_pem_400x100_1x16_20e_mmaction_video_20200809-aa861b26.pth - gpu_mem(M): 41(TEM)+25(PEM) - iter time(s): 0.074(TEM)+0.036(PEM) - Config: - configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py - 
configs/localization/bsn/bsn_pgm_400x100_activitynet_feature.py - configs/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature.py In Collection: BSN Metadata: + Pretrained: None Training Data: ActivityNet v1.3 + Training Resources: 1 GPUs feature: mmaction_clip - gpus: 1 - pretrain: None Name: bsn_400x100_1x16_20e_activitynet_feature (mmaction_clip) Results: - Dataset: ActivityNet v1.3 @@ -86,5 +80,3 @@ Models: Weights: - https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_clip/bsn_tem_400x100_1x16_20e_mmaction_clip_20200809-0a563554.pth - https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_clip/bsn_pem_400x100_1x16_20e_mmaction_clip_20200809-e32f61e6.pth - gpu_mem(M): 41(TEM)+25(PEM) - iter time(s): 0.074(TEM)+0.036(PEM) diff --git a/configs/localization/ssn/metafile.yml b/configs/localization/ssn/metafile.yml index 9cf416053a..29da43cabe 100644 --- a/configs/localization/ssn/metafile.yml +++ b/configs/localization/ssn/metafile.yml @@ -1,16 +1,14 @@ Collections: -- Metadata: - Training Data: null - Name: SSN +- Name: SSN README: configs/localization/ssn/README.md Models: - Config: configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py In Collection: SSN Metadata: Architecture: ResNet50 + Pretrained: ImageNet Training Data: THUMOS 14 - gpus: 8 - pretrain: ImageNet + Training Resources: 8 GPUs Name: ssn_r50_450e_thumos14_rgb Results: - Dataset: THUMOS 14 @@ -22,7 +20,6 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/20201005_144656.log.json Training Log: https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/20201005_144656.log Weights: https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/ssn_r50_450e_thumos14_rgb_20201012-1920ab16.pth - gpu_mem(M): '6352' reference mAP@0.3: '[27.61](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)' reference mAP@0.4: '[21.28](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)' reference mAP@0.5: '[14.57](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)' diff --git a/configs/recognition/c3d/metafile.yml b/configs/recognition/c3d/metafile.yml index f0af255121..8dabc63831 100644 --- a/configs/recognition/c3d/metafile.yml +++ b/configs/recognition/c3d/metafile.yml @@ -1,20 +1,18 @@ Collections: -- Metadata: - Training Data: null - Name: C3D +- Name: C3D README: configs/recognition/c3d/README.md Models: - Config: configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb.py In Collection: C3D Metadata: Architecture: c3d + Batch Size: 30 Epochs: 45 Parameters: 78409573 - Training BatchSize / GPU: 30 + Pretrained: sports1m + Resolution: 128x171 Training Data: UCF101 - gpus: 8 - pretrain: sports1m - resolution: 128x171 + Training Resources: 8 GPUs Modality: RGB Name: c3d_sports1m_16x1x1_45e_ucf101_rgb.py Results: @@ -26,5 +24,3 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/20201021_140429.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/20201021_140429.log Weights: https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/c3d_sports1m_16x1x1_45e_ucf101_rgb_20201021-26655025.pth - gpu_mem(M): '6053' - inference_time(video/s): x diff 
--git a/configs/recognition/csn/metafile.yml b/configs/recognition/csn/metafile.yml index 4c1cea7bf9..9dd6136cc2 100644 --- a/configs/recognition/csn/metafile.yml +++ b/configs/recognition/csn/metafile.yml @@ -1,20 +1,18 @@ Collections: -- Metadata: - Training Data: null - Name: CSN +- Name: CSN README: configs/recognition/csn/README.md Models: - Config: configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py In Collection: CSN Metadata: Architecture: ResNet152 + Batch Size: 3 Epochs: 58 Parameters: 29703568 - Training BatchSize / GPU: 3 + Pretrained: IG65M + Resolution: short-side 320 Training Data: Kinetics-400 - gpus: 32 - pretrain: IG65M - resolution: short-side 320 + Training Resources: 32 GPUs Modality: RGB Name: ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py Results: @@ -26,19 +24,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/20200728_031952.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/20200728_031952.log Weights: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20200803-fc66ce8d.pth - gpu_mem(M): '8517' - inference_time(video/s): x - Config: configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py In Collection: CSN Metadata: Architecture: ResNet152 + Batch Size: 3 Epochs: 58 Parameters: 29703568 - Training BatchSize / GPU: 3 + Pretrained: IG65M + Resolution: short-side 320 Training Data: Kinetics-400 - gpus: 32 - pretrain: IG65M - resolution: short-side 320 + Training Resources: 32 GPUs Modality: RGB Name: ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py Results: @@ -50,5 +46,3 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log Weights: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb_20200812-9037a758.pth - gpu_mem(M): '8516' - inference_time(video/s): x diff --git a/configs/recognition/i3d/metafile.yml b/configs/recognition/i3d/metafile.yml index 8f2f63c730..5a32517893 100644 --- a/configs/recognition/i3d/metafile.yml +++ b/configs/recognition/i3d/metafile.yml @@ -1,20 +1,18 @@ Collections: -- Metadata: - Training Data: null - Name: I3D +- Name: I3D README: configs/recognition/i3d/README.md Models: - Config: configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py In Collection: CSN Metadata: Architecture: ResNet50 + Batch Size: 8 Epochs: 100 Parameters: 28043472 - Training BatchSize / GPU: 8 + Pretrained: ImageNet + Resolution: 340x256 Training Data: Kinetics-400 - gpus: 8 - pretrain: ImageNet - resolution: 340x256 + Training Resources: 8 GPUs Modality: RGB Name: i3d_r50_32x2x1_100e_kinetics400_rgb Results: @@ -26,19 +24,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/20200614_060456.log.json Training Log: 
https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/20200614_060456.log Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/i3d_r50_32x2x1_100e_kinetics400_rgb_20200614-c25ef9a4.pth - gpu_mem(M): '5170' - inference_time(video/s): 1.7 (320x3 frames) - Config: configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py In Collection: CSN Metadata: Architecture: ResNet50 + Batch Size: 8 Epochs: 100 Parameters: 28043472 - Training BatchSize / GPU: 8 + Pretrained: ImageNet + Resolution: short-side 256 Training Data: Kinetics-400 - gpus: 8 - pretrain: ImageNet - resolution: short-side 256 + Training Resources: 8 GPUs Modality: RGB Name: i3d_r50_32x2x1_100e_kinetics400_rgb Results: @@ -50,19 +46,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/20200725_031555.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/20200725_031555.log Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth - gpu_mem(M): '5170' - inference_time(video/s): x - Config: configs/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb.py In Collection: CSN Metadata: Architecture: ResNet50 + Batch Size: 8 Epochs: 100 Parameters: 28043472 - Training BatchSize / GPU: 8 + Pretrained: ImageNet + Resolution: short-side 256p Training Data: Kinetics-400 - gpus: 8 - pretrain: ImageNet - resolution: short-side 256p + Training Resources: 8 GPUs Modality: RGB Name: i3d_r50_video_32x2x1_100e_kinetics400_rgb Results: @@ -74,19 +68,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/20200706_143014.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/20200706_143014.log Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/i3d_r50_video_32x2x1_100e_kinetics400_rgb_20200826-e31c6f52.pth - gpu_mem(M): '5170' - inference_time(video/s): x - Config: configs/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb.py In Collection: CSN Metadata: Architecture: ResNet50 + Batch Size: 8 Epochs: 100 Parameters: 28043472 - Training BatchSize / GPU: 8 + Pretrained: ImageNet + Resolution: 340x256 Training Data: Kinetics-400 - gpus: 16 - pretrain: ImageNet - resolution: 340x256 + Training Resources: 16 GPUs Modality: RGB Name: i3d_r50_dense_32x2x1_100e_kinetics400_rgb Results: @@ -98,19 +90,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/20200616_230011.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/20200616_230011.log Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/i3d_r50_dense_32x2x1_100e_kinetics400_rgb_20200616-2bbb4361.pth - gpu_mem(M): '5170' - inference_time(video/s): 1.7 (320x3 frames) - Config: configs/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb.py In Collection: CSN Metadata: Architecture: ResNet50 + Batch Size: 8 Epochs: 100 Parameters: 28043472 - Training BatchSize / GPU: 8 + Pretrained: ImageNet + Resolution: short-side 256 Training Data: Kinetics-400 - gpus: 8 - 
pretrain: ImageNet - resolution: short-side 256 + Training Resources: 8 GPUs Modality: RGB Name: i3d_r50_dense_32x2x1_100e_kinetics400_rgb Results: @@ -122,19 +112,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/20200725_031604.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/20200725_031604.log Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb_20200725-24eb54cc.pth - gpu_mem(M): '5170' - inference_time(video/s): x - Config: configs/recognition/i3d/i3d_r50_lazy_32x2x1_100e_kinetics400_rgb.py In Collection: CSN Metadata: Architecture: ResNet50 + Batch Size: 8 Epochs: 100 Parameters: 28043472 - Training BatchSize / GPU: 8 + Pretrained: ImageNet + Resolution: 340x256 Training Data: Kinetics-400 - gpus: 8 - pretrain: ImageNet - resolution: 340x256 + Training Resources: 8 GPUs Modality: RGB Name: i3d_r50_lazy_32x2x1_100e_kinetics400_rgb Results: @@ -146,19 +134,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/20200612_233836.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/20200612_233836.log Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/i3d_r50_fast_32x2x1_100e_kinetics400_rgb_20200612-000e4d2a.pth - gpu_mem(M): '5170' - inference_time(video/s): 1.8 (320x3 frames) - Config: configs/recognition/i3d/i3d_r50_lazy_32x2x1_100e_kinetics400_rgb.py In Collection: CSN Metadata: Architecture: ResNet50 + Batch Size: 8 Epochs: 100 Parameters: 28043472 - Training BatchSize / GPU: 8 + Pretrained: ImageNet + Resolution: short-side 256 Training Data: Kinetics-400 - gpus: 8 - pretrain: ImageNet - resolution: short-side 256 + Training Resources: 8 GPUs Modality: RGB Name: i3d_r50_lazy_32x2x1_100e_kinetics400_rgb Results: @@ -170,19 +156,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/20200725_031457.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/20200725_031457.log Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb_20200817-4e90d1d5.pth - gpu_mem(M): '5170' - inference_time(video/s): x - Config: configs/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb.py In Collection: CSN Metadata: Architecture: ResNet50 + Batch Size: 8 Epochs: 100 Parameters: 35397840 - Training BatchSize / GPU: 8 + Pretrained: ImageNet + Resolution: short-side 256p Training Data: Kinetics-400 - gpus: 32 - pretrain: ImageNet - resolution: short-side 256p + Training Resources: 32 GPUs Modality: RGB Name: i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb Results: @@ -194,19 +178,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034054.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034054.log Weights: 
https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb_20200813-6e6aef1b.pth - gpu_mem(M): '6438' - inference_time(video/s): x - Config: configs/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb.py In Collection: CSN Metadata: Architecture: ResNet50 + Batch Size: 8 Epochs: 100 Parameters: 31723728 - Training BatchSize / GPU: 8 + Pretrained: ImageNet + Resolution: short-side 256p Training Data: Kinetics-400 - gpus: 32 - pretrain: ImageNet - resolution: short-side 256p + Training Resources: 32 GPUs Modality: RGB Name: i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb Results: @@ -218,19 +200,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034909.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034909.log Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb_20200815-17f84aa2.pth - gpu_mem(M): '4944' - inference_time(video/s): x - Config: configs/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb.py In Collection: CSN Metadata: Architecture: ResNet50 + Batch Size: 8 Epochs: 100 Parameters: 35397840 - Training BatchSize / GPU: 8 + Pretrained: ImageNet + Resolution: short-side 256p Training Data: Kinetics-400 - gpus: 32 - pretrain: ImageNet - resolution: short-side 256p + Training Resources: 32 GPUs Modality: RGB Name: i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb Results: @@ -242,5 +222,3 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/20200814_044208.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/20200814_044208.log Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb_20200814-7c30d5bb.pth - gpu_mem(M): '4832' - inference_time(video/s): x diff --git a/configs/recognition/omnisource/metafile.yml b/configs/recognition/omnisource/metafile.yml index e8b6cf9758..16ca8a1896 100644 --- a/configs/recognition/omnisource/metafile.yml +++ b/configs/recognition/omnisource/metafile.yml @@ -1,20 +1,18 @@ Collections: -- Metadata: - Training Data: null - Name: OmniSource +- Name: OmniSource README: configs/recognition/omnisource/README.md Models: - Config: configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_rgb.py In Collection: OmniSource Metadata: Architecture: ResNet50 + Batch Size: 12 Epochs: 100 Input: 3seg Modality: RGB Parameters: 23917832 Pretrained: ImageNet Resolution: short-side 320 - Training BatchSize / GPU: 12 Training Data: MiniKinetics Modality: RGB Name: tsn_r50_1x1x8_100e_minikinetics_rgb @@ -31,13 +29,13 @@ Models: In Collection: OmniSource Metadata: Architecture: ResNet50 + Batch Size: 12 Epochs: 100 Input: 3seg Modality: RGB Parameters: 23917832 Pretrained: ImageNet Resolution: short-side 320 - Training BatchSize / GPU: 12 Training Data: MiniKinetics Modality: RGB Name: tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb @@ -54,13 +52,13 @@ Models: In Collection: OmniSource Metadata: Architecture: ResNet50 + Batch Size: 12 Epochs: 100 
Input: 3seg Modality: RGB Parameters: 23917832 Pretrained: ImageNet Resolution: short-side 320 - Training BatchSize / GPU: 12 Training Data: MiniKinetics Modality: RGB Name: tsn_r50_1x1x8_100e_minikinetics_webimage_rgb @@ -77,13 +75,13 @@ Models: In Collection: OmniSource Metadata: Architecture: ResNet50 + Batch Size: 12 Epochs: 100 Input: 3seg Modality: RGB Parameters: 23917832 Pretrained: ImageNet Resolution: short-side 320 - Training BatchSize / GPU: 12 Training Data: MiniKinetics Modality: RGB Name: tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb @@ -100,13 +98,13 @@ Models: In Collection: OmniSource Metadata: Architecture: ResNet50 + Batch Size: 12 Epochs: 100 Input: 3seg Modality: RGB Parameters: 23917832 Pretrained: ImageNet Resolution: short-side 320 - Training BatchSize / GPU: 12 Training Data: MiniKinetics Modality: RGB Name: tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb @@ -123,13 +121,13 @@ Models: In Collection: OmniSource Metadata: Architecture: ResNet50 + Batch Size: 12 Epochs: 100 Input: 3seg Modality: RGB Parameters: 23917832 Pretrained: ImageNet Resolution: short-side 320 - Training BatchSize / GPU: 12 Training Data: MiniKinetics Modality: RGB Name: tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb @@ -146,13 +144,13 @@ Models: In Collection: OmniSource Metadata: Architecture: ResNet50 + Batch Size: 12 Epochs: 256 Input: 8x8 Modality: RGB Parameters: 32044296 Pretrained: None Resolution: short-side 320 - Training BatchSize / GPU: 12 Training Data: MiniKinetics Modality: RGB Name: slowonly_r50_8x8x1_256e_minikinetics_rgb @@ -169,13 +167,13 @@ Models: In Collection: OmniSource Metadata: Architecture: ResNet50 + Batch Size: 12 Epochs: 256 Input: 8x8 Modality: RGB Parameters: 32044296 Pretrained: None Resolution: short-side 320 - Training BatchSize / GPU: 12 Training Data: MiniKinetics Modality: RGB Name: slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb @@ -192,13 +190,13 @@ Models: In Collection: OmniSource Metadata: Architecture: ResNet50 + Batch Size: 12 Epochs: 256 Input: 8x8 Modality: RGB Parameters: 32044296 Pretrained: None Resolution: short-side 320 - Training BatchSize / GPU: 12 Training Data: MiniKinetics Modality: RGB Name: slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb @@ -215,13 +213,13 @@ Models: In Collection: OmniSource Metadata: Architecture: ResNet50 + Batch Size: 12 Epochs: 256 Input: 8x8 Modality: RGB Parameters: 32044296 Pretrained: None Resolution: short-side 320 - Training BatchSize / GPU: 12 Training Data: MiniKinetics Modality: RGB Name: slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb @@ -238,13 +236,13 @@ Models: In Collection: OmniSource Metadata: Architecture: ResNet50 + Batch Size: 12 Epochs: 256 Input: 8x8 Modality: RGB Parameters: 32044296 Pretrained: None Resolution: short-side 320 - Training BatchSize / GPU: 12 Training Data: MiniKinetics Modality: RGB Name: slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb @@ -261,13 +259,13 @@ Models: In Collection: OmniSource Metadata: Architecture: ResNet50 + Batch Size: 12 Epochs: 256 Input: 8x8 Modality: RGB Parameters: 32044296 Pretrained: None Resolution: short-side 320 - Training BatchSize / GPU: 12 Training Data: MiniKinetics Modality: RGB Name: slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb @@ -284,12 +282,12 @@ Models: In Collection: OmniSource Metadata: Architecture: ResNet50 + Batch Size: 32 Epochs: 100 Parameters: 24327632 - Training BatchSize / GPU: 32 + Pretrained: ImageNet + Resolution: 340x256 Training Data: Kinetics-400 - pretrain: ImageNet - resolution: 340x256 Modality: RGB 
Name: tsn_omnisource_r50_1x1x3_100e_kinetics_rgb Results: @@ -299,18 +297,16 @@ Models: top5 acc: 91.0 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_imagenet_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-54192355.pth - gpu_mem(M): '8344' - inference_time(video/s): x - Config: configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py In Collection: OmniSource Metadata: Architecture: ResNet50 + Batch Size: 32 Epochs: 100 Parameters: 24327632 - Training BatchSize / GPU: 32 + Pretrained: IG-1B + Resolution: short-side 320 Training Data: Kinetics-400 - pretrain: IG-1B - resolution: short-side 320 Modality: RGB Name: tsn_IG1B_pretrained_omnisource_r50_1x1x3_100e_kinetics_rgb Results: @@ -320,18 +316,16 @@ Models: top5 acc: 91.9 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-2863fed0.pth - gpu_mem(M): '8344' - inference_time(video/s): x - Config: configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py In Collection: OmniSource Metadata: Architecture: ResNet50 + Batch Size: 8 Epochs: 256 Parameters: 32454096 - Training BatchSize / GPU: 8 + Pretrained: None + Resolution: short-side 320 Training Data: Kinetics-400 - pretrain: None - resolution: short-side 320 Modality: RGB Name: slowonly_r50_omnisource_4x16x1_256e_kinetics400_rgb Results: @@ -345,12 +339,12 @@ Models: In Collection: OmniSource Metadata: Architecture: ResNet101 + Batch Size: 8 Epochs: 196 Parameters: 60359120 - Training BatchSize / GPU: 8 + Pretrained: None + Resolution: short-side 320 Training Data: Kinetics-400 - pretrain: None - resolution: short-side 320 Modality: RGB Name: slowonly_r101_omnisource_8x8x1_196e_kinetics400_rgb Results: diff --git a/configs/recognition/r2plus1d/metafile.yml b/configs/recognition/r2plus1d/metafile.yml index d6f7fdcb81..70f8aaee85 100644 --- a/configs/recognition/r2plus1d/metafile.yml +++ b/configs/recognition/r2plus1d/metafile.yml @@ -1,20 +1,18 @@ Collections: -- Metadata: - Training Data: null - Name: R2Plus1D +- Name: R2Plus1D README: configs/recognition/r2plus1d/README.md Models: - Config: configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py In Collection: R2Plus1D Metadata: Architecture: ResNet34 + Batch Size: 8 Epochs: 180 Parameters: 63759281 - Training BatchSize / GPU: 8 + Pretrained: None + Resolution: short-side 256 Training Data: Kinetics-400 - gpus: 32 - pretrain: None - resolution: short-side 256 + Training Resources: 32 GPUs Modality: RGB Name: r2plus1d_r34_8x8x1_180e_kinetics400_rgb Results: @@ -26,19 +24,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb/20200728_021421.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb/20200728_021421.log Weights: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb_20200729-aa94765e.pth - gpu_mem(M): '5019' - inference_time(video/s): x - Config: configs/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb.py In Collection: R2Plus1D Metadata: Architecture: ResNet34 + Batch Size: 16 Epochs: 180 Parameters: 63759281 - Training BatchSize / GPU: 16 + Pretrained: None + Resolution: short-side 256 Training Data: Kinetics-400 - gpus: 8 - pretrain: None - resolution: short-side 256 + 
Training Resources: 8 GPUs Modality: RGB Name: r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb Results: @@ -50,19 +46,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb/20200724_201360.log Training Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb/20200724_201360.log.json Weights: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb_20200826-ab35a529.pth - gpu_mem(M): '5019' - inference_time(video/s): x - Config: configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py In Collection: R2Plus1D Metadata: Architecture: ResNet34 + Batch Size: 8 Epochs: 180 Parameters: 63759281 - Training BatchSize / GPU: 8 + Pretrained: None + Resolution: short-side 320 Training Data: Kinetics-400 - gpus: 16 - pretrain: None - resolution: short-side 320 + Training Resources: 16 GPUs Modality: RGB Name: r2plus1d_r34_8x8x1_180e_kinetics400_rgb Results: @@ -74,19 +68,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_8x8_69.58_88.36.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r21d_8x8.log Weights: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_8x8x1_180e_kinetics400_rgb_20200618-3fce5629.pth - gpu_mem(M): '5019' - inference_time(video/s): 1.6 (80x3 frames) - Config: configs/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb.py In Collection: R2Plus1D Metadata: Architecture: ResNet34 + Batch Size: 6 Epochs: 180 Parameters: 63759281 - Training BatchSize / GPU: 6 + Pretrained: None + Resolution: short-side 320 Training Data: Kinetics-400 - gpus: 16 - pretrain: None - resolution: short-side 320 + Training Resources: 16 GPUs Modality: RGB Name: r2plus1d_r34_32x2x1_180e_kinetics400_rgb Results: @@ -98,5 +90,3 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r2plus1d_r34_32x2_74.6_91.6.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r21d_32x2.log Weights: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r2plus1d_r34_32x2x1_180e_kinetics400_rgb_20200618-63462eb3.pth - gpu_mem(M): '12975' - inference_time(video/s): 0.5 (320x3 frames) diff --git a/configs/recognition/slowfast/metafile.yml b/configs/recognition/slowfast/metafile.yml index dc32dac835..5821af9ce2 100644 --- a/configs/recognition/slowfast/metafile.yml +++ b/configs/recognition/slowfast/metafile.yml @@ -1,20 +1,18 @@ Collections: -- Metadata: - Training Data: null - Name: SlowFast +- Name: SlowFast README: configs/recognition/slowfast/README.md Models: - Config: configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py In Collection: SlowFast Metadata: Architecture: ResNet50 + Batch Size: 8 Epochs: 256 Parameters: 34479288 - Training BatchSize / GPU: 8 + Pretrained: None + Resolution: short-side 256 Training Data: Kinetics-400 - gpus: 32 - pretrain: None - resolution: short-side 256 + Training Resources: 32 GPUs Modality: RGB Name: slowfast_r50_4x16x1_256e_kinetics400_rgb Results: @@ -26,19 +24,17 @@ Models: Training Json Log: 
    https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log
   Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb_20200728-145f1097.pth
-  gpu_mem(M): '6203'
-  inference_time(video/s): x
 - Config: configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py
   In Collection: SlowFast
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 256
     Parameters: 34479288
-    Training BatchSize / GPU: 8
+    Pretrained: None
+    Resolution: short-side 256
     Training Data: Kinetics-400
-    gpus: 8
-    pretrain: None
-    resolution: short-side 256
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: slowfast_r50_video_4x16x1_256e_kinetics400_rgb
   Results:
@@ -50,19 +46,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log
   Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/slowfast_r50_video_4x16x1_256e_kinetics400_rgb_20200826-f85b90c5.pth
-  gpu_mem(M): '6203'
-  inference_time(video/s): x
 - Config: configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py
   In Collection: SlowFast
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 256
     Parameters: 34479288
-    Training BatchSize / GPU: 8
+    Pretrained: None
+    Resolution: short-side 320
     Training Data: Kinetics-400
-    gpus: 24
-    pretrain: None
-    resolution: short-side 320
+    Training Resources: 24 GPUs
   Modality: RGB
   Name: slowfast_r50_4x16x1_256e_kinetics400_rgb
   Results:
@@ -74,19 +68,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/20200704_232901.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/20200704_232901.log
   Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_256e_kinetics400_rgb_20200704-bcde7ed7.pth
-  gpu_mem(M): '6203'
-  inference_time(video/s): 1.6 ((32+4)x10x3 frames)
 - Config: configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py
   In Collection: SlowFast
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 256
     Parameters: 34565560
-    Training BatchSize / GPU: 8
+    Pretrained: None
+    Resolution: short-side 256
     Training Data: Kinetics-400
-    gpus: 32
-    pretrain: None
-    resolution: short-side 256
+    Training Resources: 32 GPUs
   Modality: RGB
   Name: slowfast_r50_8x8x1_256e_kinetics400_rgb
   Results:
@@ -98,19 +90,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log
   Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb_20200810-863812c2.pth
-  gpu_mem(M): '9062'
-  inference_time(video/s): x
 - Config: configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py
   In Collection: SlowFast
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 256
     Parameters: 34565560
-    Training BatchSize / GPU: 8
+    Pretrained: None
+    Resolution: short-side 320
     Training Data: Kinetics-400
-    gpus: 24
-    pretrain: None
-    resolution: short-side 320
+    Training Resources: 24 GPUs
   Modality: RGB
   Name: slowfast_r50_8x8x1_256e_kinetics400_rgb
   Results:
@@ -122,19 +112,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log
   Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/slowfast_r50_8x8x1_256e_kinetics400_rgb_20200716-73547d2b.pth
-  gpu_mem(M): '9062'
-  inference_time(video/s): 1.3 ((32+8)x10x3 frames)
 - Config: configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py
   In Collection: SlowFast
   Metadata:
     Architecture: ResNet101 + ResNet50
+    Batch Size: 8
     Epochs: 256
     Parameters: 62384312
-    Training BatchSize / GPU: 8
+    Pretrained: None
+    Resolution: short-side 256
     Training Data: Kinetics-400
-    gpus: 8
-    pretrain: None
-    resolution: short-side 256
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: slowfast_r101_r50_4x16x1_256e_kinetics400_rgb
   Results:
@@ -146,19 +134,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log
   Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/slowfast_r101_4x16x1_256e_kinetics400_rgb_20210218-d8b58813.pth
-  gpu_mem(M): '16628'
-  inference_time(video/s): ''
 - Config: configs/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb.py
   In Collection: SlowFast
   Metadata:
     Architecture: ResNet101
+    Batch Size: 8
     Epochs: 256
     Parameters: 62912312
-    Training BatchSize / GPU: 8
+    Pretrained: None
+    Resolution: short-side 256
     Training Data: Kinetics-400
-    gpus: 32
-    pretrain: None
-    resolution: short-side 256
+    Training Resources: 32 GPUs
   Modality: RGB
   Name: slowfast_r101_8x8x1_256e_kinetics400_rgb
   Results:
@@ -170,19 +156,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log
   Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/slowfast_r101_8x8x1_256e_kinetics400_rgb_20210218-0dd54025.pth
-  gpu_mem(M): '25994'
-  inference_time(video/s): ''
 - Config: configs/recognition/slowfast/slowfast_r152_r50_4x16x1_256e_kinetics400_rgb.py
   In Collection: SlowFast
   Metadata:
     Architecture: ResNet152 + ResNet50
+    Batch Size: 8
     Epochs: 256
     Parameters: 84843704
-    Training BatchSize / GPU: 8
+    Pretrained: None
+    Resolution: short-side 256
     Training Data: Kinetics-400
-    gpus: 8
-    pretrain: None
-    resolution: short-side 256
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: slowfast_r152_r50_4x16x1_256e_kinetics400_rgb
   Results:
@@ -194,5 +178,3 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log
   Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/slowfast_r152_4x16x1_256e_kinetics400_rgb_20210122-bdeb6b87.pth
-  gpu_mem(M): '10077'
-  inference_time(video/s): ''
diff --git a/configs/recognition/slowonly/metafile.yml b/configs/recognition/slowonly/metafile.yml
index d2ca9732b8..da79055056 100644
--- a/configs/recognition/slowonly/metafile.yml
+++ b/configs/recognition/slowonly/metafile.yml
@@ -1,19 +1,17 @@
 Collections:
-- Metadata:
-    Training Data: null
-  Name: SlowOnly
+- Name: SlowOnly
   README: configs/recognition/slowonly/README.md
 Models:
 - Config: configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py
   In Collection: SlowOnly
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 256
     Parameters: 32454096
-    Training BatchSize / GPU: 8
+    Pretrained: None
+    Resolution: short-side 320
     Training Data: Kinetics-400
-    pretrain: None
-    resolution: short-side 320
   Modality: RGB
   Name: slowonly_r50_omnisource_4x16x1_256e_kinetics400_rgb
   Results:
@@ -27,12 +25,12 @@ Models:
   In Collection: SlowOnly
   Metadata:
     Architecture: ResNet101
+    Batch Size: 8
     Epochs: 196
     Parameters: 60359120
-    Training BatchSize / GPU: 8
+    Pretrained: None
+    Resolution: short-side 320
     Training Data: Kinetics-400
-    pretrain: None
-    resolution: short-side 320
   Modality: RGB
   Name: slowonly_r101_8x8x1_196e_kinetics400_rgb
   Results:
@@ -46,12 +44,12 @@ Models:
   In Collection: SlowOnly
   Metadata:
     Architecture: ResNet101
+    Batch Size: 8
     Epochs: 196
     Parameters: 60359120
-    Training BatchSize / GPU: 8
+    Pretrained: None
+    Resolution: short-side 320
     Training Data: Kinetics-400
-    pretrain: None
-    resolution: short-side 320
   Modality: RGB
   Name: slowonly_r101_omnisource_8x8x1_196e_kinetics400_rgb
   Results:
@@ -65,13 +63,13 @@ Models:
   In Collection: SlowOnly
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 256
     Parameters: 32454096
-    Training BatchSize / GPU: 8
+    Pretrained: None
+    Resolution: short-side 256
     Training Data: Kinetics-400
-    gpus: 32
-    pretrain: None
-    resolution: short-side 256
+    Training Resources: 32 GPUs
   Modality: RGB
   Name: slowonly_r50_4x16x1_256e_kinetics400_rgb
   Results:
@@ -83,19 +81,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log
   Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb_20200820-bea7701f.pth
-  gpu_mem(M): '3168'
-  inference_time(video/s): x
 - Config: configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py
   In Collection: SlowOnly
   Metadata:
     Architecture: ResNet50
+    Batch Size: 24
     Epochs: 256
     Parameters: 32454096
-    Training BatchSize / GPU: 24
+    Pretrained: None
+    Resolution: short-side 320
     Training Data: Kinetics-400
-    gpus: 16
-    pretrain: None
-    resolution: short-side 320
+    Training Resources: 16 GPUs
   Modality: RGB
   Name: slowonly_r50_video_4x16x1_256e_kinetics400_rgb
   Results:
@@ -107,19 +103,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014.log
   Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014-c9cdc656.pth
-  gpu_mem(M): '8472'
-  inference_time(video/s): x
 - Config: configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py
   In Collection: SlowOnly
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 256
     Parameters: 32454096
-    Training BatchSize / GPU: 8
+    Pretrained: None
+    Resolution: short-side 256
     Training Data: Kinetics-400
-    gpus: 32
-    pretrain: None
-    resolution: short-side 256
+    Training Resources: 32 GPUs
   Modality: RGB
   Name: slowonly_r50_8x8x1_256e_kinetics400_rgb
   Results:
@@ -131,19 +125,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/20200817_003320.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/20200817_003320.log
   Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb_20200820-75851a7d.pth
-  gpu_mem(M): '5820'
-  inference_time(video/s): x
 - Config: configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py
   In Collection: SlowOnly
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 256
     Parameters: 32454096
-    Training BatchSize / GPU: 8
+    Pretrained: None
+    Resolution: short-side 320
     Training Data: Kinetics-400
-    gpus: 16
-    pretrain: None
-    resolution: short-side 320
+    Training Resources: 16 GPUs
   Modality: RGB
   Name: slowonly_r50_4x16x1_256e_kinetics400_rgb
   Results:
@@ -155,19 +147,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16_73.02_90.77.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/so_4x16.log
   Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth
-  gpu_mem(M): '3168'
-  inference_time(video/s): 4.0 (40x3 frames)
 - Config: configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py
   In Collection: SlowOnly
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 256
     Parameters: 32454096
-    Training BatchSize / GPU: 8
+    Pretrained: None
+    Resolution: short-side 320
     Training Data: Kinetics-400
-    gpus: 24
-    pretrain: None
-    resolution: short-side 320
+    Training Resources: 24 GPUs
   Modality: RGB
   Name: slowonly_r50_8x8x1_256e_kinetics400_rgb
   Results:
@@ -179,19 +169,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/slowonly_r50_8x8_74.93_91.92.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/so_8x8.log
   Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/slowonly_r50_8x8x1_256e_kinetics400_rgb_20200703-a79c555a.pth
-  gpu_mem(M): '5820'
-  inference_time(video/s): 2.3 (80x3 frames)
 - Config: configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb.py
   In Collection: SlowOnly
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 150
     Parameters: 32454096
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
+    Resolution: short-side 320
     Training Data: Kinetics-400
-    gpus: 16
-    pretrain: ImageNet
-    resolution: short-side 320
+    Training Resources: 16 GPUs
   Modality: RGB
   Name: slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb
   Results:
@@ -203,19 +191,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912.log
   Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912-1e8fc736.pth
-  gpu_mem(M): '3168'
-  inference_time(video/s): x
 - Config: configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb.py
   In Collection: SlowOnly
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 150
     Parameters: 32454096
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
+    Resolution: short-side 320
     Training Data: Kinetics-400
-    gpus: 32
-    pretrain: ImageNet
-    resolution: short-side 320
+    Training Resources: 32 GPUs
   Modality: RGB
   Name: slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb
   Results:
@@ -227,19 +213,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912.log
   Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912-3f9ce182.pth
-  gpu_mem(M): '5820'
-  inference_time(video/s): x
 - Config: configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb.py
   In Collection: SlowOnly
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 150
     Parameters: 39808464
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
+    Resolution: short-side 320
     Training Data: Kinetics-400
-    gpus: 16
-    pretrain: ImageNet
-    resolution: short-side 320
+    Training Resources: 16 GPUs
   Modality: RGB
   Name: slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb
   Results:
@@ -251,19 +235,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/20210305_152630.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/20210305_152630.log
   Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb_20210308-0d6e5a69.pth
-  gpu_mem(M): '4435'
-  inference_time(video/s): x
 - Config: configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb.py
   In Collection: SlowOnly
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 150
     Parameters: 39808464
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
+    Resolution: short-side 320
     Training Data: Kinetics-400
-    gpus: 32
-    pretrain: ImageNet
-    resolution: short-side 320
+    Training Resources: 32 GPUs
   Modality: RGB
   Name: slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb
   Results:
@@ -275,19 +257,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/20210308_212250.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/20210308_212250.log
   Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb_20210308-e8dd9e82.pth
-  gpu_mem(M): '8895'
-  inference_time(video/s): x
 - Config: configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow.py
   In Collection: SlowOnly
   Metadata:
     Architecture: ResNet50
+    Batch Size: 24
     Epochs: 256
     Parameters: 32450960
-    Training BatchSize / GPU: 24
+    Pretrained: ImageNet
+    Resolution: short-side 320
     Training Data: Kinetics-400
-    gpus: 16
-    pretrain: ImageNet
-    resolution: short-side 320
+    Training Resources: 16 GPUs
   Modality: Flow
   Name: slowonly_r50_4x16x1_256e_kinetics400_flow
   Results:
@@ -299,19 +279,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_61.8_83.6.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_61.8_83.6.log
   Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_20200704-decb8568.pth
-  gpu_mem(M): '8450'
-  inference_time(video/s): x
 - Config: configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow.py
   In Collection: SlowOnly
   Metadata:
     Architecture: ResNet50
+    Batch Size: 12
     Epochs: 196
     Parameters: 32450960
-    Training BatchSize / GPU: 12
+    Pretrained: ImageNet
+    Resolution: short-side 320
     Training Data: Kinetics-400
-    gpus: 32
-    pretrain: ImageNet
-    resolution: short-side 320
+    Training Resources: 32 GPUs
   Modality: Flow
   Name: slowonly_r50_8x8x1_196e_kinetics400_flow
   Results:
@@ -323,19 +301,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_196e_kinetics400_flow_65.8_86.3.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_196e_kinetics400_flow_65.8_86.3.log
   Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_256e_kinetics400_flow_20200704-6b384243.pth
-  gpu_mem(M): '8455'
-  inference_time(video/s): x
 - Config: configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb.py
   In Collection: SlowOnly
   Metadata:
     Architecture: ResNet50
+    Batch Size: 12
     Epochs: 256
     Parameters: 32863896
-    Training BatchSize / GPU: 12
+    Pretrained: None
+    Resolution: short-side 256
     Training Data: Kinetics-600
-    gpus: 32
-    pretrain: None
-    resolution: short-side 256
+    Training Resources: 32 GPUs
   Modality: RGB
   Name: slowonly_r50_video_8x8x1_256e_kinetics600_rgb
   Results:
@@ -351,13 +327,13 @@ Models:
   In Collection: SlowOnly
   Metadata:
     Architecture: ResNet50
+    Batch Size: 12
     Epochs: 256
     Parameters: 33068796
-    Training BatchSize / GPU: 12
+    Pretrained: None
+    Resolution: short-side 256
     Training Data: Kinetics-700
-    gpus: 32
-    pretrain: None
-    resolution: short-side 256
+    Training Resources: 32 GPUs
   Modality: RGB
   Name: slowonly_r50_video_8x8x1_256e_kinetics700_rgb
   Results:
@@ -373,13 +349,13 @@ Models:
   In Collection: SlowOnly
   Metadata:
     Architecture: ResNet50
+    Batch Size: 24
     Epochs: 120
     Parameters: 32454096
-    Training BatchSize / GPU: 24
+    Pretrained: ImageNet
+    Resolution: short-side 256
     Training Data: GYM99
-    gpus: 16
-    pretrain: ImageNet
-    resolution: short-side 256
+    Training Resources: 16 GPUs
   Modality: RGB
   Name: slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb
   Results:
@@ -395,13 +371,13 @@ Models:
   In Collection: SlowOnly
   Metadata:
     Architecture: ResNet50
+    Batch Size: 24
     Epochs: 120
     Parameters: 32450960
-    Training BatchSize / GPU: 24
+    Pretrained: Kinetics
+    Resolution: short-side 256
     Training Data: GYM99
-    gpus: 16
-    pretrain: Kinetics
-    resolution: short-side 256
+    Training Resources: 16 GPUs
   Modality: Flow
   Name: slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow
   Results:
@@ -413,3 +389,24 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.log
   Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111-66ecdb3c.pth
+- Config: configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.py
+  In Collection: SlowOnly
+  Metadata:
+    Architecture: ResNet50
+    Batch Size: 8
+    Epochs: 64
+    Parameters: 31689819
+    Pretrained: ImageNet
+    Resolution: height 100
+    Training Data: Jester
+    Training Resources: 8 GPUs
+  Modality: RGB
+  Name: slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb
+  Results:
+  - Dataset: Jester
+    Metrics:
+      top1 acc: 97.2
+    Task: Action Recognition
+  Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.log.json
+  Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.log
+  Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb-b56a5389.pth
diff --git a/configs/recognition/tanet/metafile.yml b/configs/recognition/tanet/metafile.yml
index 7f991f2ffc..6ce789d4ac 100644
--- a/configs/recognition/tanet/metafile.yml
+++ b/configs/recognition/tanet/metafile.yml
@@ -1,20 +1,18 @@
 Collections:
-- Metadata:
-    Training Data: null
-  Name: TANet
+- Name: TANet
   README: configs/recognition/tanet/README.md
 Models:
 - Config: configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb.py
   In Collection: TANet
   Metadata:
     Architecture: TANet
+    Batch Size: 8
     Epochs: 100
     Parameters: 25590320
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
+    Resolution: short-side 320
     Training Data: Kinetics-400
-    gpus: 8
-    pretrain: ImageNet
-    resolution: short-side 320
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tanet_r50_dense_1x1x8_100e_kinetics400_rgb
   Results:
@@ -26,5 +24,3 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219-032c8e94.pth
-  gpu_mem(M): '7124'
-  inference_time(video/s): x
diff --git a/configs/recognition/tin/metafile.yml b/configs/recognition/tin/metafile.yml
index e765a40046..f67be352d9 100644
--- a/configs/recognition/tin/metafile.yml
+++ b/configs/recognition/tin/metafile.yml
@@ -1,20 +1,18 @@
 Collections:
-- Metadata:
-    Training Data: null
-  Name: TIN
+- Name: TIN
   README: configs/recognition/tin/README.md
 Models:
 - Config: configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py
   In Collection: TIN
   Metadata:
     Architecture: ResNet50
+    Batch Size: 6
     Epochs: 40
     Parameters: 23895566
-    Training BatchSize / GPU: 6
+    Pretrained: ImageNet
+    Resolution: height 100
     Training Data: SthV1
-    gpus: 32
-    pretrain: ImageNet
-    resolution: height 100
+    Training Resources: 32 GPUs
   Modality: RGB
   Name: tin_r50_1x1x8_40e_sthv1_rgb
   Results:
@@ -26,18 +24,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb/20200729_034132.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb/20200729_034132.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb/tin_r50_1x1x8_40e_sthv1_rgb_20200729-4a33db86.pth
-  gpu_mem(M): '6181'
 - Config: configs/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb.py
   In Collection: TIN
   Metadata:
     Architecture: ResNet50
+    Batch Size: 6
     Epochs: 40
     Parameters: 23895566
-    Training BatchSize / GPU: 6
+    Pretrained: ImageNet
+    Resolution: height 240
     Training Data: SthV2
-    gpus: 32
-    pretrain: ImageNet
-    resolution: height 240
+    Training Resources: 32 GPUs
   Modality: RGB
   Name: tin_r50_1x1x8_40e_sthv2_rgb
   Results:
@@ -49,18 +46,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb/20200912_225451.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb/20200912_225451.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb/tin_r50_1x1x8_40e_sthv2_rgb_20200912-b27a7337.pth
-  gpu_mem(M): '6185'
 - Config: configs/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb.py
   In Collection: TIN
   Metadata:
     Architecture: ResNet50
+    Batch Size: 6
     Epochs: 50
     Parameters: 24358640
-    Training BatchSize / GPU: 6
+    Pretrained: TSM-Kinetics400
+    Resolution: short-side 256
     Training Data: Kinetics-400
-    gpus: 32
-    pretrain: TSM-Kinetics400
-    resolution: short-side 256
+    Training Resources: 32 GPUs
   Modality: RGB
   Name: tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb
   Results:
@@ -72,4 +68,3 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/20200809_142447.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/20200809_142447.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb_20200810-4a146a70.pth
-  gpu_mem(M): '6187'
diff --git a/configs/recognition/tpn/metafile.yml b/configs/recognition/tpn/metafile.yml
index ca0655ff49..b091904d46 100644
--- a/configs/recognition/tpn/metafile.yml
+++ b/configs/recognition/tpn/metafile.yml
@@ -1,20 +1,18 @@
 Collections:
-- Metadata:
-    Training Data: null
-  Name: TPN
+- Name: TPN
   README: configs/recognition/tpn/README.md
 Models:
 - Config: configs/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb.py
   In Collection: TPN
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 150
     Parameters: 91498336
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
+    Resolution: short-side 320
     Training Data: Kinetics-400
-    gpus: 32
-    pretrain: ImageNet
-    resolution: short-side 320
+    Training Resources: 32 GPUs
   Modality: RGB
   Name: tpn_slowonly_r50_8x8x1_150e_kinetics_rgb
   Results:
@@ -26,19 +24,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/20200910_134330.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/20200910_134330.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb_20200910-b796d7a0.pth
-  gpu_mem(M): '6916'
-  inference_time(video/s): x
 - Config: configs/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.py
   In Collection: TPN
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 150
     Parameters: 91498336
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
+    Resolution: short-side 320
     Training Data: Kinetics-400
-    gpus: 32
-    pretrain: ImageNet
-    resolution: short-side 320
+    Training Resources: 32 GPUs
   Modality: RGB
   Name: tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb
   Results:
@@ -50,19 +46,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/20200923_151919.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/20200923_151919.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb_20200923-52629684.pth
-  gpu_mem(M): '6916'
-  inference_time(video/s): x
 - Config: configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py
   In Collection: TPN
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 150
     Parameters: 82445724
-    Training BatchSize / GPU: 8
+    Pretrained: TSM
+    Resolution: height 100
     Training Data: SthV1
-    gpus: 48
-    pretrain: TSM
-    resolution: height 100
+    Training Resources: 48 GPUs
   Modality: RGB
   Name: tpn_tsm_r50_1x1x8_150e_sthv1_rgb
   Results:
@@ -74,4 +68,3 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/20210311_162636.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/20210311_162636.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/tpn_tsm_r50_1x1x8_150e_sthv1_rgb_20210311-28de4cd5.pth
-  gpu_mem(M): '8828'
diff --git a/configs/recognition/trn/metafile.yml b/configs/recognition/trn/metafile.yml
index c4e5586939..10890f14de 100644
--- a/configs/recognition/trn/metafile.yml
+++ b/configs/recognition/trn/metafile.yml
@@ -1,20 +1,18 @@
 Collections:
-- Metadata:
-    Training Data: null
-  Name: TRN
+- Name: TRN
   README: configs/recognition/trn/README.md
 Models:
 - Config: configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py
   In Collection: TRN
   Metadata:
     Architecture: ResNet50
+    Batch Size: 16
     Epochs: 50
     Parameters: 26641154
-    Training BatchSize / GPU: 16
+    Pretrained: ImageNet
+    Resolution: height 100
     Training Data: SthV1
-    gpus: 8
-    pretrain: ImageNet
-    resolution: height 100
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: trn_r50_1x1x8_50e_sthv1_rgb
   Results:
@@ -28,18 +26,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log
   Weights: https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/trn_r50_1x1x8_50e_sthv1_rgb_20210401-163704a8.pth
-  gpu_mem(M): '11010'
 - Config: configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py
   In Collection: TRN
   Metadata:
     Architecture: ResNet50
+    Batch Size: 16
     Epochs: 50
     Parameters: 26641154
-    Training BatchSize / GPU: 16
+    Pretrained: ImageNet
+    Resolution: height 100
     Training Data: SthV2
-    gpus: 8
-    pretrain: ImageNet
-    resolution: height 100
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: trn_r50_1x1x8_50e_sthv2_rgb
   Results:
@@ -53,4 +50,3 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210326_103951.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210326_103951.log
   Weights: https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/trn_r50_1x1x8_50e_sthv2_rgb_20210401-773eca7b.pth
-  gpu_mem(M): '11010'
diff --git a/configs/recognition/tsm/metafile.yml b/configs/recognition/tsm/metafile.yml
index 6c099238aa..11bd167de2 100644
--- a/configs/recognition/tsm/metafile.yml
+++ b/configs/recognition/tsm/metafile.yml
@@ -1,20 +1,18 @@
 Collections:
-- Metadata:
-    Training Data: null
-  Name: TSM
+- Name: TSM
   README: configs/recognition/tsm/README.md
 Models:
 - Config: configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py
   In Collection: TSM
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 50
     Parameters: 24327632
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
+    Resolution: 340x256
     Training Data: Kinetics-400
-    gpus: 8
-    pretrain: ImageNet
-    resolution: 340x256
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsm_r50_1x1x8_50e_kinetics400_rgb
   Results:
@@ -26,19 +24,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20200607-af7fb746.pth
-  gpu_mem(M): '7079'
-  inference_time(video/s): 74.0 (8x1 frames)
 - Config: configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py
   In Collection: TSM
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 50
     Parameters: 24327632
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
+    Resolution: short-side 256
     Training Data: Kinetics-400
-    gpus: 8
-    pretrain: ImageNet
-    resolution: short-side 256
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsm_r50_1x1x8_50e_kinetics400_rgb
   Results:
@@ -50,19 +46,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/tsm_r50_256p_1x1x8_50e_kinetics400_rgb_20200726-020785e2.pth
-  gpu_mem(M): '7079'
-  inference_time(video/s): x
 - Config: configs/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py
   In Collection: TSM
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 50
     Parameters: 24327632
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
+    Resolution: short-side 256
     Training Data: Kinetics-400
-    gpus: 8
-    pretrain: ImageNet
-    resolution: short-side 256
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py
   Results:
@@ -74,19 +68,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219-bf96e6cc.pth
-  gpu_mem(M): '7076'
-  inference_time(video/s): x
 - Config: configs/recognition/tsm/tsm_r50_video_1x1x8_50e_kinetics400_rgb.py
   In Collection: TSM
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 50
     Parameters: 24327632
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
+    Resolution: short-side 256
     Training Data: Kinetics-400
-    gpus: 8
-    pretrain: ImageNet
-    resolution: short-side 256
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsm_r50_video_1x1x8_50e_kinetics400_rgb
   Results:
@@ -98,19 +90,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_1x1x8_100e_kinetics400_rgb_20200702-a77f4328.pth
-  gpu_mem(M): '7077'
-  inference_time(video/s): 74.0 (8x1 frames)
 - Config: configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py
   In Collection: TSM
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 100
     Parameters: 24327632
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
+    Resolution: 340x256
     Training Data: Kinetics-400
-    gpus: 32
-    pretrain: ImageNet
-    resolution: 340x256
+    Training Resources: 32 GPUs
   Modality: RGB
   Name: tsm_r50_dense_1x1x8_100e_kinetics400_rgb
   Results:
@@ -122,19 +112,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20200626_213415.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20200626_213415.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/tsm_r50_dense_1x1x8_100e_kinetics400_rgb_20200626-91a54551.pth
-  gpu_mem(M): '7079'
-  inference_time(video/s): 11.5 (8x10 frames)
 - Config: configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py
   In Collection: TSM
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 100
     Parameters: 24327632
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
+    Resolution: short-side 256
     Training Data: Kinetics-400
-    gpus: 8
-    pretrain: ImageNet
-    resolution: short-side 256
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsm_r50_dense_1x1x8_100e_kinetics400_rgb
   Results:
@@ -146,19 +134,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/20200725_032043.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/20200725_032043.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb_20200727-e1e0c785.pth
-  gpu_mem(M): '7079'
-  inference_time(video/s): x
 - Config: configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py
   In Collection: TSM
   Metadata:
     Architecture: ResNet50
+    Batch Size: 6
     Epochs: 50
     Parameters: 24327632
-    Training BatchSize / GPU: 6
+    Pretrained: ImageNet
+    Resolution: 340x256
     Training Data: Kinetics-400
-    gpus: 8
-    pretrain: ImageNet
-    resolution: 340x256
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsm_r50_1x1x16_50e_kinetics400_rgb
   Results:
@@ -170,19 +156,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/tsm_r50_340x256_1x1x16_50e_kinetics400_rgb_20201011-2f27f229.pth
-  gpu_mem(M): '10404'
-  inference_time(video/s): 47.0 (16x1 frames)
 - Config: configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py
   In Collection: TSM
   Metadata:
     Architecture: ResNet50
+    Batch Size: 6
     Epochs: 50
     Parameters: 24327632
-    Training BatchSize / GPU: 6
+    Pretrained: ImageNet
+    Resolution: short-side 256
     Training Data: Kinetics-400
-    gpus: 32
-    pretrain: ImageNet
-    resolution: short-side 256
+    Training Resources: 32 GPUs
   Modality: RGB
   Name: tsm_r50_1x1x16_50e_kinetics400_rgb
   Results:
@@ -194,19 +178,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/tsm_r50_256p_1x1x16_50e_kinetics400_rgb_20201010-85645c2a.pth
-  gpu_mem(M): '10398'
-  inference_time(video/s): x
 - Config: configs/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb.py
   In Collection: TSM
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 50
     Parameters: 31682000
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
+    Resolution: short-side 320
     Training Data: Kinetics-400
-    gpus: 32
-    pretrain: ImageNet
-    resolution: short-side 320
+    Training Resources: 32 GPUs
   Modality: RGB
   Name: tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb
   Results:
@@ -218,19 +200,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb_20200724-f00f1336.pth
-  gpu_mem(M): '8931'
-  inference_time(video/s): x
 - Config: configs/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb.py
   In Collection: TSM
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 50
     Parameters: 28007888
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
+    Resolution: short-side 320
     Training Data: Kinetics-400
-    gpus: 32
-    pretrain: ImageNet
-    resolution: short-side 320
+    Training Resources: 32 GPUs
   Modality: RGB
   Name: tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb
   Results:
@@ -242,19 +222,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb_20200816-b93fd297.pth
-  gpu_mem(M): '10125'
-  inference_time(video/s): x
 - Config: configs/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb.py
   In Collection: TSM
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 50
     Parameters: 31682000
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
+    Resolution: short-side 320
     Training Data: Kinetics-400
-    gpus: 32
-    pretrain: ImageNet
-    resolution: short-side 320
+    Training Resources: 32 GPUs
   Modality: RGB
   Name: tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb
   Results:
@@ -266,19 +244,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb_20200724-d8ad84d2.pth
-  gpu_mem(M): '8358'
-  inference_time(video/s): x
 - Config: configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py
   In Collection: TSM
   Metadata:
     Architecture: MobileNetV2
+    Batch Size: 8
     Epochs: 100
     Parameters: 2736272
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
+    Resolution: short-side 320
     Training Data: Kinetics-400
-    gpus: 8
-    pretrain: ImageNet
-    resolution: short-side 320
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb
   Results:
@@ -290,18 +266,16 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/tsm_mobilenetv2_dense_320p_1x1x8_100e_kinetics400_rgb_20210202-61135809.pth
-  gpu_mem(M): '3385'
-  inference_time(video/s): x
 - Config: configs/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb.py
   In Collection: TSM
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 50
     Parameters: 23606384
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
     Training Data: Diving48
-    gpus: 8
-    pretrain: ImageNet
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsm_r50_video_1x1x8_50e_diving48_rgb
   Results:
@@ -313,17 +287,16 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/tsm_r50_video_1x1x8_50e_diving48_rgb_20210426-aba5aa3d.pth
-  gpu_mem(M): '7070'
 - Config: configs/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb.py
   In Collection: TSM
   Metadata:
     Architecture: ResNet50
+    Batch Size: 4
     Epochs: 50
     Parameters: 23606384
-    Training BatchSize / GPU: 4
+    Pretrained: ImageNet
     Training Data: Diving48
-    gpus: 8
-    pretrain: ImageNet
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsm_r50_video_1x1x16_50e_diving48_rgb
   Results:
@@ -335,18 +308,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/tsm_r50_video_1x1x16_50e_diving48_rgb_20210426-aa9631c0.pth
-  gpu_mem(M): '7070'
 - Config: configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb.py
   In Collection: TSM
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 50
     Parameters: 23864558
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
+    Resolution: height 100
     Training Data: SthV1
-    gpus: 8
-    pretrain: ImageNet
-    resolution: height 100
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsm_r50_1x1x8_50e_sthv1_rgb
   Results:
@@ -360,20 +332,19 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/20210203_150227.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/20210203_150227.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/tsm_r50_1x1x8_50e_sthv1_rgb_20210203-01dce462.pth
-  gpu_mem(M): '7077'
   reference top1 acc (efficient/accurate): '[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)'
   reference top5 acc (efficient/accurate): '[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)'
 - Config: configs/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb.py
   In Collection: TSM
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 50
     Parameters: 23864558
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
+    Resolution: height 100
     Training Data: SthV1
-    gpus: 8
-    pretrain: ImageNet
-    resolution: height 100
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsm_r50_flip_1x1x8_50e_sthv1_rgb
   Results:
@@ -387,20 +358,19 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/20210203_145829.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/20210203_145829.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/tsm_r50_flip_1x1x8_50e_sthv1_rgb_20210203-12596f16.pth
-  gpu_mem(M): '7077'
   reference top1 acc (efficient/accurate): '[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)'
   reference top5 acc (efficient/accurate): '[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)'
 - Config: configs/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.py
   In Collection: TSM
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 50
     Parameters: 23864558
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
+    Resolution: height 100
     Training Data: SthV1
-    gpus: 8
-    pretrain: ImageNet
-    resolution: height 100
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsm_r50_randaugment_1x1x8_50e_sthv1_rgb
   Results:
@@ -414,20 +384,19 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb_20210324-481268d9.pth
-  gpu_mem(M): '7077'
   reference top1 acc (efficient/accurate): '[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)'
   reference top5 acc (efficient/accurate): '[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)'
 - Config: configs/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.py
   In Collection: TSM
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 50
     Parameters: 23864558
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
+    Resolution: height 100
     Training Data: SthV1
-    gpus: 8
-    pretrain: ImageNet
-    resolution: height 100
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb
   Results:
@@ -441,20 +410,19 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb_20210324-76937692.pth
-  gpu_mem(M): '7077'
   reference top1 acc (efficient/accurate): '[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)'
   reference top5 acc (efficient/accurate): '[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)'
 - Config: configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb.py
   In Collection: TSM
   Metadata:
     Architecture: ResNet50
+    Batch Size: 6
     Epochs: 50
     Parameters: 23864558
-    Training BatchSize / GPU: 6
+    Pretrained: ImageNet
+    Resolution: height 100
     Training Data: SthV1
-    gpus: 8
-    pretrain: ImageNet
-    resolution: height 100
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsm_r50_1x1x16_50e_sthv1_rgb
   Results:
@@ -468,20 +436,19 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/20201010_221240.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/20201010_221240.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb_20201010-17fa49f6.pth
-  gpu_mem(M): '10390'
   reference top1 acc (efficient/accurate): '[47.05 / 48.61](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)'
   reference top5 acc (efficient/accurate): '[76.40 / 77.96](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)'
 - Config: configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb.py
   In Collection: TSM
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 50
     Parameters: 42856686
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
+    Resolution: height 100
     Training Data: SthV1
-    gpus: 8
-    pretrain: ImageNet
-    resolution: height 100
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsm_r101_1x1x8_50e_sthv1_rgb
   Results:
@@ -495,20 +462,19 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/20201010_224055.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/20201010_224055.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb_20201010-43fedf2e.pth
-  gpu_mem(M): '9800'
   reference top1 acc (efficient/accurate): '[46.64 / 48.13](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)'
   reference top5 acc (efficient/accurate): '[75.40 / 77.31](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)'
 - Config: configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py
   In Collection: TSM
   Metadata:
     Architecture: ResNet50
+    Batch Size: 6
     Epochs: 50
     Parameters: 23864558
-    Training BatchSize / GPU: 6
+    Pretrained: ImageNet
+    Resolution: height 240
     Training Data: SthV2
-    gpus: 8
-    pretrain: ImageNet
-    resolution: height 240
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsm_r50_1x1x8_50e_sthv2_rgb
   Results:
@@ -522,20 +488,19 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20200912_140737.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20200912_140737.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/tsm_r50_1x1x8_50e_sthv2_rgb_20200912-033c4ac6.pth
-  gpu_mem(M): '7069'
   reference top1 acc (efficient/accurate): '[57.98 / 60.69](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)'
   reference top5 acc (efficient/accurate): '[84.57 / 86.28](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)'
 - Config: configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py
   In Collection: TSM
   Metadata:
     Architecture: ResNet50
+    Batch Size: 6
     Epochs: 50
     Parameters: 23864558
-    Training BatchSize / GPU: 6
+    Pretrained: ImageNet
+    Resolution: height 256
     Training Data: SthV2
-    gpus: 8
-    pretrain: ImageNet
-    resolution: height 256
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsm_r50_1x1x8_50e_sthv2_rgb
   Results:
@@ -549,20 +514,19 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210401_143656.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210401_143656.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/tsm_r50_256h_1x1x8_50e_sthv2_rgb_20210401-df97f3e1.pth
-  gpu_mem(M): '7069'
   reference top1 acc (efficient/accurate): '[xx / 61.2](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)'
   reference top5 acc (efficient/accurate): '[xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)'
 - Config: configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb.py
   In Collection: TSM
   Metadata:
     Architecture: ResNet50
+    Batch Size: 6
     Epochs: 50
     Parameters: 23864558
-    Training BatchSize / GPU: 6
+    Pretrained: ImageNet
+    Resolution: height 240
     Training Data: SthV2
-    gpus: 8
-    pretrain: ImageNet
-    resolution: height 240
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsm_r50_1x1x16_50e_sthv2_rgb
   Results:
@@ -576,20 +540,19 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20201010_224215.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20201010_224215.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/tsm_r50_1x1x16_50e_sthv2_rgb_20201010-16469c6f.pth
-  gpu_mem(M): '10400'
   reference top1 acc (efficient/accurate): '[58.90 / 60.98](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)'
   reference top5 acc (efficient/accurate): '[85.29 / 86.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)'
 - Config: configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py
   In Collection: TSM
   Metadata:
     Architecture: ResNet50
+    Batch Size: 6
     Epochs: 50
     Parameters: 23864558
-    Training BatchSize / GPU: 6
+    Pretrained: ImageNet
+    Resolution: height 256
     Training Data: SthV2
-    gpus: 8
-    pretrain: ImageNet
-    resolution: height 256
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsm_r50_1x1x16_50e_sthv2_rgb
   Results:
@@ -603,20 +566,19 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20210331_134458.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20210331_134458.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/tsm_r50_256h_1x1x16_50e_sthv2_rgb_20210331-0a45549c.pth
-  gpu_mem(M): '10400'
   reference top1 acc (efficient/accurate): '[xx / 63.1](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)'
   reference top5 acc (efficient/accurate): '[xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)'
 - Config: configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb.py
   In Collection: TSM
   Metadata:
     Architecture: ResNet101
+    Batch Size: 8
     Epochs: 50
     Parameters: 42856686
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
+    Resolution: height 240
     Training Data: SthV2
-    gpus: 8
-    pretrain: ImageNet
-    resolution: height 240
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsm_r101_1x1x8_50e_sthv2_rgb
   Results:
@@ -630,20 +592,19 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20201010_224100.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20201010_224100.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/tsm_r101_1x1x8_50e_sthv2_rgb_20201010-98cdedb8.pth
-  gpu_mem(M): '9784'
   reference top1 acc (efficient/accurate): '[58.89 / 61.36](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)'
   reference top5 acc (efficient/accurate): '[85.14 / 87.00](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)'
 - Config: configs/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.py
   In Collection: TSM
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 50
     Parameters: 23864558
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
+    Resolution: height 100
     Training Data: SthV1
-    gpus: 8
-    pretrain: ImageNet
-    resolution: height 100
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsm_r50_mixup_1x1x8_50e_sthv1_rgb
   Results:
@@ -663,13 +624,13 @@ Models:
   In Collection: TSM
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 50
     Parameters: 23864558
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
+    Resolution: height 100
     Training Data: SthV1
-    gpus: 8
-    pretrain: ImageNet
-    resolution: height 100
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsm_r50_cutmix_1x1x8_50e_sthv1_rgb
   Results:
@@ -685,3 +646,25 @@ Models:
   Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb-34934615.pth
   delta top1 acc (efficient/accurate): +0.34 / -0.24
   delta top5 acc (efficient/accurate): +0.21 / +0.59
+- Config: configs/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb.py
+  In Collection: TSM
+  Metadata:
+    Architecture: ResNet50
+    Batch Size: 8
+    Epochs: 50
+    Parameters: 23563355
+    Pretrained: ImageNet
+    Resolution: height 100
+    Training Data: Jester
+    Training Resources: 8 GPUs
+  Modality: RGB
+  Name: tsm_r50_1x1x8_50e_jester_rgb
+  Results:
+  - Dataset: Jester
+    Metrics:
+    - top1 acc (efficient): 96.5
+    - top1 acc (accurate): 97.2
+    Task: Action Recognition
+  Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb.json
+  Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb.log
+  Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb-c799267e.pth
diff --git a/configs/recognition/tsn/metafile.yml b/configs/recognition/tsn/metafile.yml
index 0c0a00aac1..d17f50e036 100644
--- a/configs/recognition/tsn/metafile.yml
+++ b/configs/recognition/tsn/metafile.yml
@@ -1,19 +1,17 @@
 Collections:
-- Metadata:
-    Training Data: null
-  Name: TSN
+- Name: TSN
   README: configs/recognition/tsn/README.md
 Models:
 - Config: configs/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb.py
   In Collection: TSN
   Metadata:
     Architecture: ResNet50
+    Batch Size: 32
     Epochs: 75
     Parameters: 23714981
-    Training BatchSize / GPU: 32
+    Pretrained: ImageNet
     Training Data: UCF101
-    gpus: 8
-    pretrain: ImageNet
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsn_r50_1x1x3_75e_ucf101_rgb
   Results:
@@ -25,18 +23,16 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb/tsn_r50_1x1x3_75e_ucf101_rgb_20201023.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb/tsn_r50_1x1x3_75e_ucf101_rgb_20201023.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb/tsn_r50_1x1x3_75e_ucf101_rgb_20201023-d85ab600.pth
-  config: '[tsn_r50_1x1x3_75e_ucf101_rgb](/configs/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb.py)'
-  gpu_mem(M): '8332'
 - Config: configs/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb.py
   In Collection: TSN
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 100
     Parameters: 23606384
-    Training BatchSize / GPU: 8
+    Pretrained: ImageNet
     Training Data: Diving48
-    gpus: 8
-    pretrain: ImageNet
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsn_r50_video_1x1x8_100e_diving48_rgb
   Results:
@@ -48,17 +44,16 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/20210426_014138.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/20210426_014138.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/tsn_r50_video_1x1x8_100e_diving48_rgb_20210426-6dde0185.pth
-  gpu_mem(M): '5699'
 - Config: configs/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb.py
   In Collection: TSN
   Metadata:
     Architecture: ResNet50
+    Batch Size: 4
     Epochs: 100
     Parameters: 23606384
-    Training BatchSize / GPU: 4
+    Pretrained: ImageNet
     Training Data: Diving48
-    gpus: 8
-    pretrain: ImageNet
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsn_r50_video_1x1x16_100e_diving48_rgb
   Results:
@@ -70,17 +65,16 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/20210426_014103.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/20210426_014103.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/tsn_r50_video_1x1x16_100e_diving48_rgb_20210426-63c5f2f7.pth
-  gpu_mem(M): '5705'
 - Config: configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb.py
   In Collection: TSN
   Metadata:
     Architecture: ResNet50
+    Batch Size: 32
     Epochs: 50
     Parameters: 23612531
-    Training BatchSize / GPU: 32
+    Pretrained: ImageNet
     Training Data: HMDB51
-    gpus: 8
-    pretrain: ImageNet
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb
   Results:
@@ -92,17 +86,16 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb/20201025_231108.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb/20201025_231108.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb_20201123-ce6c27ed.pth
-  gpu_mem(M): '21535'
 - Config: configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb.py
   In Collection: TSN
   Metadata:
     Architecture: ResNet50
+    Batch Size: 32
     Epochs: 50
     Parameters: 23612531
-    Training BatchSize / GPU: 32
+    Pretrained: Kinetics400
     Training Data: HMDB51
-    gpus: 8
-    pretrain: Kinetics400
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb
   Results:
@@ -114,16 +107,15 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb/20201108_190805.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb/20201108_190805.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb_20201123-7f84701b.pth
-  gpu_mem(M): '21535'
 - Config: configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb.py
   In Collection: TSN
   Metadata:
     Architecture: ResNet50
     Epochs: 50
     Parameters: 23612531
+    Pretrained: Moments
     Training Data: HMDB51
-    gpus: 8
-    pretrain: Moments
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsn_r50_1x1x8_50e_hmdb51_mit_rgb
   Results:
@@ -135,18 +127,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb/20201112_170135.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb/20201112_170135.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb/tsn_r50_1x1x8_50e_hmdb51_mit_rgb_20201123-01526d41.pth
-  gpu_mem(M): '21535'
 - Config: configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py
   In Collection: TSN
   Metadata:
     Architecture: ResNet50
+    Batch Size: 32
     Epochs: 100
     Parameters: 24327632
-    Training BatchSize / GPU: 32
+    Pretrained: ImageNet
+    Resolution: 340x256
     Training Data: Kinetics-400
-    gpus: 8
-    pretrain: ImageNet
-    resolution: 340x256
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tsn_r50_1x1x3_100e_kinetics400_rgb
   Results:
@@ -158,19 +149,17 @@ Models:
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log
   Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth
-  gpu_mem(M): '8344'
-  inference_time(video/s): 4.3 (25x10 frames)
 - Config: configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py
   In Collection: TSN
   Metadata:
     Architecture: ResNet50
+    Batch Size: 32
     Epochs: 100
     Parameters: 24327632
-    Training BatchSize / GPU: 32
+    Pretrained: ImageNet
+    Resolution: short-side 256
     Training Data: Kinetics-400
-    gpus: 8
-    pretrain: ImageNet
-    resolution: short-side 256
+    Training Resources: 8
GPUs Modality: RGB Name: tsn_r50_1x1x3_100e_kinetics400_rgb Results: @@ -182,19 +171,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/tsn_r50_256p_1x1x3_100e_kinetics400_rgb_20200725-22592236.pth - gpu_mem(M): '8343' - inference_time(video/s): x - Config: configs/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb.py In Collection: TSN Metadata: Architecture: ResNet50 + Batch Size: 16 Epochs: 100 Parameters: 24327632 - Training BatchSize / GPU: 16 + Pretrained: ImageNet + Resolution: 340x256 Training Data: Kinetics-400 - gpus: 24 - pretrain: ImageNet - resolution: 340x256 + Training Resources: 24 GPUs Modality: RGB Name: tsn_r50_dense_1x1x5_50e_kinetics400_rgb Results: @@ -206,19 +193,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb/20200627_105310.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb/20200627_105310.log Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb/tsn_r50_dense_1x1x5_100e_kinetics400_rgb_20200627-a063165f.pth - gpu_mem(M): '7028' - inference_time(video/s): 12.7 (8x10 frames) - Config: configs/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb.py In Collection: TSN Metadata: Architecture: ResNet50 + Batch Size: 32 Epochs: 100 Parameters: 24327632 - Training BatchSize / GPU: 32 + Pretrained: ImageNet + Resolution: short-side 320 Training Data: Kinetics-400 - gpus: 16 - pretrain: ImageNet - resolution: short-side 320 + Training Resources: 16 GPUs Modality: RGB Name: tsn_r50_320p_1x1x3_100e_kinetics400_rgb Results: @@ -230,19 +215,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_320p_1x1x3_100e_kinetics400_rgb_20200702-cc665e2a.pth - gpu_mem(M): '8344' - inference_time(video/s): 10.7 (25x3 frames) - Config: configs/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow.py In Collection: TSN Metadata: Architecture: ResNet50 + Batch Size: 32 Epochs: 110 Parameters: 24327632 - Training BatchSize / GPU: 32 + Pretrained: ImageNet + Resolution: short-side 320 Training Data: Kinetics-400 - gpus: 16 - pretrain: ImageNet - resolution: short-side 320 + Training Resources: 16 GPUs Modality: Flow Name: tsn_r50_320p_1x1x3_110e_kinetics400_flow Results: @@ -254,19 +237,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow/tsn_r50_f3_kinetics400_flow_shortedge_55.7_79.9.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow/tsn_r50_f3_kinetics400_flow_shortedge_55.7_79.9.log Weights: 
https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow/tsn_r50_320p_1x1x3_110e_kinetics400_flow_20200705-3036bab6.pth - gpu_mem(M): '8471' - inference_time(video/s): x - Config: configs/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb.py In Collection: TSN Metadata: Architecture: ResNet50 + Batch Size: 12 Epochs: 100 Parameters: 24327632 - Training BatchSize / GPU: 12 + Pretrained: ImageNet + Resolution: short-side 256 Training Data: Kinetics-400 - gpus: 8 - pretrain: ImageNet - resolution: short-side 256 + Training Resources: 8 GPUs Modality: RGB Name: tsn_r50_1x1x8_100e_kinetics400_rgb Results: @@ -278,19 +259,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x8_100e_kinetics400_rgb/20200815_173413.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x8_100e_kinetics400_rgb/20200815_173413.log Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x8_100e_kinetics400_rgb/tsn_r50_256p_1x1x8_100e_kinetics400_rgb_20200817-883baf16.pth - gpu_mem(M): '8343' - inference_time(video/s): x - Config: configs/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb.py In Collection: TSN Metadata: Architecture: ResNet50 + Batch Size: 12 Epochs: 100 Parameters: 24327632 - Training BatchSize / GPU: 12 + Pretrained: ImageNet + Resolution: short-side 320 Training Data: Kinetics-400 - gpus: 24 - pretrain: ImageNet - resolution: short-side 320 + Training Resources: 24 GPUs Modality: RGB Name: tsn_r50_320p_1x1x8_100e_kinetics400_rgb Results: @@ -302,19 +281,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb/tsn_r50_f8_kinetics400_shortedge_72.4_90.6.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb/tsn_r50_f8_kinetics400_shortedge_72.4_90.6.log Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb/tsn_r50_320p_1x1x8_100e_kinetics400_rgb_20200702-ef80e3d7.pth - gpu_mem(M): '8344' - inference_time(video/s): 11.1 (25x3 frames) - Config: configs/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow.py In Collection: TSN Metadata: Architecture: ResNet50 + Batch Size: 12 Epochs: 110 Parameters: 24327632 - Training BatchSize / GPU: 12 + Pretrained: ImageNet + Resolution: short-side 320 Training Data: Kinetics-400 - gpus: 32 - pretrain: ImageNet - resolution: short-side 320 + Training Resources: 32 GPUs Modality: Flow Name: tsn_r50_320p_1x1x8_110e_kinetics400_flow Results: @@ -326,19 +303,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow/tsn_r50_f8_kinetics400_flow_shortedge_57.8_81.0.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow/tsn_r50_f8_kinetics400_flow_shortedge_57.8_81.0.log Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow/tsn_r50_320p_1x1x8_110e_kinetics400_flow_20200705-1f39486b.pth - gpu_mem(M): '8473' - inference_time(video/s): x - Config: configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py In Collection: TSN Metadata: Architecture: ResNet50 + Batch Size: 32 Epochs: 100 Parameters: 24327632 - Training BatchSize / GPU: 32 + Pretrained: ImageNet + Resolution: short-side 320 Training Data: Kinetics-400 - gpus: 8 - 
pretrain: ImageNet - resolution: short-side 320 + Training Resources: 8 GPUs Modality: RGB Name: tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb Results: @@ -350,19 +325,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014.log Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014-5ae1ee79.pth - gpu_mem(M): '8343' - inference_time(video/s): x - Config: configs/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb.py In Collection: TSN Metadata: Architecture: ResNet50 + Batch Size: 12 Epochs: 100 Parameters: 24327632 - Training BatchSize / GPU: 12 + Pretrained: ImageNet + Resolution: 340x256 Training Data: Kinetics-400 - gpus: 8 - pretrain: ImageNet - resolution: 340x256 + Training Resources: 8 GPUs Modality: RGB Name: tsn_r50_dense_1x1x8_100e_kinetics400_rgb Results: @@ -374,19 +347,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/20200606_003901.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/20200606_003901.log Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_dense_1x1x8_100e_kinetics400_rgb_20200606-e925e6e3.pth - gpu_mem(M): '8344' - inference_time(video/s): 12.2 (8x10 frames) - Config: configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py In Collection: TSN Metadata: Architecture: ResNet50 + Batch Size: 32 Epochs: 100 Parameters: 24327632 - Training BatchSize / GPU: 32 + Pretrained: ImageNet + Resolution: short-side 256 Training Data: Kinetics-400 - gpus: 8 - pretrain: ImageNet - resolution: short-side 256 + Training Resources: 8 GPUs Modality: RGB Name: tsn_r50_video_1x1x8_100e_kinetics400_rgb Results: @@ -398,19 +369,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_1x1x8_100e_kinetics400_rgb_20200702-568cde33.pth - gpu_mem(M): '21558' - inference_time(video/s): x - Config: configs/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb.py In Collection: TSN Metadata: Architecture: ResNet50 + Batch Size: 32 Epochs: 100 Parameters: 24327632 - Training BatchSize / GPU: 32 + Pretrained: ImageNet + Resolution: short-side 256 Training Data: Kinetics-400 - gpus: 8 - pretrain: ImageNet - resolution: short-side 256 + Training Resources: 8 GPUs Modality: RGB Name: tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb Results: @@ -422,19 +391,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_dense_100e_kinetics400_rgb.log.json Training Log: 
https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_dense_100e_kinetics400_rgb.log Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb_20200703-0f19175f.pth - gpu_mem(M): '21553' - inference_time(video/s): x - Config: configs/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.py In Collection: TSN Metadata: Architecture: ResNeXt101-32x4d [[MMCls](https://github.com/open-mmlab/mmclassification/tree/master/configs/resnext)] + Batch Size: 16 Epochs: 100 Parameters: 42948304 - Training BatchSize / GPU: 16 + Pretrained: ImageNet + Resolution: short-side 320 Training Data: Kinetics-400 - gpus: 16 - pretrain: ImageNet - resolution: short-side 320 + Training Resources: 16 GPUs Modality: RGB Name: tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb Results: @@ -450,13 +417,13 @@ Models: In Collection: TSN Metadata: Architecture: ResNeXt101-32x4d [[TorchVision](https://github.com/pytorch/vision/)] + Batch Size: 12 Epochs: 100 Parameters: 27355600 - Training BatchSize / GPU: 12 + Pretrained: ImageNet + Resolution: short-side 320 Training Data: Kinetics-400 - gpus: 16 - pretrain: ImageNet - resolution: short-side 320 + Training Resources: 16 GPUs Modality: RGB Name: tsn_dense161_320p_1x1x3_100e_kinetics400_rgb Results: @@ -472,12 +439,12 @@ Models: In Collection: TSN Metadata: Architecture: ResNet50 + Batch Size: 32 Epochs: 100 Parameters: 24327632 - Training BatchSize / GPU: 32 + Pretrained: ImageNet + Resolution: 340x256 Training Data: Kinetics-400 - pretrain: ImageNet - resolution: 340x256 Modality: RGB Name: tsn_omnisource_r50_1x1x3_100e_kinetics_rgb Results: @@ -487,18 +454,16 @@ Models: top5 acc: 91.0 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_imagenet_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-54192355.pth - gpu_mem(M): '8344' - inference_time(video/s): x - Config: configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py In Collection: TSN Metadata: Architecture: ResNet50 + Batch Size: 32 Epochs: 100 Parameters: 24327632 - Training BatchSize / GPU: 32 + Pretrained: IG-1B + Resolution: short-side 320 Training Data: Kinetics-400 - pretrain: IG-1B - resolution: short-side 320 Modality: RGB Name: tsn_IG1B_pretrained_r50_1x1x3_100e_kinetics_rgb Results: @@ -508,18 +473,16 @@ Models: top5 acc: 90.4 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_without_omni_1x1x3_kinetics400_rgb_20200926-c133dd49.pth - gpu_mem(M): '8344' - inference_time(video/s): x - Config: configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py In Collection: TSN Metadata: Architecture: ResNet50 + Batch Size: 32 Epochs: 100 Parameters: 24327632 - Training BatchSize / GPU: 32 + Pretrained: IG-1B + Resolution: short-side 320 Training Data: Kinetics-400 - pretrain: IG-1B - resolution: short-side 320 Modality: RGB Name: tsn_IG1B_pretrained_omnisource_r50_1x1x3_100e_kinetics_rgb Results: @@ -529,19 +492,17 @@ Models: top5 acc: 91.9 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-2863fed0.pth - gpu_mem(M): '8344' - inference_time(video/s): x - Config: configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb.py In Collection: TSN Metadata: Architecture: ResNet50 + Batch Size: 
12 Epochs: 100 Parameters: 24737432 - Training BatchSize / GPU: 12 + Pretrained: ImageNet + Resolution: short-side 256 Training Data: Kinetics-600 - gpus: 16 - pretrain: ImageNet - resolution: short-side 256 + Training Resources: 16 GPUs Modality: RGB Name: tsn_r50_video_1x1x8_100e_kinetics600_rgb Results: @@ -553,19 +514,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb/tsn_r50_video_1x1x8_100e_kinetics600_rgb_20201015.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb/tsn_r50_video_1x1x8_100e_kinetics600_rgb_20201015.log Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb/tsn_r50_video_1x1x8_100e_kinetics600_rgb_20201015-4db3c461.pth - gpu_mem(M): '8344' - inference_time(video/s): 11.1 (25x3 frames) - Config: configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb.py In Collection: TSN Metadata: Architecture: ResNet50 + Batch Size: 12 Epochs: 100 Parameters: 24942332 - Training BatchSize / GPU: 12 + Pretrained: ImageNet + Resolution: short-side 256 Training Data: Kinetics-700 - gpus: 16 - pretrain: ImageNet - resolution: short-side 256 + Training Resources: 16 GPUs Modality: RGB Name: tsn_r50_video_1x1x8_100e_kinetics700_rgb Results: @@ -577,19 +536,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb/tsn_r50_video_1x1x8_100e_kinetics700_rgb_20201015.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb/tsn_r50_video_1x1x8_100e_kinetics700_rgb_20201015.log Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb/tsn_r50_video_1x1x8_100e_kinetics700_rgb_20201015-e381a6c7.pth - gpu_mem(M): '8344' - inference_time(video/s): 11.1 (25x3 frames) - Config: configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb.py In Collection: TSN Metadata: Architecture: ResNet50 + Batch Size: 16 Epochs: 50 Parameters: 23864558 - Training BatchSize / GPU: 16 + Pretrained: ImageNet + Resolution: height 100 Training Data: SthV1 - gpus: 8 - pretrain: ImageNet - resolution: height 100 + Training Resources: 8 GPUs Modality: RGB Name: tsn_r50_1x1x8_50e_sthv1_rgb Results: @@ -601,18 +558,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_r50_f8_sthv1_18.1_45.0.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_sthv1.log Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_r50_1x1x8_50e_sthv1_rgb_20200618-061b9195.pth - gpu_mem(M): '10978' - Config: configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb.py In Collection: TSN Metadata: Architecture: ResNet50 + Batch Size: 4 Epochs: 50 Parameters: 23864558 - Training BatchSize / GPU: 4 + Pretrained: ImageNet + Resolution: height 100 Training Data: SthV1 - gpus: 8 - pretrain: ImageNet - resolution: height 100 + Training Resources: 8 GPUs Modality: RGB Name: tsn_r50_1x1x16_50e_sthv1_rgb Results: @@ -624,18 +580,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/20200614_211932.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/20200614_211932.log Weights: 
https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/tsn_r50_1x1x16_50e_sthv1_rgb_20200614-7e2fe4f1.pth - gpu_mem(M): '5691' - Config: configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb.py In Collection: TSN Metadata: Architecture: ResNet50 + Batch Size: 16 Epochs: 50 Parameters: 23864558 - Training BatchSize / GPU: 16 + Pretrained: ImageNet + Resolution: height 240 Training Data: SthV2 - gpus: 8 - pretrain: ImageNet - resolution: height 240 + Training Resources: 8 GPUs Modality: RGB Name: tsn_r50_1x1x8_50e_sthv2_rgb Results: @@ -647,18 +602,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20200915_114139.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20200915_114139.log Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/tsn_r50_1x1x8_50e_sthv2_rgb_20200915-f3b381a5.pth - gpu_mem(M): '10966' - Config: configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb.py In Collection: TSN Metadata: Architecture: ResNet50 + Batch Size: 4 Epochs: 50 Parameters: 23864558 - Training BatchSize / GPU: 4 + Pretrained: ImageNet + Resolution: height 240 Training Data: SthV2 - gpus: 8 - pretrain: ImageNet - resolution: height 240 + Training Resources: 8 GPUs Modality: RGB Name: tsn_r50_1x1x16_50e_sthv2_rgb Results: @@ -670,18 +624,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20200917_105855.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20200917_105855.log Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/tsn_r50_1x1x16_50e_sthv2_rgb_20200917-80bc3611.pth - gpu_mem(M): '8337' - Config: configs/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb.py In Collection: TSN Metadata: Architecture: ResNet50 + Batch Size: 16 Epochs: 100 Parameters: 24202643 - Training BatchSize / GPU: 16 + Pretrained: ImageNet + Resolution: short-side 256 Training Data: MiT - gpus: 16 - pretrain: ImageNet - resolution: short-side 256 + Training Resources: 16 GPUs Modality: RGB Name: tsn_r50_1x1x6_100e_mit_rgb Results: @@ -693,18 +646,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb/tsn_r50_f6_mit_26.8_51.6.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb/tsn_mit.log Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb/tsn_r50_1x1x6_100e_mit_rgb_20200618-d512ab1b.pth - gpu_mem(M): '8339' - Config: configs/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb.py In Collection: TSN Metadata: Architecture: ResNet101 + Batch Size: 16 Epochs: 50 Parameters: 43141497 - Training BatchSize / GPU: 16 + Pretrained: ImageNet + Resolution: short-side 256 Training Data: MMiT - gpus: 16 - pretrain: ImageNet - resolution: short-side 256 + Training Resources: 16 GPUs Modality: RGB Name: tsn_r101_1x1x5_50e_mmit_rgb Results: @@ -715,18 +667,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb/tsn_r101_f6_mmit_61.1.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb/tsn_mmit.log Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb/tsn_r101_1x1x5_50e_mmit_rgb_20200618-642f450d.pth - 
gpu_mem(M): '10467' - Config: configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb.py In Collection: TSN Metadata: Architecture: ResNet50 + Batch Size: 8 Epochs: 50 Parameters: 23917832 - Training BatchSize / GPU: 8 + Pretrained: Kinetics400 + Resolution: short-side 320 Training Data: ActivityNet v1.3 - gpus: 8 - pretrain: Kinetics400 - resolution: short-side 320 + Training Resources: 8 GPUs Modality: RGB Name: tsn_r50_320p_1x1x8_50e_activitynet_video_rgb Results: @@ -738,18 +689,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb/20210228_223327.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb/20210228_223327.log Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb_20210301-7f8da0c6.pth - gpu_mem(M): '5692' - Config: configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb.py In Collection: TSN Metadata: Architecture: ResNet50 + Batch Size: 8 Epochs: 50 Parameters: 23917832 - Training BatchSize / GPU: 8 + Pretrained: Kinetics400 + Resolution: short-side 320 Training Data: ActivityNet v1.3 - gpus: 8 - pretrain: Kinetics400 - resolution: short-side 320 + Training Resources: 8 GPUs Modality: RGB Name: tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb Results: @@ -761,18 +711,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb/20210217_181313.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb/20210217_181313.log Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb_20210301-c0f04a7e.pth - gpu_mem(M): '5692' - Config: configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow.py In Collection: TSN Metadata: Architecture: ResNet50 + Batch Size: 8 Epochs: 150 Parameters: 23939784 - Training BatchSize / GPU: 8 + Pretrained: Kinetics400 + Resolution: 340x256 Training Data: ActivityNet v1.3 - gpus: 16 - pretrain: Kinetics400 - resolution: 340x256 + Training Resources: 16 GPUs Modality: Flow Name: tsn_r50_320p_1x1x8_150e_activitynet_video_flow Results: @@ -784,18 +733,17 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow/tsn_r50_320p_1x1x8_150e_activitynet_video_flow_20200804.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow/tsn_r50_320p_1x1x8_150e_activitynet_video_flow_20200804.log Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow/tsn_r50_320p_1x1x8_150e_activitynet_video_flow_20200804-13313f52.pth - gpu_mem(M): '5780' - Config: configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow.py In Collection: TSN Metadata: Architecture: ResNet50 + Batch Size: 8 Epochs: 150 Parameters: 23939784 - Training BatchSize / GPU: 8 + Pretrained: Kinetics400 + Resolution: 340x256 Training Data: ActivityNet v1.3 - gpus: 16 - pretrain: Kinetics400 - resolution: 340x256 + Training Resources: 16 GPUs Modality: Flow Name: tsn_r50_320p_1x1x8_150e_activitynet_clip_flow Results: @@ -807,18 +755,17 @@ Models: Training Json Log: 
https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow_20200804.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow_20200804.log Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow_20200804-8622cf38.pth - gpu_mem(M): '5780' - Config: configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_action_rgb.py In Collection: TSN Metadata: Architecture: ResNet18 + Batch Size: 32 Epochs: 100 Parameters: 11555619 - Training BatchSize / GPU: 32 + Pretrained: ImageNet + Resolution: short-side 256 Training Data: HVU - gpus: 16 - pretrain: ImageNet - resolution: short-side 256 + Training Resources: 16 GPUs Modality: RGB Name: tsn_r18_1x1x8_100e_hvu_action_rgb Results: @@ -834,13 +781,13 @@ Models: In Collection: TSN Metadata: Architecture: ResNet18 + Batch Size: 32 Epochs: 100 Parameters: 11303736 - Training BatchSize / GPU: 32 + Pretrained: ImageNet + Resolution: short-side 256 Training Data: HVU - gpus: 8 - pretrain: ImageNet - resolution: short-side 256 + Training Resources: 8 GPUs Modality: RGB Name: tsn_r18_1x1x8_100e_hvu_scene_rgb Results: @@ -856,13 +803,13 @@ Models: In Collection: TSN Metadata: Architecture: ResNet18 + Batch Size: 32 Epochs: 100 Parameters: 12037326 - Training BatchSize / GPU: 32 + Pretrained: ImageNet + Resolution: short-side 256 Training Data: HVU - gpus: 8 - pretrain: ImageNet - resolution: short-side 256 + Training Resources: 8 GPUs Modality: RGB Name: tsn_r18_1x1x8_100e_hvu_object_rgb Results: @@ -878,13 +825,13 @@ Models: In Collection: TSN Metadata: Architecture: ResNet18 + Batch Size: 32 Epochs: 100 Parameters: 11211909 - Training BatchSize / GPU: 32 + Pretrained: ImageNet + Resolution: short-side 256 Training Data: HVU - gpus: 8 - pretrain: ImageNet - resolution: short-side 256 + Training Resources: 8 GPUs Modality: RGB Name: tsn_r18_1x1x8_100e_hvu_event_rgb Results: @@ -900,13 +847,13 @@ Models: In Collection: TSN Metadata: Architecture: ResNet18 + Batch Size: 32 Epochs: 100 Parameters: 12037326 - Training BatchSize / GPU: 32 + Pretrained: ImageNet + Resolution: short-side 256 Training Data: HVU - gpus: 8 - pretrain: ImageNet - resolution: short-side 256 + Training Resources: 8 GPUs Modality: RGB Name: tsn_r18_1x1x8_100e_hvu_concept_rgb Results: @@ -922,13 +869,13 @@ Models: In Collection: TSN Metadata: Architecture: ResNet18 + Batch Size: 32 Epochs: 100 Parameters: 11236533 - Training BatchSize / GPU: 32 + Pretrained: ImageNet + Resolution: short-side 256 Training Data: HVU - gpus: 8 - pretrain: ImageNet - resolution: short-side 256 + Training Resources: 8 GPUs Modality: RGB Name: tsn_r18_1x1x8_100e_hvu_attribute_rgb Results: @@ -940,3 +887,24 @@ Models: Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/attribute/tsn_r18_1x1x8_100e_hvu_attribute_rgb_20201027.log Weights: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/attribute/tsn_r18_1x1x8_100e_hvu_attribute_rgb_20201027-0b3b49d2.pth tag category: attribute +- Config: configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py + In Collection: TSN + Metadata: + Architecture: Swin Transformer + Batch Size: 24 + Epochs: 100 + Parameters: 87153224 + Pretrained: ImageNet + Resolution: short-side 320 + Training Data: Kinetics400 + Training 
Resources: 8 GPUs + Name: tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb + Results: + - Dataset: Kinetics400 + Metrics: + - top1 acc: 77.51 + - top5 acc: 92.92 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb-805380f6.pth diff --git a/configs/recognition/x3d/metafile.yml b/configs/recognition/x3d/metafile.yml index 2e4bde7889..3f7e9393d1 100644 --- a/configs/recognition/x3d/metafile.yml +++ b/configs/recognition/x3d/metafile.yml @@ -1,17 +1,15 @@ Collections: -- Metadata: - Training Data: null - Name: X3D +- Name: X3D README: configs/recognition/x3d/README.md Models: - Config: configs/recognition/x3d/x3d_s_13x6x1_facebook_kinetics400_rgb.py In Collection: X3D Metadata: Architecture: X3D_S + Batch Size: 1 Parameters: 3794322 - Training BatchSize / GPU: 1 + Resolution: short-side 320 Training Data: Kinetics-400 - resolution: short-side 320 Modality: RGB Name: x3d_s_13x6x1_facebook_kinetics400_rgb Results: @@ -27,10 +25,10 @@ Models: In Collection: X3D Metadata: Architecture: X3D_M + Batch Size: 1 Parameters: 3794322 - Training BatchSize / GPU: 1 + Resolution: short-side 320 Training Data: Kinetics-400 - resolution: short-side 320 Modality: RGB Name: x3d_m_16x5x1_facebook_kinetics400_rgb Results: diff --git a/configs/recognition_audio/resnet/metafile.yml b/configs/recognition_audio/resnet/metafile.yml index 3d139fd062..a13b5fc594 100644 --- a/configs/recognition_audio/resnet/metafile.yml +++ b/configs/recognition_audio/resnet/metafile.yml @@ -1,17 +1,15 @@ Collections: -- Metadata: - Training Data: null - Name: Audio +- Name: Audio README: configs/recognition_audio/resnet/README.md Models: - Config: configs/recognition_audio/resnet/tsn_r18_64x1x1_100e_kinetics400_audio_feature.py In Collection: Audio Metadata: Architecture: ResNet18 + Pretrained: None Training Data: Kinetics-400 - gpus: 8 + Training Resources: 8 GPUs n_fft: '1024' - pretrain: None Modality: Audio Name: tsn_r18_64x1x1_100e_kinetics400_audio_feature Results: @@ -27,4 +25,3 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/20201010_144630.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/20201010_144630.log Weights: https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/tsn_r18_64x1x1_100e_kinetics400_audio_feature_20201012-bf34df6c.pth - gpu_mem(M): '1897' diff --git a/configs/skeleton/posec3d/metafile.yml b/configs/skeleton/posec3d/metafile.yml new file mode 100644 index 0000000000..24c6e30c70 --- /dev/null +++ b/configs/skeleton/posec3d/metafile.yml @@ -0,0 +1,118 @@ +Collections: +- Name: PoseC3D + README: configs/skeleton/posec3d/README.md +Models: +- Config: configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint.py + In Collection: PoseC3D + Metadata: + Architecture: 
SlowOnly-R50 + Batch Size: 16 + Epochs: 240 + Parameters: 2044867 + Training Data: FineGYM + Training Resources: 16 GPUs + pseudo heatmap: keypoint + Name: slowonly_r50_u48_240e_gym_keypoint + Results: + - Dataset: FineGYM + Metrics: + - Mean Top-1: 93.7 + Task: Skeleton-based Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint.json + Training Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint.log + Weights: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint-b07a98a0.pth +- Config: configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb.py + In Collection: PoseC3D + Metadata: + Architecture: SlowOnly-R50 + Batch Size: 16 + Epochs: 240 + Parameters: 2044867 + Training Data: FineGYM + Training Resources: 16 GPUs + pseudo heatmap: limb + Name: slowonly_r50_u48_240e_gym_limb + Results: + - Dataset: FineGYM + Metrics: + - Mean Top-1: 94.0 + Task: Skeleton-based Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb.json + Training Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb.log + Weights: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb-c0d7b482.pth +- Config: configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint.py + In Collection: PoseC3D + Metadata: + Architecture: SlowOnly-R50 + Batch Size: 16 + Epochs: 240 + Parameters: 2024860 + Training Data: NTU60-XSub + Training Resources: 16 GPUs + pseudo heatmap: keypoint + Name: slowonly_r50_u48_240e_ntu60_xsub_keypoint + Results: + - Dataset: NTU60-XSub + Metrics: + - Top-1: 93.7 + Task: Skeleton-based Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint.json + Training Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint.log + Weights: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint-f3adabf1.pth +- Config: configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb.py + In Collection: PoseC3D + Metadata: + Architecture: SlowOnly-R50 + Batch Size: 16 + Epochs: 240 + Parameters: 2024860 + Training Data: NTU60-XSub + Training Resources: 16 GPUs + pseudo heatmap: limb + Name: slowonly_r50_u48_240e_ntu60_xsub_limb + Results: + - Dataset: NTU60-XSub + Metrics: + - Top-1: 93.4 + Task: Skeleton-based Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb.json + Training Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb.log + Weights: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb-1d69006a.pth +- Config: configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py + In Collection: PoseC3D + Metadata: + Architecture: 
SlowOnly-R50 + Batch Size: 16 + Epochs: 240 + Parameters: 2055640 + Training Data: NTU120-XSub + Training Resources: 16 GPUs + pseudo heatmap: keypoint + Name: slowonly_r50_u48_240e_ntu120_xsub_keypoint + Results: + - Dataset: NTU120-XSub + Metrics: + - Top-1: 86.3 + Task: Skeleton-based Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint.json + Training Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint.log + Weights: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint-6736b03f.pth +- Config: configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb.py + In Collection: PoseC3D + Metadata: + Architecture: SlowOnly-R50 + Batch Size: 16 + Epochs: 240 + Parameters: 2055640 + Training Data: NTU120-XSub + Training Resources: 16 GPUs + pseudo heatmap: limb + Name: slowonly_r50_u48_240e_ntu120_xsub_limb + Results: + - Dataset: NTU120-XSub + Metrics: + - Top-1: 85.7 + Task: Skeleton-based Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.json + Training Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.log + Weights: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb-803c2317.pth? diff --git a/model_zoo.yml b/model_zoo.yml index 21af3c4394..1336135c17 100644 --- a/model_zoo.yml +++ b/model_zoo.yml @@ -18,4 +18,6 @@ Import: - configs/recognition/trn/metafile.yml - configs/detection/ava/metafile.yml - configs/detection/lfb/metafile.yml +- configs/detection/acrn/metafile.yml - configs/recognition_audio/resnet/metafile.yml +- configs/skeleton/posec3d/metafile.yml From a6e8977c6530097038a858d76a4bae0e2a86cc7d Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Wed, 23 Jun 2021 21:08:45 +0800 Subject: [PATCH 167/414] [Improvement] Adjust script structure (#935) * adjust script structure * change PYTHONPATH --- configs/localization/bsn/README.md | 8 ++++---- configs/localization/bsn/README_zh-CN.md | 8 ++++---- docs/data_preparation.md | 4 ++-- docs/feature_extraction.md | 8 ++++---- docs/getting_started.md | 2 +- docs/tutorials/6_export_model.md | 8 ++++---- docs/useful_tools.md | 16 ++++++++-------- docs_zh_CN/data_preparation.md | 4 ++-- docs_zh_CN/getting_started.md | 2 +- docs_zh_CN/tutorials/6_export_model.md | 8 ++++---- docs_zh_CN/useful_tools.md | 16 ++++++++-------- tests/test_utils/test_onnx.py | 2 +- tools/__init__.py | 4 ++++ tools/{ => analysis}/check_videos.py | 0 tools/{ => deployment}/publish_model.py | 0 tools/{ => deployment}/pytorch2onnx.py | 0 tools/{ => misc}/bsn_proposal_generation.py | 0 tools/{ => misc}/clip_feature_extraction.py | 0 tools/{ => misc}/dist_clip_feature_extraction.sh | 2 +- tools/{ => misc}/flow_extraction.py | 0 20 files changed, 48 insertions(+), 44 deletions(-) rename tools/{ => analysis}/check_videos.py (100%) rename tools/{ => deployment}/publish_model.py (100%) rename tools/{ => deployment}/pytorch2onnx.py (100%) rename tools/{ => misc}/bsn_proposal_generation.py (100%) rename tools/{ => misc}/clip_feature_extraction.py (100%) 
rename tools/{ => misc}/dist_clip_feature_extraction.sh (87%) rename tools/{ => misc}/flow_extraction.py (100%) diff --git a/configs/localization/bsn/README.md b/configs/localization/bsn/README.md index c274aee77c..a16767ad47 100644 --- a/configs/localization/bsn/README.md +++ b/configs/localization/bsn/README.md @@ -71,7 +71,7 @@ You can use the following commands to inference a model. 2. For PGM Inference ```shell - python tools/bsn_proposal_generation.py ${CONFIG_FILE} [--mode ${MODE}] + python tools/misc/bsn_proposal_generation.py ${CONFIG_FILE} [--mode ${MODE}] ``` 3. For PEM Inference @@ -91,7 +91,7 @@ Examples: 2. Inference BSN(PGM) with pretrained model. ```shell - python tools/bsn_proposal_generation.py configs/localization/bsn/bsn_pgm_400x100_activitynet_feature.py --mode train + python tools/misc/bsn_proposal_generation.py configs/localization/bsn/bsn_pgm_400x100_activitynet_feature.py --mode train ``` 3. Inference BSN(PEM) with evaluation metric 'AR@AN' and output the results. @@ -115,7 +115,7 @@ You can use the following commands to test a model. 2. PGM ```shell - python tools/bsn_proposal_generation.py ${CONFIG_FILE} [--mode ${MODE}] + python tools/misc/bsn_proposal_generation.py ${CONFIG_FILE} [--mode ${MODE}] ``` 3. PEM @@ -135,7 +135,7 @@ Examples: 2. Test a PGM model on ActivityNet dataset. ```shell - python tools/bsn_proposal_generation.py configs/localization/bsn/bsn_pgm_400x100_activitynet_feature.py --mode test + python tools/misc/bsn_proposal_generation.py configs/localization/bsn/bsn_pgm_400x100_activitynet_feature.py --mode test ``` 3. Test a PEM model with evaluation metric 'AR@AN' and output the results. diff --git a/configs/localization/bsn/README_zh-CN.md b/configs/localization/bsn/README_zh-CN.md index 5ec8362292..6d0ddfc2df 100644 --- a/configs/localization/bsn/README_zh-CN.md +++ b/configs/localization/bsn/README_zh-CN.md @@ -72,7 +72,7 @@ python tools/train.py ${CONFIG_FILE} [optional arguments] 2. 推理 PGM 模型 ```shell - python tools/bsn_proposal_generation.py ${CONFIG_FILE} [--mode ${MODE}] + python tools/misc/bsn_proposal_generation.py ${CONFIG_FILE} [--mode ${MODE}] ``` 3. 推理 PEM 模型 @@ -92,7 +92,7 @@ python tools/train.py ${CONFIG_FILE} [optional arguments] 2. 利用预训练模型进行 BSN(PGM) 模型的推理 ```shell - python tools/bsn_proposal_generation.py configs/localization/bsn/bsn_pgm_400x100_activitynet_feature.py --mode train + python tools/misc/bsn_proposal_generation.py configs/localization/bsn/bsn_pgm_400x100_activitynet_feature.py --mode train ``` 3. 推理 BSN(PEM) 模型,并计算 'AR@AN' 指标,输出结果文件。 @@ -116,7 +116,7 @@ python tools/train.py ${CONFIG_FILE} [optional arguments] 2. PGM ```shell - python tools/bsn_proposal_generation.py ${CONFIG_FILE} [--mode ${MODE}] + python tools/misc/bsn_proposal_generation.py ${CONFIG_FILE} [--mode ${MODE}] ``` 3. PEM @@ -136,7 +136,7 @@ python tools/train.py ${CONFIG_FILE} [optional arguments] 2. 在 ActivityNet 数据集上测试 PGM 模型。 ```shell - python tools/bsn_proposal_generation.py configs/localization/bsn/bsn_pgm_400x100_activitynet_feature.py --mode test + python tools/misc/bsn_proposal_generation.py configs/localization/bsn/bsn_pgm_400x100_activitynet_feature.py --mode test ``` 3.
测试 PEM 模型,并计算 'AR@AN' 指标,输出结果文件。 diff --git a/docs/data_preparation.md b/docs/data_preparation.md index 0b9553c22c..1a7f736720 100644 --- a/docs/data_preparation.md +++ b/docs/data_preparation.md @@ -76,10 +76,10 @@ ln -s ${YOUR_FOLDER} $MMACTION2/data/$DATASET/rawframes #### Alternative to denseflow -In case your device doesn't fulfill the installation requirement of [denseflow](https://github.com/open-mmlab/denseflow)(like Nvidia driver version), or you just want to see some quick demos about flow extraction, we provide a python script `tools/flow_extraction.py` as an alternative to denseflow. You can use it for rgb frames and optical flow extraction from one or several videos. Note that the speed of the script is much slower than denseflow, since it runs optical flow algorithms on CPU. +In case your device doesn't fulfill the installation requirement of [denseflow](https://github.com/open-mmlab/denseflow)(like Nvidia driver version), or you just want to see some quick demos about flow extraction, we provide a python script `tools/misc/flow_extraction.py` as an alternative to denseflow. You can use it for rgb frames and optical flow extraction from one or several videos. Note that the speed of the script is much slower than denseflow, since it runs optical flow algorithms on CPU. ```shell -python tools/flow_extraction.py --input ${INPUT} [--prefix ${PREFIX}] [--dest ${DEST}] [--rgb-tmpl ${RGB_TMPL}] \ +python tools/misc/flow_extraction.py --input ${INPUT} [--prefix ${PREFIX}] [--dest ${DEST}] [--rgb-tmpl ${RGB_TMPL}] \ [--flow-tmpl ${FLOW_TMPL}] [--start-idx ${START_IDX}] [--method ${METHOD}] [--bound ${BOUND}] [--save-rgb] ``` diff --git a/docs/feature_extraction.md b/docs/feature_extraction.md index 6c4dd1b1fe..9b28889d8e 100644 --- a/docs/feature_extraction.md +++ b/docs/feature_extraction.md @@ -25,7 +25,7 @@ YoYo/v_YoYo_g25_c05.avi Assume the root of UCF101 videos is `data/ucf101/videos` and the name of the video list is `ucf101.txt`, to extract clip-level feature of UCF101 videos with Kinetics-400 pretrained TSN, you can use the following script: ```shell -python tools/clip_feature_extraction.py \ +python tools/misc/clip_feature_extraction.py \ configs/recognition/tsn/tsn_r50_clip_feature_extraction_1x1x3_rgb.py \ https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_320p_1x1x3_100e_kinetics400_rgb_20200702-cc665e2a.pth \ --video-list ucf101.txt \ @@ -38,7 +38,7 @@ and the extracted feature will be stored in `ucf101_feature.pkl` You can also use distributed clip-level feature extraction. Below is an example for a node with 8 gpus. 
```shell -bash tools/dist_clip_feature_extraction.sh \ +bash tools/misc/dist_clip_feature_extraction.sh \ configs/recognition/tsn/tsn_r50_clip_feature_extraction_1x1x3_rgb.py \ https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_320p_1x1x3_100e_kinetics400_rgb_20200702-cc665e2a.pth \ 8 \ @@ -50,7 +50,7 @@ https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_ To extract clip-level feature of UCF101 videos with Kinetics-400 pretrained SlowOnly, you can use the following script: ```shell -python tools/clip_feature_extraction.py \ +python tools/misc/clip_feature_extraction.py \ configs/recognition/slowonly/slowonly_r50_clip_feature_extraction_4x16x1_rgb.py \ https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014-c9cdc656.pth \ --video-list ucf101.txt \ @@ -61,7 +61,7 @@ https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_ The two config files demonstrates what a minimal config file for feature extraction looks like. You can also use other existing config files for feature extraction, as long as they use videos rather than raw frames for training and testing: ```shell -python tools/clip_feature_extraction.py \ +python tools/misc/clip_feature_extraction.py \ configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py \ https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014-c9cdc656.pth \ --video-list ucf101.txt \ diff --git a/docs/getting_started.md b/docs/getting_started.md index a31934aec7..6e86e497da 100644 --- a/docs/getting_started.md +++ b/docs/getting_started.md @@ -86,7 +86,7 @@ Optional arguments: - `AVG_TYPE`: Items to average the test clips. If set to `prob`, it will apply softmax before averaging the clip scores. Otherwise, it will directly average the clip scores. - `JOB_LAUNCHER`: Items for distributed job initialization launcher. Allowed choices are `none`, `pytorch`, `slurm`, `mpi`. Especially, if set to none, it will test in a non-distributed mode. - `LOCAL_RANK`: ID for local rank. If not specified, it will be set to 0. -- `--onnx`: If specified, recognition results will be generated by onnx model and `CHECKPOINT_FILE` should be onnx model file path. Onnx model files are generated by `/tools/pytorch2onnx.py`. For now, multi-gpu mode and dynamic input shape mode are not supported. Please note that the output tensors of dataset and the input tensors of onnx model should share the same shape. And it is recommended to remove all test-time augmentation methods in `test_pipeline`(`ThreeCrop`, `TenCrop`, `twice_sample`, etc.) +- `--onnx`: If specified, recognition results will be generated by onnx model and `CHECKPOINT_FILE` should be onnx model file path. Onnx model files are generated by `/tools/deployment/pytorch2onnx.py`. For now, multi-gpu mode and dynamic input shape mode are not supported. Please note that the output tensors of dataset and the input tensors of onnx model should share the same shape. And it is recommended to remove all test-time augmentation methods in `test_pipeline`(`ThreeCrop`, `TenCrop`, `twice_sample`, etc.) - `--tensorrt`: If specified, recognition results will be generated by TensorRT engine and `CHECKPOINT_FILE` should be TensorRT engine file path. 
TensorRT engines are generated by exported onnx models and TensorRT official conversion tools. For now, multi-gpu mode and dynamic input shape mode are not supported. Please note that the output tensors of dataset and the input tensors of TensorRT engine should share the same shape. And it is recommended to remove all test-time augmentation methods in `test_pipeline`(`ThreeCrop`, `TenCrop`, `twice_sample`, etc.) Examples: diff --git a/docs/tutorials/6_export_model.md b/docs/tutorials/6_export_model.md index 8db4804bb1..d445ab1226 100644 --- a/docs/tutorials/6_export_model.md +++ b/docs/tutorials/6_export_model.md @@ -28,7 +28,7 @@ So far, our codebase supports onnx exporting from pytorch models trained with MM ## Usage -For simple exporting, you can use the [script](/tools/pytorch2onnx.py) here. Note that the package `onnx` and `onnxruntime` are required for verification after exporting. +For simple exporting, you can use the [script](/tools/deployment/pytorch2onnx.py) here. Note that the package `onnx` and `onnxruntime` are required for verification after exporting. ### Prerequisite @@ -41,7 +41,7 @@ pip install onnx onnxruntime We provide a python script to export the pytorch model trained by MMAction2 to ONNX. ```shell -python tools/pytorch2onnx.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--shape ${SHAPE}] \ +python tools/deployment/pytorch2onnx.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--shape ${SHAPE}] \ [--verify] [--show] [--output-file ${OUTPUT_FILE}] [--is-localizer] [--opset-version ${VERSION}] ``` Optional arguments: For recognizers, please run: ```shell -python tools/pytorch2onnx.py $CONFIG_PATH $CHECKPOINT_PATH --shape $SHAPE --verify +python tools/deployment/pytorch2onnx.py $CONFIG_PATH $CHECKPOINT_PATH --shape $SHAPE --verify ``` ### Localizers For localizers, please run: ```shell -python tools/pytorch2onnx.py $CONFIG_PATH $CHECKPOINT_PATH --is-localizer --shape $SHAPE --verify +python tools/deployment/pytorch2onnx.py $CONFIG_PATH $CHECKPOINT_PATH --is-localizer --shape $SHAPE --verify ``` Please fire an issue if you discover any checkpoints that are not perfectly exported or suffer some loss in accuracy. diff --git a/docs/useful_tools.md b/docs/useful_tools.md index 1dbc5e71b8..be9ff05663 100644 --- a/docs/useful_tools.md +++ b/docs/useful_tools.md @@ -95,7 +95,7 @@ You may use the result for simple comparisons, but double check it before you ad ### MMAction2 model to ONNX (experimental) -`/tools/pytorch2onnx.py` is a script to convert model to [ONNX](https://github.com/onnx/onnx) format. +`/tools/deployment/pytorch2onnx.py` is a script to convert model to [ONNX](https://github.com/onnx/onnx) format. It also supports comparing the output results between PyTorch and ONNX model for verification. Run `pip install onnx onnxruntime` first to install the dependency. Please note that a softmax layer could be added for recognizers by `--softmax` option, in order to get predictions in range `[0, 1]`.
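Beyond the built-in `--verify` comparison against the original PyTorch outputs, an exported recognizer can also be sanity-checked by loading it directly with `onnxruntime`. The snippet below is a minimal sketch, not part of this patch: the file name `tsn.onnx` is hypothetical, and the input shape is an assumption — use whatever `--shape` was passed to `pytorch2onnx.py` at export time.

```python
import numpy as np
import onnxruntime as ort

# Hypothetical file produced by pytorch2onnx.py; any exported recognizer works.
session = ort.InferenceSession('tsn.onnx')
input_name = session.get_inputs()[0].name

# Dummy input matching the shape assumed at export time (here,
# batch x segments x channels x height x width = 1 x 3 x 3 x 224 x 224).
dummy = np.random.randn(1, 3, 3, 224, 224).astype(np.float32)
scores = session.run(None, {input_name: dummy})[0]

print(scores.shape)  # (1, num_classes)
# If the model was exported with --softmax, each row is a probability
# distribution and should sum to roughly 1.
print(scores.sum(axis=-1))
```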
@@ -103,18 +103,18 @@ Please note that a softmax layer could be added for recognizers by `--softmax` o - For recognizers, please run: ```shell - python tools/pytorch2onnx.py $CONFIG_PATH $CHECKPOINT_PATH --shape $SHAPE --verify + python tools/deployment/pytorch2onnx.py $CONFIG_PATH $CHECKPOINT_PATH --shape $SHAPE --verify ``` - For localizers, please run: ```shell - python tools/pytorch2onnx.py $CONFIG_PATH $CHECKPOINT_PATH --is-localizer --shape $SHAPE --verify + python tools/deployment/pytorch2onnx.py $CONFIG_PATH $CHECKPOINT_PATH --is-localizer --shape $SHAPE --verify ``` ### Prepare a model for publishing -`tools/publish_model.py` helps users to prepare their model for publishing. +`tools/deployment/publish_model.py` helps users to prepare their model for publishing. Before you upload a model to AWS, you may want to: @@ -123,13 +123,13 @@ Before you upload a model to AWS, you may want to: (3) compute the hash of the checkpoint file and append the hash id to the filename. ```shell -python tools/publish_model.py ${INPUT_FILENAME} ${OUTPUT_FILENAME} +python tools/deployment/publish_model.py ${INPUT_FILENAME} ${OUTPUT_FILENAME} ``` E.g., ```shell -python tools/publish_model.py work_dirs/tsn_r50_1x1x3_100e_kinetics400_rgb/latest.pth tsn_r50_1x1x3_100e_kinetics400_rgb.pth +python tools/deployment/publish_model.py work_dirs/tsn_r50_1x1x3_100e_kinetics400_rgb/latest.pth tsn_r50_1x1x3_100e_kinetics400_rgb.pth ``` The final output filename will be `tsn_r50_1x1x3_100e_kinetics400_rgb-{hash id}.pth`. @@ -157,8 +157,8 @@ python tools/print_config.py ${CONFIG} [-h] [--options ${OPTIONS [OPTIONS...]}] ### Check videos -`tools/check_videos.py` uses specified video encoder to iterate all samples that are specified by the input configuration file, looks for invalid videos (corrupted or missing), and saves the corresponding file path to the output file. Please note that after deleting invalid videos, users need to regenerate the video file list. +`tools/analysis/check_videos.py` uses specified video encoder to iterate all samples that are specified by the input configuration file, looks for invalid videos (corrupted or missing), and saves the corresponding file path to the output file. Please note that after deleting invalid videos, users need to regenerate the video file list. 
```shell -python tools/check_videos.py ${CONFIG} [-h] [--options OPTIONS [OPTIONS ...]] [--cfg-options CFG_OPTIONS [CFG_OPTIONS ...]] [--output-file OUTPUT_FILE] [--split SPLIT] [--decoder DECODER] [--num-processes NUM_PROCESSES] [--remove-corrupted-videos] +python tools/analysis/check_videos.py ${CONFIG} [-h] [--options OPTIONS [OPTIONS ...]] [--cfg-options CFG_OPTIONS [CFG_OPTIONS ...]] [--output-file OUTPUT_FILE] [--split SPLIT] [--decoder DECODER] [--num-processes NUM_PROCESSES] [--remove-corrupted-videos] ``` diff --git a/docs_zh_CN/data_preparation.md b/docs_zh_CN/data_preparation.md index 733636d057..9fa2fd47d8 100644 --- a/docs_zh_CN/data_preparation.md +++ b/docs_zh_CN/data_preparation.md @@ -76,11 +76,11 @@ ln -s ${YOUR_FOLDER} $MMACTION2/data/$DATASET/rawframes #### denseflow 的替代项 如果用户因依赖要求(如 Nvidia 显卡驱动版本),无法安装 [denseflow](https://github.com/open-mmlab/denseflow), -或者只需要一些关于光流提取的快速演示,可用 Python 脚本 `tools/flow_extraction.py` 替代 denseflow。 +或者只需要一些关于光流提取的快速演示,可用 Python 脚本 `tools/misc/flow_extraction.py` 替代 denseflow。 这个脚本可用于一个或多个视频提取 RGB 帧和光流。注意,由于该脚本时在 CPU 上运行光流算法,其速度比 denseflow 慢很多。 ```shell -python tools/flow_extraction.py --input ${INPUT} [--prefix ${PREFIX}] [--dest ${DEST}] [--rgb-tmpl ${RGB_TMPL}] \ +python tools/misc/flow_extraction.py --input ${INPUT} [--prefix ${PREFIX}] [--dest ${DEST}] [--rgb-tmpl ${RGB_TMPL}] \ [--flow-tmpl ${FLOW_TMPL}] [--start-idx ${START_IDX}] [--method ${METHOD}] [--bound ${BOUND}] [--save-rgb] ``` diff --git a/docs_zh_CN/getting_started.md b/docs_zh_CN/getting_started.md index b0e7dde2fa..2da9320358 100644 --- a/docs_zh_CN/getting_started.md +++ b/docs_zh_CN/getting_started.md @@ -85,7 +85,7 @@ python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--out ${RESULT_FILE}] [- - `AVG_TYPE`:用于平均测试片段结果的选项。如果被设置为 `prob`,则会在平均测试片段结果之前施加 softmax 函数。否则,会直接进行平均。 - `JOB_LAUNCHER`:分布式任务初始化启动器选项。可选值有 `none`,`pytorch`,`slurm`,`mpi`。特别地,如果被设置为 `none`, 则会以非分布式模式进行测试。 - `LOCAL_RANK`:本地 rank 的 ID。如果没有被指定,则会被设置为 0。 -- `--onnx`: 如果指定,将通过 onnx 模型推理获取预测结果,输入参数 `CHECKPOINT_FILE` 应为 onnx 模型文件。Onnx 模型文件由 `/tools/pytorch2onnx.py` 脚本导出。目前,不支持多 GPU 测试以及动态张量形状(Dynamic shape)。请注意,数据集输出与模型输入张量的形状应保持一致。同时,不建议使用测试时数据增强,如 `ThreeCrop`,`TenCrop`,`twice_sample` 等。 +- `--onnx`: 如果指定,将通过 onnx 模型推理获取预测结果,输入参数 `CHECKPOINT_FILE` 应为 onnx 模型文件。Onnx 模型文件由 `/tools/deployment/pytorch2onnx.py` 脚本导出。目前,不支持多 GPU 测试以及动态张量形状(Dynamic shape)。请注意,数据集输出与模型输入张量的形状应保持一致。同时,不建议使用测试时数据增强,如 `ThreeCrop`,`TenCrop`,`twice_sample` 等。 - `--tensorrt`: 如果指定,将通过 TensorRT 模型推理获取预测结果,输入参数 `CHECKPOINT_FILE` 应为 TensorRT 模型文件。TensorRT 模型文件由导出的 onnx 模型以及 TensorRT 官方模型转换工具生成。目前,不支持多 GPU 测试以及动态张量形状(Dynamic shape)。请注意,数据集输出与模型输入张量的形状应保持一致。同时,不建议使用测试时数据增强,如 `ThreeCrop`,`TenCrop`,`twice_sample` 等。 例子: diff --git a/docs_zh_CN/tutorials/6_export_model.md b/docs_zh_CN/tutorials/6_export_model.md index 9adb1b1608..8dca014cfc 100644 --- a/docs_zh_CN/tutorials/6_export_model.md +++ b/docs_zh_CN/tutorials/6_export_model.md @@ -28,7 +28,7 @@ ## 如何使用 -对于简单的模型导出,用户可以使用这里的 [脚本](/tools/pytorch2onnx.py)。 +对于简单的模型导出,用户可以使用这里的 [脚本](/tools/deployment/pytorch2onnx.py)。 注意,需要安装 `onnx` 和 `onnxruntime` 包以进行导出后的验证。 ### 准备工作 @@ -42,7 +42,7 @@ pip install onnx onnxruntime MMAction2 提供了一个 python 脚本,用于将 MMAction2 训练的 pytorch 模型导出到 ONNX。 ```shell -python tools/pytorch2onnx.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--shape ${SHAPE}] \ +python tools/deployment/pytorch2onnx.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--shape ${SHAPE}] \ [--verify] [--show] [--output-file ${OUTPUT_FILE}] [--is-localizer] [--opset-version ${VERSION}] ``` @@ -61,7 +61,7 @@ python 
tools/pytorch2onnx.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--shape ${SHAPE} 对于行为识别器,可运行: ```shell -python tools/pytorch2onnx.py $CONFIG_PATH $CHECKPOINT_PATH --shape $SHAPE --verify +python tools/deployment/pytorch2onnx.py $CONFIG_PATH $CHECKPOINT_PATH --shape $SHAPE --verify ``` ### 时序动作检测器 @@ -69,7 +69,7 @@ python tools/pytorch2onnx.py $CONFIG_PATH $CHECKPOINT_PATH --shape $SHAPE --veri 对于时序动作检测器,可运行: ```shell -python tools/pytorch2onnx.py $CONFIG_PATH $CHECKPOINT_PATH --is-localizer --shape $SHAPE --verify +python tools/deployment/pytorch2onnx.py $CONFIG_PATH $CHECKPOINT_PATH --is-localizer --shape $SHAPE --verify ``` 如果发现提供的模型权重文件没有被成功导出,或者存在精度损失,可以在本 repo 下提出问题(issue)。 diff --git a/docs_zh_CN/useful_tools.md b/docs_zh_CN/useful_tools.md index 13ed9d51f2..f036be91bf 100644 --- a/docs_zh_CN/useful_tools.md +++ b/docs_zh_CN/useful_tools.md @@ -95,7 +95,7 @@ Params: 28.04 M ### 导出 MMAction2 模型为 ONNX 格式(实验特性) -`/tools/pytorch2onnx.py` 脚本用于将模型转换为 [ONNX](https://github.com/onnx/onnx) 格式。 +`/tools/deployment/pytorch2onnx.py` 脚本用于将模型转换为 [ONNX](https://github.com/onnx/onnx) 格式。 同时,该脚本支持比较 PyTorch 模型和 ONNX 模型的输出结果,验证输出结果是否相同。 本功能依赖于 `onnx` 以及 `onnxruntime`,使用前请先通过 `pip install onnx onnxruntime` 安装依赖包。 请注意,可通过 `--softmax` 选项在行为识别器末尾添加 Softmax 层,从而获取 `[0, 1]` 范围内的预测结果。 @@ -103,31 +103,31 @@ Params: 28.04 M - 对于行为识别模型,请运行: ```shell - python tools/pytorch2onnx.py $CONFIG_PATH $CHECKPOINT_PATH --shape $SHAPE --verify + python tools/deployment/pytorch2onnx.py $CONFIG_PATH $CHECKPOINT_PATH --shape $SHAPE --verify ``` - 对于时序动作检测模型,请运行: ```shell - python tools/pytorch2onnx.py $CONFIG_PATH $CHECKPOINT_PATH --is-localizer --shape $SHAPE --verify + python tools/deployment/pytorch2onnx.py $CONFIG_PATH $CHECKPOINT_PATH --is-localizer --shape $SHAPE --verify ``` ### 发布模型 -`tools/publish_model.py` 脚本用于进行模型发布前的准备工作,主要包括: +`tools/deployment/publish_model.py` 脚本用于进行模型发布前的准备工作,主要包括: (1) 将模型的权重张量转化为 CPU 张量。 (2) 删除优化器状态信息。 (3) 计算模型权重文件的哈希值,并将哈希值添加到文件名后。 ```shell -python tools/publish_model.py ${INPUT_FILENAME} ${OUTPUT_FILENAME} +python tools/deployment/publish_model.py ${INPUT_FILENAME} ${OUTPUT_FILENAME} ``` 例如, ```shell -python tools/publish_model.py work_dirs/tsn_r50_1x1x3_100e_kinetics400_rgb/latest.pth tsn_r50_1x1x3_100e_kinetics400_rgb.pth +python tools/deployment/publish_model.py work_dirs/tsn_r50_1x1x3_100e_kinetics400_rgb/latest.pth tsn_r50_1x1x3_100e_kinetics400_rgb.pth ``` 最终,输出文件名为 `tsn_r50_1x1x3_100e_kinetics400_rgb-{hash id}.pth`。 @@ -154,8 +154,8 @@ python tools/print_config.py ${CONFIG} [-h] [--options ${OPTIONS [OPTIONS...]}] ### 检查视频 -`tools/check_videos.py` 脚本利用指定视频编码器,遍历指定配置文件视频数据集中所有样本,寻找无效视频文件(文件破损或者文件不存在),并将无效文件路径保存到输出文件中。请注意,删除无效视频文件后,需要重新生成视频文件列表。 +`tools/analysis/check_videos.py` 脚本利用指定视频编码器,遍历指定配置文件视频数据集中所有样本,寻找无效视频文件(文件破损或者文件不存在),并将无效文件路径保存到输出文件中。请注意,删除无效视频文件后,需要重新生成视频文件列表。 ```shell -python tools/check_videos.py ${CONFIG} [-h] [--options OPTIONS [OPTIONS ...]] [--cfg-options CFG_OPTIONS [CFG_OPTIONS ...]] [--output-file OUTPUT_FILE] [--split SPLIT] [--decoder DECODER] [--num-processes NUM_PROCESSES] [--remove-corrupted-videos] +python tools/analysis/check_videos.py ${CONFIG} [-h] [--options OPTIONS [OPTIONS ...]] [--cfg-options CFG_OPTIONS [CFG_OPTIONS ...]] [--output-file OUTPUT_FILE] [--split SPLIT] [--decoder DECODER] [--num-processes NUM_PROCESSES] [--remove-corrupted-videos] ``` diff --git a/tests/test_utils/test_onnx.py b/tests/test_utils/test_onnx.py index d76c4db369..472d247fa9 100644 --- a/tests/test_utils/test_onnx.py +++ b/tests/test_utils/test_onnx.py @@ -2,7 +2,7 @@ import 
tempfile import torch.nn as nn -from tools.pytorch2onnx import _convert_batchnorm, pytorch2onnx +from tools.deployment.pytorch2onnx import _convert_batchnorm, pytorch2onnx class TestModel(nn.Module): diff --git a/tools/__init__.py b/tools/__init__.py index e69de29bb2..dedc85c424 100644 --- a/tools/__init__.py +++ b/tools/__init__.py @@ -0,0 +1,4 @@ +from .analysis import * # noqa: F401, F403 +from .data import * # noqa: F401, F403 +from .deployment import * # noqa: F401, F403 +from .misc import * # noqa: F401, F403 diff --git a/tools/check_videos.py b/tools/analysis/check_videos.py similarity index 100% rename from tools/check_videos.py rename to tools/analysis/check_videos.py diff --git a/tools/publish_model.py b/tools/deployment/publish_model.py similarity index 100% rename from tools/publish_model.py rename to tools/deployment/publish_model.py diff --git a/tools/pytorch2onnx.py b/tools/deployment/pytorch2onnx.py similarity index 100% rename from tools/pytorch2onnx.py rename to tools/deployment/pytorch2onnx.py diff --git a/tools/bsn_proposal_generation.py b/tools/misc/bsn_proposal_generation.py similarity index 100% rename from tools/bsn_proposal_generation.py rename to tools/misc/bsn_proposal_generation.py diff --git a/tools/clip_feature_extraction.py b/tools/misc/clip_feature_extraction.py similarity index 100% rename from tools/clip_feature_extraction.py rename to tools/misc/clip_feature_extraction.py diff --git a/tools/dist_clip_feature_extraction.sh b/tools/misc/dist_clip_feature_extraction.sh similarity index 87% rename from tools/dist_clip_feature_extraction.sh rename to tools/misc/dist_clip_feature_extraction.sh index 06ad2b0ffe..f5c7a1a607 100644 --- a/tools/dist_clip_feature_extraction.sh +++ b/tools/misc/dist_clip_feature_extraction.sh @@ -5,7 +5,7 @@ CHECKPOINT=$2 GPUS=$3 PORT=${PORT:-29500} -PYTHONPATH="$(dirname $0)/..":$PYTHONPATH \ +PYTHONPATH="$(dirname $0)/../..":$PYTHONPATH \ # Arguments starting from the forth one are captured by ${@:4} python -m torch.distributed.launch --nproc_per_node=$GPUS --master_port=$PORT \ $(dirname "$0")/clip_feature_extraction.py $CONFIG $CHECKPOINT \ diff --git a/tools/flow_extraction.py b/tools/misc/flow_extraction.py similarity index 100% rename from tools/flow_extraction.py rename to tools/misc/flow_extraction.py From a5ea62d91ee0f7f20ca8556149465847430d1edc Mon Sep 17 00:00:00 2001 From: Rejnald Lleshi <46654505+rlleshi@users.noreply.github.com> Date: Wed, 23 Jun 2021 15:59:36 +0200 Subject: [PATCH 168/414] Improve check videos (#950) * [Improvement] Add font color to args in long_video_demo * [Improvement] Add font color to args in long_video_demo * [Improvement] check_videos * Improve check videos * polish * polish Co-authored-by: dreamerlin <528557675@qq.com> --- tools/analysis/check_videos.py | 45 ++++++++++++++++------------------ 1 file changed, 21 insertions(+), 24 deletions(-) diff --git a/tools/analysis/check_videos.py b/tools/analysis/check_videos.py index fcd7b8d30f..f207b7a43a 100644 --- a/tools/analysis/check_videos.py +++ b/tools/analysis/check_videos.py @@ -2,7 +2,7 @@ import os import warnings from functools import partial -from multiprocessing import Manager, Pool +from multiprocessing import Manager, Pool, cpu_count import numpy as np from mmcv import Config, DictAction @@ -37,15 +37,17 @@ def parse_args(): parser.add_argument( '--split', default='train', - help='Dataset split, should be one of [train, val, test]') + choices=['train', 'val', 'test'], + help='Dataset split') parser.add_argument( '--decoder', 
default='decord', + choices=['decord', 'opencv', 'pyav'], help='Video decoder type, should be one of [decord, opencv, pyav]') parser.add_argument( '--num-processes', type=int, - default=5, + default=(cpu_count() - 1 or 1), help='Number of processes to check videos') parser.add_argument( '--remove-corrupted-videos', @@ -104,14 +106,8 @@ def _do_check_videos(lock, dataset, output_file, idx): if __name__ == '__main__': args = parse_args() - assert args.split in ['train', 'val', 'test'] - decoder_to_pipeline_prefix = dict( - decord='Decord', - opencv='OpenCV', - pyav='PyAV', - ) - assert args.decoder in decoder_to_pipeline_prefix + decord='Decord', opencv='OpenCV', pyav='PyAV') # read config file cfg = Config.fromfile(args.config) @@ -140,20 +136,21 @@ def _do_check_videos(lock, dataset, output_file, idx): # start checking for _ in tqdm(pool.imap_unordered(worker_fn, ids), total=len(ids)): pass + pool.close() pool.join() - # print results and release resources - pool.close() - with open(args.output_file, 'r') as f: + if os.path.exists(args.output_file): + num_lines = sum(1 for _ in open(args.output_file)) print(f'Checked {len(dataset)} videos, ' - f'{len(f)} is/are corrupted/missing.') - - if args.remove_corrupted_videos: - print('Start deleting corrupted videos') - cnt = 0 - with open(args.output_file, 'r') as f: - for line in f: - if os.path.exists(line.strip()): - os.remove(line.strip()) - cnt += 1 - print(f'Delete {cnt} corrupted videos.') + f'{num_lines} are corrupted/missing.') + if args.remove_corrupted_videos: + print('Start deleting corrupted videos') + cnt = 0 + with open(args.output_file, 'r') as f: + for line in f: + if os.path.exists(line.strip()): + os.remove(line.strip()) + cnt += 1 + print(f'Deleted {cnt} corrupted videos.') + else: + print(f'Checked {len(dataset)} videos, none are corrupted/missing') From a29e4aa9f9cd5e730b5a99dd5f74748676e32c30 Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Thu, 24 Jun 2021 10:04:00 +0800 Subject: [PATCH 169/414] [Improvement] Make demo more robust in cross-platforms (#952) * fix demo * update * fix * fix bug * fix bug * update doc --- demo/README.md | 8 ++--- demo/demo.py | 79 ++++++++++++++++++++++++++++++-------------------- setup.cfg | 2 +- 3 files changed, 53 insertions(+), 36 deletions(-) diff --git a/demo/README.md b/demo/README.md index 71fa55e2b0..dca7a20f37 100644 --- a/demo/README.md +++ b/demo/README.md @@ -37,7 +37,7 @@ We provide a demo script to predict the recognition result using a single video. ```shell python demo/demo.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${VIDEO_FILE} {LABEL_FILE} [--use-frames] \ - [--device ${DEVICE_TYPE}] [--fps {FPS}] [--font-size {FONT_SIZE}] [--font-color {FONT_COLOR}] \ + [--device ${DEVICE_TYPE}] [--fps {FPS}] [--font-scale {FONT_SCALE}] [--font-color {FONT_COLOR}] \ [--target-resolution ${TARGET_RESOLUTION}] [--resize-algorithm {RESIZE_ALGORITHM}] [--out-filename {OUT_FILE}] ``` @@ -46,7 +46,7 @@ Optional arguments: - `--use-frames`: If specified, the demo will take rawframes as input. Otherwise, it will take a video as input. - `DEVICE_TYPE`: Type of device to run the demo. Allowed values are cuda device like `cuda:0` or `cpu`. If not specified, it will be set to `cuda:0`. - `FPS`: FPS value of the output video when using rawframes as input. If not specified, it wll be set to 30. -- `FONT_SIZE`: Font size of the label added in the video. If not specified, it wll be set to 20. +- `FONT_SCALE`: Font scale of the label added in the video. 
If not specified, it will be set to 0.5.
- `FONT_COLOR`: Font color of the label added in the video. If not specified, it will be `white`.
- `TARGET_RESOLUTION`: Resolution(desired_width, desired_height) for resizing the frames before output when using a video as input. If not specified, it will be None and the frames are resized by keeping the existing aspect ratio.
- `RESIZE_ALGORITHM`: Resize algorithm used for resizing. If not specified, it will be set to `bicubic`.
@@ -120,13 +120,13 @@ or use checkpoint url from `configs/` to directly load corresponding checkpoint,
      --out-filename demo/demo_out.mp4
    ```

-7. Recognize a video file as input by using a TSN model, then generate an mp4 file with a label in a red color and 10px fontsize.
+7. Recognize a video file as input by using a TSN model, then generate an mp4 file with a label in a red color and a font scale of 1.

    ```shell
    # The demo.mp4 and label_map_k400.txt are both from Kinetics-400
    python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \
        checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \
-       demo/demo.mp4 demo/label_map_k400.txt --font-size 10 --font-color red \
+       demo/demo.mp4 demo/label_map_k400.txt --font-scale 1 --font-color red \
        --out-filename demo/demo_out.mp4
    ```

diff --git a/demo/demo.py b/demo/demo.py
index 32b34ea78c..e470502875 100644
--- a/demo/demo.py
+++ b/demo/demo.py
@@ -2,7 +2,11 @@ import os
 import os.path as osp

+import cv2
+import decord
+import numpy as np
 import torch
+import webcolors
 from mmcv import Config, DictAction

 from mmaction.apis import inference_recognizer, init_recognizer
@@ -36,14 +40,14 @@ def parse_args():
         help='specify fps value of the output video when using rawframes to '
         'generate file')
     parser.add_argument(
-        '--font-size',
-        default=20,
-        type=int,
-        help='font size of the label test in output video')
+        '--font-scale',
+        default=0.5,
+        type=float,
+        help='font scale of the label in output video')
     parser.add_argument(
         '--font-color',
         default='white',
-        help='font color of the label test in output video')
+        help='font color of the label in output video')
     parser.add_argument(
         '--target-resolution',
         nargs=2,
@@ -65,7 +69,7 @@ def get_output(video_path,
                out_filename,
                label,
                fps=30,
-               font_size=20,
+               font_scale=0.5,
                font_color='white',
                target_resolution=None,
                resize_algorithm='bicubic',
@@ -83,7 +87,7 @@ def get_output(video_path,
         out_filename (str): Output filename for the generated file.
         label (str): Predicted label of the generated file.
         fps (int): Number of picture frames to read per second. Default: 30.
-        font_size (int): Font size of the label. Default: 20.
+        font_scale (float): Font scale of the label. Default: 0.5.
         font_color (str): Font color of the label. Default: 'white'.
         target_resolution (None | tuple[int | None]): Set to
             (desired_width desired_height) to have resized frames. If either
@@ -99,32 +103,45 @@ def get_output(video_path,
         raise NotImplementedError

     try:
-        from moviepy.editor import (CompositeVideoClip, ImageSequenceClip,
-                                    TextClip, VideoFileClip)
+        from moviepy.editor import ImageSequenceClip
     except ImportError:
         raise ImportError('Please install moviepy to enable output file.')

+    # Channel Order is BGR
     if use_frames:
         frame_list = sorted(
             [osp.join(video_path, x) for x in os.listdir(video_path)])
-        video_clips = ImageSequenceClip(frame_list, fps=fps)
+        frames = [cv2.imread(x) for x in frame_list]
     else:
-        # revert the order to suit ``VideoFileClip``.
- # (weight, height) -> (height, weight) - target_resolution = (target_resolution[1], target_resolution[0]) - video_clips = VideoFileClip( - video_path, - target_resolution=target_resolution, - resize_algorithm=resize_algorithm) - - duration_video_clip = video_clips.duration - text_clips = TextClip(label, fontsize=font_size, color=font_color) - text_clips = ( - text_clips.set_position( - ('right', 'bottom'), - relative=True).set_duration(duration_video_clip)) - - video_clips = CompositeVideoClip([video_clips, text_clips]) + video = decord.VideoReader(video_path) + frames = [x.asnumpy()[..., ::-1] for x in video] + + if target_resolution: + w, h = target_resolution + frame_h, frame_w, _ = frames[0].shape + if w == -1: + w = int(h / frame_h * frame_w) + if h == -1: + h = int(w / frame_w * frame_h) + frames = [cv2.resize(f, (w, h)) for f in frames] + + textsize = cv2.getTextSize(label, cv2.FONT_HERSHEY_DUPLEX, font_scale, + 1)[0] + textheight = textsize[1] + padding = 10 + location = (padding, padding + textheight) + + if isinstance(font_color, str): + font_color = webcolors.name_to_rgb(font_color)[::-1] + + frames = [np.array(frame) for frame in frames] + for frame in frames: + cv2.putText(frame, label, location, cv2.FONT_HERSHEY_DUPLEX, + font_scale, font_color, 1) + + # RGB order + frames = [x[..., ::-1] for x in frames] + video_clips = ImageSequenceClip(frames, fps=fps) out_type = osp.splitext(out_filename)[1][1:] if out_type == 'gif': @@ -168,19 +185,19 @@ def main(): if args.target_resolution is not None: if args.target_resolution[0] == -1: - args.target_resolution[0] = None + assert isinstance(args.target_resolution[1], int) + assert args.target_resolution[1] > 0 if args.target_resolution[1] == -1: - args.target_resolution[1] = None + assert isinstance(args.target_resolution[0], int) + assert args.target_resolution[0] > 0 args.target_resolution = tuple(args.target_resolution) - else: - args.target_resolution = (None, None) get_output( args.video, args.out_filename, results[0][0], fps=args.fps, - font_size=args.font_size, + font_scale=args.font_scale, font_color=args.font_color, target_resolution=args.target_resolution, resize_algorithm=args.resize_algorithm, diff --git a/setup.cfg b/setup.cfg index 88f1b53656..e6cdc146a2 100644 --- a/setup.cfg +++ b/setup.cfg @@ -19,6 +19,6 @@ line_length = 79 multi_line_output = 0 known_standard_library = pkg_resources,setuptools known_first_party = mmaction -known_third_party = cv2,joblib,matplotlib,mmcv,numpy,pandas,pytest,scipy,seaborn,titlecase,torch,tqdm +known_third_party = cv2,decord,joblib,matplotlib,mmcv,numpy,pandas,pytest,scipy,seaborn,titlecase,torch,tqdm,webcolors no_lines_before = STDLIB,LOCALFOLDER default_section = THIRDPARTY From 79fba89a5e01580c851ea9fce7e28fa7024b8830 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Thu, 24 Jun 2021 13:21:06 +0800 Subject: [PATCH 170/414] [Improvement] Use Pylint to polish code style (#908) * polish * polish * polish tools * polish tests * polish demo * remove topk class --- .pylintrc | 4 +- demo/demo_gradcam.py | 2 +- demo/long_video_demo.py | 2 +- demo/webcam_demo.py | 4 +- demo/webcam_demo_spatiotemporal_det.py | 33 +++-- mmaction/apis/train.py | 4 +- mmaction/core/bbox/transforms.py | 32 ++--- .../evaluation/ava_evaluation/np_box_list.py | 7 +- .../object_detection_evaluation.py | 92 +++++------- .../ava_evaluation/per_image_evaluation.py | 136 +++++++----------- mmaction/core/evaluation/ava_utils.py | 103 +++++++------ mmaction/core/evaluation/eval_hooks.py | 5 +- 
mmaction/datasets/audio_visual_dataset.py | 2 +- mmaction/datasets/base.py | 2 +- mmaction/datasets/pipelines/augmentations.py | 25 ++-- mmaction/datasets/pipelines/pose_loading.py | 8 +- mmaction/datasets/ssn_dataset.py | 2 +- mmaction/models/backbones/mobilenet_v2.py | 8 +- mmaction/models/backbones/resnet3d.py | 2 +- mmaction/models/backbones/resnet_audio.py | 8 +- mmaction/models/backbones/tanet.py | 36 ++--- mmaction/models/common/lfb.py | 2 +- mmaction/models/heads/bbox_head.py | 11 +- mmaction/models/heads/lfb_infer_head.py | 2 +- mmaction/models/heads/misc_head.py | 2 +- mmaction/utils/decorators.py | 1 - mmaction/utils/precise_bn.py | 5 +- .../test_datasets/test_ava_dataset.py | 3 +- tests/test_data/test_formating.py | 2 +- .../test_pipelines/test_augmentations/base.py | 4 +- .../test_augmentations/test_audio.py | 6 +- .../test_augmentations/test_color.py | 3 +- .../test_augmentations/test_crop.py | 21 ++- .../test_augmentations/test_flip.py | 3 +- .../test_augmentations/test_imgaug.py | 3 +- .../test_augmentations/test_lazy.py | 21 ++- .../test_augmentations/test_misc.py | 6 +- .../test_augmentations/test_normalization.py | 3 +- .../test_augmentations/test_transform.py | 12 +- .../test_loadings/test_decode.py | 3 +- .../test_pipelines/test_loadings/test_load.py | 2 +- .../test_loadings/test_pose_loading.py | 12 +- tests/test_data/test_sampler.py | 4 +- tests/test_models/base.py | 8 +- tests/test_runtime/test_apis_test.py | 2 +- tests/test_runtime/test_eval_hook.py | 6 +- tests/test_runtime/test_precise_bn.py | 5 +- tests/test_runtime/test_train.py | 3 +- tests/test_utils/__init__.py | 0 .../activitynet_feature_postprocessing.py | 2 +- tools/data/activitynet/process_annotations.py | 10 +- .../activitynet/tsn_feature_extraction.py | 2 +- tools/data/build_audio_features.py | 16 ++- tools/data/build_file_list.py | 2 +- tools/data/build_rawframes.py | 18 +-- tools/data/build_videos.py | 2 +- tools/data/hvu/download.py | 4 +- tools/data/kinetics/download.py | 2 +- tools/data/parse_file_list.py | 42 +++--- tools/data/resize_video.py | 2 +- tools/train.py | 2 +- 61 files changed, 390 insertions(+), 386 deletions(-) create mode 100644 tests/test_utils/__init__.py diff --git a/.pylintrc b/.pylintrc index c487f62da7..b1add44f16 100644 --- a/.pylintrc +++ b/.pylintrc @@ -60,7 +60,9 @@ confidence= # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use "--disable=all --enable=classes # --disable=W". 
-disable=print-statement, +disable=import-outside-toplevel + redefined-outer-name + print-statement, parameter-unpacking, unpacking-in-except, old-raise-syntax, diff --git a/demo/demo_gradcam.py b/demo/demo_gradcam.py index 860cce4970..0b68cb8071 100644 --- a/demo/demo_gradcam.py +++ b/demo/demo_gradcam.py @@ -78,7 +78,7 @@ def build_inputs(model, video_path, use_frames=False): if osp.isfile(video_path) and use_frames: raise RuntimeError( f"'{video_path}' is a video file, not a rawframe directory") - elif osp.isdir(video_path) and not use_frames: + if osp.isdir(video_path) and not use_frames: raise RuntimeError( f"'{video_path}' is a rawframe directory, not a video file") diff --git a/demo/long_video_demo.py b/demo/long_video_demo.py index de253832c8..bc82c2ae9c 100644 --- a/demo/long_video_demo.py +++ b/demo/long_video_demo.py @@ -215,7 +215,7 @@ def inference(model, data, args, frame_queue): if args.stride > 0: pred_stride = int(args.sample_length * args.stride) - for i in range(pred_stride): + for _ in range(pred_stride): frame_queue.popleft() # for case ``args.stride=0`` diff --git a/demo/webcam_demo.py b/demo/webcam_demo.py index 7df09044b8..89a926b972 100644 --- a/demo/webcam_demo.py +++ b/demo/webcam_demo.py @@ -77,7 +77,7 @@ def show_results(): cur_time = time.time() while True: msg = 'Waiting for action ...' - ret, frame = camera.read() + _, frame = camera.read() frame_queue.append(np.array(frame[:, :, ::-1])) if len(result_queue) != 0: @@ -93,7 +93,7 @@ def show_results(): cv2.putText(frame, text, location, FONTFACE, FONTSCALE, FONTCOLOR, THICKNESS, LINETYPE) - elif len(text_info): + elif len(text_info) != 0: for location, text in text_info.items(): cv2.putText(frame, text, location, FONTFACE, FONTSCALE, FONTCOLOR, THICKNESS, LINETYPE) diff --git a/demo/webcam_demo_spatiotemporal_det.py b/demo/webcam_demo_spatiotemporal_det.py index e060214d62..b5e407cffd 100644 --- a/demo/webcam_demo_spatiotemporal_det.py +++ b/demo/webcam_demo_spatiotemporal_det.py @@ -223,7 +223,6 @@ def _do_detect(self, image): The format of bboxes is (xmin, ymin, xmax, ymax) in pixels. """ - pass def predict(self, task): """Add keyframe bboxes to task.""" @@ -571,20 +570,20 @@ def __next__(self): if self.read_queue.qsize() == 0: time.sleep(0.02) return not self.stopped, None - else: - was_read, task = self.read_queue.get() - if not was_read: - # If we reach the end of the video, there aren't enough frames - # in the task.processed_frames, so no need to model inference - # and draw predictions. Put task into display queue. - with self.read_id_lock: - read_id = self.read_id - with self.display_lock: - self.display_queue[read_id] = was_read, copy.deepcopy(task) - - # main thread doesn't need to handle this task again - task = None - return was_read, task + + was_read, task = self.read_queue.get() + if not was_read: + # If we reach the end of the video, there aren't enough frames + # in the task.processed_frames, so no need to model inference + # and draw predictions. Put task into display queue. 
+ with self.read_id_lock: + read_id = self.read_id + with self.display_lock: + self.display_queue[read_id] = was_read, copy.deepcopy(task) + + # main thread doesn't need to handle this task again + task = None + return was_read, task def start(self): """Start read thread and display thread.""" @@ -685,9 +684,9 @@ def draw_clip_range(self, frames, preds, bboxes, draw_range): @abstractmethod def draw_one_image(self, frame, bboxes, preds): """Draw bboxes and corresponding texts on one frame.""" - pass - def abbrev(self, name): + @staticmethod + def abbrev(name): """Get the abbreviation of label name: 'take (an object) from (a person)' -> 'take ... from ...' diff --git a/mmaction/apis/train.py b/mmaction/apis/train.py index a8a9a03363..aa3450c939 100644 --- a/mmaction/apis/train.py +++ b/mmaction/apis/train.py @@ -225,5 +225,5 @@ def train_model(model, eval_res = test_dataset.evaluate(outputs, **eval_cfg) runner.logger.info(f'Testing results of the {name} checkpoint') - for name, val in eval_res.items(): - runner.logger.info(f'{name}: {val:.04f}') + for metric_name, val in eval_res.items(): + runner.logger.info(f'{metric_name}: {val:.04f}') diff --git a/mmaction/core/bbox/transforms.py b/mmaction/core/bbox/transforms.py index d61e8116d7..b051e2275e 100644 --- a/mmaction/core/bbox/transforms.py +++ b/mmaction/core/bbox/transforms.py @@ -15,22 +15,22 @@ def bbox2result(bboxes, labels, num_classes, thr=0.01): """ if bboxes.shape[0] == 0: return list(np.zeros((num_classes - 1, 0, 5), dtype=np.float32)) - else: - bboxes = bboxes.cpu().numpy() - labels = labels.cpu().numpy() - # We only handle multilabel now - assert labels.shape[-1] > 1 + bboxes = bboxes.cpu().numpy() + labels = labels.cpu().numpy() - scores = labels # rename for clarification - thr = (thr, ) * num_classes if isinstance(thr, float) else thr - assert scores.shape[1] == num_classes - assert len(thr) == num_classes + # We only handle multilabel now + assert labels.shape[-1] > 1 - result = [] - for i in range(num_classes - 1): - where = scores[:, i + 1] > thr[i + 1] - result.append( - np.concatenate((bboxes[where, :4], scores[where, i + 1:i + 2]), - axis=1)) - return result + scores = labels # rename for clarification + thr = (thr, ) * num_classes if isinstance(thr, float) else thr + assert scores.shape[1] == num_classes + assert len(thr) == num_classes + + result = [] + for i in range(num_classes - 1): + where = scores[:, i + 1] > thr[i + 1] + result.append( + np.concatenate((bboxes[where, :4], scores[where, i + 1:i + 2]), + axis=1)) + return result diff --git a/mmaction/core/evaluation/ava_evaluation/np_box_list.py b/mmaction/core/evaluation/ava_evaluation/np_box_list.py index f9b101e6f5..ddfdd5184d 100644 --- a/mmaction/core/evaluation/ava_evaluation/np_box_list.py +++ b/mmaction/core/evaluation/ava_evaluation/np_box_list.py @@ -120,8 +120,9 @@ def get_coordinates(self): x_max = box_coordinates[:, 3] return [y_min, x_min, y_max, x_max] - def _is_valid_boxes(self, data): - """Check whether data fullfills the format of N*[ymin, xmin, ymax, + @staticmethod + def _is_valid_boxes(data): + """Check whether data fulfills the format of N*[ymin, xmin, ymax, xmin]. Args: @@ -131,7 +132,7 @@ def _is_valid_boxes(self, data): a boolean indicating whether all ymax of boxes are equal or greater than ymin, and all xmax of boxes are equal or greater than xmin. 
""" - if len(data): + if len(data) != 0: for v in data: if v[0] > v[2] or v[1] > v[3]: return False diff --git a/mmaction/core/evaluation/ava_evaluation/object_detection_evaluation.py b/mmaction/core/evaluation/ava_evaluation/object_detection_evaluation.py index 95f0cc501c..508a076def 100644 --- a/mmaction/core/evaluation/ava_evaluation/object_detection_evaluation.py +++ b/mmaction/core/evaluation/ava_evaluation/object_detection_evaluation.py @@ -29,6 +29,7 @@ import collections import logging +import warnings from abc import ABCMeta, abstractmethod from collections import defaultdict @@ -101,15 +102,13 @@ def clear(self): class ObjectDetectionEvaluator(DetectionEvaluator): """A class to evaluate detections.""" - def __init__( - self, - categories, - matching_iou_threshold=0.5, - evaluate_corlocs=False, - metric_prefix=None, - use_weighted_mean_ap=False, - evaluate_masks=False, - ): + def __init__(self, + categories, + matching_iou_threshold=0.5, + evaluate_corlocs=False, + metric_prefix=None, + use_weighted_mean_ap=False, + evaluate_masks=False): """Constructor. Args: @@ -244,7 +243,8 @@ def add_single_detected_image_info(self, image_id, detections_dict): detected_masks=detection_masks, ) - def create_category_index(self, categories): + @staticmethod + def create_category_index(categories): """Creates dictionary of COCO compatible categories keyed by category id. @@ -277,14 +277,8 @@ def evaluate(self): 2. per_category_ap: category specific results with keys of the form 'PerformanceByCategory/mAP@IOU/category' """ - ( - per_class_ap, - mean_ap, - _, - _, - per_class_corloc, - mean_corloc, - ) = self._evaluation.evaluate() + (per_class_ap, mean_ap, _, _, per_class_corloc, + mean_corloc) = self._evaluation.evaluate() metric = f'mAP@{self._matching_iou_threshold}IOU' pascal_metrics = {self._metric_prefix + metric: mean_ap} @@ -355,15 +349,13 @@ def __init__(self, categories, matching_iou_threshold=0.5): class ObjectDetectionEvaluation: """Internal implementation of Pascal object detection metrics.""" - def __init__( - self, - num_groundtruth_classes, - matching_iou_threshold=0.5, - nms_iou_threshold=1.0, - nms_max_output_boxes=10000, - use_weighted_mean_ap=False, - label_id_offset=0, - ): + def __init__(self, + num_groundtruth_classes, + matching_iou_threshold=0.5, + nms_iou_threshold=1.0, + nms_max_output_boxes=10000, + use_weighted_mean_ap=False, + label_id_offset=0): if num_groundtruth_classes < 1: raise ValueError( 'Need at least 1 groundtruth class for evaluation.') @@ -399,13 +391,11 @@ def _initialize_detections(self): def clear_detections(self): self._initialize_detections() - def add_single_ground_truth_image_info( - self, - image_key, - groundtruth_boxes, - groundtruth_class_labels, - groundtruth_masks=None, - ): + def add_single_ground_truth_image_info(self, + image_key, + groundtruth_boxes, + groundtruth_class_labels, + groundtruth_masks=None): """Adds groundtruth for a single image to be used for evaluation. Args: @@ -420,8 +410,8 @@ def add_single_ground_truth_image_info( masks. The mask values range from 0 to 1. 
""" if image_key in self.groundtruth_boxes: - logging.warn(('image %s has already been added to the ground ' - 'truth database.'), image_key) + warnings.warn(('image %s has already been added to the ground ' + 'truth database.'), image_key) return self.groundtruth_boxes[image_key] = groundtruth_boxes @@ -430,14 +420,12 @@ def add_single_ground_truth_image_info( self._update_ground_truth_statistics(groundtruth_class_labels) - def add_single_detected_image_info( - self, - image_key, - detected_boxes, - detected_scores, - detected_class_labels, - detected_masks=None, - ): + def add_single_detected_image_info(self, + image_key, + detected_boxes, + detected_scores, + detected_class_labels, + detected_masks=None): """Adds detections for a single image to be used for evaluation. Args: @@ -468,8 +456,8 @@ def add_single_detected_image_info( ) if image_key in self.detection_keys: - logging.warn(('image %s has already been added to the ground ' - 'truth database.'), image_key) + warnings.warn(('image %s has already been added to the ground ' + 'truth database.'), image_key) return self.detection_keys.add(image_key) @@ -536,8 +524,7 @@ def evaluate(self): logging.info( 'The following classes have no ground truth examples: %s', np.squeeze(np.argwhere(self.num_gt_instances_per_class == 0)) + - self.label_id_offset, - ) + self.label_id_offset) if self.use_weighted_mean_ap: all_scores = np.array([], dtype=float) @@ -557,10 +544,8 @@ def evaluate(self): all_scores = np.append(all_scores, scores) all_tp_fp_labels = np.append(all_tp_fp_labels, tp_fp_labels) precision, recall = metrics.compute_precision_recall( - scores, - tp_fp_labels, - self.num_gt_instances_per_class[class_index], - ) + scores, tp_fp_labels, + self.num_gt_instances_per_class[class_index]) self.precisions_per_class.append(precision) self.recalls_per_class.append(recall) average_precision = metrics.compute_average_precision( @@ -569,8 +554,7 @@ def evaluate(self): self.corloc_per_class = metrics.compute_cor_loc( self.num_gt_imgs_per_class, - self.num_images_correctly_detected_per_class, - ) + self.num_images_correctly_detected_per_class) if self.use_weighted_mean_ap: num_gt_instances = np.sum(self.num_gt_instances_per_class) diff --git a/mmaction/core/evaluation/ava_evaluation/per_image_evaluation.py b/mmaction/core/evaluation/ava_evaluation/per_image_evaluation.py index 6265c17d7a..2d06672d89 100644 --- a/mmaction/core/evaluation/ava_evaluation/per_image_evaluation.py +++ b/mmaction/core/evaluation/ava_evaluation/per_image_evaluation.py @@ -40,16 +40,14 @@ def __init__(self, num_groundtruth_classes, matching_iou_threshold=0.5): self.matching_iou_threshold = matching_iou_threshold self.num_groundtruth_classes = num_groundtruth_classes - def compute_object_detection_metrics( - self, - detected_boxes, - detected_scores, - detected_class_labels, - groundtruth_boxes, - groundtruth_class_labels, - detected_masks=None, - groundtruth_masks=None, - ): + def compute_object_detection_metrics(self, + detected_boxes, + detected_scores, + detected_class_labels, + groundtruth_boxes, + groundtruth_class_labels, + detected_masks=None, + groundtruth_masks=None): """Evaluates detections as being tp, fp or ignored from a single image. 
The evaluation is done in two stages: @@ -105,16 +103,14 @@ def compute_object_detection_metrics( return scores, tp_fp_labels - def _compute_tp_fp( - self, - detected_boxes, - detected_scores, - detected_class_labels, - groundtruth_boxes, - groundtruth_class_labels, - detected_masks=None, - groundtruth_masks=None, - ): + def _compute_tp_fp(self, + detected_boxes, + detected_scores, + detected_class_labels, + groundtruth_boxes, + groundtruth_class_labels, + detected_masks=None, + groundtruth_masks=None): """Labels true/false positives of detections of an image across all classes. @@ -159,18 +155,12 @@ def _compute_tp_fp( result_scores = [] result_tp_fp_labels = [] for i in range(self.num_groundtruth_classes): - ( - gt_boxes_at_ith_class, - gt_masks_at_ith_class, - detected_boxes_at_ith_class, - detected_scores_at_ith_class, - detected_masks_at_ith_class, - ) = self._get_ith_class_arrays(detected_boxes, detected_scores, - detected_masks, - detected_class_labels, - groundtruth_boxes, - groundtruth_masks, - groundtruth_class_labels, i) + (gt_boxes_at_ith_class, gt_masks_at_ith_class, + detected_boxes_at_ith_class, detected_scores_at_ith_class, + detected_masks_at_ith_class) = self._get_ith_class_arrays( + detected_boxes, detected_scores, detected_masks, + detected_class_labels, groundtruth_boxes, groundtruth_masks, + groundtruth_class_labels, i) scores, tp_fp_labels = self._compute_tp_fp_for_single_class( detected_boxes=detected_boxes_at_ith_class, detected_scores=detected_scores_at_ith_class, @@ -182,8 +172,9 @@ def _compute_tp_fp( result_tp_fp_labels.append(tp_fp_labels) return result_scores, result_tp_fp_labels - def _get_overlaps_and_scores_box_mode(self, detected_boxes, - detected_scores, groundtruth_boxes): + @staticmethod + def _get_overlaps_and_scores_box_mode(detected_boxes, detected_scores, + groundtruth_boxes): """Computes overlaps and scores between detected and groudntruth boxes. Args: @@ -214,14 +205,12 @@ def _get_overlaps_and_scores_box_mode(self, detected_boxes, num_boxes = detected_boxlist.num_boxes() return iou, None, scores, num_boxes - def _compute_tp_fp_for_single_class( - self, - detected_boxes, - detected_scores, - groundtruth_boxes, - detected_masks=None, - groundtruth_masks=None, - ): + def _compute_tp_fp_for_single_class(self, + detected_boxes, + detected_scores, + groundtruth_boxes, + detected_masks=None, + groundtruth_masks=None): """Labels boxes detected with the same class from the same image as tp/fp. 
@@ -249,15 +238,11 @@ def _compute_tp_fp_for_single_class( if detected_boxes.size == 0: return np.array([], dtype=float), np.array([], dtype=bool) - ( - iou, - _, - scores, - num_detected_boxes, - ) = self._get_overlaps_and_scores_box_mode( - detected_boxes=detected_boxes, - detected_scores=detected_scores, - groundtruth_boxes=groundtruth_boxes) + (iou, _, scores, + num_detected_boxes) = self._get_overlaps_and_scores_box_mode( + detected_boxes=detected_boxes, + detected_scores=detected_scores, + groundtruth_boxes=groundtruth_boxes) if groundtruth_boxes.size == 0: return scores, np.zeros(num_detected_boxes, dtype=bool) @@ -282,17 +267,11 @@ def _compute_tp_fp_for_single_class( return scores, tp_fp_labels - def _get_ith_class_arrays( - self, - detected_boxes, - detected_scores, - detected_masks, - detected_class_labels, - groundtruth_boxes, - groundtruth_masks, - groundtruth_class_labels, - class_index, - ): + @staticmethod + def _get_ith_class_arrays(detected_boxes, detected_scores, detected_masks, + detected_class_labels, groundtruth_boxes, + groundtruth_masks, groundtruth_class_labels, + class_index): """Returns numpy arrays belonging to class with index `class_index`. Args: @@ -332,21 +311,15 @@ class labels. detected_masks_at_ith_class = detected_masks[selected_detections] else: detected_masks_at_ith_class = None - return ( - gt_boxes_at_ith_class, - gt_masks_at_ith_class, - detected_boxes_at_ith_class, - detected_scores_at_ith_class, - detected_masks_at_ith_class, - ) - - def _remove_invalid_boxes( - self, - detected_boxes, - detected_scores, - detected_class_labels, - detected_masks=None, - ): + return (gt_boxes_at_ith_class, gt_masks_at_ith_class, + detected_boxes_at_ith_class, detected_scores_at_ith_class, + detected_masks_at_ith_class) + + @staticmethod + def _remove_invalid_boxes(detected_boxes, + detected_scores, + detected_class_labels, + detected_masks=None): """Removes entries with invalid boxes. 
A box is invalid if either its xmax is smaller than its xmin, or its @@ -373,16 +346,13 @@ def _remove_invalid_boxes( """ valid_indices = np.logical_and( detected_boxes[:, 0] < detected_boxes[:, 2], - detected_boxes[:, 1] < detected_boxes[:, 3], - ) + detected_boxes[:, 1] < detected_boxes[:, 3]) detected_boxes = detected_boxes[valid_indices] detected_scores = detected_scores[valid_indices] detected_class_labels = detected_class_labels[valid_indices] if detected_masks is not None: detected_masks = detected_masks[valid_indices] return [ - detected_boxes, - detected_scores, - detected_class_labels, - detected_masks, + detected_boxes, detected_scores, detected_class_labels, + detected_masks ] diff --git a/mmaction/core/evaluation/ava_utils.py b/mmaction/core/evaluation/ava_utils.py index 01036b85f9..159297fb7d 100644 --- a/mmaction/core/evaluation/ava_utils.py +++ b/mmaction/core/evaluation/ava_utils.py @@ -35,14 +35,14 @@ def results2csv(dataset, results, out_file, custom_classes=None): csv_results = det2csv(dataset, results, custom_classes) # save space for float - def tostr(item): + def to_str(item): if isinstance(item, float): return f'{item:.3f}' return str(item) with open(out_file, 'w') as f: for csv_result in csv_results: - f.write(','.join(map(lambda x: tostr(x), csv_result))) + f.write(','.join(map(to_str, csv_result))) f.write('\n') @@ -157,7 +157,6 @@ def ava_eval(result_file, label_file, ann_file, exclude_file, - max_dets=(100, ), verbose=True, custom_classes=None): @@ -186,52 +185,52 @@ def ava_eval(result_file, if verbose: print_time('Reading detection results', start) - if result_type == 'mAP': - pascal_evaluator = det_eval.PascalDetectionEvaluator(categories) - - start = time.time() - for image_key in gt_boxes: - if verbose and image_key in excluded_keys: - logging.info( - 'Found excluded timestamp in detections: %s.' - 'It will be ignored.', image_key) - continue - pascal_evaluator.add_single_ground_truth_image_info( - image_key, { - standard_fields.InputDataFields.groundtruth_boxes: - np.array(gt_boxes[image_key], dtype=float), - standard_fields.InputDataFields.groundtruth_classes: - np.array(gt_labels[image_key], dtype=int) - }) - if verbose: - print_time('Convert groundtruth', start) - - start = time.time() - for image_key in boxes: - if verbose and image_key in excluded_keys: - logging.info( - 'Found excluded timestamp in detections: %s.' - 'It will be ignored.', image_key) - continue - pascal_evaluator.add_single_detected_image_info( - image_key, { - standard_fields.DetectionResultFields.detection_boxes: - np.array(boxes[image_key], dtype=float), - standard_fields.DetectionResultFields.detection_classes: - np.array(labels[image_key], dtype=int), - standard_fields.DetectionResultFields.detection_scores: - np.array(scores[image_key], dtype=float) - }) - if verbose: - print_time('convert detections', start) - - start = time.time() - metrics = pascal_evaluator.evaluate() - if verbose: - print_time('run_evaluator', start) - for display_name in metrics: - print(f'{display_name}=\t{metrics[display_name]}') - return { - display_name: metrics[display_name] - for display_name in metrics if 'ByCategory' not in display_name - } + # Evaluation for mAP + pascal_evaluator = det_eval.PascalDetectionEvaluator(categories) + + start = time.time() + for image_key in gt_boxes: + if verbose and image_key in excluded_keys: + logging.info( + 'Found excluded timestamp in detections: %s.' 
+ 'It will be ignored.', image_key) + continue + pascal_evaluator.add_single_ground_truth_image_info( + image_key, { + standard_fields.InputDataFields.groundtruth_boxes: + np.array(gt_boxes[image_key], dtype=float), + standard_fields.InputDataFields.groundtruth_classes: + np.array(gt_labels[image_key], dtype=int) + }) + if verbose: + print_time('Convert groundtruth', start) + + start = time.time() + for image_key in boxes: + if verbose and image_key in excluded_keys: + logging.info( + 'Found excluded timestamp in detections: %s.' + 'It will be ignored.', image_key) + continue + pascal_evaluator.add_single_detected_image_info( + image_key, { + standard_fields.DetectionResultFields.detection_boxes: + np.array(boxes[image_key], dtype=float), + standard_fields.DetectionResultFields.detection_classes: + np.array(labels[image_key], dtype=int), + standard_fields.DetectionResultFields.detection_scores: + np.array(scores[image_key], dtype=float) + }) + if verbose: + print_time('convert detections', start) + + start = time.time() + metrics = pascal_evaluator.evaluate() + if verbose: + print_time('run_evaluator', start) + for display_name in metrics: + print(f'{display_name}=\t{metrics[display_name]}') + return { + display_name: metrics[display_name] + for display_name in metrics if 'ByCategory' not in display_name + } diff --git a/mmaction/core/evaluation/eval_hooks.py b/mmaction/core/evaluation/eval_hooks.py index d96ad87a6b..9ef5a8ad34 100644 --- a/mmaction/core/evaluation/eval_hooks.py +++ b/mmaction/core/evaluation/eval_hooks.py @@ -4,7 +4,6 @@ from math import inf import torch.distributed as dist -from mmcv.runner import Hook from torch.nn.modules.batchnorm import _BatchNorm from torch.utils.data import DataLoader @@ -40,6 +39,8 @@ def __init__(self, *args, save_best='auto', **kwargs): if not from_mmcv: + from mmcv.runner import Hook + class EvalHook(Hook): # noqa: F811 """Non-Distributed evaluation hook. @@ -362,7 +363,7 @@ def _do_evaluate(self, runner): # of rank 0 to other ranks to avoid this. 
if self.broadcast_bn_buffer: model = runner.model - for name, module in model.named_modules(): + for _, module in model.named_modules(): if isinstance(module, _BatchNorm) and module.track_running_stats: dist.broadcast(module.running_var, 0) diff --git a/mmaction/datasets/audio_visual_dataset.py b/mmaction/datasets/audio_visual_dataset.py index 6e10b4b040..e3d5fabfbf 100644 --- a/mmaction/datasets/audio_visual_dataset.py +++ b/mmaction/datasets/audio_visual_dataset.py @@ -65,7 +65,7 @@ def load_annotations(self): idx += 1 # idx for label[s] label = [int(x) for x in line_split[idx:]] - assert len(label), f'missing label in line: {line}' + assert len(label) != 0, f'missing label in line: {line}' if self.multi_class: assert self.num_classes is not None video_info['label'] = label diff --git a/mmaction/datasets/base.py b/mmaction/datasets/base.py index 62fe34f214..e4f753388c 100644 --- a/mmaction/datasets/base.py +++ b/mmaction/datasets/base.py @@ -90,7 +90,7 @@ def __init__(self, self.video_infos_by_class = self.parse_by_class() class_prob = [] - for k, samples in self.video_infos_by_class.items(): + for _, samples in self.video_infos_by_class.items(): class_prob.append(len(samples) / len(self.video_infos)) class_prob = [x**self.power for x in class_prob] diff --git a/mmaction/datasets/pipelines/augmentations.py b/mmaction/datasets/pipelines/augmentations.py index 839fb115aa..bdeab950cf 100644 --- a/mmaction/datasets/pipelines/augmentations.py +++ b/mmaction/datasets/pipelines/augmentations.py @@ -274,7 +274,8 @@ def __init__(self, transforms): self.aug = iaa.Sequential( [self.imgaug_builder(t) for t in self.transforms]) - def default_transforms(self): + @staticmethod + def default_transforms(): """Default transforms for imgaug. Implement RandAugment by imgaug. @@ -327,8 +328,8 @@ def default_transforms(self): type='Cutout', nb_iterations=1, size=0.2 * cur_level, - squared=True), - ]), + squared=True) + ]) ] def imgaug_builder(self, cfg): @@ -554,14 +555,17 @@ def __init__(self, size, lazy=False): self.size = size self.lazy = lazy - def _crop_kps(self, kps, crop_bbox): + @staticmethod + def _crop_kps(kps, crop_bbox): return kps - crop_bbox[:2] - def _crop_imgs(self, imgs, crop_bbox): + @staticmethod + def _crop_imgs(imgs, crop_bbox): x1, y1, x2, y2 = crop_bbox return [img[y1:y2, x1:x2] for img in imgs] - def _box_crop(self, box, crop_bbox): + @staticmethod + def _box_crop(box, crop_bbox): """Crop the bounding boxes according to the crop_bbox. Args: @@ -1069,10 +1073,12 @@ def _resize_imgs(self, imgs, new_w, new_h): for img in imgs ] - def _resize_kps(self, kps, scale_factor): + @staticmethod + def _resize_kps(kps, scale_factor): return kps * scale_factor - def _box_resize(self, box, scale_factor): + @staticmethod + def _box_resize(box, scale_factor): """Rescale the bounding boxes according to the scale_factor. Args: @@ -1264,7 +1270,8 @@ def _flip_kps(self, kps, kpscores, img_width): kpscores = kpscores[:, :, new_order] return kps, kpscores - def _box_flip(self, box, img_width): + @staticmethod + def _box_flip(box, img_width): """Flip the bounding boxes given the width of the image. Args: diff --git a/mmaction/datasets/pipelines/pose_loading.py b/mmaction/datasets/pipelines/pose_loading.py index 9c19e25427..ae198d42ed 100644 --- a/mmaction/datasets/pipelines/pose_loading.py +++ b/mmaction/datasets/pipelines/pose_loading.py @@ -146,7 +146,8 @@ class PoseDecode: applicable). 
""" - def _load_kp(self, kp, frame_inds): + @staticmethod + def _load_kp(kp, frame_inds): """Load keypoints given frame indices. Args: @@ -156,7 +157,8 @@ def _load_kp(self, kp, frame_inds): return [x[frame_inds].astype(np.float32) for x in kp] - def _load_kpscore(self, kpscore, frame_inds): + @staticmethod + def _load_kpscore(kpscore, frame_inds): """Load keypoint scores given frame indices. Args: @@ -189,7 +191,7 @@ def __call__(self, results): return results def __repr__(self): - repr_str = (f'{self.__class__.__name__}()') + repr_str = f'{self.__class__.__name__}()' return repr_str diff --git a/mmaction/datasets/ssn_dataset.py b/mmaction/datasets/ssn_dataset.py index 76d24324df..8a7f1dd0d2 100644 --- a/mmaction/datasets/ssn_dataset.py +++ b/mmaction/datasets/ssn_dataset.py @@ -767,7 +767,7 @@ def prepare_train_frames(self, idx): out_proposal_labels = [] out_proposal_reg_targets = [] - for idx, proposal in enumerate(results['out_proposals']): + for _, proposal in enumerate(results['out_proposals']): # proposal: [(video_id, SSNInstance), proposal_type] num_frames = proposal[0][1].num_video_frames diff --git a/mmaction/models/backbones/mobilenet_v2.py b/mmaction/models/backbones/mobilenet_v2.py index 5a093fa1fa..5dce73502b 100644 --- a/mmaction/models/backbones/mobilenet_v2.py +++ b/mmaction/models/backbones/mobilenet_v2.py @@ -107,8 +107,8 @@ def forward(self, x): def _inner_forward(x): if self.use_res_connect: return x + self.conv(x) - else: - return self.conv(x) + + return self.conv(x) if self.with_cp and x.requires_grad: out = cp.checkpoint(_inner_forward, x) @@ -275,8 +275,8 @@ def forward(self, x): if len(outs) == 1: return outs[0] - else: - return tuple(outs) + + return tuple(outs) def _freeze_stages(self): if self.frozen_stages >= 0: diff --git a/mmaction/models/backbones/resnet3d.py b/mmaction/models/backbones/resnet3d.py index 4d03933e69..79f98e0f7a 100644 --- a/mmaction/models/backbones/resnet3d.py +++ b/mmaction/models/backbones/resnet3d.py @@ -935,7 +935,7 @@ def __init__(self, self.pretrained2d = pretrained2d self.stage = stage # stage index is 0 based - assert stage >= 0 and stage <= 3 + assert 0 <= stage <= 3 self.base_channels = base_channels self.spatial_stride = spatial_stride diff --git a/mmaction/models/backbones/resnet_audio.py b/mmaction/models/backbones/resnet_audio.py index d4fd9e1ece..63c0ff0d8a 100644 --- a/mmaction/models/backbones/resnet_audio.py +++ b/mmaction/models/backbones/resnet_audio.py @@ -180,7 +180,7 @@ def __init__(self, self.in_channels = in_channels self.base_channels = base_channels self.num_stages = num_stages - assert num_stages >= 1 and num_stages <= 4 + assert 1 <= num_stages <= 4 self.dilations = dilations self.conv1_kernel = conv1_kernel self.conv1_stride = conv1_stride @@ -222,8 +222,8 @@ def __init__(self, self.feat_dim = self.block.expansion * self.base_channels * 2**( len(self.stage_blocks) - 1) - def make_res_layer(self, - block, + @staticmethod + def make_res_layer(block, inplanes, planes, blocks, @@ -241,7 +241,7 @@ def make_res_layer(self, planes (int): Number of channels for the output feature in each block. blocks (int): Number of residual blocks. - strides (Sequence[int]): Strides of residual blocks of each stage. + stride (Sequence[int]): Strides of residual blocks of each stage. Default: (1, 2, 2, 2). dilation (int): Spacing between kernel elements. Default: 1. 
factorize (int | Sequence[int]): Determine whether to factorize diff --git a/mmaction/models/backbones/tanet.py b/mmaction/models/backbones/tanet.py index 15d3487d1a..bb446ea23d 100644 --- a/mmaction/models/backbones/tanet.py +++ b/mmaction/models/backbones/tanet.py @@ -41,32 +41,32 @@ def __init__(self, block, num_segments, tam_cfg=dict()): 'on Bottleneck block.') def forward(self, x): - if isinstance(self.block, Bottleneck): + assert isinstance(self.block, Bottleneck) - def _inner_forward(x): - """Forward wrapper for utilizing checkpoint.""" - identity = x + def _inner_forward(x): + """Forward wrapper for utilizing checkpoint.""" + identity = x - out = self.block.conv1(x) - out = self.tam(out) - out = self.block.conv2(out) - out = self.block.conv3(out) + out = self.block.conv1(x) + out = self.tam(out) + out = self.block.conv2(out) + out = self.block.conv3(out) - if self.block.downsample is not None: - identity = self.block.downsample(x) + if self.block.downsample is not None: + identity = self.block.downsample(x) - out = out + identity + out = out + identity - return out + return out - if self.block.with_cp and x.requires_grad: - out = cp.checkpoint(_inner_forward, x) - else: - out = _inner_forward(x) + if self.block.with_cp and x.requires_grad: + out = cp.checkpoint(_inner_forward, x) + else: + out = _inner_forward(x) - out = self.block.relu(out) + out = self.block.relu(out) - return out + return out @BACKBONES.register_module() diff --git a/mmaction/models/common/lfb.py b/mmaction/models/common/lfb.py index e942dd165e..f54ae36e31 100644 --- a/mmaction/models/common/lfb.py +++ b/mmaction/models/common/lfb.py @@ -14,7 +14,7 @@ lmdb_imported = False -class LFB(object): +class LFB: """Long-Term Feature Bank (LFB). LFB is proposed in `Long-Term Feature Banks for Detailed Video diff --git a/mmaction/models/heads/bbox_head.py b/mmaction/models/heads/bbox_head.py index cd2cc52622..3f3bfeead0 100644 --- a/mmaction/models/heads/bbox_head.py +++ b/mmaction/models/heads/bbox_head.py @@ -122,8 +122,8 @@ def forward(self, x): # We do not predict bbox, so return None return cls_score, None - def get_targets(self, sampling_results, gt_bboxes, gt_labels, - rcnn_train_cfg): + @staticmethod + def get_targets(sampling_results, gt_bboxes, gt_labels, rcnn_train_cfg): pos_proposals = [res.pos_bboxes for res in sampling_results] neg_proposals = [res.neg_bboxes for res in sampling_results] pos_gt_labels = [res.pos_gt_labels for res in sampling_results] @@ -131,7 +131,8 @@ def get_targets(self, sampling_results, gt_bboxes, gt_labels, pos_gt_labels, rcnn_train_cfg) return cls_reg_targets - def recall_prec(self, pred_vec, target_vec): + @staticmethod + def recall_prec(pred_vec, target_vec): """ Args: pred_vec (tensor[N x C]): each element is either 0 or 1 @@ -144,7 +145,7 @@ def recall_prec(self, pred_vec, target_vec): prec = correct.sum(1) / (pred_vec.sum(1) + 1e-6) return recall.mean(), prec.mean() - def multilabel_accuracy(self, pred, target, thr=0.5): + def multi_label_accuracy(self, pred, target, thr=0.5): pred = pred.sigmoid() pred_vec = pred > thr # Target is 0 or 1, so using 0.5 as the borderline is OK @@ -189,7 +190,7 @@ def loss(self, F_loss = self.focal_alpha * (1 - pt)**self.focal_gamma * loss losses['loss_action_cls'] = torch.mean(F_loss) - recall_thr, prec_thr, recall_k, prec_k = self.multilabel_accuracy( + recall_thr, prec_thr, recall_k, prec_k = self.multi_label_accuracy( cls_score, labels, thr=0.5) losses['recall@thr=0.5'] = recall_thr losses['prec@thr=0.5'] = prec_thr diff --git 
a/mmaction/models/heads/lfb_infer_head.py b/mmaction/models/heads/lfb_infer_head.py index 1111b180c5..69bdf8ae2a 100644 --- a/mmaction/models/heads/lfb_infer_head.py +++ b/mmaction/models/heads/lfb_infer_head.py @@ -37,7 +37,7 @@ def __init__(self, temporal_pool_type='avg', spatial_pool_type='max'): super().__init__() - rank, world_size = get_dist_info() + rank, _ = get_dist_info() if rank == 0: if not osp.exists(lfb_prefix_path): print(f'lfb prefix path {lfb_prefix_path} does not exist. ' diff --git a/mmaction/models/heads/misc_head.py b/mmaction/models/heads/misc_head.py index 72cdaab547..66e1b2c3b7 100644 --- a/mmaction/models/heads/misc_head.py +++ b/mmaction/models/heads/misc_head.py @@ -75,7 +75,7 @@ def __init__(self, act_cfg=act_cfg) convs = [] - for i in range(num_convs - 1): + for _ in range(num_convs - 1): conv = ConvModule( out_channels, out_channels, diff --git a/mmaction/utils/decorators.py b/mmaction/utils/decorators.py index 798bd2f4ff..727fa61df3 100644 --- a/mmaction/utils/decorators.py +++ b/mmaction/utils/decorators.py @@ -10,7 +10,6 @@ def decorate(func): def new_func(*args, **kwargs): raise ImportError( f'Please install {module_name} to use {func.__name__}.') - return func(*args, **kwargs) return new_func diff --git a/mmaction/utils/precise_bn.py b/mmaction/utils/precise_bn.py index c01bd4d109..2751b2e736 100644 --- a/mmaction/utils/precise_bn.py +++ b/mmaction/utils/precise_bn.py @@ -30,10 +30,7 @@ def is_parallel_module(module): """ parallels = (DataParallel, DistributedDataParallel, MMDistributedDataParallel) - if isinstance(module, parallels): - return True - else: - return False + return bool(isinstance(module, parallels)) @torch.no_grad() diff --git a/tests/test_data/test_datasets/test_ava_dataset.py b/tests/test_data/test_datasets/test_ava_dataset.py index b2742e501e..270b40fa42 100644 --- a/tests/test_data/test_datasets/test_ava_dataset.py +++ b/tests/test_data/test_datasets/test_ava_dataset.py @@ -155,7 +155,8 @@ def test_ava_pipeline(self): assert result['timestamp_start'] == 900 assert result['timestamp_end'] == 1800 - def test_ava_evaluate(self): + @staticmethod + def test_ava_evaluate(): data_prefix = osp.normpath( osp.join(osp.dirname(__file__), '../../data', 'eval_detection')) ann_file = osp.join(data_prefix, 'gt.csv') diff --git a/tests/test_data/test_formating.py b/tests/test_data/test_formating.py index b7f21f7638..c840a6c20a 100644 --- a/tests/test_data/test_formating.py +++ b/tests/test_data/test_formating.py @@ -140,7 +140,7 @@ def test_collect(): results = collect(inputs) assert sorted(list(results.keys())) == sorted( ['imgs', 'label', 'img_metas']) - for k in results.keys(): + for k in results: assert isinstance(results[k], list) diff --git a/tests/test_data/test_pipelines/test_augmentations/base.py b/tests/test_data/test_pipelines/test_augmentations/base.py index f007c01d9d..24c8a9f62f 100644 --- a/tests/test_data/test_pipelines/test_augmentations/base.py +++ b/tests/test_data/test_pipelines/test_augmentations/base.py @@ -19,7 +19,7 @@ def check_single_crop(origin_imgs, result_imgs, result_bbox): if result_bbox.ndim == 1: return check_single_crop(origin_imgs, result_imgs, result_bbox) - elif result_bbox.ndim == 2: + if result_bbox.ndim == 2: num_batch = len(origin_imgs) for i, bbox in enumerate(result_bbox): if num_crops == 10: @@ -44,7 +44,7 @@ def check_single_crop(origin_imgs, result_imgs, result_bbox): def check_flip(origin_imgs, result_imgs, flip_type): """Check if the origin_imgs are flipped correctly into result_imgs in different 
flip_types.""" - n, h, w, c = np.shape(origin_imgs) + n, _, _, _ = np.shape(origin_imgs) if flip_type == 'horizontal': for i in range(n): if np.any(result_imgs[i] != np.fliplr(origin_imgs[i])): diff --git a/tests/test_data/test_pipelines/test_augmentations/test_audio.py b/tests/test_data/test_pipelines/test_augmentations/test_audio.py index 0a1e6e13a7..e78b328ffa 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_audio.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_audio.py @@ -7,7 +7,8 @@ class TestAudio: - def test_audio_amplify(self): + @staticmethod + def test_audio_amplify(): target_keys = ['audios', 'amplify_ratio'] with pytest.raises(TypeError): # ratio should be float @@ -21,7 +22,8 @@ def test_audio_amplify(self): assert repr(amplifier) == (f'{amplifier.__class__.__name__}' f'(ratio={amplifier.ratio})') - def test_melspectrogram(self): + @staticmethod + def test_melspectrogram(): target_keys = ['audios'] with pytest.raises(TypeError): # ratio should be float diff --git a/tests/test_data/test_pipelines/test_augmentations/test_color.py b/tests/test_data/test_pipelines/test_augmentations/test_color.py index f8d267ba01..72b3ccd4e6 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_color.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_color.py @@ -7,7 +7,8 @@ class TestColor: - def test_color_jitter(self): + @staticmethod + def test_color_jitter(): imgs = list( np.random.randint(0, 255, size=(3, 112, 112, 3), dtype=np.uint8)) results = dict(imgs=imgs) diff --git a/tests/test_data/test_pipelines/test_augmentations/test_crop.py b/tests/test_data/test_pipelines/test_augmentations/test_crop.py index 872e4bb5b5..4c7c6c9be8 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_crop.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_crop.py @@ -10,7 +10,8 @@ class TestCrops: - def test_random_crop(self): + @staticmethod + def test_random_crop(): with pytest.raises(TypeError): # size must be an int RandomCrop(size=(112, 112)) @@ -63,7 +64,8 @@ def test_random_crop(self): assert repr(random_crop) == (f'{random_crop.__class__.__name__}' f'(size={224}, lazy={False})') - def test_random_resized_crop(self): + @staticmethod + def test_random_resized_crop(): with pytest.raises(TypeError): # area_range must be a tuple of float RandomResizedCrop(area_range=0.5) @@ -116,7 +118,8 @@ def test_random_resized_crop(self): h, w = random_crop_result['img_shape'] assert h == w == 256 - def test_multi_scale_crop(self): + @staticmethod + def test_multi_scale_crop(): with pytest.raises(TypeError): # input_size must be int or tuple of int MultiScaleCrop(0.5) @@ -201,7 +204,8 @@ def test_multi_scale_crop(self): f'max_wh_scale_gap={0}, random_crop={True}, ' f'num_fixed_crops=5, lazy={False})') - def test_center_crop(self): + @staticmethod + def test_center_crop(): with pytest.raises(TypeError): # crop_size must be int or tuple of int CenterCrop(0.5) @@ -237,7 +241,8 @@ def test_center_crop(self): assert repr(center_crop) == (f'{center_crop.__class__.__name__}' f'(crop_size={(224, 224)}, lazy={False})') - def test_three_crop(self): + @staticmethod + def test_three_crop(): with pytest.raises(TypeError): # crop_size must be int or tuple of int ThreeCrop(0.5) @@ -275,7 +280,8 @@ def test_three_crop(self): assert repr(three_crop) == (f'{three_crop.__class__.__name__}' f'(crop_size={(224, 224)})') - def test_ten_crop(self): + @staticmethod + def test_ten_crop(): with pytest.raises(TypeError): # crop_size must be int or tuple of 
int TenCrop(0.5) @@ -302,7 +308,8 @@ def test_ten_crop(self): assert repr(ten_crop) == (f'{ten_crop.__class__.__name__}' f'(crop_size={(224, 224)})') - def test_multi_group_crop(self): + @staticmethod + def test_multi_group_crop(): with pytest.raises(TypeError): # crop_size must be int or tuple of int MultiGroupCrop(0.5, 1) diff --git a/tests/test_data/test_pipelines/test_augmentations/test_flip.py b/tests/test_data/test_pipelines/test_augmentations/test_flip.py index ee5d9f5ed0..b11409c1d8 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_flip.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_flip.py @@ -12,7 +12,8 @@ class TestFlip: - def test_flip(self): + @staticmethod + def test_flip(): with pytest.raises(ValueError): # direction must be in ['horizontal', 'vertical'] Flip(direction='vertically') diff --git a/tests/test_data/test_pipelines/test_augmentations/test_imgaug.py b/tests/test_data/test_pipelines/test_augmentations/test_imgaug.py index 58a1cb82de..9ef533f1b7 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_imgaug.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_imgaug.py @@ -9,7 +9,8 @@ class TestAugumentations: - def test_imgaug(self): + @staticmethod + def test_imgaug(): with pytest.raises(ValueError): # transforms only support one string, 'default' diff --git a/tests/test_data/test_pipelines/test_augmentations/test_lazy.py b/tests/test_data/test_pipelines/test_augmentations/test_lazy.py index b021e3e0bf..8031501b3b 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_lazy.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_lazy.py @@ -10,7 +10,8 @@ class TestLazy: - def test_init_lazy(self): + @staticmethod + def test_init_lazy(): from mmaction.datasets.pipelines.augmentations import \ _init_lazy_if_proper # noqa: E501 with pytest.raises(AssertionError): @@ -41,7 +42,8 @@ def test_init_lazy(self): assert assert_dict_has_keys(result, ['img_shape']) assert 'lazy' not in result - def test_random_crop_lazy(self): + @staticmethod + def test_random_crop_lazy(): with pytest.raises(TypeError): # size must be an int RandomCrop(size=(112, 112), lazy=True) @@ -99,7 +101,8 @@ def test_random_crop_lazy(self): assert repr(random_crop) == (f'{random_crop.__class__.__name__}' f'(size={224}, lazy={True})') - def test_random_resized_crop_lazy(self): + @staticmethod + def test_random_resized_crop_lazy(): target_keys = ['imgs', 'crop_bbox', 'img_shape', 'lazy'] # There will be a slight difference because of rounding @@ -147,7 +150,8 @@ def test_random_resized_crop_lazy(self): h, w = random_crop_result['img_shape'] assert h == w == 256 - def test_multi_scale_crop_lazy(self): + @staticmethod + def test_multi_scale_crop_lazy(): with pytest.raises(TypeError): # input_size must be int or tuple of int MultiScaleCrop(0.5, lazy=True) @@ -237,7 +241,8 @@ def test_multi_scale_crop_lazy(self): f'max_wh_scale_gap={0}, random_crop={True}, ' f'num_fixed_crops={5}, lazy={True})') - def test_resize_lazy(self): + @staticmethod + def test_resize_lazy(): with pytest.raises(ValueError): # scale must be positive Resize(-0.5, lazy=True) @@ -290,7 +295,8 @@ def test_resize_lazy(self): f'(scale={(341, 256)}, keep_ratio={False}, ' + f'interpolation=bilinear, lazy={True})') - def test_flip_lazy(self): + @staticmethod + def test_flip_lazy(): with pytest.raises(ValueError): Flip(direction='vertically', lazy=True) @@ -341,7 +347,8 @@ def test_flip_lazy(self): f'(flip_ratio={1}, direction=vertical, ' f'flip_label_map={None}, 
lazy={True})') - def test_center_crop_lazy(self): + @staticmethod + def test_center_crop_lazy(): with pytest.raises(TypeError): # crop_size must be int or tuple of int CenterCrop(0.5) diff --git a/tests/test_data/test_pipelines/test_augmentations/test_misc.py b/tests/test_data/test_pipelines/test_augmentations/test_misc.py index 4c1db1c087..9710624de9 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_misc.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_misc.py @@ -4,13 +4,15 @@ class TestQuadrupleOps: - def test_combine_quadruple(self): + @staticmethod + def test_combine_quadruple(): a = (0.1, 0.1, 0.5, 0.5) b = (0.3, 0.3, 0.7, 0.7) res = _combine_quadruple(a, b) assert res == (0.25, 0.25, 0.35, 0.35) - def test_flip_quadruple(self): + @staticmethod + def test_flip_quadruple(): a = (0.1, 0.1, 0.5, 0.5) res = _flip_quadruple(a) assert res == (0.4, 0.1, 0.5, 0.5) diff --git a/tests/test_data/test_pipelines/test_augmentations/test_normalization.py b/tests/test_data/test_pipelines/test_augmentations/test_normalization.py index 1548144aa8..f28f7607ce 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_normalization.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_normalization.py @@ -8,7 +8,8 @@ class TestNormalization: - def test_normalize(self): + @staticmethod + def test_normalize(): with pytest.raises(TypeError): # mean must be list, tuple or np.ndarray Normalize( diff --git a/tests/test_data/test_pipelines/test_augmentations/test_transform.py b/tests/test_data/test_pipelines/test_augmentations/test_transform.py index 78fc9c8ec6..adb0d94618 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_transform.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_transform.py @@ -11,7 +11,8 @@ class TestTransform: - def test_random_rescale(self): + @staticmethod + def test_random_rescale(): with pytest.raises(AssertionError): # scale_range must be a tuple of int RandomRescale(scale_range=224) @@ -48,7 +49,8 @@ def test_random_rescale(self): f'(scale_range={(300, 400)}, ' 'interpolation=bilinear)') - def test_resize(self): + @staticmethod + def test_resize(): with pytest.raises(ValueError): # scale must be positive Resize(-0.5) @@ -111,7 +113,8 @@ def test_resize(self): f'(scale={(341, 256)}, keep_ratio={False}, ' + f'interpolation=bilinear, lazy={False})') - def test_random_scale(self): + @staticmethod + def test_random_scale(): scales = ((200, 64), (250, 80)) with pytest.raises(ValueError): RandomScale(scales, 'unsupport') @@ -147,7 +150,8 @@ def test_random_scale(self): class TestPoseCompact: - def test_pose_compact(self): + @staticmethod + def test_pose_compact(): results = {} results['img_shape'] = (100, 100) fake_kp = np.zeros([1, 4, 2, 2]) diff --git a/tests/test_data/test_pipelines/test_loadings/test_decode.py b/tests/test_data/test_pipelines/test_loadings/test_decode.py index d0a6451614..ee7291c001 100644 --- a/tests/test_data/test_pipelines/test_loadings/test_decode.py +++ b/tests/test_data/test_pipelines/test_loadings/test_decode.py @@ -259,7 +259,8 @@ def test_opencv_decode(self): assert np.shape(opencv_decode_result['imgs']) == (len( video_result['frame_inds']), 256, 340, 3) - def test_rawframe_selector(self): + @staticmethod + def test_rawframe_selector(): with pytest.warns(UserWarning): FrameSelector(io_backend='disk') diff --git a/tests/test_data/test_pipelines/test_loadings/test_load.py b/tests/test_data/test_pipelines/test_loadings/test_load.py index 097b146899..5d30242713 100644 --- 
a/tests/test_data/test_pipelines/test_loadings/test_load.py +++ b/tests/test_data/test_pipelines/test_loadings/test_load.py @@ -102,7 +102,7 @@ def test_load_proposals(self): self.bsp_feature_dir) load_proposals_result = load_proposals(action_result) assert assert_dict_has_keys(load_proposals_result, target_keys) - assert (load_proposals_result['bsp_feature'].shape[0] == 5) + assert load_proposals_result['bsp_feature'].shape[0] == 5 assert load_proposals_result['tmin'].shape == (5, ) assert_array_almost_equal( load_proposals_result['tmin'], np.arange(0.1, 0.6, 0.1), decimal=4) diff --git a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py index 1b1555335e..4369d7d20e 100644 --- a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py +++ b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py @@ -12,7 +12,8 @@ class TestPoseLoading: - def test_uniform_sample_frames(self): + @staticmethod + def test_uniform_sample_frames(): results = dict(total_frames=64, start_index=0) sampling = UniformSampleFrames( clip_len=8, num_clips=1, test_mode=True, seed=0) @@ -96,7 +97,8 @@ def test_uniform_sample_frames(self): assert sampling_results['num_clips'] == 1 assert len(sampling_results['frame_inds']) == 8 - def test_pose_decode(self): + @staticmethod + def test_pose_decode(): kp = np.random.random([1, 16, 17, 2]) kpscore = np.random.random([1, 16, 17]) frame_inds = np.array([2, 4, 6, 8, 10]) @@ -116,7 +118,8 @@ def test_pose_decode(self): assert_array_almost_equal(decode_results['keypoint'], kp) assert_array_almost_equal(decode_results['keypoint_score'], kpscore) - def test_load_kinetics_pose(self): + @staticmethod + def test_load_kinetics_pose(): def get_mode(arr): cnt = defaultdict(lambda: 0) @@ -204,7 +207,8 @@ def get_mode(arr): assert np.max(return_results['keypoint']) <= 1 assert num_frame == len(set(frame_inds[anno_inds])) - def test_generate_pose_target(self): + @staticmethod + def test_generate_pose_target(): img_shape = (64, 64) kp = np.array([[[[24, 24], [40, 40], [24, 40]]]]) kpscore = np.array([[[1., 1., 1.]]]) diff --git a/tests/test_data/test_sampler.py b/tests/test_data/test_sampler.py index 2e7f2b1524..8cba48455d 100644 --- a/tests/test_data/test_sampler.py +++ b/tests/test_data/test_sampler.py @@ -25,7 +25,7 @@ def test_distributed_sampler(): sampler = DistributedSampler(dataset, num_replicas=1, rank=0) data_loader = DataLoader(dataset, batch_size=4, sampler=sampler) batches = [] - for i, data in enumerate(data_loader): + for _, data in enumerate(data_loader): batches.append(data) assert len(batches) == 25 @@ -58,7 +58,7 @@ def test_class_specific_distributed_sampler(): dataset, num_replicas=1, rank=0, dynamic_length=True) data_loader = DataLoader(dataset, batch_size=4, sampler=sampler) batches = [] - for i, data in enumerate(data_loader): + for _, data in enumerate(data_loader): batches.append(data) assert len(batches) == 50 diff --git a/tests/test_models/base.py b/tests/test_models/base.py index 33b4339ba4..1e6b475f43 100644 --- a/tests/test_models/base.py +++ b/tests/test_models/base.py @@ -39,9 +39,9 @@ def generate_recognizer_demo_inputs( Default:'2D' """ if len(input_shape) == 5: - (N, L, C, H, W) = input_shape + (N, L, _, _, _) = input_shape elif len(input_shape) == 6: - (N, M, C, L, H, W) = input_shape + (N, M, _, L, _, _) = input_shape imgs = np.random.random(input_shape) @@ -98,8 +98,8 @@ def random_label(n): gt_bboxes=gt_bboxes, gt_labels=gt_labels, img_metas=img_metas) 
- else: - return dict(img=[img], proposals=[proposals], img_metas=[img_metas]) + + return dict(img=[img], proposals=[proposals], img_metas=[img_metas]) def generate_gradcam_inputs(input_shape=(1, 3, 3, 224, 224), model_type='2D'): diff --git a/tests/test_runtime/test_apis_test.py b/tests/test_runtime/test_apis_test.py index d3e5dcc947..eb1ea1ecec 100644 --- a/tests/test_runtime/test_apis_test.py +++ b/tests/test_runtime/test_apis_test.py @@ -29,7 +29,7 @@ def __init__(self): self.conv = nn.Conv2d(3, 3, 1) self.cnt = 0 - def forward(self, return_loss, **kwargs): + def forward(self, *args, **kwargs): result = [self.cnt] self.cnt += 1 return result diff --git a/tests/test_runtime/test_eval_hook.py b/tests/test_runtime/test_eval_hook.py index 4af9b3c498..c95c88881d 100644 --- a/tests/test_runtime/test_eval_hook.py +++ b/tests/test_runtime/test_eval_hook.py @@ -58,10 +58,12 @@ def __init__(self): super().__init__() self.linear = nn.Linear(2, 1) - def forward(self, x, **kwargs): + @staticmethod + def forward(x, **kwargs): return x - def train_step(self, data_batch, optimizer, **kwargs): + @staticmethod + def train_step(data_batch, optimizer, **kwargs): if not isinstance(data_batch, dict): data_batch = dict(x=data_batch) return data_batch diff --git a/tests/test_runtime/test_precise_bn.py b/tests/test_runtime/test_precise_bn.py index 3dcfdea592..aaa29b6681 100644 --- a/tests/test_runtime/test_precise_bn.py +++ b/tests/test_runtime/test_precise_bn.py @@ -52,7 +52,8 @@ def __init__(self): def forward(self, imgs, return_loss=False): return self.bn(self.conv(imgs)) - def train_step(self, data_batch, optimizer, **kwargs): + @staticmethod + def train_step(data_batch, optimizer, **kwargs): outputs = { 'loss': 0.5, 'log_vars': { @@ -171,7 +172,7 @@ def test_precise_bn(): runner.register_hook(precise_bn_hook) runner.run([loader], [('train', 1)], 1) imgs_list = list() - for i, data in enumerate(loader): + for _, data in enumerate(loader): imgs_list.append(np.array(data['imgs'])) mean = np.mean([np.mean(batch) for batch in imgs_list]) # bassel correction used in Pytorch, therefore ddof=1 diff --git a/tests/test_runtime/test_train.py b/tests/test_runtime/test_train.py index 28d16cc1ed..509fe33eac 100644 --- a/tests/test_runtime/test_train.py +++ b/tests/test_runtime/test_train.py @@ -18,7 +18,8 @@ class ExampleDataset(Dataset): def __init__(self, test_mode=False): self.test_mode = test_mode - def evaluate(self, results, logger=None): + @staticmethod + def evaluate(results, logger=None): eval_results = OrderedDict() eval_results['acc'] = 1 return eval_results diff --git a/tests/test_utils/__init__.py b/tests/test_utils/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tools/data/activitynet/activitynet_feature_postprocessing.py b/tools/data/activitynet/activitynet_feature_postprocessing.py index 6b11e2b6d3..a822b3d1bc 100644 --- a/tools/data/activitynet/activitynet_feature_postprocessing.py +++ b/tools/data/activitynet/activitynet_feature_postprocessing.py @@ -44,7 +44,7 @@ def pool_feature(data, num_proposals=100, num_sample_bins=3, pool_type='mean'): anchor_size = (end - start) / num_proposals ptr = start feature = [] - for i in range(num_proposals): + for _ in range(num_proposals): x_new = [ ptr + i / num_sample_bins * anchor_size for i in range(num_sample_bins) diff --git a/tools/data/activitynet/process_annotations.py b/tools/data/activitynet/process_annotations.py index 6309ed1fda..0bd87ebe9b 100644 --- a/tools/data/activitynet/process_annotations.py +++ 
b/tools/data/activitynet/process_annotations.py @@ -24,12 +24,12 @@ def load_json(file): video_dict_test = {} video_dict_full = {} -for i in range(len(video_record)): - video_name = video_record[i][0] +for _, video_item in enumerate(video_record): + video_name = video_item[0] video_info = anno_database[video_name] - video_subset = video_record[i][5] - video_info['fps'] = video_record[i][3].astype(np.float) - video_info['rfps'] = video_record[i][4].astype(np.float) + video_subset = video_item[5] + video_info['fps'] = video_item[3].astype(np.float) + video_info['rfps'] = video_item[4].astype(np.float) video_dict_full[video_name] = video_info if video_subset == 'training': video_dict_train[video_name] = video_info diff --git a/tools/data/activitynet/tsn_feature_extraction.py b/tools/data/activitynet/tsn_feature_extraction.py index 858992877f..5e213cdc15 100644 --- a/tools/data/activitynet/tsn_feature_extraction.py +++ b/tools/data/activitynet/tsn_feature_extraction.py @@ -102,7 +102,7 @@ def main(): os.system(f'mkdir -p {args.output_prefix}') for item in data: - frame_dir, length, label = item.split() + frame_dir, length, _ = item.split() output_file = osp.basename(frame_dir) + '.pkl' frame_dir = osp.join(args.data_prefix, frame_dir) output_file = osp.join(args.output_prefix, output_file) diff --git a/tools/data/build_audio_features.py b/tools/data/build_audio_features.py index f388cc0036..3fcc410eed 100644 --- a/tools/data/build_audio_features.py +++ b/tools/data/build_audio_features.py @@ -74,7 +74,8 @@ def load_wav(self, path): """Load an audio file into numpy array.""" return librosa.core.load(path, sr=self.sample_rate)[0] - def audio_normalize(self, samples, desired_rms=0.1, eps=1e-4): + @staticmethod + def audio_normalize(samples, desired_rms=0.1, eps=1e-4): """RMS normalize the audio data.""" rms = np.maximum(eps, np.sqrt(np.mean(samples**2))) samples = samples * (desired_rms / rms) @@ -104,8 +105,8 @@ def generate_spectrogram_magphase(self, audio, with_phase=False): if with_phase: spectro_phase = np.expand_dims(np.angle(spectro_phase), axis=0) return spectro_mag, spectro_phase - else: - return spectro_mag + + return spectro_mag def save_wav(self, wav, path): """Save the wav to disk.""" @@ -146,7 +147,8 @@ def adjust_time_resolution(self, quantized, mel): return quantized[start:end], mel[start:end, :] - def start_and_end_indices(self, quantized, silence_threshold=2): + @staticmethod + def start_and_end_indices(quantized, silence_threshold=2): """Trim the audio file when reaches the silence threshold.""" for start in range(quantized.size): if abs(quantized[start] - 127) > silence_threshold: @@ -183,7 +185,8 @@ def _lws_processor(self): """ return lws.lws(self.fft_size, self.get_hop_size(), mode='speech') - def lws_num_frames(self, length, fsize, fshift): + @staticmethod + def lws_num_frames(length, fsize, fshift): """Compute number of time frames of lws spectrogram. Please refer to `_. 
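The amplitude/decibel helpers refactored in this file form an inverse pair, which can be sanity-checked in isolation. A minimal sketch with made-up sample values, ignoring the ``min_level`` floor that ``_amp_to_db`` applies:

```python
import numpy as np

# _amp_to_db computes 20 * log10(x) (above a configurable floor);
# _db_to_amp, now a staticmethod, inverts it via 10 ** (x * 0.05).
amp = np.array([0.5, 1.0, 2.0])
db = 20 * np.log10(amp)
assert np.allclose(np.power(10.0, db * 0.05), amp)
```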
@@ -232,7 +235,8 @@ def _amp_to_db(self, x): min_level = np.exp(self.min_level_db / 20 * np.log(10)) return 20 * np.log10(np.maximum(min_level, x)) - def _db_to_amp(self, x): + @staticmethod + def _db_to_amp(x): return np.power(10.0, x * 0.05) def _normalize(self, S): diff --git a/tools/data/build_file_list.py b/tools/data/build_file_list.py index cf72692477..340d61d3ff 100644 --- a/tools/data/build_file_list.py +++ b/tools/data/build_file_list.py @@ -114,7 +114,7 @@ def build_list(split): for item in split: if item[0] not in frame_info: continue - elif frame_info[item[0]][1] > 0: + if frame_info[item[0]][1] > 0: # rawframes rgb_cnt = frame_info[item[0]][1] flow_cnt = frame_info[item[0]][2] diff --git a/tools/data/build_rawframes.py b/tools/data/build_rawframes.py index bfb22f8977..6f39fc1520 100644 --- a/tools/data/build_rawframes.py +++ b/tools/data/build_rawframes.py @@ -35,16 +35,18 @@ def extract_frame(vid_item): out_full_path = osp.join(out_full_path, video_name) vr = mmcv.VideoReader(full_path) - for i in range(len(vr)): - if vr[i] is not None: - w, h, c = np.shape(vr[i]) + # for i in range(len(vr)): + for i, vr_frame in enumerate(vr): + if vr_frame is not None: + w, h, _ = np.shape(vr_frame) if args.new_short == 0: if args.new_width == 0 or args.new_height == 0: # Keep original shape - out_img = vr[i] + out_img = vr_frame else: - out_img = mmcv.imresize(vr[i], (args.new_width, - args.new_height)) + out_img = mmcv.imresize(vr_frame, + (args.new_width, + args.new_height)) else: if min(h, w) == h: new_h = args.new_short @@ -52,7 +54,7 @@ def extract_frame(vid_item): else: new_w = args.new_short new_h = int((new_w / w) * h) - out_img = mmcv.imresize(vr[i], (new_h, new_w)) + out_img = mmcv.imresize(vr_frame, (new_h, new_w)) mmcv.imwrite(out_img, f'{out_full_path}/img_{i + 1:05d}.jpg') else: @@ -229,7 +231,7 @@ def parse_args(): osp.basename(osp.dirname(p)), osp.basename(p)), fullpath_list)) elif args.level == 1: - vid_list = list(map(lambda p: osp.basename(p), fullpath_list)) + vid_list = list(map(osp.basename, fullpath_list)) pool = Pool(args.num_worker) pool.map( diff --git a/tools/data/build_videos.py b/tools/data/build_videos.py index e6e1f78f12..b9f00a868d 100644 --- a/tools/data/build_videos.py +++ b/tools/data/build_videos.py @@ -119,7 +119,7 @@ def parse_args(): osp.basename(osp.dirname(p)), osp.basename(p)), fullpath_list)) elif args.level == 1: - frame_dir_list = list(map(lambda p: osp.basename(p), fullpath_list)) + frame_dir_list = list(map(osp.basename, fullpath_list)) pool = Pool(args.num_worker) pool.map(encode_video, diff --git a/tools/data/hvu/download.py b/tools/data/hvu/download.py index 19fd8ad0c4..0884bb2fbc 100644 --- a/tools/data/hvu/download.py +++ b/tools/data/hvu/download.py @@ -18,7 +18,7 @@ args = None -def create_video_folders(dataset, output_dir, tmp_dir): +def create_video_folders(output_dir, tmp_dir): if not os.path.exists(output_dir): os.makedirs(output_dir) if not os.path.exists(tmp_dir): @@ -154,7 +154,7 @@ def main(input_csv, dataset = parse_hvu_annotations(input_csv) # Creates folders where videos will be saved later. - create_video_folders(dataset, output_dir, tmp_dir) + create_video_folders(output_dir, tmp_dir) # Download all clips. if num_jobs == 1: diff --git a/tools/data/kinetics/download.py b/tools/data/kinetics/download.py index d1281ee79d..27aecfefa1 100644 --- a/tools/data/kinetics/download.py +++ b/tools/data/kinetics/download.py @@ -185,7 +185,7 @@ def main(input_csv, # Download all clips. 
if num_jobs == 1: status_list = [] - for i, row in dataset.iterrows(): + for _, row in dataset.iterrows(): status_list.append( download_clip_wrapper(row, label_to_dir, trim_format, tmp_dir)) else: diff --git a/tools/data/parse_file_list.py b/tools/data/parse_file_list.py index 3adb3366bd..e22009479b 100644 --- a/tools/data/parse_file_list.py +++ b/tools/data/parse_file_list.py @@ -161,9 +161,9 @@ def line_to_map(line, test_mode=False): osp.basename(osp.dirname(video)), osp.basename(video)) if test_mode: return video - else: - label = class_mapping[items[1]] - return video, label + + label = class_mapping[items[1]] + return video, label with open(train_file, 'r') as fin: train_list = [line_to_map(x) for x in fin] @@ -210,9 +210,9 @@ def line_to_map(line, test_mode=False): osp.basename(osp.dirname(video)), osp.basename(video)) if test_mode: return video - else: - label = class_mapping[items[1]] - return video, label + + label = class_mapping[items[1]] + return video, label with open(train_file, 'r') as fin: train_list = [line_to_map(x) for x in fin] @@ -257,11 +257,11 @@ def line_to_map(item, test_mode=False): osp.basename(osp.dirname(video)), osp.basename(video)) if test_mode: return video - else: - template = item['template'].replace('[', '') - template = template.replace(']', '') - label = int(class_mapping[template]) - return video, label + + template = item['template'].replace('[', '') + template = template.replace(']', '') + label = int(class_mapping[template]) + return video, label with open(train_file, 'r') as fin: items = json.loads(fin.read()) @@ -331,8 +331,8 @@ def convert_label(s, keep_whitespaces=False): """ if not keep_whitespaces: return s.replace('"', '').replace(' ', '_') - else: - return s.replace('"', '') + + return s.replace('"', '') def line_to_map(x, test=False): """A function to map line string to video and label. 
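``convert_label`` above is a small pure function, and the refactor does not change its behavior, which is easy to illustrate (the input strings here are hypothetical):

```python
# Quotes are always stripped; whitespace becomes '_' unless explicitly kept.
assert convert_label('"riding horse"') == 'riding_horse'
assert convert_label('"riding horse"', keep_whitespaces=True) == 'riding horse'
```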
@@ -351,14 +351,14 @@ def line_to_map(x, test=False): video = f'{x[1]}_{int(float(x[2])):06d}_{int(float(x[3])):06d}' label = -1 # label unknown return video, label + + video = f'{x[1]}_{int(float(x[2])):06d}_{int(float(x[3])):06d}' + if level == 2: + video = f'{convert_label(x[0])}/{video}' else: - video = f'{x[1]}_{int(float(x[2])):06d}_{int(float(x[3])):06d}' - if level == 2: - video = f'{convert_label(x[0])}/{video}' - else: - assert level == 1 - label = class_mapping[convert_label(x[0])] - return video, label + assert level == 1 + label = class_mapping[convert_label(x[0])] + return video, label train_file = f'data/{dataset}/annotations/kinetics_train.csv' val_file = f'data/{dataset}/annotations/kinetics_val.csv' @@ -368,7 +368,7 @@ def line_to_map(x, test=False): # skip the first line next(csv_reader) - labels_sorted = sorted(set([convert_label(row[0]) for row in csv_reader])) + labels_sorted = sorted({convert_label(row[0]) for row in csv_reader}) class_mapping = {label: i for i, label in enumerate(labels_sorted)} csv_reader = csv.reader(open(train_file)) diff --git a/tools/data/resize_video.py b/tools/data/resize_video.py index 2bfe4f55eb..537cd45e9b 100644 --- a/tools/data/resize_video.py +++ b/tools/data/resize_video.py @@ -105,6 +105,6 @@ def parse_args(): osp.basename(osp.dirname(p)), osp.basename(p)), fullpath_list)) elif args.level == 1: - vid_list = list(map(lambda p: osp.basename(p), fullpath_list)) + vid_list = list(map(osp.basename, fullpath_list)) pool = Pool(args.num_worker) pool.map(resize_videos, zip(fullpath_list, vid_list)) diff --git a/tools/train.py b/tools/train.py index 44e34db4ca..1bd5a0d6be 100644 --- a/tools/train.py +++ b/tools/train.py @@ -161,7 +161,7 @@ def main(): if cfg.omnisource: # If omnisource flag is set, cfg.data.train should be a list - assert type(cfg.data.train) is list + assert isinstance(cfg.data.train, list) datasets = [build_dataset(dataset) for dataset in cfg.data.train] else: datasets = [build_dataset(cfg.data.train)] From 1f9dedcb8ce556324c4dbc6ed91756e895db6183 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Fri, 25 Jun 2021 20:10:09 +0800 Subject: [PATCH 171/414] Support efficient video decoder and PIMS decoder (#946) * update video decoder * update unittest and docstring * add requirements * docstring * Update test_decode.py --- mmaction/datasets/pipelines/__init__.py | 11 +- mmaction/datasets/pipelines/loading.py | 198 +++++++++++++++--- requirements/optional.txt | 1 + .../test_loadings/test_decode.py | 65 +++++- 4 files changed, 234 insertions(+), 41 deletions(-) diff --git a/mmaction/datasets/pipelines/__init__.py b/mmaction/datasets/pipelines/__init__.py index 8a15583ce9..f6f94d6cd5 100644 --- a/mmaction/datasets/pipelines/__init__.py +++ b/mmaction/datasets/pipelines/__init__.py @@ -12,10 +12,10 @@ DenseSampleFrames, FrameSelector, GenerateLocalizationLabels, ImageDecode, LoadAudioFeature, LoadHVULabel, LoadLocalizationFeature, - LoadProposals, OpenCVDecode, OpenCVInit, PyAVDecode, - PyAVDecodeMotionVector, PyAVInit, RawFrameDecode, - SampleAVAFrames, SampleFrames, SampleProposalFrames, - UntrimmedSampleFrames) + LoadProposals, OpenCVDecode, OpenCVInit, PIMSDecode, + PIMSInit, PyAVDecode, PyAVDecodeMotionVector, PyAVInit, + RawFrameDecode, SampleAVAFrames, SampleFrames, + SampleProposalFrames, UntrimmedSampleFrames) from .pose_loading import (GeneratePoseTarget, LoadKineticsPose, PoseDecode, UniformSampleFrames) @@ -34,5 +34,6 @@ 'AudioDecodeInit', 'EntityBoxFlip', 'EntityBoxCrop', 'EntityBoxRescale', 
'RandomScale', 'ImageDecode', 'BuildPseudoClip', 'RandomRescale', 'PyAVDecodeMotionVector', 'Rename', 'Imgaug', 'UniformSampleFrames', - 'PoseDecode', 'LoadKineticsPose', 'GeneratePoseTarget' + 'PoseDecode', 'LoadKineticsPose', 'GeneratePoseTarget', 'PIMSInit', + 'PIMSDecode' ] diff --git a/mmaction/datasets/pipelines/loading.py b/mmaction/datasets/pipelines/loading.py index 69e1d38e56..3ce3fb70b8 100644 --- a/mmaction/datasets/pipelines/loading.py +++ b/mmaction/datasets/pipelines/loading.py @@ -756,13 +756,13 @@ def __call__(self, results): return results def __repr__(self): - repr_str = f'{self.__class__.__name__}(io_backend=disk)' + repr_str = f'{self.__class__.__name__}(io_backend={self.io_backend})' return repr_str @PIPELINES.register_module() class PyAVDecode: - """Using pyav to decode the video. + """Using PyAV to decode the video. PyAV: https://github.com/mikeboers/PyAV @@ -772,10 +772,26 @@ class PyAVDecode: Args: multi_thread (bool): If set to True, it will apply multi thread processing. Default: False. + mode (str): Decoding mode. Options are 'accurate' and 'efficient'. + If set to 'accurate', it will decode videos into accurate frames. + If set to 'efficient', it will adopt fast seeking but only return + the nearest key frames, which may be duplicated and inaccurate, + and more suitable for large scene-based video datasets. + Default: 'accurate'. """ - def __init__(self, multi_thread=False): + def __init__(self, multi_thread=False, mode='accurate'): self.multi_thread = multi_thread + self.mode = mode + assert mode in ['accurate', 'efficient'] + + @staticmethod + def frame_generator(container, stream): + """Frame generator for PyAV.""" + for packet in container.demux(stream): + for frame in packet.decode(): + if frame: + return frame.to_rgb().to_ndarray() def __call__(self, results): """Perform the PyAV decoding. 
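A minimal usage sketch of the two decoding modes wired in above; the file name and sampling stride are made up, and error handling is omitted:

```python
import numpy as np
from mmaction.datasets.pipelines import PyAVDecode, PyAVInit

results = PyAVInit()(dict(filename='demo.mp4'))  # hypothetical video file
results['frame_inds'] = np.arange(0, results['total_frames'], 10)
# 'accurate' decodes sequentially up to the largest sampled index;
# 'efficient' seeks to the nearest keyframe for each index instead,
# trading frame exactness for speed on long videos.
results = PyAVDecode(mode='efficient')(results)
imgs = results['imgs']  # list of RGB ndarrays, one per sampled index
```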
@@ -792,33 +808,132 @@ def __call__(self, results): if results['frame_inds'].ndim != 1: results['frame_inds'] = np.squeeze(results['frame_inds']) - # set max indice to make early stop - max_inds = max(results['frame_inds']) - i = 0 - for frame in container.decode(video=0): - if i > max_inds + 1: - break - imgs.append(frame.to_rgb().to_ndarray()) - i += 1 + if self.mode == 'accurate': + # set max indice to make early stop + max_inds = max(results['frame_inds']) + i = 0 + for frame in container.decode(video=0): + if i > max_inds + 1: + break + imgs.append(frame.to_rgb().to_ndarray()) + i += 1 + # the available frame in pyav may be less than its length, + # which may raise error + results['imgs'] = [ + imgs[i % len(imgs)] for i in results['frame_inds'] + ] + elif self.mode == 'efficient': + for frame in container.decode(video=0): + backup_frame = frame + break + stream = container.streams.video[0] + for idx in results['frame_inds']: + pts_scale = stream.average_rate * stream.time_base + frame_pts = int(idx / pts_scale) + container.seek( + frame_pts, any_frame=False, backward=True, stream=stream) + frame = self.frame_generator(container, stream) + if frame is not None: + imgs.append(frame) + backup_frame = frame + else: + imgs.append(backup_frame) + results['imgs'] = imgs + results['original_shape'] = imgs[0].shape[:2] + results['img_shape'] = imgs[0].shape[:2] results['video_reader'] = None del container - # the available frame in pyav may be less than its length, - # which may raise error - results['imgs'] = [imgs[i % len(imgs)] for i in results['frame_inds']] + return results - results['original_shape'] = imgs[0].shape[:2] - results['img_shape'] = imgs[0].shape[:2] + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(multi_thread={self.multi_thread}, mode={self.mode})' + return repr_str + + +@PIPELINES.register_module() +class PIMSInit: + """Use PIMS to initialize the video. + + PIMS: https://github.com/soft-matter/pims + + Args: + io_backend (str): io backend where frames are stored. + Default: 'disk'. + mode (str): Decoding mode. Options are 'accurate' and 'efficient'. + If set to 'accurate', it will always use ``pims.PyAVReaderIndexed`` + to decode videos into accurate frames. If set to 'efficient', it + will adopt fast seeking by using ``pims.PyAVReaderTimed``. + Both will return the accurate frames in most cases. + Default: 'accurate'. + kwargs (dict): Args for file client.
+ """ + + def __init__(self, io_backend='disk', mode='accurate', **kwargs): + self.io_backend = io_backend + self.kwargs = kwargs + self.file_client = None + self.mode = mode + assert mode in ['accurate', 'efficient'] + + def __call__(self, results): + try: + import pims + except ImportError: + raise ImportError('Please run "conda install pims -c conda-forge" ' + 'or "pip install pims" to install pims first.') + + if self.file_client is None: + self.file_client = FileClient(self.io_backend, **self.kwargs) + + file_obj = io.BytesIO(self.file_client.get(results['filename'])) + if self.mode == 'accurate': + container = pims.PyAVReaderIndexed(file_obj) + else: + container = pims.PyAVReaderTimed(file_obj) + + results['video_reader'] = container + results['total_frames'] = len(container) return results def __repr__(self): - repr_str = self.__class__.__name__ - repr_str += f'(multi_thread={self.multi_thread})' + repr_str = (f'{self.__class__.__name__}(io_backend={self.io_backend}, ' + f'mode={self.mode})') return repr_str +@PIPELINES.register_module() +class PIMSDecode: + """Using PIMS to decode the videos. + + PIMS: https://github.com/soft-matter/pims + + Required keys are "video_reader" and "frame_inds", + added or modified keys are "imgs", "img_shape" and "original_shape". + """ + + def __call__(self, results): + container = results['video_reader'] + + if results['frame_inds'].ndim != 1: + results['frame_inds'] = np.squeeze(results['frame_inds']) + + frame_inds = results['frame_inds'] + imgs = [container[idx] for idx in frame_inds] + + results['video_reader'] = None + del container + + results['imgs'] = imgs + results['original_shape'] = imgs[0].shape[:2] + results['img_shape'] = imgs[0].shape[:2] + + return results + + @PIPELINES.register_module() class PyAVDecodeMotionVector(PyAVDecode): """Using pyav to decode the motion vectors from video. @@ -828,10 +943,6 @@ class PyAVDecodeMotionVector(PyAVDecode): Required keys are "video_reader" and "frame_inds", added or modified keys are "motion_vectors", "frame_inds". - - Args: - multi_thread (bool): If set to True, it will apply multi - thread processing. Default: False. """ @staticmethod @@ -909,6 +1020,12 @@ class DecordInit: Required keys are "filename", added or modified keys are "video_reader" and "total_frames". + + Args: + io_backend (str): io backend where frames are stored. + Default: 'disk'. + num_threads (int): Number of threads to decode the video. Default: 1. + kwargs (dict): Args for file client. """ def __init__(self, io_backend='disk', num_threads=1, **kwargs): @@ -954,8 +1071,19 @@ class DecordDecode: Required keys are "video_reader", "filename" and "frame_inds", added or modified keys are "imgs" and "original_shape". + + Args: + mode (str): Decoding mode. Options are 'accurate' and 'efficient'. + If set to 'accurate', it will decode videos into accurate frames. + If set to 'efficient', it will adopt fast seeking but only return + key frames, which may be duplicated and inaccurate, and more + suitable for large scene-based video datasets. Default: 'accurate'. """ + def __init__(self, mode='accurate'): + self.mode = mode + assert mode in ['accurate', 'efficient'] + def __call__(self, results): """Perform the Decord decoding.
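In config form, the new ``mode`` switches (and the PIMS pair registered above) drop into a data pipeline like any other step. A hypothetical snippet, not taken from a shipped config:

```python
# Decord-based pipeline; 'efficient' trades frame accuracy for fast,
# keyframe-only seeking, which suits large scene-based video datasets.
train_pipeline = [
    dict(type='DecordInit', num_threads=1),
    dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8),
    dict(type='DecordDecode', mode='efficient'),
]

# PIMS-based alternative using PIMSInit / PIMSDecode from this diff.
pims_pipeline = [
    dict(type='PIMSInit', mode='efficient'),
    dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8),
    dict(type='PIMSDecode'),
]
```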
@@ -969,13 +1097,18 @@ def __call__(self, results): results['frame_inds'] = np.squeeze(results['frame_inds']) frame_inds = results['frame_inds'] - # Generate frame index mapping in order - frame_dict = { - idx: container[idx].asnumpy() - for idx in np.unique(frame_inds) - } - imgs = [frame_dict[idx] for idx in frame_inds] + if self.mode == 'accurate': + imgs = container.get_batch(frame_inds).asnumpy() + imgs = list(imgs) + elif self.mode == 'efficient': + # This mode is faster, however it always returns I-FRAME + container.seek(0) + imgs = list() + for idx in frame_inds: + container.seek(idx) + frame = container.next() + imgs.append(frame.asnumpy()) results['video_reader'] = None del container @@ -986,6 +1119,10 @@ def __call__(self, results): return results + def __repr__(self): + repr_str = f'{self.__class__.__name__}(mode={self.mode})' + return repr_str + @PIPELINES.register_module() class OpenCVInit: @@ -993,6 +1130,11 @@ class OpenCVInit: Required keys are "filename", added or modified keys are "new_path", "video_reader" and "total_frames". + + Args: + io_backend (str): io backend where frames are stored. + Default: 'disk'. + kwargs (dict): Args for file client. """ def __init__(self, io_backend='disk', **kwargs): diff --git a/requirements/optional.txt b/requirements/optional.txt index 3177ef6221..34d92709b4 100644 --- a/requirements/optional.txt +++ b/requirements/optional.txt @@ -6,5 +6,6 @@ lmdb moviepy onnx onnxruntime +pims PyTurboJPEG timm diff --git a/tests/test_data/test_pipelines/test_loadings/test_decode.py b/tests/test_data/test_pipelines/test_loadings/test_decode.py index ee7291c001..8b30fe7e72 100644 --- a/tests/test_data/test_pipelines/test_loadings/test_decode.py +++ b/tests/test_data/test_pipelines/test_loadings/test_decode.py @@ -7,9 +7,9 @@ from mmaction.datasets.pipelines import (AudioDecode, AudioDecodeInit, DecordDecode, DecordInit, FrameSelector, OpenCVDecode, - OpenCVInit, PyAVDecode, - PyAVDecodeMotionVector, PyAVInit, - RawFrameDecode) + OpenCVInit, PIMSDecode, PIMSInit, + PyAVDecode, PyAVDecodeMotionVector, + PyAVInit, RawFrameDecode) from .base import BaseTestLoading @@ -43,7 +43,7 @@ def test_pyav_decode(self): assert np.shape(pyav_decode_result['imgs']) == (len( video_result['frame_inds']), 256, 340, 3) assert repr(pyav_decode) == (f'{pyav_decode.__class__.__name__}(' - f'multi_thread={False})') + f'multi_thread={False}, mode=accurate)') # test PyAV with 1 dim input and start_index = 0 video_result = copy.deepcopy(self.video_results) @@ -73,7 +73,7 @@ def test_pyav_decode(self): assert np.shape(pyav_decode_result['imgs']) == (len( video_result['frame_inds']), 256, 340, 3) assert repr(pyav_decode) == (f'{pyav_decode.__class__.__name__}(' - f'multi_thread={True})') + f'multi_thread={True}, mode=accurate)') # test PyAV with 2 dim input video_result = copy.deepcopy(self.video_results) @@ -118,8 +118,55 @@ def test_pyav_decode(self): assert np.shape(pyav_decode_result['imgs']) == (len( video_result['frame_inds']), 256, 340, 3) - assert repr(pyav_decode) == pyav_decode.__class__.__name__ + \ - f'(multi_thread={True})' + # PyAV with efficient mode + video_result = copy.deepcopy(self.video_results) + video_result['frame_inds'] = np.arange(1, self.total_frames, 5) + pyav_init = PyAVInit() + pyav_init_result = pyav_init(video_result) + video_result['video_reader'] = pyav_init_result['video_reader'] + + pyav_decode = PyAVDecode(multi_thread=True, mode='efficient') + pyav_decode_result = pyav_decode(video_result) + assert assert_dict_has_keys(pyav_decode_result,
target_keys) + assert pyav_decode_result['original_shape'] == (256, 340) + assert np.shape(pyav_decode_result['imgs']) == (len( + video_result['frame_inds']), 256, 340, 3) + assert pyav_decode_result['video_reader'] is None + + assert (repr(pyav_decode) == pyav_decode.__class__.__name__ + + f'(multi_thread={True}, mode=efficient)') + + def test_pims_init(self): + target_keys = ['video_reader', 'total_frames'] + video_result = copy.deepcopy(self.video_results) + pims_init = PIMSInit() + pims_init_result = pims_init(video_result) + assert assert_dict_has_keys(pims_init_result, target_keys) + assert pims_init_result['total_frames'] == 300 + + pims_init = PIMSInit(mode='efficient') + pims_init_result = pims_init(video_result) + assert assert_dict_has_keys(pims_init_result, target_keys) + assert pims_init_result['total_frames'] == 300 + + assert repr(pims_init) == (f'{pims_init.__class__.__name__}' + f'(io_backend=disk, mode=efficient)') + + def test_pims_decode(self): + target_keys = ['frame_inds', 'imgs', 'original_shape'] + + video_result = copy.deepcopy(self.video_results) + video_result['frame_inds'] = np.arange(0, self.total_frames, + 2)[:, np.newaxis] + pims_init = PIMSInit() + pims_init_result = pims_init(video_result) + + pims_decode = PIMSDecode() + pims_decode_result = pims_decode(pims_init_result) + assert assert_dict_has_keys(pims_decode_result, target_keys) + assert pims_decode_result['original_shape'] == (256, 340) + assert np.shape(pims_decode_result['imgs']) == (len( + video_result['frame_inds']), 256, 340, 3) def test_decord_init(self): target_keys = ['video_reader', 'total_frames'] @@ -187,12 +234,14 @@ def test_decord_decode(self): decord_init_result = decord_init(video_result) video_result['video_reader'] = decord_init_result['video_reader'] - decord_decode = DecordDecode() + decord_decode = DecordDecode(mode='efficient') decord_decode_result = decord_decode(video_result) assert assert_dict_has_keys(decord_decode_result, target_keys) assert decord_decode_result['original_shape'] == (256, 340) assert np.shape(decord_decode_result['imgs']) == (len( video_result['frame_inds']), 256, 340, 3) + assert repr(decord_decode) == (f'{decord_decode.__class__.__name__}(' + f'mode=efficient)') def test_opencv_init(self): target_keys = ['new_path', 'video_reader', 'total_frames'] From e5325271220e633ba36391c86c5fa830f013e4ba Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Wed, 30 Jun 2021 16:40:43 +0800 Subject: [PATCH 172/414] Audit the usage of shutil.rmtree(tmp_dir) (#943) * Audit the usage of shutil.rmtree(tmp_dir) * Update test.py * fix * fix --- demo/demo_spatiotemporal_det.py | 2 +- mmaction/apis/test.py | 1 + .../test_datasets/test_activitynet_dataset.py | 16 +- .../test_loadings/test_pose_loading.py | 160 +++++++++--------- tests/test_runtime/test_eval_hook.py | 3 + tests/test_runtime/test_lr.py | 3 + tools/data/hvu/download.py | 5 +- tools/data/kinetics/download.py | 6 +- 8 files changed, 106 insertions(+), 90 deletions(-) diff --git a/demo/demo_spatiotemporal_det.py b/demo/demo_spatiotemporal_det.py index 80ea458ada..4b77305e10 100644 --- a/demo/demo_spatiotemporal_det.py +++ b/demo/demo_spatiotemporal_det.py @@ -190,7 +190,7 @@ def frame_extraction(video_path): Args: video_path (str): The video_path. 
""" - # Load the video, extract frames into /tmp/video_name + # Load the video, extract frames into ./tmp/video_name target_dir = osp.join('./tmp', osp.basename(osp.splitext(video_path)[0])) os.makedirs(target_dir, exist_ok=True) # Should be able to handle videos up to several hours diff --git a/mmaction/apis/test.py b/mmaction/apis/test.py index 4f4a52224b..fea43cde65 100644 --- a/mmaction/apis/test.py +++ b/mmaction/apis/test.py @@ -131,6 +131,7 @@ def collect_results_cpu(result_part, size, tmpdir=None): # noqa: F811 dist.broadcast(dir_tensor, 0) tmpdir = dir_tensor.cpu().numpy().tobytes().decode().rstrip() else: + tmpdir = osp.join(tmpdir, '.dist_test') mmcv.mkdir_or_exist(tmpdir) # synchronizes all processes to make sure tmpdir exist dist.barrier() diff --git a/tests/test_data/test_datasets/test_activitynet_dataset.py b/tests/test_data/test_datasets/test_activitynet_dataset.py index 2947eef3d2..0a83b93fae 100644 --- a/tests/test_data/test_datasets/test_activitynet_dataset.py +++ b/tests/test_data/test_datasets/test_activitynet_dataset.py @@ -1,4 +1,3 @@ -import os import os.path as osp import tempfile @@ -130,13 +129,14 @@ def test_activitynet_dump_results(self): 'external_data': {} } - tmp_filename = osp.join(tempfile.gettempdir(), 'result.json') - activitynet_dataset.dump_results(results, tmp_filename, 'json') - assert osp.isfile(tmp_filename) - with open(tmp_filename, 'r+') as f: - load_obj = mmcv.load(f, file_format='json') - assert load_obj == dump_results - os.remove(tmp_filename) + with tempfile.TemporaryDirectory() as tmpdir: + + tmp_filename = osp.join(tmpdir, 'result.json') + activitynet_dataset.dump_results(results, tmp_filename, 'json') + assert osp.isfile(tmp_filename) + with open(tmp_filename, 'r+') as f: + load_obj = mmcv.load(f, file_format='json') + assert load_obj == dump_results # test dumping csv file results = [('test_video', np.array([[1, 2, 3, 4, 5], [6, 7, 8, 9, diff --git a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py index 4369d7d20e..720580a23f 100644 --- a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py +++ b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py @@ -1,4 +1,6 @@ import copy as cp +import os.path as osp +import tempfile from collections import defaultdict import numpy as np @@ -128,84 +130,86 @@ def get_mode(arr): max_val = max(cnt.values()) return [k for k in cnt if cnt[k] == max_val], max_val - filename = '/tmp/tmp.pkl' - total_frames = 100 - img_shape = (224, 224) - frame_inds = np.random.choice(range(100), size=120) - frame_inds.sort() - anno_flag = np.random.random(120) > 0.1 - anno_inds = np.array([i for i, f in enumerate(anno_flag) if f]) - kp = np.random.random([120, 17, 3]) - dump(kp, filename) - results = dict( - filename=filename, - total_frames=total_frames, - img_shape=img_shape, - frame_inds=frame_inds) - - inp = cp.deepcopy(results) - - with pytest.raises(NotImplementedError): - LoadKineticsPose(squeeze=True, max_person=100, source='xxx') - - load_kinetics_pose = LoadKineticsPose( - squeeze=True, max_person=100, source='openpose') - - assert str(load_kinetics_pose) == ('LoadKineticsPose(io_backend=disk, ' - 'squeeze=True, max_person=100, ' - "keypoint_weight={'face': 1, " - "'torso': 2, 'limb': 3}, " - 'source=openpose, kwargs={})') - return_results = load_kinetics_pose(inp) - assert return_results['keypoint'].shape[:-1] == \ - return_results['keypoint_score'].shape - - num_person = 
return_results['keypoint'].shape[0] - num_frame = return_results['keypoint'].shape[1] - assert num_person == get_mode(frame_inds)[1] - assert np.max(return_results['keypoint']) > 1 - assert num_frame == len(set(frame_inds)) - - inp = cp.deepcopy(results) - load_kinetics_pose = LoadKineticsPose( - squeeze=False, max_person=100, source='openpose') - return_results = load_kinetics_pose(inp) - assert return_results['keypoint'].shape[:-1] == \ - return_results['keypoint_score'].shape - - num_person = return_results['keypoint'].shape[0] - num_frame = return_results['keypoint'].shape[1] - assert num_person == get_mode(frame_inds)[1] - assert np.max(return_results['keypoint']) > 1 - assert num_frame == total_frames - - inp = cp.deepcopy(results) - inp['anno_inds'] = anno_inds - load_kinetics_pose = LoadKineticsPose( - squeeze=True, max_person=100, source='mmpose') - return_results = load_kinetics_pose(inp) - assert return_results['keypoint'].shape[:-1] == \ - return_results['keypoint_score'].shape - - num_person = return_results['keypoint'].shape[0] - num_frame = return_results['keypoint'].shape[1] - assert num_person == get_mode(frame_inds[anno_inds])[1] - assert np.max(return_results['keypoint']) <= 1 - assert num_frame == len(set(frame_inds[anno_inds])) - - inp = cp.deepcopy(results) - inp['anno_inds'] = anno_inds - load_kinetics_pose = LoadKineticsPose( - squeeze=True, max_person=2, source='mmpose') - return_results = load_kinetics_pose(inp) - assert return_results['keypoint'].shape[:-1] == \ - return_results['keypoint_score'].shape - - num_person = return_results['keypoint'].shape[0] - num_frame = return_results['keypoint'].shape[1] - assert num_person <= 2 - assert np.max(return_results['keypoint']) <= 1 - assert num_frame == len(set(frame_inds[anno_inds])) + with tempfile.TemporaryDirectory() as tmpdir: + filename = osp.join(tmpdir, 'tmp.pkl') + total_frames = 100 + img_shape = (224, 224) + frame_inds = np.random.choice(range(100), size=120) + frame_inds.sort() + anno_flag = np.random.random(120) > 0.1 + anno_inds = np.array([i for i, f in enumerate(anno_flag) if f]) + kp = np.random.random([120, 17, 3]) + dump(kp, filename) + results = dict( + filename=filename, + total_frames=total_frames, + img_shape=img_shape, + frame_inds=frame_inds) + + inp = cp.deepcopy(results) + + with pytest.raises(NotImplementedError): + LoadKineticsPose(squeeze=True, max_person=100, source='xxx') + + load_kinetics_pose = LoadKineticsPose( + squeeze=True, max_person=100, source='openpose') + + assert str(load_kinetics_pose) == ( + 'LoadKineticsPose(io_backend=disk, ' + 'squeeze=True, max_person=100, ' + "keypoint_weight={'face': 1, " + "'torso': 2, 'limb': 3}, " + 'source=openpose, kwargs={})') + return_results = load_kinetics_pose(inp) + assert return_results['keypoint'].shape[:-1] == \ + return_results['keypoint_score'].shape + + num_person = return_results['keypoint'].shape[0] + num_frame = return_results['keypoint'].shape[1] + assert num_person == get_mode(frame_inds)[1] + assert np.max(return_results['keypoint']) > 1 + assert num_frame == len(set(frame_inds)) + + inp = cp.deepcopy(results) + load_kinetics_pose = LoadKineticsPose( + squeeze=False, max_person=100, source='openpose') + return_results = load_kinetics_pose(inp) + assert return_results['keypoint'].shape[:-1] == \ + return_results['keypoint_score'].shape + + num_person = return_results['keypoint'].shape[0] + num_frame = return_results['keypoint'].shape[1] + assert num_person == get_mode(frame_inds)[1] + assert np.max(return_results['keypoint']) > 1 
+ assert num_frame == total_frames + + inp = cp.deepcopy(results) + inp['anno_inds'] = anno_inds + load_kinetics_pose = LoadKineticsPose( + squeeze=True, max_person=100, source='mmpose') + return_results = load_kinetics_pose(inp) + assert return_results['keypoint'].shape[:-1] == \ + return_results['keypoint_score'].shape + + num_person = return_results['keypoint'].shape[0] + num_frame = return_results['keypoint'].shape[1] + assert num_person == get_mode(frame_inds[anno_inds])[1] + assert np.max(return_results['keypoint']) <= 1 + assert num_frame == len(set(frame_inds[anno_inds])) + + inp = cp.deepcopy(results) + inp['anno_inds'] = anno_inds + load_kinetics_pose = LoadKineticsPose( + squeeze=True, max_person=2, source='mmpose') + return_results = load_kinetics_pose(inp) + assert return_results['keypoint'].shape[:-1] == \ + return_results['keypoint_score'].shape + + num_person = return_results['keypoint'].shape[0] + num_frame = return_results['keypoint'].shape[1] + assert num_person <= 2 + assert np.max(return_results['keypoint']) <= 1 + assert num_frame == len(set(frame_inds[anno_inds])) @staticmethod def test_generate_pose_target(): diff --git a/tests/test_runtime/test_eval_hook.py b/tests/test_runtime/test_eval_hook.py index c95c88881d..21d7c927d2 100644 --- a/tests/test_runtime/test_eval_hook.py +++ b/tests/test_runtime/test_eval_hook.py @@ -1,4 +1,5 @@ import os.path as osp +import shutil import tempfile import unittest.mock as mock import warnings @@ -341,3 +342,5 @@ def test_start_param(EvalHookParam, _build_demo_runner, by_epoch): runner._iter = 1 runner.run([dataloader], [('train', 1)], 3) assert evalhook.evaluate.call_count == 2 # after epoch 2 & 3 + + shutil.rmtree(runner.work_dir) diff --git a/tests/test_runtime/test_lr.py b/tests/test_runtime/test_lr.py index f2f6a8151e..89dfab6cd2 100644 --- a/tests/test_runtime/test_lr.py +++ b/tests/test_runtime/test_lr.py @@ -1,4 +1,5 @@ import logging +import os.path as osp import shutil import sys import tempfile @@ -103,6 +104,8 @@ def val_step(self, x, optimizer, **kwargs): ]) tmp_dir = tempfile.mkdtemp() + tmp_dir = osp.join(tmp_dir, '.test_lr_tmp') + runner = build_runner( dict(type=runner_type), default_args=dict( diff --git a/tools/data/hvu/download.py b/tools/data/hvu/download.py index 0884bb2fbc..fcdfaee86f 100644 --- a/tools/data/hvu/download.py +++ b/tools/data/hvu/download.py @@ -40,7 +40,7 @@ def download_clip(video_identifier, output_filename, start_time, end_time, - tmp_dir='/tmp/hvu', + tmp_dir='/tmp/hvu/.tmp_dir', num_attempts=5, url_base='https://www.youtube.com/watch?v='): """Download a video from youtube if exists and is not blocked. @@ -150,6 +150,9 @@ def main(input_csv, trim_format='%06d', num_jobs=24, tmp_dir='/tmp/hvu'): + + tmp_dir = os.path.join(tmp_dir, '.tmp_dir') + # Reading and parsing HVU. dataset = parse_hvu_annotations(input_csv) diff --git a/tools/data/kinetics/download.py b/tools/data/kinetics/download.py index 27aecfefa1..b4e7e62a7e 100644 --- a/tools/data/kinetics/download.py +++ b/tools/data/kinetics/download.py @@ -58,7 +58,7 @@ def download_clip(video_identifier, output_filename, start_time, end_time, - tmp_dir='/tmp/kinetics', + tmp_dir='/tmp/kinetics/.tmp_dir', num_attempts=5, url_base='https://www.youtube.com/watch?v='): """Download a video from youtube if exists and is not blocked. @@ -69,7 +69,7 @@ def download_clip(video_identifier, output_filename: str File path where the video will be stored. 
start_time: float - Indicates the begining time in seconds from where the video + Indicates the beginning time in seconds from where the video will be trimmed. end_time: float Indicates the ending time in seconds of the trimmed video. @@ -176,6 +176,8 @@ def main(input_csv, trim_format='%06d', num_jobs=24, tmp_dir='/tmp/kinetics'): + tmp_dir = os.path.join(tmp_dir, '.tmp_dir') + # Reading and parsing Kinetics. dataset = parse_kinetics_annotations(input_csv) From dd229423dfcf9e91d474d8aba42b27dba04fcc49 Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Thu, 1 Jul 2021 14:12:17 +0800 Subject: [PATCH 173/414] fix mmcv install in CI (#977) --- .github/workflows/build.yml | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 924653e894..3cb0283ab7 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -87,17 +87,21 @@ jobs: torch: [1.5.0+cu101, 1.6.0+cu101, 1.7.0+cu101, 1.8.0+cu101] include: - torch: 1.5.0+cu101 + torch_version: torch1.5.0 torchvision: 0.6.0+cu101 - mmcv: 1.5.0+cu101 + mmcv: "latest+1.5.0+cu101" - torch: 1.6.0+cu101 + torch_version: torch1.6.0 torchvision: 0.7.0+cu101 - mmcv: 1.6.0+cu101 + mmcv: "latest+1.6.0+cu101" - torch: 1.7.0+cu101 + torch_version: torch1.7.0 torchvision: 0.8.1+cu101 - mmcv: 1.7.0+cu101 + mmcv: "latest+1.7.0+cu101" - torch: 1.8.0+cu101 + torch_version: torch1.8.0 torchvision: 0.9.0+cu101 - mmcv: 1.8.0+cu101 + mmcv: "latest+1.8.0+cu101" steps: - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} @@ -135,10 +139,11 @@ jobs: run: pip install torch==${{matrix.torch}} torchvision==${{matrix.torchvision}} -f https://download.pytorch.org/whl/torch_stable.html - name: Install mmaction dependencies run: | - pip install mmcv-full==latest+torch${{matrix.mmcv}} -f https://download.openmmlab.com/mmcv/dist/index.html --use-deprecated=legacy-resolver + pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cu101/${{matrix.torch_version}}/index.html pip install -q git+https://github.com/open-mmlab/mmdetection/ pip install -q git+https://github.com/open-mmlab/mmclassification/ pip install -r requirements.txt + python -c 'import mmcv; print(mmcv.__version__)' - name: Build and install run: | rm -rf .eggs From 95486d661b6155e17257ba950de30ec2593623da Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Thu, 1 Jul 2021 14:13:06 +0800 Subject: [PATCH 174/414] [Improvement] Polish localizer related code (#913) * polish localizer code * add deprecated warnings * update warning msg --- .../ssn/ssn_r50_450e_thumos14_rgb_test.py | 10 +- .../ssn/ssn_r50_450e_thumos14_rgb_train.py | 14 +- mmaction/models/localizers/__init__.py | 4 +- mmaction/models/localizers/base.py | 138 ++++++++++++++++-- mmaction/models/localizers/bmn.py | 6 +- mmaction/models/localizers/bsn.py | 14 +- mmaction/models/localizers/ssn.py | 4 +- 7 files changed, 154 insertions(+), 36 deletions(-) diff --git a/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_test.py b/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_test.py index 5915889cc4..cca88f1999 100644 --- a/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_test.py +++ b/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_test.py @@ -1,5 +1,5 @@ # model training and testing settings -train_cfg = dict( +train_cfg_ = dict( ssn=dict( assigner=dict( positive_iou_threshold=0.7, @@ -15,7 +15,7 @@ add_gt_as_proposals=True), 
loss_weight=dict(comp_loss_weight=0.1, reg_loss_weight=0.1), debug=False)) -test_cfg = dict( +test_cfg_ = dict( ssn=dict( sampler=dict(test_interval=6, batch_size=16), evaluater=dict( @@ -42,7 +42,7 @@ num_classes=20, consensus=dict(type='STPPTest', stpp_stage=(1, 1, 1)), use_regression=True), - test_cfg=test_cfg) + test_cfg=test_cfg_) # dataset settings dataset_type = 'SSNDataset' data_root = './data/thumos14/rawframes/' @@ -86,8 +86,8 @@ type=dataset_type, ann_file=ann_file_test, data_prefix=data_root, - train_cfg=train_cfg, - test_cfg=test_cfg, + train_cfg=train_cfg_, + test_cfg=test_cfg_, aug_ratio=0.5, test_mode=True, pipeline=test_pipeline)) diff --git a/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py b/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py index c64766cb9c..435ac635b3 100644 --- a/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py +++ b/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py @@ -1,5 +1,5 @@ # model training and testing settings -train_cfg = dict( +train_cfg_ = dict( ssn=dict( assigner=dict( positive_iou_threshold=0.7, @@ -15,7 +15,7 @@ add_gt_as_proposals=True), loss_weight=dict(comp_loss_weight=0.1, reg_loss_weight=0.1), debug=False)) -test_cfg = dict( +test_cfg_ = dict( ssn=dict( sampler=dict(test_interval=6, batch_size=16), evaluater=dict( @@ -46,7 +46,7 @@ stpp_stage=(1, 1, 1), num_segments_list=(2, 5, 2)), use_regression=True), - train_cfg=train_cfg) + train_cfg=train_cfg_) # dataset settings dataset_type = 'SSNDataset' data_root = './data/thumos14/rawframes/' @@ -116,8 +116,8 @@ type=dataset_type, ann_file=ann_file_train, data_prefix=data_root, - train_cfg=train_cfg, - test_cfg=test_cfg, + train_cfg=train_cfg_, + test_cfg=test_cfg_, body_segments=5, aug_segments=(2, 2), aug_ratio=0.5, @@ -128,8 +128,8 @@ type=dataset_type, ann_file=ann_file_val, data_prefix=data_root, - train_cfg=train_cfg, - test_cfg=test_cfg, + train_cfg=train_cfg_, + test_cfg=test_cfg_, body_segments=5, aug_segments=(2, 2), aug_ratio=0.5, diff --git a/mmaction/models/localizers/__init__.py b/mmaction/models/localizers/__init__.py index 0d50890994..523d3f20c2 100644 --- a/mmaction/models/localizers/__init__.py +++ b/mmaction/models/localizers/__init__.py @@ -1,6 +1,6 @@ -from .base import BaseLocalizer +from .base import BaseTAGClassifier, BaseTAPGenerator from .bmn import BMN from .bsn import PEM, TEM from .ssn import SSN -__all__ = ['PEM', 'TEM', 'BMN', 'SSN', 'BaseLocalizer'] +__all__ = ['PEM', 'TEM', 'BMN', 'SSN', 'BaseTAPGenerator', 'BaseTAGClassifier'] diff --git a/mmaction/models/localizers/base.py b/mmaction/models/localizers/base.py index abc715593d..893678f6bf 100644 --- a/mmaction/models/localizers/base.py +++ b/mmaction/models/localizers/base.py @@ -1,3 +1,4 @@ +import warnings from abc import ABCMeta, abstractmethod from collections import OrderedDict @@ -8,12 +9,119 @@ from .. import builder -class BaseLocalizer(nn.Module, metaclass=ABCMeta): - """Base class for localizers. +class BaseTAPGenerator(nn.Module, metaclass=ABCMeta): + """Base class for temporal action proposal generator. - All localizers should subclass it. All subclass should overwrite: - Methods:``forward_train``, supporting to forward when training. - Methods:``forward_test``, supporting to forward when testing. + All temporal action proposal generator should subclass it. All subclass + should overwrite: Methods:``forward_train``, supporting to forward when + training. Methods:``forward_test``, supporting to forward when testing. 
+ """ + + @abstractmethod + def forward_train(self, *args, **kwargs): + """Defines the computation performed at training.""" + + @abstractmethod + def forward_test(self, *args): + """Defines the computation performed at testing.""" + + @abstractmethod + def forward(self, *args, **kwargs): + """Define the computation performed at every call.""" + + @staticmethod + def _parse_losses(losses): + """Parse the raw outputs (losses) of the network. + + Args: + losses (dict): Raw output of the network, which usually contain + losses and other necessary information. + + Returns: + tuple[Tensor, dict]: (loss, log_vars), loss is the loss tensor + which may be a weighted sum of all losses, log_vars contains + all the variables to be sent to the logger. + """ + log_vars = OrderedDict() + for loss_name, loss_value in losses.items(): + if isinstance(loss_value, torch.Tensor): + log_vars[loss_name] = loss_value.mean() + elif isinstance(loss_value, list): + log_vars[loss_name] = sum(_loss.mean() for _loss in loss_value) + else: + raise TypeError( + f'{loss_name} is not a tensor or list of tensors') + + loss = sum(_value for _key, _value in log_vars.items() + if 'loss' in _key) + + log_vars['loss'] = loss + for loss_name, loss_value in log_vars.items(): + # reduce loss when distributed training + if dist.is_available() and dist.is_initialized(): + loss_value = loss_value.data.clone() + dist.all_reduce(loss_value.div_(dist.get_world_size())) + log_vars[loss_name] = loss_value.item() + + return loss, log_vars + + def train_step(self, data_batch, optimizer, **kwargs): + """The iteration step during training. + + This method defines an iteration step during training, except for the + back propagation and optimizer updating, which are done in an optimizer + hook. Note that in some complicated cases or models, the whole process + including back propagation and optimizer updating is also defined in + this method, such as GAN. + + Args: + data_batch (dict): The output of dataloader. + optimizer (:obj:`torch.optim.Optimizer` | dict): The optimizer of + runner is passed to ``train_step()``. This argument is unused + and reserved. + + Returns: + dict: It should contain at least 3 keys: ``loss``, ``log_vars``, + ``num_samples``. + ``loss`` is a tensor for back propagation, which can be a + weighted sum of multiple losses. + ``log_vars`` contains all the variables to be sent to the + logger. + ``num_samples`` indicates the batch size (when the model is + DDP, it means the batch size on each GPU), which is used for + averaging the logs. + """ + losses = self.forward(**data_batch) + + loss, log_vars = self._parse_losses(losses) + + outputs = dict( + loss=loss, + log_vars=log_vars, + num_samples=len(next(iter(data_batch.values())))) + + return outputs + + def val_step(self, data_batch, optimizer, **kwargs): + """The iteration step during validation. + + This method shares the same signature as :func:`train_step`, but used + during val epochs. Note that the evaluation after training epochs is + not implemented with this method, but an evaluation hook. + """ + results = self.forward(return_loss=False, **data_batch) + + outputs = dict(results=results) + + return outputs + + +class BaseTAGClassifier(nn.Module, metaclass=ABCMeta): + """Base class for temporal action proposal classifier. + + All temporal action generation classifier should subclass it. All subclass + should overwrite: Methods:``forward_train``, supporting to forward when + training. Methods:``forward_test``, supporting to forward when testing. 
""" def __init__(self, backbone, cls_head, train_cfg=None, test_cfg=None): @@ -42,19 +150,19 @@ def extract_feat(self, imgs): return x @abstractmethod - def forward_train(self, imgs, labels): + def forward_train(self, *args, **kwargs): """Defines the computation performed at training.""" @abstractmethod - def forward_test(self, imgs): + def forward_test(self, *args, **kwargs): """Defines the computation performed at testing.""" - def forward(self, imgs, return_loss=True, **kwargs): + def forward(self, *args, return_loss=True, **kwargs): """Define the computation performed at every call.""" if return_loss: - return self.forward_train(imgs, **kwargs) + return self.forward_train(*args, **kwargs) - return self.forward_test(imgs, **kwargs) + return self.forward_test(*args, **kwargs) @staticmethod def _parse_losses(losses): @@ -141,3 +249,13 @@ def val_step(self, data_batch, optimizer, **kwargs): outputs = dict(results=results) return outputs + + +class BaseLocalizer(BaseTAGClassifier): + """Deprecated class for ``BaseTAPGenerator`` and ``BaseTAGClassifier``.""" + + def __init__(*args, **kwargs): + warnings.warn('``BaseLocalizer`` is deprecated, please switch to' + '``BaseTAPGenerator`` or ``BaseTAGClassifier``. Details ' + 'see https://github.com/open-mmlab/mmaction2/pull/913') + super().__init__(*args, **kwargs) diff --git a/mmaction/models/localizers/bmn.py b/mmaction/models/localizers/bmn.py index a0bbece0cd..cb9bdc4477 100644 --- a/mmaction/models/localizers/bmn.py +++ b/mmaction/models/localizers/bmn.py @@ -6,12 +6,12 @@ from ...localization import temporal_iop, temporal_iou from ..builder import LOCALIZERS, build_loss -from .base import BaseLocalizer +from .base import BaseTAPGenerator from .utils import post_processing @LOCALIZERS.register_module() -class BMN(BaseLocalizer): +class BMN(BaseTAPGenerator): """Boundary Matching Network for temporal action proposal generation. Please refer `BMN: Boundary-Matching Network for Temporal Action Proposal @@ -52,7 +52,7 @@ def __init__(self, hidden_dim_1d=256, hidden_dim_2d=128, hidden_dim_3d=512): - super(BaseLocalizer, self).__init__() + super().__init__() self.tscale = temporal_dim self.boundary_ratio = boundary_ratio diff --git a/mmaction/models/localizers/bsn.py b/mmaction/models/localizers/bsn.py index 83843002ff..e65f7ecf8c 100644 --- a/mmaction/models/localizers/bsn.py +++ b/mmaction/models/localizers/bsn.py @@ -5,13 +5,13 @@ from ...localization import temporal_iop from ..builder import LOCALIZERS, build_loss -from .base import BaseLocalizer +from .base import BaseTAPGenerator from .utils import post_processing @LOCALIZERS.register_module() -class TEM(BaseLocalizer): - """Temporal Evaluation Model for Boundary Sensetive Network. +class TEM(BaseTAPGenerator): + """Temporal Evaluation Model for Boundary Sensitive Network. Please refer `BSN: Boundary Sensitive Network for Temporal Action Proposal Generation `_. @@ -44,7 +44,7 @@ def __init__(self, conv1_ratio=1, conv2_ratio=1, conv3_ratio=0.01): - super(BaseLocalizer, self).__init__() + super().__init__() self.temporal_dim = temporal_dim self.boundary_ratio = boundary_ratio @@ -225,8 +225,8 @@ def forward(self, @LOCALIZERS.register_module() -class PEM(BaseLocalizer): - """Proposals Evaluation Model for Boundary Sensetive Network. +class PEM(BaseTAPGenerator): + """Proposals Evaluation Model for Boundary Sensitive Network. Please refer `BSN: Boundary Sensitive Network for Temporal Action Proposal Generation `_. 
@@ -268,7 +268,7 @@ def __init__(self, fc1_ratio=0.1, fc2_ratio=0.1, output_dim=1): - super(BaseLocalizer, self).__init__() + super().__init__() self.feat_dim = pem_feat_dim self.hidden_dim = pem_hidden_dim diff --git a/mmaction/models/localizers/ssn.py b/mmaction/models/localizers/ssn.py index 1284f694dd..32c0dedbcc 100644 --- a/mmaction/models/localizers/ssn.py +++ b/mmaction/models/localizers/ssn.py @@ -3,11 +3,11 @@ from .. import builder from ..builder import LOCALIZERS -from .base import BaseLocalizer +from .base import BaseTAGClassifier @LOCALIZERS.register_module() -class SSN(BaseLocalizer): +class SSN(BaseTAGClassifier): """Temporal Action Detection with Structured Segment Networks. Args: From 570babe7c44d7bd20e40433f00948b29a63e5cce Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Thu, 1 Jul 2021 15:35:18 +0800 Subject: [PATCH 175/414] [ModelZoo] Port CSN checkpoint from VMZ (#945) * add csn * add config files * add csn inference * fix * Update README.md * update metafile * update metafile flops * Update metafile.yml Co-authored-by: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> --- ...{csn_ig65m_pretrained.py => ircsn_r152.py} | 3 +- configs/recognition/csn/README.md | 9 +- ...frozen_r152_32x2x1_180e_kinetics400_rgb.py | 92 ++++++++++++++ ...nfrozen_r152_32x2x1_58e_kinetics400_rgb.py | 15 +++ ...nfrozen_r152_32x2x1_58e_kinetics400_rgb.py | 15 +++ ...frozen_r152_32x2x1_180e_kinetics400_rgb.py | 92 ++++++++++++++ ...nfrozen_r50_32x2x1_180e_kinetics400_rgb.py | 96 ++++++++++++++ ...nfrozen_r152_32x2x1_58e_kinetics400_rgb.py | 31 +++-- ...bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py | 102 +++++++++++++++ ...trained_r152_32x2x1_58e_kinetics400_rgb.py | 31 +++-- ...nfrozen_r152_32x2x1_58e_kinetics400_rgb.py | 15 +++ configs/recognition/csn/metafile.yml | 119 ++++++++++++++++++ mmaction/models/backbones/resnet3d_csn.py | 10 +- 13 files changed, 593 insertions(+), 37 deletions(-) rename configs/_base_/models/{csn_ig65m_pretrained.py => ircsn_r152.py} (76%) create mode 100644 configs/recognition/csn/ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py create mode 100644 configs/recognition/csn/ipcsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py create mode 100644 configs/recognition/csn/ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py create mode 100644 configs/recognition/csn/ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py create mode 100644 configs/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb.py create mode 100644 configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py create mode 100644 configs/recognition/csn/ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py diff --git a/configs/_base_/models/csn_ig65m_pretrained.py b/configs/_base_/models/ircsn_r152.py similarity index 76% rename from configs/_base_/models/csn_ig65m_pretrained.py rename to configs/_base_/models/ircsn_r152.py index 2e827026d5..fcab416cbd 100644 --- a/configs/_base_/models/csn_ig65m_pretrained.py +++ b/configs/_base_/models/ircsn_r152.py @@ -4,8 +4,7 @@ backbone=dict( type='ResNet3dCSN', pretrained2d=False, - pretrained= # noqa: E251 - 'https://download.openmmlab.com/mmaction/recognition/csn/ircsn_from_scratch_r152_ig65m_20200807-771c4135.pth', # noqa: E501 + pretrained=None, depth=152, with_pool2=False, bottleneck_mode='ir', diff --git a/configs/recognition/csn/README.md b/configs/recognition/csn/README.md index 2c8322fcc1..a347de5f4b 100644 --- 
a/configs/recognition/csn/README.md +++ b/configs/recognition/csn/README.md @@ -33,8 +33,14 @@ doi = {10.1109/ICCV.2019.00565} |config | resolution | gpus | backbone |pretrain| top1 acc| top5 acc | inference_time(video/s) | gpu_mem(M)| ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py)|short-side 320|x| ResNet50 | IG65M | 79.0 | 94.2 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_ig65m_pretrained_r50_32x2x1_58e_kinetics400_rgb_20210617-86d33018.pth) | x | x | +|[ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb](/configs/recognition/csn/ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py)|short-side 320|x| ResNet152 | None | 76.5 | 92.1 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_from_scratch_r152_32x2x1_180e_kinetics400_rgb_20210617-5c933ae1.pth) | x | x | +|[ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py)|short-side 320|x| ResNet152 | Sports1M | 78.2 | 93.0 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_sports1m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-b9b10241.pth) | x | x | +|[ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py](/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py)|short-side 320|8x4| ResNet152 | IG65M|82.76/82.6|95.68/95.3|x|8516|[ckpt](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb_20200812-9037a758.pth)/[infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-e63ee1bd.pth)|[log](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log)|[json](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log.json)| +|[ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb](/configs/recognition/csn/ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py)|short-side 320|x| ResNet152 | None | 77.8 | 92.8 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_from_scratch_r152_32x2x1_180e_kinetics400_rgb_20210617-d565828d.pth) | x | x | +|[ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py)|short-side 320|x| ResNet152 | Sports1M | 78.8 | 93.5 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_sports1m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-3367437a.pth) | x | x | +|[ipcsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ipcsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py)|short-side 320|x| ResNet152 | IG65M | 82.5 | 95.3 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-c3be9793.pth) | x | x | 
|[ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py](/configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py)|short-side 320|8x4| ResNet152 | IG65M|80.14|94.93|x|8517|[ckpt](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20200803-fc66ce8d.pth)|[log](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/20200728_031952.log)|[json](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/20200728_031952.log.json)| -|[ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py](/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py)|short-side 320|8x4| ResNet152 | IG65M|82.76|95.68|x|8516|[ckpt](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb_20200812-9037a758.pth)|[log](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log)|[json](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log.json)| Notes: @@ -44,6 +50,7 @@ Notes: 2. The **inference_time** is got by this [benchmark script](/tools/analysis/benchmark.py), where we use the sampling frames strategy of the test setting and only care about the model inference time, not including the IO time and pre-processing time. For each setting, we use 1 gpu and set batch size (videos per gpu) to 1 to calculate the inference time. 3. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. +4. The **infer_ckpt** means those checkpoints are ported from [VMZ](https://github.com/facebookresearch/VMZ). For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). 
diff --git a/configs/recognition/csn/ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py b/configs/recognition/csn/ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py new file mode 100644 index 0000000000..8d352419d8 --- /dev/null +++ b/configs/recognition/csn/ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py @@ -0,0 +1,92 @@ +_base_ = [ + './ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py' +] + +# model settings +model = dict( + backbone=dict( + norm_eval=True, bn_frozen=True, bottleneck_mode='ip', pretrained=None)) + +dataset_type = 'RawframeDataset' +data_root = 'data/kinetics400/rawframes_train' +data_root_val = 'data/kinetics400/rawframes_val' +ann_file_train = 'data/kinetics400/kinetics400_train_list_rawframes.txt' +ann_file_val = 'data/kinetics400/kinetics400_val_list_rawframes.txt' +ann_file_test = 'data/kinetics400/kinetics400_val_list_rawframes.txt' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) +train_pipeline = [ + dict(type='SampleFrames', clip_len=32, frame_interval=2, num_clips=1), + dict(type='FrameSelector'), + dict(type='Resize', scale=(-1, 256)), + dict(type='RandomResizedCrop'), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=32, + frame_interval=2, + num_clips=1, + test_mode=True), + dict(type='FrameSelector'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=32, + frame_interval=2, + num_clips=10, + test_mode=True), + dict(type='FrameSelector'), + dict(type='Resize', scale=(-1, 256)), + dict(type='ThreeCrop', crop_size=256), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=4, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=test_pipeline)) + +optimizer = dict( + type='SGD', lr=0.08, momentum=0.9, + weight_decay=0.0001) # this lr is used for 8 gpus +optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2)) +# learning policy +lr_config = dict( + policy='CosineAnnealing', + min_lr=0, + warmup='linear', + warmup_by_epoch=True, + warmup_iters=40) +total_epochs = 180 + +work_dir = './work_dirs/ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb' # noqa: E501 diff --git a/configs/recognition/csn/ipcsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py b/configs/recognition/csn/ipcsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py new file mode 100644 index 0000000000..7aed801a62 --- /dev/null +++ b/configs/recognition/csn/ipcsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py @@ -0,0 +1,15 @@ +_base_ = [ + 
'./ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py' +] + +# model settings +model = dict( + backbone=dict( + norm_eval=True, + bn_frozen=True, + bottleneck_mode='ip', + pretrained= # noqa: E251 + 'https://download.openmmlab.com/mmaction/recognition/csn/ipcsn_from_scratch_r152_ig65m_20210617-c4b99d38.pth' # noqa: E501 + )) + +work_dir = './work_dirs/ipcsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb' # noqa: E501 diff --git a/configs/recognition/csn/ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py b/configs/recognition/csn/ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py new file mode 100644 index 0000000000..0cc11366ba --- /dev/null +++ b/configs/recognition/csn/ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py @@ -0,0 +1,15 @@ +_base_ = [ + './ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py' +] + +# model settings +model = dict( + backbone=dict( + norm_eval=True, + bn_frozen=True, + bottleneck_mode='ip', + pretrained= # noqa: E251 + 'https://download.openmmlab.com/mmaction/recognition/csn/ipcsn_from_scratch_r152_sports1m_20210617-7a7cc5b9.pth' # noqa: E501 + )) + +work_dir = './work_dirs/ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb' # noqa: E501 diff --git a/configs/recognition/csn/ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py b/configs/recognition/csn/ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py new file mode 100644 index 0000000000..ecc41f1451 --- /dev/null +++ b/configs/recognition/csn/ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py @@ -0,0 +1,92 @@ +_base_ = [ + './ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py' +] + +# model settings +model = dict( + backbone=dict( + norm_eval=True, bn_frozen=True, bottleneck_mode='ir', pretrained=None)) + +dataset_type = 'RawframeDataset' +data_root = 'data/kinetics400/rawframes_train' +data_root_val = 'data/kinetics400/rawframes_val' +ann_file_train = 'data/kinetics400/kinetics400_train_list_rawframes.txt' +ann_file_val = 'data/kinetics400/kinetics400_val_list_rawframes.txt' +ann_file_test = 'data/kinetics400/kinetics400_val_list_rawframes.txt' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) +train_pipeline = [ + dict(type='SampleFrames', clip_len=32, frame_interval=2, num_clips=1), + dict(type='FrameSelector'), + dict(type='Resize', scale=(-1, 256)), + dict(type='RandomResizedCrop'), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=32, + frame_interval=2, + num_clips=1, + test_mode=True), + dict(type='FrameSelector'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=32, + frame_interval=2, + num_clips=10, + test_mode=True), + dict(type='FrameSelector'), + dict(type='Resize', scale=(-1, 256)), + dict(type='ThreeCrop', crop_size=256), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + 
dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=4, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=test_pipeline)) + +optimizer = dict( + type='SGD', lr=0.08, momentum=0.9, + weight_decay=0.0001) # this lr is used for 8 gpus +optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2)) +# learning policy +lr_config = dict( + policy='CosineAnnealing', + min_lr=0, + warmup='linear', + warmup_by_epoch=True, + warmup_iters=40) +total_epochs = 180 + +work_dir = './work_dirs/ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb' # noqa: E501 diff --git a/configs/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb.py b/configs/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb.py new file mode 100644 index 0000000000..7e3bab7f59 --- /dev/null +++ b/configs/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb.py @@ -0,0 +1,96 @@ +_base_ = [ + './ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py' +] + +# model settings +model = dict( + backbone=dict( + depth=50, + norm_eval=True, + bn_frozen=True, + bottleneck_mode='ir', + pretrained=None)) + +dataset_type = 'RawframeDataset' +data_root = 'data/kinetics400/rawframes_train' +data_root_val = 'data/kinetics400/rawframes_val' +ann_file_train = 'data/kinetics400/kinetics400_train_list_rawframes.txt' +ann_file_val = 'data/kinetics400/kinetics400_val_list_rawframes.txt' +ann_file_test = 'data/kinetics400/kinetics400_val_list_rawframes.txt' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) +train_pipeline = [ + dict(type='SampleFrames', clip_len=32, frame_interval=2, num_clips=1), + dict(type='FrameSelector'), + dict(type='Resize', scale=(-1, 256)), + dict(type='RandomResizedCrop'), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=32, + frame_interval=2, + num_clips=1, + test_mode=True), + dict(type='FrameSelector'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=32, + frame_interval=2, + num_clips=10, + test_mode=True), + dict(type='FrameSelector'), + dict(type='Resize', scale=(-1, 256)), + dict(type='ThreeCrop', crop_size=256), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=4, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + 
pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=test_pipeline)) + +optimizer = dict( + type='SGD', lr=0.08, momentum=0.9, + weight_decay=0.0001) # this lr is used for 8 gpus +optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2)) +# learning policy +lr_config = dict( + policy='CosineAnnealing', + min_lr=0, + warmup='linear', + warmup_by_epoch=True, + warmup_iters=40) +total_epochs = 180 + +work_dir = './work_dirs/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb' # noqa: E501 diff --git a/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py b/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py index a995cb5ca6..db97c917f5 100644 --- a/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py +++ b/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py @@ -1,7 +1,15 @@ -_base_ = ['../../_base_/models/csn_ig65m_pretrained.py'] +_base_ = [ + '../../_base_/models/ircsn_r152.py', '../../_base_/default_runtime.py' +] # model settings -model = dict(backbone=dict(norm_eval=True, bn_frozen=True)) +model = dict( + backbone=dict( + norm_eval=True, + bn_frozen=True, + pretrained= # noqa: E251 + 'https://download.openmmlab.com/mmaction/recognition/csn/ircsn_from_scratch_r152_ig65m_20200807-771c4135.pth' # noqa: E501 + )) # dataset settings dataset_type = 'RawframeDataset' data_root = 'data/kinetics400/rawframes_train' @@ -33,7 +41,6 @@ dict(type='FrameSelector'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), @@ -49,7 +56,6 @@ dict(type='FrameSelector'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), @@ -73,6 +79,9 @@ ann_file=ann_file_val, data_prefix=data_root_val, pipeline=test_pipeline)) +evaluation = dict( + interval=5, metrics=['top_k_accuracy', 'mean_class_accuracy']) + # optimizer optimizer = dict( type='SGD', lr=0.000125, momentum=0.9, @@ -87,18 +96,6 @@ warmup_by_epoch=True, warmup_iters=16) total_epochs = 58 -checkpoint_config = dict(interval=2) -evaluation = dict( - interval=5, metrics=['top_k_accuracy', 'mean_class_accuracy']) -log_config = dict( - interval=20, - hooks=[dict(type='TextLoggerHook'), - dict(type='TensorboardLoggerHook')]) -# runtime settings -dist_params = dict(backend='nccl') -log_level = 'INFO' + work_dir = './work_dirs/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb' # noqa: E501 -load_from = None -resume_from = None -workflow = [('train', 1)] find_unused_parameters = True diff --git a/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py b/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py new file mode 100644 index 0000000000..d6110a4a83 --- /dev/null +++ b/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py @@ -0,0 +1,102 @@ +_base_ = [ + '../../_base_/models/ircsn_r152.py', '../../_base_/default_runtime.py' +] + +# model settings +model = dict( + backbone=dict( + depth=50, + norm_eval=True, + 
bn_frozen=True, + pretrained= # noqa: E251 + 'https://download.openmmlab.com/mmaction/recognition/csn/ircsn_from_scratch_r50_ig65m_20210617-ce545a37.pth' # noqa: E501 + )) +# dataset settings +dataset_type = 'RawframeDataset' +data_root = 'data/kinetics400/rawframes_train' +data_root_val = 'data/kinetics400/rawframes_val' +ann_file_train = 'data/kinetics400/kinetics400_train_list_rawframes.txt' +ann_file_val = 'data/kinetics400/kinetics400_val_list_rawframes.txt' +ann_file_test = 'data/kinetics400/kinetics400_val_list_rawframes.txt' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) +train_pipeline = [ + dict(type='SampleFrames', clip_len=32, frame_interval=2, num_clips=1), + dict(type='FrameSelector'), + dict(type='Resize', scale=(-1, 256)), + dict(type='RandomResizedCrop'), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=32, + frame_interval=2, + num_clips=1, + test_mode=True), + dict(type='FrameSelector'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=32, + frame_interval=2, + num_clips=10, + test_mode=True), + dict(type='FrameSelector'), + dict(type='Resize', scale=(-1, 256)), + dict(type='ThreeCrop', crop_size=256), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=3, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=test_pipeline)) +evaluation = dict( + interval=5, metrics=['top_k_accuracy', 'mean_class_accuracy']) + +# optimizer +optimizer = dict( + type='SGD', lr=0.000125, momentum=0.9, + weight_decay=0.0001) # this lr is used for 8 gpus +optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + step=[32, 48], + warmup='linear', + warmup_ratio=0.1, + warmup_by_epoch=True, + warmup_iters=16) +total_epochs = 58 + +work_dir = './work_dirs/ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb' # noqa: E501 +find_unused_parameters = True diff --git a/configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py b/configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py index fe41038755..67b371233f 100644 --- a/configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py +++ b/configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py @@ -1,4 +1,13 @@ -_base_ = ['../../_base_/models/csn_ig65m_pretrained.py'] +_base_ = [ + '../../_base_/models/ircsn_r152.py', '../../_base_/default_runtime.py' +] + +model = dict( + 
backbone=dict( + pretrained= # noqa: E251 + 'https://download.openmmlab.com/mmaction/recognition/csn/ircsn_from_scratch_r152_ig65m_20200807-771c4135.pth' # noqa: E501 + )) + # dataset settings dataset_type = 'RawframeDataset' data_root = 'data/kinetics400/rawframes_train' @@ -30,7 +39,6 @@ dict(type='FrameSelector'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), @@ -46,7 +54,6 @@ dict(type='FrameSelector'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), @@ -70,6 +77,9 @@ ann_file=ann_file_val, data_prefix=data_root_val, pipeline=test_pipeline)) +evaluation = dict( + interval=5, metrics=['top_k_accuracy', 'mean_class_accuracy']) + # optimizer optimizer = dict( type='SGD', lr=0.000125, momentum=0.9, @@ -84,17 +94,6 @@ warmup_by_epoch=True, warmup_iters=16) total_epochs = 58 -checkpoint_config = dict(interval=2) -evaluation = dict( - interval=5, metrics=['top_k_accuracy', 'mean_class_accuracy']) -log_config = dict( - interval=20, - hooks=[dict(type='TextLoggerHook'), - dict(type='TensorboardLoggerHook')]) -# runtime settings -dist_params = dict(backend='nccl') -log_level = 'INFO' + work_dir = './work_dirs/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb' -load_from = None -resume_from = None -workflow = [('train', 1)] +find_unused_parameters = True diff --git a/configs/recognition/csn/ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py b/configs/recognition/csn/ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py new file mode 100644 index 0000000000..d0803f68ab --- /dev/null +++ b/configs/recognition/csn/ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py @@ -0,0 +1,15 @@ +_base_ = [ + './ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py' +] + +# model settings +model = dict( + backbone=dict( + norm_eval=True, + bn_frozen=True, + bottleneck_mode='ir', + pretrained= # noqa: E251 + 'https://download.openmmlab.com/mmaction/recognition/csn/ircsn_from_scratch_r152_sports1m_20210617-bcc9c0dd.pth' # noqa: E501 + )) + +work_dir = './work_dirs/ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb' # noqa: E501 diff --git a/configs/recognition/csn/metafile.yml b/configs/recognition/csn/metafile.yml index 9dd6136cc2..31dd84f33f 100644 --- a/configs/recognition/csn/metafile.yml +++ b/configs/recognition/csn/metafile.yml @@ -46,3 +46,122 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log Weights: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb_20200812-9037a758.pth +- Config: configs/recognition/csn/ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py + In Collection: CSN + Metadata: + Architecture: ResNet152 + Epochs: 180 + FLOPs: 110337228800 + Parameters: 33016592 + Pretrained: None + 
Resolution: short-side 320 + Training Data: Kinetics-400 + Modality: RGB + Name: ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 77.8 + top5 acc: 92.8 + Task: Action Recognition + Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_from_scratch_r152_32x2x1_180e_kinetics400_rgb_20210617-d565828d.pth +- Config: configs/recognition/csn/ipcsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py + In Collection: CSN + Metadata: + Architecture: ResNet152 + Epochs: 58 + FLOPs: 110337228800 + Parameters: 33016592 + Pretrained: IG65M + Resolution: short-side 320 + Training Data: Kinetics-400 + Modality: RGB + Name: ipcsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 82.5 + top5 acc: 95.3 + Task: Action Recognition + Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-c3be9793.pth + inference_time(video/s): x +- Config: configs/recognition/csn/ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py + In Collection: CSN + Metadata: + Architecture: ResNet152 + Epochs: 58 + FLOPs: 110337228800 + Parameters: 33016592 + Pretrained: Sports1M + Resolution: short-side 320 + Training Data: Kinetics-400 + Modality: RGB + Name: ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 78.8 + top5 acc: 93.5 + Task: Action Recognition + Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_sports1m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-3367437a.pth + inference_time(video/s): x +- Config: configs/recognition/csn/ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py + In Collection: CSN + Metadata: + Architecture: ResNet152 + Epochs: 180 + FLOPs: 98096676864 + Parameters: 29703568 + Pretrained: None + Resolution: short-side 320 + Training Data: Kinetics-400 + Modality: RGB + Name: ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 76.5 + top5 acc: 92.1 + Task: Action Recognition + Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_from_scratch_r152_32x2x1_180e_kinetics400_rgb_20210617-5c933ae1.pth + inference_time(video/s): x +- Config: configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py + In Collection: CSN + Metadata: + Architecture: ResNet50 + Epochs: 58 + FLOPs: 56209211392 + Parameters: 13131152 + Pretrained: IG65M + Resolution: short-side 320 + Training Data: Kinetics-400 + Modality: RGB + Name: ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 79.0 + top5 acc: 94.2 + Task: Action Recognition + Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_ig65m_pretrained_r50_32x2x1_58e_kinetics400_rgb_20210617-86d33018.pth + inference_time(video/s): x +- Config: configs/recognition/csn/ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py + In Collection: CSN + Metadata: + Architecture: ResNet152 + Epochs: 58 + FLOPs: 98096676864 + Parameters: 29703568 + Pretrained: Sports1M + Resolution: short-side 320 + Training Data: Kinetics-400 + Modality: RGB + Name: ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py + Results: + - Dataset: Kinetics-400 + Metrics: + top1 acc: 78.2 + top5 acc: 
93.0 + Task: Action Recognition + Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_sports1m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-b9b10241.pth + inference_time(video/s): x diff --git a/mmaction/models/backbones/resnet3d_csn.py b/mmaction/models/backbones/resnet3d_csn.py index 5d041d5450..aa190a2888 100644 --- a/mmaction/models/backbones/resnet3d_csn.py +++ b/mmaction/models/backbones/resnet3d_csn.py @@ -43,7 +43,15 @@ def __init__(self, conv2 = [] if self.bottleneck_mode == 'ip': conv2.append( - nn.Conv3d(planes, planes, kernel_size=1, stride=1, bias=False)) + ConvModule( + planes, + planes, + 1, + stride=1, + bias=False, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=None)) conv2_kernel_size = self.conv2.conv.kernel_size conv2_stride = self.conv2.conv.stride conv2_padding = self.conv2.conv.padding From 647ba48b45ea3a4a29f5305666c0077151222ea8 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Thu, 1 Jul 2021 15:35:54 +0800 Subject: [PATCH 176/414] [ModeZoo] Update Modelzoo (#938) * update configs * update * fix * update links * update metafile * update metafile * update metafile flops * update metafile --- configs/recognition/slowfast/README.md | 6 + configs/recognition/slowfast/metafile.yml | 23 ++++ .../slowfast_r50_16x8x1_22e_sthv1_rgb.py | 107 +++++++++++++++++ configs/recognition/slowonly/README.md | 24 +++- configs/recognition/slowonly/metafile.yml | 110 ++++++++++++++++++ ...net_pretrained_r50_8x4x1_64e_hmdb51_rgb.py | 93 +++++++++++++++ ...enet_pretrained_r50_8x4x1_64e_sthv1_rgb.py | 100 ++++++++++++++++ ...enet_pretrained_r50_8x4x1_64e_sthv2_rgb.py | 97 +++++++++++++++ ...net_pretrained_r50_8x4x1_64e_ucf101_rgb.py | 93 +++++++++++++++ ..._pretrained_r50_4x16x1_120e_gym99_flow.py} | 0 ...400_pretrained_r50_8x4x1_40e_hmdb51_rgb.py | 81 +++++++++++++ ...400_pretrained_r50_8x4x1_40e_ucf101_rgb.py | 97 +++++++++++++++ configs/recognition/tanet/README.md | 9 +- configs/recognition/tanet/metafile.yml | 50 ++++++++ .../tanet/tanet_r50_1x1x16_50e_sthv1_rgb.py | 100 ++++++++++++++++ .../tanet/tanet_r50_1x1x8_50e_sthv1_rgb.py | 98 ++++++++++++++++ configs/recognition/tsm/README.md | 16 ++- configs/recognition/tsm/metafile.yml | 92 +++++++++++++++ ...00_pretrained_r50_1x1x16_25e_hmdb51_rgb.py | 100 ++++++++++++++++ ...00_pretrained_r50_1x1x16_25e_ucf101_rgb.py | 100 ++++++++++++++++ ...400_pretrained_r50_1x1x8_25e_hmdb51_rgb.py | 100 ++++++++++++++++ ...400_pretrained_r50_1x1x8_25e_ucf101_rgb.py | 100 ++++++++++++++++ .../tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb.py | 8 +- ...sn_r50_1x1x8_50e_hmdb51_kinetics400_rgb.py | 8 +- .../tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb.py | 8 +- mmaction/models/backbones/resnet3d.py | 5 +- mmaction/models/common/tam.py | 13 --- 27 files changed, 1611 insertions(+), 27 deletions(-) create mode 100644 configs/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb.py create mode 100644 configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb.py create mode 100644 configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.py create mode 100644 configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv2_rgb.py create mode 100644 configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb.py rename configs/recognition/slowonly/{slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow.py => slowonly_k400_pretrained_r50_4x16x1_120e_gym99_flow.py} (100%) create mode 100644 
configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb.py create mode 100644 configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb.py create mode 100644 configs/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb.py create mode 100644 configs/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb.py create mode 100644 configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb.py create mode 100644 configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb.py create mode 100644 configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb.py create mode 100644 configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb.py diff --git a/configs/recognition/slowfast/README.md b/configs/recognition/slowfast/README.md index 8091b57598..126d68ed44 100644 --- a/configs/recognition/slowfast/README.md +++ b/configs/recognition/slowfast/README.md @@ -29,6 +29,12 @@ |[slowfast_r101_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb.py) |short-side 256|8x4| ResNet101 |None|77.90|93.51||25994| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/slowfast_r101_8x8x1_256e_kinetics400_rgb_20210218-0dd54025.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log.json)| |[slowfast_r152_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r152_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 256|8x1| ResNet152 + ResNet50 |None|77.13|93.20||10077| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/slowfast_r152_4x16x1_256e_kinetics400_rgb_20210122-bdeb6b87.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log.json)| +### Something-Something V1 + +|config | resolution | gpus | backbone |pretrain| top1 acc| top5 acc | inference_time(video/s) | gpu_mem(M) | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[slowfast_r50_16x8x1_22e_sthv1_rgb](/configs/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb.py)|height 100|8|ResNet50|Kinetics400|49.24|78.79|x|9293|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/slowfast_r50_16x8x1_22e_sthv1_rgb_20210630-53355c16.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/20210606_225114.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/20210606_225114.log.json)| + Notes: 1. The **gpus** indicates the number of gpu we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. 
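The resample_rate (tau), speed_ratio (alpha) and channel_ratio (beta_inv) comments in the SlowFast configs describe how a single sampled clip is split between the two pathways. A rough sketch of the temporal split, assuming plain PyTorch; split_pathways is an illustrative helper, not the library API.

import torch

def split_pathways(frames, resample_rate=4, speed_ratio=4):
    """frames: (N, C, T, H, W) clip already sampled by the data pipeline."""
    # slow pathway: temporal stride tau
    slow = frames[:, :, ::resample_rate]
    # fast pathway: alpha times more frames than the slow pathway
    fast = frames[:, :, ::max(resample_rate // speed_ratio, 1)]
    return slow, fast

clip = torch.randn(1, 3, 64, 224, 224)  # clip_len=64 as in the SthV1 config
slow, fast = split_pathways(clip)
print(slow.shape[2], fast.shape[2])  # 16 64

With the values used in the Something-Something V1 config below (resample_rate=4, speed_ratio=4, clip_len=64), the slow pathway sees 16 frames while the fast pathway processes all 64, and channel_ratio=8 keeps the fast pathway 8 times thinner in channels.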
diff --git a/configs/recognition/slowfast/metafile.yml b/configs/recognition/slowfast/metafile.yml index 5821af9ce2..475e246294 100644 --- a/configs/recognition/slowfast/metafile.yml +++ b/configs/recognition/slowfast/metafile.yml @@ -178,3 +178,26 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/slowfast_r152_4x16x1_256e_kinetics400_rgb_20210122-bdeb6b87.pth +- Config: configs/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb.py + In Collection: SlowFast + Metadata: + Architecture: ResNet50 + Epochs: 22 + FLOPs: 132442627584 + Parameters: 34044630 + Batch Size: 4 + Training Data: SthV1 + Training Resources: 8 GPUs + Pretrained: Kinetics400 + Resolution: height 100 + Modality: RGB + Name: slowfast_r50_16x8x1_22e_sthv1_rgb + Results: + - Dataset: SthV1 + Metrics: + top1 acc: 49.24 + top5 acc: 78.79 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/20210606_225114.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/20210606_225114.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/slowfast_r50_16x8x1_22e_sthv1_rgb_20210630-53355c16.pth diff --git a/configs/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb.py b/configs/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb.py new file mode 100644 index 0000000000..d97cc8f613 --- /dev/null +++ b/configs/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb.py @@ -0,0 +1,107 @@ +_base_ = [ + '../../_base_/models/slowfast_r50.py', '../../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + resample_rate=4, # tau + speed_ratio=4, # alpha + channel_ratio=8, # beta_inv + slow_pathway=dict(fusion_kernel=7)), + cls_head=dict(num_classes=174)) + +# dataset settings +dataset_type = 'RawframeDataset' +data_root = 'data/sthv1/rawframes' +data_root_val = 'data/sthv1/rawframes' +ann_file_train = 'data/sthv1/sthv1_train_list_rawframes.txt' +ann_file_val = 'data/sthv1/sthv1_val_list_rawframes.txt' +ann_file_test = 'data/sthv1/sthv1_val_list_rawframes.txt' + +sthv1_flip_label_map = {2: 4, 4: 2, 30: 41, 41: 30, 52: 66, 66: 52} +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) + +train_pipeline = [ + dict(type='SampleFrames', clip_len=64, frame_interval=2, num_clips=1), + dict(type='RawFrameDecode'), + dict(type='RandomResizedCrop'), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5, flip_label_map=sthv1_flip_label_map), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=64, + frame_interval=2, + num_clips=1, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=256), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 
'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=64, + frame_interval=2, + num_clips=1, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=256), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] + +data = dict( + videos_per_gpu=4, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + pipeline=test_pipeline)) + +evaluation = dict( + interval=1, metrics=['top_k_accuracy'], start=18, gpu_collect=True) + +# optimizer +optimizer = dict( + type='SGD', lr=0.06, momentum=0.9, + weight_decay=0.000001) # this lr is used for 8 gpus +optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + step=[14, 18], + warmup='linear', + warmup_by_epoch=False, + warmup_iters=16343 // 32) +total_epochs = 22 + +# runtime settings +checkpoint_config = dict(interval=1) +work_dir = './work_dirs/slowfast_r50_16x8x1_22e_sthv1_rgb' +load_from = 'https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/slowfast_r50_8x8x1_256e_kinetics400_rgb_20200716-73547d2b.pth' # noqa: E501 +find_unused_parameters = False diff --git a/configs/recognition/slowonly/README.md b/configs/recognition/slowonly/README.md index e3aabd615c..3852a04a46 100644 --- a/configs/recognition/slowonly/README.md +++ b/configs/recognition/slowonly/README.md @@ -68,7 +68,7 @@ In data benchmark, we compare two different data preprocessing methods: (1) Resi | config | resolution | gpus | backbone | pretrain | top1 acc | mean class acc | ckpt | log | json | | :----------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | | [slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb.py) | short-side 256 | 8x2 | ResNet50 | ImageNet | 79.3 | 70.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111-a9c34b54.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111.json) | -| [slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow](/configs/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow.py) | short-side 256 | 8x2 | ResNet50 | Kinetics | 80.3 | 71.0 | 
[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111-66ecdb3c.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.json) | +| [slowonly_k400_pretrained_r50_4x16x1_120e_gym99_flow](/configs/recognition/slowonly/slowonly_k400_pretrained_r50_4x16x1_120e_gym99_flow.py) | short-side 256 | 8x2 | ResNet50 | Kinetics | 80.3 | 71.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111-66ecdb3c.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.json) | | 1: 1 Fusion | | | | | 83.7 | 74.8 | | | | ### Jester @@ -77,6 +77,26 @@ In data benchmark, we compare two different data preprocessing methods: (1) Resi | :----------------------------------------------------------- | :--------: | :--: | :------: | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | | [slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 97.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb-b56a5389.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.json) | +### HMDB51 + +|config | gpus | backbone | pretrain | top1 acc| top5 acc | gpu_mem(M) | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb.py)|8|ResNet50|ImageNet|37.52|71.50|5812|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb_20210630-16faeb6a.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb/20210605_185256.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb/20210605_185256.log.json)| 
+|[slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb](/configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb.py)|8|ResNet50|Kinetics400|65.95|91.05|5812|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb_20210630-cee5f725.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb/20210606_010153.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb/20210606_010153.log.json)| + +### UCF101 + +|config | gpus | backbone | pretrain | top1 acc| top5 acc | gpu_mem(M) | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb.py)|8|ResNet50|ImageNet|71.35|89.35|5812|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb_20210630-181e1661.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb/20210605_213503.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb/20210605_213503.log.json)| +|[slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb](/configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb.py)|8|ResNet50|Kinetics400|92.78|99.42|5812|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb_20210630-ee8c850f.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/20210606_010231.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/20210606_010231.log.json)| + +### Something-Something V1 + +|config | gpus | backbone | pretrain | top1 acc| top5 acc | gpu_mem(M) | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.py)|8|ResNet50|ImageNet|46.63|77.19|7759|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb_20210630-807a9a9a.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/20210605_235410.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/20210605_235410.log.json)| + Notes: 1. The **gpus** indicates the number of gpu we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. @@ -85,7 +105,7 @@ Notes: 2. The **inference_time** is got by this [benchmark script](/tools/analysis/benchmark.py), where we use the sampling frames strategy of the test setting and only care about the model inference time, not including the IO time and pre-processing time. For each setting, we use 1 gpu and set batch size (videos per gpu) to 1 to calculate the inference time. 3.
The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. -For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). +For more details on data preparation, you can refer to corresponding parts in [Data Preparation](/docs/data_preparation.md). ## Train diff --git a/configs/recognition/slowonly/metafile.yml b/configs/recognition/slowonly/metafile.yml index da79055056..80a66339ef 100644 --- a/configs/recognition/slowonly/metafile.yml +++ b/configs/recognition/slowonly/metafile.yml @@ -410,3 +410,113 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.log Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb-b56a5389.pth +- Config: configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb.py + In Collection: SlowOnly + Metadata: + Architecture: ResNet50 + Epochs: 64 + FLOPs: 54859765760 + Parameters: 31738995 + Batch Size: 8 + Training Data: HMDB51 + Training Resources: 8 GPUs + Pretrained: ImageNet + Modality: RGB + Name: slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb + Results: + - Dataset: HMDB51 + Metrics: + top1 acc: 37.52 + top5 acc: 71.50 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb/20210605_185256.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb/20210605_185256.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb_20210630-16faeb6a.pth +- Config: configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb.py + In Collection: SlowOnly + Metadata: + Architecture: ResNet50 + Epochs: 40 + FLOPs: 54859765760 + Parameters: 31738995 + Batch Size: 8 + Training Data: HMDB51 + Training Resources: 8 GPUs + Pretrained: Kinetics400 + Modality: RGB + Name: slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb + Results: + - Dataset: HMDB51 + Metrics: + top1 acc: 65.95 + top5 acc: 91.05 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb/20210606_010153.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb/20210606_010153.log + Weights:
https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb_20210630-cee5f725.pth +- Config: configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb.py + In Collection: SlowOnly + Metadata: + Architecture: ResNet50 + Epochs: 64 + FLOPs: 54859868160 + Parameters: 31841445 + Batch Size: 8 + Training Data: UCF101 + Training Resources: 8 GPUs + Pretrained: ImageNet + Modality: RGB + Name: slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb + Results: + - Dataset: UCF101 + Metrics: + top1 acc: 71.35 + top5 acc: 89.35 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb/20210605_213503.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb/20210605_213503.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb_20210630-181e1661.pth +- Config: configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb.py + In Collection: SlowOnly + Metadata: + Architecture: ResNet50 + Epochs: 40 + FLOPs: 54859868160 + Parameters: 31841445 + Batch Size: 8 + Training Data: UCF101 + Training Resources: 8 GPUs + Pretrained: Kinetics400 + Modality: RGB + Name: slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb + Results: + - Dataset: UCF101 + Metrics: + top1 acc: 92.78 + top5 acc: 99.42 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/20210606_010231.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/20210606_010231.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb_20210630-ee8c850f.pth +- Config: configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.py + In Collection: SlowOnly + Metadata: + Architecture: ResNet50 + Epochs: 64 + FLOPs: 53907910656 + Parameters: 31991022 + Batch Size: 8 + Training Data: SthV1 + Training Resources: 8 GPUs + Pretrained: ImageNet + Modality: RGB + Name: slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb + Results: + - Dataset: SthV1 + Metrics: + top1 acc: 46.63 + top5 acc: 77.19 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/20210605_235410.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/20210605_235410.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb_20210630-807a9a9a.pth diff --git a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb.py b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb.py new file mode 100644 index 0000000000..1a95cc0155 --- /dev/null +++ b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb.py @@ -0,0 +1,93 @@ +_base_ = [ + 
'../../_base_/models/slowonly_r50.py', + '../../_base_/schedules/sgd_150e_warmup.py', + '../../_base_/default_runtime.py' +] + +# model settings +model = dict(cls_head=dict(num_classes=51)) + +# dataset settings +split = 1 +dataset_type = 'RawframeDataset' +data_root = 'data/hmdb51/rawframes' +data_root_val = 'data/hmdb51/rawframes' +ann_file_train = f'data/hmdb51/hmdb51_train_split_{split}_rawframes.txt' +ann_file_val = f'data/hmdb51/hmdb51_val_split_{split}_rawframes.txt' +ann_file_test = f'data/hmdb51/hmdb51_val_split_{split}_rawframes.txt' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) + +train_pipeline = [ + dict(type='SampleFrames', clip_len=8, frame_interval=4, num_clips=1), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='RandomResizedCrop'), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=8, + frame_interval=4, + num_clips=1, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=8, + frame_interval=4, + num_clips=10, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='ThreeCrop', crop_size=256), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] + +data = dict( + videos_per_gpu=8, + workers_per_gpu=4, + test_dataloader=dict(videos_per_gpu=2), + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + pipeline=test_pipeline)) +evaluation = dict( + interval=1, metrics=['top_k_accuracy', 'mean_class_accuracy']) + +# optimizer +optimizer = dict(lr=0.1) # this lr is used for 8 gpus +# learning policy +lr_config = dict(policy='CosineAnnealing', min_lr=0, by_epoch=False) +total_epochs = 64 + +# runtime settings +work_dir = './work_dirs/slowonly_r50_8x4x1_64e_hmdb51_rgb' diff --git a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.py b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.py new file mode 100644 index 0000000000..588c6b7803 --- /dev/null +++ b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.py @@ -0,0 +1,100 @@ +_base_ = [ + '../../_base_/models/slowonly_r50.py', + '../../_base_/schedules/sgd_150e_warmup.py', + '../../_base_/default_runtime.py' +] + +# model settings +model = dict(backbone=dict(with_pool1=False), cls_head=dict(num_classes=174)) + +# dataset settings +dataset_type = 'RawframeDataset' +data_root = 'data/sthv1/rawframes' +data_root_val = 'data/sthv1/rawframes' +ann_file_train = 
'data/sthv1/sthv1_train_list_rawframes.txt' +ann_file_val = 'data/sthv1/sthv1_val_list_rawframes.txt' +ann_file_test = 'data/sthv1/sthv1_val_list_rawframes.txt' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) + +train_pipeline = [ + dict(type='SampleFrames', clip_len=8, frame_interval=4, num_clips=1), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 128)), + dict(type='RandomResizedCrop'), + dict(type='Resize', scale=(112, 112), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=8, + frame_interval=4, + num_clips=1, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 128)), + dict(type='CenterCrop', crop_size=112), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=8, + frame_interval=4, + num_clips=10, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 128)), + dict(type='ThreeCrop', crop_size=128), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] + +data = dict( + videos_per_gpu=8, + workers_per_gpu=4, + test_dataloader=dict(videos_per_gpu=2), + train=dict( + type=dataset_type, + ann_file=ann_file_train, + filename_tmpl='{:05}.jpg', + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + filename_tmpl='{:05}.jpg', + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + filename_tmpl='{:05}.jpg', + data_prefix=data_root_val, + pipeline=test_pipeline)) +evaluation = dict( + interval=1, metrics=['top_k_accuracy', 'mean_class_accuracy']) + +# optimizer +optimizer = dict(lr=0.1) # this lr is used for 8 gpus +# learning policy +lr_config = dict( + policy='CosineAnnealing', + min_lr=0, + warmup='linear', + warmup_by_epoch=True, + warmup_iters=10) +total_epochs = 64 + +# runtime settings +work_dir = './work_dirs/slowonly_r50_8x4x1_64e_sthv1_rgb' diff --git a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv2_rgb.py b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv2_rgb.py new file mode 100644 index 0000000000..db92d92e67 --- /dev/null +++ b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv2_rgb.py @@ -0,0 +1,97 @@ +_base_ = [ + '../../_base_/models/slowonly_r50.py', + '../../_base_/schedules/sgd_150e_warmup.py', + '../../_base_/default_runtime.py' +] + +# model settings +model = dict(backbone=dict(with_pool1=False), cls_head=dict(num_classes=174)) + +# dataset settings +dataset_type = 'RawframeDataset' +data_root = 'data/sthv2/rawframes' +data_root_val = 'data/sthv2/rawframes' +ann_file_train = 'data/sthv2/sthv2_train_list_rawframes.txt' +ann_file_val = 'data/sthv2/sthv2_val_list_rawframes.txt' +ann_file_test = 'data/sthv2/sthv2_val_list_rawframes.txt' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) + 
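+# The mean/std above are the standard ImageNet channel statistics, kept in
+# RGB order since to_bgr=False leaves the decoded frames as RGB. The
+# 'Normalize' step in the pipelines below maps each channel value x to
+# (x - mean) / std, e.g. a hypothetical red value of 124.0 becomes
+# (124.0 - 123.675) / 58.395 ≈ 0.0056.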
+train_pipeline = [ + dict(type='SampleFrames', clip_len=8, frame_interval=4, num_clips=1), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 128)), + dict(type='RandomResizedCrop'), + dict(type='Resize', scale=(112, 112), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=8, + frame_interval=4, + num_clips=1, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 128)), + dict(type='CenterCrop', crop_size=112), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=8, + frame_interval=4, + num_clips=10, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 128)), + dict(type='ThreeCrop', crop_size=128), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] + +data = dict( + videos_per_gpu=8, + workers_per_gpu=4, + test_dataloader=dict(videos_per_gpu=2), + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + pipeline=test_pipeline)) +evaluation = dict( + interval=1, metrics=['top_k_accuracy', 'mean_class_accuracy']) + +# optimizer +optimizer = dict(lr=0.1) # this lr is used for 8 gpus +# learning policy +lr_config = dict( + policy='CosineAnnealing', + min_lr=0, + warmup='linear', + warmup_by_epoch=True, + warmup_iters=10) +total_epochs = 64 + +# runtime settings +work_dir = './work_dirs/slowonly_r50_8x4x1_64e_sthv2_rgb' diff --git a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb.py b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb.py new file mode 100644 index 0000000000..3dd5808b34 --- /dev/null +++ b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb.py @@ -0,0 +1,93 @@ +_base_ = [ + '../../_base_/models/slowonly_r50.py', + '../../_base_/schedules/sgd_150e_warmup.py', + '../../_base_/default_runtime.py' +] + +# model settings +model = dict(cls_head=dict(num_classes=101)) + +# dataset settings +dataset_type = 'RawframeDataset' +data_root = 'data/ucf101/rawframes/' +data_root_val = 'data/ucf101/rawframes/' +split = 1 # official train/test splits. 
valid numbers: 1, 2, 3 +ann_file_train = f'data/ucf101/ucf101_train_split_{split}_rawframes.txt' +ann_file_val = f'data/ucf101/ucf101_val_split_{split}_rawframes.txt' +ann_file_test = f'data/ucf101/ucf101_val_split_{split}_rawframes.txt' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) + +train_pipeline = [ + dict(type='SampleFrames', clip_len=8, frame_interval=4, num_clips=1), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='RandomResizedCrop'), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=8, + frame_interval=4, + num_clips=1, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=8, + frame_interval=4, + num_clips=10, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='ThreeCrop', crop_size=256), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] + +data = dict( + videos_per_gpu=8, + workers_per_gpu=4, + test_dataloader=dict(videos_per_gpu=2), + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + pipeline=test_pipeline)) +evaluation = dict( + interval=1, metrics=['top_k_accuracy', 'mean_class_accuracy']) + +# optimizer +optimizer = dict(lr=0.1) # this lr is used for 8 gpus +# learning policy +lr_config = dict(policy='CosineAnnealing', min_lr=0, by_epoch=False) +total_epochs = 64 + +# runtime settings +work_dir = './work_dirs/slowonly_r50_8x4x1_64e_ucf101_rgb' diff --git a/configs/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow.py b/configs/recognition/slowonly/slowonly_k400_pretrained_r50_4x16x1_120e_gym99_flow.py similarity index 100% rename from configs/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow.py rename to configs/recognition/slowonly/slowonly_k400_pretrained_r50_4x16x1_120e_gym99_flow.py diff --git a/configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb.py b/configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb.py new file mode 100644 index 0000000000..b59a759a9f --- /dev/null +++ b/configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb.py @@ -0,0 +1,81 @@ +_base_ = ['./slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb.py'] + +# model settings +model = dict(cls_head=dict(num_classes=51)) + +# dataset settings +split = 1 +dataset_type = 'RawframeDataset' +data_root = 'data/hmdb51/rawframes' +data_root_val = 'data/hmdb51/rawframes' +ann_file_train = 
f'data/hmdb51/hmdb51_train_split_{split}_rawframes.txt' +ann_file_val = f'data/hmdb51/hmdb51_val_split_{split}_rawframes.txt' +ann_file_test = f'data/hmdb51/hmdb51_val_split_{split}_rawframes.txt' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) + +train_pipeline = [ + dict(type='SampleFrames', clip_len=8, frame_interval=4, num_clips=1), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='RandomResizedCrop'), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=8, + frame_interval=4, + num_clips=1, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=8, + frame_interval=4, + num_clips=10, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='ThreeCrop', crop_size=256), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] + +data = dict( + videos_per_gpu=8, + workers_per_gpu=4, + test_dataloader=dict(videos_per_gpu=2), + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + pipeline=test_pipeline)) + +# runtime settings +work_dir = './work_dirs/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb' diff --git a/configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb.py b/configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb.py new file mode 100644 index 0000000000..da2341030c --- /dev/null +++ b/configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb.py @@ -0,0 +1,97 @@ +_base_ = [ + '../../_base_/models/slowonly_r50.py', '../../_base_/schedules/sgd_50e.py', + '../../_base_/default_runtime.py' +] + +# model settings +model = dict(cls_head=dict(num_classes=101)) + +# dataset settings +dataset_type = 'RawframeDataset' +data_root = 'data/ucf101/rawframes/' +data_root_val = 'data/ucf101/rawframes/' +split = 1 # official train/test splits. 
valid numbers: 1, 2, 3 +ann_file_train = f'data/ucf101/ucf101_train_split_{split}_rawframes.txt' +ann_file_val = f'data/ucf101/ucf101_val_split_{split}_rawframes.txt' +ann_file_test = f'data/ucf101/ucf101_val_split_{split}_rawframes.txt' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) + +train_pipeline = [ + dict(type='SampleFrames', clip_len=8, frame_interval=4, num_clips=1), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='RandomResizedCrop'), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=8, + frame_interval=4, + num_clips=1, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=8, + frame_interval=4, + num_clips=10, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='ThreeCrop', crop_size=256), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] + +data = dict( + videos_per_gpu=8, + workers_per_gpu=4, + test_dataloader=dict(videos_per_gpu=2), + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + pipeline=test_pipeline)) +evaluation = dict( + interval=1, metrics=['top_k_accuracy', 'mean_class_accuracy']) + +# optimizer +optimizer = dict( + lr=0.001, # this lr is used for 8 gpus +) +optimizer_config = dict(grad_clip=dict(max_norm=20, norm_type=2)) +# learning policy +lr_config = dict(policy='step', step=[15, 30]) +total_epochs = 40 + +# runtime settings +work_dir = './work_dirs/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb' +load_from = 'https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/slowonly_r50_8x8x1_256e_kinetics400_rgb_20200703-a79c555a.pth' # noqa: E501 +find_unused_parameters = False diff --git a/configs/recognition/tanet/README.md b/configs/recognition/tanet/README.md index 38a23d6785..90a9d2eab4 100644 --- a/configs/recognition/tanet/README.md +++ b/configs/recognition/tanet/README.md @@ -21,6 +21,13 @@ |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| |[tanet_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb.py) |short-side 320|8| TANet | ImageNet |76.28 | 92.60 |[76.22](https://github.com/liu-zhy/temporal-adaptive-module/blob/master/scripts/test_tam_kinetics_rgb_8f.sh)|[92.53](https://github.com/liu-zhy/temporal-adaptive-module/blob/master/scripts/test_tam_kinetics_rgb_8f.sh) | x | 7124 | 
[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219-032c8e94.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.log)| [json](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.json)| +### Something-Something V1 + +|config | resolution | gpus | backbone| pretrain | top1 acc (efficient/accurate)| top5 acc (efficient/accurate)| gpu_mem(M) | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[tanet_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb.py)|height 100|8|TANet|ImageNet|47.45/49.69|76.00/77.62|7127|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/tanet_r50_1x1x8_50e_sthv1_rgb_20210630-f4a48609.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log)|[json](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log.json)| +|[tanet_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb.py)|height 100|8|TANet|ImageNet|47.73/50.41|77.31/78.47|7127|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb_20210630-7c19303c.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/20210607_155335.log)|[json](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/20210607_155335.log.json)| + Notes: 1. The **gpus** indicates the number of gpu we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default (see the learning-rate scaling sketch below). @@ -30,7 +37,7 @@ Notes: 2. The **inference_time** is got by this [benchmark script](/tools/analysis/benchmark.py), where we use the sampling frames strategy of the test setting and only care about the model inference time, not including the IO time and pre-processing time. For each setting, we use 1 gpu and set batch size (videos per gpu) to 1 to calculate the inference time. 3. The values in columns named after "reference" are the results got by testing on our dataset, using the checkpoints provided by the author with same model settings. The checkpoints for reference repo can be downloaded [here](https://drive.google.com/drive/folders/1sFfmP3yrfc7IzRshEELOby7-aEoymIFL?usp=sharing). 4. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. -For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). +For more details on data preparation, you can refer to corresponding parts in [Data Preparation](/docs/data_preparation.md).
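+When training with a different number of GPUs or videos per GPU, the learning rate usually needs to be rescaled. A minimal sketch of the linear scaling rule, with hypothetical base values (the actual lr of each config is stated in its file, e.g. the "this lr is used for 8 gpus" comments):
+
+```python
+# Linear scaling rule: new_lr = base_lr * (new total batch / base total batch).
+base_lr = 0.01       # hypothetical lr tuned for 8 GPUs x 8 videos per GPU
+base_batch = 8 * 8   # 64 videos per step
+new_batch = 4 * 8    # e.g. training on 4 GPUs instead
+new_lr = base_lr * new_batch / base_batch  # -> 0.005
+```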
## Train diff --git a/configs/recognition/tanet/metafile.yml b/configs/recognition/tanet/metafile.yml index 6ce789d4ac..5bc7961afa 100644 --- a/configs/recognition/tanet/metafile.yml +++ b/configs/recognition/tanet/metafile.yml @@ -24,3 +24,53 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.json Training Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.log Weights: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219-032c8e94.pth +- Config: configs/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb.py + In Collection: TANet + Metadata: + Architecture: TANet + Epochs: 50 + FLOPs: 32972787840 + Parameters: 25127246 + Batch Size: 8 + Training Data: SthV1 + Training Resources: 8 GPUs + Pretrained: ImageNet + Resolution: height 100 + Modality: RGB + Name: tanet_r50_1x1x8_50e_sthv1_rgb + Results: + - Dataset: SthV1 + Metrics: + top1 acc (accurate): 49.69 + top1 acc (efficient): 47.45 + top5 acc (accurate): 77.62 + top5 acc (efficient): 76.00 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log + Weights: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/tanet_r50_1x1x8_50e_sthv1_rgb_20210630-f4a48609.pth +- Config: configs/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb.py + In Collection: TANet + Metadata: + Architecture: TANet + Epochs: 50 + FLOPs: 65946542336 + Parameters: 25134670 + Batch Size: 4 + Training Data: SthV1 + Training Resources: 8 GPUs + Pretrained: ImageNet + Resolution: height 100 + Modality: RGB + Name: tanet_r50_1x1x16_50e_sthv1_rgb + Results: + - Dataset: SthV1 + Metrics: + top1 acc (accurate): 50.41 + top1 acc (efficient): 47.73 + top5 acc (accurate): 78.47 + top5 acc (efficient): 77.31 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/20210607_155335.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/20210607_155335.log + Weights: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb_20210630-7c19303c.pth diff --git a/configs/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb.py b/configs/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb.py new file mode 100644 index 0000000000..d6ff915721 --- /dev/null +++ b/configs/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb.py @@ -0,0 +1,100 @@ +_base_ = [ + '../../_base_/models/tanet_r50.py', '../../_base_/default_runtime.py', + '../../_base_/schedules/sgd_tsm_50e.py' +] + +# model settings +model = dict( + backbone=dict(num_segments=16), + cls_head=dict(num_classes=174, num_segments=16, dropout_ratio=0.6)) + +# dataset settings +dataset_type = 'RawframeDataset' +data_root = 'data/sthv1/rawframes' +data_root_val = 'data/sthv1/rawframes' +ann_file_train = 'data/sthv1/sthv1_train_list_rawframes.txt' +ann_file_val = 'data/sthv1/sthv1_val_list_rawframes.txt' +ann_file_test = 'data/sthv1/sthv1_val_list_rawframes.txt' + +sthv1_flip_label_map =
{2: 4, 4: 2, 30: 41, 41: 30, 52: 66, 66: 52} +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) + +train_pipeline = [ + dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=16), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict( + type='MultiScaleCrop', + input_size=224, + scales=(1, 0.875, 0.75, 0.66), + random_crop=False, + max_wh_scale_gap=1, + num_fixed_crops=13), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5, flip_label_map=sthv1_flip_label_map), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=16, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=16, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=4, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + filename_tmpl='{:05}.jpg', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + filename_tmpl='{:05}.jpg', + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + filename_tmpl='{:05}.jpg', + pipeline=test_pipeline)) +evaluation = dict( + interval=1, metrics=['top_k_accuracy', 'mean_class_accuracy']) + +# optimizer +optimizer = dict(lr=0.005, weight_decay=0.001) +lr_config = dict(policy='step', step=[30, 40, 45]) + +# runtime settings +work_dir = './work_dirs/tanet_r50_1x1x16_50e_sthv1_rgb/' diff --git a/configs/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb.py b/configs/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb.py new file mode 100644 index 0000000000..987336e081 --- /dev/null +++ b/configs/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb.py @@ -0,0 +1,98 @@ +_base_ = [ + '../../_base_/models/tanet_r50.py', '../../_base_/default_runtime.py', + '../../_base_/schedules/sgd_tsm_50e.py' +] + +# model settings +model = dict(cls_head=dict(num_classes=174, dropout_ratio=0.6)) + +# dataset settings +dataset_type = 'RawframeDataset' +data_root = 'data/sthv1/rawframes' +data_root_val = 'data/sthv1/rawframes' +ann_file_train = 'data/sthv1/sthv1_train_list_rawframes.txt' +ann_file_val = 'data/sthv1/sthv1_val_list_rawframes.txt' +ann_file_test = 'data/sthv1/sthv1_val_list_rawframes.txt' + +sthv1_flip_label_map = {2: 4, 4: 2, 30: 41, 41: 30, 52: 66, 66: 52} +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) + +train_pipeline = [ + dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 
256)), + dict( + type='MultiScaleCrop', + input_size=224, + scales=(1, 0.875, 0.75, 0.66), + random_crop=False, + max_wh_scale_gap=1, + num_fixed_crops=13), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5, flip_label_map=sthv1_flip_label_map), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=8, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=8, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=8, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + filename_tmpl='{:05}.jpg', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + filename_tmpl='{:05}.jpg', + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + filename_tmpl='{:05}.jpg', + pipeline=test_pipeline)) +evaluation = dict( + interval=1, metrics=['top_k_accuracy', 'mean_class_accuracy']) + +# optimizer +optimizer = dict(weight_decay=0.001) +lr_config = dict(policy='step', step=[30, 40, 45]) + +# runtime settings +work_dir = './work_dirs/tanet_r50_1x1x8_50e_sthv1_rgb/' diff --git a/configs/recognition/tsm/README.md b/configs/recognition/tsm/README.md index af5d1eb183..78036d3c04 100644 --- a/configs/recognition/tsm/README.md +++ b/configs/recognition/tsm/README.md @@ -84,6 +84,20 @@ | ------------------------------------------------------------ | :--------: | :--: | :------: | :------: | :---------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | | [tsm_r50_1x1x8_50e_jester_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 96.5 / 97.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb-c799267e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb.json) | +### HMDB51 + +|config | gpus | backbone | pretrain | top1 acc| top5 acc | gpu_mem(M) | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| 
+|[tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb](/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb.py)|8|ResNet50|Kinetics400|72.68|92.03|10388|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb_20210630-10c74ee5.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/20210605_182554.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/20210605_182554.log.json)| +|[tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb](/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb.py)|8|ResNet50|Kinetics400|74.77|93.86|10388|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb_20210630-4785548e.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/20210605_182505.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/20210605_182505.log.json)| + +### UCF101 + +|config | gpus | backbone | pretrain | top1 acc| top5 acc | gpu_mem(M) | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb](/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb.py)|8|ResNet50|Kinetics400|94.50|99.58|10389|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb_20210630-1fae312b.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/20210605_182720.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/20210605_182720.log.json)| +|[tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb](/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb.py)|8|ResNet50|Kinetics400|94.58|99.37|10389|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb_20210630-8df9c358.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/20210605_182720.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/20210605_182720.log.json)| + Notes: 1. The **gpus** indicates the number of gpu we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. @@ -119,7 +133,7 @@ test_pipeline = [ 5. When applying Mixup and CutMix, we use the hyper parameter `alpha=0.2`. 6. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. 
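+Note 5 above fixes the Mixup/CutMix hyper parameter at `alpha=0.2`. As a rough standalone illustration (a sketch, not the implementation used in this repo), Mixup draws a blending weight from Beta(alpha, alpha) and mixes both the clips and their one-hot labels:
+
+```python
+import numpy as np
+
+def mixup(clip_a, clip_b, label_a, label_b, alpha=0.2):
+    # With alpha=0.2, lam is usually close to 0 or 1, so one clip dominates.
+    lam = np.random.beta(alpha, alpha)
+    mixed_clip = lam * clip_a + (1 - lam) * clip_b
+    mixed_label = lam * label_a + (1 - lam) * label_b
+    return mixed_clip, mixed_label
+```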
-For more details on data preparation, you can refer to Kinetics400, Something-Something V1 and Something-Something V2 in [Data Preparation](/docs/data_preparation.md). +For more details on data preparation, you can refer to corresponding parts in [Data Preparation](/docs/data_preparation.md). ## Train diff --git a/configs/recognition/tsm/metafile.yml b/configs/recognition/tsm/metafile.yml index 11bd167de2..059e2481b4 100644 --- a/configs/recognition/tsm/metafile.yml +++ b/configs/recognition/tsm/metafile.yml @@ -668,3 +668,95 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb.log Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb-c799267e.pth +- Config: configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 25 + FLOPs: 32959844352 + Parameters: 23612531 + Batch Size: 12 + Training Data: HMDB51 + Training Resources: 8 GPUs + Pretrained: Kinetics400 + Modality: RGB + Name: tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb + Results: + - Dataset: HMDB51 + Metrics: + top1 acc: 72.68 + top5 acc: 92.03 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/20210605_182554.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/20210605_182554.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb_20210630-10c74ee5.pth + gpu_mem(M): '10388' +- Config: configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 25 + FLOPs: 65919688704 + Parameters: 23612531 + Batch Size: 6 + Training Data: HMDB51 + Training Resources: 8 GPUs + Pretrained: Kinetics400 + Modality: RGB + Name: tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb + Results: + - Dataset: HMDB51 + Metrics: + top1 acc: 74.77 + top5 acc: 93.86 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/20210605_182505.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/20210605_182505.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb_20210630-4785548e.pth + gpu_mem(M): '10388' +- Config: configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 25 + FLOPs: 32960663552 + Parameters: 23714981 + Batch Size: 12 + Training Data: UCF101 + Training Resources: 8 GPUs + Pretrained: Kinetics400 + Modality: RGB + Name: tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb + Results: + - Dataset: UCF101 + Metrics: + top1 acc: 94.50 + top5 acc: 99.58 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/20210605_182720.log.json + Training Log: 
https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/20210605_182720.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb_20210630-1fae312b.pth + gpu_mem(M): '10389' +- Config: configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Epochs: 25 + FLOPs: 65921327104 + Parameters: 23714981 + Batch Size: 6 + Training Data: UCF101 + Training Resources: 8 GPUs + Pretrained: Kinetics400 + Modality: RGB + Name: tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb + Results: + - Dataset: UCF101 + Metrics: + top1 acc: 94.58 + top5 acc: 99.37 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/20210605_182720.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/20210605_182720.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb_20210630-8df9c358.pth + gpu_mem(M): '10389' diff --git a/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb.py b/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb.py new file mode 100644 index 0000000000..c73cc685ed --- /dev/null +++ b/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb.py @@ -0,0 +1,100 @@ +_base_ = [ + '../../_base_/models/tsm_r50.py', '../../_base_/schedules/sgd_tsm_50e.py', + '../../_base_/default_runtime.py' +] + +# model settings +model = dict( + backbone=dict(num_segments=16), + cls_head=dict(num_classes=51, num_segments=16)) + +# dataset settings +split = 1 +dataset_type = 'RawframeDataset' +data_root = 'data/hmdb51/rawframes' +data_root_val = 'data/hmdb51/rawframes' +ann_file_train = f'data/hmdb51/hmdb51_train_split_{split}_rawframes.txt' +ann_file_val = f'data/hmdb51/hmdb51_val_split_{split}_rawframes.txt' +ann_file_test = f'data/hmdb51/hmdb51_val_split_{split}_rawframes.txt' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) + +train_pipeline = [ + dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=16), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict( + type='MultiScaleCrop', + input_size=224, + scales=(1, 0.875, 0.75, 0.66), + random_crop=False, + max_wh_scale_gap=1, + num_fixed_crops=13), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=16, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=16, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + 
dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=6, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + pipeline=test_pipeline)) +evaluation = dict( + interval=1, metrics=['top_k_accuracy', 'mean_class_accuracy']) + +# optimizer +optimizer = dict( + lr=0.00075, # this lr is used for 8 gpus +) +# learning policy +lr_config = dict(policy='step', step=[10, 20]) +total_epochs = 25 + +load_from = 'https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/tsm_r50_256p_1x1x16_50e_kinetics400_rgb_20201010-85645c2a.pth' # noqa: E501 +# runtime settings +work_dir = './work_dirs/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/' diff --git a/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb.py b/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb.py new file mode 100644 index 0000000000..8fa456dd9d --- /dev/null +++ b/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb.py @@ -0,0 +1,100 @@ +_base_ = [ + '../../_base_/models/tsm_r50.py', '../../_base_/schedules/sgd_tsm_50e.py', + '../../_base_/default_runtime.py' +] + +# model settings +model = dict( + backbone=dict(num_segments=16), + cls_head=dict(num_classes=101, num_segments=16)) + +# dataset settings +dataset_type = 'RawframeDataset' +data_root = 'data/ucf101/rawframes/' +data_root_val = 'data/ucf101/rawframes/' +split = 1 # official train/test splits. 
valid numbers: 1, 2, 3 +ann_file_train = f'data/ucf101/ucf101_train_split_{split}_rawframes.txt' +ann_file_val = f'data/ucf101/ucf101_val_split_{split}_rawframes.txt' +ann_file_test = f'data/ucf101/ucf101_val_split_{split}_rawframes.txt' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) + +train_pipeline = [ + dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=16), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict( + type='MultiScaleCrop', + input_size=224, + scales=(1, 0.875, 0.75, 0.66), + random_crop=False, + max_wh_scale_gap=1, + num_fixed_crops=13), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=16, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=16, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=6, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + pipeline=test_pipeline)) +evaluation = dict( + interval=1, metrics=['top_k_accuracy', 'mean_class_accuracy']) + +# optimizer +optimizer = dict( + lr=0.00075, # this lr is used for 8 gpus +) +# learning policy +lr_config = dict(policy='step', step=[10, 20]) +total_epochs = 25 + +load_from = 'https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/tsm_r50_256p_1x1x16_50e_kinetics400_rgb_20201010-85645c2a.pth' # noqa: E501 +# runtime settings +work_dir = './work_dirs/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/' diff --git a/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb.py b/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb.py new file mode 100644 index 0000000000..bdc430804b --- /dev/null +++ b/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb.py @@ -0,0 +1,100 @@ +_base_ = [ + '../../_base_/models/tsm_r50.py', '../../_base_/schedules/sgd_tsm_50e.py', + '../../_base_/default_runtime.py' +] + +# model settings +model = dict( + backbone=dict(num_segments=8), + cls_head=dict(num_classes=51, num_segments=8)) + +# dataset settings +split = 1 +dataset_type = 'RawframeDataset' +data_root = 'data/hmdb51/rawframes' +data_root_val = 'data/hmdb51/rawframes' +ann_file_train = f'data/hmdb51/hmdb51_train_split_{split}_rawframes.txt' +ann_file_val = f'data/hmdb51/hmdb51_val_split_{split}_rawframes.txt' 
+ann_file_test = f'data/hmdb51/hmdb51_val_split_{split}_rawframes.txt' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) + +train_pipeline = [ + dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict( + type='MultiScaleCrop', + input_size=224, + scales=(1, 0.875, 0.75, 0.66), + random_crop=False, + max_wh_scale_gap=1, + num_fixed_crops=13), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=8, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=8, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=12, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + pipeline=test_pipeline)) +evaluation = dict( + interval=1, metrics=['top_k_accuracy', 'mean_class_accuracy']) + +# optimizer +optimizer = dict( + lr=0.0015, # this lr is used for 8 gpus +) +# learning policy +lr_config = dict(policy='step', step=[10, 20]) +total_epochs = 25 + +load_from = 'https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/tsm_r50_256p_1x1x8_50e_kinetics400_rgb_20200726-020785e2.pth' # noqa: E501 +# runtime settings +work_dir = './work_dirs/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/' diff --git a/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb.py b/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb.py new file mode 100644 index 0000000000..5b6c07d478 --- /dev/null +++ b/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb.py @@ -0,0 +1,100 @@ +_base_ = [ + '../../_base_/models/tsm_r50.py', '../../_base_/schedules/sgd_tsm_50e.py', + '../../_base_/default_runtime.py' +] + +# model settings +model = dict( + backbone=dict(num_segments=8), + cls_head=dict(num_classes=101, num_segments=8)) + +# dataset settings +dataset_type = 'RawframeDataset' +data_root = 'data/ucf101/rawframes/' +data_root_val = 'data/ucf101/rawframes/' +split = 1 # official train/test splits. 
valid numbers: 1, 2, 3 +ann_file_train = f'data/ucf101/ucf101_train_split_{split}_rawframes.txt' +ann_file_val = f'data/ucf101/ucf101_val_split_{split}_rawframes.txt' +ann_file_test = f'data/ucf101/ucf101_val_split_{split}_rawframes.txt' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) + +train_pipeline = [ + dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict( + type='MultiScaleCrop', + input_size=224, + scales=(1, 0.875, 0.75, 0.66), + random_crop=False, + max_wh_scale_gap=1, + num_fixed_crops=13), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=8, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=8, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=12, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + pipeline=test_pipeline)) +evaluation = dict( + interval=1, metrics=['top_k_accuracy', 'mean_class_accuracy']) + +# optimizer +optimizer = dict( + lr=0.0015, # this lr is used for 8 gpus +) +# learning policy +lr_config = dict(policy='step', step=[10, 20]) +total_epochs = 25 + +load_from = 'https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/tsm_r50_256p_1x1x8_50e_kinetics400_rgb_20200726-020785e2.pth' # noqa: E501 +# runtime settings +work_dir = './work_dirs/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/' diff --git a/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb.py b/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb.py index f03a6ef6fb..3bf0c10e08 100644 --- a/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb.py +++ b/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb.py @@ -7,12 +7,14 @@ model = dict(cls_head=dict(num_classes=51)) # dataset settings +split = 1 dataset_type = 'RawframeDataset' data_root = 'data/hmdb51/rawframes' data_root_val = 'data/hmdb51/rawframes' -ann_file_train = 'data/hmdb51/hmdb51_train_split_1_rawframes.txt' -ann_file_val = 'data/hmdb51/hmdb51_val_split_1_rawframes.txt' -ann_file_test = 'data/hmdb51/hmdb51_val_split_1_rawframes.txt' +ann_file_train = f'data/hmdb51/hmdb51_train_split_{split}_rawframes.txt' +ann_file_val = 
f'data/hmdb51/hmdb51_val_split_{split}_rawframes.txt' +ann_file_test = f'data/hmdb51/hmdb51_val_split_{split}_rawframes.txt' + img_norm_cfg = dict( mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) train_pipeline = [ diff --git a/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb.py index 9c4ddd354e..b23a39e8f9 100644 --- a/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb.py +++ b/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb.py @@ -7,12 +7,14 @@ model = dict(cls_head=dict(num_classes=51)) # dataset settings +split = 1 dataset_type = 'RawframeDataset' data_root = 'data/hmdb51/rawframes' data_root_val = 'data/hmdb51/rawframes' -ann_file_train = 'data/hmdb51/hmdb51_train_split_1_rawframes.txt' -ann_file_val = 'data/hmdb51/hmdb51_val_split_1_rawframes.txt' -ann_file_test = 'data/hmdb51/hmdb51_val_split_1_rawframes.txt' +ann_file_train = f'data/hmdb51/hmdb51_train_split_{split}_rawframes.txt' +ann_file_val = f'data/hmdb51/hmdb51_val_split_{split}_rawframes.txt' +ann_file_test = f'data/hmdb51/hmdb51_val_split_{split}_rawframes.txt' + img_norm_cfg = dict( mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) train_pipeline = [ diff --git a/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb.py b/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb.py index cacb588bdb..c01a744ad3 100644 --- a/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb.py +++ b/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb.py @@ -7,12 +7,14 @@ model = dict(cls_head=dict(num_classes=51)) # dataset settings +split = 1 dataset_type = 'RawframeDataset' data_root = 'data/hmdb51/rawframes' data_root_val = 'data/hmdb51/rawframes' -ann_file_train = 'data/hmdb51/hmdb51_train_split_1_rawframes.txt' -ann_file_val = 'data/hmdb51/hmdb51_val_split_1_rawframes.txt' -ann_file_test = 'data/hmdb51/hmdb51_val_split_1_rawframes.txt' +ann_file_train = f'data/hmdb51/hmdb51_train_split_{split}_rawframes.txt' +ann_file_val = f'data/hmdb51/hmdb51_val_split_{split}_rawframes.txt' +ann_file_test = f'data/hmdb51/hmdb51_val_split_{split}_rawframes.txt' + img_norm_cfg = dict( mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) train_pipeline = [ diff --git a/mmaction/models/backbones/resnet3d.py b/mmaction/models/backbones/resnet3d.py index 79f98e0f7a..75febec50c 100644 --- a/mmaction/models/backbones/resnet3d.py +++ b/mmaction/models/backbones/resnet3d.py @@ -410,6 +410,7 @@ def __init__(self, conv1_stride_t=1, pool1_stride_s=2, pool1_stride_t=1, + with_pool1=True, with_pool2=True, style='pytorch', frozen_stages=-1, @@ -450,6 +451,7 @@ def __init__(self, self.conv1_stride_t = conv1_stride_t self.pool1_stride_s = pool1_stride_s self.pool1_stride_t = pool1_stride_t + self.with_pool1 = with_pool1 self.with_pool2 = with_pool2 self.style = style self.frozen_stages = frozen_stages @@ -836,7 +838,8 @@ def forward(self, x): samples extracted by the backbone. 
""" x = self.conv1(x) - x = self.maxpool(x) + if self.with_pool1: + x = self.maxpool(x) outs = [] for i, layer_name in enumerate(self.res_layers): res_layer = getattr(self, layer_name) diff --git a/mmaction/models/common/tam.py b/mmaction/models/common/tam.py index 301c69faea..db15bd8049 100644 --- a/mmaction/models/common/tam.py +++ b/mmaction/models/common/tam.py @@ -1,6 +1,5 @@ import torch.nn as nn import torch.nn.functional as F -from mmcv.cnn import constant_init, kaiming_init, normal_init class TAM(nn.Module): @@ -72,18 +71,6 @@ def __init__(self, nn.Conv1d(in_channels // beta, in_channels, 1, bias=False), nn.Sigmoid()) - self.init_weights() - - def init_weights(self): - """Initiate the parameters from scratch.""" - for m in self.modules(): - if isinstance(m, nn.Conv1d): - kaiming_init(m) - elif isinstance(m, nn.BatchNorm1d): - constant_init(m, 1) - elif isinstance(m, nn.Linear): - normal_init(m, std=self.init_std) - def forward(self, x): """Defines the computation performed at every call. From b3abdddf10058cb8911e12bbe44ee42d4acd5866 Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Thu, 1 Jul 2021 15:38:12 +0800 Subject: [PATCH 177/414] [Improvement] Support Torchvision transformations (#972) * update * Update compose.py * add unittest * fix bug * update --- mmaction/datasets/pipelines/__init__.py | 16 ++--- mmaction/datasets/pipelines/augmentations.py | 68 +++++++++++--------- mmaction/datasets/pipelines/compose.py | 7 +- tests/test_data/test_compose.py | 35 ++++++++++ 4 files changed, 87 insertions(+), 39 deletions(-) diff --git a/mmaction/datasets/pipelines/__init__.py b/mmaction/datasets/pipelines/__init__.py index f6f94d6cd5..85d6f0ff85 100644 --- a/mmaction/datasets/pipelines/__init__.py +++ b/mmaction/datasets/pipelines/__init__.py @@ -1,9 +1,8 @@ -from .augmentations import (AudioAmplify, CenterCrop, ColorJitter, - EntityBoxCrop, EntityBoxFlip, EntityBoxRescale, - Flip, Fuse, Imgaug, MelSpectrogram, MultiGroupCrop, +from .augmentations import (AudioAmplify, CenterCrop, ColorJitter, Flip, Fuse, + Imgaug, MelSpectrogram, MultiGroupCrop, MultiScaleCrop, Normalize, RandomCrop, RandomRescale, RandomResizedCrop, RandomScale, - Resize, TenCrop, ThreeCrop) + Resize, TenCrop, ThreeCrop, TorchvisionTrans) from .compose import Compose from .formating import (Collect, FormatAudioShape, FormatShape, ImageToTensor, Rename, ToDataContainer, ToTensor, Transpose) @@ -31,9 +30,8 @@ 'PyAVInit', 'SampleProposalFrames', 'ColorJitter', 'LoadHVULabel', 'SampleAVAFrames', 'AudioAmplify', 'MelSpectrogram', 'AudioDecode', 'FormatAudioShape', 'LoadAudioFeature', 'AudioFeatureSelector', - 'AudioDecodeInit', 'EntityBoxFlip', 'EntityBoxCrop', 'EntityBoxRescale', - 'RandomScale', 'ImageDecode', 'BuildPseudoClip', 'RandomRescale', - 'PyAVDecodeMotionVector', 'Rename', 'Imgaug', 'UniformSampleFrames', - 'PoseDecode', 'LoadKineticsPose', 'GeneratePoseTarget', 'PIMSInit', - 'PIMSDecode' + 'AudioDecodeInit', 'RandomScale', 'ImageDecode', 'BuildPseudoClip', + 'RandomRescale', 'PyAVDecodeMotionVector', 'Rename', 'Imgaug', + 'UniformSampleFrames', 'PoseDecode', 'LoadKineticsPose', + 'GeneratePoseTarget', 'PIMSInit', 'PIMSDecode', 'TorchvisionTrans' ] diff --git a/mmaction/datasets/pipelines/augmentations.py b/mmaction/datasets/pipelines/augmentations.py index bdeab950cf..64cef00f62 100644 --- a/mmaction/datasets/pipelines/augmentations.py +++ b/mmaction/datasets/pipelines/augmentations.py @@ -1,12 +1,14 @@ import random import warnings from collections.abc 
import Sequence +from distutils.version import LooseVersion import mmcv import numpy as np from torch.nn.modules.utils import _pair from ..builder import PIPELINES +from .formating import to_tensor def _combine_quadruple(a, b): @@ -51,6 +53,43 @@ def _init_lazy_if_proper(results, lazy): assert 'lazy' not in results, 'Use Fuse after lazy operations' +@PIPELINES.register_module() +class TorchvisionTrans: + """Torchvision Augmentations, under torchvision.transforms. + + Args: + type (str): The name of the torchvision transformation. + """ + + def __init__(self, type, **kwargs): + try: + import torchvision + import torchvision.transforms as tv_trans + except ImportError: + raise RuntimeError('Install torchvision to use TorchvisionTrans') + if LooseVersion(torchvision.__version__) < LooseVersion('0.8.0'): + raise RuntimeError('The version of torchvision should be at least ' + '0.8.0') + + trans = getattr(tv_trans, type, None) + assert trans, f'Transform {type} not in torchvision' + self.trans = trans(**kwargs) + + def __call__(self, results): + assert 'imgs' in results + + imgs = [x.transpose(2, 0, 1) for x in results['imgs']] + imgs = to_tensor(np.stack(imgs)) + + imgs = self.trans(imgs).data.numpy() + imgs[imgs > 255] = 255 + imgs[imgs < 0] = 0 + imgs = imgs.astype(np.uint8) + imgs = [x.transpose(1, 2, 0) for x in imgs] + results['imgs'] = imgs + return results + + @PIPELINES.register_module() class PoseCompact: """Convert the coordinates of keypoints to make it more compact. @@ -154,35 +193,6 @@ def __repr__(self): return repr_str -class EntityBoxRescale: - - def __init__(self, scale_factor): - raise NotImplementedError( - 'This component should not be used in the ' - 'data pipeline and is removed in PR #782. Details see ' - 'https://github.com/open-mmlab/mmaction2/pull/782') - - -@PIPELINES.register_module() -class EntityBoxCrop: - - def __init__(self, crop_bbox): - raise NotImplementedError( - 'This component should not be used in the ' - 'data pipeline and is removed in PR #782. Details see ' - 'https://github.com/open-mmlab/mmaction2/pull/782') - - -@PIPELINES.register_module() -class EntityBoxFlip: - - def __init__(self, img_shape): - raise NotImplementedError( - 'This component should not be used in the ' - 'data pipeline and is removed in PR #782. Details see ' - 'https://github.com/open-mmlab/mmaction2/pull/782') - - @PIPELINES.register_module() class Imgaug: """Imgaug augmentation. 
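Taken together with the `Compose` change in the next file, the `TorchvisionTrans` wrapper above lets a pipeline config name any `torchvision.transforms` class directly: `Compose` detects the `torchvision.` prefix (12 characters, hence the `[12:]` slice below) and instantiates `TorchvisionTrans` with the remaining kwargs. A minimal config-level sketch of the intended usage, mirroring the `Grayscale` case exercised by the new unit test; the surrounding pipeline steps are illustrative, and torchvision >= 0.8.0 is required since the wrapper pushes a stacked uint8 tensor through the transform:

```python
# Sketch of a training pipeline using the new torchvision passthrough.
# Any 'torchvision.'-prefixed type bypasses the mmaction registry and is
# built as TorchvisionTrans('Grayscale', num_output_channels=3); the other
# steps here are illustrative placeholders.
train_pipeline = [
    dict(type='RawFrameDecode'),
    dict(type='Resize', scale=(-1, 256)),
    dict(type='torchvision.Grayscale', num_output_channels=3),
    dict(type='FormatShape', input_format='NCHW'),
]
```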
diff --git a/mmaction/datasets/pipelines/compose.py b/mmaction/datasets/pipelines/compose.py index c4d315aba5..2f0dd7b817 100644 --- a/mmaction/datasets/pipelines/compose.py +++ b/mmaction/datasets/pipelines/compose.py @@ -3,6 +3,7 @@ from mmcv.utils import build_from_cfg from ..builder import PIPELINES +from .augmentations import TorchvisionTrans @PIPELINES.register_module() @@ -19,7 +20,11 @@ def __init__(self, transforms): self.transforms = [] for transform in transforms: if isinstance(transform, dict): - transform = build_from_cfg(transform, PIPELINES) + if transform['type'].startswith('torchvision.'): + trans_type = transform.pop('type')[12:] + transform = TorchvisionTrans(trans_type, **transform) + else: + transform = build_from_cfg(transform, PIPELINES) self.transforms.append(transform) elif callable(transform): self.transforms.append(transform) diff --git a/tests/test_data/test_compose.py b/tests/test_data/test_compose.py index e32ba7e6f7..86379fe933 100644 --- a/tests/test_data/test_compose.py +++ b/tests/test_data/test_compose.py @@ -4,6 +4,15 @@ from mmaction.datasets.pipelines import Compose, ImageToTensor +try: + import torchvision + from distutils.version import LooseVersion + torchvision_ok = False + if LooseVersion(torchvision.__version__) >= LooseVersion('0.8.0'): + torchvision_ok = True +except (ImportError, ModuleNotFoundError): + torchvision_ok = False + def test_compose(): with pytest.raises(TypeError): @@ -35,3 +44,29 @@ def test_compose(): assert repr(compose) == compose.__class__.__name__ + \ f'(\n {image_to_tensor}\n)' + + +@pytest.mark.skipif( + not torchvision_ok, reason='torchvision >= 0.8.0 is required') +def test_compose_support_torchvision(): + target_keys = ['imgs', 'img_metas'] + + # test Compose given a data pipeline + imgs = [np.random.randn(256, 256, 3)] * 8 + results = dict( + imgs=imgs, + abandoned_key=None, + img_name='test_image.png', + clip_len=8, + num_clips=1) + test_pipeline = [ + dict(type='torchvision.Grayscale', num_output_channels=3), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs'], meta_keys=['img_name']), + dict(type='ToTensor', keys=['imgs']) + ] + compose = Compose(test_pipeline) + compose_results = compose(results) + assert assert_keys_equal(compose_results.keys(), target_keys) + assert assert_keys_equal(compose_results['img_metas'].data.keys(), + ['img_name']) From 1654ad55684b0de901254066b86b62e3af09346a Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Thu, 1 Jul 2021 15:43:15 +0800 Subject: [PATCH 178/414] [Feature] Posec3d demo (#976) * support posec3d_demo * add label map * fix bug * update readme * update readme * update README * update demo * update --- README.md | 4 + README_zh-CN.md | 4 + demo/README.md | 50 ++++++ demo/demo_posec3d.py | 273 +++++++++++++++++++++++++++++++++ demo/hrnet_w32_coco_256x192.py | 172 +++++++++++++++++++++ demo/label_map_ntu120.txt | 120 +++++++++++++++ demo/ntu_sample.avi | Bin 0 -> 1119546 bytes docs_zh_CN/demo.md | 67 ++++++++ 8 files changed, 690 insertions(+) create mode 100644 demo/demo_posec3d.py create mode 100644 demo/hrnet_w32_coco_256x192.py create mode 100644 demo/label_map_ntu120.txt create mode 100644 demo/ntu_sample.avi diff --git a/README.md b/README.md index 754622889a..789866d251 100644 --- a/README.md +++ b/README.md @@ -27,6 +27,10 @@ The master branch works with **PyTorch 1.3+**.
  Spatio-Temporal Action Detection Results on AVA-2.1
+
+  Skeleton-based Action Recognition Results on NTU-RGB+D-120
+
### Major Features diff --git a/README_zh-CN.md b/README_zh-CN.md index 37aa4608e3..1ed6d4f762 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -26,6 +26,10 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLa
  AVA-2.1 上的时空动作检测
+
+  NTURGB+D-120 上的基于人体姿态的动作识别
+
## 主要特性

diff --git a/demo/README.md b/demo/README.md
index dca7a20f37..66bb7a5051 100644
--- a/demo/README.md
+++ b/demo/README.md
@@ -9,6 +9,7 @@
 - [Webcam demo](#webcam-demo): A demo script to implement real-time action recognition from a web camera.
 - [Long Video demo](#long-video-demo): a demo script to predict different labels using a single long video.
 - [SpatioTemporal Action Detection Webcam Demo](#spatiotemporal-action-detection-webcam-demo): A demo script to implement real-time spatio-temporal action detection from a web camera.
+- [Skeleton-based Action Recognition Demo](#skeleton-based-action-recognition-demo): A demo script to predict the skeleton-based action recognition result using a single video.

 ## Modify configs through script arguments

@@ -438,3 +439,52 @@ python demo/webcam_demo_spatiotemporal_det.py \
     --output-fps 20 \
     --show
 ```
+
+## Skeleton-based Action Recognition Demo
+
+We provide a demo script to predict the skeleton-based action recognition result using a single video.
+
+```shell
+python demo/demo_posec3d.py ${VIDEO_FILE} ${OUT_FILENAME} \
+    [--config ${SKELETON_BASED_ACTION_RECOGNITION_CONFIG_FILE}] \
+    [--checkpoint ${SKELETON_BASED_ACTION_RECOGNITION_CHECKPOINT}] \
+    [--det-config ${HUMAN_DETECTION_CONFIG_FILE}] \
+    [--det-checkpoint ${HUMAN_DETECTION_CHECKPOINT}] \
+    [--det-score-thr ${HUMAN_DETECTION_SCORE_THRESHOLD}] \
+    [--pose-config ${HUMAN_POSE_ESTIMATION_CONFIG_FILE}] \
+    [--pose-checkpoint ${HUMAN_POSE_ESTIMATION_CHECKPOINT}] \
+    [--label-map ${LABEL_MAP}] \
+    [--device ${DEVICE}] \
+    [--short-side ${SHORT_SIDE}]
+```
+
+Optional arguments:
+
+- `SKELETON_BASED_ACTION_RECOGNITION_CONFIG_FILE`: The skeleton-based action recognition config file path.
+- `SKELETON_BASED_ACTION_RECOGNITION_CHECKPOINT`: The skeleton-based action recognition checkpoint path or URL.
+- `HUMAN_DETECTION_CONFIG_FILE`: The human detection config file path.
+- `HUMAN_DETECTION_CHECKPOINT`: The human detection checkpoint URL.
+- `HUMAN_DETECTION_SCORE_THRESHOLD`: The score threshold for human detection. Default: 0.9.
+- `HUMAN_POSE_ESTIMATION_CONFIG_FILE`: The human pose estimation config file path (trained on COCO-Keypoint).
+- `HUMAN_POSE_ESTIMATION_CHECKPOINT`: The human pose estimation checkpoint URL (trained on COCO-Keypoint).
+- `LABEL_MAP`: The label map used. Default: `demo/label_map_ntu120.txt`.
+- `DEVICE`: Type of device to run the demo. Allowed values are cuda devices like `cuda:0` or `cpu`. Default: `cuda:0`.
+- `SHORT_SIDE`: The short side used for frame extraction. Default: 480.
+
+Examples:
+
+Assume that you are located at `$MMACTION2`.
+
+1. Use the Faster RCNN as the human detector, HRNetw32 as the pose estimator, PoseC3D-NTURGB+D-120-Xsub-keypoint as the skeleton-based action recognizer.
+ +```shell +python demo/demo_posec3d.py demo/ntu_sample.avi demo/posec3d_demo.mp4 \ + --config configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py \ + --checkpoint https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint-6736b03f.pth \ + --det-config demo/faster_rcnn_r50_fpn_2x_coco.py \ + --det-checkpoint http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth \ + --det-score-thr 0.9 \ + --pose-config demo/hrnet_w32_coco_256x192.py \ + --pose-checkpoint https://download.openmmlab.com/mmpose/top_down/hrnet/hrnet_w32_coco_256x192-c78dce93_20200708.pth \ + --label-map demo/label_map_ntu120.txt +``` diff --git a/demo/demo_posec3d.py b/demo/demo_posec3d.py new file mode 100644 index 0000000000..a2694638ac --- /dev/null +++ b/demo/demo_posec3d.py @@ -0,0 +1,273 @@ +import argparse +import os +import os.path as osp +import shutil + +import cv2 +import mmcv +import numpy as np +import torch +from mmcv import DictAction +from mmcv.runner import load_checkpoint + +from mmaction.datasets.pipelines import Compose +from mmaction.models import build_model +from mmaction.utils import import_module_error_func + +try: + from mmdet.apis import inference_detector, init_detector + from mmpose.apis import (init_pose_model, inference_top_down_pose_model, + vis_pose_result) +except (ImportError, ModuleNotFoundError): + + @import_module_error_func('mmdet') + def inference_detector(*args, **kwargs): + pass + + @import_module_error_func('mmdet') + def init_detector(*args, **kwargs): + pass + + @import_module_error_func('mmpose') + def init_pose_model(*args, **kwargs): + pass + + @import_module_error_func('mmpose') + def inference_top_down_pose_model(*args, **kwargs): + pass + + @import_module_error_func('mmpose') + def vis_pose_result(*args, **kwargs): + pass + + +try: + import moviepy.editor as mpy +except ImportError: + raise ImportError('Please install moviepy to enable output file') + +FONTFACE = cv2.FONT_HERSHEY_DUPLEX +FONTSCALE = 0.75 +FONTCOLOR = (255, 255, 255) # BGR, white +THICKNESS = 1 +LINETYPE = 1 + + +def parse_args(): + parser = argparse.ArgumentParser(description='MMAction2 demo') + parser.add_argument('video', help='video file/url') + parser.add_argument('out_filename', help='output filename') + parser.add_argument( + '--config', + default=('configs/skeleton/posec3d/' + 'slowonly_r50_u48_240e_ntu120_xsub_keypoint.py'), + help='posec3d config file path') + parser.add_argument( + '--checkpoint', + default=('https://download.openmmlab.com/mmaction/skeleton/posec3d/' + 'slowonly_r50_u48_240e_ntu120_xsub_keypoint/' + 'slowonly_r50_u48_240e_ntu120_xsub_keypoint-6736b03f.pth'), + help='posec3d checkpoint file/url') + parser.add_argument( + '--det-config', + default='demo/faster_rcnn_r50_fpn_2x_coco.py', + help='human detection config file path (from mmdet)') + parser.add_argument( + '--det-checkpoint', + default=('http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/' + 'faster_rcnn_r50_fpn_2x_coco/' + 'faster_rcnn_r50_fpn_2x_coco_' + 'bbox_mAP-0.384_20200504_210434-a5d8aa15.pth'), + help='human detection checkpoint file/url') + parser.add_argument( + '--pose-config', + default='demo/hrnet_w32_coco_256x192.py', + help='human pose estimation config file path (from mmpose)') + parser.add_argument( + '--pose-checkpoint', + default=('https://download.openmmlab.com/mmpose/top_down/hrnet/' + 
'hrnet_w32_coco_256x192-c78dce93_20200708.pth'), + help='human pose estimation checkpoint file/url') + parser.add_argument( + '--det-score-thr', + type=float, + default=0.9, + help='the threshold of human detection score') + parser.add_argument( + '--label-map', + default='demo/label_map_ntu120.txt', + help='label map file') + parser.add_argument( + '--device', type=str, default='cuda:0', help='CPU/CUDA device option') + parser.add_argument( + '--short-side', + type=int, + default=480, + help='specify the short-side length of the image') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + default={}, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. For example, ' + "'--cfg-options model.backbone.depth=18 model.backbone.with_cp=True'") + args = parser.parse_args() + return args + + +def frame_extraction(video_path, short_side): + """Extract frames given video_path. + + Args: + video_path (str): The video_path. + """ + # Load the video, extract frames into ./tmp/video_name + target_dir = osp.join('./tmp', osp.basename(osp.splitext(video_path)[0])) + os.makedirs(target_dir, exist_ok=True) + # Should be able to handle videos up to several hours + frame_tmpl = osp.join(target_dir, 'img_{:06d}.jpg') + vid = cv2.VideoCapture(video_path) + frames = [] + frame_paths = [] + flag, frame = vid.read() + cnt = 0 + new_h, new_w = None, None + while flag: + if new_h is None: + h, w, _ = frame.shape + new_w, new_h = mmcv.rescale_size((w, h), (short_side, np.Inf)) + + frame = mmcv.imresize(frame, (new_w, new_h)) + + frames.append(frame) + frame_path = frame_tmpl.format(cnt + 1) + frame_paths.append(frame_path) + + cv2.imwrite(frame_path, frame) + cnt += 1 + flag, frame = vid.read() + + return frame_paths, frames + + +def detection_inference(args, frame_paths): + """Detect human boxes given frame paths. + + Args: + args (argparse.Namespace): The arguments. + frame_paths (list[str]): The paths of frames to do detection inference. + + Returns: + list[np.ndarray]: The human detection results. 
+ """ + model = init_detector(args.det_config, args.det_checkpoint, args.device) + assert model.CLASSES[0] == 'person', ('We require you to use a detector ' + 'trained on COCO') + results = [] + print('Performing Human Detection for each frame') + prog_bar = mmcv.ProgressBar(len(frame_paths)) + for frame_path in frame_paths: + result = inference_detector(model, frame_path) + # We only keep human detections with score larger than det_score_thr + result = result[0][result[0][:, 4] >= args.det_score_thr] + results.append(result) + prog_bar.update() + return results + + +def pose_inference(args, frame_paths, det_results): + model = init_pose_model(args.pose_config, args.pose_checkpoint, + args.device) + ret = [] + print('Performing Human Pose Estimation for each frame') + prog_bar = mmcv.ProgressBar(len(frame_paths)) + for f, d in zip(frame_paths, det_results): + # Align input format + d = [dict(bbox=x) for x in list(d)] + pose = inference_top_down_pose_model(model, f, d, format='xyxy')[0] + ret.append(pose) + prog_bar.update() + return ret + + +def main(): + args = parse_args() + + frame_paths, original_frames = frame_extraction(args.video, + args.short_side) + num_frame = len(frame_paths) + h, w, _ = original_frames[0].shape + + # Get clip_len, frame_interval and calculate center index of each clip + config = mmcv.Config.fromfile(args.config) + config.merge_from_dict(args.cfg_options) + + test_pipeline = Compose(config.data.test.pipeline) + + # Load label_map + label_map = [x.strip() for x in open(args.label_map).readlines()] + + # Get Human detection results + det_results = detection_inference(args, frame_paths) + torch.cuda.empty_cache() + + pose_results = pose_inference(args, frame_paths, det_results) + torch.cuda.empty_cache() + + fake_anno = dict( + frame_dir='', + label=-1, + img_shape=(h, w), + original_shape=(h, w), + start_index=0, + modality='Pose', + total_frames=num_frame) + num_person = max([len(x) for x in pose_results]) + num_keypoint = pose_results[0][0]['keypoints'].shape[0] + keypoint = np.zeros((num_person, num_frame, num_keypoint, 2), + dtype=np.float16) + keypoint_score = np.zeros((num_person, num_frame, num_keypoint), + dtype=np.float16) + for i, poses in enumerate(pose_results): + for j, pose in enumerate(poses): + pose = pose['keypoints'] + keypoint[j, i] = pose[:, :2] + keypoint_score[j, i] = pose[:, 2] + fake_anno['keypoint'] = keypoint + fake_anno['keypoint_score'] = keypoint_score + + imgs = test_pipeline(fake_anno)['imgs'][None] + imgs = imgs.to(args.device) + + model = build_model(config.model) + load_checkpoint(model, args.checkpoint, map_location=args.device) + model.to(args.device) + model.eval() + + with torch.no_grad(): + output = model(return_loss=False, imgs=imgs) + + action_idx = np.argmax(output) + action_label = label_map[action_idx] + + pose_model = init_pose_model(args.pose_config, args.pose_checkpoint, + args.device) + vis_frames = [ + vis_pose_result(pose_model, frame_paths[i], pose_results[i]) + for i in range(num_frame) + ] + for frame in vis_frames: + cv2.putText(frame, action_label, (10, 30), FONTFACE, FONTSCALE, + FONTCOLOR, THICKNESS, LINETYPE) + + cv2.imwrite('frame.jpg', vis_frames[0]) + vid = mpy.ImageSequenceClip([x[:, :, ::-1] for x in vis_frames], fps=24) + vid.write_videofile(args.out_filename, remove_temp=True) + + tmp_frame_dir = osp.dirname(frame_paths[0]) + shutil.rmtree(tmp_frame_dir) + + +if __name__ == '__main__': + main() diff --git a/demo/hrnet_w32_coco_256x192.py b/demo/hrnet_w32_coco_256x192.py new file mode 100644 index 
0000000000..6ef3b6efd7 --- /dev/null +++ b/demo/hrnet_w32_coco_256x192.py @@ -0,0 +1,172 @@ +log_level = 'INFO' +load_from = None +resume_from = None +dist_params = dict(backend='nccl') +workflow = [('train', 1)] +checkpoint_config = dict(interval=10) +evaluation = dict(interval=10, metric='mAP', key_indicator='AP') + +optimizer = dict( + type='Adam', + lr=5e-4, +) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[170, 200]) +total_epochs = 210 +log_config = dict( + interval=50, + hooks=[ + dict(type='TextLoggerHook'), + # dict(type='TensorboardLoggerHook') + ]) + +channel_cfg = dict( + num_output_channels=17, + dataset_joints=17, + dataset_channel=[ + [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16], + ], + inference_channel=[ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 + ]) + +# model settings +model = dict( + type='TopDown', + pretrained='https://download.openmmlab.com/mmpose/' + 'pretrain_models/hrnet_w32-36af842e.pth', + backbone=dict( + type='HRNet', + in_channels=3, + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + ), + keypoint_head=dict( + type='TopdownHeatmapSimpleHead', + in_channels=32, + out_channels=channel_cfg['num_output_channels'], + num_deconv_layers=0, + extra=dict(final_conv_kernel=1, ), + loss_keypoint=dict(type='JointsMSELoss', use_target_weight=True)), + train_cfg=dict(), + test_cfg=dict( + flip_test=True, + post_process='default', + shift_heatmap=True, + modulate_kernel=11)) + +data_cfg = dict( + image_size=[192, 256], + heatmap_size=[48, 64], + num_output_channels=channel_cfg['num_output_channels'], + num_joints=channel_cfg['dataset_joints'], + dataset_channel=channel_cfg['dataset_channel'], + inference_channel=channel_cfg['inference_channel'], + soft_nms=False, + nms_thr=1.0, + oks_thr=0.9, + vis_thr=0.2, + use_gt_bbox=False, + det_bbox_thr=0.0, + bbox_file='data/coco/person_detection_results/' + 'COCO_val2017_detections_AP_H_56_person.json', +) + +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='TopDownRandomFlip', flip_prob=0.5), + dict( + type='TopDownHalfBodyTransform', + num_joints_half_body=8, + prob_half_body=0.3), + dict( + type='TopDownGetRandomScaleRotation', rot_factor=40, scale_factor=0.5), + dict(type='TopDownAffine'), + dict(type='ToTensor'), + dict( + type='NormalizeTensor', + mean=[0.485, 0.456, 0.406], + std=[0.229, 0.224, 0.225]), + dict(type='TopDownGenerateTarget', sigma=2), + dict( + type='Collect', + keys=['img', 'target', 'target_weight'], + meta_keys=[ + 'image_file', 'joints_3d', 'joints_3d_visible', 'center', 'scale', + 'rotation', 'bbox_score', 'flip_pairs' + ]), +] + +val_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='TopDownAffine'), + dict(type='ToTensor'), + dict( + type='NormalizeTensor', + mean=[0.485, 0.456, 0.406], + std=[0.229, 0.224, 0.225]), + dict( + type='Collect', + keys=['img'], + meta_keys=[ + 'image_file', 'center', 'scale', 'rotation', 'bbox_score', + 'flip_pairs' + ]), +] + +test_pipeline = val_pipeline + 
+data_root = 'data/coco' +data = dict( + samples_per_gpu=64, + workers_per_gpu=2, + val_dataloader=dict(samples_per_gpu=32), + test_dataloader=dict(samples_per_gpu=32), + train=dict( + type='TopDownCocoDataset', + ann_file=f'{data_root}/annotations/person_keypoints_train2017.json', + img_prefix=f'{data_root}/train2017/', + data_cfg=data_cfg, + pipeline=train_pipeline), + val=dict( + type='TopDownCocoDataset', + ann_file=f'{data_root}/annotations/person_keypoints_val2017.json', + img_prefix=f'{data_root}/val2017/', + data_cfg=data_cfg, + pipeline=val_pipeline), + test=dict( + type='TopDownCocoDataset', + ann_file=f'{data_root}/annotations/person_keypoints_val2017.json', + img_prefix=f'{data_root}/val2017/', + data_cfg=data_cfg, + pipeline=val_pipeline), +) diff --git a/demo/label_map_ntu120.txt b/demo/label_map_ntu120.txt new file mode 100644 index 0000000000..863633776e --- /dev/null +++ b/demo/label_map_ntu120.txt @@ -0,0 +1,120 @@ +drink water. +eat meal/snack. +brushing teeth. +brushing hair. +drop. +pickup. +throw. +sitting down. +standing up (from sitting position). +clapping. +reading. +writing. +tear up paper. +wear jacket. +take off jacket. +wear a shoe. +take off a shoe. +wear on glasses. +take off glasses. +put on a hat/cap. +take off a hat/cap. +cheer up. +hand waving. +kicking something. +reach into pocket. +hopping (one foot jumping). +jump up. +make a phone call/answer phone. +playing with phone/tablet. +typing on a keyboard. +pointing to something with finger. +taking a selfie. +check time (from watch). +rub two hands together. +nod head/bow. +shake head. +wipe face. +salute. +put the palms together. +cross hands in front (say stop). +sneeze/cough. +staggering. +falling. +touch head (headache). +touch chest (stomachache/heart pain). +touch back (backache). +touch neck (neckache). +nausea or vomiting condition. +use a fan (with hand or paper)/feeling warm. +punching/slapping other person. +kicking other person. +pushing other person. +pat on back of other person. +point finger at the other person. +hugging other person. +giving something to other person. +touch other person's pocket. +handshaking. +walking towards each other. +walking apart from each other. +put on headphone. +take off headphone. +shoot at the basket. +bounce ball. +tennis bat swing. +juggling table tennis balls. +hush (quite). +flick hair. +thumb up. +thumb down. +make ok sign. +make victory sign. +staple book. +counting money. +cutting nails. +cutting paper (using scissors). +snapping fingers. +open bottle. +sniff (smell). +squat down. +toss a coin. +fold paper. +ball up paper. +play magic cube. +apply cream on face. +apply cream on hand back. +put on bag. +take off bag. +put something into a bag. +take something out of a bag. +open a box. +move heavy objects. +shake fist. +throw up cap/hat. +hands up (both hands). +cross arms. +arm circles. +arm swings. +running on the spot. +butt kicks (kick backward). +cross toe touch. +side kick. +yawn. +stretch oneself. +blow nose. +hit other person with something. +wield knife towards other person. +knock over other person (hit with body). +grab other person’s stuff. +shoot at other person with a gun. +step on foot. +high-five. +cheers and drink. +carry something with other person. +take a photo of other person. +follow other person. +whisper in other person’s ear. +exchange things with other person. +support somebody with hand. +finger-guessing game (playing rock-paper-scissors). 
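For context, `demo/demo_posec3d.py` above consumes this file by reading one class name per line and indexing the list with the argmax of the recognizer output. A minimal sketch of that lookup; the random scores are only a stand-in for a real model output:

```python
import numpy as np

# One NTU RGB+D 120 class name per line, as in demo/label_map_ntu120.txt.
with open('demo/label_map_ntu120.txt') as f:
    label_map = [x.strip() for x in f.readlines()]

scores = np.random.rand(120)  # stand-in for the recognizer's class scores
action_label = label_map[int(np.argmax(scores))]
print(action_label)
```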
diff --git a/demo/ntu_sample.avi b/demo/ntu_sample.avi
new file mode 100644
index 0000000000000000000000000000000000000000..42f8e03b1e3a294e026b340ac8bb3b94e66effb0
Binary files /dev/null and b/demo/ntu_sample.avi differ
z^|QDy4;oRBJud2%u_){RtpZ4@hwuR>fDti_`-?f^h)HnLE<&F{)orB zE|^>^&1XanV3yNdd{p*2e(Iw6xv3dlZ!gZzLO1|Gtkd$;6d*Ne>mgh$nmN~xcS>Rs zvmL_LpN?r%{p(yx+us1fO?~O`@W89;^4z9LN<4EjILfKWtL_=|oPG-XQv`Ov5dO4% zF-reusDc1KHL*)U>$WPjsk1-iiSa6OP(#e{bGTDoqDqZN&ssmDe?ZL*L}>Ebm?ia$ zG;eD@ZJk$zc;~2R6TGWgvRnM*eNnY0kk=v{esUIk5kEzquD`ZG~;G--=LXR&EO@pjM#gk z6|q)&i>L`#NfEJH$rz7dqgjO$Crj+Z8>!rs%z>%CiQyD4fbk{J`B#L5)Fx%}o78AG{nyS*FO(jLV znv{Q^(=ohIKQHU>PhtiaB_lCL8ni9&;KzeG?-iG_7ck6o{giIS2$RO?T_aE{5sHh6 z8<`i12eVR^d8}`z{fe*7N5OYt2CS=Z-$6$dRq2W^UKK0wP~eZzJei&hEdMYmL86Lx zvRB`(-A{XWp_=$J-_QQC#Y$hAc2Y;VCt3F8N;A_1axb2x&9NQ|#lsP$1IF6U`lX%> zZj9mw{R$FAk|pLHlu}L&yl>*`48(<}VgOe72hz>G_as7t(st<@f_US)$xevm&pk64 z{YtLg1Q5_@8sIXtz2yZL>Kn0jR;m9sFUR@kUeI(WMvetHV_oAk<)1X)x^bE@QFHtq zA2n&+-Ak1M|3-pSMp~fxWr5-W=fwuXTEc-frUoqOfu%{Sz2DV|5O{|RX*Ft(#dGG`1|X8fw4W66oDg&RTXx6$Awp!q zB+8nbp(VN*h&3yBP*YC6FR-P$8SXZ65VhTY6! zD#VI$(3QpJ^`PxjJ}GKF(-Z!fV!b_oy{k%#iaX09P-Gx{@bnorNEV37qg|BDABKw< zkn@VR%VIL2K05wZ{J;--8DSL8BcEc1)oCHL!}MwhVEUVPLpr#Q0@A#l?7ojchX{z!+zg-i3* zj!Z z_v~O0eOZe=2&|p7-Tsk(2RE<50WMcr92dDFB~b^iCwm!dA-I(4QD8n_@8r1L1Gl#9 zi-#|-dBy9db1o<-bcmf+2LV_%8)FxU8;%Jy{qS49l>|MTmf-zg9A?H{Wf5_4?LA5{7;KBd72h{S&+qwT zOHr#7>v36AGmg6*86W4&73M(rf6)jK-vP{pzC4#>d<{=Vow6RmY-!l-6@4HaAjjOd z7RxH1DLm2)z~syc3+t{1`I^6g!VV4pIrbpsq}=)HP)Io~OFaua^GbMCpoy4nEmuo_ zcy9koQ@E9;Azt2h#%Ob`(6T?`hoem!vAAf;7jLIg6lB6hbbHlh2hKS#>*)XgCYX&G z;=AJK9JXo;)O`=Eh4WBtm9Clw08pS=K#FC`w79xFMh!uu0-L(`6gW=9m>RcCDWIqE ze!(xl{0M9W)sfgC+eEKA7lpF8^x;in$xRqH2`k}&W}$MQb{bV=ij@H*B9t|&iDu{>nC9p4)Sq#ik6 z@DNMz$ahbau4?3uC(ynlFdsp~@o)YXoFFzf5Rv}fC^-qpsn@E=w3?hj zt``UNeUgQ>DOpWu8KaydY=53&QL>&4ASqfPK z(4NGM_|aXNwyzb>?v*5mW-|vs^(R0gvBEiU?rmIl00mYlb%VKsb$X@t#p1lBY-3y} zC>kH%LKWfg1L}AALE5*y74yRPFdhPQoxhPh-!EH(TR|H3MbV~>?xDPh8}kH6@H9q~ zdCC#J^u4#H#D%cp6-hx%F3_uos#J?6`;1Ch#)jCZOoiPcL?skO3U4VQJ=EN1A{d$= z!gNL`HHH20)C-cM{*89_>+>Hy--)hk7B+(9SpD)Y+AUR^+^7iHZxGkg7DhE_M!_y7=@Wh<_$`3hMpeDg#3oOO^6^zJJiUZ>7bQFG4z-ji&YxtX6{lq zVPMj?-Pi0_`gTljYlH^+kF=+UNz3g&q@@Thz0+}$NQSPP>l+k1>~`9j#1)NU=sVNP zteR0yT6&Uc7;^;gt=wKHFB7iCPawM6AEIFIK@$O)k%G7iOA|2y-azzm5uC|bGEH-Z ztP%nRm}Ezt_hApf{-6CRzv2Uz14WA!JJvCd8N$x86*9bnAi! 
zgTlr3XE%L5?oTpNp7k}pIjv-m=Fc4ofGh$f*IJNrSz;|n0q+0tO*`dwC_324VEVp1 z|ITIrE*hI2A!t$qkd<^e0|A$VI!i^;?hFmKFJad|x%-#K&c`-piXV-^svj4ujpIq$ z9~>%|LM@D8Mk!dW3o`r$-~DKS3_~r5VOV_B5X3jFw8^u;P&p=`iMojrSjm8?Mp)$_)Sd@ka$Zh|h+72-egwD~C~4-CFCp3z z@mpa7E3XpmHb1o7IGaYZkvMTKgJ5167J3}|ddp6=i-C^*$s#ixVpWlgn5-LM^1rLp z6%J?J(jMuasWW7r?+T5zrLqkdcf`aoSow?;UM1bG4PdoHyxf?(Sob~*fecmD045Q$JyNgxnJnT>cW z?HPy|d6I?|IRSs?CuEI#fD9H=Vd#)0D3AtL>t@{#L%#=^YyiK}Fh_9X11(pmMgd5Kx+UNL$-HBYA{&y>Tix&WvQ zo*8!$eR4IYC?;{wBmm)YNYQw=`#DhQ%796?LshV0UASOA*-2m1G{F4F_@#Z$R;}Xx@AsQF$M$`>y z4#6C-k6VQNK0%>t;1G0;1ALF^z3?PO6o2b?-Hx)!@f@b&utSk%ufB7O{FKZmw@~Xx zki1YoirTYcaUJ?k8@HE$OlTEMNLYATwHYKsn^-a5OU*%UvNHbs| zjv(^kzi_^OWiEcj@34=*PRG69wS1k`7yv~;y1&!^+UImAYFFdS^f3P+yb>$oR%I(i z!Hl?H@$p?0uh2uRb1z4I=pA3JFgPnX#sR!ND#f9je}QI-iD4NKOa_7Is~*VPoqhHt zAq+H*xbDzQwFYg4XJId)CPWnpDUyI{N0ZW)-|iJ9=V}iv<2+z zk56F898*PiCOx?87`*T%L*>x=Kl8gfLYE9GDG4jFIB*28e-!3L*+^h)*njIe8A+DS z(|{2;^#8lT>G>nfQyI|pDcC=3d@RuU-c{V4jt+Gy1ejYu%)u@E#&^YsU^5RXidV*+ zVh5e`i$HJxcJ-UtmDeeq_#--(>FO^2a7yCJQU<$WlW$DtVx+?R)t=>^__xzt_A!@e z?76S`gcR2^9jl93X$`=1%!be;e6BZPG@!HJkfOG4{z6m32B|52`MsKUEJDY{ zY8*>4P0Oc(5Mv19{d?!3OJwk{w9Q0>X|P&>V^fNO6x#78H2H_U^{l#yLC!?vcoSs5 zFuRir@r>>lqFlNhwgZ9*7g?6F)naEwGcdAIto7Cf8;Iue{2(z3OhKLWITw#*mz&<6 zJV-v~??C)_ear{?UX^cVF!j|^NFSj7U1MFxz6SaV**Aahkaz3s@jn)YO`kjH6i!d# zyYkc3giTNWax)wtda$9HNaIiJ25%WrN#gbSeulYi?aeznmx)HiE8;Nkv%~>P`>Xfp zd~v1*NN2bV6{a8JpV0V=w{0+m<3ueb!pWQ~a*?HRr|8Ub+sN4N^Xud+8^K3uk1LaC zoeRSskyZiT5j?B)1WureRfC>~;UyugJa#QPBScL>Up^0BDiXwU_ig&SotywqCgfoz^b*z-oeaOPQ5U--*5=OB_c4 zgQt8@=TgU65j7rd@vwVJACI0SQwaJq-GI|6JC^o8afLl$duRmuZr4eVfXbtBI0lkb z%+{XQDDI%l7Lop&kjX4F0we1dW>>rg>lnqy#%Rlhqn=2IV&4|tHXu=iS4QUVhHA$R zg>@h^5d|<~3EB!O_ehC$eyQXSAM}4O*zgzlDH=;jH>GqcAQ@v<3~g)&Z*~B*g~a8| zt7-^1fs9zXQ_#2$rl?|b{--FUgDa_>7kf0Gfh3+uB%=4BQMmy5eLJEfI_L{1U~d(e zFLh`6A*=&HNK7Kbw!T-+Xt_VZmM!psra8=_x@f5?!^M%UD3+Q`8PKC6+8BU1uv~Jk z;#-|2g@3md6nr{Rk#B(g@U5qR9_W9Hh5P_u8ei@Vj1&;sDjI=#nOItcco=)m%Zgl8-9O|;1!++mIVeRZb@-ZzE? 
zP!1zloCAzjOkhxM4y#$9B`)EHnT+J1FvB&A@S$9S8hp0jp?Pb1s^@<}drOXh$c^i& z0s7DI&eG}~YK5^;QwIad$*in;xxYwe2Uum;&y7Xg@*#PQ#ZdXtL5r%-^;w*j%3847 z{sv!HqT65fAk7FU zJlsKL=Ck3cnpE#Dj1kNm>%yfS zv#L7`Jh?ZA_0Q{DWxY{_zx}LrjnvJfKXRZe^F~%&G}N5r9!4Y@Pty508AH2F%*_k1 z(ATcQMu$wqsL%Y*f_z{7+2@M-ZEu+_qqZEx~`HG3I zYDQ}YcY7LvSQw-R(=Deu^bE;ls8ikL*AJNs-v+w^2i<)ZYV0Jli4__V;d|c5!AcUh zpC5#<;bVgGv7qh>n3vz>-YCsHIzizXR}|C&GQh{0ee1}IaExchW7trZq_ODGyhV}+0eWa@VL&|XurUjaO*s4){!wOF!1}j?qTXeqQzu>OIfZe4<%_k#9t4*;}J+f&}SpE94XZ5}Q@W6iv zfZV9X7ibZ3E$_E-VAB#=UMlhhkg2}2?{l992yPXIb%F&NK1FlZRFF4Jb9Knot<0$I z5UT9Cn)HPCwM-*A6j+eLa8~Yc&qUp)GyobvB#E4x^Xo;&{^rpW@7ka~71c9%=dSTy z%FiqS!Qwjyw5uK>D=X;oa<8@g3{_2Mt$v6bAQA@X3V?0F+JOb&NNhZ^lB`5 z<6#vo)CHEF64suN#yf|rWFJarH3O^OReLtC#%#^-oHw(mNEOFMWP$7A8-NJ7G^JYu zK@xYmFVP#lgwg@7lb0*Wv4oDaD@tv|4bLcG zD85nMG0e~x^RHo`@S8-mL)Mb>b%Kp4D+|XRmD*obR4H^&1=9ABbhf`q`R4fNo{xCr z92e4G+u9Z+h54E!kf-ak7yZ8>7YfWP$zKwKUgoD=?FA-*Inn|?|^ z<{O4VU6j}d?F;BWRlRGT(j&acqR=RB9!XdMasre+GcZ6R$R)Z{7%Kk>W% zsYhL?Mii&Bq^+x3{-i)%R9Z9$6@2;$KgUxPSa_u)|9(o7(ThzudSRmzmIe0uzoI|n zxuu`VIcj(UCszDjyB7LD=dBI}JaLvIK|;NSr(VcXsD=mls`2o3$P6qn7U2V#wqbKO zub*m0{%~%ipjMFF{1_2YihwC{p#;w~E?A&Umo4Qzxu9k6rpbjJaIqxX7S&*i!;#1w zwb@xa?7cN^1?Sdik3xKkG5=_F5ry_3SGr==`Kx05dh3y2B+;(E*@v!o+1{?Y5|ogf zk`PVmJZ+lS{W5T4DcGJ4);pF6v4QL{XyF4q_@jttL{8?|*JJNJYS+ewQ`CwfSX)v9 z-|ZZc*dM&m^XPE58{U_0PE2w)LznHOEEX$@v)*gbA1jXh$XJErM;rH(%D#l*U_6A zIY}#2hQHgDsS+hSjo`9HYn<~$TM#R?mn-vH|AVbe6SHhzL{KPch-}?Pc+?rFso;I9n{y!9w523sP$cMn0ILbW(USTu zp@;e>J3_L{{g1pS0QiHU3VnRDXC1x$o6iduxL2R|K-4*6fk2Q+>~D+YW+~1?#k68K zJDAi2v_8rML^7x=(o3{DBoH|tcZCvebZV-et^0An3ose+(~-nkkL8>lTGzh8AW$Yo z+WW>n$0y!?ZRCR{>O(I9hx%*;uzGu525X3871HwNZxe{?>kf{J#G#o(4WqqX4y zujFr55z+A0CW1_#AjSC$5YJml9Gcbcm{J{jd&kWbr?gm%Z87HXHl32W<+C>glWhz$ zRDr&&p~!gbVJ^g`zEAN^B8>Dlprf_}#5bFkQKfKN<5Faw(^G{QW|Qn)ZpSntDKt8)G3FFY)QWk64tLhGOJ0sMKTK12~#Kb zTXe^ykUWh2eN({IE)dDjOPsgoj*D|RW{1?XIW;@QV79~7I9%rEZ?_-}I9VPr1O+x1 zhxH1o0FtMY56v!dA3N#;>Qyq-PN{P(dIFc5c_Ic$iRH<`nDA!mvbKwW;Qejxh7L`% zyqU?Lukw`Bx-bkH+`Pq01$r#0CaxgW^W&CY4aU^Uw+ur^c5+R03_zl;vsZ5ZJs@t+qdt~E0*q2I9>OoiGzdL@3;}F0$$~#g$mj z$`3d&37vliG?kdFhYBHJt$k@=!YqgHJ+&gT>kR%efyHV_o3_i|rWkR9DNK`V7=Ex_ zOJFX}3swhmgY^gC@$2b?rOFQSU~iDonre)T=yYzJx!3B=Uc_y)MrXqxRd==ghZiJi=x= zB+wjVxF;)3WwzE7q~pT)cwYqv6)$1P&_97nb1h2Y2cuWq-yx7C_HJ)w+TAJKU8vOc z+iZ?JV*W8D5EnY&#qLuc0J28Di{6PF-IrGEUh7{Qs`+K^$#k6>;}fh3){dB;i$(#A z0-|(?xlAvx&iTkR8!zj&@#7c%W{f3#m&YS_&{kjv{tFqdBfa3>?CMTSU8a8Qtc}xb6g`IbGpKgtz{izY-e`)jH*Fb2 zWdY@uW`zJXPV8tY(#ye;(f(9y3-F5 ze|7eXRbhpz0#}`T3=t8Ke!<{}F|PXmYUG6*t@Y=NiORHv+FhUAC}IN-V> z^E0n|23&I*vV1SR>*tsMRcx%6kA+7Qcfi)Yycx6>5auP2zj>b6gyRjvgvUvvmnT_9E^V^YQQtnNqK*k z^AiXnbG|&0+AH;)Z+F&}2`KtUZ`y7mx}z^dL>M9toYRe1S7JK!!wfvcx2_GvJd%N^ z-N}3Ya-?vjt<0Ctw8ee{t}}3elaj~n>&4YAw`BEEELW5j5UH&ucl@-=bXiU};MxSg z$K02Bd3&VkBW@U=3k>6!Hi0kQ0Z%&*jG{sFNxL2;c+se*?nua1;`uu%iM({!tlZzpSAR#0BuiX6By}HbGKni7dVwqnQtl6uRe#1y8!lMoSrL`?X z@}_OnFs{HWD9-F}M;wkGQD=D0Qz!*KZ9-^uV^oL{OJz&Ll1!Bcm@Koib%T$=Z>{*zY#;92iaK|Q zET6(g#ELvkcDq~lpxz+mS;uLpD#zVI2$SCXjw22RlGYDIDuf&Dk?sn(9*9Io=)2iF z)2mC-Yh&f>rzQo|-Ff!dg=zrIdbp-swGOTHMNc_Kn(gtAN@)fL%ZP{6hP@fW^Zp>_ z>mnkxsEpr09Crf{jOPUpq09XMeStNeEOql4ei7Eqgw;s4Nv4tRc#k}P|hh3q1NOI8Iv zE48yphK!jZ7MkZZ32E#qp4w)O^or-<)2r9P{s!OgRobB_WUQDpiqXcQKwr*uoWa3D zL6!w6VWH=T9)#p@egCE(z>|SKyWeblL;?@<7_GXP^zAgwv62e?k&76x?wUd~OxB!j z#!(>f39jYDUyFq~z8R|B|C(1;Dh@0q1IN7GJ-YmcYn7Kls1a1zE1LJ%lHezTZ{NYU z-=0L`Uw=I+&i=v2(D4jP^PcVk8_D`g9HNrMfckPgIVi4U2^bX{9EZ7Rw~bFMJnlBa zS1F;h)q@o8J`!%pk^r&|EM$Y`mE$IN)=Et4{DsZeR%gBj^!})ioDEcC;K) z?Ek0#8Fxx6I5$4B0_0V6^u?8*V!hk(%9js_USV7kCe@ud`_&hJXJz}6S!P*cLOgrt 
zIVrD&2VUDQX?5}%mj)r2seNNpy@7I&_6=#NSszhzmw;|M6x)BcG|QlS)-wv!*ayjNY1j) zaH9edNx*Iia?7Gk)#qa#%IC#!77HEDiQ_SN7T*n2n&iew+-hv)HH(|5_zhhNNPCBh zxZ?8)CNpISz=?YYDAflNi2K_O+w?`q_rScz&?A=J`}uK=&i zdR7xQ3|OoWoQb#TNd-2pOT$~^;{rBHMTVOXMUv8{ziDjG_VB9T#IK$Rv1};u0%PAa zYaz{RE_WY|scFLBAj>1MAUBpz(Ss;GHll3m$6-X!fSw49SmUl>@PFOb68gK|kDihG zv#hgy=5KK|*uN8&QMq+Y5k|>EDG+s;ygfPNsBa8W`+-us*Ju~8v!HIoC-chN?)6V| z-61xn9V5s7`m^uu?&#Q33aE$w)O;jgw6sb>vRtd>QsayOJ~Ip=rY}ywd*+9B{fdk5 zK__S&m5RNhhq7=Q%hSf;)64UPvSs)y4duH6W;@LjJ*J;(N4#lJ1SbJMrhB;8OB}w(dEO&I*oEbP1$f_k;Y0)gTo~Z&UxUPwjeQKf~hM z*Apk2;~xlv5jC;mxO_ z&X-Z#h@C>ygC2>cCab_?iN2;FrMCp_#|zKj`viCzFVQ?>s<(i}DY{d0X2PT>0`la2E$D^w7#6O{ZoJ0`ah@ z&vCUb(#WR7dX#p_a)Eq&Xo&ZGk`OpgogTfy{fb+%pUK)9ZYjO>s^;Qml0^+uiX}f^ zS0pW>GK5)5eXuZy6}s+&#d597w;^fZq(9k)KY612UgMor*aF=~+YT(9O46J$k zB4mbBuyLv!n;@LMdezg0KB*amy;xn;_U1?Y;X;RdKOBlj<^ZK|9be^Z>r!T2un|RG zu6&Z=Pdc$0plzHkYkz!?fC%ZtXhQvyx#)RsWM1GRH9%uT@SeWy!!b3TX4 zWV+}Bn$lT3u`)TO&#rKIiP!!(&+xc>Ta((e2#!AtmW$4{U@pUFt%97gc~z&5r7SWB z>kkC@E*MdQ6Oe^qjmmyspRaYOBY>wM2Y)M=NNs=pFumdy2iW zt%p|0pfXeE>RGBAG5+Kp!;<&QAJR8WPfm%bmcf0f;Oa3S105t7>gDz?M zQ^b3i%h&9Sb*r8Pm0G2d3gnd+lB%kxyQypA5J?(|*V)qwA{TTn*Q1b3c8~djm`2ij zVBf8DFa^4X7@h}=Hm)CxWN^sI4~Jw8%9`?ieD&P=!|+|*^;)J{Y?@;t!GSYoaBhj!nX0l<)yq2 z2fo(uaiv_hdQt+2<+r4vfxncG7IZG+-+RDh&j+0h<+_+u6LkmXrF1*=Q|_ z;IseDDm&H+MC>mOSt>jM#Zy_>CPRoP&-YlAbhd|b%s{Him+t7A$~e=fGjd@>wsV$R zMpw9Z2Gg32?f00*8Hwrgu{6WcMU;egDBGOzZ6fsn&SS-zCAV#fZu zOx<_*iWV2;unQ#{v358D?!Kj!la*}zEqC7pIq#r=o>~ft`ieY>t$UAGtCRmc`zX6c zG=}06UGeQbF40p`U{MGxCB5$%VX0%W4Hjr>~d6?(@@ zER4`%=c)D~lB~+E;B4xD{QJKI-6=I8E85K*+7=7=lirbo*3O$e+A+zD z)V3WSaI54qiOD$(!QfZ#y7qKn^Tk_vY&i#uEakb%uX4n*^MY*DBWN%T3F5}L!_ebF z=2|zhG4yoY9*voBIT)#hV?8B5sn3~O(wDqx7Fia8vA6^9y*Vm@Zaya`9`2FH;%sHsX zGta%1ynEF)Bu4XyUTI2YstZyO`6XBz>2Z8U7L!dv7jTKBu>zU_e`?>AhQPeN3=W2! 
z!OCTZXZ%p8W}8Z!SOiqy>wSmb>t}5Ckq#+g$7cvtqpcrF zJogFS)^r4nLQ>%=6%pLG1g*L*{Vu2#Rk|UG|KQocq4Tgf8W1YssBtQS!#-n^XU zLnm|*1?lw*0YKT+?q@V++zcAh#_S7L(2ia*Q=r(0kv8q;?TxZweaB=Eqr;bEpV@fFo>v#QgQG35l1V@MUg zp!=j#a{&z&-(kzPfa~u{nBQW(c$dyLR2Ukj>mo=X;3cN>!uf;5wJuBpNA@%)p^I<= zH3Lu+J>0p>H4oC!#-}|~S=K@W{U8o(dlZ7q?-H^k6{2hzRzuUjyd)ws8{P{xy<;4i zvkP7_F^zBeE29Wp$NNF0M5Er4w%3_JYk-_*1}5xd$Mokj2x>J^#xwU!2bs%0U(e+P zZgf)#x{cIgBTV1He1*saV#p(cfxDiQ8R!BO_b(ofEW<-Q4_<3@iyn~cF3CNTV za}~JvjMBbesX=f*u4iX}y^U^VO8-aQsg0{IDGuEONK*nP4bvxD?ehZaA5o(FH`>_t zQBdfxk;|UatH*xlHVo$UbrfW(nbJ`?c|aRpP96B-Mod1P+~KYwrf7*mx%yZKYQiDP z#MEskwmCk1A-~MgRIociL<~F(*wJfgpzc^N`glSXODV%gGnIWidEvp zhwMA=#27t>!P_pk@(t5dJajh^QRg+D&hnt(2rqcKzl|Yu#=$_WIV-kBi|f&H7b zN#Am8UkgjOG3d=ga*8}bEM;8Yn5=#Xr`%Eq)!;@?VvPPQSa+t4o~;E54M+|(=NIly z%kza6B`oDtJPV9R(92^1!Y)I2};2l{5xLbYLiT2>7z2~bNu*= zI(JQ<;Y~on#sUNI`Rp%9DA3VQMm> zTzKacXo?Q}(I?j)&dG8RdNxF)LHGJ2?0Ys!mOm2R!qPdvrG?bK>JG6PFJ;G`Yh)+1k2n#EJRn zJox;CT5Bj%d}?6KArcJywxX?uJGg#^0c55Wt{7Je@Y6%pQx zuqn|M!!Ea4f$-e(XPK9TPAcyk(vQY|KM7Og`v_v@-Elki@dO}JQf5hR6IpygK#HA0gmeXq@?Pj|Tep`$CSi%%RqgH$E`x1^@jo z*5D|pZtKjuM^upH{q&Q_asgcfYo~Bpsldz)q5e&>T;a-Ba{HYUv~muFK=OOTC{NVt zj|upSLZEu7MyG*|Pz30tdh^I`?IqiJ5W;VPTs1t@bfvmHuR4^K>tm)u>LA`W+8{ z>7@0-0miI;}zw?CGu2o<0jxj;{xeizn@!%ji*pb`I+Xr;}NjU)z zAX#8)eIG$^C)brm>Nl0NDCbADK$=qYpi9A3+?@f;y zEXl~Lh#Dt$5ssZpp}w0MnE|Pu_XeW+EE@BZHN$gp)!$AA02M zCsvJVd>0u@{J+?4gn(3xOz&$iV=X{^KGs8FHwx)=C0#GSX8lpndEV@tZ+PK~ z_7uY*!Co-Tfb!J#7W!4w51dY#Kc*YjJ z#C5O(X-J~$tJXrLC%cvZIjFjkIt8hAHkpvvf8L30FIl>lKFNXDn-Sr>oWSWAfI6BU zdq!YVc7l}vn+Xh|?rVaGW%}Y&Q{hPPibg{)uk~4|Cm#O}$v3Wk=;^yB3Gr-$7|Qj; zT#zF?#^>=o3(m=nJ8W5ZLrpc?)9%Wb@(^WnW^vUu8M5=9L1a$=Nf*CSCUH;8HYhbe z$Bg3|@YsYFx+d2)QF%4y35*1=u`xICX;R!n4E*2r?Spm7^ge4Ui}ed-l2#7P|JVf# zU(fk-sy%u3rL|PGLk^AVL&CkiGf4T~Jn<5ulrnUY^kx6ljqC_~l=`Ndhw?_9Ss~WQ3!cM<6H|V#7U5_JkYKHtW+Wn59Q?!xUa(Xwa29SSbOu{j3 zU$%p^;ngKa(HAnqFV2w8%NG zo`fgwfC{h5^peHNd?b=#m}eHR#SkX6;EL5MdcgAK(R(dHu8T@P#!APjP(LXrKCif zf1Xok$lQ!Q&>Eu~9psxVtmbu8o;gt!3vzznRU8dxocat277Y6MqeU~Wj4@H4;#&qb zFQ5sN@NW?U??7(8PNR)iyhSDr>8^`mXaY(#l{Ya1 zXWHsU%E57#<_8tyR*xxl=94u3f*b{I!nqn#8$9TrOaMopWXems>uG5cNzQx9L6!dg zOAI8!JK#^qMaU4+;U)6~f%r0NE5AlqDMGhWT~;vWOOF%BMHNb!^#;JpKLr5@=Oka? 
zRmh^HzgH8WVCj9O9`mVX`$ zs8&}}&MPZ8?A8nb={I#nY~z({mLCDWpZ(D+hG8;N;UyxZId_?Eq1HXsQc6y%U^vHo zos}?xVW0h7Bo6}7lbpwFsDikJcMpb@e?H!mGwU>Z914R6kic>|=^E;bR%XnrvZ8%H zx1)P!l%sM;ehuiDRjoulfqqI!|DoJwz6sfvoc8;!NSkUBZR;PWCnZN8?xlSrykudP zREMnum*G5wJ(Gkl=o5{|%0zb%ZS(aSEfqCAC%_RKPyT1J{$eU9aL$p1ujh@9f`hq6 zFqODLK~A#R?hiRLBp9oI1SqFP{@&B)LYh%l4!PA&ZTrieWq|}@nqDEIJ~nt z)@b}UEX(SiD^RLA7P6)RfLR_j%H&5=1@p~&i(iZOv_d{X&#gd^fuUp*KJN1Z#gl1{ z_dm`k2RjfvR1((eu@gV z+w*ipR;KcpqmS|(i{x|Vgi~M{3=om+p{YLOwuI%nP>HPS*wKzM4#K=%JuRcjO|O$R zwVeD&JMRftFlm+HqdGG$j_uj^s-i0Y-uK`}jpIjam0i%fq?2Os_2rFLPD{y6CLH2} zW8r!Dm*y#{kO+p+dxtY%1PU$MPFmBD2eE2Mn4tufC3Ip{WDU5w_V1n8u-Af=d8Zm~g>cs8Y z$K9h-W`NUM06Ee>+x|vr#7R;sS^dIk=1Qz{9xNo0gjJ{}qW?S0;0u2pgBVTI&6GZ* zgT6A3gnFRQxT=*Kt=B+5 zjh0ArmYrYrfm9; z*L_lIzE|$mZ4$M5N>$hr{HApI#_Cl7KARMnwcHB$e~sT$TImkI8#UYam_3|>^LzE4 z4aqdK!Qg$23YA;a>E|P1pPh5$48Q6&j|-nAABAD)PI^$uxnJOHAy7{OM_w7?DDLPh6*Fu};tv$tuEVh;8uH9@HotcZBhl{Z+^uCJH$hi+>fr?~%FiTb)(64+b$nr)qMEg}oYI;zHH2_=U z4O!LFwLdJ;E;#D>Yd)NbqWqztm(}4$$na?^`N&0uqVXP0a|=vyXkb8Xuo7N07y?OD zCALbS_Qn`-Zou>cfVx8~WlyF@M^yKDr`s$}1roD6=m0mR4z?})gn+nmJKXt(afT|o zMWWknHDAOL{_IJ#a6lPV^_vEjcgafh8Lg8wpH zJRWy8v9$wi#P&W(qk#oP^vO9S1fz-BIag!-HCL^~;PL}JBKi4#e?2Lm_v7BZeO&g1|0* z!wjjMB6R~hSxliWO49v+m>jYa1us<>?*6kl>;NllmtKn5+!~`b+SGjCzHvzVW;ecy z0quiC_KwRbMdH#TF5kw*1NUgp%fW8SNC(B+nwRi9%?=wTdW5~&SJsXffGNF_BL9qN zfF6e?xGrtFTN^9k0GYls?6Y@l!=;U>Nvhpho^g7YE!VNfJG&5o*S)R-d9`!o^<)!w z_|-!HqS{r8AR;f^PjI>pyxORb12V{KDAZ#$3^e73tWvqg z%D4JnO4fnHh8=d0`}6N|%)fD!!$BS0ZAN04YN5lyE$+Pb1qXSXf+G9ibJTdZH%Up)9D}v2&4$WwZEO5jDSNx0%JuU&VqlIorybePQc;x(V|c zt%j+-otE>TbiXB@-o#E)+{Y!zj27+^ssx_h4S$?u49B>$mo6yWY%O8oJb>PBR~F|C z^~Xfj03bKVJ-f5ir!;PpwP)kysRRLv@QBm8m((v!In-Xdk*LObc_U0)8+T1%k5$r^ zi^!R+J5juB1MLr4*C9Xf-yQvq?AwNrqf%2h(LO>vWY_S~GDVauvQJ%O^9Ip~kl7ZC z92--|7J0JIns6mBF`40iSn+hT@*umAL?t)#q$4c#ZQnzjA5J$)AHRqj|7d*}aySR) ze@>{-dE2r2yna6T5~mQ+kir*aT%ech2|u80;}9$2oXlA69sBRRWhXA`88k6!r^BbVZ5B`Il)5<%{ z8zBW_5;X%b&%)G~5t`%{xhi(bDNQXBF%@b`W21h_~yoMXpR?xE(+{2B(@2 zQK=Rf)MnA{%}nuw){a#7#Bx^H^#9>3+nzeYbiE)Ad|;j0mGB#4tymKJAer|I_xq~Zx&q&@&1FR`>3bnnvwse30DbQELL04eCT564Dh;SS zTliEhn*`?4yF-7)I93o<3-z*l{(2$4upI~%7G|Mk+ZNR+w7V?RDZJ}jvjsSwx+Mi8 znp47@;aE`+)*_*41`qg(JCgE_ipgwZ16HAsW~207_6KK|2#IO7jv0|ezlj@fTP8Lr zn-FmNYeVz`m8dd%d#_DHnr1W2Rm4-cpI)p5HGRC^=c6hIyRL;5tAWHn_vFseMSf?9 z45&)dtQys^C|7Ntg`DrH^-Q(~H!Xo5-bwms9wd=hjYPRJn-T!PGKihlIagKX-^p}F z+SJSi_ooA=iL7umhJbKE*)h`>ff9N_U9hN=EnhACnLk;`l^}r4QZVRnAr!6)&Pdeq z(_t^qvZTLPH)~p2>}tt$eXD~0!`j_Ob))9&f>AgAf{H(PTe>9M7ay~R8z_c{3G1LY z?o7Y&9AZ?G*HZWhd=GrVP;b@cATH(Y&Xlsuoa`s%2w$zw+K}%U2t@)+!R(Cjn&Rg?w z;<{Y%p4hD}44}tB;1v~vF(%Gwu4U~@SFOz4DH{I%mTyaxt;8X+EtmLb#2NUtv{1Gp zVN+Vy97@r1Kt^6U0hbI}K{z1>7Yr#-iK3_Mp|8lt&K&u4Ld}2b7N3_k(@X7`y}S2@ z=nzkr!FaWSWa z=y?c~ryW?~Ie+wl*3PAV&`Gr-xsPL3m-Av&sNjerRM38a!}dC?nn1ICEOL7C1)FtF zSiu%O#{+g+OC_YTaLwgrZqS|>`grOFA5-4G+(Q3s)^HV3r87ooDxxGrvf-2gg=Vq` zc3y!q8zOn^3n6hYeBgXH>9VN3&#(?t_W03(f-Do=3(+YpP|UcgZAP}6=V5;ae%hm& zndx=swb=GX1A@-?GV_&0yk>n(vVV00A%Mb!b z4@v?Psb!F`zzf=4#0AhyGMa!0E&t==D!!X2Ur~LxAApyr-GNC~qu{fRGsX-pPzYQ7 zMx5jTaWq+3x)KY~W+@Pz`D@N1Lw{`Eahyj`$Z5%-BEnz4?H$Abi1yc6Zn~T-m%RJ&EwS_ZXpTW%zdwO;pZolN-esGE2uaKg)N7mz}FJv zr+(jk3o+W=HXV$(I6Otz>UR1YKNBQ@7}!or+rFm+j=7Ft-(Jb1W>%x=xy@z0{lsY1XN;?>Yg!Rm`TZy4XsOkVwI0|UR!e6m>rc5OQ%IV%VDxbhgD*zF;$cntcUW5ngj`XR^ z#e(7PT;rj!Xaz+pk0&^29ok@VW@%1^Bwl(tS-e2V^nZJC748$5=c|@ zprEoHczo(wz=L-`jwdGioQ2{vRS#Hy;_e9l-jr|nf@#R)j{oicJWoo2%HT9HkcMc& zs8HKlt$~MUqPEYCaCT9Kqa-t=_EKRS!EJK_!w{ipw$#QMabY$K*0-7BI8tIlSFOl1GiNMNA5JuV2CkA{k@)t|bV5H(&>MX^u`~^EBc1kAo(5 zfB|0lLHJ29)_l6z3QE7nRJ;1`Y!TlmFeX;vgFhlms$)2Hhkf}9BGIZL8Wjts~ 
z1C40vwOs2WW$m}_Vp8l3t;;aANBg&qqK|qniqL1V3EA_%71$+aUv7ScpEaocvNAAx zhn6r9WNXzFy~@#vzk?$M*1o*sA9!i$o)OE#?%{^LO6lC~@bxa{^4g&UQ zvMyfAN^7#vtOaLsK+CN2{^_d3Cyg{$erYBH2$I%o^mdgidFpmc`K2c}IPHY)0xGu% zvFN34P_}vRxk-@JS{yW&cJ3Mh>mZX}AFOl#B*D_ZlE+(p>Szl1l1KC%lI606diQIA zg;%>yVt(DY{IC090qVi5@ptJBwouXXUjW)iGu+v}KXkEub0#x)>F{aM$zM1ehW|#H z+2rjy4C3I*V1ER&O9aN2k3{w-^P@pm+5t6KIsXrwey}xM(Yb%Cpw{$WC7`g$ID)og zjbEmm)N6p`R5>Prv@ngMbNU@Zb<;xi4+FM2Nf5hX8bL6kzi@%XF;Ls_DxzqxBXk-B17UmbhE%NhN<(^AdA&SiCMij_0@D1$OLs3?*3Mn}zg$-=lBQ zLzmD_$VO(9YfcSUmki!6hCAsc(*TE?^o5{X8Zm>~*N47WRuY_Oi@i#5EtqtqvaVqJ z4!?QCL*-eW1zI(F`AogRM*C#e8y}fLA{D8SX7^7w&%?(|warJ7AP=yhvahEn!wcJ7 z@*NMB>W3Ud2TJBPf>foy1wuA`Bf^9>WFY>))K7Z4cY4e#|50?bpqDcnxNhrOsK}cK zq)0K>tEo_`o(46Mq88uWZNzw(m=QQoNDqR8BP-MLWiPMvQeqMP=&RaJxXd4xZp=om zpG+4KS~VxP_wQ4eZCNYZYevA?h6z5epzT^Ox0xkDnYLv69%TUgqzcJgo%Niw4^^Q5 z*Dq-zAF~rt>M7!+<`fWq{b!|))6V_&`fzeh4%|jX06bZw!!GZtGQq^miBMfYzudFP ztGZS6s+G%-SJIF@$@eRni3UgXJ1t~yt`{)cnUhKd5)`e1XVhN zNtH&j{Fhp$a5Btv5fc|GeqrVZKB={#bv)zLBTKAvaf?sepXH$xP=r@XM}g0tz?uro zqe**q$!ne5^?UA|`@~zYxb4I>XF zsRyotq$QMP;!@2b^5A7XpuC{A7pKJ=%-PrzlTfnk6+DwUH2C3@sQ4=@Wu}#X(Gvdf zB_s4K7})MVer0lJBwR+;If<_wje2?N4N|4M$&>9eb50gD5tXWkk9yLy*qUY2PltrM z{svF(GXoI-t1u1Z{0U7FjFRkz&LxhrQ`1szBZ9Mpv$y*RrhD$JP}&}xYvmar|6zY# z7P1d|Y1?Za{W$MXkj_Ki@T*4P?2#Sx86HK!Nc-2QZn5si+M&#P z0xpbPVV(V8(&0cPepHcg$G*$kz8#=4SI3>BY>cV_xnKo)l87gP_PSM`X(<}u>K(GY z_UGFUgZ2?1h*R!Mj>jAa5@_G_V8>JSsK9*Y@Dz)>CS+P?UYz+o>nVp znEU^kdzQs&=56T5cJ*H`@L}><%!?f=TC(yRkr~-7=7qrZFY#5SDpe@@Gxc&k?xx3@ zY1SWt&fiWMAswynoYKJR4``zIly5FG*@|z{6-c?14K9$x$QUvb38<^-jRP>I02jrX zh#1r)pkIL9jaq~iN!l<8jJ~*~K6jDY|;sP0S?qim^0T}E- z8Y5iw%GgM;8IMwpJntb@iuF{_Am;Kxk&Z6kaq|x>du+cBNB_z(OI&5W!5VJd@`n*A z8}S7k7A-6T(8DP-zk#j-umx+KEP^gELk?bNw)s)>|1$K0_)#(hP0DuqeIxGP#q&Y2 zb(us|b?j5#JhY_#j=*!#3-J;3?61J6;e)U@ZB5-g+SUfeF94xOd_A56{Gt$zKeGaf zrfoqxY{B>c{X247N`5o(07Zw%fLNQF>yM&=$j7+hYoR1BkSH`-qj?Ck8V8!E!)&Da zO-L@qeGTp^a(f+)y3mDxe@}G=^p&uwId7(c%#*)ZYSZ+C>BtwefF42=NSUa?NO2PL zhm_f1i0{;}Ute$`k?xizx9WOd5huZ%n>pTBSCaw8y*OO09eLLl2m=Jl_hU!pZy+Y4 zNJ?Bkx?lRbaw7*iVgs|FEK^{owzsyzjMttF^Kmpo+WH+%F8_sTM!vyQ)x2t}4}_Vd2`h<-Q{9}1C$2>uC_;8`3$88LSO#&P^Lg`cCVsJ|H&d!1+%qKrY1)o z8aGI0MCtAN^B@fpB59jtqfH55STDcF9hAHxJ^FEgSI!rdVWPHOGQJJVHys~Wbo*(- zo9`#};NFGNT`2NN9n)U3g~`l|a{n;KWy%hT{UoXaS`!a}^q00c;dt-%WX0_Q(g+;s zp=T{VeY9&&M29~2`OHJ_RlI`3@$FvelwcVVRwit(G6v-6#DU>6gtzkEDCXu;!6+4+;yT`H!zu&^fbI>X zO0yL`#$1tGt!oqd@>`qDqsxXpF@mklwBr?W|CI*jWK`m zJXT^hSeBf-o12CUKe{o(K3IW)&}QN zo_g_muh0oqtl+ipj+j)`6{RE&&mw}gW%t;`wW*-a_2645EuWFj(rCsaVDB{x&tOv- zKV!OBxo0J4KE3#%e9@}ygQwcHHMoUjYXEO}jd3D^T$888p#~Q;MiK8y?a)zO^!owq z4U3>!E1Y(lEjZJsrh(BebQ4<0lCshC8-pHcYI;#o;YAb3kd?9sj?;S2oR73_)H_8+ z6^9J!8C-QoXA=M*O_`?QH56pk*7?qQfJMt8Bu12s(}%c$P76#;Uq3%=f8QM%1~j%e zKn~bhwTN2vzwhkbxlq0a@OoDoPuZ$@E%x$V~p1LAX1(kVZm^XwXli$p{EF)U{e%SqT z^uzPr+iSM*=7G7`WOiQE5NuDdqBYO zAB~a(P=D*jeZkI9KJz-4>=5|6;@Fc42{_Lg-D(&aWpxw&W(m$bBPDA*e!M%(G5;W& z-CXvIBlr(76iBm5j{3ncu&-Smc%H+M2fh@fO1HCC@5p*39}?N{k8Z#-Mzj-e2NTnH zy-qw>b9e$)27^(B!0^&l7a%J){Top&@Vw;}zdTo8_n1F6;qK{3xZUIbuHsa_kD7uv zYv`c>Tg1HiB;mmed@4p3e%*%TViXnGP#0kNscf3h(t{E0Vnc#SJ0>Ga9@pRQj(} z#mActJI@x(DQC(X?tw9NV|!~!jB`}T#{^QZ)>%1;GDQlzJKSS{nebeJiQr8uh~^cP z6K3ow9i}JUe>w&tQYcWY$#Mk26Jub4Z`kZdU01%!vY@NaNc}{Z?BbJkRWxMMB4ZFd?VT;8E+%37=%kd>grZ0@FunRLa6(a4t+J^M0(*<6(mFfgnGM_189&`JGsJr^s?pxml z5MxzfEuH@50W>ws1M?6oiFla06etw0B7b8s^U~y7<5AWH@`|=bByu*y_#Y%&s)_%o zHQrtZEV6uN3G4Lp4NVaA)YSAavC#OhQn{XLRp_TVLM}Sr2T3`@Yy)fQHJv2?;tx%e zx7gNd^D6`k!D{Y2?HPSYmj2^^Y2J?uUpscbJ*sT3I>G*3@4J`BD0^;7`{|go zC@)^3>gz!3^!}UJ6MD`-Plq)EJEIpjGvW{}AAMUivT2ZV&+LilkAu3NYYc6=n+F`x 
zoS60A@2T=JCf?YuIUMRLES;1O)nR?1%(zVZ9V?L{&QB@ z)MDQQilK&=`vcaJ!LW|v2k3Odw2?51N{yt5$>MbkHCnl@;`wXJu+q>bA-F^X@@vip zy(GcO88!>DX+XGOVw~AZ{-zAn4iys&n&FG(91Kh9$gl@H&0OkHQWL%P9;Za_wrqas z#RRGg%y4d^$p1Riyw02mhq}w>d_|^Apq7m>WnFSx_O~1-bn(CriK*6wtfw zmf`$FC}F|TgQU+=9nbU55-Hyp{vSi%$O%+ z>bOxuFaWfi0M4noEt>5y$p<^i?+xhQ;CBghM!*gBHXmS0g%^et1eN) z0Cci(y&acmr5-kn-nPV8?Eb0|{~`r3ov0$+l$$6oI%*Fznd4dFi=2T zB_!>>oCX#XS9_kC&YyjDdrWpIhLfSlvn*A&hNO^n>0H(V-Up-46wxTipW)eamam2K zejoXb?Q+5f*m$fdq=`L96G5AIU4h`c20DGKG(k%QfTzVxO2^sNa;^7u3NkeG1QW*& z8DZlIQd#7sd3CPe5bTxx@$Lvd?%dV9{57>ePi-Z@d|J=V*lS>0gmYj=d_vM3+*&22 zxEeFVB#{QT6KM`-H)k=IJFHDE1gP8&iCBKGE^G>hscdr9rPL8Kr9fTC9rV0Db{phK zd9))b|3;HtLc32hsFf*u_yAw7J-7UWVt@JHIITX}e}lMW_8vv9L8s>b0^GAb@8-We zpErZI4TlbeAGgaJ=5V&wuj+PGter!vUh^M%dwP<@j;ZVXKGT5pSJ0k6J*m+jn_Z9y z{Ewne9Zn$SJF3eldvDcI%o!HcxdpivGOfsUc~!DXR=c<6G_&}sy8}ik*flv!700#Dt>&x)p5IF0H(`dwJrvN-&rqW>5K5^1V zB24mE?EvTra%F2_afFdok@!?#KiZv?95IS zpbfYA&Ztk~+V0E9XI#9=)pp916zbQ852rgjOaTt(s=6_2IU%;Wj=^Q=W}93(9dx99 zp5g;HEI`70G)pM2%zxAldX(*{xdMX9n)k3RaM)tDXRL3hzcPEtt?GKSUv;9$AzMss z(6*c~dtxc9Uzq^#j+iJx&-l3JLgGhmEQRQsaS*zLTZ0j2SA|yR__~X#dYu?Oh%mLn zC{96&W&G9Q%D!4W{$slwFf+Z+gUq5s^xMQPiKbXMs2BfT;#ZhpufU)rPDSxIO1;m- zqcl?L2Y3D}K>MCP|MNO6@fsxb5-e1(05koq1vEQvJu+wP!UX>^_7aGFzk!j3Ju4Z*DeoBK=_;q%QvFdKq}#U-7kiF z%@A0@g!SbVwFsljY?;&_#bCnrf=8YWP+gU{U*ItJXndE?XZZN^BXAas`>>K({w&G7 zEl3o{DuhrzMnXpRbp_eeX>NJ z+2w7UG*JqkQ)9^5P|LYI&=Q7&6UlihvR`Qewqi6c>K9b@vo9%b1wRpI0|1n!A;Qh> z(MUDXMctaKxaG<(cXmb`I0ST=C*r7qn<8^T&%)uDhWI})px_-`{c{})L{SH-i1I6T_szK zd&CN)68y&_V!CMn+_oQSM(Fc;%LnvwHQxIsU2^Ntfaz{cqJQcZmbb?ICKE#c*eVaP z?I(bxU({6v9R@fZPy1NNCuZ1M)MtWDDoq=yD^6uu{f1_7UK8KqkptJedppzBK4Nh& z3hIB^nQ?ku-I_wa_;lY|Z`YB-H7t-!kXDy6`Se10EfH9ob#N6KT{!F=f>9&7+NZKN z`4*ZOSu-YTzx5u%T!w!m3e*4Q z=2so?9njC^lf-p<7K)yPeAP+zgolzNCozJ`KSF#3jVdkQckrSr+#6$0c^dqB9HcWQWlO~go6>nz<0Buiane%;D1fjIK+d7Z9Ys$HasF}DTK+Gx#k6K9{BGk?wlok=B_ z^dM|+aQNYlf0Q;w2K;A{b#@YFI zx{&RjeKI-s=^jPl0JrVInZURANoMQZXm57Hqn7bpbKtb0j_b2xQLlpIsG&K*N(zBh-h!RVO>fvcPs zJl#jBQ|A-rM)d7p`IkSCzBa>t6?Ia{gA;0o`wuYb>YKsoZ<&8 zZ=Yx1p=;TTEQ5n8{7c67fTaK&%jTu0(AZnA_Xsp(QoOY*sAweZR2%!Jls6y{=AE0R z@P^gio_?-*RVsTS>K(x=+icmWhy^&B`7TK-9re5`wLmK0q82PfDgm@ zK=aQr;P?i#p7+yMa%0C$k^#CQPWA?h>ZNZUIFq5JcZpNS7MC#Z;gvSQM? zgvf{&$}M~xu-d;z10r zq!g5j5(fzL`loW=S(244=ZI@tpo`B0gHJK}~VT)%xYMazLE7@W1RG?SVhjH>R{6NTZl{YZ;};Tb42Qr`vABN>@6_7ad0bO_tXV}_}OF%gJZ9wi=oJa`qT2@IQ|T?BZ`eU z7nEuKcdjO%R#sxfI%)gtTtHwgU$blK)ZzN&+QvZ1JA*E50`4ayM4@3xRVCH9HRhw_ zj|_;@!z9_fJVFaYn|?(r_V7$Ui5>Ns=M7k|K)|9)TrtL|IIAW+xu^<(vC+KJX%B7w z9Dp7<#@sX~!x?5O(Gnty(IvotkmKQgIVIPY!F3WfC>Z`;GG7|tVW1#VmZ2SoM95I2 zP|!Lq7sxzUV%`p0rw`Y=+>w~0SB*X|lxd(BPF2@4O8XnwU&$nQkPhbs z%0K&%7H6N2PtqDdF5)ppw|JU0>uwva=E-9Ag%@&Dc+$&kOFGyCnBId~#8h{Yr z9WhPhFE5H$+ipN7){ZxAFr&?ZhIaX}wChowz3^pdXB$Xj$#{ zA$rnA1UFpQpmjJ7ZNmJmRuP*=_iT`vNNj>UnNeP4+ul`a5=y;$sgX?F>AndfROerI zcxPL5L$S!wZv9oWw4rxd+b$IRR2W`Kk8{f(NpotMo$o!8XBjrfWpy3U!t$}8T~tTi z))!wLkmRFOxsEtWvg*Z#^0?bG*vu^Hz@*qjqs8qbNYvAp-eU3o? 
z1E{`AN-0RSsgyQj;_b3{V{D3iLgd=kvb4Fi&>!{nFK#9Z0v_Qa>zkkWE`M?LM&r71 zxN!@MO_tKaNpD?}vcap(VW(8Ew-!i z6Q2G1Mp7+U+;zK%ys5>Nciq{0FC^wv@SC=LdjMywhA{_CEU$lkilVqB8tSeuaBiPd zX%i}l#K{V+ctCo1c(;;AL_!s_dr=0EBjiF*kuTB-Us z#g|BKDxuESb;O9ftn+`=aufCihIN!uoWOMQy8wFPpGqLz9r7qnE6W??E+Ku0-MO_o ztRMa@rJ}N|>mmiYYtKu{8*meSyTM`6@ODx8HED0~&eYM-%wS*00!XJ-nWv-nz8Lx&R5_=OW(; zTR$|v7n*kDM-q)J+7o(8C3GWEx39&vaybZzB0qby*7W-4kQl2<35W3`aOWQG?~pA3 z`?;e?Znt;&SazUy(vXaCnc8UE_51f^4s&qM1T)&us{Xg%0O%aJn`*IQgSfbZngwvO z^TY50);7cA#14 zoxZrS9MT{8b4d8jzdMJ*wfp#VU&bz)oM+N)hVJI@qk26U4^xh>-sJz->H|)P>v~aB zvS9En`*5}GPm2rwNuf%*=LC>lmoGk9Dl||u zeHgfi2MyHr=uck!iw6qgBP+r5M}P~_(Sg}k*NsN|J^qE<ACV+cfa>LQiye{kOXiJ8+&6VLgm!0;K zt#W8!KK&>dNok4Iq3?1_?HN5qo6ukrEdft$*ad<;VvOTCv_eA!%0xkxMcW2PoE4d) z3=I<21f%FKL)6kGiJV=|mLGK8voRh8XN0N7-eG=4c*-E26%QJ7l+oCK9d7&#JB0gT zg1CYs#5E*0%gkT$Amn1gXn-BFlXbQP;fvS*!9dPUJjn5+n$?h@P3f1 z@Z>Ce*t0Sdc)BX|te|!=Z0rze;%m>3av?ZPe?56Iz-}ZRBb zwnahw5~J&~iLv0?BtWeYSuZsnBs+wA#bp(_zjhS5tPqH^OW8)Aj8#;)so|LkjpBsU zjlRroP?$Z9%z7!{VX3_l6O6M+{@enK%wuFCXr*xc!IAZu_ixgN^&`uHabudy(>E0p=_Yt@PItG&kCIFqo~ zG$)J3(?j|=BZ02Udy(U|zXp@?iBN6O`=={Abqqu5nLCJJtD!fEWRjD|&0VM2LGo)W zya(myM@neVou~5;N5v6j7UqGAu?l2#=T=dZS%muYNLs6CgGrzrf&R)_ULUw#A zOA##aa9(X)g{9!`J#6+Dby>Y@bDnlDrA1vIq4MWxxxDbnc9m47P4+ zNs>-Ae*7TWraFm^k4I@F%|upi+wVJErZyoVyd^gEiw*O{TC!-uceQ8K<4fv~CV)~dG529p@C*Zl z#sGI9t|09^T`vyaSM|NcwZSl<0QMgwftC%E6HcEM`7~Fg&H?dlzjDOBR=Abqn9T*j zcDPdg{!U2_?QJ&&JSJbg2_yO6((7AER{v3xrm!0}?hEjwDVVb;K2KKbhGC}f!^B_3 zOMA5eQ&6xJr!&CzBW1|ED*b zA1tB>1%H8fo_~~LmmWF1ikxfenAAF7SCg6qG>O@J0B;WlU(i1O-mn&-wOMiZHl%SB z@O0PH(x04SwvfFY%Kw{2GT6CGiES!v<#~x3&_p9P0M#yozV(HzVbC^N6yHii#IhyQM$_)2ZZpuG4nDg&Ueq0 zomqBSjM+Ie(gb~L{8VX^;+;$$9tF}iIvQ^xUvl!Q5G?}vDlSEZYpY_-Eyct2qVyQ1 zG<~roL^sy&`4fpcFEHIS39z4X>P;qyYZrgOCgIyQXyx=Uy|uAsxhoMgR@Husz5xL2 z8`<=9Qax@)-J_9S1a2=e-QgHS=fz#NS+j}exw@256nFiJgDfv5*nIrb=B} ztgu1HqjRBzytVJBJNa@>K<5B1iicS*E_D|BsEu3~MGh#EGm22&+jZ^B)Ie;y6 zmNq-A!1^AVpxtUeE6;Dc#_!rbb`;BsUwKsd-6~5G%CDl~!AV&&Rvy@sq}R2b!DlX) za>{_!M7AZAII-guZ%KeITHpxo8Xgi5pE_tkkJ?i^hpm&~@M<@X%FYICr9miB=in7k0EWqR>t zg<9quK{Hi;iy$Ku>5_#f5tR%S=A0zSGp-=Ie?4KLaQ#eL6}X5_(Zt8{acsLQL1<3= zpQZAJ!QK6LN8eO5xSX!N{Xc{;&mxG}ixr11bVz{tJX`d`$#>B}WMjMx1C@hCY#Q<_ zkn_!nSsJuFfuEIEwMd?93+|1fJT~bURq&2I1}kzmGLm3T|2p)xT-nWR37b)r%urDP zIGhQ2I*M?ASsEP0D%5Ywz|Cx`*ppgV9b0`3#bLoZxj_Lwxfr~M|HFR*^quX!n$zsN z4q4{o{5}SZvWs07$}uqqrW{nk9vuJQc+qi}6*W2%2R=PZ#JwhOJu=sz!b%RVxxl=!DJrP8y(mNIs7(E@R#3D-j zH*TDbtW6Q4I1%$7PL4E$a-bJDT1*6(^mEJl*ai3?5f zt`E&J>a*F3@8hj>Kr2lD4x`%B#Jm6@s9N$PL@_+m%CQ1r(wEVE212XHeD??gtVQ3Y zz<&Z^tAAo$QydJ@^XJB54iBv0|66r)z8K_TQAuvx6X?52)z3Labq@Fm2(2Cr9fHo*ju5g*~b0(Q4 zAk85b>v#@3y0X*RO!bw#X-D><05C9QV>@&J000000YRD}Y=6p94eN8Xld$*kfp`L- zhKW8!H}_qwEvk^7YH3LfEFZX|pk~l!fgZKp@^ttkwe=jJ zVT|hq(I2I&*tEZPrtG7h2c?j?evH0{**Ie*7_& zw-PRagkE#3U4qW&@pflM;oWDm_;c)_-|<6^2qdHuwA*p~UcbAhOUV27DxtcBfs)nc zrN`93#c_AUnDVcwn{)1(oz(8~Dn)$#t!K;ac1^^!mTs zX}X&>b?p9*irluB7TTm$bOKZrNE#&I1Bypu-I5wymnOhb$n(ED~w(!ifpxnpLrC6NzXn zknwtPQuw5H0Cdml_;?!&2+cX}1Mr_c^ocR%w7t$V$@kyuH|=m|0Jt^K!MK73yt^wp zP*1%xg@O=AroOw6oX`Qj;yqaqf*!$a`3A5l&{=1|1&|;LDe##vhVryg3pQs2ZkUwR zPP}M4b84%RyC4i@kQT7@$IM%0ORb4$jBw|=F~iJ&OvzmsN+1aHI1(m)=Z{Vz_;e&L zVK57hot{^LvnU(*Xy%&*l~*IbrPXr}Eo63cX89F^vwlW1jB`8A)gs0U`;#VK^>;b4 z^L=zZ<0vN+`Jn1-wE;kc9ZYY2Ur@49!jRp-Q{}MK36m!URA1(5Csx^opxCg~lLoL- zH42)Dk+3UwZmo7nt(*P5Kqc#PCzqRsv8<4B1y=SN-~+?WPxNY5U@in=V8?7NZ}h;tR> z`w+qN(lffB2ua1kOwEkVRd#86`=PHkKKGU_cj^GN4>TR^CLN$;+HA9eknqTsAsoQNLjGWmjvBB3QZ^OD5su?fp_9F3-TXZ_yV(f(80 zqY*lAS*61WVDy~G56M|pcv)4w5NLQ-YT)_aMCz5x&~a%qwK!ocNbXUzZ$DSwDi9db 
zJ-Di1Mzu910FH1s%D}ZYhVoRbfYNLIsw=P~v+j+O_7obu<^iXY80ow9 z&$qGMX8HUf04vc~4AYTT++$}g2tzsV>>b?bUM?|YcT({GgIUF=60Km(kXLK<@mviDL_>f9hYN`I)$1PpYjLb~s_v8iLdZ5>H`kp*&VCD{z za!%HYVly@A$rFB+P8+r%?c)5JRFXR#72=modHw*q=K5__n*`kQ|ql>s#wcWXdQL6YZ8tE^4YJGBef8pN3#=EdN|f zdl_RJ0lzU_aqCPh;DCGFu?pqf(x{<{?7b=w1aI(5dhj)CJ z{pAz|$<^xH6?rKk%hqCcnSPElH7ZvJ!yh7CG-{(Y3QD8>8?AW4J$)?x|C1;6J)LI* zEkWEYsmUU`nkNb;X0Cj&PhPcvWD*&AuM^I8&7LfAt{KXy?PUwU<*PV*_Yw&!M_GGK zEb4f^#VWuZrxTiPB%Dl_c1go}!eN7=0u#B8Gk@I~tk0&4zP*h_NE||c6v@*$lSa|) z$S}15_ZoeE6|k};dm&Muq%xJNQT9L~Wat}j>b#m*_vM`He<3V0=wsjR0Z1JKULCm^ z7oHJD{&+F6zJzH|wDllZDt|NqS?QZD|K-Oo1Im@8bv0!1_47a*U0KN*lmOY@!>b9Q zBz(A=(Z4Sf1~{T#Az5Z*P50@}$y6Z3s!6yI>Nc`HSg-*Y)IV+Z16&;O{LyGk)5(5I zY2I~Hw0W!b7JE(D(5|NdTOM4d9&H;H!ciei{j{x67Ttld@1rr9=&HKSf{%(KYGk?vKOW`zwPC_=DGfvx z{{CpI!n7mV37@Q=?X@8o@0F(u8$30Q5WBJTO2E=v&vHb;Y2k$xXs}3~tn?tZw|3LU zK58v8Z}ohDN3@FGPWEK&>CFzq+IzoVk#uJqfSii7#LZ(>K2r_{<>Luyehde!az>*= zRVE9}aW^neY)N3l6@8bbl)u5tS%o`~9H?}HXi@lUK~Uf(A$8ldj8eQ%MjwJyMmJ`@ zA{<&Q`8m_#l#ZkCAy*v1ijK@^N^iE$IAWigB;Z!)j8K*OwlV=KG4h|mbaQ7e6rj^_ z2{WB%Gv@xjnaNzvUBQgf=-Qnj`%of7O7bmNo zaDzvflbNsg-Xy@vpjVRV6C8?Mg8sW$4oxk)dJ_vwsEcvmlu7&K1f9R*uI6h|4639r0Ql1kqg|7^l58K`%; zsh_LK&!Bf0nJ8&5T&VcSu08^mJku}l9QKfv->R9ACzrP7jsHYq!{ENGGOvaVvG#36 zFEcDN8(nfJFI}(q79_1IE43yE$cvsLD1*k!T#PbSZwOhh)|8hbG+Qn|TjX6yanROj zRwDffKzD0+b7`J{MECD%#&gfwGd7?+>9*=YJEkQIXCcHPmYEnW#Z{4;Omx{-;Y6cg z)v{ZP0R5}aYR(}cwnlVWPNIL?q4(<iQSGa4?bK1I?j1{rgyH(y`JL`wXO!+C`K@hbwYI}>m9hLoeD?YoWOjKKXk?|8 z2*kcG&vdZ*AQzOUS$#J?RZf3Bv76$X;A>uO+IT&IJO&frJ1|afar88O!e1!(LhE>O zJt}1&1;0d-5KE^C5)Q~=@v$i7>fVafQ#0+f4_nRTS{FzEk(^2Hfz|mWUueD*4yi^u z6y5q=$Ws4t94Q94$hE+_g7P*IZK1@cW0GrK=?8b>8aJ&psxzY*w>@BQoaw`FRjJoC zXGEy(7q9iz_a?6eK^wDpm^nYR8~&iGCM*q-+oL9@5B=7)#N%EgBz@4A*hzz9Vy-se zcmTVAW5KS>;1xy>Ie~iZcrvnO0GV<_dvVX80nh=l@KJ~r5R;TPyfjmZ&SpG^Oj+YiPiRCVpz&Ty`yHL6kK(Li?tIr)tdzc{t z0)+zum@K2ZZ`7j0R8ukjo(>4HTqJ<%Ty{f>a(KTiQr@DABrF6R(>L34l(tS7@)@?A zdTXZ%&$#u;<98^g_TIB*NX+3EPU8Kk|TfjzmS;5Bw4Gl{edNmA}E+4^LNG5e*TK z{l30`WFfO`k2Dwj%3r5rZFrm2r7g)A55STdD2d%HL>CR07y)L!v2T~oMF;P5XOG1ly;Zh9v1+=dDvfNYw`tGZQh{&>G?q(u-0UJqkn&b*6)#PVyF*IU zPnUZ>@%D{Ocosqx_)7IVIs+-xl8LrI-n}~|mhTi42vj}oPqtO~mR@-jcd2zZo-I2% zu93pA!vWdq0OL-A{jKE(mDH<`k9W^9=aFEZ}D9oTe>t(tO5pZ<5i14Q(E9uign|&(>W+=uuL~! 
z&Gp^v{9?1(_1DifDZ1NQ#$Y?qkg*DMCr6)&>lS7-cYFPQ@phj{`~^rM_DgE?_Y zNr*Lcr)(;B*!P))!Cps_Ign%5mFabcqJ;jy!R|wj!*;dE9gb(8y*vh_o8@y~W~GEE zWKKH)d!h$%(7XdG3dHFYH*{}=!pmITbIYyS0#obe069R$zj2C5(^mK$%>E6&NUx0V zO;r(tK^38ht^e7SVPHjP@FdN-l|KGJp;#NU9f;9>(GhNn+%uS?B#Y&5)eNGHHC!jm z@@)64GNktswQ@Jg5LPazBN}Cu`>IY; z;D#(|>^fQKSY%~`E%to*0f*eiRZz@=v2KtwFlA}r!-izJijzuwig9RcG;FW;;mzbnvggUgVeQ+1W=@(@ngiY z5PEo72>lQIExxXiBEhyYrMoZ8;TtcYn8Ixv_X>Jxk;{*m%y$u>ta7WQ*c^}B66W!E zAC-ZRGg0(EIt^rSJr%nq4RR&vYdV}eID12mAidjj9tTX>4Rl(zSG^O4+W^AQ@&3(o*$0ur2MF|KR~OPgOVvR@JQVdTy2SBr7b-`O&`&%=0p z?XzAZKt~iT^Dt6&y#zt6hbv^4{`pRq8`*EsYlWf3G<7pw?^Sgy;`<}_3QHQX#`|_d zd`8lu6pbgCd##SmK|^@pGrGW|I+Hau?~$hs$wBB>rj@tQM8{jYf3IJNFDJOg^1)nr5 z#l7I&$aPA1;?X4iwOA)d#5ueGc}%y2B0?o8JFdd^Z3TKmVeu%yJDSanyw9JcWM?>p z_M*g3jFm6}bw+jl>g%USU-8!=BNm>lJ=l!+RW%MKmKcEiI8=zyBh4|?{j&*A8J&RJ z6XNur6qR&9zB!vHVM*7MBk(x)M-yU=m&D5P zy49v%)7fH5=go44bwVHQWe;9-697>7O}I{eEq-^CGqh_E)+_DmH5^b5E8i`XbuQ(T zn`EB|`k)!J${m)$TgyZ2F7j7Aoj6xodQS5!705pMRAgqSs0BewGcsDBg_YqCvZP-x zA&Wuarv3`PnByIdvn0Tl-Ht?}s)+)^aWGprZvN*#uXsQ!o^-)HH!25q*k{DFqdNs{ zov~{rUo%fu+OaZXjYsM+5;|37#zPh4^(VRgdoy648al~%dR6OPU%futD@}U}cPe^5 zFCRDpP)DO%WrqZ;_JBmyDQ|f)DHh+g?jZdg3tA`MvtlFq`d;iQL>|iT2N`6_!K4I) znA}FW#o7wSUt~Q0q2>K!5LytZC~s1w%l^T;7xW1p_sE2YW*34bR{Y&cFBO%2@bx-= zwH2gStq(+=k?M&)qtYlB_z2iBKt-df8_!+qF$7rh5#M^n_{xz&H(mn;F$_+Wdi$c6 zGW*O@>%Lbkk>PTN8TVT|>Yi~TwRZ@5sf+INh1K1I4sL98M8Q7#WX?n8y4sjfe>Wxa z#gX>t2kC&f`3wTwi`?5!ARVZ*?Vwj!I$0fs2eti=ne?AjkQaZUCNtH-ZAeQKhGo)R zXG9)9H^F5*M^=#eQi5teu(9god1o&i_cbIM?tc zFEPT^TyXoMNK{0G{iMx)A@W`?IVV-BOi^FkckEC>g_@u^hYRb~k2bNbVDZ|$oKc-`@pQ!2@WUcF5pVp24(xs*JBI}Zf4zRd3@y% zAJuU+VuE7L-ScSOY!o(HoZ8SYUc_2W)S`Xs7a_g!DGFD_b7ss6dLEB<=q_mfr_)Mq zJsbu+<8F^_;Nk%K_VTE%NgtT#jewMxrf_IAE+Q*2B!-L#tgu5!h;HJInoS)H@yiS7 zA%oeYJ>TF|5^}@hLbo+QgpW&pVY;7qGto8dg|S-9uhRcg6YmRAxz%O~7Y84>248Jr zx@-1%sQ7)QUULyt{7YxZ4^g~rff$0-AO&XmuwD4)ztq+-k<6a^dkOjEhHr_&OAH=6fa7$xd}Y=J z(7^RviXauY`;q7 zS=2U;<)R(W((^=+)EpD#3Nv$P0hkLkQ3}1=>js+e!izLR#A_&$`3qkXsu_l#Kj3Bs z@}OFK)KXO~k$Kx{daUxYLqFLI{p-Qz#7CUs$P8{uJ0TtYJd?o;&b~d#FvB_y2j&2M zqe+;Qcv{^zSrj9_2jb?RciG9wt8AM~q@VBlOoF?dxn9Be^&2cGO9R)3p!GKa<()qy zW3faCWj`E^64ytrf;JPl%8-=QiH2R2h+xnvLb&qHzgy`t*NKrE227dwlS8X>X)I!J z0os3Rk$oH)8%FD{A#YcWf<=s5Nfz6Oa_2EsxA`l=9f&W_R?%gn&)1u6GbJ@uD5uj9 zt{}o!13`gLkKZ`yryAi~?Gy4?jqZSKKa8fu^1mnI)@Q23%2$3$AGPq_9K|R1zEVeY zC~t?CZ2c-YEuFKbF-LJJP(tpct$J@tg|MKI*d$J&SK4H=Cg3`u2}U6vDw~X;<{>uc z>Fixqf8Z>gup!LVTiplL1%Nw-0D8pV^&Y(p^T~J7ggQF{(#2WFy$aK))=ZCIbp)RK zBIB{wS;xOw_x5Diy4Nh!yk{p&SH7PWtsnDznGkr8ByYcrJpccoMbW2PAP;8#7yQ4H zMPON%iJL*51Buc6sDIKFOQnO|H(Q@x_QD&1sgK1^!>9ldt&B&X0vf6^O?Vm*%@@*L zkU^X4?w|eVF1Wc35S0s{h?2A>b92+}V>wO+uAg>XuZ*~hTiGmS!4ruXQ#Wo=>gO#x zAx|UsM^BuIGt)m?FgUV?^1bRFM$OXrV>ed-F+}jJZPOWO*P!;5EI0Vz1#=Kt>Qm}@ zi6}fQq_N5*C&%(Zji282XH2$7;E-wo()-7&$#_V^qSEvf>Hx zpU^kpH}fUdsj`-Fib?HASd>4At;tt&K2qeBQCZGNnSQZ;J)vSXLEmi~!f>TPi+x2K z@)lwns8Fw&qC~yi`sw^4TISK~(1LHegX((;|IpUa@J#-+U0tL7Zfhf8@Bii&3X3|- z0*_9gHyy6KUH!Sk@;qjs195@FWUx2x;w@?%GSbOr8`Gc`GSA=ILm%!U)59 z#*%j3{j)FQf7@RFFK83C&P+(&Ji%T#ajefO)VVZ6K8 z5L<yOYO>cm;3=z{4C^ z#k(k0>C!%Pad8pKo6`}9F>Q3a(ZSb$k~OZ&+)qm%=94tGRt&hYXydtdJ!t0nj8I6_ z?iPyBZ}djB{`<&;<=*3DhIKB$h{0)olIzg||8T93;X3ctJL5l&))sBf?53Yu)J^IL zAles;(+B;cOs$T}S(FFe!Rk8$jYL2l>$k)+QJeB{CsW+(!@tuwkrhuFS;L<|jeMit z^9+(urv@%sq#C$g)us*oo1AJEsNNSz3|Q${`cf7qOllsR#Ux!vJ|EF{33(mR{F~J6 zkPl?3+$n4cNb+?oIt-8SaPc*3m?@$v{&Vxg=w>bnUERvPTj-ONjmYXGA198k+W~(W zlJ=p$!Xm0iKnn-Z>?t3%H7bjE&fh{^He<=BeuWmI>qvp7HEG;EOfGx%%{L=#CEk-b zd#p&oj|{5-pHFDFw9srVgsi6VhETD0wIr^{CPwly!M@Mnef&iDivN$AE~833WrXHN z-aT=gV3qC~dUSkorV~oF2~6Ng6V~4p;++7I^z`)PhQ0b)q?A-b>A%|pI7B4iv(#TQ 
zA2WOGnM<_!K%}Pkm(4>*Fwzo}?ATdc?Wd*R+nj#%Uq8HM<3O(%3a8e`yk6&ctu|+O zh!#k%v6?bN;02ldbeOf8poNHaLY|i8i*p}4%kI3 zqV<)KT#imm_{3)ZZd#QziQ!2dqXm6%oo?$-H47y0_Dj7r?7($vh!%|9d7mduLCAjF zqK~r6Zj0b$E2KVyVl5*NdID+9{L>QzTINki_($MU08&f9V@Dc0h6$~eRW1I zqF!wroCYqO{cIS-*e$+R1aHt0RJfne`^sA-v)z#ASq7i5V{)c4XvtN){M5(^J+fAJ zZ#`@iQV=C9`K91UeqqS?bbG}cR6wr+3YWFlv?&Ao^nEOQXsbuJEdr(am13C67InXW z+S9RA1_M!~9WT&0`$pK~DDq*ul8l+Yf-+!caw?K_i^WKW0M~xM&%R^4-fYe<9cPN74LbB5cHrF5QgMkLbR0Bj!j*H}q*ybh=i3mqNhc%e3*zQrSEX_TYv27Y zguY%8TMq6JCkSeIcUE1S+uQRtwW>$AdN6ihu{Y+c#AUE2=yU!Y)i2Dpq0xZGy zq@y|7E?t^D&{cjINdn3^buqZ_I|V2EVAOME;^AkpT(P zSC`e?{Om&W*Zg|u@zwY;Od~-kk1zFbe)Z%&v_5NE-+Od?Lxqe{YZfuV4SY;?dTQye zM&hY%n3_a4Kb0N=vrdi+EL>-4AIg6+zgX;5xJ$U5Pnb+Y$Q9H5Nm=`}cu`I2G}nbk zzNP;P@i#m~*~F$C5ICh6A_h@3Fcj1~J&jjV^p_~FcaFM9W|rxcJXcQe1z?$1V7m4=u<|Ad&pVi}z?2u(!-T;kdEAFxwe?MauF>P$xYs!|DE~#=5)=rrc@JQG~cT z+JXWXEux}Rnui*`s%^5d9j)1DE{$fpcA!xd6eQY#VT?(M0KW|zNIgKF@nUCMK?zY~ zSFZ~0xVv8YF(ZH>qh_u<`hg;Bni3d5Q;9CDs~q!&pQvp!7t<~G#%enq=;jy z%>uC)DOsorx2{b$b^r)6s|kfOvYavvDLM9YMSY8nt15SuKpTH8mqDi_ zQ(JJ-ZM2W%@uHB6|0LQCkE06gg+>vXIe*5RjTle}m#9%pk&`CMA`F@#ZI87JK; zl++g7g$j{wRfvVo4I74?VPQfY<`&YtA()BQE`rsXeYV*RV&11PWuQ~;T+4m0i(U}0 ztLLs?@7`0yoOj2^e%7_Y=jDKKU+>B9!goK$r)g~4NEAWg9LZ9fb8J`??IdK(nr8qK zIe>60`}s*HV74_sA*;sYCKt@Qp-j2V+BuT1jT>lo6Q+7~~YMv@dfL z3AR7}bF&iB2PB~-$8~kHwL|Hssv1#gw@J^&U!)!t{T#!47nbS=wKfk0Xk709KE>4y zzo_qi{4FNK!nU~C1^tAzqEq3tlZ^8z%etzSmt!pI3MVHv**R6P`0?Ba7tDFmyD z$OT5QDcN#Xj^DZQ7)$PNM(qw~wfr+xmE6)Zk|HRH&`03qP_E&4AUSW#XI||-p4WvX zP&_Piwe@|tWFSToC1Yl^nUdeLlqG;y-hkr_#>IM)uA88aMtQr?w!Yr@|9=cn`EO+# zf%tOQP|kR$lj^@NwC;T|`SFvi#KjabSal1f>Oyq>F{735MIWiy71L<>Ao%8g24Q1XNHfT2I7l! zAP~7EEJov;I3=saTMad$J{C%EZJB@xi#j2%@4+;pGdB0fSVWXct?!k!>5n`YD#1kMyN32*OAx2R7ulvoLloYpMt#@_!T4)YP8}6Z}1z zIATec@Vt!$9WduqmRfCfs!>rxY~8eL9e520eGi8penYk9fJ7A||9=@M$A(NqD5cy9 zXe~`{C!q3#RIk6lpd!vi)&)lIKqKP!hzym=N5tR>V%Khi@XOqLi*+-+u_c#lVo>Q+ zkH3jrUQ~!+U=ybek8n8-L7{#p>^|rpwAnLp7|W<2&p%296Cl?Vvk1^B08cfp)ZQt_ z-T67Z7zNm%A?Cp+u6w0@@17S}V9@fkUha11j4mF9PFUmj5Y&N-6uXkLM{#nGZj&(r z3m|3FzmmEJN6DS@eMuN#pP{TMxF~+>Fu{$1Cn$VB zr?P$)agT$j$Qvxjp~)aVyi-Jly2@(`)f?2+4LIaW_>Y*bUrez`v}7{SWOP>snihNk zQ`N{Em#()VD->v`iJ6>Ja<9>Z+~hej@A?6(;H}MOR71D|sNIQ0zM-NwJQ#jaA87Cp%{4jD)8lC%NaV#X%vx1Cz1b#L%8#Ue*e zB?McU6V;3+?%xrr#WrU&ys<6@GcylYGG)!-mowOPz6YU8zUr-@l~qW~RSUJf;?Ddj zwJq>X`d(6AQFl!|1v~0-`cRTVL=8KU9tcHZj<-Rc7XWZbpP^B!d1(NlXroZg7)BIK z(!zpFD@iu6vvm&l9i@a7a}lIL;;Pp<`F#~V=*Pj0@&HM7Kjr2=yTe{F3Y(ZqpK$g= zlM&)@!ugNVM{R8>jJDheuInVNjkJhZS{HR9z@Uj20mMDFMRL$?7I0D9L@Wp2PL>E7 zy)Kx0Nn%PTHan#`BD3i=$VHpSOmDT-vsp>V;ZnyrU)Emj;K!Gu>eL#h(?0^Wwmy^x zM^bS}VoY4*k!^Y?IhCtisuz?xLQVBsO^#c)ae$IQKx^V1Cv% z;$fBRdyB6y zzQbeW^RX51A{s%)rCt-Aouy45*oE|jDC6xM06-BEV=;I49$HELvZVq;zamLSTv?n@ z;)@-IFUEKrfBc(-EsUKO;bjJ zLehLU{+sxl^XVH>{MR)o?5Rz1CKSt`6Um7Oq z1D?j}jjw>&{|z91qjtIrYr!N?>@m60auIgA&B}ST3bW2@nw@nSCt>KC{(Z7^w(2Bm zLXUSs#D%bKRE#JuBuy=A+z(Thwxkz~5lCgZ{|P8FCo^{Bi=BoaU4+lL^>Le~^?&TV z;}TU#@3g|agfiOe4E{MB`pRLev9XP*+kAB=RtqX`Rx0h(lEp!VEy zIEdLj2LDJt-4ZEkOsGnT`-*d@WJwjHjVVIwrGRn{cVZ0xMPepx0w}?qk$`KrwX#== zu`vYfwO?~CYH+w9zRDPD6*;0;1^VvL367;VShiz__DZvcw5Yz8g=kcKR7st|3^mM;MQ1GUG5}|+0A!o*`xPBb%kO(0{le$dmiLi`Q%f!On@|D4%_#HY z9IKu05HsVND>9zyigmS|!>O+qAG^@`{WD%-~cxMMntbd9nkOcYJwE z7_CyRudfIIAt*`s>7?^Z8ovoIWpKck*=9ya6BgJ;o-Uls=klVjG) z(iH!n(BBr2ScAxGe%hxIL4dGUgFPQw=JvMFHt;!!Qs0=wF{4!Do0n9iqzA`>lZ6d6_n>ZRbZFO$=F7$^V-_Qkup0#dVOf{dH;gdk1!@@3Xc^C8JLP(MU%Jqe`-W)D@;H&mR$ey28a=3J&DHYjS`c zE(W0sO(4wxYn)v{ERz>dd?}*r*>-2Zz-#y?#DHX#_pFvs_fi&vDA(PkH89CoX8W!0b&&75y2B@XH_siAmkoG+;ub5?zhpkC 
zwngP4{z@^ybH~0Jed`>m*f*1q-_Q6qu$j22PB{|Q*5w$sP14JRTZUme1?eXxtIW>c!GWY`>G|2Ifd;x6DPfO&FK&s=NNx38Ugcu1KGR&N)6?Ymfn) zrqe3t8_SXRb7hC$K#7AZU0V3TMMmc%KF~Bi#~>AFa9<(C0Y$w}GP1!~D$83{1&9!7 z&cH<8_B)TiDpHFP9bc1D^LtzLTidae#A5VtBRkr+{+qFyJw-)`uC~NkO&>Or6fGhMG&I_cq89H7vdAKJrcxJAs%j8*lGRzE()hJfKE*YoUZ=TI=fZwz zil@c2yANk>f`%ZYAtIA;NJbTY?+Fh^-U&;3f4FLMFbwY-yqec|R9o14*myoJ)1bd^ zsxfIa@7=wfs5mLQ4Dx@&2@V>D&Uk9p&+8;MPwEhdYa4uWlwzd_qIcbrauMS*N48*5 zA5ic_JYZ68mO_weMZ@dZ+99_m=AlyIW@Kzo22T7_lND@SJljd(^Zru9k30}vc0H!@ z=5tUxC@zM3hxCqDWf#OO9uwziGN-YAFj|w*ZTtw3UMfqci?pV}q{*bBaf4cd>v9AK z-JpT^z-R3gY%ASR=b#bSZ}W^W-hva(P$Eb{yVqIr1-t! z4EA}WTLgRqcZjYM7{xp^XIZ_iiZlPdb{CQRywwNKHD-4vR%s_M-NO18mF`SCZz~bB z%=b8-ALR>UEbnH^@8Hr_2GikYR~`n+C^>bVSIse<`2YzvL#pk>^#Db{J!){m&m1OO zD%&V1857l(ozn_&Eas_XWL~mIB=p}DC^>+ddz|s*Z#kMZkx*6~{08_p$AR$bm8r9* zD>oI+@-3m4RD^m)3`Zh!9l8~dF0;$*U;Mf&nK#dV1!db(`q+Gv%+l^ZydX64q@P+g zO7$|XMC}ZK&h_fO-r9A_XHJDwrcQ!E4+dQ8r)>;cxb*ki(O2rJ1?$=h7GbR3Xq~ki z^dS9;;zLcH)-Y-U{>uBUS|t$E8&e9{^_xV$riMY^uquZ?IdvM^27$G49hPju?d?HL ziDJXh9VTGdKdeldN#WO52euc`#(O!7Ok#}+(x#+M(6WEJ z-{NfGSZ#yLbMH7W0B>a4#g+Uqx++?%DFVOux(w`0^{VnWx=?GjT8>~&p9!%e&CWZ72bR2jY z1kJw9tFW2)3k>Q+`z+D#c@r!ccupW4Y{D6yEUva@QQDL9c- zLZB^s(pR<;Y1pgOF3W;PA&7Ok!t7E%Ri-9DDD5^;;)f5VvKJlLYYdeaq#50tV~ie3 zpHA*kJ8s3zM3HqT<@T;(P@k4FqDx6x%;wh5cy?&N{d++YyMhzI^~R^F{Dxq+hB|!h zR0}srQ`Kz{wp~~k37-)2Fu@d+6%m{yxDxPnYU7U@Zu~$S18=&ecM1zhy>YvS-=QuW zyit)d>Q#{mAXUeT)fC=83prrFH?*SPHFJ_h)oAb9`62D%yF#VbH&|7imHBqqfnF=^ zOx>!+#ooI~K9cdV9Sxiha^PVN!wxsUX?1KvgdX<>0QG&Sza_GBH&5dloT}*~1E&6lU0h_amK^=zF|A6~_l6cLDzOFj{<~oJKo=A6Bvl@k$tc^NGDb6QTAKgnjQi;;pJ^j=o*B<%eT`;(5mO zeEwUS&v9 z&7~CinIRv3wsQcpXNEdkkI3EhX}=Q=vWpJ|Qwmh5_UZWJlHfM+Z|!PDj+SzcvY~q0 zq(|jP@Bw#k8ndp`wNKa@r)b3te(DBIC)Lxz1uL0L6-(Z4*pGkcEv2aeaeQ-hAJ#c1oW9!mP1>zNcuzWa|G574 zn?lkZo$-VIsj!3hKFb!ULud# zkDlDS_93VJgfAs>RDM(yxy*r>Dh(Rhl@nsoz{pL&`>Xj%-?)ommsq-ACqhoq0~bsx zzva9YZy5hCQWY!sqUBhJ8Qe(R3HqQSn9fRMpIx>kVf9+BZE&d*Ut6e3$)PH{kkF!a zdiW5%XFPG>M2LKDWZe=`FfQT&1fCkz*mqjO-3a_fD%{gvH$H~$Y02(O(f zv-zMX(1%uk zqIoxV@>MtrrTaKjZTd&M_9kp<&sLv5%WY^^v>(Nl&K5yJG=6^gsOqjvHHyrnm32q= zjD@}ktoFkX5WR$Wv|M%jsX9x&S+~7&9%GGa5}JfZx07ypkPI!eCFM$iVQg&4|8+(x zrExZtviSf4oaW9xu7Mz4su2JT4fNc?#1+k+>h3xDU}0$T!y=8WEXVKD(feT(HK&9V zoZJRN6x-OsE-eKKr|#h?`7zF!_%i;Ih;u0#!~pW~ri9LTU15tQyTD~lDPgb}Sg6}?aR zoh1(J>fSjuo+uhW3x_(Ja5_SvAQ!wW z0BZv(UOM3$r6MAZ`#j4mA}qji&e?AKJLN<(-Flb=ZGng1dt+V<;6!F`u1o zzTd)?3J4ulOPVrc8v%?ai-f4AIL^}W(q$56bZ%TVhX{4FMe|nLp+c3Y0I^?&rXF|y zq}sc+m%<}tcl%tW^%-Z*Ywhdj=>%ot|9FKPf1L*DcdU|kld#{U-0~O>nuKan#|zDg zLLfi6)!N-Y2(}W(gP134phaH#Z?6Y<>xN_dRf(kk_31(roVUH#!dMnnF&g+bYgo3n zqx;snlxhwGvG3xBmNT-ZYvVN%&;ps~ykomKimT#h7UcKH6sQ5H@MK4r)qb!);n!W6 z4uAqY?FrK(4T$T;pQ0dH3EYdFTFCUc-nG`#L-PS+m$C8QcdcH;j*Atj5j_~CQxXvv z5dKkWLOqB{$@T%4%YnU&Iz9@dd+uY{9-~5&pZu9(OCGs=*c1S5z6wj z0aQ$4V;a%3KwIp{)NSyhCz!3J>jCRCB@D%nrrD9e)K6GT^v5yYe5Sp%5h1KF$VQ4y zW~TmR#yeo%8lPYypTD}2s97u35p8x|KC3qhtM1XOUQwRI>t@f5*^%K>&_h#3U>#|^hU-tn`9P%P*0FMILvc59k?Q~Re)|ML95P&_6x+Q1wZ=PI576Xg}df}U>zP1 zM@7t8ZtK_(dh5c?ixsY;&bhbs2VrHu=M5?#+0}iFyoAarq=%}x5timwywh0;@Wxw@{2x1-KO3eb`xN_=_ScQv;UscS03z*kmUlq4syGcwk@L zD~^gadA)8)N`ZWiQ^+Nq%RsXu>#$}~LV{GRK2c?cyvPHn^C=opBHRk1C_yk8!NnIO z0S~zqP^Vj_I=l$mPks;2jC9F)@e-QB%ChjMW%Ap8Mu(8UF4*0WOOGnjqDOztVE6%D zJrr#QrAP=G9tV$&&0WMol(;|4W_-yJ;PKOjYmGr9Q)j38cIHy)8qjoBYln>_$p zBBEkSeTZ(zI4!7d*q4q@X2ZbQl}pv$>|qMBlot*HpLf>n=&h1*e`0`!vvT`XknqAS zyz|R#Yx6}g@%la#^T2l%X;x_zg>;=#G3_KwGAPSf`;#bgp=s zFnhsM%dEjNB;ZioPA>&);F=v_K7bTIps54{4)_~CeIG>{F$`BqCPYY`D(Z&fgm|NU0f;h~ z6|LgH>q5y&pPnphtN$^gFKW=UgYFk9Mk2*Ph*&Z zWq8S?=-ATT^4@(Xp(9d!u1#-dUosLv>;$?`KQ^~!wWRQ%y 
zx;sRTCp9J7c%T37JY$X{F%@t%!xJ2Fgit|Op#U263wgSYtq8(UmVAe0Du{r)e$%Zt zl-`>8S9qmO=6xtJyZ}6c6;iMONlE<@9|2JdXTh>M7oE5;zUl*7EE|2MlV*P%3>-$+#2KuWx3YMhX8-;z4#C$ z)U~^`XUH-^K(`x8^B+UZ(A`k%O||m<-Km-BXHCa73iw-5NO{#=f;jyggz^MJVdSDV z;jk>syyy;`@9*GI9Cqw-5~*B8!WK<1G*uLZ?EBjrc}FKBAKmdl9m=Y6i|YJK;SEWE zBe7)J;+OlfmR=5MzRwzqQ<0hTcNnY3?(bg0_?v?d@`u;A5MHp9NdRjg;|+C3QwA3I}XtP(SuSJbR+0awY=tjs-q-pNm>E61c;3d1*q}O3)^y$JzJG zTE?RGV?MZKZBvN%o$R4WJ`D*%#N_WfZ$)%e)fHc1-n z-6S`wz-Mal<(ce(tY=9l7qaC)3$;TwD{Cb!CV4x9onb;TA~gZSQuJ&pk_sgk=n9j# zn=67*9=b$GF%0Vi_Lkm1k%b+T#ypgY`D!f0pFo>^v5HGZt1|CnOW%D2xE||bhJZiH zq<)K>TaSegp>^u3IB($w)h|5`eVpokw+SMnJV3YCYEm9T*R+555^laOv%g!zz=eim zoc^}#A;R0Na4QIQ8qXPkpz}2U9CNWsua>abfSC*@^M#cH_U`K+Su8LJ#Y0QpV{i>K zi_UZe=Z7T~ZPoZ`pa9d7z_&Yw4+2!(!UmLmVcdbNo1p7Z(9V9pp-$VS_1R=DWjawh zIKj|l&Zfb0*0K#@4ykE^*?%*r4$T8UEH-^<(DtCYnPiEG0bnquVbDZBuohT)s4iLK zXP;z&b=hb5+*bvAA=gE0jzDGb7uoNj0M+J>!LfmGGVQV`frb2ILL@eu;2sYLuvd28 zVH=w+l)r;<_Yg%cesgpKFIsuzNbv{uk{(vGHsfKx$GwF)^Y@3@qsrNJ&Sb@T-c;_Z z-!1pzo5ReIowVJAxT?M1!B+OhumHHhE>!ZB7mOUZ`Pw!!by4ibn(7URgFzG5wYpzD z^Pq~pBN(WYNvr|}^fAVZ$UYT|M;-8j)QZK*tW(VjRNHZ8Ed>MFrNpO!Lp| ziE$@fj(mUSbemoBwdH@pTVU6Wf8HA;fa+q37Bu*V7hIa5FzqSdrGp;~cDXr2-(Ebk zhuW!Tn1xWVey5!ClVY@-nabs;>Ye>t4e8vp4tXBmeVMtNmxXf)rH~?dESJlPYo&fe zf98cg@Qkir^Jo{ByT`jY_z6@7y3O_57cXgU{6+yZG&zFOhEm(3yXO9Rk>V(`#zY}n zo@dytPw-DsZtyD`vc_3Z`L)NIHuyhJy5^ZA8BSsEIR)X<|^`!hKUTNe$^FaT}QQeuR7Rhlkj2GYr;jsghWy~pW1_>i- zi=*u8&Ak8eM9owv>2(gnHsIK|(MWN4#7{z5i8%1u3t%v=rk-8^q(zz}6YEzlM6D62ofL6z{qs(Dtvl zf$+fiTX^hqemMu|zKiuGbfGnO(O-1EAd6K?`Wxs-L#UJjXA{q=+^s*c#80md49%+`D6QUIRi*&Awhr2| zk#%Un99?7eW<0UJ}XcO(rO%C-!jd#bS2SfsZx zGkV?DhITNa-W)Ar*dLoRmIJ$QR>aOh4R_; zl4D`e8N;$Yv=QPj6fi*KR~B+p*{8E5UHoDjG8l*k@PmLyy|8GcvHZK~JBF4R&qzYG>4-2ko*h3`NitjESB~ zw=WL{+);I#wg2(qkX!~<=+V?YTAbe2@JIT$ibb+iVa_KyEG%7z)jkzATJ++y|g~QD}#_CXdA3QMn9yIP5V>ZAkY#Dn{V>jcTGp8~O&k-+= zVDw5HXSo({ddjBF`XqDJ+4f>K5dKO{ctYV``ERvqu6r8Ox1;z>2zuMWWrRZqW6MPp z&rnB}H?r1IrVX~v9`ok$oJpI)9N7nYFCqc7S+cYy+B&#jr!nvrTe@+33183{q{OQ0(0iaTwReu3L2`AL!mpwke>`+a z(`v&rg(b;Yg}eCs-A86HBcH-kRL2X=exOYY6-i>Y8PMCSAnh8_Z#pbl|F~QARi>_r zs8ajjpw-UoZYcRb7VI&o`0x4T#z~wRg|Cfe1M^z1$+r-71OfpNo<2DHHr}fbXn-NA z6N29#LbIWlC&KJLJq_q+hBEtz^D+854l@{X7U3T+P5e9XC|O{Lk*B&uk70AOK1l^0Z1}=nDJ?Rj6NJ zO3;9kc-wOk$yXybzx^KD6W$BgTi2Q6pW4wS$E7N~HFNa0C2&Q zLghFsF5giv?d?9)PJZP~aCzbeI}%$Nu)r$a-h%O}FP-`qIFYouK2WzU+9DeQr$|My zS0_|*!}VoeTN@@($-YmP?_V~WWYv)9kz$OzAZ|0t;9dg*uv7Z=9&?DIbMIO1=YmPF z+mCe>hsqY5X}470TFf&IFbJX}CqK-v>UGBpRDrBI;b+ix#U9E146#l#8r(rgZ@GWY zxTCma-A;WR4|G~knqqIV#qVg}yzxDCYhq5N66q*qlFf!FM*O5aw&;j?Rf5q7QTI=% zt*2;+45G}a&8*l9AV{oqmYjk?8g3s_QUY)M9_TgzJn&F%F6YpBTnX3}F9E144uE>H zy)^pSWY}GLLN`EmonAA$VMx#nqWd#^%sFS|TMzcXg*Y3|4mNO1-1_a`ULzmH72C?p zl(3V6Y~}fVqu@x@^sRCi*8{=7g%RU+y4Int(ct(t8?_6oI_*R70_0Z4op(n|9q9Zqml4`}=G#Y`}8=^jy0Qr~`S-$B*Il@<~Vh zmwH-eE6amb9}&rEgc8zI;T)L|Oe2`lM*k>D47nd#dlxJB#3%?ke|kH4P%nw%y4DEC3WlBWN4={-Fn5!!ZKgrQGBEF$0UEm{^ET=O>fduGcg9~psi z?ucJJL=EC_rey9BIN=-$O2$tz8w?_0po90+e#Rqg{5ZCKIPOzN&r@jyd-BX+xhT`| zHE!={oE3_gx@zV3*G{Oyz8EoB7(lhi*;`Y>d-i;$}>NrVacq6ntBbmQgu;I3NOs9da4x1y#UOLIA+I>ni^b$8PWLsJtkp|#XAWH z5$0B5uXe`1Nu`MoS=zch}Kk?v9 zDM`TPTB$#$I<b<$Bb!{vrsFMeB4L8W*Wgoez&JS z28fLnsLv<)5??SgfA#ZBM3Jyxme&XtD&MCpIFPN=PA}`3xq(qx?Aw z_5;4Mkdpt*n*C)5V~wXU-I-|y^4I9Sgyp>1S@TO3&ZyiiOgpN2d-(xz zLNdC{8GCk8YIB=ZWeN_xpa_z)gcVIBEc~RZpd&mLZDPPc_tl6Qx|_QeQw{;NCy^lD z{7f%tZm0d+OB-Qgnt>BuXTGW4tF;@)gmSF^yYzdUrfE36SP@oV3-RF3wD$LRLRTA?1*o?Rec8hs%kpOnN6L z<7M#4fxsCW-x8UNI0Sy*iyXtEKisYLFkK-B<$vN_O4B0jQOJvOP72aa$<@4D5;X=) zp;B3&p)=HiUri&c{jUfyg4;cY>y~LY>aHx&>0TLBG_Bd7>jNw97Giyi%`;kK!z?v{ 
zWyfxs%AIv>7A!zn0CR0}gXAABv3v1&09I1hxDh-Ce!!R@lAnyDLnrK(_URPMY*v>? zjkxSGi}X`-cT(`kRkLOC^{%UUk*x#6FEtMY1$P4jlfju@s5#r!|NBhJku3Au9Or){SM(M3Rs|PoL6suQT z%h9@!sejbl-52om;&|J*S!R=tr9f&B1Y;5q^(&~Rz1g(Ek4%!mVAe_yE^mqZqYCfn zj{9MH^?Z(5cdgo_J%;B(Zc#f_;w9xI;a*U4^t$1xidU1u_9F@U(U0kH|Y0A3B@sSJ_GIjKop zq&8YB@A63y7&^CRg3BI?a9u&0fetA&9vM|LeDq`}jNfeh zCpvBEAc-*|+1Bd8torpq4PJt!|~FUc!r zp&QS%;bap$ZHfh@-qaq)(`Hdru@(0}Qls(-_Ic6W8sCKl+zVEm>oR72z8y|0rP9J^ zQR6cM#VAF|M{oWHaoa9OME8oLY3r6CzRXNwz#dACVs%W7MBCny&&%t&@!85!US`H1 ziF}nR>e=7L0_l-%ykA=A%qcq~L+&^a(*9UirLgL?=Dr)L=LW?0?W__4`=P_(5$Js0 zmh?9G)>+zp8(bqf{}z*08le){eeaPQ#F8(KkwyMTNJidA;(`5IhAQcm*Kw ztB;(1_E~~BSp-ovDdOG!HI{w7&%=`A5CI1j#A746JQy7-r_X<=HyZRYHFyMDaqasx zizTlFA7A`aS?PgUusC;{z4Cg4?x>8-pq;`SPZHl{x~E+qYZbfTEz4f5A*o5W$HvXc zPH-(Z>dOw{{2~gSzHbAvyOlw2u5uaevYaSp=8165ekqUTey{-j`7ubaS9hXK7(lED zMs^|V6`F4q>snR}f7URE+r8Fb9AZW(Zm%2fiw?>&V0TH|9}Wzc-!#D2Q(+ty>_TlZ zO57A9V>ZBPp)DBr)hvm{7B-5M2%W#1i>Vci%!#A6+JVFS33;~p5R^*N$XK|7D1gPW zlKH47(2;W_H>R3d@36@H0cIs}8dIu$z15`kK=lRYgN%mNxOgfH8w5~WpWS6c+#6jg zFC*YJR3t8W?3f6G2ix_B0;}r4F;jXlaYZJyML;qnnLM;Kd0_Kwf&Y= z!s23i_T8Rqin4-wKtwr1aj=E1#L-rX(3xwO?7PaYo?)r-Htpi9mvQBVEto#hE{I5{ zhlC@3CIFa)CnjW^l>H@%i9{Zc11o`jn*0_K9^N62JIVry*frwQt7?jY9A5=&R5%s2 zo}oAB-)x9-uc(FuRvA)u!x+}dax8zGHf?Gl%Z}mglYD=Ufle&8JK1H*Xj}?ji)GAiD~1y_DQ3(!9Zt-5tvV%J~3 zG`&?Cnoup3L1^FGA9d3Fs<*6FGo|L7YKPh+4@o+Wa4h9s?8IiJ#EpTUWR52Qmz;D# zDc7NY2qnxzqLiG?g|x$6CiGN-<&v1@4T-1V_4F@iyhVIb{Xzu#Thi)#z#(v ztnuY$1#{rW3tIi!`tUzciDJf2U=N9V(oiF-Skm`hY8BmXf*BruElb{}oiu!yyxH}4 z@yqrOH3^QC0>!pSliKsA%gT0N2Mvb{dd3hI{1wsJ68$K zaBUPi*zh_vLLIR!;-!9Wp9|H_{ief?+gZp`yC<;7T*_^t2J%bxUw*RZ>`cstl8AJ9*^jWApW)(c_whF(aqn2`>Qbtj8KVPL<%cb4A`s>}#jg50#w1kLsJL z4RX47a^J1TuueCMvkuPhrteWK0--2uly^{+TCqHw5M~~Kx^7<696gDxoV8m|@7D}v zV6G^)Bsd`UEsVY83Lfp1oH3O+yXkfvuiQN359PtFOMWskH^RbS{gJmUK?m_0H6kN;meg6c) z#P5rC3Y~^I*NUPooFT3>-p;_yUZv~VpR855y1sJ{12Sn>M$QZC0oZ&rx2Ca2cN#*zzvO)-yXiwtWMM>S6d*OsUD)~nBvz`g-Q@|Mz_K6kugSX;7 z3#ZYeo(sh^<*yqfK{j?QXS)V3UDzf{ifQWW*5?;0JB0b6j@~>fJKT5NTHO}_wJcH~ zyL^eIrGv;D9kmQu(pZOrF=5VvmO^}Vsx;Y0$8|#CXNPYQ)|nr2Pr>NRAFfILwfSIhE*NF9;kL~mf_z^psFMtalmRhOcE zW#w1)wDXI}$th_-t%ir+bVEUy=2)}JRiL`tL9zx+FJ5!xN1h7VNJump=MO`ne)6dJ zN+~Rn?ObcfyrjSqjYZ_+M0}wnIOU#x6<8r_(iW>sgZZVO?bI(i*djqbe}sm?!n5K- zE~_R2;;ym7hK!%EEIG`gw%@;h-tR{?x%@$eJYsBIJ*214tc?M+M~+OO5PDR`T5=g= zTjV$(OzOfL-5#r8SP^434{~5_iAwL+q3F@r^Wo)-!9lrO2-YB>?EBst7{oEv3js2y zr(b{cEFi3Gj1cvHtu~LDmv8lUBQ+D9{A-pssz!ZB2^}DAzs`DQq1V<)(eR~dCGSp# zo<;G{pi#gACD$pyb`!8gO*zA9Q@)6_r8u+=1LlxhoTIu@f0L4K%z{Aq z9(16A{vv~7S?^-PhrwUFbqIj^1_B?o++{gT^<#hO+&D~gl6|txvA>mIQbGnEF(mwf zW`TN2iT)I<@(WB-CHD{lbEho-_>{zDk?zo=9H@&7 z>{y|D-XboLVyfopLL}Y0nn_X2m_I&2}9V0^6323OO1rTS;W8wAz#A)EHFY4FdInU%V=Q>b^E< zzcLmQ{?I#W%0NIt#-Ne2DXFOHWa#_rpn_ZSHYA| za8|DJ`VkXocD)83EELtKEB>58dDd>K2sJ!5oYDx^zAXGKsP0yhbN0qvQe$? 
[GIT binary patch: base85-encoded literal data omitted]
zjuHM9Z26|E2ttM$P{_yUEe)g!#sl_bzbu${5R2PiTg)I_4w0&rWn3HpZ@nHo7@PL) zZ&3w}>z7gZ?ss&Z~091Sl7(cmV4#o(hHQ z-kx0z-EAqGylCrVE26{n?@OEQLame{!`fV&usJt0U;dgJN!oyQrr9+N*sQp zlkDu+_MU+vxe9)&L2@1Yij; zV{o?Q)ufk?qsLjg?GGw&r(AUS>Vawg=SAv|Cn962e$JpaWCM{rV?ijYrRJr1{d7j0 zaShFdx0^NeVvIqStdSpFR}}F19&WxRO1sSuGD!hrh_i{GTi&QhxOMIK#Cy3+3mojf zAEb`+(gdtsws^s_nyyC-VINqhdjtn&_3VTGowpP)GVECk&hsiZ!hwkwz}~qhM0I`= zx|+nFfS1&CvzgyQ=Km52A^Kl0s_lLDg$3%UgPL6$zk=e?OKi>#**9@Wud$3|Eqv{Rps*{=Xec$_#YcdQm7Uxb6_l8E>?XiNr zK(7UDl{oa9nzt<1LX@;M6LS`#jSr}g=O~>i8z^>{F@0V+2{O_{l})&)0e&&_A~EDo z1BJdHaVvBW zE-z=3w_hBng8lf*+OccG_*+7lp)Eym^Y(f{wn{{2Xg&(8|4!N-~jE&MPxd0Aq^ z$$MnQ_nEKyrF1gJ);4h=f~x8Iw=)}Ckhy&1@$MXfHQ`3ezuS@M?xogI~w-mK< zxp_vA!j8%bS*Fk5S`8XHnmw<(SvyE0&+m&?2bX(9FkoVprvt)+`YL!vbBy9vt%5GM z`vsQhg}r6fMU; z?8d+{3Fc)mmYxQ>wT-Fb0>s^!HAonY9%-r=_GJrF1vC|%AzSPCu`IIo1njp)vH{4- z)1ZgdSA!$RalMiatf)S&aYZj~6h9c){&H2PALO+AqjBxaf4IPbF6K>+u@&;w0(o~S z9ggvR7_Ix z_PsgRYAnUl9ay138&sMz-I)bCGUvu+;@pY8AdpBu$?lnUXoAl7CH&rr`e#cu&a0^eE zHPtwi7Ix@R(mi#>P_8jNz20DzmwGQ>MEV9p9ZEg3zzRbdsD%bMz#a8VjxloDq}IYv z5z`;3QO0JcM!rx(ywYYkmNuuzk*|}psqRbjg-|~3$q{4!4al5aK{X#O`Ti$Ns;?M_ zznz0lHrftp&q=e(SIE8~dO{BQoD!@hrdOk~JID~kFoY~;V*8=sMpa)oSfAWpOY;LY zyqNBnUuN-e{8Kg`)>_JWZ{79c&3v_rF;kj zR&yaIq8z}x36`a=?5)(DtFjq5r%x+r$b$1?9SPslX}-R0fxKJdRa;7za3{_&1)>_( zX_c7P>#!boZEH&JCs4y>3fqN4!a&wl;a0R>a5GZyjRze#PQje;-7(=}z(R4j&U4J2fmBF-yH z{)otg(NyFeYM8d^1WW%@P$5)p?rLLwF>7I7f%{hxudI%7J**0TGuR@Ms)aM41}j25 zt4Lsp0!oGcf9JBut^m|QJ}0Cp)LolX{Lv&Jd1j6Wc0nghg&hgL&eM;V30TdhIkMX_ z-om7Gx4u>we#i8a_GTq%YK2fx{Keg!y%l*0CpmvJ|VVtwpYKsAQyDWkPvsdB$p@h z*_DX8*_WPq76Fch=oHpGv8ml(>X zbbpsD=EZL_CI8mR6qK`z7vxv6_;2-YPq2H@wK1=8E#P-76c&%M2iH^)`G48dgC~&C za|BBF{mEi=KUUQw06yn;S%H?a9iRy77#Xla9hFBeDj&`8-=7qH!@2^X29c9u6zWFv zaul@?BV-X4**iCfT&xn|nMH${2&VD^I3NIM{%W)g1zH~njzeB+px&$r=ZS?5+T$B$5}zFz?{$@4I40 z9Z)iS?*4)Cyc&&0dhrzw3t(R$-;t%r8}lKw!mjELOoTe@DWWZ|;e_C!MPa_5a=IQY zH}r8sKzk<^$Gb?1R9@&wYL1>x5XPmu)DlN5q`ZMB%aVy-@U5J8IM5#I8M(^G+N2SD z1q)O4s6~@Je4NCPY8dE+@isNT?OOhrZAZ8{0oh%#O`I0*nEj%p9UO1@9kiGqq|tvD z4;)*ad}2mg-q0wIqf6}`bnWnf@*nUjUS}nysjtPdu;4{GT;+Z-~$JNT-oGNFo29T_-_MBrwqr)Y;Dbm_b@z>rvl+ zP_Vrf;$nn4P7gDd*!RWQ;=8N?QQ%pS!dVnBo0Hp7{RvcN=XZ-T2H|xJ7BwLl zo=R~ieTTvK0y7b?!URtn;v@LhJ#Ju!!uxd^|1>JkN>I~$T#Jj99D{R-PRA(URE)5) zv!7>mvJJ=l?QJdAu30z=6=J;}G^NU4h}vkCT;zxTeLpm+B-fS!Nr z{{!QH7aSTXN3xAI+AR%n8JFKR9cj^b!g?T;;T69~`4zx^I)!0~cg$l4k6QZQUL4>B z8=6q4S;!S>lUU04;Hd#-o(4+nE^-GQh4oe=WQ^g({D{6aw9lf9OjVlaQuu66EK;0| zo|?od9qf(KLJ(U3FFc^?jJ2EzBS z>c}sC;H9u}Q%R@-kTTs@3v#=lwq?FR30wI8exLk?GF-92zv?OthXF7+U$t*a(zl_R zH03Umq8j&^gLUv1+ogsp0FgV8C(qUYV&%ZrmgqAoHhJ0Tkhsmhxdw+F07A8|ZJQM6UPe`!Dgz*-LZkuZQK8VJ3eG_n2fDcyJ#rTfH}WZ; zh^|X`z((lCMNoiRH^UfFioV>dAd{_ug6U72)tWq>ZODZ031sUynk-18-nGFmsnOCv zs(!8sfwq8k>Y08dw4P_Y=Lkb-iIci<>O4=m8j;|!*#u)wg6r&Ou=q$`d@4s%zl(p7 z2BPF5T3xfkGK?XUWHZdcZ$YVyLjML{6;sj7xWGw+w2Z|Lb>QvAd3!DjnWSbnwiDVX zFJ~^OLx>Ok=k2SKh2jbA_Lor(Ilw~q1jWd-9|Ws_2hDb=z>LwsW+&3GDe=&dwJBVn zEIs6$%SwX8`w<1pT1{4%%q!4d($W8^wjMI-C43cKiJj2W)wLY~#(%Q-sNWxabtI6N zu2~nRMub1Fa-Rtyk(;;2dh~dp4v% z(J3I3a2k;|es zt$VTd6SF8Y-NW?7PlZzzB^+&zT`_X{GONsqCE~9PO;y$@H7VT2Vrk16Crn? 
zb3~Pz&S<-IaOBfnz>ZO&vX+$c8hlcaB3h}KtZGkBF&5Rj5_*>@)~q3miK|E(YnO>B zVrO_9)H%Wgco--EEr7vc!h7CP7M6PHPfT6uvueaAe8(7SSqz2R7O;D^26FROuBn8p z0qn8ofL$@T7s-QYi~QFmQDY+*G)YM=mxlCRxSX%jAqvuz=<;SW|;jH;Lk{k ziB}_mi$9ardXIFt(AYN*AcQJYW%fs1R!tLL9n@m}FTQ>M0C4nwIDy7xb@^Xn$3;Z| zhN`CKj$f7Mu&QRQI2QY}?xKHengH=zmvD-L%cc!W<`I{?M`ADNAtu7vu=xiU?9He~ zr%~969U1ETW|FqF5Gj(aMju_w;t~RAe)ck+LO44m`GO)QElmqtw^iJ2+1=#9 zX|M(cffVWY*iC^NFAoQ3+Rg?1W1qh%QLHR659)@%K%y7EfDl1Z1i6IIt3_`VygbMrhVST8qy6j%$sH~Rj!2f**Tu+{+4vas>%1+gpYQKHcbg?EtbE*7yWcdPY^IDQ-y-f6 z!ONR;FbJv8PhdfwZ&!DYo6+z@!#z|@Bh_(?>qx0tHm#=o9*0%QTr`ftd`H_+Ov#B) zP<4>D%_A#$+jK2UjYQCIeHTg`zD>`gHA=mhscqpNmI;Gf-SFNA$l1?;QYWY`2?MP^ z!|ryXBlA0$D2_lc%2Vt*=?+j;v6ssP9?b*=zI^56mFDz;AoR{Nie#tHqt~|nyS(JH z4Yk=@M5M4zy&ZIIDCAzJ3`d@}cK;H}dDAz(EIyh6beyZKzT%Wqc{qzd)h7K)Z6Ibu zAnWDY%v5_|e{Q2B58wAh8%A`sV+k?U|5J(5!|s0d?Ji?G*Jh4Zn^tt_Jq$5`tf07xjxii?-5nx~9|h$kGe$0&|6$hr zUdM(Qmx+xLdyZ~bv4n{#fwNbs`g>oOCVd)+h$rv>*9)uyjtE0KR6%L4u`=QVvt`H; z;~d$58p7 zi=|&fE%8?CC_AMWV*}lwF_+h+H^oIx+N%td@R^0>!cXi--Dj&w^qE>i`I!UX5Kv1n z$5^c!jNXc_x5ioYYtS!_KW3M^oDQI*;7=Bkq94*G160~D^tvA`PXjGKQ=(x|4D8OP z2JN9x((%5er_0d0_Yc>X1sx3BdWnby2??wyPw(uP3)7HPX)gsi5=r zBW5`!?()(R#k{*~{utL_)K|-w55jZi9D=eg0M`lWOu3ay_t;4qtbRR$YJNw04Zcp5 z>d*WrDHIMNNd%=4E{|{H+E%TjCckg3=G`12Q859T%+J@pdu=BNfw+USPMpO~ixRZ8 zbVbi0|EMe@fX;Y0>A50&y{!oEfWr(8f^ta0wrK_IiJfp92rG>b#cUiIC72}%Psc&+ zFBN(8S(Q^RN{i%A8~=Gj1FWGvk;A4ugb(-57H)YtFVWAQ35KD)5~`b#)eZ6*Fg*6X z)(BT2UCNCOs*g6giPY>7oU0jh+hhe8910L0k#oro1WIs6!TZe+uD>{Gm;c?7MbUQg z__H;TjhUj5*_foAI;`Zaqo056-h>LXc6C|Vm< zZOH0@<^nC$Ev+5R_k4+C*LI@oPV&zl}v651w&r`CkDftp+oja67d>Fa^G`)1L z6ILrSlGM-kYj)Yrb`bErtqgH-JTXQ;WH>78k^n0N_&P`L?%I8?$lgmf8TBz$;Aj-r zCRi;xS`le{&ay<2E>ran-jU(Nb_?#nI|prbI0mm746^((jRT`f~eBu;Jx!D@yf!ZU(6;|5?IY*+m75aj-!<5BWXmR3fpPy&|hQ}co+s)Nj5B)ZHTe8Ix}Qj2M7B0M9yU; zp&l_E;rVv&4+O^%LO&v(Bd8Z2JuEU7a2d(DE;4IxzUDH(*8At4z)Oda9naoqirlrO z$!a%uVgZ#c zLl1^F$2zVQkmFoRg#uE5&?8%+*Qv-c0AxR0JY#|beFM8&n73*o^~-Te<%>%UW~#dW z)P}~_m|d~<^#G3?)o?9!ULSZ4*8E-jtA$KDB+6Z-cI@gY6Oy?9A>zW0h0h!k*k9)8 z#q!3}-|fo())@a{bI2^^nol|gqy4|UZsVBr0W`|yJ>H16gG6pUU#k?xY8*}yp(+g<8APObrzq%-t>-mWr7&F@{B1|srtqIN z;M96w;<;6aOLo@*D}kRUNC!TQ^BJ}HI88FOCkICvjNkAQ!@a^i+n9;c zU7RHyW&-_fR*3E*eO;Ga4;TM!%gCj`O72h5v@9wWLmsH~hsdN{JlFX1-4L~#)es&K z5!)scC3on=Jl;my>ew-G1Jw`);4%hv#3^ps+xJE*3Hh^0khEkT>xE60+9(f-co5fA zB7%)TXi4!K8i8uYKHz|gtHGhGN+={R(aQp^=Ts`;+6I%@LO|lOID2L0~--#WqyHLGxalRR2P$<-c;QOPBKp z*4%M{70rnvSSUr2$3S*CUD>t#Mqjn#-MAo zSE+d-&K3?c+w`KH{)yWmXo-i~9~zPv#mPH(mw`z^#;d5%b3`b7%hLLcz^PUx3aBi6 z=9Fc<jY?5Q4eK+ zq#i7|;F2W-ax8q(?^{RNdd}ti-4Z<|w@=OQuEqL`dqNc$AFQS!b~^kBv%FK;Ru;+n zB~lG`K{JUTt6ia01+j~S3jbihKpN9*N#;rt_uCjc0cO(HiIbO}p-{c@rJcf@uLoED z?H|g}>v3l7{PMV){_C&a02#s=DuLUkr<55EEm080B4{yzrUVbasz!g4k|Py9CB+ml zNXr{lDRlsJo#vZ_Jo@O?nXm+VXLLC_88(SXYaf7C126$LeRL^>`IO^cR$8bcVN2c@ z;t%mrn|0GH3^3JiMo6>#^d<`KG#2OF-mG=Nc~=X#4!%g*g{$8pwhDt-V_lIJaSI^1 z3`XQPt2EV>9sW^?5pL%8iQeFc=1s~Jr$iJ|3E?HWgQNy&bEm1)yFe25o4Axd)^E2f zmrSt^=I@T1Tm(*)nv$C*OG>8~a|JHOFR9(Sd-5?XLiWuhRc?Obqj81;{`ucHF-ghq zlrm=hK||^%g_FyZkRegJGs^UzNUluw{O)yWI$c%(9hI&A1}Lh`_C(OioV{EKcBEAF z4ltFk9p0$hLj5UE`JhMLVJyU9_J{Qdrz{i4z$=@MAM&{63iTMmj7cWYEyuE5g8vw+ z@(`+#*jyDBLxR<$)~WfPJ>wn^tiQMGwBN|I8_sBx#^CRFPtIg=7+yHt+axI=X*@iP ztk?0=73`5Z+-WHO>15=D5X|JuxnK&HBiWSd;W%8FZF>U22aH4XM7}<8wgwn89(ZcC zUHJWf+Lv^Fs7wD$QHz&;gI-jm%0j!p#ab}(fRz}k#P-A~wCc6n506zEvfM)k<+~66*!vBGx3+x zvFnH{a3@K@dW<%^A!^ivSke}|V0z7RLhaQEkPfdWw2HswAhL8oc$7v7Y{cN(OY*xY z%4YcKQ>7y)t}LlQNWn;A#1q$wB|uf3o81%GS=k~v{uM-DdcZ)} z#^MI0y17fXkrlPwUBH?I=Y0Dd=|~wFHzc&gQ&Bla_j@7o)yMNyx8(OMc-Kxv*1?ln z6vR`_|2_iy;rSPT4!W8d&>Kj=AadaljB1fu%rC2oU)jxvx;*x6YM8#C5=VVtUdTM6 
zbUcwqP}rQLLMgo{6DzGAG-OjMKn}z6&MKcU6hYz;rdV>6t6~q{N!c(a{ZiZ{akR(o z{$^B3N}n4NXqhiThEvE)2-qdl_&BHHqU>WDgrRQ2HbsX>Q`=r07i#zkKJ1n@@G%Ol ziO8!dYs%hQVDVJ5uyYVxynFSPV!>hSZAzBr#4Rg;vnQ$XKUn{y|IIj6gcAYC%$@f< zc|5o771QBv*!I!PSFRq9an|%nlF*0Ih>$Mn>F9+3m$4S*@IqJ@?ipl685e~P=g@R6 z$cZ=ol9^PXs>*{%vK2JT7i&#Y6vjvU4mp?ua6zCB)+_g^StP=v1ee(naNFw4hjak9pSgm z--hvdZSX>7!vFQNeq`i`H+UAC=PuFtiH5nEEQ42+vK^85|HAwxf+aGy`UEQnM zJ!oBeCIY*s+%`U{S!32eTvhe$ZBMg5waHcP04LdOAREPXO0sa3T9RhEbr%+!B<4!N zMD#KT-RYgR+Tv1NzCRGvq2C2qg^jhP^cuaKVM!#6)@NJ~Msxp~rtJ?iz)vEF6D1Xx z?J^i#H9Ah2S9r-CHN5(T;~ZQbL90D{`PF0 zA&suZK~Cgl$yMqzt5sOZNbS!j*LC=>LE$<330mQ?a3yTVH>A^Sl3Y3g2 z<13P|nnet>EAgOjqOnf^9(y{UlYd>)se@M(OT&mUP6Ae1NbsecxX9SQ%d;LR9^b6rG5w1ceI z?i>u_8%H=<(Ua6`dlTq{IcMH_vj&$kr=2&=G@6%k6SU_e`=;16mpjAVqN&Q7^u~`z zCEjELEPpxqr3ZCtCl&^q+#{=W{N*~4Py!(*p$ma$B^t@Rk|`gTS5axHwr9zA%A2g1 z#NQ%;>2ibu7utTJ6}zsV=w)3?t?uK=tHBMoWQp?w?&?E@oFM}|+4u{k1O~ihQo4db z9s+JlBLJfYaQvPn4~N22N8;g$(@R4ce#XQFkp9lL5%K(_-^5)|@JW*_EaJ5$bbXK5 z1V`goFnnW;#<_X0m7jW>|Ez#G_yA^AMO2O4$#8O3H1u~zlB0udd}GE<`Hz4&*!R!Y zD@MuNK&Xetl&;uk_IG|*1F*)ne{EnDE>98MVvrbom|ait9)KvKB+C! z!U_XcS6cQ1&tNr0ZfR^^0LP#)Su8U176X7bU0zu31w}By8N@X?fv|=Mqz4*(*S_*o z#c@s6$|$j$wfR9$?s5Ou{XZ(J3y5Y%k1kFh5}Z}~CE?PSa}oYZ1FBu%CCKC{>An=D z8H@NVUWM(!#X5I(iaH#RUOoQf5e!UC8G|(?`j7{pIiN{r+cZwraBWnj7;5?QXM)#rXfEeXpTcD|jtl3R9 z08vweX$irpVWD{eD{Otpbh9>`3DTGN!&;jVETkn30S#^J1?hBX`lExUX_wCZ6^Pi@ zE64n@T6c4^=WvJ~>Kugydh_d|^xd02ZtM9qROGMExM~ohtE!S7!F!ta@{2M7RU&Tc z*!F-N^(p(n7Fv)DIv!zNA(Y0oosr(RLZ77_j1pDSS4Mjq2R*e@x5s$CGB3*UuUf*9 zgc51KKm=&;KR#g5#6cM_eV|BG3r)R}rSw28Z^|B6=wk50SFMuH6*gL@b$Hi7*v@Ka zAyk;F`@lr3V-UxP&bsBlt&U2)wc`}?Rg?T2EJtbp<=0RPULRv4*{a0O@*({}1F-v2 zm9&~~1NAJzDCz~p(L76c-;)TylY*lb2x6^@5I~3f=4L=*6S4@ahXI3)y+DHI+Z~Kl zmUWay?MpGtrEUCL^E*msODvEb8)1)JdNqPMi>?vJkd`K{N}{c1DkKSZ zVFAL&{ooOfver%jaBME^oBX>?G>8s)klz%-`FDo0(Ws_5Z`5!23kk_ZMggg~;mpeUlGLgla& zO<_WexG6{VJJ+<0o~1Hg589y+kIU4;@>H*V)d8YxH|B(!lmn0-A*{l?tt=a8mkjk= zFQO~r6_6;V86_>_`+~;kKe(Ut4r%9!fwT9a|9w6>% z!(;ZInYWcn`{-K4<9ZLJ7Db*_ zYK2UFLMN(#7}ic=e>C`V5W^eFCdjy?;2Hs*%E-`~3j{kRN^;rBAfnP*DIvad<9y^`}6| zZ(9lR|3Q`WXiR4KU9O7(hdhLb!w43(_;F0Oh~LkwCHSGN>j`>k%t=K2&ROj8yA!S) zCo7|mYa``rzH754e{bw%UfGwF+!=n6Vwx^of%Sv=U2#8f13s&Uii*}Fbh~B^lc}ux-pGKLR-TGeSv_zs> zxaVI;z#XD<;Kew=Kz1M0hiV@?r@S`2Hjs|zdNxW=m>)ULsadw%4d$r13N5zC z8`c;Z6ySbdqfv>d+Ol@qOV+<@{dP7;BJ*^3KXLuqv*H$aXFve?+6r7>Q{_9TlnGa~ zB6~Iba3W4_yo7c8c=*)cAM75bc+0pOst%y?ykz+bFj+Df$NctVeIY$@qYLFAf}FviWP;lGrs6Di1+A z>*n5^u+u^6W@1`33#9+P$NZvOGEX{rQz^No;MyZyxbrw*moZ?*0=Xd1F8*yQ!$W&y z0&xil9y)Wr=zsEDceXjT+#A90g$7}1>QKWEJ4iFgfetjkYJCZMgkGD*%$FXOe@TWjm6$9w&Y6~Z!@-kzL^DHI3+50y>dly z0pJD(K!ly&$`ap%w~3%GD3=(0tsh;OGrVp)2!nAtqCe{Jx6RaK(=94;lJZ~kPJ8Md zKRU~>KSM%AoEYdz3l_VXkKomnmvRnSfVnM`_${F(=o{vRJo^ruQ;tS*wc!i&$HKe zJI@heGmtx#U-T@-*XJ7cMJv0&b5x&^72&~Zl?$dDLIUT2tEOV* z_8ojP9km0zd(cX!qPEB#`>@@MPmx$gWi^zZZlew;fQX zRs`uqI~fX_fkR_daMj9(RV-s!~(ZNKVwWo{P<)dHFw0DMaJXpb1BCsDx6&!P?BrIvbEnuH-*POi(XO zjesyFrGe4JELPajd22fYX!mc>|I!26=k}_z{>)ez;zvk}Pel*u4VzCdNzey@|3>KZ z%YvSk>Hkdj%Q27$hcXUO)jbt>&m5_>1K>|YM~3k*e1m}HD+HWTUFVx;1`iL2EX_5C zk+#n?6#_20x#xPXAGBgr+%V5&ryyt!+;oGPmPc)TC5=YhnV%>}yYW*o7vV&Z?2>r( z)15ADw(bw-ptCqS5YYh=!_#ta!mWwZbkE3$aN~b8`yOruei;9OscgC47?YvK(2b>( zd>u;Sdj4BY1qb2j4X!xO=c}qy^ROrYK+g1z0tKP#$NNzd#Y_723O7>}B3b6DMkNL% z>1?{Kd`>WesJD*(7|g^EGre9ELQiieN!2!h2okhD!T>O?sP686S4PC!rpb}0*p8JT zQ~S1C6|E{}{TXeKSc}UHnO8khfe`w*VJ5pI(l|LyMYf(_sIgs9K`EKH7tb@Q>*u|I zY$6Dr9iIe%b33Yh+Ep@TVL%fJ$B*kepc$LxtOSG6<>wNSIL?lgzCgMnv~F7w#s5u` zVS`~EK3y=ZY*FJ{Bn$IYrp5kHzeGhT%zD33T6UUu$CZC65c2Ho+*;ecTxrDZf-k+PuztvpwRONcyosQtCIR6L$nrchC>>im!Tg@U1 
zv_QZ~zvs@z*ys&~LsLZ&nOu_J5%0>p;G5LIeY0s6r{wG=YlS|e0ei9G-0byYLQ6;c zAJXzB0i7Oh_3YJ9e5H&;g*sZ0(eUBXlG-oobZ?}YOT&yiXm{{+zKFXW^{ftFuv<7h z+pV?d`ubBs@L=bL`;f(K5oRv_sEl<+85TICGdoT~3jYFS0*An(l5xrU8COTh#xLG4 zZp!5VcG*HHC59LCqhvFZsT+2@Gw7y1D#30iKK*->eIo_PK1_HgnZc^cmLJJ_pn`i!n5t}V zn4o-TY=8@jvrMAZ{4`r3prWaf9ApV&aR9F^z&JgYBE*Xm-Qo@eSWAVn(}0;P+;my= zkW{0kFb0DTN6Omr;yP}F=97yT^3{?Htwji$YR>VjEbI|aWx#!9vHB2b;EO@@N$w12 zmdVR*B|Xk=j%q9(v`H_KIPkxsKlQb&JwNmc(F6%d(t-5bGdC>xr4{)H5=ojunhZqZ zK>c5RMFZJT+5i{>1d!ZFvDduOiGoy{~Xd zN`XH`9EIG>ziGMHCG)-D1x=_Yl9#EqpzIuI5x#hY8N=jS8@2t_QwI zKb+QMpJcP4)mblMO5+fOx4Yi|t)z|mRvDj2*sG~FIL1u8mUm&>|E3Aq=7hXimxb}& z6&H3n(Io@jcL4xS_FMiEku!mZel-B^STXU~>~X$D$~|94YXPbP8Pm?)NZrbyA4jrv zLl?J8h(L!;ecT4(i*oIX#e$u~f)sethXwqe0(M*J!NJB;bD`@=qdK_WW?HgS%`_;j zi9#b%mOdVF8_O^0A&>l^-Iivft2czD{6lGhhy&LufKOSdIMyd#m|KbFOAi>-wmulD z(3&C152JF{6LyyV-xd zzoGFkJ&osMP4oLG>TZ;i%ze@wXzX9zbc4Z_8oQ68_5iJ`j#3OcLen$TI^jG8Pn(G! zYf{}qR4S;7Scd*BiBSU{@Rf&2PwGH*W(*OoTM@uz693$+!E7d@H>+LnMM|s^W8PF*h z&ZbOAr^bJB5Y5+yD==T;Ir&E^Q1pjUovBQ}vU4HB>V%6u_g4~dNLUv^w1B^vf#az- zIr;029#w4%*(c;QsJF>^8$48YS3|45J9z;uK+SDb_*Dd3aw@?d0vok#U0Q{5m{%LJ z&3ZOOzXe!__2?)67pZjzm7c+QW;_Qgvx>kqI*+85%k-pMZ@k~McNZm}OC<2^6vD?W zbClu+mpt;kgYj8KJ(oELe}e!Jb0(__-F!%eR{StIXMnO!@x2s!VFB8iu_B(|xyyo+ z=uNvoz;H#MyVV(X)MyzJCpOSf&^#DVgQc~?IU&?uBPcvaP`l?}6-Cz5RYphZ6j+nT zN`B5)QT734`mKmjnR);da=LjAYE7e$c7<7s-YO6c^(pXsOSS;qpNvm$@ zEh6J`tOn%*$&f4bam6TEF+&6}ie-US!UH+WUd6Am`V4srJ_fU3G#; zwe#_b2*a%!#Q4)5`SSN6pmEQJ3UfC6J?s`&;Hx*D&Btb-ifqIP0B#4c8iED7=?h zZpzVsaVjlG(08*gI=_z^V~IVzlguqN{pz&zHI$1lQosW|1BC%8f&b8A5z;|@DSdO! z8g;*E>#kWn7%*3uv3p}}qHz?V0 zaQ|*^xT(1i#->ZorDad1SUmxD>5%LEDM#u<*1)^uwNV+X!Y$O)+afMN+K40){Y+eZ z?CsA4<8CTTyPNgu3XmiV=z>wbY&52!PCjm4Sxy4%zJzALNhksdYc(+nlS=XqAO9D7 zy?$&?rNs)(q489ABXI3rR}mG;RVp96M=4l$Sg5;WImV^q82c=61Nbv3u`9(BEe~`*+EEDXj$ZtwpWP;w}qAvGCU;H@s<9M z0X=seG8CEqQHk}U?Twy^{6=@nAjBTJO&6|SB+c3y^GVt?>HKRb3V66ZG$mPYi5l5g zOywl3Q(QsV#d>0>(h8+zcJ+*KlE#-XoRM0IWWnxiqg9cCQ+8C-{miWqq9CtWGAIm6 zgIKuZ^7~IQ?5SVs77l^lf&h{RBCp#?T(GcuMWuM!BOx^Uv39aJX4r9p{kZk3!bFta z@7~sQxFzMh3`VtjV!IcQnswl?fo5##m%|fmXm})9h+p^E4eFXqi1Siu4BsCF5 zD8NnAo6B#RQ1FOh{GzQf^x-28yt6Wv)LIyXI|(2Hf^rT_!lQnHA8Ig0>Sls+coCfc zog}z;ef2Fs{y2%U-J680;(^pMw1St) zIZ$D5)t_7GYY*x=1xB?GFwo|u-P*fh5o}&(149->_7dy_T@_dYY4C<61(MvG3~U_S zOFjpT(-!#%m#|wQ#fS`36GeEWv=9+%ti*}I#yzs{L0gjx4D+YXy0cs-Bf|GWqf|!V zu8s_n6_Xx`+tY_JU`(}}5z~4#P&z)HS_ELs>6MR7yD^bqidl8LD347X$(rQ#nd=@u zhoYbxmD{wGE$RQwYnPokqxbU2|w6lV#l@J(*cP2=*3U z!xF;@6zSl?ia#qt2U1SyyyUrDO^!lHP<&q`Mhi*Vk9iBz4l^Y`nlV_EHG*z@Y8Gk08;i$N$Bj%29;5zu5x+c+H*-IVPo?TyC5d{@}JyQA5Hc>Xtm9eULc?F#j#c@F;YS z5`lEg;-mWH%x5=5eLkG~?sgtR6eS>qA8H1kk3+UCExVJAy7)tLB@bne!aU93<3rD( zn)0=0EvJQTK1 z>K9z;l<1P5KG~?#lF<%D=?+1g(twl^>?uyV9`DeAuyD}3?F8^Ryh+HT!aDe6W85<;V1hcB|bzHd4-Pwn_|EA&jGt|)? zJQs=r8x8)+&oUq^sgNw4D(?fmj2>_Q6A@BKh4ub`$K0x5g$ zgTJ)~K-T{oW8q=EwJV~11Y8*02{gQdM4}$sMizYS{ZXB6w)BQIOH|qyFZxu@r_lKE z)1W8o*BJ5}4Od>Zu9m4wDkO`){m2;ymxCyx!HQc?dO69ShhxPb_@GrxS&l)yztAl3 zRp-q4ZPyXBdMnaJMp%8X@NiYy6!*rRPYc)x(Dkf6Wi*IT0h~2xI?qx`>z;mat$mdE~z#>fkzng)jVn$myuYz0_S8yJeOEM*i;!50UNh&H4o6+2X zC`UhM5oin#x|iFetxW{1k)T_8t;~uV73a+FavG((J5HKt&-Pia(#9fv*Rg!9?R))1>J zinWtmsw+C~cGXM2kckLeN3S|;h3ut2WMQxWs*^AmK>7WYjkA2S_OfvN|Y*@$%bM2%&`+tb{-vLBPG zr4ged-w&vQ)DEBBiRDo1!ky;x!9)+CJzz$%3(5KG^!GW$%STyeY9F-=8eM8&RYrr! 
zkGEx<;A>QyzB%z+WBRrLASBjn$H$M|&)AuwQ)l8j{#G<^IJ(78Uju zQ)t6jmga;-_33iRWuaz4NPwAxo|k%luIU?rNp@t?$f=(*hB8bN75fA z6|T=gC3?9i+PnedqkT-uhpPcjC2jN_Uol=qHsn?(PSwg7OjYO;uV zK^%~w z8iMV$xB4v}_CxWnYA6=HETa|ekk&pzMX4i$qBUCos&s8TZv=|WF38i~_7t472&rn* z*S1#$(Z9*R##~TaJ09oP9<07*S0HQvMJ2;cd|k)=A`&OyyMb*DNk|q#-Pdtdqs4tv zHR#S4BVb{qO0%JYKi$$0-gmP>BEd~isgxX&vQb@MN~qw=6)Z^hbpdqb2sP&(-g=j# zk=q@(hoQM)X8YR0a?|kD*!;Tjul~bl#+^B_kCNjyU5JRc2$_y--(ED?uXf_IZ#Cw; zru@)RDO&Tq!KE<)Iy>R2QxX5I3zwc1!YNg1M+TcEzq;#9fYb5ldzjPbh=Z z%IVXTy(kASIdMu-S$i#KFl>q7$|zi%PRG!GIZ4;w0PH?i9z=`epYD={^vSm`6JKUf zIYnu-oSDAU2h{dwljUmJAY`!SHf)5s5HH-4DYBJ+h7!nSf)1NM^mPT)SpSj8OHwDG3-M0-+43j~$Kv|+zwbJX3_^TOZIg5K7X0N} zky43V(PE#%#`_J+059aAVjFWuasB0S|+bF(<9O?pc)w60-A`aVYRtXYlEu7^NLdl z3w8r~7)u1B;JizWBxYTTqXiqj5;5rUdd4?Y60Q4+WQTrsvc&&T6kE^~ByoHqfu)0< z9~Z7V@m%V0E3=Us+M0_69n8p)`5xot5t7kmuh{BL(B7<1;4k~!9#7Qyd z$ZAVZ1rO>c4pdf7YtRe42F6|`6to9VNiT%Vp#aMZu(d)?K)z+-XaeFBb*szd3aept}f0;QjztXM5P{ku0nWho`9h^{g>*^Nt%wx7IW*6%S%^f}n{ zX#alMB?4d6Orb*md?dXp-F15no<_c0Goa;KWPy0UpTcCZsaWJ3ppJB-sMwMoZIFdL zFwv3{X=eidY_fcLXXvYLtp222&i~v%x1klsCc~hLx51&8!BAHh=~ll7AnsOnIpmDVGxnBUp;6^6R^-m?SI1f;jl22HbV25f%0xQMjd^ zAm75$_LR@?i;sf7!A~jET5nI7Bp9nu!z$lVFilS_V4EO+Iua^d%@visDGCRlFUjjg z&sL^f@Zmi1MGC`1MFA+r}wnP<^UN7yf6xpxfxE*Zbl6*w;MJ7tcf zo>mnsn?28Rot!=ze?{u_Xn%-NfXA8ISG{8Vyl0GrndbUoNZ5+dqx3yHm7^vq(sUq^ ztlYX6583oEA3TTS`BR`E4y>5RD3hv*4Ya?+dJuM)``5teV+f&Q2p15K9EdEIa2WaP z>&3q0;6<{>K#F!ZEroag@%H!+&(Ef{SddV0bW1c&FD)lCy%e~+G@g- zvGEJH%Fu-rq_kClK)QfBkdcrOQ>`op#D*_n#JFr4LDsgyV3#k}_si30;dU8IP#XwO z>SsJhLHiJX5{zR2sg;$5Y`;UO8IC&pRTvf3s_4%Z(i6Ig7(~i?Bm@#EYs|(avzJv~ z>euQ;nRzEP^A8EKgCK?9!G@_GPpUXTG0jE}d{z_xlOdbA5n)NWx}Cu^;=AQ;Xg=x> zz(5q0TTR@x@D3kt2)x1|i+UZzvOV@0CdwE>Uo*eot zl4Zg&FY(6c)!JjZ9xLY`ByY5+2WRuPly|nJg!V5oZ1C{@s@fd??*7o>=CJAkJxb;K zgbHtQ>WSZ4Ko#}lX<@FSA$SnJ#q7PB16>o9=S{I654NW!>_s2~*zrmHbv%LOn1(%? zJUNOKLNp5gp|3&73APv49pYIC%9?=%Ci4Tn5vm7{3j}#XXKCGkD$*h;OKmIzx;Y2z zx~q3(q|1_sQ4U)+GZ9CF4}-wrPf6Vkmz(1FTy#+Amg!Q=V5~-I(VNoGQf!bNJ81sT zA7;3*7)mBGwPC4u>op4m`7h*0(Vf8R$OGp8WD#$hiJi*q;miyDFqWNhrgEARBJZI_ zKgs*jFY_+Q)l+y=G@~jg-Fw`#k#A60FK?84Zh=aLfE_*<0gZE#PGx#MTp~<^IPM#h zu3;zB8WUB~{};@x80J(^c)F4@O&eOgh@#vFK9av0&!(7&au8h8(74wy{yCA;w`1 zk0#Z!q~`&|o9Cuo7nwX-lPHHlfRDoNt=^D2HmvlJ%m2HkzigHZV%lYPB8x%%cgFdj z;4eAKYVt^kKKlT2q4m70lsV^vp&e`$Q3VZe!2&3t&2GgaggY|I7^uIV%4I=+1%XFk z79Bu+j7hXgY?@v1V6v##1r{@Ud%ro=+#v%-M3l)d$EzJYt9zPZ>y1UU_JovPtUpDi zeBJtu8aqlmV_~_)HE5cr;Tx<=UJ{bz(7?k$$-J(tOx!eoDK(!~)-_MAtkru*jbvTL zP5&u^pF1I#v>Z4i>c5>n%us6R+_Sj_<}1qG;I#?%>%3KZWs#$b`6y8SbG-oKI0^2- zE$PTRDhNg*xp@Dd_o`40Igih#Zry%2@(A&|3Z3Y&CyR7Ko6;;*Ax7yW4DB1}nL?mA zv8aHPFafH8mW)(j4CSkehf{_#ax@+yL%HlLs<5Y{v1~u#d{+bl(^je=LXS}U?WE;E0oDCXI(vzQzcZx6N6$}(3l)P z;Ss*H1jUjfEuMt68xhmE(Jc7UUdfg*=bXS1>YDHB$_)H=+G;ah()Bdr0p_3>uOj~U z#j*OIa(-Uh*D_ZalDyuD!{3>RBva`9edlxo@YmjU8a?#3p~^$*-R5}2M8iDXT|zT+Q17|#zg|TnxY1SVKtCsf5ZMZwUdr8ZvYIx4WxmWH#yS{9sjYi1? 
z{;{9trzx*?@uVZn7#jz2*B!A^kej3W(xddddWL68fn&Ja{lluQ8Dh2WP61asE?s%6 zoDQT?ZBnPNfBTO*%hO9boe3P$sTxyxO-_tBp5^m&a*=;sgEF5Re8L}!$`l+kTazx} zkikkxDE66EipmiITr-|4=?K{sIlM!i^;7@if!b59j20R$b1M9QC@>w2#;Cc!#eu!-<`q5NR65Vs<0PRfzE6PnubkBu^5>vWpMcjJ70RLmh z?*#IyF3J9sE{BVs30jQ`RSdXm#{HY(&|~M)I%}cixVy&8fH}#-MIqh4`=K4AQ1_|L z>x%y=uAj%Y(`2UIc$#7rGSP=V>oDxj=TdHkY$IhiWc6WZlgkC6rIW+;h-u_X^zJN+EQ2b}z z`*S=n0l}In#K^IY%2qAn-7sZWFhEsk%MAC zH4noK-j-`o5lP-Q9pI;!e^x$5Y?_AQIN&5zh;*{$3zInYe|1R3IINUw!xh9}ke)4( zu9_tQ{r*#&yhaYIO|VEvhSv~tx7TBBh7hLc5dmqG%J*36Z3jdSYadvDm5S9xX=ujW zeNX*IS2^|SiT6mmt|fmhArH!O8)F$bwU3p8_5xGQ=rWXbA1OT4`I?jN6zG-v?BRPE z0&-tMslf1#FCTTCD(}fX%qRAJdb$vPnqUba^1BJ#cEW8JNr2O;yGZCq(kO!4XJCeX zv-Tha4}9mR&C-0|8awE3M~Ll8*L-8zQ`^O7;dzoUV3JYx*bo#x)gOTt0e$F0=ODy& zu}XJMG*c~*0U81<4yS!zU%td~`x$7J1-Ba42f{$7>OoN?z{8-K{^V5;C*oZHl3_%g zs`UlRBKuAr&#V*M;37LysjZlBhKhm&z2|`~Y5+k%zP}^?Xit;=PVr0dsB=7)?9Ibh zs^x2pb^_Rr)__KfR3*X87(^r+bc@yNqQd}xHBJ@A>FAgzle<>ec%!_ zrbqwwGwC4!BX@Ae@KYW6acQoy7Uiveq;z8;e#|1EyX~!-rwd zvAZN2<^M~TpW3lJn%Nn1!gDoYc^8abdPKIhB?f7A5rb0p)@`_3)E(Rw>0TCGR?^5l zGbYS%F&$19;{3DooRZFihTF5q#!N!;`LE#dNgi0H8wHP^iBVy}OKL)Sbs=#uxAnAg z6ntr}>57Ouft2JJH=sqoPRq-eV9XE~pj+cZlmwmzZUCWKpROb&%a+0J>b2W6AEz}7 z&`}ToPFg4$EutDYF&a)a#j``{W_L~-XUL51+y@qj9hB}-eEb};EyJ_6)1Hl~29F*b z8~!QFyI~EjY^k@bz);#2aqN#X_f?C(M{H}mV#kX~o2aW=6(YBbjYF2fRY2? zkOTAytL^Kd*Vv0zHj-kJjiP6cNOyji9brF9f}N0e3e|z$(_2-!8hx&jg=XtBoNOJ! zg1M~2oewu3fW&QOaNA!QId?>gbk@6b+})uw@zsB3;S*anoVbOj|EKUbd^(mS`O-AFQnJ`pkZu4_qM#AV6(JF0NC#-de}B6`i-lb zb(2WhpV(Y19n}T={`iNtZ+%TuZqlWZO2>pYtj$&J)o7PQgwv%k zHvxBNMpxr+Vxkx>=K%>dlrpu<7$U-Rx7&+uSBf2f(%!2hUz%9i>Ls7p9)g zEVw{h3$`7|riho+_KyQPg*!Z+g;N?L*ZJYD6EK0z5VifJnCpK0@}`P@bf23@G!atv zurTvas05~(VK*gdkZ24){{a!E5R27kPXKp7%nG~hZ$aE@+nUD!{yfMj%i#`gF$&t6 z8D+sH49pLBpxC_!CM^A~l}pt+qG(O)%hz(jtf8}jyxvsSanEx8{t9Ho7}IdR)L0+r zS>*55m||CCpd>i)%%wd@kYi0(#zl*+0Ck!lQ$i&-ZS{z(5>KzO1z>u~0a(q3y~UhS z-v!08#;f8Phyf`Fu``N~FX2fCnqw`RRinC-Hj|6jLQ`~-rEk*^Kmo2|lM<@}kTs@8qZ*|-;}Rs^XRB`H7< zM|#05K;I}mqJK#!L9FuJj*2n4$JvJ@T?`feqK1(GH94QEs?+Xy1jG#O+3}?6RGSw< zxmzLY8Z+I{f>xIW9SON02&wMmuWVged}nXcUjsL(pLrztAsfaES|uu^JvS3~@B&D}mK1RpIFF(;eO zD5*fZ%{uGNfN9&)R|=;0g-+#^F5)p33OxU{`&28)O$FwR@!J6s!Z%O=y?KuwZ<#U|KjBDHS#c>} zfcDKt-J^r?$+7e77r0;bZdA1MtBp9Zw?I9J1kxfjR8~-=m_Yiq_1b}Sfq)R1MqDNd zAYQ`MS2Q5o{^XimO@5$_f$F!9q&#lGt^c$SVz7rt60B!|O9=Ea`!eByDp`6a))JgG zRy!S6&$P3jFiVCG0ysyqEoL`7x24AbJPD4Ud8n6viueGJdu`{^o_`sA-vIyR9Oe1Cf|d+`>1x=@%m`%|x|n>DY( zbdJ!3<>ESV4e)YkgXs}ufhxu)#T$>Fb!eU#^|tF)0{H*gnP*pt%OT#nwW%nyLzb*1 z!&eot#hm^4De*<>n`(tRs4gD+m-(&Z7?CGgS(x=x%7TOPlzeE8pplE)Nmh@9^L>$x zj^G3h%hB{3+$ZQ-_ z%2P(4W%Yvq=J7{h>_FpOrA@g+@LN9*6wE^|RkBZnRy0WRp2soj>($x+rL>0M{9E{r zM!TizxXS^vefrrSU+d1xQ~uZ7iApzD!^E4U7?l^&k9j?Z*}Sff;{?)!0>jPT*egzS zE6O|av}NVrh4al<2rA&{M@h>?iQ%R$R-c^E1DXrg^x>T_-IE+JEuO2~a4R1vSQQhU zM#1+b-+#UamJDh;`hMG$MFMj{RCXdNxQj(~N>!oGWhWG9!SR!X64|DP;6uJ93BxHE zeQ;)K85dwObS;uxjdT6eGzL~|9vDz>J>g{t#cp^Aq2TKhg;m~=$3 zUiqxB5H?^gps0$;nI5i=us@|n@wKAZbfFzRn4hO{jzchh zE~0ix7(MRqaH`+`6JO)#;v3N}pWuGEsKBI0SH&#pXl0SD1n( z#lGvvl^xm+&CBXtbf!z{6zL&i^l=X|=t1FrB-K@6!YVp8d(z4#F;X@2F z82mw%WgFIRABci>9pck4EXV(+gssCX`GklG$6r+pLG0wLTFZM~dj5IRBh(SfXlIIl zD~$LO>Y&p^+)~R`_qqZV1g9l=-aTcf3ESKx0#Ts6?Nfm8uV@b4Z!aS#*JMqdfw)0c0UbH z&bDH)D3_7j{nWJcpn`N0-5X57+-9?Md)yaqIPsp}7AlT=C+y1peM$cB@cj?@#1+1d*xZ{!3L+v?l5JIRQ# zPl2MMVhs$|9$3@|2JqjA&^n1c=&Cf*4#EX3Q0VPNN!2XyT6~88+f_Ca=m-0+RY60n zohP-leZwrMaFJ1)K(za*_J@#hxu*UeoP_LGXHuEzFp&!!;UCZG5EDkoj6BqaB!l|@a09d$Fjco#y=DH{*Dl|L z^`7WzoZ_#&i19d}Faf3Rsb(r^yrkX?8PgO6-K%lC;k_TzErgGpcX8sW`_JE|Vp zB~Jp4G@6gk60jkgK0jO%wHAtT{{aU33gyQx%|M&0>jic$tJh5tCs>_5L#Se=nAE?D 
zfRn_RXv9ihY&@cbiB97CQ*=T#<%>D@KFq$WvZ4_6A5|gPXjQUMW)Sb>U??&4JKhM! zPt#Frt^j#{3dN@Vz6Gb*ffN`yJU;@EC1m*&wd;d4fKsd>&w855_vchwdPK&*UgP9H zse1G@2e&GE=5(z7>a-X>R?s_xJBnG-M3jQ$1AM0$?xQc3G>Bn|5 zXVL67w?~P|o+X2Tc4Rgd*VGZl!%8HMu>xjEVX#ktpbv-VrVI}{XSKQ&wOzmc(XFn+ z5eb%2=Sy^i(u7w!RU{ZeZPi$;BoOoSsJ*g`TzmqV$Jm?3K}JK^Ra0HzzxrW4MC3Ka zsa3>Wuv9J0KxAphd<~o9^DJiL0unYc|4M%6ZF`YzKcIx5N1|K&9;zAex7vp2VhSd_ zoOUL&>G4c0@;g!N-uL-i5_M?blSknK2asOQf-Zp;p!g^k(`W0|aUMVjA`-%@ysHSH zMRj51SvZOdZVI%A%U+DjYk$Ags!)kC)cN$aEsF@9O@m4aja* zOEqCwI>pLDJT(-)>Ahy4`-v%E7Ay&S#m`ZtuDDeL+70GDsBGcQ#@(*Gc@W$K_w3uT zL84ut#v|w{9p$92QE+H5sWGIq!c23QAwUmh*q4$;%8abQay&_!PWIUhpIqm;6{;$( zplPlz_P;B!5Cc$eKM@YR6>n_$Tt{etYYS4$ZJ#Kyftpox zSlWN#sG%nFup>n+mf+LN!Wwn_-;v^4p)-cUA#1UEuE8nv5t3}M{bE+VJP8IH*c6)$AXgC>q1 zPDcmI`IcqdmHSBy?)Mr&)tgPw7b#@ucz9tRdY*6ACu921w4NdoGZSjP6u}sE!j%>~ zyrWOUmjH6(s9srH%A9ptLnYygFwyy{R$AUDnOX(rD1kCNN)By)X%f}l;n2#x{Q26_o>lIoC|YU7+=qgu%s(LCDSk!?Wy@?>^^*+ zs70L#1YeUBR_Fm=%B2BtAMt_DNgr!~ZG^(m6^wZP0DR9GX&bliFDXsRCvtYn)HvLD z{>cDdOsYuS6Fkfb9#eEIZZT+M3pS2k`BAxK{(aJ*2g1z$+0eKk(kaE{80M06lO4nR zCgFQ;6_7E~Y&w&gN9)@*`mM0X>IdU=d2oWazb)m@apNevYh;j_is6D}rln94{Hksu zcJLAj*C=*+^xb%EZTBq!E4TyqycZAlX&GQK4yQ?VBt5qk9p0_|F-6&%7H78XEmcM* zvk?3we*U#Z`uWUzUxcrv$w=s7K4;1;Lsx*BcHeN@iO5CR0+?4xFc+Up)E@qF7qQgJamR{6E!{$eOBE+z{`AjvXLH80r)0hiLq+ zboi(O)HUAO`&53)k}NomVm<8PvbzPVg+HzWic2cXm&S1;UW4XMa(aGV4@*dADk`pN zG$9&8)f@I?WvM*wQVzQE0{_xlfP9eWEA$_h_`gMc~WRnkU<4mRxz7(b9O zxWv4dw_J<474emmGbbmhl( zQ^SO~_Tu5-&B8|S{Q^VB!^zK;Y_KFHrv2;7!XkRy|1PdRVMoy}mP67dO5L7FC55Gx zf-VI6OCLb9BW{vLGnGpgN@!C6JLdD^CksTeC~o+(bIL?%(A3F&J~=4sa3))$A+HPR zsN~gKYtSKxY7WBeVtc6khThTTh0DaRf)|$e0{Qi9q5oNm7!yW9qPaclpZT!rr~j=L zwUx#Nx2W~PFj(dlteq(+KK_m>XN`FlbykY)`5R7t9}S7*_qqF|SvJ{Ic?df|YfKh^ zmeR%<{kBCVYdTJ%Cz*>2qM#y%t;JX%|BABaX=W{Fo)V;}%B+)%5QI`;iUssX>+pR6 zP4bj4VzJLXsNk#MW|mAVb8oI#h;nyv2xi?Ya3^&~dwLn3^3W(V8mD{*Tvb(s15iLQ z#J!QXQ-BF_OU}*<5l&C{aogvp>53D;H>fBF9W94=GZ?-NeU%wvWI2V%wLn`zw~Ew% z*Fb4`Yk_gm=N>D)lcTOzec1qugLQc~7hF&68y`J$N$A0sG5fsB`Y87mN4`x{gCoHM z_Uy*P3CR4F|5!}_1)ElJ|BS8f7V^nAokI>w@X58Z@ z?!U+^_o9{n05Ke}%>Z$t&1`t_$R+`ls;v{JJnkI_1gaW0XM zqWo|24FB_zk4Zjd%Aw0|4Hrr0Bm!Icv0iTENb9w8c#00eHSe{DuMSIlfh+x&Ub05k z9d&iN7Or+i$y`{SdqJOm5=)4uMl&fS>xgfUb>8&hu*kv6CDgv+#e%YORWaVwDfamWP zNd^FSzYe~g10p8c6(AOl=J_dafp%x+`+niy39cmz(X49YRy8roNQ|$0Y?18tiosf! 
zoCnVKGb;Xb3Gr(1=t{)0pL82 z7NK(1Y}3Rjy|{YmkH$$oWJHy{{eUC16R%TMI`S&RZ09wM&9n!RHuv{*tWsjMEF4aV zKuR1#9IC_Ll@e%rzd*-)`c4$Hh_or_{2nHXVQRg8flS@Y%o~eDM@rXcEw63d#_=!}q zWW~s2c%H&az>W8$n)i_RLC+Kv%1dgeBeCS=nF$${vv04; zRt%B5K5*kTHxF_P#;E~-#CYB(H~Gidy3047=3mtf>3?jPt63&P%kDEFBmJqDDh^Xm zA3!i>A+EGG=KA-yxvkOTjpWpIc3036O_RqxOANSXel0U;4@_U4`6P=o@NX&l-cS~o z3q@+o(JuPRHifhf=P;#K99?ON4@dkVuNpdyHwZ41N<@V0 z65$O#%cEMSzZn*m!jbd&5j(`%Y8CdK0dy1-;eOX0UHS-Aj>0s%0ByCCG#%-`E>0XG zgXDx%e>7|cGoeSoK@X-vvxOe4TWjDjwTBTFc^NQ%;Nm;a%ze6Q>i6o*R}?^t^SP{e zS(7B!e3G%If;WP5GqYtRGNVDHoYEFDZ|E?vcAEgb!rFXM-EKKQNe6X`Pd@rN?KWIX zvuV|*H||O(q&L|)Bo;5^-nIb(eMWAYIqNZximgmvP~Qm>&}4Y?I6N{l=Pv+0K}@>0XmO4 z#pdzRm}Np&YXRdDqhJUlw`jhdBDMX+ka?t_!E>uq8Z!A0hjQEw#`qok0xLB-soYQ7 z!*l~2reFS*z`yJ@b=3m#$crE)^*NHkzC`U`MbwJzsPi!^te881Y4M!9aA9c#kAe zX!VvAt;?$hvU9Zbj#CJG-sR32J&4I$##ebBC-0PnYiPH<4RbIy-K#UT;7LQr7$7sT zv2=$m2LwPR=ABWy6LG;@DgLNF6I zs>ViYq(66vO@J+{Oh-IF9W||9i|l+ErG$pnJ9FY#G&lGXiz!wE0gAF)OHqd&u{dlx zCRv6P=m5UcpkU5=1*3BpNW?6e0dD^Hf5yccMu=ipZURxuKD}V1T8C>Xl{Vn<)NB7J zpXUUh*{}k6VsiauxWfd3O7AEyb;+nz9 z6Bi`bm}mDefnIwtW+Uza1~fBE4&jNCD8UB<^yWIhBwbFrwWDL?al_lpvXP0W%DP3w z^*OzwnD>Mvyoof*8~hTwo61}Oy99DZ$I~Jiz%(vuIObo9d!K5u>m*PD$FCoLW2K;G zMUyDL6C_pyz1*I7o->z+AQHJ)E@G=NFlXV^(w3%35O=$8PF63AodQTS%OwfB#6H#k zrnp6}n-y-VxQNc#glFZ0AXvmd(Wx+togrDnVNut&bGh|E1V_+KzzuK+W+2F80VoF? z5tU2qGjf8!f!n8Ap#(qcQuz*N()*o4cjcOTbu%gkQNd>bZAc)nP}Q_vY=fnpS|?-A zBKYeJexVfFg&NJokZ^>SHbh|P&AeCHaT$#S?8zJFHa zmAWZ?&boTW(tb+)B4|V;!?2?Z-Vjb;@?JlmW}u@jHb2cBQh0@Ww>O?wrOa1q zee`vzmoqMiAI4q)9~@X6e*j_ibao$TEy_p2<_MN zxIO@|`n92Zw40yG7+?3^l`D=dLK{plO#!Z>iEx?qO8oKM25LRi&*)`3MELM`0}i3eK(%Oq`NJ9>s5sL%1bytJW#z z&}uk~L9!2zQrzrH4an9=9)FJP;l4?|k-G%U39hYvpv`Rn00uj2n12cSx12fpr-!)w zA$+}B*dU?V5dbhSWMf5C0000000BXosTfnw|8CBLPVzngM| ztz*3Nr1Lg%LA*R9gAdVM7Xkto!5{JZ*3c6F2YF~W3TG-wQ;a zunI(XhFwAT77zJ=Zq1}sFCVN#$#t?kvT*&qG9EV&g`d(xT1wm3V#?=+eF~wwan*C_ zp@X5|pq%w=jU6A>qWiS4{|%@O+Z{n)xG#YROhxAc{9jMzQSQ%5`Ho*;Zh^Sxy@87% zqCbe%Ja{`+egit#%v-_MNrY#+S&21Hp5&HC75vfG{ad25qXv6!*zx=V2sWk`&C;uG(WJ<*w6FeTUW zZZ*WML=$XVdT9-6z-GHwIOS{#J6X&ri9h&G=VosKi4NKTEsu@}PBtBmQK+M@=k9eT zvY_Al-j3x5anN8o+NJJs7rB=ibXN5y;~evfvi2L>7Y6I=DvoAh|Npb#p0RGLarHFV zFi5v}07dU_1OpJ1u|922bS3BcOwpxD>ug9g=8C`x21un5Vhwzm^Ip(G!LQq=DNp#O z?(0obOrQg#-{=BD+URQ((jqYsiMbWNKEWD*Y0W_=pw#e33R2)R_lIwE%KLXbmlf4ktU{_w?}!oDpZNrMuga|lO~t3$B7RIGnq zIwN^N7%wFw*bA7f2Qq4KoH0n2H8Zo^pSJ=Wd7!bn40%#BMOeHxj?!3hu?!(4x&8|32 zxXQw4%t~Fu;sV}#xA5Hw+}a6LmDXv^t_Y=v{f3qpBGK54*Vu#=3n+Q#0Z0jaD}zWQ z7jD@#0RVfBxe+J-&W-QUW#4XApNBV^d71g629%;=0ozC^sAG6{{!a^S^3SfanU+@^ z7uDrSgAWG*6pyVENQ~P{Yd|h^3JJJa zYQ)#a5ZSG6SA?}G1M<2lGio_GCtAam4Vb3&-#b`Tu061*evir5sz#Mynt*F3R+W|k zE0l$}5gzoP#EZmj)PFmN8%C@~kNCwgYS7w+qAR8zREaUnc@(gNcqgcQ!%=Gx?~0Ao zEJ`MDgtR2RQt-2MMl}>lJ)?uxArAb5?22ouIC9XtcyH-)^dfOUsd6ENgQQFd&IO{@ zYQ1p~I#ll(c?~FfMb#&sIl_2W4=jU}Ol`MyK06j2@gor&^Q*<#p|K$5xWsjy$ATeP z;YAdEN(VA?Zof5HgnI}Y0&1Spi!!;2aN&Uq04Wic35Ia+lo7hF?R@}9BZJ+&c!gWG zWK2)QlQDN8Au1Wjal76YZg&(qke(9yOR~aw6WM(?n`a{eTeU7<^47awM^wX;@IIW~ zmV_b15M#4&4&G+3@{6o?bHgfA&4&roYZIv4wN_&Jy=na~Y{2Kt^ZWq~!^NG;+0Px= z5~um;tIK|(&TB(mo-AwiLG^AJNUE$2(=?Y1jeFjvX~5{e(jwx5w?38Hc0DfWUGC2h z6l6!{?Lt=|Y3B7wgqKQS^6b=J58o<9r0Y)A3#G+w%l0K0?2AYesb|e3V2U$OGsvDv zj6PdzkuyVEDq$5sP-`Vry5Sf1luIHQgcM;l_)c?Lt@VWc9zWHaBm=xK&C>n2S;iVM zK%wR(;_~)1c=rT;Z*G)gQi8*x+OJz_hM@!6Fu>EUP4#{wr-*QInxG;&$j~@N$sKz( zgV0J6OGUxfQ?mj5#jyN)W4%(((~CvQJcl^RA8!)J16qt6t4j#{om|ru(CO|?G3(>L zsLiVNhkJ(JSk;2UfftJoLsdsearxX`HrXL`ayJX=4une$$EGg*9X}I>z65V=MDj%z zMCHUvUXK`LB^j!&Wn3~#Zj$PK~Dxb}Q+o`*}N_$y>l0X{|j)}&O zMebS;%?6{AombLGl2S&}nxmSyt>}ou!-NDC2nW4tTr-fnnu+e)+FliubKmMg69_qO zIKj2<qrOjKbf6EH>Gm 
z;3dxvy7J^LW1*>$M9vF@3-IfVK@}P*o}-klgo}B2{!wQU?EbIBN()50Dp=0P6v#}U zgDddaraS;}+}M5C!BC&(Weha1sh6~X4_a1(sYT;5FTJ?M{M8fWBDuQxJfNCFl2xMq zD@J;d+`l?x8I6cl>OhT~29dr1`MB2!at=lB$I^YqL<6{_mlU>=PK%5RT`zC9>&t## z=o!p=`bmhHY0PeY#{fK#PZ%rt7L!1vL+2c11VakYHre?b6@!-UaK@|9=Q?($t5UJd zUEJHt-IKy04#J5Dn&eJDABjW#odg9s2zJu^>^~t2mx}r0_#|hl(x>L~YtBXxvIb{c zym4a#deKPcL8*>xlj&|*ehdpppL&>dpZA;G>)7aHFpKEo?>znq(D`-+*_`(=;@vc$ z7VGE*Q(MhxaIHRhoeL7R!=3FdAib2l&TUMdFSEo>Gj^h`QjMp}|BneF~^3x&v07e!cJ0HIwn} z<~@m|uZ?LGw3j;v4|QAJfUO#$|A90e%kyV=inLWENc$F(Om;(wv(1ZymESj)v2 zrRQzVXxxizk!E+1Q7dHP8g;u;Zt2ugVavywZ&iFw-+ zm?7#5kh+o5OIIbOMItFo$mzhI%Zjt_gKae3bzq6<JbFlD#*kesm1N3{;3 zaP7#}jrv2{j`8HX8mH*eZ7bNq;@z5HmzlgR3(pRpI`GEt_bzdTW>4TLkzfP8cOy<; zukN{oW%pV#M{+rIVN3S(Z0^xIUaz}R``tFQv2>Qtmqay+3~WTaiP`~e-dTC zdvtEJNjHD@`=@4Ae0rIpTfF!x%N#ZCP;nO4>=6pbwLEN#VX1KeJDK_6mv-r1XorbNS_DFv(dO0+lE2ePik z$d8+$oBc`y9%Iw{vN*qe54|RoAXIu2@-l(9mRtW?pCZ;j6qSrZile^gE+QsA#Kix^ zc&or@9FOAVx@!5*uT_?R%888W>KYk#eqfbL+}3t}84li;^4ihvje8D^k^1>;8tAmq z_4skB%i9(9c&RqB^kNSE;*L2AJVf4tJ4i-O&-pc&xwc6$Z|#n4Mfm zCO*dpTU8X<)GnG>|K?48W`(utznE=gxpLE^W<3=bfwAkAqoK}&dJl(C+S;u8UBnlK37o#;p!JdTZD4f!ak(U z(ScD-0MFCMg7!^oxi@u?))q#F*Q?Lp1Bj23X(>mav2QNriMcRK@q}4wlH;KZ*%!>? z>dSiv2iDl^csDBrkUZ>a*$Mf!rV+EYtK%u5(SkKDQ71%(3X5 zY@r#q%L!7XU z9E^8RVhoQ&G;eU0slP%$FfTylg+*;mzsx7_nZ)j#M8WsaJV23F=?Yi0nj-F*9qVE4 zPtpO$^SlK`rr~TlZlbRz>-L((V$wi4g3ywjIzjB`ta6e(29h@kE%Y(Ovb*iVE=>kg zoiD}ZOm?Py$y-;N?0ssc(`ltYBVV(HK1;i#D&DnYe$-Cpvez;o| z%`N(~T2YrdH|;IM_Fkrp9Ba5ji3d0}!waPM4>iCr(byXxJ+{>PUx3o3Am~%XM^Za- zn0n_Ha@1)CM$2_!mu%^pFVBUZl06_T`;#h{B_WrJvQg;r(8YMpq`!L$;gnODI7An}+>f<%cAaA<+=*fmOVbnh=4s;C3tyhj9<^_s7s2{1 zy!siOEc%rYgE-F~dQ)p~3d-=eV^VteI~DMXho@66+L7E2+D8F;zP=$US2So?MBdB~ zezX3_GzsZ8t%bs8d_>a>n>L1wTTuuaPY~z_Nzq(Sy`JR&lXDA55T*YfjjM29;;*9Or%y$ME5Cvd6A~X9Uy622kgJq{FGM5heP! 
zZ2Y9cCUXthi!F~hoPGVTDkxk#7;Ip z3mOBRF-1XB_>7*5ix3=b#p~apj1eV4s^hx9Mnc@98{Kknu2GA&7&PmA{6O;4?SbVU z0(wC9L)T5g1Yr2w9{i)@hhfTh#|X|)6nZ^5dxCd^7;2rnaYCjRdV|rlniiR9%ZDF* z@&-%@C77km2r|0j@7@pT}?84hq%|HH_xj&?ZbW3B>c7!`#C4uYBKMX7 zjS6tA2YU6@6VV!#B&|QwczT|E$clB#Qj1aNR{)E~4IfyupnWXPGFN1C;=EhuTJ#31 zBE%U~_PUjq92h;ZVTS;a~^5pxR*d!k}l;*9D^I zz3IZ;U|x1qo_YD(=Rivtf|MtLHFWw@by%HY=OWbeH}+KKV6fmL)M3u!uN}y-VfTlF zL`ssyW9jOyv~2jmzYds&q*(r}WWp{XqYHbY(*R{S^H*haZW~A65Aq1jYW0lCNgw5G;{M<4-@@$+PbEr-| z+syBdgn8cbY%;QX0aYj#Nh+Ygm3m^g(tLOi6g*>5jY@sYM00BlT3J^#XA5wItJ)VaT{A}r5LEV8$D_|Q5zKQa7>Hc32_cT7`ZfB}PF z=0n3zhNgOrzz8PgOX%8aO)}45%?=l;uZM%RWGqQDk1j=q232l*!Tvj*L7^Pcwc#>0 zvHx>pHg7I;lI0ctzHMb%VWOA4g*>e>E#fZW-_x5|(GIe*(cn3%?$L5ez7G??Gju`_(aM8@20LVzqt zcN2sJlmT+r3)8jeAB|r6_uoh5RwbVwAyxnAG;fLwYwI;I3?91uD&cvUhC57#NGBZv zExqvvYS7h1IHTm{Sw&sPhGW5(4G@sd>F@nhJh$%&$UF_7N| z^OU12%O{gJJns3m6(Df2XjU)9(9JFeL5O!Q^Pxzah9^aYu4BuBzme~jz!e1tB;aEc z-r5SscK;DyhMQm_Fbey*16)3BP`8@=ysY{!PS#0u%(Bt|)-1A7@yTHvp}rNKuUJ+0 zyR2uIW%2p4Sdi4i{eU}2M&&MBheAfIjc_#rIA3X3jR)<`tx(Bc*HZ1U_v{&H^u6(^ zdzw1<;u500nV_gY=Sf7L00KmGc~yZi&Zp(n&FFHQU$yAV<6WA($jtg^Km319sG`rD zgW9BkD%st{U}(LGMcWbkV)L@oyqn+n&{)6DX~bm6bTQ}xKOsW`bGqKw?bt#3deu7z ze*aB((GIObw33ktCtQqFg>qEjLoI@AAFx>%9HaSErw!L_30$)F=riMO+!qEa;8F=3 zezaDJcqgr@r5hx%MoVarsE-EP3q&UD9Io5iYBDUdwVSAiJ1?J!C^^hGMsa@3bau>m zk$1|Ys};u%Ahqfk;;AJ~(=*$()Q8o6vMgR%T#PMpOG6$gR8mE4Ojt@H+m%SKv%xYW}@~WZLfSI)7Of^6R*`7(d7itSab4173vzdXQ5) zw5AbJRQ`H~u{=s^ugdzY>d-HA8q07_ogDV3}IyVoq|mu z_j+dgV{v+slbg@Z$@i9(1W0%bA)r>by9+fB!UZ)85MYWj`^c+ySE@M8F@%TavXji_ z=Sv68ZEZ+EJgfA0UcqOof#$8^*_f#rG})Zd_armn)d;b+ns!_cn@4P%V0R3Px!KzQ zMnJj0ZNi^~!@5)ka!A(ChbxQZy}XQ&Kb1C2N*;IBRKH4L`ja=f;i4$sK12NZ9E;&) zfAXJ!Fh60lrEaGVLnIn?EGq3DL{pR|OK806fA^B%^u_gy%*5-)td?#1x~B{+ey_COGngc{;2FDDY&GP>p*OcnL*rLNZa(dn zHO7gH(Bl5>nK46c?tl9+hJuuqu25RqHI)}JN1V~oWOu_JWBBCD1g3>cgC)sdz?lKI zab@uN;0MACsy2Y!@aNlJ{TNyEN6Q%NtE3!Y&mR&sinHgrVE0kOyCP5q+P;!PPx*PP z)Tq~QMrkmAbdJM;)*NK4_9f{aj7c8JJ)g!58w#Sp-Vg0lC03dJ4<`1$Mhc{*na-sG z1+4S1L)TbAv81*2iUhTrb)tu})p_eBEnI&Qt)IK{bGlgqC?9ur^a>ZR*evf;LM}cjGu5fR0@0<`Y)T#+LEi5Uj3pVL; zwH)OV>2a+0X7S{crs#&1{jU-ei+0xIOY%1W=`V~rBRzDGi0;3X+UUJN8nuWH^h3BP zyk7th=)B`m{AA&K#P!j0CP0{RHtHh#bwg%+`@7u#BB`)nv&7a>3^<|+3` znjtd=wQ|CZU4P=_c~zO52bLRuA6PRk;SdYDRvYX{ov%Up?BNhTYM{FC@tnT_5F;cO z_yhr*Mi?WVNc-Rs56(QSAM!0ZB;ZKC5VrpB)ag;B2FMRRo|siGb_n6>y(V|WSL%%scZcaOh;1e*lLn1~YC6wWt5 zVt3@td1?3gI+DLX#%0CB-)2quTHuSs{&f4p^jG5PmM)p;SrrB?wkVTq7J2vfqfigS zyW0Y%wBiFNol86(;u2u1XJW97Q>3C(sVf}`!{C;2Zk|j(ZmO5ukyc67Zn^7I>d80! 
z3v)A1hEc!m6M1_gdwk?;NEdqTlb}jh0#_Y#h9fU74g9VEn8ZxVipW$tV($r$i-aJ> zprh&Idg2$g60kX(er28f(0iAi3&stE7{wJ|baVv^N=@GE4&lKncU^VSw}E8EBGAOO1t;1A zNXA%lH&SV(fKbg)+-3sf7%2)c17Z4bUWDV%Ek$V+!V#rrffYIezTH9gaNQ0)Y@j1^ zwok%8SGjk8v))(fT&P0(Ni5oV>kuLDSOPA%k|lL~(X08TAUFeb7Yzi+I>C7P70vWm z5=H~MYdllDYO)t0t|d`)k2PBN6l>)9Fwt`zh!^m3SJve?|1_Un@rF^W>lMsPH{Iv*>Hx7VvNA7`(@pI@`Fx>o}IZ}sE zthkIHsTGWIZeSf)2*`U9pEiY!urh?weNNz7PaH#GtKY+L;wOmjS2xd`M5#*^;V0+{ z7G`1P3%|k%5oc_wrGv>v&lw1`+L=FG-s2#>1R|(j9$Bw8ixfKY;R7p|_4Evx29EqK==NOu>f+-^AkKZlvdxdYXBw=} za;1>yu)jLF6w~{OrjUb-RWQ(P8X(0W&NdIXfMFkp`c=ti7{r}oEP<>}5tZ9>9|Z0~ zPkr0Q5i-L<$@Z(yU`-y;h!^4_Yy&&d#n^^EJ62SkpbDBeRbV)h^;rk84$1X@zvwQ0 zAU<&I9OK)F@v#evbW~E(JcWSPBsqoFwVgH>+~Q_OCGQ~@FPQ#*UV1qL1)C0^!Dm>R zGO4uC{}m9-&NoO56~H3Cv4X&hg8t<u!Mm2SDA z22S({jgiL`M-99RUJeo8N%T;*u738xwR+cHql0(lw87Z*pRFUZ&>UEpxL&pYKqRF* zm`}GBnOZF{et&V?AdkrlmMy6SzTbUB6l3;Z3@Qr3I0q zWH-U^FtuD*fKK>Jekfa`bZ{)~!7_F<*5v~P~K znhpeDe@c0gT~l?7Q_FP=dXqF#z^Z$4&SY$B>gw=7!15S5Ai~dxsb{H`wy8V=Kl`Hh z)i7z>t#q;1^wKLsL>cMIWFXVJ#Z!DU5GLeC6BgSgK-PK)N0;fvj!;^Y^cUG)_gTh2 zPGgq&R<)Qeb4#!>H3gfgV0a?W?(QDY5Dy&^pvaC$OapicgR^ zu0yF**a1-hH!_gzyF_Vm4{A9`>n%T0%C)sy07iTfeqFz$VuVgGeBqWPWxBMvn6bey zOb75_X^ujIrMslNRrB=R%M0O8UGva9f`Mw_!}o!T;@dOw_aX;fd^kca5*{UaV{^y{WC0RVvb{{Nxu&O|1o>t8Aw} zKeGGzZ9d7LP9F?vW^IM`ieb9S6_OeymSwY5>~zWkm);@=_IWRaSQ_S5v8l@CVeAc| z$&b&Fl@rkKV)XK0t!d;jEtkqEmnJPx-$ImAAOIu1{T?Y|DPd7wdK8)yKAvz@Vk40# z$9=ulF9UR42Og~C$DC0ZW1$f(lME4=bSU?VLVK=0%CLQddGkbE9)B(P-S^ z6a)-@4hRi@t3Uh&#d&tehYfg;jE*s~6p7$t8yew;W!-QP28+t0ZUvoNClR`>^f$j= z4Mt4m66#jJ=a>-iu8%l%Pp!5J>we!tI?5He&-iCl59^>XAbO3(gRY!tH*cr`zdPt< zE$<>u=5Ydqy@Hivhq1|qtK3h-!9~At+oMSzd7k}b>v4y0 z28|^UT}?qqW4u7TLM;%pkkss%J@WyiX&OREgj^0DaY!~+^_q3Q^`k^>jK9w>%0S|1 zjn0#+V80w`G(n~l);x#uS-Dza|J?4bk!d{~YFKF&tKTg_hDWUB+QFwTXG+)3Isw7e zBb~sI%~orLt)?gtSR3gH>r&XACn@Mhx?9?%&`3hj)3pL0|JRqyd4@(EED(AGSEydz z<@N7(f-PNUV4Jvb1QDd?mPBqf_i?h0L^OKO8^bX8I<0b+IXAti&+l{J6sC|RQ&66V z@c^cEA!?^j|0%WmmfOi;NS@gZZ06EsOr@Qi>!knXGMLu^`9MV{({dqkeq^9hESTJ2 z@TP#EJ$<@Ex>bLpmD;1F9C?J-dj|71Rl0rfg$Vg6J-mU$sLBUqvod1Gmi&>XlAEzh zI@7G)aT5<1tVe$&{*7Zgw4R1wr*)I8i~hu2^&aF-MxV+w61AsMXOpdlG7^_x1g4tU z?z8~+gcc7$$p=~+6ETt;e)Q#9+f{P~x`uMDaLLz4J)uIg8cq|9>kJRyiCQfi{{*w~ z>nocHWu1B(1IxP0zHOny?>Rb zI-2aFG@pW`j4yb~c1beYMVBAbJLCNY5FztcSCKrZm(4XM2 zby11?fbr-+XN2wnkrlz{%WR%P$gkP3^Jv(Swn%Uno8OO3LW#xe|28xtmh-1g2sX%p zz0!pAuF83IvckX9WBUs!o+JG;{w=|=K(NW)*E#|EmJp4j%i6{fl3XAbl3MN?1L?OvO@F5N$O4H;yL$%V&T2*5 z-L^F!EE5@S((nFvwtWl&+jcn4SVR{-OFswvi~R{B7Kn6_iFt3-jA^m8oXN_gL5kQY=uF zPf4qpgjzUEkJmMX{+81~{`dcwkJhFwZN@vyr~`N{LJ=f+a^vXnn(6lJ!Xr_R2Vzl8rCxVur9oxzo@8?!p8Q9Ef`!PyOT1+lQTYBEs;5KB>BE~Lpip_2? 
zz^2oM9m`AN>3GA@*5+GJ^`UGUx7N5rHTWuBvWn(^Ks`r&*7MU->zt(>GT!~!wl{wi zkv~$eRAT32$s5oVH&C@^DxvuCFdhOARHTrU&I;r>a^yQZ;+7?rz_JjvKBf8uW_kV- zleO5^=+MO>!n8=y7C44{jLgGU&cF6tRZmlUDoXzV~YfTaqv^( zo!~wziovx$(6^@{JVTJ&UY?POi57p7B!W2axaVUJ=$rYp|J{-6m3?!AfzL;K+I&-B>_3{?wgoR%QHX8PJ z>Y!ae0mP5=H&PL(v8SJ3Q!+fFpKzxOL@74Vz#REX9K@o(qFE`BI}uxdVQn<|n~|w+ z`=!Qi1xkkI7Q7jBB+fIF!JG>6vHT`g%Ou9Y)^{>)$~|Q7-3ZcA7b}jo7bLSnpdN z%E$sIH*fnKg>!tSPgX_?J>EnfE73mG6;X~+mgZr`yLw^;S5AK4U-{6e7%3i|DY%p5 zQOd|(AD3m`VZz68{xw?Y(0)FSxCdUOvwh1_o&Fvsa8#%@IIuQ^x442zwt>sClnr{% zkJr?owou*jY-X#nv4^va{9L<0v2^j&GVhps^XOJ`dI(75ppzXJy!jJWCFCd3nSsIWVcQ8@pk$6fiuNKFl4o<#W^h z#Lmq+Th`C_gBHhnuRl~QEGBfsHifA$m6(aZrI{xPN6nRPU(VPbG_Qc?Vb_2<~HQ~PHHL-JZZ+O zGMGkLL#;b~jtFjYcA6i)OV3C3J5Lw$vtAi-6j>z>1|@7tuF?#D0e@tTK(SlN{*y2Q zd=1%8(cvj$m?(jmfs*<~MexNCuk^i~ka0N=MFkqOIoYb}XI(z-D{V6CI9M)fBnH?z zQ>;fgy_=Naq<{liff9#`Zq`&`N}#Woy;K`Sa z>>7K3A5O=4Rq)aWA??CWgM!X`s>@o}K9Lwv%8U_@V{ZjLr{Xsscw>TxqKwg1cGx|o zs1P}dTVWGJU(J=VBThgC$Fxn93IQx^HPi?oO+Nxi;H5)SR|lt)>yRm3zD(e)bG~NC z*fbpp#%B7OE|Vbj5 zCSi59t;V1JF&{F2ywczr;M)e{X$>c3M9Isub0ZlU zV$rbH+%GzpKKDsvHBw)FRa#8j{}e=*e>hI77`@0wp~KuN&Amd~dkaS4xeD7Cq{it~ znYM3kYRWyOB?UfI&f4>C2jUVJaETlg3wbFB0ExhLnsWx>+%<0V=O;pv8na&nGwoMv3mCvHYu6Zi!c!dWauJDT9Kv$kjUx=vzuLI z>Rw+tB%c+$V_hA?G%M+wsdqCtp>}hh7$(G#n63sEZ!yMKW!XV-RixS1>O`0~4s6Rq zX$b!cOQyQZ_}U|~QhsuLc7s`(QZUew8Rb;mv?Sd5GxaZ zlv+b?Wb*Z34a}o8wyHzkQ)~}%vncM^3@qp)Z5}|ZR$!?M*n6O)unWJ8P!YDV55^*S zzOu(o2UbR97RN_huxv(FXwNft90pJ=I-GOBaN(HZV|OPRtvkXH{;XOU*|@%!J=CmZ zEHbE7t`j0Qt(Ck=F(MXIqr6EBovjhJ=jcx~~rSNcuE$o8uEnukYs z`bJ|@;{%$iAgH5=H|Zn-n4OdD$#WFYT0>hzkvZ~4$W9ZS00jF)1v z#1_mcwhG9ki89ir4lq{U4xLS7;Wf6*ZFsJiM z00f8#QC-%Qu%g}^C)vbd#Hy>|7l1{+3MBOa7SHSfdE~cM?`@XZt4lZD#%YjX{ntSW^Yk*?x2Z)N??gY1H_HaN(pRGz*f^ z$(GK)5r2BdS8Rx0GCxMFY#Q2>_A4oB6D|(WzW16UQf=sLCG=x1CjH|-S%IN3LA>3G z?Grn-hm}m{E29z-I5QY03WQmJ+oFA#fyCMh-qo;s?ookh;^~eNm?AM*TbhI%EpirM z%8WWX^PbupRta>h_dK@-SalX!lxvnV)$j+yjBvB@(LCUo?z2l-u zEvtNPxuPc|s2giG`o%cp8=}v&EnbDmV+JE7TQ^xQ`9y1K$!XMep~ilM(A%+-c-{|W zljwmCxcN;B6h{V_G6mM}GLjufaHZb9cqN8}+Y?p~7>47`0&00eJ{3!_nu4XmaLFs8 zXeyDkC~&_cl%aWm8L&6i68#yB%=Ga?=SRqWFlxYG-rIzP?%bf_4YT=R;%+`j>|x)=?GM+ zW~9P%FDHzU6|xM!m*^|KkX$QkaI{bif80k^BVUPDhaLLq3==uT8n3^lBP{<###U>f zDBOZ_s_49vzYyli(w>H~2MV3mC8R23(jH$Vz`)dZs7@MdU&gNE$QN`tkoi8zoTxd7 z1nGj!Epi&mgO*5wL5vgZ3#_csIVesLc%Ymlx|l{d+OTKhD9dW ztPAN49IjAgmFfQy^1Hb_)Tix(t8P}5DQ5lLqQxaC7m$!QANIGcyf_>(Pzn? zw5sLJ!4KAeOMS5%^bUp_`1;v8xd1I*rj_&f*V+5>Ievfjd%Ar{@_#DsBDh&l(!g#~ zGIH4#m9ENCr^gQcfcr+>8cmw0hMRnB|}9;DmVp{jS~X-X234Jnp(P^e(kr6jHH^mddM z0%M*OWx$AZ$G|3v>iYIFFK@`Ht&@I*D?VCa2^oNNg&y$(+OeT%CB!0$$xUfG_x`&W zfam>PN>8Yjaz$y0*Bn79URC-T^UNj6=BcePB7m7^@UB@yj`@YjT`JoSI6o}U@20D?V4XQHJUSVOBVuWiF@&-@gkfl9hOMgai2{# zja*%w&|TD*uG`tHJ%VqWucAD<$IZ5~@v^6KhR#z?FS%jT-!^Ob5%`>Pks=6OnI?X# zzvumHZJ!OmB>{hq$|kIbnWMzMkyiMgftJGq-UfX`=c_xUz_C7qd8hzrOj;p_?|zk+ z?HQ7&$=0bn!I4T1D*{O9-e#1cY!y>^($O1+`B{4>FCHTpV2H5I*ni5dpr z?8#~!vAA>V3hflk7!UN2Y61mQ-S@?i{q*Pd)2bR0pw1wY9_Q`n)E(u-zhI08^Z&Zo zkzAh=5c_Gu7^7YzZhn?JLZAb~jb-7fF^bX_xcZVD)LB{XpH|~!UYnJ0!fLEC+`>XS z*PwpxKOWW`rgHAY8Z-Fy$h;xwBx|7+6grpCO%|Hr-lK`N^*v>#l6-o-0>Uhs))c)1 zsi!5NB}!^Uw8OrhDR8Dt{Q&P?vAKGgpH(`DaU(ByOigSgi$Dhk`zP1`EAEy&4~~jz z>)FpW#<~Puax+-IPmPUKi@J%bjZ=_rr}Hc2;#%`mjFEjzD#uZ0egD_#k)2xh7MH4? 
zW+@qEZeQ|L3#WEARg9~6%!w#cf+p^{3}6pkUS9rFhCKo39l2vjWyEc|wm+#QxtNk( zD}!?e3Pk8U$smyJw`E=1(sXUZ=|DQ9i?S7&%>XP8W{)T$kO>azOvQl9t8KPAZe#~^ zSbpx&iJ}+MfXw;HFV!xXUH<-n0=ad=cbZpH9EiAU8G{2n$WSm8=JPoch?QoxFHjqo zok;I%L?@_OYlePSfv>J?<0v!()V$94kek*BCFE=F4gQMZjIN}aEwVfbP)ma#Mtzm; zhkv5Up<3y*h8f@ij)gp9@fddy{D)$Wu}Jz)@$tj>m0=EFE7G*WQJI@$%uOAk_f2pxknvdLLp~@!|kxG_+_$vZ0kAC3j!9E4oSCbO$za8 zOJpUkW0!u%?5s$8tsFd7Zi>CX{mO7KSCCESe0gb7K>%V~Xe@y!VkL zGC46kvVYTGGFANEmE*nF3Wsa|5q>bMmMks&$;wdPsvW8HjNBXlt5Pe{$p$DhgClTM zrb@Geqc(->j#`iPvFiieAHHbbO*NLx?#HM}?6_hV-gCDULze;VQC(UKKRCvnKUUuD z+e5lQ$Hs&lo+OIq?<(g560;yNzgZ7m4AtP(xIL-OMwYNW+ zw^OA;i(|{y|F!+$x2%A)P_Pxg=8)w zNYPDf=(e1lSBG4;Iy#7;S3fsQ;{rX1_#~(~iXlW0dgx7Zqrru%QmVp>D8kH66yrqf zDH)X5Xqey#eel$pXy3VZ8!vH<=h6g{M2e5ogNm}69#{BvB*E0)B2$sRpvW7@e7wyW z@{-}c6JbyG&3U67@L4qh%-R9;8p1G=^$?*OrD(OED**gGU{s07TYvas3#Y0{oK{sn z0$kY@N4oo$6=`9~M7M2Ywu%9=!kzUi2M*Z_Jd>bdq}1ZX2J%xK&`qyRk^%TQErxbC zoP3FZg=EpKcuQObf^WGBb9)>b_tO!B)7H+{15v6?fhpO33Dc8tJ5$_Y_+l;YF`7uc z+6Sr~nv|FaU+1O$?>V&6!TkXi&~B4o*H5td$q#u$DBmzA4H<_GTyyaSxv1(_iFG59 z4n{qO4%48+F!x>~~VUyd(f zGA=#7g5!A;N9XZeV?-33N6;2WN^Wcvm6iI3X(Aq-sdKk{sog=4;GKuHoyk}Ogs>U) zSWv&r)c%k^VFG}@@`)zpbL&dj6A}649fv54VH=3L8t8gbO*B_b!z(=ojK{Z7_oOsW z=dg&q7}fXkb8byRIF1~a9kC9sypp5s%UJ6gaZnZl(2@q6+*601gkMm>sl7c=MPJ-O z+4s9gepp8a_O4M2TxdRltf5!tDmE!M8xPO(=n;Pti1bvs zzM1^9ntu@Dzy6D*uYjGK#2F(MQEbhc?A{SSfp{vJ#J?F-0R9@Qw{Xc9#P%>DR*qqH zN3tPc-YY0zHG-d+GlS-`eKMv80v|HegLlaE6St#)XUeJ?KY-FH#=h?|$mE8_f^o$d zVS8DT%~h1tJWS(f^z zh^vCw=1V~+Su`~}%~~>WH|g-fEsSGwXj-j? zwh_q(I&K)|xVd#1+Jio#$1YDG+CuglkKa=boUXDs2F%v6Vcsb`Ud0rI#*dFGhUA-K zDuu63p`${+Rt}kYm7mLxiiKiDbBQf<;4Hl3sVUYn%cT3TkIL+N2t}y;6})w=+mwSf z@f!K*OZa{gbR#mSMPzb}?z3NuXKK|rYozG^;1um#(UyM?!?Q$t@0WYLdj9mQE zB3YKE9DQ%+$e`P)4hp9ud8PjsC0OqK+?%pE(ev7Za$RCDghAkl`Xa5OHzk$QOo3nTiO7vxTI>gHD z^ark?>pY*2lAhD8LEBy7puBpI9ceE zSg(c~SwzaTCVBh%u5fOgq|;Okj%ZRY#ZFMBlv3$+zVDq&6lnlrHE5hU{9OHi4fb7i zuAIfqF4 z7+bQhy_c#!2hQoHL-{ag|6`C{pQM8FT0@By-IIX<9Y$?}a)~_{5d003G z==1g!zMrNyr98}(#H5t(aX&=m6i7iFSsUn@*0d&hO5dn1B4a4QfCoAef0KmIeAWYM zLd$b!OwA*0DL+?y&?e>Q^QNB#sd`Q&YEfh1LZMcnaGulX-bh9zB+T$C*rE3)sWEgx&WV>1Z)@NRkRHfP-fC-c?z8|t^cl_CF&ZJs z)vX%Y^XZQg(eFwjzI!o-gj=Akk`9iYE7i8=z)cin9N6 z>$a!XxE1*fJz|V~I2=!>OxqSiSQE71C}4LLDC@(E+H<Zcg>o7s>n@G0K_)LG4k<~RykhAB?fI>8v3rq z=8s>)NJTAU!$>i+Yo@dZVK(&0ROv$xN>@Q*P>AmKc#;x(Fb)XB^zhL~JPm~|zXjuH zEYjO1RG~ryFMtuzo!0VlC|zr^gI+WV>4Ao)J|Sw`a{uk(hk7nf`~1IGQFNWnZmy@Rng?{r%3$!iT81lV1O7C!EN{;>C< z;hANEv2y+#mZRH>Li9g|4T*Re2z1q+Sv6bPbx8H%;}U`_bBYpxa*i@tH( zJy8X9#`?&yjEfSUnp-s_UUPHq$}T#I zn@pRID(Z^v){;+~!V~biNe=3I=ASN-hs|%oMWNvthQ)jkCBGgoqMf%A#Ko-G z2sjKCnZ&*Pq`MS~ZwCxJ5h_IF_zlK<_vQ#UO~R;PsAcgx!|!Hul7n5wF}b zo}H=e1pQyu8dk#+TFl&);IiO}PTO3%@8*e|cCjahHbDS^v2`KdrSGfR_A}5jO%mSr zT?ndp*TI8IdIlr_bfn>1xQ%7v8BM_{Ns|dV877nQJNj-{P>*VfAC4G)C1HWm8j(QX z!6j^|9;B((U@VFF(}vG_|Go23nU9&8$G?qW=j}ZgY|sb*ilo)Me%*b=zFDk?oSM1GC)HueyM%rCb z1AtB6a(aUV;zuJ?01h)#5=u=-IkrKesH)rpYQbm_21mIPcGC1~%&WBcf}2blA>&Pc zDwK>2h`U`hIr5BbWQm1{@ruQwH>I9(iikUB`mmogb4kui$I;Z=JvhiDgU{=#2S8QR zAUx6-E_U?8^RW`5x9vHF5tmk0CB(v(b4H&XWDh0~k?_^85nK@3s2L;Sh=t6;8KA_O z_6FDw;p>4XcbZ@yLYBryU?1hz0=ijfAZp|quB@MZ&K&tcI2ZSdBU!IMtA@<GCaK+hDToBd4p|2mk=ZY94KnhFm1-&5jy>XF(=k_N7YMV8KDg}I6xpdlY zm>-rS+=qDIGd9AxJcb@tm4#;I26a5lpN@?602(dD*D`dsZ60$!&=VTNx%;j-1 zXmCJ(xtH_KdQ|G(gI6v%b%}`&R8rgQ$=1^Lt*I%yxJL}!l!+vF$l|$Tr>M3hRz-^% zLMpZTiGy}dX)&Oa2P=rl2xIpdgo8Y<5#;I42JN`j>ZaWT+M^SAdlv-X+!f+G6kXxr zE`G@zz@eeO+1?QlpzzKn$r${KoS4F1IcA$ZjsqaPv6;l)Fk7F(Z?l)B^kvC%Y!G5H z_S6SYwgCS?`@YAstob*uu5JD(IV|S}yV)i*5(^BacIegYk! 
zr1Y-^RD2+IWsds@2G0)07!??etG07k5Q@wCK15e(Whz1_12p*(5`m1q%`Oq-i4Vmq zSK7pgH;Cu38z%O&a#OmXzAKq~ZYe^G>E;#>kCg6LB@=$fkV%tV%Wt8I;6fn{XZR&N&&>XVZyPxj30l?$ZB#&VsWVwJtd~3HT@GNQCO?HWVpIbKg;NcK;n$ zO!K_3+MEXsW}X3cl>(NYjxr?zXXok=h381G0BZckBKXF;fL`%Cu!y>siI6)+mq;;v zDtnBU-An?R5Im-NJ2(qotm@Gsioj`#Z|6RXV5aSI@)b_{J7`e-`KLxB(A8~ezg9jV zIvz_GYZMk=KpZx=SxAp9ji;U+Q2$Jpv#I=|ne%?-Mj9nS>%HxC3I%mokG*^)0!r4V z-k(>RrM3;UJ8k-q6oBwM2F6Zj;}Ll-MlxOqZ7iA9LAHmRmR95b&lS57Zu;8s_#I4= z%$=W)8-LUAzcSRVr+p;%SMyYu^cq}Ve0SX<6!JYE$a{i{-4jX^1^iat80x#lFZMO< zH4F1+g9DcPp)#U$L@5NiV${QrDDhuUH%ab0Pc;y5%F=%o)0pPpjTWk}`@utI+dr&r z+-5=}ZGWGJQUyqSW-=as{0>S%dc6z67V7bO5Sm@X)wYP{d{|;&jOm}H9_0exDoo;l zz_**?`*^wWivsF)U8XXByAJJCL zyG%P(juZEjOuGif#iZmB*z4k=m)i6SXS6*5vR zSAx#HC)*v{IfY&1K6^TLemp?+VKhktO~BvGX3j+a*g$=Ob$|f?!-#5t)=H6!LS8^C zQ~ik4*m;pnwqu+8B}nCAMze|jPL|GNWG&jgt^ON~yN0G;2Faqf9w-6$Z3uu>=53$2 z*#&Oj4w^73Cm~}8Z@tmQP5cY_{hI-75M3@;vQiY3L}eruT`{j4=rS^!>!=QG2s8Z5 zm!H(?Ydu`;3K$PNC(#z7zdP{`z*nKVlYqu<89?!7$IbS=Rrz3AKfQr|iv;y?8pLAb z6`F!s3-RqjOAmFOCVc_upAO@dX90GS$YVnPNZp;!uKeA^q!v-BDK9B-+}7Lm9VmeN z3Xhry$az#p(~OHPM&z2h*~T0!Z`4tHdsM+6H`3T|IoI$aZf!DoBT<=>Mp38D?Q2l+ z`DlNsKl?RAdax|6J1Ks!05C9QV}K$6000000Rf)KMgR4%CB`4PyQ@t|wI0c&LaNK< zioZ&T2&TxrZs=%iUZmz*0$HE3zFx+ZS9p!Nw$iRiRPM04o-T2K zDkg`yg9D>Y4U`JY`yJyOU{~vkh=pDmnG#nwJ!P=|{HtN*f5I$aP4|xasbC_18gL8C zh=WhMqjn7G8~I=km+|X^K4Uu1cZpLL&gS5z!nTh8;_$ex3frj=s~7tmu70y96^khw zDY<{XZ{FY&V|07+gLUBy%^r`A#$V);?@Bf?R0bTD>r>dZ(XVeIB2%NEeU~{kkthQ! zA%KN_(Tf4Q__IvxANR%bZLvF&DgYcg12@sLZdy|f<$YF_I!J7n;A`c8?4(>c)-*^5 zGvW}?EB|#RprW&sJW04R%g4t(iH=>MMa{&P*cumV0zjF~_V5iOPjflGK_V~ER3N%` zahgjm;tSv26I&3e%-j&Tf02R#J0g%YREbI<@xNS>n;~ zmMAoqvpftH*pq01eN!VAP$ovpeN?=uS3=ywHe*+nXI$&b#l^Cb!5(@(Lz-Yuf5`_n z=;uN{p2;3+gE|A#gIuK|8Fhvt=HTs^s9UZ zp-5+}sd)cXvSf4M-Vq2WF$2H9 zkx&6QXNWNXfTnSO!{;KKMo1B;GV`ETqv8GuHD@yE@M6okfkQy*k#TpzVo3G2m`|SQE%H{3r`Q{$&<^%+TQYCy5n;p zyjmb#^9XHfLC6B(cPO}?O98Jujhd;;I_C}LLfwoOFPiS8resw_AdAdRo}oEJ@@xTj zS*5({ZEe6L?=6uaP366L5Mz!J=15%qWtKX6(U1rl3h=<=IsQXEqT#?-Py+W`KyD@t zj^LurA(-4T!Cua5h4U}4vxC{!_xM;FpFaii&ZT(5e&v>4E_|0i)j zg@ZEAx=2|E6w7sMII@<@R#7A{*q^goQEh7L5wzcndBo%FLC_qUI8xI89~XAA(*0LH z|Bo=FMFZE47dSdRec?sTUWZdaLj;Uu!T19p(u;wda-o}1y8pT53+(Q`Yuqo5ZuziI z?WRvN0Ol;{3+qocCM(Pb7CZ}{Io#J1oN)<0w1fRTgbt|}O0j<{?6Fv=OMV6aJUuiD zrL{Obpj0<*>bzVMfrTg}3HFN2Bj$-k2Ad2rS*6niaawQ@F6EJJ4jF>6Uaexc26zR_ ze=}Uz=BIj~G$3Cjdo1XS%T3c0`MBBkhbD;v4ull|1TB))sUlvXq6uZm>IfH-*HB9M z44yR>n?YJQG;PLf5(-+HyE_SDo&JpY?b76{-T*CRYu=!9R{h8b^OpM4HQcFJ1G!aUti%#SDc1!HoZ$vKFL(QC3CM#Y9sch+9INg)s~XBV67rx(&BV^v?@)v%0Jo36SW^Vx%T?*IO$$a}=edMUu+BQu8&TcV|BD z^IxWws#}t~?T&0PN|6kt>Hz14mx#m<_)Z7&&H~w-g)0j>Ps?gUQ|#h$<{eiyZ%f6< z5qv4{F(3+OCZBDqpc4=B()9#2qgLYJz1-iVcY|9BB?t787Ba}*Wdme~#B!QfkPQD- zK=Bh72p!Ce_P2K|;PihiwwMgjl1{8FmC?iq8|s#%KS*)$s9q}S27WEeBqWC>oR9*7 zxf!0=zpg_*d0wd)`W1Fmay6BjCAL+$C;9+TK(D`Q7@o(|c0v-A=TZVa>_5!doFY$@ zRg#KF`7@=YveFyRy&$sUc)M?VoB`yMS%GXn)TxE%K2awLeCd?-8TW83~!PHKTcDaZs zOl`#b7|EZ+ATds`rkTkUz-82dmlStInPC%dq|&mE97!zV29zbfJ;={XTgZ(!;U@}IjjN+*vL$!}gOV)&Q8Tul)FleX6m4er(F zZsj^tJRV}Om~<9yp3kguHCdgw2xu7L`xe<#Z>aI$p(e*90e==Do-~kihwqUAs^T4X ze7~TE(ABdJXerhK(uBtJhWazdM%y5k zcdr2S=FSb#UwVr6pcs+G@A--4(7khk;cI>bPByClqQDXIDEgTGljka%EPv51NNIm6 zNg-2h(JXK5VEsRovaI_T_%v*nKzr8ka{e2t6Avn$<6til0|*r~7=F9C-s zS%bTslo(@q{Ep_p)qRwSNlqt!G4?h&ml#Ekc&DjDia&(q^Pq^XE(30o?*pjH8#~qt z!ASXO#|gVJ$!TJ11W|%{{+ICtE84NuCw_S-wbk{J=dKlcB8S&j;1)HY)b(@fk8rpq zET3o%eS|3Gdvb3$yKE4H5Ze$VdTQw#UWZ(hFfQ$B#3@GT#?(c2^Btm?tqR-ca5@8} zCJq@`RTO!oaosnyR0k=BmPyD_A^~E5 zx?@Zq+01P~k0U+)Z4dw8Q*%ml1a+g8Z%peZ0BPM$ZyD&Gf$aexW(p=FRPB 
zZIE2`M}I&{(x}jV6RYQqM%=-_NwRE0__@?q%><7E=s4hpts)Q15>IC81PZhgW~U;Wuc%Poz3RouKQ=&-SHmzqpfk)PaYZIO zie>iKr^R$3*3@%I`I1(DsgS(SBLJ_9jUWE7-*MY6KFHv_s~Cp2X0TI@oO8uQ44`QP zA^a=!o(TAlK{LZsBKQeXZ_BjWQs%7_3FXD`B>9`pvt2ukGIa&Z#2qUKvZkGs{| zlsn7fk&yOkc)DC{I;r{EKi6M)x#R4I2q;QnUdE5L%1E2jQxOGj!d$Jsn_KC;bWI-R zaia*&Q1U5j3!k(Mjob`RS=dEg8A7EU!LeV@5_w-`q~)a_ybJZLy5y5v4E;CZj;A9* zxp1gkn5F^WCSttL7%!>{W?k{;XC*|X+-pqANO!*vp$2m~2YgM=t^H9G09&Q@V??jO z@itllix?NEGKJncHw@ZvfBQBrxe;-|a>XFRenzbp3{?I5{ z7}*y2G^^iPpPdIe5g=~j49$!`g^rN0YCxObok3{b=EQ3on0}kJ4GXgm>|AEid<`WU z>lt1uF%AI9IO^^9I=ajpzF9Y!mFxJ|eUp!NJllm#f;2!kl!~4TtZfqWH z*a7dOlZdo6q-hTI6YIomiw*Xdg+UJ(F+YviveEpF$ zbO_y< z^`P5}Eqw!qc$PN{A>y6vqnGo8 z!kjIcqoyrAGq%7~u$up!H}(H6u!cpL6sCUB%6{;({MxF&dG?L!r2e$XHhY z&3zX#h$cq>VZf;Rh!uoso6zE!FI@B6?AktiTfvY^Pal<^AEuhn)dc}82}HdHM!M#? zvsNUmD*+oDH-U^lyD#VYjRFJ%i6jK2au7At$|FM2upa>5fNKIgbA+h@m(I9UB2;B0 zYil;CdEnG`ZJl)r5No`!W)zqf#2!)q&8nuML#VCSVSKqmCvLtZP-dlglf%V5^MwH! z}lQ;K)MI;fo@;i ztk9tGkiYY=mD9MG#t-I};r(hV9t1j+yiC5!1M3bGPkwhGL62C9cxyzs;7qd07VK5k|@S`+^7&O0G&vh`ksmtlqCR>{_2K_E~HH z3lVS)4cV857Pd;+>a~AGmT!IUr1s!cyD$iW$}0#uy8NV`o&*9F0Q!9ueIbf&f>rKG zfBQ{5@7SPSh8K1*i_1)CbiWu-%(@x;mn&n3V^myTuq8cD`r-)gH}q6wZo~qEBZ?g! zTSqI?ZSA!84V^1+JChuu)=YTNFVzt$8P)0H4JTOnl3k-j!xf{v+dHZ>oDa()@Ry8* zfbm(ZaXs07kk~fS?*j&Va@38bJ>LQ3eAXKtszA@1K;eb@i07s6_qp|odVAza(hlgr zVt!rw83>VI_cO}qUSA2RNX~<85G0&Vnt=Xkv;1|Q^E&G#sAIDC!lEq*CUJ!Ms!Q;R zNEze!g-Bc;56K|uQRYl8#2d8BlwRyHZ$s=ZDR^ED#~**g;RfS8-RAzeh6 zyyoUUZxJTLc-K~2_3~dp%gHEJ(~ZxU@)#XQK}$i+opZ$c4a<(y9LXqTUgs4^g7wEz z-&0=EaN41b?+s9Nc`35`{l+qG6Z~Cu0mqK%N`k_w<&x~-W5|9GB8k|th<(Vk z*X%de+2C>yF7D6p#!dfpTSVrFU5W4AC({ebtZ~3V5IkSLILq9S15{IEiaJ(FSC~3g zu$9KOH6p3y3fA=j`x79L2zAS-XV%5Y6Mn^G$CV=cB2Y>t_f-;*xx$MK1Z#u^8CKLz zz2X>WS?6a>+eorFB9F^*)IthVI*chXZBQ$itK~QLK%vGa2Jg_U-B-WznV+i2XpR~H z@xpw zVbnIYP<|@?He@;W<9QI$3Q@C-b&O6D1%_3=DCN~D?%ZiBWcJs9=0fkq>5eI~ocD7y{raVB3i90F92 z%Yy&1Mv4$Z4>{h82!bfN@@4%0ME^@!O68xX4Q7CfD0 z7D-0hWLA#F#!^As1w3r3ix(V>BL>``^Azs$t557ir15GE;fHUCfIqI88J_;zIEH5R$uHG#;o_8r!qnW_XVyPs>Hr054-(IfV$AcmlxD6I?H{v@ zdnLV?V*Qth__gUek5NKnSMV~XoX3p+G*bJCxM&nOrbg_bh7A${$R+g4kN+EhVMgH7*$??$$&~ZrKZ9sj>;^hQ2heHjkxwfr+Mwq7Ox3I zX@mpyq#!jXIHI~=gPJWN39`Y%igKN_-m5eVI22=q=l(?YI0&^t{&EKQ!S2(N^KoxB zcKpZbr^2fz_Yi@#45Y@%6)x0Z^(D zsRJR}j`?|Lg}foNdzb`s^483%2DQIm&R!vU6|`#H$t3D+o1X`H3EkMe(z=eSu%3Gm zCZ>eDqFvI%gSTDxBd-2+F)4>#&F0GQ&Dbj?Ty~`(i9(5|NO^w6X5h4FKO*-1YjQk4FU0z#^&y&s^8~8cp z+yUu)_jB!ui4}?=?(1j}caTu8JjGz!6B)8?WKm!zV$Nyk6A!7lchg_u&7M3U6o9I~ zHLd;YdDi01>?2g)#(Gf(ur(B*SJ$SJYAF&b(~#})A?TQtb+Ct;DmQYP2zZc!sJM|* zD23MqLI)H3S&K?fgc&Za4Yfqq6lCWD#tWubBRc||LFff#f%sa`fA3uUuUKoF*UhN(^1|x5_n%7>h zg6&G^HiBMt(Ns{)uIxYI*oHoBwY1bxb6{6xVmStre2zuKB`iBS$BfFrnUeazUdEck zsaog=&5Ih^#-Kdtyd>0#$BpJ-qCK*(XNevGQxZ@Aa-yQ#@qhMSJ=GuQh$r7?=|&V4 zh8`tsqlmu4B|-RjB3?+TT3&Ol=M+Ws+Ezra#*z25D-)~y`f=d&u#0_u4~efe1yXme zA4H>VuXxRu{W7ocr0`b~*lks?(6GQX)3Hag=&>k$1K0ur9qg6yQ)B$2tD_>PuuU@| zdS%rSK}(U7{Pq&s=M-lyla0zZ`QD4ypY%y{EN6)DA=6;>~RBzEeGgjkF84paA4!gC(iK@Zctv;Qc0+E8sVipXF1GQWGEWFD!9Wf7Ho3#T zNRB`14@qUEC-+eP^8RinbmyiTxiF}p7Cw|eY z$`p#0b|OK?PXH>sTvT^!^Xy_QO9I2g`zI2oWh7(>Y6+7~-;yF0?>W6=!E zf2$K)T}2((zX81Ppmc#gQ7C0W^$O>U!E8ii=JN9Wm&89xMA&2COd|Cjb4IC6k)x8F z5FF@^z#8?Hn5W#1F;mc3#)U;MLUJjmHo{mGdi~443_7PbbqqiO;j~u{4)u)t68^%>!=n1zcH5OS9UWcsE9;cZH2xP7OcP5`+I^1_{Niqbpl8rq ze}=17qVqT*|1dh8q~mTUvUR_N9>L0s5uwW^eePP3h==xHsQgt6em?IfS1h3q(lPar z%&{7CJQMz{B;t%JoRoX+C%au13A*G2grks4t{oJ}N}ZU(`tSnhN7+f^DVC|zz>fCg z1t=+1-f>rZp)k$fd&ol2LPpuoBu0Xyj_?J7X`Zv4YyyCt($3OQO0yYs)T3GP^1 zem9KE{)Xm%2hwS8E)3n=2Y#i5R=q(Aq`LvBrjUd$Hqs{{?!Y_%omC9P@--ZVLX^P0ovAJKj73|fDn>j_AQs^HTqgiC`F@m 
z5GIPnTe9}h@PF%c^`$G{%{c+_jBt|KRHZKqb^V?C03C$&l@=ZD^NK?BA_!;I?UgK( zyHevt%054hQT87!K@CbJ+bfR{hKB9={IJw-xTZUDi-yc9GJ5$y8M*h6VyW>&2SzRVgWvGLBMSTJ}a>pVLz&uVS%S433} z3%y5lLGyh;vKO327pKHx#FfKx4OFtGXRKWu40&QhZWatVyLtj<@QAYLjA{Yc`3Sja ztm?N1+Auhnd${B|QT}tjPv@UkXc1mgnPE=1qf7XoLd+myXkM-S7!XlJV|OAKb;jg4 zRhyr%OP_H6M`j53(V21h>>q89t2ZS2@IgXQvx_}1p%~JJNuuxpcdo0pE29&1xRr&R z%AQm+_qe3?nTpK7*R(6wfc0^W)Uw9@2G8DjLy}<(iREdIfK*6lM?<@42mm$xqd`JO zg4J-`j^O$8FkO1(Pz+ssTWt4H5*q$@#*FcE4eD;=j zfC2lpN+*Hwu$Okr$BqiPkV+n8uvzfSan*y{hi??Mh#Fsg!>}Uk_lVbFAoO*89uS;9 zyy3GJUOJU5bMP_}H=Yjjy?jw>!xnnOGAA=aQeIg!ah1cA} z=HuhD-u?Lf6>_$5X30=_U8Amo!VrF z%~=ZW>M<>I=?k-yL#mk&?0xWcW+|0WtbK&=O^c~YvSHF;=YXVYso8M(ImWFZIq%R( zSE;Zu7&r7Rc{fScAr5~})iYknU3Hau*rA^{$&aOfuz;x!-Ra36Y;k!;JEk&C68)H` z4q>uWJE0+`Nbx)M)VjLc{JJk@)$;V-@of-<0$7k7i6jnmy!L56C3t~gca8uHCvF#Z z$@v9qxS8$0Dis1Al)$h~pW=YMd5E>&^9?w5QL=W>t24_$$ks5U?`^_dbMNC#I}M2C zU5wH$w zh@21jRfBkddB<;BEq@g=_sxr0gOueTc@$bEb5}hON+W&uvryhHtB4-U_j^#*X0 zF*Jpn85y*gX1Mg#eL&^_JduU#MZAGK(>&gB_PH~wqeienZlVPon7w;s1618o-S&&x z%}n9QwWDzSRKv51G@4sXR6&d0{)O`$XIAZJm9)HqlJN3>$xaBSf71$5ql-{=6(3zs zC;0OB!KNr}fGvrYIcS2ll(8H!aa*(?uw1z36r*LGGq^=7P8VGNGOB9KvZ6zbZk$T& z(>==3(EMNGiXHgE0gXTJR4r|}WnUP{qSCCY4MYv#qp*l9AEuBLQN7~kY1222!vIFg zv(_!oSBmX=tTqjOyA>g4IaUMl3j0lh0`6a_Hq^hl&yk!Nnrq6%ccapVakRwq-F+*4 zkgMBpI2WN1by+0wFeP(7!S-OXYht(YfE3wph#zTe`zOk|6GYyOve#K6mXl?k7x->A zrpLuz)7Dr(MFTn|kGV2vc48wj0ru+k#+KNqKu?4HVhGtA9<&WY@?OD)Sx&T3CJ%ye zi)dJAJkyfl7L*~rl-S|A2Sd(BBR{n#Jw+IRM8=#=Y=@c`;gK)Z>1%a8^2h|Y8>q6o zm$oMg=q}!{uYr~i=}DC!-_bx1;y&h{9D9wtJ-!;})_1KUlZ4p#adixgO-CUJw2%#k z!-c@cO5MI(0G2xs0dTKG#v}IAc4Cl25$uj;de!ur4-I(?M+o9HqlQizl8Sh*H=2R| z=s59PQ3J_rkPp=01B)}fh{mNqk6sJ_E?*5i)?nMzCUx2tb3W^u53OJMYEeAeA~Tv7 zIh$_@p6O=9nw*YVI5wa$%<9|Is#bAqzi@5n-~D}(qi<->Nd7p>0OFp}%~sh5KIQ(4 z82QHTDz&|BCKQ2+&zN{8fcGPkP&B5IkojV`mADTO9<~c+r}^) zG3!jHLfrA8;Ja3hGh5e)x%$q#H&oP~lVHYJk~<`+ z0v8$-7V!BLQoiCOI^QIAzVW5or65;70ZfJKi`P}#5Sw?Ufk8hm#5~|P-~A5@naEDr zQ#GtaYL{!pn;%=cC~U!a$(_YPAbsVxqkmpL`P)mn{M!DtDX5;+TbM#cjb>s@>-3Z-T_~G zCEc6=L--P6ilF56Ctnbr>DB;5FR=nW@^k60f=*T%p9IStFB^t0`}qaPEt2`il4ktw z!FQj!iKF>B{GPXM{q6#cMj`{|My>LYtlfdTA@*z4ljg?c36aExFJ=P+Gp5BQ0O&ME$9jSkFrx zDL`tSu0qLkICVXM2E)@!l4l(dV=Dn$1AQETpQ3eQ&G6*A$;|6+_1KnKyr-I2@wK>p zQ498V(8Yr+qljd3T|y6$>|K55s*>P5ip|=4Kfv1CNYYLX>~?hLtYPhxUKYoBIwxS0 z=pv4BMo8^)x11UvKD*VqDC^WPG^I=NdizM(3wH>i7A zTlR|LjrHXbl0c{F;bqc?pr+`giwutkW@WCIOw@lkSLoR99BirUkLiP_*vxpeftL0H zV)Eqma7&f=Dg_~b1tC2)`>^P*j!6m(%xw=0I`6iUC8*=n#BLj4mS`O@J50@Fk zU`#gvP-^myD$Dl7>6Om8Gv7!qc#DQNT-lBTqJ>~}q^VBKJ$bG9rxmHEs-sx+GiCmm zxt(|W)dg@6P|A*02nVi#AphkZ&>)69=v(G-rp#BQ5#WaVn^~fn5N4cVmhX})Y)cjW zYC=gAdx_DcC?CWYLyleY97ZvQj>@aiJ#%T{&KHA=wrECT4Fm#P6|74;ooa(c1cU`P zhIM3o)t8Y;V)kwPqOMKtuBVb9Su+YH)Z+kRKP_nMqwU!Q_F$|H#Nr zbTA%=U{1#etoyYsBsFZsINY<@-*zwvW{H`yhbNSWCtDu}+_h796@98(qqjFptf6lX z2v0d-s;zfa944k%-ni7c|L$#NW-;;7_fj-u-1YLuP2w~Av!MmSP#M28`&ui#=&%& zvPg$GJa6=V6t;ID>POv?McqkLZM+qq5sB$k3O}9S+HVc%IxY!;GSo?Bt4#mUk6i6i z$}BQ`K`{W`f0Bd~#M^+Yc6LZNP1{6)Mg6Xx%n^1l%eF^|U1Nzyp`E8|9K8Y?X37k^ znY2_5shuV4Z>qm#6V#M6f64Y$^fi0~2?i&k@j4X8SYts#Ui3_|TeCutP}$=|>TQ!C z@CCc={)a3u_A*Y7s)|TZ5)>dt(*@$AvGz*d+c}eu#^Zt5JUFnO38Cj&S{jqlZP5rx z!u`9)*V`Ux+Bk2%9|U(2I2Hdk8M}8BvYB|@Y#M$~XF~MX6&-h}2J1L3SGc=Gx}!0c zVO0I=krwo*e@@5;`8#1(v>E22VXy~ftrP_0Ki7+fX3Hwgd{}~EP}>%l-?{dutb-9k zyt*vYXw?DV_MQ`0A3`_kOCkMtFm7)VlHB{Rqwb=d&T?tW^C4jgEtP)t zmET1BfkQxw_CF(PVwVT8yGj@cWY!e)~AYZz~N^VXbb5WlrYdy@RqM!Y?f zEl5lP4+I3yCa=^SIN}sAU*O9gR$hSckJ0bF*+aGkrMNP55t`x~*@}3+*mc!7zBs;h zRpH;nLj}a#628J#(BH>WQpeagNQSe?_;B|`RJq_e^xnFC)JwMOQKVT!;im>zJhZwZ zDq_~o00rt9Jp6t{?T&4Ke&D-R7&`oIspWWs)+jh0+SVtYaFPK&L4YSS)IY}fZ^j4K 
zq2M?uST)N)=;;1l(-DUoz|mtOlANIHI>9b%Z>mnYeCtR2&bD)slE{63JCnko03Mg* zNYF;v$pPWxf`!-q=-WdaLsAjz)|)r==MV$Oeqv(i7S;rfMKp-Iyd6#%ksTJ>HW%8^ zx@0Vi?Or}wFI)WxtM|GZ1DEtLt!Vch3AZ>`(hn|Kct{`ycZ^XS8ZA=E_4P;-(m} zb=5+=vkJBOj+(@j+kg7M3p7h?uT4Yfs2b(q_BaHLWu~YY%{ISu$7L7p7(3|QgdStM zv7eE<(tucensDRK6^(C|<)Vq>#7AXeUSX@k|B78d@|D5wCo0c7bplkR|bMhhYJfG8yLIULz(oN!v5kJ!Md9_^p?uXO?<W^O(w_RVM*8a&;s^5lXaQr-*8_JReZbG&qPZAoS2XZWAhrpN6+3-?}?^W zaN+p_d2D)yG znZK=}@16rr`O_q$v?ZSXLX{i814df4RV=-G{BO|aDL(h= z1`h(Zv<=Hp-F4c|Chk$EX^30-RH#tPjM&ou43=|$y|!r{!cw+HaXGT63$Ukt5bhIsgokM0>;^Gwt*4$-&l#)ON;an`i%b0q z1ncUXcc+?fMU^dnlAMUri2UG6qlPe?RP2BcG*J}-5Bb`aG=Pnt>_KN9T06gWkZ`mx zcBRU`nFw}{6uApJ4gv62zUosEZTEo<;0(~XQ!JQC3;FwA@7K)LbOmiO=@~UO+-89n zF8`iF%(Fmrg4_sd|HIbb{KW_Fj{zZ)!wpjIEwgSr{WnRoiGJv{i+MKtm9Ap;P2U-v znen@SP{O1_m~@P4Z@g-k6pbbc@sfiK?<#fu?H&q4=nydPa*W#6BX4`*?S&3+3`r&6 z^BhW_l?e;0A!Y0-K+f%~`(5n}ibsfL_Iy8tPQ3(q183hz&)}YijUrD^=sL2Qu|t=` z?t?U6@l=JAK4;Q|57Uei$+T=KQNp9qP&o07uvpbxQ^>q1gL>m+i1<^PEuzAq29xab z(}_vGANN7AuGS)-#VgYP+9f#9Ch@2CK|WTzrZ>e`Pl;!%$Vktw+P}vw2&*0icI84# zQw!q(1Xo{o#bPmu@!Z5o5ch@{1dR`tA=?ePSwBZn;7d#ky$dFv&2MXeyK;gV zdWk{3jhPKc`|GhbBXFVWe}`1GcRt$L?77hmCunr!Gxqx*ZoM<4Bp7>6d?Eu}I7r&C zjY8D3ZZs2>*|DXGA@?3oc?5KKP+R&KL|dam+{XsyPBoMcXghiD_vCs`ZKyZN1ukpJYfdT{G)B=ELOHfsLoit@dF*c){yI zD3aXlK}dSD7AO2m_B=H%*dW-5{XRgfM)_34kgI{D1b*5iCA6^NRaZ5%_O=iZ& z()2*i-@D11qQBpFs>p%8qF8PH4#TZtFeoezH0uCr^w*7^dbnj59X7A<_ET(`n655X zmIYuxNy|zk_Y`$GfaYw_b0g40J4+2|-uye&BaKdAbp}%xtqH2qNpznI$yGsj=D=Y% zq7}62(}B#M!YWCGFY>p;uJ6LFM<_0|qo6Z?Z^=S55*+*r!q+rJPLbaE*0Yx1NrLXzHq8{pJ8(&;gU8vETwy;YN zNG37oRBAvN&++hErML4GGVL&8v3~^s6y5LST}gQ%iz+L|2JSbc^!`G9{7OYB`l*`J)f=`#KiB+Z$FO;z9-~CHdewx*T%I{*}@f1veR3 z3-#0^&YP#QWgou_9fUV&7(LNErHNUFz_y<>DX(Ccm}_zy3_oH6dyAwWDdk2Mzy15^ z)kk&b|9^DotL_*CbRI_hwdN=-~J0_b;&hP}O5Ym)>-8^(@hU8oP(REp>oUr$2wo?b~<; z82iU)`d!b)u<)myMsCnILbfCSgK~LxrrlP&D#LtDEb;Pclx1aRB1Q$_3!x{@qF8oL ztkuO2K!j{)GWN1{x_@f-b@4|ca;3;PLT$eyjo}!QhekVc6Z58h8vF<)eE02Rs zOQ1uh)HAbt4#x;J%ELq}c^x~V*HzqA~gwh#CL z9|D*Qg)?HdyYkygG(VI=M)1KXhpI5~mpBVa`@T!`v}m<-)x|x&J`=~iul@cVKw;O4 za0&Ch%{0sHvw12A0nyM%fNBw4~}{; zHmqLs>e=ly6eII<7UQt~pMao5y#OlJ0yvY3kYh1_>j_S!D_y$Nj^^l$K;pK-5co?j zZG2Mpfck4dC3Gnc0EdQW-8S<4-*z!do+vKu^3{&o6gX(jycB@eb)|72>kwWHxwo+} zSWlSD@#ei@i{N}Yb}p$a5z|&cXieZ%rp!9r|g%$nn>QXwc?egBapgeGR(;f32X}KB1LvE+<7ju9?}b46Q@p zR^&!Djip2*;7=YuLf0Vavrv5*iK{>k8D9o9(lU8TPYCrtxKactRK_J8yIZXfyX{|TSlM}Y? 
z_m6_xzP9Zwb7qrr2AnI>?>2AM9Ge5u#9$2|H*JOo?-6BDE^N%*#<*bMf_}A&CnZ?# z2S!bk2t#rfl)-9Y#dG#aQh%b&mWKz|FSJG?mKgVt9|atk^dn(o?ABiva*fdBZP`bu z8x0LyVXLQh@WOKQl<#rfv|K|qO`qT0#2aL2?hS0kQCH_eZ*CZ5<|g&6{1smI+$TML z*1Ye|<9*#K*_L0w)3+f}O3YozBJ<2;^cTwc5s;v{9rn@5LWJ7uLx4A!%#ri&=fN(A zN+A!nVNXNi%_jNc$PU+#GTPCp&pd411K1D&Eoc@0r4YJUq6O`bucN`dlFz)*D2KF5 z$OEDLL3q__@r?~&TAmBPerzRQ-Xj4ICv17MU%SiVy`IK69jIM~C9D#Z+JLOa(fWK_g?elhZ_q#i7|s1}5-{;`C0A(lU6+ z)dm_HWly@p({bGM9kQUxGE?}Wx#Jt|Ie$r;44&(&Vr;+G)E@=1GQjI-OYsx(KHY)% zPQ@(9yD2WY5&gX#%Vt!2!ZK3;Xh z9dh1H(A>-d7_@;$0BX8LFn_DbgH^(t7^Rd{c%M#Bi$lzN?EJTt`l|DZU}jH;=<(&5JUp zDTa|$X_pS1=o97idJQF+F?X*f4?hKYDEBH1OI%9d64KRrYy8kb{VA=JY{~?39NA)M6f| z)4?8YB3V2{k5ZM<-1OW2Fj1+;69d-`7glTzT0Sy1IIVMdq^Jz*`}n=eMW(j9y+X=s zORTuUi40gMWU+i>?3+45KW za6=HfSzucZ(gAe-TSN2XkHnBj1UHHFj)H5tdr2yIm(+Tf2U2Wu3PPgQg88>u}< z^wa?y+VnqIyAB*idPpdkSASK8E;i_od%i= zWX&j!3Z-8vEf+{PGvIly*@7L}w{%?Aw@k8we| zUr>JFNfGmCc;78Cyl}Au?gNh!*VyMv2K6o$xYrFEpkLgiCWe_@**AwT<}A0y3sN0o za@mVBV=;KEk|JpGS|9|F2W5xk!awO!QOn6D7}l)w)?4YxEk>-$y&!CgEqkgg0l5!A zNl;+orquR?$1-s_HYA_AxX7K6C%u)i)YYL+X5ORVN?v{`T+BdF44A_>qijs8){$rxKQ- z_*qkzSq%px>&`MYNe2c10}SvY9?>)l5<(*0tfatKlOdG%tAdn(+Qmf~LsLY5F5cR~ z$0!gu6~sK-OEQ55lsF?R#@cRuN+hx*#8RKTMob6ohy%_w=8cIw8x7d9gVi1KAnW`L zfwE@>P-F=F{V__w`oWI;3sYb|_NP43r8~SH#fbiuXnn%mvEVYZ^MEBk!5{SngW${e z%cGsAlJ{kQ)Na0{t$Nfh4a&68LtcDOBg%8mY+05rux$XtTQph@ZycCM{bch=w38Jd zA0*Ea^u-Q{$EhL&Nxm#%|3m;c2DwZ8L~3v3F8WQthd3#_lHL_xGGZ8|HJ0u;3~U)@ z?o*o4dGN}RF-_$P9?7_}d3HtG2ts~1+S1$tx6i)1E$0^>|2TuyLn&nVMU=Xx0CW1! zG{xy!^w|-{U`i~o*KGlwy(?nj2b_vk%h3zwW_gr4nOO1Rf|b5z0Pbc47AoA4*iWXt zWw+*_>R;R}#~iR6hd{Q?F|d%teKi8axQqnviOskcX;NJk$0DjHdIXXXHpvG&%+$p) z+|O9IAHM^=bH%CROP|qb^sHVwk2SIWxn~?jD-&+zbVUuyALhEi))mAm0L$c&vqikd zd*YJ$hUqp%mGokA|Np4BExWfOd-ZkbIMM_izs1`xLG^xfioE$o?)wmBcKB|6z_#4( zo$(emCDvfV;k>C?{{#^Asus!*hPFEWrrwf?DWLjXi&Y2M7V%+~-Sv19d=%OoUt(HK z&`C50ZAA?g0r5b@JCMkqWd0l|rQ9CH9!lw51SS1%CmFz78rna{$U@i&6rfySj;tcc ztEb!zp{a%To9&K9*J=&ib*xb766)h#LWZ0*;Ht<6iB@7RZSrC|CAA45U2jHx@OB<6 z5*$g5BpC3@jnXdDyZXA5?@|XfaaAcYig>;}QbQ{&S#MQ4DK5W-*y0kBJ%9~ci1c{( z+~@7^7v>?OiX~o-qkEJ4OZN_taJjGJdnxx2nn*E{(%PVI&hF0s?;Tzd$lGg8VPb6t zs-?YCQj1T5=Lth`S&M?9#gK=GjK7^<{@kz!ZT0BoH=JFUKJau2^`hW<)0C7JZ7&aQ z^!9)WIf3VDT@NA-46HF(oqX6?#iY5xL*GSM`PpIef1;x4+-_0y0nC9Uf`t5+co?N1z@ z`;-F+G(lNW;P>bAKC|&}@<5K$oq)6IxvEVa^xQ12Zzu=e)Ys>!*WMmZ=A8WaakqeU z-~{|nqoz|;f|=%u8E6wwuyWIwAQ^VCh+PVs%BkoFzQjq#*n}|9=q8VLoTL53b=ykn z4dnLw9bLIMRonx3fAacZOX4bIo7^BFsR0SauZz48gYQKplSX2aWa4GjQ{1L72iEWrMP-NBEwyan8R9v<$XI0=bGnYNQ$tV104lC#NKmxRC2N7 z;r8=sGUWl#6Xr zG?YHoGR#O3jdd;ofD)Q22jIs44iL5w$+wWw`_$+6TIma~X@letQX5%a%l81Hebhp2 zX~mln%2h6iy)AW`rz7P-p1&I-GWhLdkmEI{e+IF`|xbJ40ck2~SQ|1rO!jn>?08C60HFeH3>{km2`CByOj?#PtceR;XbS zs2-HVwk>d?6IsGF4W3I!Oz&)6AjrmSr&qgFK$nc{+LssnL$2{4V-blmlA7`J+d_^a zTIBc`W7^-3Om*LFEzPhUG+=Jv`+1vz#ar)piL%|Dxk|kCe+&(NUm&nuDnGN&X+Jd) zX?KyX0+v>OH=Mv zL2<+Sf60Y4EqM@Ik}}l)fQkuR*vH6&Z;a5yJa$Y4+~b`ocfD!ScvXnG-jP7hB3gFl zgH3V>GxX`K&9&lMDiY3hiDHX zE{6sSY%fx7$CT-sKbW5;n>=o9D{75_2p-84jlLSj%oshVFt`!-^;r&2vFgIZB&L(nMSTYggoUpg4Q@QuJ$2om zI&MXs);pbyROnZOw8?;UYc|@rk-ew=wQ(2K*=PQt5|$~T%{`rWT}apQ7$-0&GxR{h zOO>84o^kec&Ju6i2z_=Hrx122D5>?6Zx5Ni3cQaJ4mTpwo@qEtwK8;y_iKf?I*tU! 
z_e08u>+9D&e0R%5nhs)=@H7~X`?LPHQ?j!fxk@ziEI+C>6v#w=@l_HJUcK(QdZ zlLVI8yU0^^_JC;491J(qA(e%_Q$~2P&SdMBK*!1D?2da9~=A$>$bn`Le<+z z{UVtS--)W!SnmpR5Ihic<<#nfl2hJKv5Ci6{xUfHK})9Ct=i0WYy4yu2B>qElRgW{ zkx%g(T(YeOC31t?{=VO?r60cqA;y- z%Fkqv%8Ue&c)p%!1NZRfpsBHZ{K{b;_N+OTvFgYnDRZwoE?v$gtgUhjwS>x!Er$@K zNFZ@woWWpxf<8lljH8UM`fIEt%7bQ!X~TJ2ni96$o+GSzGeRqMR({OUU*}Men4^cZ zm<>&{bf&ZxKSbXSi`KhfV{A;a8bposYz6pGVKqQ)#HJ5k&H8T^SSJ~tJ%?uXNElLC_({`wRMMhGH^&*Isn{BG zV?Gw(VwzFM6NaH&y=&U-_$9L&mbOm86V(+G>hHAd1XF0!;8)hO&eb4D*13Pp3_XA7 z8Yl0Z#hZ>V{TQ$5hAOy&CFPR9o>4aUp^2tlE#-%D!6iQcI>3NP{#wQLPs)>FAxEs5+p1~lB`inJXAG-35(P@iroGFQ8PkJb z1Stcx0|N4F}mke)wMEWV3CXPwAfW#glh56xp z+e6fy3VxzRzyeQNBpVl=cTE>hcy4nkTnpH;X$;5Kl!u35>ud8jR!z7e_I(Co%ihf z-WtYRC!5tqbzVt-%mnK%fpH^mH;bvA!jLY_CvSxIzCf!Os_Eh%ym$>}m-yTiYjS7{pIq+J!nuOl)oWR!VLTaCmF}E4w z>1;Xna031iSSt5~SK62N28Ktis%>@c-2e%L@Gl^k4ShUGgmR0ENy)p*3D_hy(xWC)(B{bbeM*57)@@6CMgdc~( zHt=Y0Y^e3F-5`Zo3VSY!8hZ8C>R#V)8l`Sp4AnP&HI8!+@3Fu@Ao}WX$zGa^0+-@7t7cjF_f}@-&3G~g>a6am z>dvKyD~1!2NP!^8Igl;CI%y_r|W;I34Up1g!{goF=*u3AG~ z?VZy$ony`vI~Ol6wC(HWXzq=6*Nk_{Is7%S^+VAYhJKe%-wzm>4ywi44!ep|pcsQ4 z32PoAHiHTAG6#-pCr^8>N=A+f$D1+*p*2B5bpey$q;KQ?@=N=kLF5gZ{s=CascYq7 z*K6v^LxP3bD@>~Ny*BsRkZmsvRr0mzp(`9Ti011_Mx&9-mLu0n=v?}%9+tw^+a^ii zRhXk3U*QYMS0U?)?ysNQBt!UU%xqU!?YQY~I!mNc=>)n2!_?THCJ&??NeZgx7azx+#(W4eOblS% zA(t(Wgm4npsr2{v#ezd!824Ux7{$akNw#sHLOZ?1f}H_2HQ@YXipt#M)CwFumLFE1 z<0?rOh`nnvP&j!iEB&>J$hJly=h+A{w;$k`8hqi~DtRF$e0lwba6ffADoZ3aIXN>J zJe(RNEYfBX{X);ceE3F;Sq)SsF~GaAh`UA+;NXKuY|;^EqU#v~hdc`mI$Pk2mu4$_19wvSv8g%xVu zEf4WOoDAhAs1sy#6LUn$6%@S>GqxaZ5gfe!$2epWb95+8J}~QkRf;R@FpzLzvD$a! zUtV9ogA!ohx7<8z{G*`aWJIUmOm=di1+K$)(q!>|@Q1WL-b{($$v*KKmyt7~&b2%g z4BL2fg2lV;w`Y|i4sH5?Ng(N($|QBk5RL+eTaW4}f$bf&fcc18bhJj0P)rjWY$m*v z5t^b@(?x)yJY@`GiSmS;Ge(MqaN`}u8J$^Mr#V-ui>2mk!T55;0WKw8;UEh?D-d(N zGuA8ZHoGq}!cvn_zoq2RD`#jev?9)}^Ghh}2=aEIrp~us9gqy#AXyjx(2@~b3(Guv z`5RC}WCfUV(40E$1*yPlG(Z8e@Cq)0jiIgt?(%AXY4!x{N((D%t*8;f~v-Gj0zB?(Jo)k&h94=LQ z*0tG)_YMRA=>hV3oI&Rb!3Y0W@9c8#pGbX&H(7jQ_yI;Kaob@2DzYWWJ|tKqtvg1{JH(jJe@UE~cr>8*rNz>JAv2IS)DdBRc@+mbi{<5IAv{U8{#rBr;j4V2As6%=X z1|=5D4`#hPh$dFyKr#i5PPfaaYJ3SICF$y%E$>>2?#v#-<8F0ZS8MS~g0bPeSdMrc z>-oTL@+lTOTU*KZ9AF>rEz-zZ+?gMD^ke*9 z=s6nv^OMkMO=D87Irg3UsVo#XlmQT9Y&CS#TSl#0AwBj&1}ZtNrCg6bStN>q2&1)A zU)8y-C_5~JE#(eKHE=Bg$5&3tAu}X#+5s{`g9l0HA7kCSG-9|qSozh{R&w<6DT@L3 z=e=0yt+WN6fwebGhQF+h?vyRbNYs`}9S@I}gYrmUoFyb8vR_>SODa?e>HE5?SjpWC z!FyR-2#Ve~)vANR9UVwH#Trc^-@49ICQhyl<`X%(W&37?pw-i=-7!Y>+BXt^GF-YP zh_(`gr9VpM5G!O$F%)RpCyPu0_7eqJvmOSsvpZJw_xiHM)^LX|w-(wJQ$&iXUNsOo zf%a0MqL%|`q#fpvLNfEKO`kUt5f>uYleR53Uah-PlgG@DsgaA&9E{{&KeC2n=k;l#IyS}g z&lU3Hfq$+C1O@d( zG-UnbO}?9!SK~hazRm}PZ8>1$fCPa(eSSiIk|fU#F)K1mp!1&uME$|-^YHnx5&Q;^ zCbYI0GIzlWk>yR*hL~4gS>fy4Q5->^3Z0k)4!a>%43}Q|E;sj2xBG2Pf8R4-$Y#Wr z8hEc?mJpx&GS|iO^oL`x-8!8(IkV}om4iBX% zqz7oTuv3p`!v3DvV@QEGw=fb?YgyqV44gb$t96{eCA)^FY#voQAh#3<4o;j47_xfE zqq*J>Xs|o9{?eFHY_X<-87poNWGSIZb=0)V`&=CG2sv-H=6H(Pjc>1u*W0*YH@6<% zEU=adbB>Tux@v3L+g<4i+xp`i&OqLvNMzA3lz%|Z+}IQHw&hX28ze549=X)<Wn0~jfT>#8!rF1>^ey9%f5#N2K-TIHE*O^EbPwGT%mK|Rut1GBg# z5Fd(K+VVq77ITxYwSDawI%XgI87rCAZy=_Y`xzsW*2;M+cvG%f;m1D&OP3W zFHSz)(mcccCWf<9p?Bi;lQI@rt`XLJ5Tfj|Gy6IaeqiT@BvMcMHC^)EgvbJ|^ zu`eQ28*`(`TJ+}JNLFKIpaS+B2yoULS)JNPYD4UOGXsD$+4>2MqsEnJ)>(ri)it1+ zCHBH^g7E8Vzg>5JWX2Q2Zz$=RtjOo89=V~7N&KqVvbwp}3ap2Qi)JCu>5s@px(hP0 zl^lHmF9Z1-4ShrdNNkjSj%18UIU2FFZWL5yJ{?6G-jqboG+Nk_enn-@4!O{^?snxm znwm~cDWsDB0n>%aEf<>u%kXe7chRbkw<{YCdIL9?$bmsUhMynLyye@xK^+8Qy+1UZ zD)%P;mN>mtYFVKHg=J39=Toy$7|PC?dH;&)$L0yZ5E~@$N5`)!sJ6X5L=QGd;(&*< z<~XR1LorEv;wAkm;WEzbEwIu;?&&%WaHlf%nYV$1bFEDsNmXskPlQj~4Cq+?Gi)`5 
z0X&)a`b`IbH0yHLSG#CS4JI_Lw`yZn^26wL)IfSF>;@an%zX8dwMm@|s=JQ`H6e|# z;HrQyS(T59`20oJ*ys%Dpn6oT+jFu|$6pN1hH;u|6|hK}49i%$SexHFHh8M}#DEV= z@PU;}TuD1s3_2JW4dg#bXpkV$c1j%%DC5ZDN@WUJ19`p&_uZH|D*zgjLV{RKl-omE z!dSf$v=|hMugou}E&w%Lbek`U^$<K4|kN1f=&g_dk*19{1Z3e1Hbg$ugq)t8$FDZFZu zxc9tCP!K!Y?+`cV8p+Sraxh7e2xV{7R9C2++&y z)Z9~2$xJMdE9D&60C@LjzA)-Z(!#vbP-^29XO3t*6-n6p4WMiM5RSR{1=k$o3!`f? zt2D9yxG+CHgNMc}N%z`!4V?r9cT`m@8aE~#rGI&zb@MX<4R1r(o@Nn^OT^i>fjXV{ z_hf)X(mTvVX#ttP2%bHAt%hES6_$>-=$Y|ZpPKKsvHh+9-bnEbz;%77dyP(E)iy5P z2mjL8{T|ImZR%}0s1KlA(6p^2krlcie~=eewyJx`LYYSXSMt5-POzh^d$o=QBbXBW z`PAxFskR@==mXe&hH6A>0#W17ZSD&~3Z1=oRqbR&jZc47YLOj_HgVeTbpgLA%6DC5 z!0y(gr`wo?6}&Z4_wg>Brzu_+@r; zGXv(~{^!BIBemS0&QjKtLg^`c1*z%(!BXTKl^Y&W{Kji`Ya+npEymTHOCsG!sbDz7 zPfxb2PFWH+fSij_5%7m*sj6$dh;!EsgX|{Uu#^Ftd%=$@pa7$3e*P;Jg{bWBa6qXQ z!r`qwI{XOUDY~jF!a>buy%7=Pu5w=XhW%7KNuoBiEiSIvC)b20rWtVlHb226fL;j8 zMM+tQW4>t5kQ$Qn?aW@b65!2mf`8aW9Tg=>QZWl-9*bAoGTS zhKf_dS4do(FA5&w{=4dx=)Sl!)P%5Zovl<(g`)APVe5HJc1vNpm}HO_c1K}==ZfuD zG&%D0Q@Cv&$tz4{SixzyEF{}%v4pPs3A1S2R=(S?BlnPgVL6cVY>k2pEvoeHB#(>m zgx@PDdRn|xQ3cTI+DnmGVFGITm@xjW70@2jXgoA zS5G`0Ye7qC(?AqX`cNL7=AyFTtA!u3NBTM{*GZefjkT%0-H6aZg}cd?$^cWE%VENo zW1tC&nTUDK5h%1OS`e!I{$gQvN4UhEl_a%tcFv#+UT288i64oM%>MLLr!N z?PlGoX&C~ji-cBZl{%_3E(9upew79q&{%r2#JTQ_v&{FQyynDjI8@DY;*k>gBIvj;I+^&G7znsYFW17cdx|_F9K*VRZ z@3?Fn|MG-{FdrBZxLXI#lecsplUXqn&YZ+DXe0iF6NyducQIx^8fVn+HhIA|zRr3- z-+zZ-xXXm(TFLTqr#v8jMYBe+TOT|8+R|h@^a$uA;;*XKD=Zd2;3ir`>&91$ZSbBB zkB2|qaE)7b=!yu3Zqly{247ZsSBD%iB_HLKt|XAYdZ7?e!?j`#K9WRwQ6uv%|ohtH+aj3If}| zin3wsk+-y8G_bo~>k+kbQPDwlNXA+04P>VHhMaL*t=2jsQ2|e4QK{75p!CwN;uf)w zkMLg^XmdiO^CvADuafO1~TA zgs}6H0Le&kfmPj0Rg!XNmz1coLgT>MC-bZ@9RGiv*#q>R{*Ga6+?)A^{riReMgm)^gj{7H7l;@pQw2>qh&I_V`pwx(xQs=ml>ZKW~T7ZTWrBmA-F>!vw?boZF`e29O+z0`)3 z>JAgGT0dsS>%SLm|Kyu>(QrUko`Mr5jU(|!lHpdX333hYmnx>7=PxxlqdQ>9b&D3p zn?cE!G^6=Xog8#=KzwR-1NiE*B&$>i()8BAoe7uIFQA3*>@zHRkfRRd+sz1iWEEIqO4<$J_jQMpD zd^gWh`3fJO(2gW%OfA)0@8#U4VxBUXj5Nh_pzxcj?eY5E$~j4iB6>PlAc(9*3-AtH zb{4HHnasM%peJWmt<>ddwxFOLq0mIRXLOzHO_`R_Qf#P})bA09+jZ-n04zzpEKYX~ zJ-MMgpgjKtRzv=BO1FqNdYclu;@j5{92K$Ss=TgFjqYx#BFET6AuINOB0lH$P2uey z;_!a!752^;%`m91@{8s|Utw96`bTW$DzFQ^S6j+a;5JEg>m+kGs&J3?C77$$d9 zinv%zS{>x&z(EFt0REO9ED~I%h!|tI0eiomma!!%J|5l3CU{m`Uw`I3mhfSs*=k&I zno6w$M97oi%Sgxhb5Irz6=BA#trKeZjIqtu9qFVEL38^bQko4?AU|3~ZlUG=AT*sJ zu+ISl5R)kw@x9`&xUQp8!|~%1!m8RREbrg(JQeWOv~c0Q)85Ko2uL^B)#4jMA!1Aq z=o!#pAz~CkUWHop@e%K>&JLZ>Wx+{P`SS0&i(k~qHWx-qrl`IY{FI?##)5KeBWq;w81{c4l^rQ#x^D{2_t=tkOC9(0T*$H4q-{)BV`>zG8PbW0`N(I^{%h=f z;n%}wYBklw+eap|FPO}Gcdp`j1!vDE$W&JkQ^awl2@YBS79#y{2~^7 z@Og)sh^L{1Sdui*`H0%LpqiN#@7V13nkSf~ORf-NuJ2j9RLM48_b3s0!h>&;`amfszrdca z;j!q|_PSVwC?6zjElS0fq$4pLytZe4HDtm4HSwN2aXT!Wtr8_5K=b|x?Je=^3_KRP zk@aj}>Wq#pD*|`1*A#^RXL7IO%`9)yx)E^!X`~TPhGh%ucH$SwfVbu zekFd5&tt)P_jB$s*4>08#VJSGIiI3QUb!_YV5(w*hED=_jWhT|i0!%Jva_pF5Br!Q ztT2Zz;=r!h@@MYdZKfgi^n`dRp3vpgEh%zg&)R1!CVbvJn&aH4VB6i4Vxg{8ECN_e zBIHXHXjJ7mO`2lyp*qW{5zT%lwuvU3J8`xu;D||#E+y{8w??c4+}U*fUi1U3{8oF# zYrtTeKn6g%PMD(Zq@~Zbt$uX3jEt9v9#l%82(%UoE~6XFzl#_`B*Heqi?+RPy@seJ z`B3(|u@aGFl12|@1j*&0?L!i3lrpp&q_P3%y)^s7>{~XYXv>U&aWks@0be|f|2khX zAZ*N&NXpobdQ$HhDa*sL{Aqa_5L-9E_hI%sR$$2r_@5a$bUrq&tD}92?|)L&n)Mom zP2FiQ8F%_l4!;;7_um3R@>HTW?QsJ%yW$_?2Vyo)`SRAFp^b?9)QDhGd~;C7SNOh0 z?DC9U^xGS?W09$Txj-nfbQfnf#Q_whm!I465=7hX8dQ2JX-%l&C%{BB^t?Btp~@Hn zAmPyw;%0nveM`L)>)nUb!C%3&E4N`&H(w~u?B1BLFok^figC$^t5Rw79GTdM>I9ip z1Dm#yk-YFv(}Z!C!GCeVMZ0;@@ay#af=g=(qv2dT~$37Z_5e_N{iJ zKOuTvqT72#`~Fc@MRKNazX)Sr)zfElyOFh9Al2SJ&x(YIsvVKHQ7zy_vPsRW9CpuJ z&agnBtSS%2zYZ2X15JN%7!Hy+bxq*1+JQ1idJZqzzZ0A~rkOY;tA!M60F28!lc8Ny 
zRy2i8>a0?dHs4&TEajbc%ab0H!xN!c2S`+2OZ45L~gykuSs^idd z@9`vdfOoh!bA4*}=oG5z5oiomW@t&{*885W!>>~j}Y`DD~5S=;ULyWt`bO37P z$7xu_RWeT-E(^uTt8$@MPc8PKC~rYWH=Zy1gz@IDTijA&W72fpL!u~li(c#rAC};N zJOeE+W}5OwMoyNmSwjt6^nojOLN-+H<=SG==1}al0im_umCBzqLLxez6wmJ8IVAw` z+e~|>v+fq@>4x*ucN35SgiQM9Y4`Fb zQTav8S2M-H-?PDq9Ul5vN9^7hP_#`<#dMKEBVku9DJj3`>Y~mD9q&C$HuAPud66nH$fD5OBfv04=bdzKbPnJXnf$ppGs`Ydm!c-gx!V4?WDsu z7qRjmpd!?+Tw^!^{xgl#AtlB~hJ|?vh;(&5 zTV04m{lmDnBz0U}smUqD?5u6Yy1Cm;xZ7LnIGbXwC^-vd_(=Q}j{D%4jwX71F5NaX zEm@X_>6hzG8azmN9pJ`kjUE=V^>T{UQny56{V`;ag+5Z(3wW%NI7=qdi)FKnfLsj; zQNRmhSp1f3=HwV8Ku$jhN`CO;DM)B5TBliO!rze!ekDOeFIATNE?+jYg_#?{RR4N+ zfO3_OJ1BpD)q9k3olVTeCB;eHb$R!1 z-IW(5cUscIsy zekX_t%7PWcUDu{NZYzOImH_+w!ymtrwb3bSHKAS~>Ba;X_qeikVxr`TRGO+yXIFhB zTX3k{qWZ1~%c-K)_WnQHFAagBE40Q$uGBN@3n11ZIKTp469-Cb%+kqWd_ek23^k_Z z(wLg-;dJY)Z%r+hdIU9tb(aHe+b4?czEZ|MK~tzf63!3RU22e4>$E0SX%dd(_IqME z#G1uhkw^S1#<~0A^aG!+u~ruynhc~@@y5q5k1HAi7aQkIUZa@8lIt5YK8i4Db)r1E zc`N8^4pHN1dnz+5NVEfyWu48s(tZb4!p$HFC0%y)6`1T-5%xkL{!hM}YBdVie26ed)J-zviOKAeb|YMP&w6Dee=X%FfqjN59lxcdRnS zVMK*S7~B39{tLhu#kOoZXt#`mu+W&+`O0CrV4ewRqX|vJkKQXpqClSr9WVK>KbaD1 zsy)YsoaRSFHs}iEirRkZuD~ zX-goAi@}d5Pl%o};ncJ>4CnJ8B)8& zCO`f}@o+mcu4{%rKF4}dXHw|(3tCAu1k|2yLA92+$>l68`FfQE_vLmwUqE0NA-nk3jS^+=yY}InjnrIiei?$6BB?zAze$X&d&^!nuuhGN#NI z7xtD7zor4~lFLCT4%!Lyz~0Z$eoen&Kj;0A6+|_u*4X>Y)%U(|@(M1h=R^%`2^5>b zBZXnJ{doVtIprxtm-N;YK{+A~_pf&w-xK>$>rpS*y63|EWJuB2c&-Xd96&8duT<&t zll^(Ky?tv+GN6e6M+dG7nJA7+IyHopGQz1<4zmh3Trzx+AxyJ)eA2)9`wpEw6yxA} zC9O;bsuyu~83!L~t%hpTl9{=-OAbFtZ-mY;7eo}SUJWOwQ@3qgk=}c!EzaAbAV`;j zJ>4}SYyrUm_JB()2%=@Vfy#H&d<|%+`O0lZ$xs?b4)Z|af)k(VPXIndH3voN(Gssb zIO$%yLL{V#`*Q6e&N(nz@r_`3`HJoIsCS5>KLp_j+ee%Y+h8}BhT4K9 z6a+H`sHv4m3BDb4=}R7xKdGj$$)-ULHRB%?8|xEv2tKV~_nIIIy6Yj_23S=N*KsTk zZT30~tE&C`TV}KSbkb@nhN%@@LJF7POHvRr3ppk9fDfv6qfKKO5y6Y4HhG>?&qGK= z`h4f0-Pf~KMMs5_-DzG&LY}8O>&!%7OEU9g(}k0=^zb(t~fUhn4aud+ZJfcgh(WQoZ7f8HUkKD4EV{_WyS zBXwdbb^BO|O_Z)62ysou+#HxzS&S`T!gb*p3ZOWx(7;&?ANy~pFf|wus&O9OK1=Nu-u?@!mM@bjJvn#5rcz4 zw-nK$&FMS9f}g~Ap+=^D1^TZx!al9@X}?>;S%x+IGnBgboAKDXt!pVVwndw_SHQZ) zv*2p1%FrFLgHWR?ZZuzmkGAVq_Cm)lWa(u$)UiLd`gmxE0IP60xHu8gw{~- zAxOnc&Scblesv=x0dl%=b+FKy=uqpKYBKNZOju+Y_)-aCz|2eHq*xZe+ru96$RHwt z-i`>qkWh(v;mR6WIPwcZ?YU90Hojc-^8fRW<96^{Hr63PStk}nd>Vznlz=Zk+5_LT zihas8XA~*HfHQpQ27z=|95^$*mTcRcf zTjKlF@G6d9xpT?u9n4Ssz!d029TgIDI=ft#>qPI{mg3qR{p_*mVT@FQY<OBLV zTFKj*_)w5QQGQ;pd~0c`oFd=6@pm|9a6UA72b!-U=5EWFLYp=`1V!aeKsGH~m`6Vo zxiPXFBCF+vq5P3MbY2RFgsIhz*qbiGvB5cKG0sD}-^TypuQ9Ic=9N&!W>o~FgOP1r zd+Ddby`dK1E?77!ryg+{LRn6V&E2UAFrUrd%rC?h!lwVZKc5H0x*~{MXP)5`ocrJE zZO6JYfUrTZ$rG_86LTiaYdec*(2ze47mQx)S^k%#97feZ5D06>a;zfTd7$_rc=y2D zS?}>AL$zcB&X)6{;S{66|9Xmws$9>C&Bd%YR&ywYAm;A(Gg#iZxZ;8AHH1(>`VVvQ zWn9NW4V;Ksec2n_tEAnUH&CR~Iqv8{faSU&`-wuv){(&rDGcII!O$hBP>T0B(bX$D zTJc$MZsEB&pr6u;$LjYTlEZ0syj(q0!nZ@S?z5&F>K>y96+X3Af$iJdT5!n}pQVn+ zQH)}2pt=0|d)Fo0k9>t;N$~%Q-q4MP77(2rgee0Vyr=B#`C=@Q<+=<|)|Q7VqQ5`s zBC}!`&1^3^A%Q8fuGOh@CO8C0M@n7*IHKIfwnxE%$`=qhBk-8dOGVLTi#a9(>|%m zcRgu{G-J0S=O9f;!l1;z(in+ z;biM`&wfTQJp?pU$NrAUi;hI5Pi6j(S^beuhMkZ>C=xbVrrV5>P&`Ew45tz_n_soX ztCm!nOl+-Yg3y9{%I_m6O?G$$%$e5w8XR7n;p!!|TcFlUk$g(Z50}R_ONu^}TW)9w zQ}+_(&V<}Z&1oVwr!uC7Vi>(9D@(asY<#&U{rHzje|Wq^a~EA92E0l1+o>)(^_`vJ zK1ekz?~mcXHpGL)>PjK9Vy-Mk9`i#45Y1zDb;YC46}o>xA=Qxp|61j$Ds7tIJYpRH zX|6~!n0~6e0UmiT>c8|Ghdz*0yeJ2sero@GH@yw>eE5H2ZYWKn#vg^78vC4rv|+Wa zoUXb5qN~C}SpW9Gln0)~x4NsLQ_y5i3^qL63pVwTy3b8hlI}oOrhjOBmi`{-w;8jG zJG86DR8X0;Hr@V%gEjfGbWP`Ym7YvT+5_d;Rt(iJnVI)t-dQxC_PP$(t&M~>v~&w> z;$Vc7vsG@Rv;z0WpsI;wSA;wL3CPTN^>=0;#VToz1juC z&fz4T%H1E!22A`js`HjYE$S?X!C65&r51%?(${ZY1=v6-Tu$r~Y7UF()u*aUc`r(N 
z$8oQEps#HIebK}mZ{ZL$%M#9`V7m|c?GT|g{Ipk{r<$S!#wLe+Z^g{_>kD~uB@+)W zATX$;t70t{`k?K%J2o5Ut+xO_K)}DN@|_x3YP8otKC|rM3AU@t#q(sa3p*XC=YGcY z?+h#%yl`8P7Zfhl$#oYx0S*|&tx9-i7x)(t)o?*Khz_3?@6F&eLHT0P z7IggA7viFR9z<9PWajWmhGd@O{+Yaew z=sstf!sRnA(XYo1ajt#T&ZmyZn*&aN36N05wD52`_+8?hPCrGMF4qvTPw8!`e5DiL z&oE*5{|WS0k0TUxT_-6j(=z+5`$m+rK&(AcubG#I4^_bkR_IALaxJ62;BIJ5>?deF zWD9}O1taK<9Y}gqGo@d+dw}|&^Sg7frvUtETNlaloXflgB=}Cy&N0tV{!1LSZeWD&Q3?}i z>N@81V#C4YBo2}|-ERdXmUP|MNFmH86ZT=A!rQp^$3|_Bp_`dX%4+Rej0J=?d<+X_ zvK7_}duLPJVI5CjZpEU|PnjbAp2TO3cJAs7R+@dVnyz$<7i>b4u)Cor70kW)#7?ry zCWH?Qi5D*+O0t@&u^pcGhygvW6MhUXjOdaIt&gu0o2&kc2~Gk(Op?m0ZZD7H`VZ?C5h_-u>_#yD<_*F+~>LJH* zJe6C9sDl|t%D8t(NI!8xB{{J<@kmSLr;P+SXO7(YB!)(B#2KfE01Mf`Tk+L zkT&L=&DrS9!Jzr@TyC{}=TM{$?{@W@NkqmB`T_#4!bZ&t&%(QuLg`gC^J6cO3ehC+yB#f(Sey{<=9$! zB~whhQqh2D!a#2hMV(Pc(yUTa+Sa2Np{UKb*NMC1KLP~S1S&_yFsHcwR*Lz7AUjVF z8yMTV1XzVwt8EP=U~_OF6PBw zeCl^Ebs!(U9#6-UEH+h^cXGg{lt~^w5mbkQawrl~EkT28*t^M+u$BunjnX==wzibM z4-ngNJ5Q;m2BO))7)q4$*GX+07(Y#&jqy7CR)e94h)>V=t_)^Y+YBKJwLp8E8SXFh z1gdcNR$We@T=P>+=aG}wSRlRuK>*7z9;H#@l?AF{rO(+l9x=PXWnqMQ8+-BAeBQFj zxRZmDOV#Zr@!DYTi1v?JjYhs?&T(NmNe{YX`PRY7jrK%@lfagnIJBWLPK_14L{{M5 zKhLNK%8mp{tI~N`2a+~SJDV&$oLGTSbL2K~A)qO1K|@P$96n=*K8?_ZFBNh1D>bTi zgGu_SU6@~ia!OoSdiCgLi0zFrJIwR8E+!9!1-l$&-kIfGR+4+UZeMdszyNNWsS%wk zR-wIt*-Uk)^G7G$!`@I{dt_PfSNqjBAHYJ>>rHqOXMRvviJ^@xxgPkcct-iu3+E0N zo4mV~FxPjFls8lf2$D$?`Znt*8zOmm@pQG>?rcctYV<0ZoW)@pDcLqD+lM5%ZD=0< z$G{$#E$Gs#Btd(~{Qa{->d#1 z;K*eN9?q>TPPG$%;R`0>*<=*tDdM>)Y0;n4ekzDW8_buT zkDz~IsYk}FG}+J<_)+2`=8>z(y;W-cNCBUngDqOngZ%B9G_0mT-$a}1b`QOR0qrN< zJm8Pgei?3-&sv0Se+>;|kg+fAXb!<;>mka%b~qelT~2vcsx-DL5v0$rx`XScdB(*+ zG>de~p@Ju+lKj{^ccE%Hu}DuAB=~POv(rH9-nfz41eiNbI2Y-*oA>s4m!*0esi!*J z6#um(K*=#Y4R(a|BsZJ1Z#QwqSA(QRHSbhSsH32f6{8%Ti4_AqNIC$s%R~_8*MM9eOEG9VjC8Y4};XLG(DVyHO9UfkWIzdrweBUXLY;gF zYJC&7YEq>J$+;=IreZS!6~cxB{_JO`7K}CQtx3#gE5rZ*%uHU7<;|vf1)w2=^y=*3 zN|$obi?U^NN6x7*YSY~Xb4+K#U^!O8JEKccGf9hz-Hl5eTl}KvjxBC5Am-SOD5J+3 zG~m4CN z&9lIK?kKPw(SIjK#}`eHl&6Kf8fV-(3PoZExbDjC&7e2F5aiXLOgA@lgBc9cC(4kO zO)2TcY%T$$n-QJTKY%Q2L>L_;0^Bs&16(bHTw&$GE6x5l0{GCbe8h_X1oMFSVNxXC z;y81y;SpDKJOJXVGl7YpE_iANp&WNni!v#iM*SUUs@i6tIS6Q}Y1qzOuqu$b(s5QI zS`Zsb{CHw!lhU|WII=bEA0(#dC_2TSh7t)?w4LGYmdd?{&8c9I$pi;0MIpS-bTMHK zIz{_Oir#?xAb3l|to;?nqM}0w37&q+QaKlQk$)<4nR@3&erK0R4&beu^OZ)+XsE~p zqks^~1y?PLv{5r}>?*4GW)6eX^=rDVx44Lg!raT(cIv%Gsb2+W8)4L zs*!2;OwRGtOISj`-bSIp$&xd%bL{ijLLkGl=V2-UNeMA+PC$y9gICJB6Mrsnm!5cQ zFopVJ%&m3ln}0|`k6(4B7mDbFQ6Ktx>pgzlgx?|}uaRB<|3&_8uW%^GEu~tSh0Skt z5e8NPd}cP%K7;323xEh-^H3ToO*w3I?BW#?^5X~ZcLb#4cdkH{F{_H5QdQ91!XfA@ z*LkT$micFb)Q?&TLXE0myeZ#CYBvX1B>b_7{Y+z#hoMke_;x~%=s87~HtX%2;YU04 z^JhYESHE5Lh<7W>d^{m0V`2b)SS0#+&vfGkPe&~?0r6Y@vYmO~;{T1NNoB(HDF7i@ z>b10yuAGgRCF@14H+i%k2fG7#wiS=BOg^#*u~?v1@e!fQ0-dnxhni2TdQT3-B6+L8 z;PZA3vfpA*>?T{4$y zROJ`|MtY+!kBF3v^ve;g_ zu`6O3`SZgw+RcjCt)JfR=l`L6{WGN?JV{kjzdQen5zi;wZT^(4f?y?sT0@)T#!!xZ zS|A*cWjdWQGu;8g+y(Uo5H$n04HnVSKErAV^saOD?`s@*#`{fnrTTODh z(@h_UrZ@P zMCKV_;1-mA81f2@p=eD56^>MoP*?18o7sT@gs%U>CU(PFkFM=^MALtG_M_|kQ@OYO zIb+=>P05Rjw=$m8nA@^B0oBzx%-D=lOoiWy{~jHf*I^(#N|^jb>4war0qS| zl#zAUTeM7pyu1)|Fb*{krkDG<(gx8}5hecPJJx$P;Cl$4a%kIH_=pxe_p4Lt%D9)%mY*eAhdPUfPy~VU@G&zV9BR(MTH?X|>Jxdg19y z;WY6(j3TTgo{{eNu3!?4#WTYNAwT2a*Q|edzGI83#cYEDeKH%@F-U6mBI9}CCxKZi z4SN!u=Dn9^p8Gpbi=un+ZH7s1`Oq16&tytdCGVU9_iT!ryYb6|mts!W98{|4!=>Lhjog=%tiw39moWjV38`r$>#VG&&q zaCrb{kT#j=?r^=@p)-b5&#%yF+^FRnryI^Jkc%=bmkw){+rhRR82i6H^F_6Cu;yF; zIUtcgV6a7|-SIJ~DW&32QGDEx<(Q)~>M`~OA=iHD*(@*xLaW^z-w&fxUlC_3ze`ZN zM_j7W1E!9J>x*(DNI{qZO`rN=o0O_Z!@VciURHl8SY?I6IT&-doAzub6#Rkzn9&#` 
z7O;0>8nEmNzbUwa0!atEMT%wi(2zrKTn>eVn&tsymFbwIp-?B<0?ebZ(J7u9q4`f| zdCDC!edRfSX_A5*@dJ2Wz005HKgAFE02j?I9Hh&6^F`V=0Ducv*Y+YFoWTDi4eK_^ zwm$%v*e@zlQ36CYQTzvJBsWO?kQH&CZ@z-Fm(7F$zQ6>!le}8eagC=@gUW1eCGauX z4M5;m2)O^>uxD1zN0tq|UjmTd`>M=rV@XZJK}%;dm6E=fDVx)P$S0aCviiI=?)Gh(Ieye;*2 z?T~is+#?$dMfynm-)nm%5sOkhd1H(3`wz~e!+(-=I$1xmSid4AHk=PaAbQ(sd%S=j zw|4(axluhL9#-YC{0DUkR9q-&EvJ740I4bs}aW<&PP}^yOtpLX2Q6q1nk&_v_q>ffNvin4J>uUjpT?Op=oD)q7(EHin8(wdK8YV z##?*&1LngkA#iQxC9mub(V4=xlJr=f(1<;flsYqx>91}6A46EdcEB+A28VcSJ~SO$ zH6K>CZV2Z?x4SZq@dyxI$#&TDg&d&hs;e?od0(qFW7-(r4k63o#LCG(yB=ua@V5K6 z9QqS?$d?eoKUtLdd$|49h1?B*=AJTadA`P&v~E`6ewBypoSS3=-^ZU$ifT~m?_xFu zufG%LYmwi;3Is@AOM{SqB?1)Em3d9#cRP%!4X$rg6T#ZIERmY!50I=HcDfYi z34Zx2N{}galt(xd{UfbM+6c6Z2v8gT4Y>uj&rnh;x4kR;>)}3%ynyJO3+IPr8dd z`TLB^MR^>B5ORQXS=$tW?S3W?qAOBKn<@7Pk&nvU&yjCz-F#}1n18OWY|%U>l0$UQ z8O}+^Fd}=M^a1@G5~9VEK06)digXg9y4&5U8ra7nNE`57aUJi}M~`6mX=^6{RuX!v zmojfUns<~-7ygyzbVM+8YfxZblFEmGX`xav2m3a?3!nNPPIG2NB$ZepZLjk62U6b?m$coVUH1g~!JP2Sx~8 zxEC*k3ewdNZ5Ar0Vh5>qOim;=6byxRgy%d9jgkh&$)1W6I53?0;x(#I_WT6U(FiW+ z*gP=+2#j}pQ3?o5l)Np_%NCC?)D+MTvkDr5jO%?1>lBXDQA_H$Dm)oBIqoi9ECejD zDb#6d9{H+F`SItd*A_M9`(i&tJYsRbuz>&wo$(~9XRtm^UQHdK*-iENkrbq%S?QPy zr-(^6q2jcBFy-L!n&YWT+wkry}_0Tnacbe zRWy4Y?k8!*w>z2!QTzz+T|}6e5}8+5AFJU!Oz9oWlSNU5G4WCjcp1gh@qb-rK1OMg zWjDYy;F|FdSp$OM8!y5?x(@jRWpE}E#?5twpMuwl?`m;aiY*y9F&(+M*v%La?M}mf zIN}~R#O_{MAzT#*q!pHW8;VL_TW5Rd_`Hxa8tw=f(#SM63y^@%C~p)g>wKKoO&A7w zDX*__0}=9olLGD{mj7*)Lx*R4abQl;itu#{|LX=U?~qaYkUVYbDBEqvJqV?zQOEOG z!V;ptfyP+TJ@zpW?`8SUGi2h*WX$dCgAlYDf`OT`-?$)gs9tF@Im5OjnZz_v7QPR& zhlWrYLtuHy@PSnrr#>^vK#6!vro0Iop(ApM5i^%1L@qW@_vr|B4 z&j;xh-m+?D7o@`wI@hkH`8O_@_R&*G1p0cvd)Osxl#=fv2=|_RFL5=8v|0-?F;?hPTNchjI)UL0dFKILlVX?|437g**Z@Qdx;u?W--g9G-G-KXr zFUp&e9o_QZrqu0(h)vmCcJe0_?uu@l+k0!}lAY)R#9@hD(4m36mItMG%hndF`1H3Y zg;9-^1~kLhgWjc=fL(k5m?|GHU6s32PJ;9wTAeI3!O4PcX?zC^s(_Y^Q)_t5YDN9C z7&6y!{$msllEO~)#AV~vEkH2tMSV*Jecvm?Ti9=qx>rpbOXJ|1SMn{uiia^2%ny;U zEEhlsZW>{r2%d1=g|9#(f;rU1#O;u=lAWdRy5eJ5_5(|8sidWf<;L#Ur zrwHy6smJXz1y3c5y-zLUMMt~RZwMrWx4@CPE&vwVX-0cq7DKOvj@9ojf8?v!~@ZMcW@m7beghI9U0^61TfKz!fCM zTyYAl*XS7-X0X{|%6ita&jgrQ-DEvZFm<`$@B#AmBoyS^u5M&TtmrARx85d6(; zLlC_)aM0WzVbvo@nWWICjCd)b-{l5Hv3Y=sE21!Yi5Dh$T}MB2`AHE^Wv)1$#PJGP z?bsj=_ulOMd|uf#K+#*v1&L7~aUbD}m}_)3O<6(wSjR?aJ;4GdAmR(Wf`+7oOB6;n*gjX3QE_3mMYF(bT(s(OH3Ba1 zI9Xv3r7(m$yR9=3I6MYL{REGl2=_lrFTY z{~*f9Un`juO$rnXRaM0q33nz)7f{pJx=|+tQ^GK7;qEUwiq@8Oq06gbbapF@Cn6uT_)4>V zK#_rld}5r{IGnH(t;dncN+E;%h$D^rs0m|XiV>xyN&|t=qN=J7(T%gg@IE!oS4g)n z3I|K%c7gsMx}l)sC}-KN$=DnFqOJ+Lg=|QW zC!lMI9HcJ45ho)}P*~e`;cKESf!=}NV;pf`!;(IQ7sJ;A*>jU1!+t{XVrzrM*Q?*_ zVUe6K8uD(;QRe;o*Si64_xzLa!U-CG1D4U5VaVCpWr(bOxFdsy3=B|CXsr8dJ43MI zD$+|G9&n38K1#U!bFlF6e~hZB>R)krrsPA-bW-~?tNBDUq$KFr7+5;(a4zK5zVL^L z!=}4Esx7xeRD)Dp3+LO&`{baxnKAc!shX?~c35%`Es%~c&cy{}y#?jcM$L;!3iFXm z(dt(r9J0xUug??R&=Ze1kUiF|B<*#PequGAYP*cc&Ek_9OF>=8R}*V7i}BdV(!!31 z{{x;ovztjqzsWUd5rs^L5dY8%IUthU(Nc+AMrX&)3f}+fbwAhH3oEz<;i#vjE8FQ+ zzfNGbvpEQ@7^$pk70YlH*XKeq8Dh5@lEdpGmEZY<=^9vC=_aTHx6TnJ zss^Z%QdJ-7JKu06pgxb*-y2_|cyGg}wBHBH_veBUsL!<_^C?jbujG4Cn5>wMU~4|$ z${u}ByYEYHL}D1HcxyR#@}?H(EpcA=?aSoY=F4Q@=UdH98HBhO@9<;YRxkaB4;;tq zq5~h>a@fRc%H!eXt{c81nPp?mC0p2tdogg#ZQ0NK)?A&xIeF9>XbL&r+I7$ky ztpzSe&0O-_&W5lN;PAv?y#m9p9>tDXzkv1=-auR~<0F)_wJ87deETBM7SJPoPSc&U`28PRz>`yZ~-KvR0IqTLP_$G70yZnY&W6X#*dl}1 zGS(Ia)#PwJ6`ScHu9S8P{A7QVWc)eQZA-z^ME14{NbyVYdnF>n`U@<9AfC>*)*5r1s4!!5u`7*~w4M^Z;< zFZ@-U1m%`kdN;;iBl+X!mgp^@Y;trxlc^kTZYex(?S##EUut#8a2iYBrhvx2WJAak z8wj1nrE&!vN#3~k0x#3o30XBxlw8SJ5`l19xGM^W^epYBpR^Yc9a0)@4WD49AwR*` zt1E5L6N?3T)i7+>2Y_bVK9#K4O!Y(}L>}nueQ`SXlRE^WJ7|Z1`Ha 
z%v4eV#H|wS7(j%dOhA-Nu)Zg7tpd#7e_MD7dCU25jrW?8qeO^`50?}!F{RRGte;0IDr;4Hvxtb&hm<#Ns0*9s zlCdWykndlm81Dvvi%WbOFmnG4#m5v(!Itz2sT1QsLtB#mj#zE-$C$|`Wi_PdwyM|+y~kiY72>A8 zZms|3fr0BdziQ1-KNJDjfe~6HX<9z-g%~_^^;TzP29xvkVzJ#HY$G@XzNYlWtI?Mm z2>61#ETS0+?gMn-6GkZ{jD2H?#UvRAeCO6ITAnp*uU|_&^{511fDCSh&_>UrJFC8Q74DxRet`R5$F-Totwl&_hbOK6$Te@Hsrg|5)mCtCou-;TD^KTohNskQ)MZv z*$tHt2)K>J$&hv@ly+GJ;1?8KpNb3C9y<|hEEhB=;GcWBF;^H9#$O>IAL!NNE2Wgg+#PP6fDNw;a6MZUA%4b6ZEiPONC! z2}6mnY~x!h0f&flT2mr^Adljzt1?1kZ z9Cv)nIA?JM8Ja&PSPBmTJ$*C0154KG*_ywEo zqStkFIAL+LY`#+ZT01i{*y_axQ%o<|QO~4otY3(hqoK*vOi_~gVR9i>iW-XT@kdDE z$g?Lei0>2SA+phzL`8QZuM7C>Ap=CnW;z3f(W#sqr)RUi*7}_m`Ubj7mX?x~-<^D4 zP?HHIxXJl^u@qqh%%k$2_Oiu6S5e1!#XmtIrVty#U5mlcpq~J3qjRyC29AT9(3u4O z?su7EGAoQkPj{8<-#qpb7VC#LZf4Y$Gu{5?v}DOpG3-qU6B{XgP;*^t@&56NKHNj-1h4x&|Keko5@eY>^E zZH;yRLn#5W4=0RLdNx1Gaq0&FFLF5n%;4`X;a^ zc9aGGW7ISL;$&H`;jmDq@Fd^dAMl{H%`5X|_>FJ$Np4pJW^uOqEE#j(ei&61mTN$g zL}PcUUx*V`XymAQB_^vO>Q2j8%8;os1H{C>%`AabDeHBv}dMzOx)-;G3F1lJzvEh_gJ--oB(mhy}F zA}I<*ZJRC>+=E)5n8Yal5dC+Qdaq{GC^)xhJ-geWJ-xVxuCqx28saB9zwTl~n4dEO zrSPsWV6$6)m|<7|cA1%G#lw^{BJFms2RA^8w)Wh*?>zt{25@PxdlG>FQ7`~Z;nJb0 z5HtS08pAOg?e?kHH|LqoXIWyS|N2Sh%%t@NNnS^ZWT#jn;6w{HDG6QdDgC=#xfvt5 zhVcnxyoUsXzoAmqpWytIFD`y)%v78Q`^0)vyG7_L|D-35f@#l&)-b@U1?N)%nQaSl zodd)>q35ebU6;%+9Z8C(r2BXG$MUDy#h%mJ>Wuz~#MBafDk7Oet3F&sg;$D85jl>b z=vX}}gg&tD2O;aV#(}xK9Jz16q)ml-K9^R)sWn^a)_%1Pg2(LP*zDH!?_J(B>;>@p zxf}=p<_Wr`JWDP|!e;xR&&SP30=TJ7(@z2GmitEZr`g3j_y8V)p_{UD4g=Z=KpGLA z35Y=d`0ip+Bf`w+6uf*851RkiETca3m2(MXPMOM+vY{U_f;qjjp5BRQ;7u6u~rqqBirjZE%k9A~}x4x~LRc5z4eSnY5I+;1FH?aTlS??91xTr&MW z(VYn7W3A*_d0}lfZuk{ha#arD%}rX6Ns{s`vO7xRSN|-heBTK~E_{LVg4fT(Pfo1F zc=V&mxOuIuL|Zn&7;0godVS5Z6FC*$U3@QH_p;lrplf7QhhoGLKkCq$3S!sX3b?%5fb za09s4`~tGr-~>>hS7vP&aa^71J|5*)*HT&Vfatkwuz!<+nFZo$8SQO>W~=$VR4Vl5 zLJYqBjJiF7m6SNb@+c!^_m0euI2yIF&27fXc)HxV{-0;#96lIpJ1b5)K;Hi9x);L4 z8xMKU-5w%(=w+U6yYafsmWJG#YBUMSt*O+aI=)KzD%}!5&kHGrMV|_ZCGPcR82;KJ zi)>;;#v2vD)#6%^8pXHR3cvyEDXkYDiCUTq*ID}b z{mvb&EdCl!4N7iLea5ue)gYTS02xF`920{Pvuxd!a=p-01ot8jnW#?tyPqIotJX~X z#J&_I3g(ju_4U8NGxYMU0xl0=f7sP3lFVFJ3rq3(N_1T1KM>^M3?0zl?iOLjXNfrB zNnvW@^{n%A^j@v)!@%W=VqjdgX(cn#d!7y~+}`&Drv)U40^G+k1XJ)DezWlFk{pK6 z?)7{}ix`nDC|S+4v^fg7yE*^n(3fc|Odt_E58wO$vYR@wT)%4BJBX-D9Tl@53i1ZS6<$;nn4U;vY>iBa|qWRYIEG677yAR@% z|Mb4F3WOTFSh}CKt23iU4`ma&Y)zD<;U}M@=q$;Vigm{#qCF4@B>g-f6Vak?OT`?$ zC&*=99Xr8xPbStE{ZD@pDHEqY6TMDLA9&P2ztmUF(|sM#k%nc_{gjb3BDs|V8VZM5 z%M&z0y=3T)#!)Bqz(EZFT&uR>LL2K&+~1N0E)1Oq{SA^5fW=t6dTWqxIh+`|d-L0J z|01R6B3C}Z8C-S0R#s?^B4Z;=J-@7#pW18WIhy-I=*v{L{Qe7;QP$O9oahNrra$%p$I_w<<-xF~GC=R@ey zC0{B1*MBDC7jYdHcpZC}KMJd{&}t<}do(N1grF4rg%1Tt( z8B=FKm8CIREPn%$#txry87d**VUTNt!hpmc?C@WpeS{G$!-hDsqR$?1pJDP9trO>1 z9u;mdnddrC!|kll6dU!|%ZMx*^b7Oxdb{_cUt7kZgP>7${49ouo-vWcSUqsknFPgT zHUCqbj1jTd{v?azC9fq&9OG-|E{n-$EW7nc+CLT`^ec zHT<-)oVt*YgN$}c`DDX;gD1mY_oLq*bx4_3 zI@N;_$raB3*A25Ya@-42s+z>e64$(JV3euAC1zZVx*IOHiMW>s?Y625*H6zNmst?URaNZVxN^;Vy*O5%g+kW^Nr@TQe2iyz$NBPyS~! 
zq6t00iwka#$*5s5Le#G7dR?ER6i8#*wV!rMe_sv-^p3{6zsE#m&q~9%4#sDc9SGy- zNF+Iv+WYAk@m~-?8C7J;d@FUs`B|E?Caxvz5yFmp(Vk$R*8i;{k6}3aSZ5rG-YaxJ z!XeP}hQIKb0=V-j2T?&*IcqUZopgY(Y1xIt5J=_o88Q56f8q7^k5f<9mu#Jxwi1@QkAeSkZL>C zz`XMMHFPSnptY@Ds|v9vjt~UV_COdIdJMuTo5FLf>GI$Db)r`T4-C2`1E-bp%NL4< zuNcBu`~xuZNXO+zuKILDsZ-`K@qpQ-&!voL@`ME3Ln7bxBNXu%-Eb}Bw2y~t$O6L; zZ%Q(Cv%~BW2zxY(=s^zG@oqgMU0W{#4_U$W_BiGm!F;KBl#!YN6GbKNTy%+)OMMFZ zk05!VQ+2ta-qel%$(fy&^nhfG#4Z)H^oSx+>Yc96nQ94;f|=hyW%QE)0w8x*WORR9 zDQ9>>M}>mB#m6{4k)(LJTwQDfOqQs(CDO@RP0Ovt$lpi@f#m~H;6(9tCL#<*_B5`S zF+qxpz%#xc{gjmnq$P#9!O?N>_FHFw7LJFzNh@u~M5JaMVgAHe-sS^kz8k_*S!ngr zsr)=>JI>|-#=Qo(pz-qA9=UZ30_ZF*ASoXUP_Y0ZixtJIw%%t#TfFep>`IW(9ZCuR zT2&TFNnS_Dqeq)9w!EIpzJu|?b+}J^e6vQ(2Zv8;m!j5JT2Q{t86k@QP4YoAOdP z&Re0c9`6w6A_q5L+8_@)l!B@9CFx5e4&Y3)wqAu{!^YHM=M@iNy3D5ZqH}F5Ggf!; zX&KOz^w%ffe6+pjqz||mZB#6QVeZs$IfX$ZDb2@%qJjIq92`-CYG#@&7>sM^!zoCM zzshpls~H`LBN`dP%fupPjAE!snk?aF=`Php$y5s0tNWM1TJ)M0TZnAhVK>8f7$@I6 zT=nd37?c@4QTe%S`G)_QL(QuzK6aQ~SLMh*e=w4sBkGnMm(14Q{24ct7X9L~;!bvn ze1rKgWR}BdNUPSvH@tW;#VL;_NNT{>u@{~aJx${lC0msXi6whR>|IsG!iNBDj3z(S z#=AF{+0CoY?IX-XPhP)eFLsHh7@Idl(I8Uz73z1a40oh`8JS=+p%v`qq|(5V^IvrcL8;0yr6WR2VC>j3fO*2yMw4J$wb z?NP&XiZxb8=&t*~ zU1V_}9IAaTeFrH;5ezeOS+Sm~c@A_FnFQx}evT$y-OwlLwK?!zq1-qOpyW(Q`-xtR+C zia(`Bqg&M4kFLKWy^}lHH*69hnPji5*!oCyc7EpE-sEe-Sq!Tz74{oJAGB)x0!@G} zL9UEs+K;|My?bKoFL5ftP8XCq-h};6U?h9Clk2p(RT&W8RR_XCF@Rf*WJwwzrO!fN zz`7LVQp&;hc`B717?dO|M^L0c0bb%T;XhSOS?1Dj{(sSHqb*@*w&>OHKHR`E0g{fStw71x*$Pjj z4yOAX>PpU|j7#8)Mfux1R9brEPwMVIudkmn%*}K+qfi_f9>nrpejyVoLscswagJisYDv>qh*#GIqgJ?TujJa&V(Sc;b^ zd0Yz2_5B^?&93!S-}ca4Hi~e<+IO$k&B~Se>ozBdI_nJDv{b)7S@}Qp+Jn4UG0OyL z{O5LXf}nc-6%pp<{L28?X@zaXN+@Zsk|7~4WwZ??(f*?~3uK|zMVbpaf@Bce@FUdl zaGUd&ll&e04aso~=f4FTlEhEhec+jL8L6|ou3bYMk#9_vxI-1~A!eg@4~Dk>%*Q+3 z#b_)*Fs#JTk5Nb|eLRwcagyL1UaEF7Xjodk90ubt7Z*sYCIm&(8MfA>nU13?uy~{F z<3(h0?FcKC3O^;Rt0|0*Lx_H#cB-aYh?b$LzoW!WLo**PqjdiYc=I=0iFI8}gO8;6 zVQn3pR*i;|1D^KDp=HE+w_iBhl#tKhY4^86z&YzJg&By@XDLL>H8E_TO zh4D}<>Lxm_q)9fIyYjRD#2}!ICrgV3nG*u+&azog=ebT*OhM||Mbyp41)~U zJBDwBGRqHQLQy>4flsuEM~)>WM(UF(5L(LAJG}20_Ydd7>b-bZEES zTV>+k;nP$+#lVgpC6Qe3q!z{Thpum?)SZA>8)g#0{SBbvH3(eN36GD}AJN}g68_B;UPQ*wX|07sASFN6s!NJ+Vnxg5> zO7J+1%1Ofjn~h{Ud?XP+hngPP6dz{vGIG710~DupZ}uix7@;3j%RXk(m>wQGieol(qm_V=1<;Vt}{lgaT87 z_WQY=IvK=En7|cW*6{g=sW0q{Op%^A6|BVMGolm5s?4Dr85Ie$4GYzj6i!_8YQz{= zdTNfdIDX=yhxG{@Jw)Ewlj#CVbAqm`F$vc<%_q)f3qy9|?d>v)Dy>bwI7w}ylwb$a zinreBcrb8*hZP*jJDmMNK zldarG1vLX%MjhbT2Qz6F_q7_dvex^23 zFMiz`l;A4HRb|Pq5z6!+hatL?uJ(nHh_E0 z$*&5S#Twas2~>2I@+?Yxo2t>06d$dyvi0n+{X zv4AWE%`E>UPvk*N&d_8T1u}sTeo{+BKlU+8>OXxcvQ6RVed`ab20)iHv?nemQ_ZAT z?<*v02Uas`$0{t|p%{fb;#^z^9brWW`p8skLzQ0BhVikdKE3j`+qT+s$!C(OOAxT2EZ6>Ye{z>E|2Y)a%mgxiN~LTW3&fIoT|3!z5)EXhKI#if@VxwPje-#4;U`mTy{kpAR?-%tE8A=IDlMlGSoZ7*8c9^; zOQyN?J@)Hq^7HRhoAPwIXw5SGZyVr!Z5E*5sBA)c9845F6V#l?7wR{i4JXf?tQ=K| z&IVd8gxr}A6M0!S@VeCFACg@Ru}K&C_~~+24A=?3yx}UPJa!Pk_-ta7_hWQ~o+x;Q zxs4`cCE}ljs3d)gF%kf=Mbr-fLqgw$v~p3 zZjBayUB0$p*x9G_w*M3HWbzJXw_H-@GCHe`zproNFhpe6KpruAR$I;$ae8VMkiW~P2d>2A9kW5e z4AYtmwQ1oQs9S3|*l#qVAkCnO0bvueSyc#FDbGxO)X&5Llk6D-A}(AdbCS)h5*5ld z12QxSF-hy!wkeJ}O@ruULV}mu%0dyb@~54(aNI_Gt=w}W67K*xK*qmVlxudQIv2xs zE^)=rlq<0W98PDL=q~wLPIe?m)@fO3!BR_h+IvHiOB|yPq@0hmsUT8UtW7AGk3mDv z1CceBGT&OlocFj0bat^h z73qbd;#Qew(mmj2C?2Evc!l>Dnl82Hz+)%q*4G-I%RH*@-fv|UUJMk_gxDN6*&`gX zb#iur!-&DGW+rBLF(1)OI0DseO-G~Sgx?Be=m!Q^rKuUat|pppMNYPH>~z#gzJD~IvxIVZT(HjAQEUd^hAe-EXEaS4W!>` zp;xEPTIPS!K%#?&Bg*F`xWGXoA7v-B2Z38KVw z0kpZZecMjOz(#Ub1l!h~WZ;xBYjY##^72O zW*FG-Qf9Bd)6FHZJw08;+Lb7+f^X5`k42}~%s{x;!rD@Q3_?!^wwrLfBo9*ct-+G{ 
z-fJj8yFLAk>Y15^{|W>(EqmkYucIZCc+j0q<0v57h4&s`Hb0xUxZAK?cJ^5(A@Yya zLyQNQ&{5P~+Pj31o89+nmPp{O@TtWw;CPQ(>eXRbJ*)I?(0pt(Lgr)1x=P#b*@4_Q znWEm5Ad@Z!VlY9&HBkS&P8M#O`FntA>ESE&^5_jpDLeRaBZTml_iUSKaI7K^=+`T* zB6n@uLhhmo-i{~*3cbG6Wdi=)UJkmXgNv;G5oKf z$hmzvcal2T*0-u>zQxQFC`wRQm_7E!&gUc$pNCqm2F(f5wjG#(5C(>pk9K%OhQ9BK zX89c>Zq_WhdPBp@g;CnuShByu%(l$4o)#1#w6hq@uAF5Z#f$u3a&woC{ZL0Hz<|%X zZm1V>ixWFO#)4KbSQ%JW{`1LSTyqO*RbHJUqz&4*0^L8_9I@{TvWVKSY3Skxz+#9a zwTO1lonh1CQEhN|z`mC5fmH{Ao7N{LX9^dnyRXMkNd(xfiASNQT{)(mrqHwmo>yow z`@*_piDEjUrUo6UQ(U1={_knsV|BSz3!#!Gz%s44giJr?8A@TYKxpXxb~T2yybM** z(W$Vi_Bj-_5tvHr!rpf$zTj^$tt{Q1pVa?!27Y?aw*CD(eh zrZPA}!vCn+8?~#&WDJ@bfIFs$DZRrykYpfwd^U@w^M%a@)a^MTHV03Eh19$ntRaWt zT;Wd=40;)dYyb+zrXLeva=CO(xV}>ustt*AJ`9{K1V7Dpsc9s($dT-DU7t29wY6%WH*^LGkdUtj2x@8bBYGqLJ`L zMmN+HXh)zf9WuklPuWb(+`K<3rP|r;$!NF4oKGB@z7w@rn7XsLvY?r&Kj31xc-n=( zAnP@Nhd9#wAwa`b5rCu6)0kx)X8$RcRvhL}o#}iRsRwVD>RR4}Xl?7w`|wBu!3%Q1%7FV~(|cf7)c=NItn@tmA_ zADUq_Pmi&v?`bz{E6SV9(^H;G)s*!iyjSbJ}z-4hOoMAYP2{ae?LCxpxRt0p*g7#s*8*>^2btydr_1zIDZwfwKH6b$gy^UVZ+HP39qO z{m`_cTZkb)X~w3iEJ$}W(N&W&VlDObD%z5W{pfiq`b1hoqTSMT&(xbQ1yd zXXK_y72DTnBF?G07J*bGd?|a>zpO(XKIdo#z*b112IFBu|LT6y?|mMKR~Llw0Vk6X z27&76H;4}WrvB<_+@;NwSJl1O_lcqI)--ZPiPdNy!nXx&pgh-k6qK|$qDa^y`W67c zmS8Fz&~$6O50(kHQ=eCE2cKY3l?xT^>PTPR6!2NWr(0pBy8Pz8EgkDWr7A6<{z-@>Mpa^iEdYjQ*?>2VP5wUa1RB z)aGm_v1!e#C(SyytH2XtF^xur?zfgM7zt7{3BnO*{D?6beGV#f;DNE}f% zm?-p&+e6#Sg<84ug*e)T>bwWKkRN<*42I0KCAn!zlrXhw2-vlO7zi*%mj&F}S9IDM zO_sve8$!4mdx_ie;UMMX!ib98ORkYDV%fV9BPN|Kyr@8Ba_PK&`5B9}+x^#`EOR>#e=)&56Vvdqph6=!S~q;vR+`syFIAoiTrvEh%6obHLB?g>kwSSdHTjL>4K z{xVZiY;@>uQY{s9isX~ z)VRBll!~r!%h+;DC`60MiRvRl+x`DrfZtpr9z-NMWh!b9?v?P0s4xSsDAp}yv({{b za210OZI%0=E=>WvDEzahHM*&D#I%ahJ^hen>5ju3Z7{{iClAwx+!FJ!uSYuZD-2`X zR6>GBsgaMhLzuCgw^f49$P`CNgudeFcPu$D)4JG`zO?~$1b$g9`09UQQ(0zv+kQ<@ zlS%j6Fv?to6-R2w9zbDtm0#04+py#>`=tB6jjJFSXvCDeXx}#+k)tWGGM`ub#}9Da znX8t5&9r@+{(SN0sN4rzjUdyT-G)Z!Sh6cBSZ3{G&BP~$=YSEy!g+}oy=*>J_ZA|q z3R{^~2DY&TgRw=t7jE{*bLUu=6C z9}#zy>O?7-R)XZACGISSj?TD~XBlO3H>?@wbgMe+_Q{RyW=bC{odqdF4``;uL=Dm7 zh(Q#>!z;`nt45x=iRCNtTnuYU6k&msDuMZ($ua5YZ6rDOlg{}9ql;GDQg63>a6&cc zt#m}Y7LI(e1vfE?JWkhDTd{MnAo++f#h#@t*4dj?n3>*Zw&wP_!_i!m4IN9=M~NX? 
zLz>Ko*DOps-mGiiH0no2Z$hO1vt0UP-UriY)6zvWfBt|$N;Tz@Si+ZBH2xnAT>Ouw zMqPa+`?4-lT*NFx84G3O5$m0Qa-KzWt4Q^5d-Bk22loh+Wwtwp_y_xpv6ziwRds^V zk`Kv3e}g$-MUt-WA_EORaU=Y;<`X!UrV#C#XF1_tRwgAxPj!?~VCP2QF5EYBXP}>i z5|O0(no9=Th>yiX;tZ)9%#i<)(unbu7JZl&mG~{T|Ed`BgTC z|3#!x{dMlerY8@#&UV`>$U8afH;%rL?6Gc)XfvSWSe73TQiSgD8b;{zB`sv7Evkrp&Q`GkE)9>l3Sr3WzF zWplT4ReRdv!3`Ve2#e^>-E^wF;YL|yCzi`&zHy3sxu7}qY!mRyT3s>_T81Wo>liK1Q=2 zoM>@2g_&SGF^l455RS z%4P1eXVs&Dl~F!#-)rv;4A$DCAA?!OuQ+9*#Ui4ur(74F*75|_~K}-v#4K{f9aI3?W7VW}& z$2@)@i8`huGr zjko=>g~n$XRHZEbQ9`d_udf5BZswyr(&K8c>Tx0j>-*`XEf#_Z1|kt=(JPbRW-Ml2 z4Suj5a>F~1^`0yG=-%DQ_tLb0x=GX*Qg5^b(Zy8|2Fu| zH=WRlsTy((HL#2%q^@=t_P?0}$#@Ye^h&oqRY*>CqK^jj z{jr0{N6pR_{;VrgvF8gAuTDUXIZ--A!MfDh7v#gl)YrimLhuEZg+#JR{ zC~QFo`X9%Asl1!_`q4W9)Qs7`$bW|chRtrqK3wR>gGfqDm!qrHJ8oXh@g3)NXN zWM5+j+f-AW$JLyf^v(ej5ll8!AaQYboY{MNA%K$ENr0>aZU1;}MK~}uHXbtcGR7zg zoVME@i7mA3Y|lkAY%U0O?y+2JuYtHvdP+f^{4PB#1|~8@ zDi<*KdCWy6MbbsXMS+g=emf+~0##}V-?^=_RPcTc+nD{^JG8mY(et&c;E*N>fhMoZ zC`4CWY70;GgW|o27Ert0lzk=e$@Cw0Y^@*L=t@>~(wz$9Nq7^I<(TW$5@Sa!H2CB6 zQLUHF_n2sHgMEcp@cV0xR+Uf*`W+rBF?B6!# zP$`FhSeNBdM7WS&%)4S0CQd*fj$%$?% z00|ZIx2G<^W(5=XB^JPD%N`g=F4ljLV5LQNCy<0REma&g za&yr(PCdaa%h^PR!)QXAhCnQ1VUbxw6Hf4$!MRlYFJucPonJ}h0YSX1Ehy;>kJr(s zmRb+bQqiQ}fz>)4xYM+DLaG41H5my+53XsWH@edUPv|w-R@Xut)Xpx7XS|9o*iBG9 zg;BHv>Ue1Op@dEe^!4n&{Q4D4)C)N5%j)bkG8&FG)5pdhMTO1WX#4yJW|+k0QR>0m z7C*5LSQ)qWAQpqm4+;yOL>&HNQ%o%W~^oX+84pPq)CRM!->UhJT3 zoI#A){j!WMPS$z4YR0x(0mEHVx;M2p>oc|dI*+qG0ZK#nIKEQVSOl%DMfe0k{ijkj zvBVl=J>^0UxxnmQ+c2FjywWM{MbXW!%9AmPMgnuj2y=#j?=W>hNlT?>*INQ)5IhJ* zo%rm&!kr)R<;G3qmNFD0X99`$(;?5Cq-dSyD%k<3?G2Kc3zR&E`00cDD?Ojz_EmfU zf#uFSDcLt?;N=Tu9;*^Hx~)K-wlDAS%4YRAcOKUhF#=g(t0}zGeVos!%`&{@>#7V} zXt#C#wq@76aweQafNxfixt{|bZiq5*-3pf6*CbIbjvcQLDo{DNf_IFyB-EqpZVGin z-1WVPY$gs005NjcgQdv`i%RdWa&ut|6}1z%NUebM1Rg@{v_^rBc73f1+htU&`P^}) zfzOG>1fx(N4T9ISFR=4X!_SNRQS`=0S7TXZ45mQr>XIDz+xRrdrGog8xt_81;#al( z$<-PO>A<`|PqD#X4YUw*i%h$rGP=RYvJC15IIyHjZo=QNW8tadroB4 zE*~=9sODlm%f|06pKb0^#i1&{h+4$0ggF92i@A7UTMwFrd7R*Zz#MYHpYHyDWk8rp zY8Ixp(8H|#EVoZDG?N*`_qs-Gti;Be+3!2>K#!eyjs9nl@CoR#THs71z%onVA7U$u z^02VJFlj~;Bg14x@K>Pn4C6z|N523VU!Q41%gW9}-}{8!HE=T%#k)Dnvi;>d9yA?~ z{X(K6J4T}??|LuG+!m#Mzzidv*UpBhuati$$pj_te)X(VO**DueUYVPG^FdKL;-QZ z9e39{P4AyD@vQb@O2@j2u+*pvo%g$(h~yq%#EFH72M5cr`&wSm%_eS#c+^~x7NtPE z&e&ubKWXB))kM`M1oR^hJ-B;bdHCAY6Dm?x-;lsXl zO&Lz^d$S*!OV~`QIZF*t&bm;*t+E=3A1hhCH*IkO)oehhl|O=u4lNh$ij55KrKQGq~JfH;e^VDHy&-wBE)lhzEB1qA#HEx+1{@p zKe4hAc?ETEb##b(OWjOt7q0j8cj+R`JssDBX>Y-w-fFaVuY1acgXg;Xv9 zj{dM5&!=MyAYZl=tNf*nOu(CAQkNWRaZ&b|K^j`C$+S^;PT}j zH|`=&FQE;n`rC2VL~&iuWzOLo9!ugoDxgi3Jlu5GS$<@ZVIi0KZ9?-5fvtiJvc-sh zaD|%NwuM|{<$n_Jwwxq+nJ~MKv|J&Um{%|5q=f5Z`f)C$HQBYFNu++0IgZHihxcjFCKN$s0}nY#5FEQ)y7C2s71)l45%Fv6sZ8bWXtfigZm4xMA^STaIAnDEjr*D(37>tB~7H~a86 ztHhV0!`US0PrtmG$rH4!bbu$B{SOD5xfy^k3l}I{?kbzq$38yxu;UD11gcaI=h}VQ zM7}*vj(5I;YhBW;+jz(5-vmOYwDDL11<mC7BSw!OCOXqV_a_ZftrJ|F(uws~Am?xu_k((QfdsxbQXru$)alst3A)~9@C3YZ)o2e$5JeleZf=uZ%c+`yd`F%w zX=z9Cb&7qE^=$KUF@ONR<@DtME7L^@L?3$$xeG9UH~p=Xb_ME}(TJo9!(A^CeRdXBay0>G@llhsYM!*;-!km|=Rn)) z`o6nA-b%eeM*3wSYj!o~Lei*bh`ieC*DFl|aj^JcIY?&^*dVb78~nQxjq9^pXBdwz zyRFA;a8L*tBvo-`PtO5ki6k6Eq7B3_#sl7)_7aY*tiY1lUZoVQ7uDk~uxsBWcl+{~ zRMQx0l1$WyMrGG4Jf;X2BIM~66?c{L>zJmGkpzaubd@HYU38->^=f@o3htG6ONI@B z@tikLiChglff0x%M2#|PpRU1V)t&|rI||Khh#YU)05Qfs`I}d*Dg>CCmv_&OX&gT4 zM2jL)(dV#*a^S&@szY171>XNM5gQHYDXt9e+DX^}c=}5_+xuBFGZ;}B01P?PM?Z8k`(v)E$oIWmlI;_Ow76jh`kpYOdLe^Z{-U+)Q}7W46X=r z&p=5o)PW(t-B8t+P?MN`AwEq<@mk|yng&*MpNUD!BuAP|w2mffG$8p0IQoi*ign?; z9Yyo;?=BJ+4d)2uD)rO+II-Nsg5sP;mpMMs-kLNJAXQid?ppe1o~%UOAnbzZWyE02 
z1?=!&6hf)P;~^GGWMh(`L7exxTwf}gBGNs7d+y9~R7J8yqgx4`1pv;OdGCxv*6NfH zSvp1cb>fN>SA}znAHkW4jC+|?K|6hZxBJI}6owAs5{^H$Uy*iIb~dZh$JGXDWSJjs zs#2g)f=`Ba1jPi|2MDLy5tP#UGnGW;G=JE)aB|z>YupH84z<~urbQHy2RJjR;k3^! zhe({sgVE56@K@Mxd}-y{l-E(bl}QTP8m9nxK%S9Qg0(52L-&!pacq*m0Uf%t9q3@L zlwklji79Hcvd=)_sSTgKzat}8j&mQoy|G=Lcj(x2&UWVzS!#~17fv~ zA@m~R`bITs8#-eV$}w1%>;A$HaHua%axftCULB@6gnWo+q{DpkFkQbsx)Rn)btVat z&RaxO3(y-M4~+(sX~W2LR0#}gy;Sx^QXJ^7G1Q^mAve%P1JH7EU>!)kXDuY%V1o`y zP;m?M9ZtPKK?6CC@!ZrN`;OVay@%L1^r-xtz4Wk2ISdMk?2;ckIftYnlo6=>;CP*6 zIWaQuE;h42ZX7Cp?f#A0rpzdOFRn`iVb3{sz&ED*$tl?aC1ail-5KWU)v7SST>7=K zsJPf{E}k8+d+TqXPrgfXF^=FQL^etnt2$1@Uv%#YAg1-?|L`yD??ou$ao{DpM0$Ph zFRW;R<2gvip3q`5V|?x0x6~ysO(#n=SGRs@!oy`Y#$<1h%o2p@uSY9;`$@%pGuMv_ z`}hJ`rEfCh^X-4mG!ZOXN$Bc@G=iOyj3B({e^>rDm>K=B9=(uJzfQ{%aTY08gI;Ts zv3AyULefy8^!FOk-q;>_op^8p1VQ{3?iXWJ$iM!p;O+<*4!kuO z^`ltvJUBA`f=>dq#Q`5#%LE{4X9{Y0&<2Ioxnl!CVuYX{&Dx&J>}noar$x_MNSgMj z?s4xHxDB1}TqDlbfG+PurdX1CJlJMIIt#6N2^u)B#2{j~8;k2tcQ2IM$^bd3O z#|sL~ni)pVW#&Yi;56jU1$lHh$XDRS)f^DEfU>-O@sLh&$nI{|dh;t^fsrgxp&v~? z`R7A#&TI3jXn$rFUSI6oyF#4%?G%Tw^D0)TdJ{$iEsbt{==|u?;QVTz)fHX)Dvu4x zlpe74?T8XwVAF`K0?m5U2mFKZS{Rn(69)!`R z&+QBSfaqtCD}Exunv%(e#I$P5gBXhYT_AVD&1%4!X$V34dT(alMKE!vzoqVK~s19U6Oyfp0E%eW?P2Be-We&K#vG+@ATYpZjj!+OyiI78 z&(^o8>27q9)%8q`T|D7$x{)|I`x)53!V93IYSjYfa>3+br3r3dO3j55YSh?i?DbLCNuuP$>}CIQSGnuy1H@zfoOMTgLxq4;yf7s2UFMCljF)my62EY;Hux7H32Bd-{rZ!mqqDPhyy>a^< zs^`k#7M}9!LTH@FZFkR)x#u)<+zzHD%(%LXC4g6ByGk3d84ew6wh|bJ!S(Dg;Wj_m zjG)}w`e7@t4Y3t?7a#+IxF_j)yqxkr1RdRItPqG~mfr41NDLCsbKi+KTO%>&GDo0I z-O_y|h2Xc_|K&ac1ty6gWqWjKi&;KI{41QBWUgH$c18n(zdF18;4kzVn8WCz7THEN z4c1N!3J3PF&~C}E<(1Y{2BY4=A2ZPq=fA`Hn^xa907X9=ycpO=K!s0Y1$8>&qwnB6 z+SX=RBDozD{u1W_!a07q>BcDit9nx;z{}hCqZwb)ai-oRZxt;CXa{CaY97R_x#mq% zoj~dC{wrqMTP1&cV-x3B$9MhMg-%G`@O2_h)!Ir|^c4&uZk1Z!n6#Ya?NF} z2Z5*`_UpNfwtlpexCV9y;}B$ynmCEHmWUjWB(PXwI9r8_0?B>Z2N5k*u7qaEa6&wu zE3bmroRbk3E-HYR*=sd>jn+Quk0i*o2y+Ja4 z^OhA#2x!i>7}VliWhyvBv2S6GGo^4_tywkb_vQ=Ek_Mx13sG)cM4uF}#4oxkLr;?^ z#uf`_y@u$!3%yM_@0aaN7+;{PZ|9J**pmdG;YJ*7dn!P(@P#xwM}=Sqw)$*E zDvn?23?Xynm&n$W1!MBZGkX55|Lu^bFeZ4R%-J;`PsSumPX128+r6|UVB{Y`3)KQOGcMjYqRvNJA26F<}T{rVmQ zRmU|ml5jxpP>r2H;6#0hdqh3#%Ins#+hsXaY)2&1Q~3nmX{wO5I59bG=Kd(hW?a6_ z-VIn-1%qJ}kk}>~^4ekr=xn>GuQr}rX$OM;WmSHtQ$4+Xe9ir0CZQUwG$oM=ThKsi|c z!J1Sc6|cOeuynkCOPV82lB-^c;PXbRP25~~h6F(Em%#geq+7erbGf#Xkb&lLvn~O3+ja2TDJDf<)0L~R6~XzWEtYkQ7eP`zc;zSi zu%Nj&%AA_;Ud`F1kc-Vak1ffn?h5r)vyP%V2;*IAEhgco5m~;mZ}J4`5`b*3)h1=| z6qoHcy)Rt}8d;Dzg+3pq_@L9DZ*cA;{|HVSDCP`9=s}Kg#Fj9^>lR1Kv1FjQ|63UG z9(f|3`DZOX;+1>|Q||}=F)uQn2Zo!=OGC^06mm{`Ow^)6!mHZgCSQAGbHE(v}IC;_nZ0amcM z0suu1_1Ug??d#6_od_B`YOFViywCqh>D4_7Cv>0>UG}HHlNcBCSy1^tUMT`({qNb2hr`WMO=I0 zU5KWdxc`5S4i2K0=O)&`0s&ZmUQ25Uopd`TElems49o`>Az=O^p>?xznE#bgAZYC# z6p>it>!^+_1E~}Kry+8<;DRVz#p!l~(IzW@2-IIY7bHmg{YDlQy@LvU5_`~YPnah> z+opcfz>oM$ITnpeLW4#aLWlBg1lj2#q$I1H4;q)pJ&P_+5f>ItulMBA9A=s?u1zy_ zf5%x@u6F&jSb(~7wq6w~()#aa$!;j{t4;yQGg1D&IL3Hjr2IxxXh6@XrlR!h@^NF>4enI5`Y5*}OL<%v)`P8GsbX<@$2$J$BdCRz6X2ZxE+e{* z-|DVky~5hvi89p-Zf&VDaPs}8V%Uqls|VL5soEDcD;gbyicniMY0$zCQA|(B+qbAV zp%c<6?QGeRZa$?-} z?W)K^@Ezp7(K|EKWvDs!bUat_csf;`VVFDLFZIbX>t~l)IdIxn$cXm%)-JN7TU0*Q zWX_)7Sz(x}Gf{^x>#eP=agI?>$e>7>2#C9b`nMU$+^xmkZYa~oB%t?2X z#~>E$I>|V&@w1<5;zKo4dBPp=7~2)vEvS^HL4v>XGmd-Z9RjDQP;tc=PZg{=O3z6` z0}?(}`c#z*?xU`cIS zYS-frNy^M0nmZ!nRBB?7DV|puXdGX6D1G`Yt`~a~({s>3ip}E@a1*!xqo%H#R{~f$ zLy#2!$any9DiB;|huoxj1IE~2u!m2f(}ThA%LH4fXLWYHnAee>J_4qg5bQc)*q#_- zhrsWk=D_9T>WWzeflp+!@OK+dZm(NiRHlP?eb?}vYxKS|vEJ-7$O^U5z_>uzi<0G5_l zL;uQvW}a&X=q7!%kl?d6qA)c`N?f0B;)^;s4BkvfbO&_xCqkCwPXOurcIUcm6h=8g zw?`(!Ik=?tX^ITAHr3SCnxzv(3~Nn 
zXN7AEZtvV$yf^;`rv5m!x|s~EtZ0s(q-Vt$j%^G#8)4t{2@kvH|1UkZON>w5gYJKZ z-)qG%{#aCbPQeUZm7%j2|Me-@jHLhnq8fTOx>NV{urAHK07etQYm8)eZM0J+izKBi z0pz;U;_&?EmVLJATCMbeme>jrJazgcJ{r}Yk-n;KyPFbp9qEE-2ta>-m4&+PAlyi1oFVsWw}IY#4#KSRi>5hN;1uUeejjXV}m0J7wL1u>c&8F0zduN3j>0O|!Ws+7v54UsJpo+1?^l z4`MEdjlCq*KBm)vk!4sEjZRkU7bO`1q$u_6#pdzY)Y>|CbZR3cqN^siFcro8`X~); zjj!)bm}!0*&B~kXE+w%16YID`sV&9LPwHFTWizf@<6N(H;*-RTE?jYo`!A)ShIZtt zQR=PikpAk&6s=YgE&RUn4o3!&#^NlCr7x_-pUYlmEh!>LGUo$we!kK8%cqx-4Am5@ zo!o?v@WZIrBaMrzjF+c^JJ*pXhmf4f5#7Ylw1auZA@YN2>Y{T%h3}#A;L0HZ?dApR z2&B$FUQ%s=(kbzWp-*qhBR$TW2`MRtGMs*WK)<4;d1NDvW4jcaY<@IcSyeEG1^}x#I24uWUuaST3E9S-mB`)nC023LKzG-ax zRQw7v#+sAgMFWqA$7(}6iYaoe?78Xdwc;W2#`D4EXAgRhnBp;9A)+=(C|CnIWycUL zR<+tUW1A(deMxE7+9OiS$IqdkDEbYxl`v9LM@lNW(f~YbH=FRQpdNp5C`>qrz9%#0 z2=6{9+oH;%&wXYjWf9e0I!6?4s_o;Z+w~kyHJwN1_WrUKmW1~^u(kJ!Hw*F*uiv*P zSc?e9S~X@1NK3qy7+d6z9xE&V_DeXo@5&qC*IdplqJ#yvzyagEG6ETN@yz&0H5e;a ztC=>|s&PsJ;v(F-5T#OP>(2F{1obcM)O?L5eX|3M=BkJi0)z93eIAQi0ThWnoQe+h zr}+*Yxtlt2Gy&zhCm5r%BNdvp40x9nWaRWQ?D((EH3||WEv0RYU-EvVgZb~E=L6|F zj7W)7xk08#OJ~Ud?OGY~rWf}KP~~>G&YT>$9Pbah8%U%|kkSiwZb9uNt=?w69E!5~ z6&Lrffn_z{3WQ#nH1eQuVi)*+SJJOx| zo}dJO^&9^m>3j_oy9aAhcRC>j-f^>mN-04)zXtc(UgLHrVJD18;?_idn9_Ce{{AaAQze zyn8D9W)R4T>H?9d<&Ix>GH$3cau;|J66oxh3hEppzrQ2}e?cw#GnymH0*(oJDtK)S z&%n`-be$P1fz6zT9K!Q&cpBGD{JELT6K90?yE)3kF%1n0d*ui) z{$l~#z1sLEK+xBiC^)*@qy_av86fUnSM-$>$BwI?mhcF!G?fvmA z&*e3GSp?AQ7=rW-X|M$mC`C&KJ2uNrXMt??HVY@WN zcmYbe26TH~n%TzdkkAm^o2@zWdoi+<%M;PV#gJg#eE=Z9rJ_N^U6W2_;SnD3(%WtS z5FRQ|9Cuusn4{#*4Z|HdQ|q_PogTMJh`ssmpUzm^C^BT`UUE`*2!i#j_uULc0)rdE zIzM!TH3}%B!m-B%>138@&6AyQ{OQjVK#zw zipENWJ#h(q?E*E(@TUthm*Uh4n1znWO;q z^PqQ_`9o-SLWuzG;`}y6=%JDE(k#T^jPy)2ioSJPto3OYc@(Oz@eDRcZ24$X1mGZ; zSYFUWgSdI(M+%7y1a2rH_7`=*T2Am$BB5jK`mb1Jy8Z`%=r(B;AA->lMWD_2kJkxN zmK?z^7y$Ue&+oyWm89kf`;Iy8w>6XwpT53vURW9)Gr1 z+wv-0Cyd4_=fc$Eh;-QSCaz`b%k$&HR^|apT$JrWBXg0dk0%P1H- z1=Lzb^dp@1Ou1R-m)-W6`BxRMH>0LTGCV$Y zg68&RuYSQ#X)Aq|*wK7)&Kk@mzA25(AQq8%CN$^S`BQP3*wxgT1Sy#F6`FhJFs;fU zP53)Qiv3{s(~t7^3Ce)|cKM>#TefSUG}LUcc+NWG@Xh`I!pVjg{Z_v*QpE#62jiB` zOQwlTRu9e~-hxOv-Mv!16d`cmGB5s)LT>+1wfkhQDXtKXGn5qWIa`Wcy zW8%TW+v>m!}<@}GWmzR$(aF?+om(P ztF(JL(FaPS4-?**Ci6&T;xLy>H>mTamo9vNI{`Njdz7U|?`BpRgs~X# z+VlPc=xBaFHcO0SbfQyow`@kV{cYZtxgvc8t=4%i^eKhxgJ=!_#a|eDMfgsAW0zDs z8+7cnn2ALE=RkUomGSqt@kXgrAn`+YlyR>sXC*GQ3@doauB_A`WNs4Y2!`PFcZ<|1 zPHZ~DizR>;Y#LkT?0?yyZdO0_7B}}1pEE1~Xym-)OPysJRk7vVWbloqqrF{Z?t}|{ zIqe}(S=UL1ye(IfsCvHc(or79ck6ZrCO3(c1nms4EYU+%JiXj`_20sPnZVA2ew7n# zkk7)GJ?+NW1-J8xZBf-_{4IE;#}!~$oBd4(rrVI{iZ(9ipk$-qI+OEF@raZjAz81L z=~57Ro2}H@P#Nr2fZJEm!=)K?JAB<=?MWM=HvK04-|qxmxJt&QWs4J|J7~`eZUvt8 ziA)ZgGd#ANd@!)&H?~65{NOZ`Fa#-D8_ecVjs)Q6YNccY&ejubl2KZ@m=)0L2Y#X2Yejy|v6U4C6Bo8_MjaVbC&YoF_QH z#K%984<0W}ii*gr5xD)?^+5gf<2FT7_ji$7*!N6O^exg}8vUrBzR=7aHPg=zo!K7Qx<%PFBcL@q=VhIt*+%La=_xRlLn16(rpo&ejsC*Paq z3{656f|#h1Hmybk91iOts)$}mpV1<5@)dd4&kMgsh45X!x^f4*db(6jjB4oQ-xQ66 z#FiZ=fIGD)XAvU7taf!hI$Za(xy8_R{ElD2XoJJ9&Y@Kx9Ct`S7zIjd+lh?1_En&r zH~Nq+U|8W?8F?sSX8NTIIWeXDS;8Q*^nn*yhdmUWboeZA3YL4JfX%U}IZ545_IxW6 zK9hK8Vy<=bZeC#ExHYXY8(f#6ko-hCSDcU%x~WiQInPXjNv8gM)l``B@$-pZ3khzH zmC6wMN1bqumpbcbg6kl`1o#BI7E_a|(5uFR>n$Qvy_2A*OILe1LZ|3bBH3z)p{<1{ zbd6+|LsVp;2s^xW2E-6>SzxoX7Td|vXaQ%0z+T8QmYwgKc@*s!UaNooK35n_@3F#r zo^Dwp6dZaDypp`SSHGJ=8QU+ed#I&_s=1+^eM{+tI#Ac|&QHtCww{}ZLtcwEC6A4~ zQ9C$&`Z9k82wYR;gPW_*z+E@;U<9nxPO|bQCSXrDue3=SI)|u8M>Z?;7L4&4Tn`&_ z07pQ$zoZB7>6pe^E3{gL*EP}zH46_CoV?N;GhJ#DAH2{&1bXWe9=?I9i3b#v-fsz< z9~VP1Z0+^+Op{V8XW;ExyC6*W-Z`90BT3VI^dK5da5II6Br=bQH3Kmc;zJwVln*@=`{P%_r;cT(KbTaLNZIYXpd=syOBf-)~8~+2H}* 
zor+E}d2@{QCzNi41xL@hkwJFstq8kIl4TxoTMd@~Jc*+~>v|08dYkf5bh6D}Qny99 z1T6z^eKmr4r91g)S&!WSTP#+;Cku}j40g#3mfl#Dnu@rAN%lBuiu>j({fGus4+0!N zUz~cKt~PkCA|e>x0v6={gj5?>eWH;!PQKi7CW^p_|58m_UTb$XuCwrh*qklFUK)1= zEE93rq$W$TBK5*DIyhe)Wd;K14xJi?jTxScFNxAvDuvWF)lH+(fIfQ6s%1OCnZScOK9fO5G}XX@K{^ZXR`(^%aBzr zz@PU^O?!(&g&gph<|F^OscvD^gLrpTxzW=}%~_a$_9D)B;QQcIj)5U zA1dYF_vU2%s+JCGhmPA@C4@(Pzxc?vePf+fpGFIy$zlrXTB<@Gw*g-I2I}9TL6j|f z{I2~;L|T{h9FCau+AaL9e?Ovo_?^udl`3o=xpwU!z8Ypl+Eg#Tp{l)oc67S$A2k2# zg31pn>4xBpg>@a34;iw(3m9`70AR<=PRRK|ww8MU@jfFruaUnTFP%EByH^B%lB&;k zYVaDS7RW@uy&R8U6ysbt^)9r`PI{ zhm6D_K;8C|t8%U?1``gt4V{=)6J-a2w0Zxl;9O$!zKdK(HE-ftIoi@3hah(@z^{T{ z7r0d`ZyOz-s;%lfiL*hCH-O`|{rJ`gK5qr%Li_4Q;4DRTVlG)gT|U5kEfSp$j;1Pu z$v5vB7g>Rg>BY^o|LsYi(MOwjm93J11OQL@Eq)b9y*57U3}Tb<2f1K9DDiy@Wwg^W z7ze%Ei)`WXo_ZaFMlMiL?E&)l?Y*8Ny!2`Xh69MRyd|7*oJdD|=d#Qv%?AIza8^$} z76~61IDfpc+U_<(6=(qx3*(dulD=D%uuVY2aDVji48_P7^V10<%{JGG@LhKhjKJad zt%p>f$^i7id6D-FT#5=qd0zApz+C-A#$0?VWQLzaX_I6by%%g8*T) z=^ta7@54TNdelXK_xN(iTjCSc;$%cZ@?JrPs!ni2LUFsJx#X^^bah(%#-;KfnRX4B zExj}d3eABlv|Wpe*mbKAQ+@rdPt<6?E?>&`&sUv?#)(ztTI85xmu(eam61bVMeLkr zT_lsqk}&PPbpu$Cxl8g<-t-mHjBGOmBNr?T3hu*i%BIQCYfw)As8wZ2YtM7jhRa<# zuSliOkb-|!URZ~dnrHpQ+q8wvdVRR-(6-(i|1NS*skUeMQY#$P{T@ovzCrdv;z>lfyHYY7B=G_@r*5^=={cT4$X zFgKR;497eQ^fR>Ck(fi@;H2~2t=lm%>Tuu%XUZ+}90-z1mM4vrVn|TOvehw%^?O@I z7`xHk@hQv1fAS?_!13i;@&|I(5nA^}q*}jJ;0xZ0sCGMKWz1?~eL`|KKQ?-jmCleX zNngsYQb0Xb_&?=FuluHxJ$%q3RGz!?^;2EPw-Im=G+#q`&G8lSTzGi&HLG*q$Od*< z$y}$C6D}9tv>qskQ0YuwIL}AM3_g-E=QdE@%%O9O)p@M@HJwSoXeaq4-NxMI63c9F z{HW(~0rF`AJNF}A&)~L0MdKS#P2TA$b5t%0Xt~+Gy$=j?`?dkGO(zop0jJ68@W}xt zt2TkG2eE495tP4U;8%<`zaTy?hBtqet|^I#Ht9K2Sm^f;}_8&#}bA_KA_+urw;NRaVMYD&D)Sd zRIZA80vME)#*j9D0fWLf#n>FDtd`H-2)G7ZJnkw|>HrUwqIPT;k%98F;|BL0Lx>{Mtyf|YvzJ%yUhwdhwW$Uw|VZ@ z&5-jZzF_o2YsQu;0^RT@Dke;9cj9O+Ob;=)D-!9F;?Mg#BCdZS}SE}Ylq1xRELQqYuLKbx*SvTLwi5xaP zy@N)PSNV9@LBTA_rL5DJ9t)sKwP1VzWaH&%+DP|r$Xs@&i`2lKUs9lWEQI4hZpM1X z<~|<0R5nXevVZW%M>J@PhrPczy{VXy)#E$}LIaLnWe9^H>7wp`_C{k!(NQdIONeEu zogdONb`12=Ko~=bGy8K?V5cRs_2ylmfeb-EgWUB9&a4t2|M$nOnh-+OlI?P#V_NXO z;Z_|MSKB-7<=`5x&j@L3PeN7e5w25H(kt*6P(c>ho?h*8(E!_e)*=uKUBF@)wlRhhL45%!#%W*pA$@259a_1`PZow6KGZLZ4gG&Ty#XUX33~n1 zTrU380$swHR?Bv{CwA0!rFctRl zHXp+mP=?AK|3mKD??5y$67@s~p|lhWU0|5Bus)Q;f*(td3SD+?z-#)l4I=j8Jns^kqaHJd;Vh~gd)7|sx4IsFgqg@6@095KL;?MT-E zAc+$MqFHU4=afl}JH3l=d&Li3XR6SjtbXpo5<*KukY)_bI*P8^;TRyOY3F+3VsYGp zNAB29{;!I&uLa6F`o7ljSPsdGgThY+r{>R&wS!M4^J5_%`kl2ZSp1GX^i3SKbJ5B0 z{9_#LDrMFqg6sQWLy^(43U%qWFsIwm8j|dFH(b`SmmrHe;#dVG&LN0 z(d;@N7+U*N-PL++CjLQJ5UMADBq)2d_J`uhF|$y7WnY9Kxz(Qi4}9`{?iRzp(?M*+ zOurx(6McNj?-iLMQz)GTCbQ1+Sk_@)ZW`nJ4EZLBI*-n9ve9!W1{qV@5z1@;1@r=C3U@+7 z+9&&5(u$^|+{Mjr#JjlEdVGZIOwS{qb80<8Yde8315K#Dl{<0&4|3zF$D|fD@^Un? 
z@`po#$o*XdF_lS9;P&(T7(CV(fDj^|V9MULeW{D|TA4I7R*|I}hlIa5ehImrnK1N+ zV?U)NMk%S;`G@L6NMa%u;vrhh@olF9mkRG3GKMf=ABn^Ww%xm1Cba`X`i^ZNbqE~W zHLyp_JR=Q9rltJ&KU|I&J5iRIN8G7B71FN11}xymrL)yrqd`VMVnV59fzTzK>MiOU zy3OB+YuI%sI~e;^TuA0( zP$14_L3s_ACXo{^a#jYi0`DsuXYz#SixgIc!HPz|+P>N~dY+9>b&-U8Q)+v^A#r2d zGf1vyiuO22$Tp(P$gf46M!X8Nf(-G!OuRCZxrOvM@2fFW9`u1g(a~pW?Z|&VLpb74 z)LAXcMx-wUk97UmUnq3~>?*ATs14X^jrZr|mF-+6j8ZM{BaJI!3RFw<2KT zChs9J?lzTGj7 zd2>ZZz(^4D&d@(V;DOp`KGbNlLaODCRy}h$w>1=_bfdDWtWMc$I0000000BXp z_82e!`6;I;TILuBYnNd-;Uf7^{@t!4Rb`pw$&GP-2%|19WeGUyk0NS+|B@yhwntbl z{_oy6U9y@}$vS1emo12Nnalx#m^JDT4R}-0LqHVZ)^qG`FDeP8Vbu1p+2>+O{^EM( z38kP;=^}290+9zh_VhWqEI<@G0&Z4!*>Wt0#^{~HEJf{#F^Ow$YdIioY;4bc-}D+i zLwyT+B_;t^kJi?r(x-u%FM4YcSps9y3~4;ecF?529>!@x2szjnifrgV`fnC`0m3_9 zqr5Kf+IfY(HKb5FApkc($iMVzR*sSeFKFk&?K;SB>2K}Libx>}iI;LKHZLQ!V7_!h zFyIPdF2*q*qHv6RNo$MV|77#PoddX1ws${Fq`b$97w1ON_uykuMWNzn5 z+6S)blRru2@ufL%`D$Fq^Sfx@h-nA1yE`Qy^RqhbI~!&&U}Tcc3&b*Ix`1zE4QpC> zhRm?OW|{63IcO3m7zFm?7cY(|Ry4d+Z?XYd@^#hQUh-qGG!`>2$Y#thGy4ukZjCaT ze20IfO_Y$Sxnw~qo2!X_NMD7kxXwCgIh6@-BZbyURXx$H`wvE8#x(JVQ3)goEGhk? zpO%HCw^BX`@`5)w{Ij0bg$ZN)4Q30n@|bc~vOXQ}kzf0gYTcCOyGtlKhS5vpQUbMu(Bj*014FhI=zjFyI zlmbv(?|`}*rlkFXb;S3nnW#jb3$4bM%4bb;--iZA zaeS~8>15s1mxVgeS`8$(!Bz7U?=PCr<}ez5W9; zoZurNkREhCFFU4)JV7i?g-5?UurlsWmm|$KXP<6QW`fe=PwCAN-Fn$`SjD%H*<$|_ zBZ8Dz2APNMk!XxNBv8^?=MW(XM_qgbF;}~H(t9EHDgQs*4{m3pLEk#r2+Dts-;tsy zvRns5>R~Y@U2!a16;w=zg9{)3P?fMNtc}LrT8M7l=V~=n6hkvC0+oBtlK~llxPJu2 zSWQSToY3^5W zqke~g;4pQXRv=Q}Oqe2~HecsovOMB{cDw`8#0+ro}=?#dy2e$S&$lOGu>+Dm{#yTLvKgR@K!iUHu;z3Z&*=tjt}}y zDf`gq8Sh?j&sQKGfLPxh@ylbnPGk1<*Z`ZEp)zLpL_luNWB8)x3-27b{JhYK0bK|Z za6J*-Lymz%Lg}Ruit`FhLc-#!A4_Uiw2lgnjiA-c+|M5gd&^FKSb6j+o)PgF2^sRq zaqrLuy!|8a9a3p$+u*sacvT7fCFw@A2`W`9eW~NG8EL(&4qoNkjK7~&B3B~kbR(}E zU}?E%g@S{;$i4fAdDy@pn@AKm^gBQy4A6UE+Xg63MdL%IV=o^iOH-mn%XIdY^^14l zVU<0N9VyA7Jsp4*z3I+%>`p}Bf6|G1Oh6CZJ0sTq)~tk6ub_wg&|m~6zn9uXBBMxc zdjzpRsMR;Vaaja2fmfI#c(=?>Hv!_yq$rwTWI~`aIWJ0A|H>&sdjDKc1&g2qZVw-? zCHQEb6(&Y}zeB(ea$lm6yxeY$sBd7$!kWi*t|7#np|Vg*6xytrF=F7%bCmTcMciO( zg+F$;9c)9w5pnzLAWcJN*cwPN1Jr#gaorKvrpz_jl`Ly&&kXrVD39FIfrxTxTU8VI z#co;)@1f3Yo;ehTr#vH_wl8pqPTmSnUA_u5DHUHm#r-HX7eRn_|B*|o*vsFZLX-4t{SI)tj;HwH_U?U6 zzy7(9fEQe??nx+G-x@~SV_vfDMuP~iQ)r~Uycm_voP49GOZjbx5erYdgK$Ey+(c!Qy~FW zBNu#R=?1ID?0+5>b;T#+ZyUAzefNlkVu0=^+PI0ro-;x|8P_D|jQ|_fH|k&Msra6W zD8><<{o^8`PXBr*N+W#Y{aRyPuyLNuuH~uWoj9}QM`1zy+w4)f;C&YQSq&8wEc!V< zwJTgmBUUoU7aj2e$~J{-Af0MuQ?kP*y90%O$0j*@_*45pu-BPB#g~1+c19Qb0{Gt8 zcN73PGBCGfjxQd=mSV~&d0rC>fl5R*Z6!+Ad20)-yqVMk`U`jDPpE%d7=(cb`Vgx? 
zWD(~oV@JKqTXWbP+X`g*_M}TguL3;@)&QAe-Boe=ibII*KsnO=B8% z2*1{GJqAAVNy!ohCz!JNUhOf#?Sz6a@j<>Yif$eqPxYjCO{?$ud^eDWSwcf32`1hG zE5c1B%Z?P{EZQCBbgMwx3E!yl7Dbw}gi1Bs+5=8~c2+u>M@02SB_5g3Ukp!7qfVaN za||ufJr=ORqekt#GPke~xM54%r7?j55pC8bthI}b?s97v`LxyqWAQb5?VGKd^&=^; zW*=}w#d;pVgj~uxF9;Vei=HO0o}zS} zt*SzJq~1)B>Q=IB9n1-Dqvq)LMr$D;eBvio(W z^?em8m6CPjxX}Ux6h6BBGW^js8c6gI0&PjPCgbJYiR|z)(s1zQhC38}xC1B_nm31q z_!OkRkKaPy9yqHUuwF-Y1Qv}d$G5Ngx2c8hnuljto1vMZ)>t$6*?|&jPRES`k-~JN z@hxsZ9lKyP!xDh?XVxa}6l4yH7+4q>*tyDXGs|bX{qGcv{vQFRhWh(UmHj|*mAzg> z-ITaBigo3`DU|*UFw!p*5&%xH_E(-KU2sVN9-nN9=-lM!i0sOn_8{~PStO4Hk0{EP zx7OEN7{hm=-$OEu;BK%Q4Y2v~mc$9h+OnhNfSN|ShPXx5_8 zHh{16j)6S0yhrBI4**VX+ksQUR>u$Sbn3vlfp3v}5oUX9 z-Dqc6ha|1p!OEWT6jxR2_`6-Lt}CeeH{wu6#+4_R2Cb&RW>QJxW zl(ZCvxYKSc({@RI{n8Fou0SPmC4G^mehe;!;2(h0c31BY8v#<13HMHah5w*fc>t%l z;Syg}YzsYxs22zCc}*7?vq8TI9GBn4ERq)=i4vyN}0}~2N zbZdGG=N~>;jY(#T34#jLGb2u-7Prs8nt7pVPj=HvMdCdXYGMWT`VF_$74w?oxk|rA zxG0E-X2j~840;CgO?EOf9ENL;iS4slNPw9)@ghCjlvKQ2dy9u z%~41ElDgWSzH8Z>vIMftPcqI8S<1foe)=c6x0ofw&l_)3PCpZnjh55IHgBJ!;z`k| zY@H?QI+Jz+?s4%*luNlstrF^B?KZt>v)Ddca-$1W)=B1UIH;O;#z7+L$)LfAKMdhS zXxf+tXswkaD6G1(e9Kl_l|iJ8N!#=tf{%=^fP!~E=vZqXslj~?9pHyJYTTP=fq&MG zE&ne(-<@ptcf@Hyx<2Tc9W*K_bmEN5Lu4+SijVoTOT&`qTm;Ag+(9c zp-tw#b|I}>Pl+QTl*T7jWu;wK&laovmlI_;U>@VAiS5mc`TDv^$Z&&wkpMm(GrGd zeui-v;9@TIZ z`6;um*ZMURV)6#7kuJq+j1TVtR~TrlfXwJPxXiTbC$RIBPqy9jw#@K5es5&Dlw6r( zl@-1Gr82XtflGjN<-rf)Q&E3@OoW>XtH8UI-U<|&+KZ?ci20`vd8*|-x!&JPf9(y# zR}1$?qmPv!z}#p?>Q6N+G`Hd~HJsBHkf8V0X+Bl2KrMhp-QVN;**1Q=I_DUU2YCYj zEJXLJ^7MLi``GSY*QdrEO6ehkQBu)@uqWs=K=x%$R6wOZ#UH=mAfsJm#3n`$V!px- zn7g0!)reWesP!XCu`oyKJ)Jf|!5TATqf3$)v@L&C_vOyq@0GDI9E zUYRRt*t^!lWmJ8fpXA`)L}&=hE5zY;O5>xr@XAN}mYY6l{%{QlS)Z;*!;^1e3$4T8 zQJ{(;xDuY=)K74YMxRoe!2@QF%&S@a76om8GHnYjYARz-ac(6E*9FR6xyg?hLN!kW zMp<{Ci^d#PmqvS^SXH`1+DDV0>-~sWi5gil9A{3>J1}Jpq-f?Z z1r^yUPSu`R1cp%>EYSNA%VZ2bUYl5#6RqyHl)`+TPA#&1w(;iU&uM%ef41eWL6OO= zv4wGjE<}#!lKr)B1JMzodV!GqCA4;CmbRMqzx7*5V%AW+E7^O)n6g6j3kby`5#rL% z2y2#4s^1B6Zj=5%yV^jr46e{Idyr160>gh>?U83%l6am5e6Jj~Yu8m%*UI}xQC+Fp zn99YIY+wl}$Fa_{QR7b@vYLk7!j{LYJwia(zFcqBzI6p1(O5StGNjEATea*qV71WJ zv&XOA;X3uWvz6#JLGyBZwALezgYpXH8>aZE08H{|2~qwz+SNIdmFCFN^3-|mz>z>h zd<0r%y_S@6aD%!Sh&yyx4XhCC(8McXISq258b{}C+8R_jeaq^jQ?E!WG|kn*ZU1hbK`W9sqbBer8Vd0e zsk-IE@RqCPIsC?4=Nl9cOwf);LOE)Tx5x&R2nQ3uzRo%?9sK5+mb%7n7Rtm%ya&a@ z_TFa^`3yG5I>127IHx4%B#eOoSfQiclBorNDU1xdZ1y+m9d{U?r}hLMG(Btz|0!3! 
z=8%@eBe%=pF;{A9(|R7J(Q*ry4E+Btc=eh+nv8`wXF)q*rQz9Z$Ur8l`br!B zXfIojFrxqEX@%sk>`LT}>ub(Qvmo~PWv&URGt9RmXZ)7m#qSZ!*=lmJXMk#qEt02j z4`wkyast@twiQ7e`1RXh7`lj?9zLq5Y8^P#nC&cCWA|~1RTr*4zx#rsoQLbjU$EfG z;^!B9wJG(?!0Pt)7_U>|;6Dva(#+G5j={(;a(l$IB8%6z7XeH7JdxjBrkfxzFPgNx z`n3SVLY)@nsJukc@x$U|T-0Hh7a4|9ouh3rg~!Rw#mqpbY%JX**#rL=Y!u4RU@N@&d2nVc37@Z85tFyqzW z!M>NvpYKesaS{Pen~il^nuzB9HIF8tIIG6aHPU#kQyN8#>q*&W+Uq?lrz-WDgDv`# zCDc?`1VYX3Zh?0qGS@*QTo{YMj6s)C!|)EbYx~!qf+y1ld*{@1m3d$dT|M1rQnM|y zH;>A>0fjsP5Yj&AkxppPpe3(bljJyArq%aw9xlHvi%#fol&|dtX({lAK6(f_z)xKQ zepGr9Av##zybUNeUfGeoQ(Vo(_k@sP5<(NgT>jpt?A!*mbi10QL&W*O2A!4smUQjj z3^3PK@IknQ16BV_+66)9TRF?l5V}l{bI=#m(the}6o63&=Bk3LftgN?PN{NB&2Z1@c-NwgZb>%WDn_!MJ z9{rAK3?$QiCdT%{jNR0|2u0VsQT266fYI?OZ1=ie0q{0GeGZN*y~(MTcXrU{J)eMi z7oWEQrpQ>%*&oQ36yyc{ROT7)R}&{$?)q!TsN3!sZ8ErUo}hS3`mN@&i%_+z{9+C} zgL)t5E!1{+2j&0k6)QJ4ZTVI2>|X3vGfF9$`H6{ zyQxSr){(^=`4_YztiKq06*sLYjK05tG+I)aeHkMP$zfq;5!J_NtW-&&1ku;2CwcqAUt$^El9{vUV`%{ja5xd*;|)q3 zww_S1PqhDm&Z(DMBBky#Suz&v-##66)kI1*H)!p76^QQQ;T3p5KWxu(@$<~WV`olW z1M7#T>)61RaatTrLI@5%)M+Lf>aa z5C>jo>rWM9u7!|%r!fvN;OmB0MvS)@%U#NGE`gxv6T%{ct?_Y`lxx2YXYvsbUW~3P zps^J4a(+K~`ir}N-qsI^t^(nP5pu%ep0YA>bI%aLmS9rGdi8P7P*>QR`_g0QsKXRD zS|7S+7t`(%s>1wL(FP9r7MXzO`mtRtchfk`)J&`<8$#)(gVpqMKdxL?$A`hY2Yb9I z0^>jc;L2pwG*@NULnXeb^SXG1E=@mI1uJu~k#Q@s5WIh=F$o?G z6qSnN;X9=j%AAIsCGfpM8h!h)1hj%tTON&WG6DPdE7f{g;exgS)71IFuZ2SKrHe;G zB)HArK0n91fIGHu-dCJhy0IX0?QR{bOc`Tg&Tj&A(z@23*@%kh-oiU@)#m=?WwHai zP--#3%Q@n36ydJWt`A9PJq?rB%fm)2H0%qF1u^^F*_H-?`~C+iZN?x;z4IH8S^XoA zUQTsnz5e7xE7+#Z@ObKJV}(!`F`fewha2I(k7GYud2fR3&w74^UD^rWG!751x`^?ts(pa3 ztKYWRyAOV|65k+1DDA?rKQBt)&UY>__8~M2ooty=Znf0Za$}p3h!UD2Mp$edWTJ}V z!GSRC?6X$5r<>Tzd4i}tK$JM7g?T-PhmqdW({);nBlglh=N6ez0Oiyy*?{w5tIxph zAyal=s1CD|Q*Y-AXv)R!wCAEf8~9D|3BIf_8sJZ>@*$%A4CM681i=j7UcJxIaxW{2 zspiRBwb1RCGH#!k0N22Vozo(k=s2|t(+?80$~L^nNK*nzfQ((v5-oRgfN;4p?Yh*n zila|BR_ofkpJP}NMe`>@m^D-$EP|5VrzKwzz@RE9EFCAvb`SXY2JOJZ1TSjFllcP3 zWe6U-G!(awJ)p_WD6@u6H~1X5j8RJi1ZxmaG!06BRMf3IaMthS1-P4Q7pLt9hZ3h% zV`$cPhu3DB43pd2Kq=W+aie!o<&@D{2RVbo$ZYnYsv=HP+oQM_a(uzTGyk+30Uij6 z7U@xv$q#FB*6sNN8eYQ6&tp`vt01qkTAsgV{3qW_a?xR$Qod#(g#^ET;lyKmcvOMx zBE8+fP$BtWd%GsCt;gvh8;$n^hg$fzW>p>RzE^ayl<%jfX%IeDPUE_*xmv;M4Z*>QRWsEuq3m`@#xF|KJEerTmcU|wQ1 zcw{&zwm@S&o<~-?fB2MB`wp-+GpGUt^9(SQKtMd~FaVi$8hy-Lx$|;VvQ>*grgRp4 z)Hn)+awwd!S6AS0L>#Ky%ZWU}^h8Bik}2G@^6P?TQl`g~z3uvWq^b`U?-_5YS|05* zG(_{G&(BLP4^wgeCX2dNS4XJYnxu$zp+8xdZq2;#c%;Ezy0pVIb_Gz|fh8zAAv6#p z(4-5q(rlCE-%kCf$zt*(Vbw?cfA+B8M~$|ssH6xS-7hpPrIWkev`IGg!49V=QgAZP z%k0d(*CP+J%3*()84-5N&zXadO!o+>wa@{l5d#4AKTLIYgA=r-ICSo5x{iLOSgjGb z<7t}Sc`nXYATG>J2+tr|i_U1euMJv%U+Q*W;!pldMSN*S74W|qHNtNv!4|OnV z2+?D%>BXU_jw@aJzO?ERAtRnv-iRarre%1Zm7VHS%-9MGbn5i0;SZ|9_|Y!ka=^e7 zmho31gFWtPC)up~Wh|lpL~aNm6yQlPoUO&>Bd;0kf2uUXEt4D|!L!o1x!Ne|cD$w7 z5JvphPW5HnY>1aME1|cn(2q1WC~JPwM7+|u@@xeV;}%5q19AaXczksIE~{vLjxY$MC0gwi2$l$AHLYJ@Gt?c+|a-h ztufZA#}S+O<_@A9n-W^*-0hdV&b^X&>N|qTM$y^{mA?QR%9v&E+jE4{9@9Wj_m|i- zq)CS|k{@#3s86)19+fTj!-aW^ov5}B?aP2_Agqo{Ezn7RmI^pCNP+Qrzl5W3LIt9x z)sFLf!O}!Q{<;^M2^z-+JQ9Uxa^TDSJsR-`SI$* z(hm5*C|Rub1Q~QZ(amV!<_EVsKJ&W}i8DP7DYX%PRI&)jS3UP|s=UQO+pweODsxI1 zu8Oc_gIEDw$4@TVv#!1EwgL+Eu`B-)Yl9_YaEd+@w^WPsX08HpEZ~0MQ}68~K}h@6 zDx}7#U&}% zuswb8UKD6I8CT9->ZiZvT`SN}7& z71*SrY)%wH;FBXe1hp86R_ZhqWp@%uim97mAm&G(F3-fl`yn7KG8QjR&0z9b2(8*qhfBD5*sRXINNhlE*$>HHV6dsKMbYA9#vb6>OmgXlxcmTqp`7* z#2W4k2=A|y7c+26)TaM)(+E&`7UTGMTw3WBJ-;}=IR1x7W?}*XW+b3XrK?B)`Just z86^KoZPRUNevWo_V5GU~gM3wzilEOi;WW2CtdNhI38p>`#L45==vqZj4x5`OCmKr8 z-qm`>6y$o#do=4&wtvUvsHYmAQT2K*@vK}^;}feG$2s*ZLIqlFZjF*cwhkbUMCyi3 z`Zj{m!NzmwlM;|-&DOU$92yZ8?KB;1r;XH6RNp5q9^nc 
z$D$=fZ00=ObjY$^!l49KDIa?FT)xm!t3Fq6`@eS<`6F{mF5inJizu6SboyNgjUR(u zfut~QrC1t_4y8;HwgzA@rLB#i?6T-f;Uo#y>r&a1pfO8uLUL5H%Nj_nA9=?v^>b=& zN~+(EV!+tivdM0G>p-bU3L)M<+-9i!SGE6dc9jrJIWA37%JBZUWJ!a|u+bi_ccbM= z^Q1D}1zM(|MW}Vce8O`hWbLdf-(OcQ-+KP$A1HGodn{3S0V5F~kP42EJslxf&@tVQ zwt_U18jet8U!2D?-~ zVRkribjr0M>@CS`%s~8i=g9UB){s8RG(XBr_96{or7oupN=a))gUQ_8N*((3 zdu@9}#RT@9=@~G4ovy>D;L|XjM)QJ(S6^Nv&DuFS zKtj!ko?-{mpF7lnk6~1faqQ}Mdp24Q7IsC6DF!`8<2%>WBdQG_zJpio?{I232P>6V zzU1bpWL~Nh?GWw^-NS5;=)n8KzE#Tj?{7R)NgvexOemZR?r^|24IMgu5Vu0SBnInZ zakPtLJV$$dGZIw`kG;A5Hp_IyBWSPZq(1?QN}Fr&y#4=&S(c&gCSo#V!%?-PsWhG- z{m@GiEkDstZJd%m6($oYQ??j1i&2@0rhQjdGWT{3z3YOhP**eSJ;ToFbK$&ZHeYD5 zwKxPMxW!8Q05@EU^Y2L$MSV~wuOpl}O&LAK`~7V>D3o=W>Xby%MNuvj(ZmjwX1(w! zww!hJ43tG>Vi)+-gAyn*0e75oyQ7AM*)e>*l(LZGpxyA{jh};!9p{ZI8Zc35mSfy7 zs(-(>!#nv9O2F~oIzz1@U7Q4!_X8WqEVwBg6$y`py%;v`2~R->{|7?xuxU|XXag%` z=wpB?Q^@n%cIW-=N($@|$z{=qI-i1>xAxU>p0Z9X=&38x_?p0Fn4l$Qf0+A_AX(g0AO$S34f2mk*$BSJl`*ygmbFDD1B;4!Xxe7ZnppEr!?ZGow~K3`Z#+VmUSqOd2r zrt^&y6wvgt{+@7WW$CWzK?Vl#EFl|&bhC7Uju>DLekHRlX&c=OLo-7sB5JPTi+ild zAg?#JFk#d>AkRy^1jtuOvQ{umPhXscoh7}GFB#5Kw8GI{kJ>{-a|Xz#^*<5|LaO$_ z3*>H)HZyPrr6#ylLNU^h`VkdoSUw5C9!FWd>gt#%-6Gf;sQOxj<8duJIahbk{*3bq zcq%m9@2$}q654tfrmm@O_(K@-!uy4a29I`Vh-LNYM*JO;5qVkx!{KDU?r_Szo8HB9 zfb5V$H^Yeu^d+JZv?V(^-{Dsv-52r@&oC3et{I?#U$)Tu87$J?_goU8 zy?I>3cP7Wn9<>G(ULu#y!N_}gL$Bt_j&I#(TJKEu|`0pOo-MlxCf&|Atx9?!~`*;CmNNj?V#KIl9oQyKIwqDKOGm}@Lb`k8Y>)*tABU3J$K zXD)Wd+Nh=&hm=8;@4uG@2g|f8{nT9}zQd(mX~Pix-5-|iF-ZJY0XE@7Ka}#6zs77V zB`6C8tu6TZO>?nwt#^pYQ))IImxlE03oMmRQ3>U%{R)3XCseI~*4<5Pzie<~WNB4w zHu3;;g>>IH+%|r@V`_#%4QL3@+3UAN6r{-%9%4wW%$6AOu2@@~++3LL)!p#AdcL$y zYT&5y@uh#ciOoFw3@PSDtR8pac3&WGh&G zYwUgzP90lbVt8MSaUu?~0c&1U z<6EE_plP2wQzU&Qzys(IxDT6jTnqQ}y4*pCCm>@&&?|j2ltOS?3Q@ESBf1?74SaV3 zds@a1b(^jJ&{Y6XmNAB@y0=~EPa+LouR7VqXR+=ne2$oT=!%gq!UhDn>fT-*mTt{f z$QnT}Iu(K2gnN^N55>TPvTqmzp97Cnv@%vowTFwP)-do*@;D_{6lau;UNQe8`$2QUJFl47Ahexo zV7p>_y$KSrXCT~3j2gz^OOgnUe=zWl+9F42YUkVj33K@4eSe8@{Yam<;byVVXb$$n zgq{pM*&IE|@*|s!;=_$%9AWAJ#~u;MsaJ}ixmDU~<%*c^_FQr@-yDfW4i(5PegM#i zIy*LF=&i5QDFbpxr1aScG>=M~!;|0)RWs#VO&dIb4IVx_J zI|{3;(HVL^!3PYK14I~cSk=hZL>|P$!#9`e@$u;cOsX$llqCq4&1pA!2sQz8`ML+* zf3o_A_=@8od4kA~@2I}GBrehTwqfw}C8@#+6f7VATd7@}-S3wdv#3v0YR)9x)&;{T z?~1F*J-+X6|3dxAbIz^<^B$}zM(AS;Y%i^0R;B@W|HJj0lfgwbanN`s&$tb|7k4}PA!Lvm@_C38FLxsAjG*b63?Zr?_(Th8*?Y;U}KGcSqE*C@P zPbh1B7?_)(n5#%RD1~i(2@oXTWU@PC8JFQ#8iDp~o$0l%p3xaAtG^-!E>}IN!C(R! 
z#prd&xf`g9#sqeeuQ`sqGg|PTXyQ{8px$mkvCzQgUufRn-J(N3mbiZu}!lj#ONxKFy%6PCE*Q@pEG z?ch{(`m%Vgq9cg{q?Bm;s45k0*OodmXmnoWo}@maJviS%I4VZYbA$n)K^14?Cs<_P z!x0=5=dnWCbKTN8ez2$W$pzJYARifF)BpwK@6>Bvl0F9n1#>w;ndL?yDqcl3uHx|* zYFB&!d6iN%GTm5+0w!+wz&1}u>WCE7qpAe=;f!CbS+45dI;{gNq*Xw&N>G|||7E~9 zeksc#>lnu&Y9sCMRM5o+;dxD=PK&V+E;#iWxp-1YJ0ARpmaCucE8_R-84_daL@kwC zM`?g>w{?sA6TO;ua)CD7yVVoPg{v4gqVJTYtm>KG$_M-${#KIPC`@GU*}63Ji8=4B zyF`8xWCv@Ij`aL`kly{bC_=UtBcb1{&AJVGV8{;MzWCET zAz}_MQO^%#8X9AFU)mfFQWkkA*$&=K@KtwOEqs71LIA|5DwMR8=QF|JLw+Ui;}(1A zTOJ%qykF>^rfYPP^m%|ow8T6WBc-Oyu_8>h!Pzw7@Bs*)Ic(o{;PuINYw>D*4v7!3 z0^*r0nEA2(cYY3oGqQ}hZDF&R3`Kt!1g9c_s9sYgHjM3cS^ZO@q`_>T;3BH}!=g^B z9{XXIr!o#&&M@^lZkbr{<`M+88eMNr`h3vJ&32MsXfcpfGx z2|M2~gWosE31`Vbi?m+q7t8D5Y273pTvDgT!yAIOLWwYnR0SB4!nG*TLZUZ!hv$dM z()xwxJ$zHcIp9tU)XTI|owD|usaq@MD|~cjwZe}WpRR=h=>tu6kTF8+dCATAJQJ}8 z$br*aPzt;*`feq(_@g2VTZ=Ep1VdD2Orz>tM-p_Wp=S#2XN61fr?PK*kJoRvZjg0W2F~jM<=?wI zdQk+`(XmP$VvEAc7)1B0BFElI1AB?Fx&t3&`9)mlaP{{SqrEzMJb8K{9oQCSyGOb@ zUc|)LyCAi}G%^#Y6F>8i@7?^90;9|y%&MP*idh^B0#Ru%E)_PJq&Gcr$*V>r zfzt(IxhLy5D(8YM_bRI#>{-C?&>;G34SHz?vvV;FX5R8wOaQa)j`Op}PZKpDDj2;0 z+BC$FC9ii*wT|=TAMBXsXypwGlwFDp6IYKMGmK+2)C1MMEES&!!3-N*BsjdDXT#h0 zDETT=A*~Ud+!5YfHoMnl_Zc#;Fbbkg1ieH_M*@(breS>SU>_ZH*FlyBBd;)(=i=Xi zL{_!;fOt#D&ze@2PG#u{=HYafY>{&tstIA6msEqJ-m@%xzbr8Ejy(H8W3M3jIBhx_ zqmIFe^9&kwW8;O)?ks;&?q2{um{E3rB#ueHRw+mpb} zXe`V|DGvARtJGnk%qijX7vOTP4reUha}H!LNQ79Vlp9K){T7<)b;`Ov>R5l`mPqBg zV9`Z+zt9bIqBb)L8n<3AAmp**fsWS3vu~zo5hw3qjmJ8S2G}2vRKU_(uC6-M>AxGJ$7!f+f~UWtDzP?lA%S+oIfqFqNphT}pXYQMa+rDX&I;h#07F%|!fjsDT5p%K|7v!UFv> zcpP6$$~CqsnP?6_meoNcx~(AHlpg$DF&*qu^PZd$cQ>l<3ZB$ptTzNc&CCc4IqBoF}Kbr0iYEPZd9* zbju-X2Yt}cDEu~;`n_rfy8ow}!wa~K)!~R%%-hG_(!+{IZIKnS=6{3RDZAx0puby) z#K9gPOm`9sy=EGa-){@2;k@;3Ey-8ihJWFt=Lsr~n&vQapR|iHE4*Yq?E65{jsJAk zY3>82%0=SMM=EXR2dqNgsT9$>&{^jk4`XwKD@VrS2?Vu72fObKD2;XC^3`eiP&S(9 zlA{`kd??P5+ol}Z4^kUp8yb=}Fr)xM_EdY69v>Ckj5^s9j0=MPYvW)Yq$XzC!rM|X zQof*O4vVN>Jej2`Z%!5kvt+ZR3VP1!jPxWTLodeSDIGuNU3s!O~B~NTY6L3f}N|==2sVN z{@s?iW%Z7S4`@}hhanoo2W?WA{-{{K{$aTggtCUEXLP5y#%GudJL6 z$Fl@0@kokcw0YECq#nYFLHUDaZgzKx5l=4-YSG&sVET-+(q8dy2(>P$%8|Qo4;7nOtj~)z8sW zS8WLhd@ZmmSHv@>bJVNfucjVDa%RDe1mYg-43>cvFJsW_Nb@{y6{zQdi(OWCkl4@j0hyAsQX;-X-yT3S)q6DnfEIIQQ2t(3KkSFKM~%-A*>e z!Pr%tK^Lc+))52 z;s<^brH3x0l|2jQ>U5iF3YueF+7 zZebmoVYJc$^B;y|YO*Y#4`W$0LT5916ob~xKaonTO`rcrDKNdqUO$#=3b{UmXkTfQ4v1Jk&8bjvwHKK#&Yo!{v`kTLZz3mP=0p*uuOzlkw%Pb@8^mc z{RBm$vXtNPkKSKr)rkfT*|E1-I0(JU){kv}&W?tE%Rr+(>~6QwVC;)8|Mo{;XErT#9;rXTvZjasNOB6jesM^9@JvA^ zh&5egFXYCPuO6qW9P6@?+KidakAwbC0p9Ix6e}vv0~n7OF07lMn(+inG7Gd?8O4m2 zBO?G=1Yd0vZ4@2-WA-JU^?pFbd#r!PFeWq?N}^zyr-#Z{kEQR|)D3z~4C~pXAV0h; zVsm&!kshG18gyYpfszeOqr0+zbR_(nA@8XW;-AU$NzBBt6UZTSYV>wy!w6ONJgchr z17)LXn@J<`&j5Zhnp|5OwDo9_{bIU4b0J+}jC798-?t)?4%)vVjKWQ^Bp-NXohA?0 z_v?ESV4>dv`R%N7|LM$PgX!V-S73zBFHOak$y zFMQYge}z|ut%3Zrrf7)LAXQ3 zJ{~Yo$07|jt? zFTZ^0Rr7gIV-lVE9IlU$Ks1-F1%@C|D>ZAcf#T*T&p!zlroZDuoSO5pg3ot_vJjt`6V_PL+#;m{R$9D zSt#JGw&TaGIvIA8MxcTK(7QJVOZ3z2fPBlF?|GGF+L?m;7T~GB3gyrpX0GN>hfNN2 zkT<~P89uq8{7Wd8&Jj82>Qy)xP-(})Wv^=hyEJ@~DP0A7_#4cP8IVsLmJcAIER?#U ziv`QBF|>XsuzSKWVHfcfgW8|6$sVsnjIm{+shy;5h3jDn>u|B9jRmXg7{4k&?j=w*uwhW#i}N#83*qdmW&U#=8fM2^XA6h7$$W{N8kOH? 
zmPpPaL&CP$CwhEI54t!o&Jt0xLY66bRRC5%slNtMEd8hy#Q=skPha$iHbTO5 zh3I#>&d)4v{~~TYIf8Wi?jx1hNYby})I$<>w!?tGdcO4PB`jIs^4shj=&rjB^5*PSy|UTT%So`UNP~Q)A|H`zv3)GZ|OIGpWIA2Z^dTyGV^LWY$dh zRr05t1_OW45@3*2owYix7QxlYeiD{TZxTnMz1p}&oQFngEBu*D3mvwRYC9WZ9U7su zO+PR<-pxSVF|wRNe{Q< zGo^3ZP$s~T)N~|o7{y|*4({*Q|j>KRfo4rO#^ zZWWoe@TKQ$kfsUgYH#TH?W(J4AmtPm+8d9%WeMJ!fKqDPk4K-fh%?`Jza*c{yxjQ; z>c}!t@!~gE5bvsgiRql9Q3%J_c1~_)GR?j{8?w&W8W0ZMeA;V#{V5n-?H3;)rF1I% zMOF4dR?A&{OtwlywmaU?+4ix;f`<0BuVq$6Vu2mcMR?7T{=?oEz(t&4RddT$ ztI)(J#Tflt`O88~v*jXJ<#E^gY0Ang#HT@{QP`B(L`{#!3|)W-*1HHHyr$diSod zJJdfJ&=~~y+J;5~KWen;3unotPVr@SB zUw^!ytpU(^K0~#$o(;?UTHOkLyUJKGhQ{cvi-;e2gF&Q{t}= zRPJb0JZBA%+LC^f*-?|bCgc5vn=j>ip5j30cwx9D)XYF^EcpxKSvKZEtoiZ%FKMj! zPvVneDUK&@;Z(5#Pjl|p!!U=1%Y`#{*L7#o^ZY_bY4QJ9w=)!58Pw3lDZy7cr^TFOE388k zd?HbVf~Fst@C0Vk4y6Zrgl;Os1Cl);nnAE1%7f@ z1Y#>ughFM_gnti;BZsE5QXo)Wfsi-&1N+hJB)Je*Fg@ z25Nc$0ppg$Y`>O zWAn&9`lozmhN%Il5|=yj}Vp<%4y&dWvF-LA#8Iz4}!^_o>xNN zw-vmvVTPcl@hS)}i8Q2kU1Oi6rPp4Bj1n_KBpZG-FaRs>Q;iFR!KMN){ySBMagIOl z7uetcm;|LDW}}Msm?B95fRDow{D&LvKnunZEt@-V2eWMc!k=0K!UH*hp3+-|E%KQ8 zbtgSL7L>69C*R5=K-)|E(=)FJEDcs zCil}$LcLnY*{6{13A2_rWCF|puYy?a?c$c4b?+TCjQ$hSIanwt@MY)EvvV7sl2Z=f zNtX9b<_LEk(ni!-L82Cif7xSN_J=VnUgC}QO163;wpw5{`6}fC;NQiyc)#Lxhr%4tT%(OQ^>6D!S%_35_)u<9)D;hh;14h(zn=a-!46(uMFanq?zp^7ZZ>!*dp!agHsXdgjH3I5Zsw zfFQq=VY_~F!MRb)_Wp{X-$`Er({YQledC2d#*nDEc2QMdjD@M|JJQ|#+*SXP^vml9 zfrM@$a-&jS9O|er-=-HMsc2(rHR>0m;V>Bx!yVFC`$>!e@ls-cpeYMJKu*|EA7FBJ zl!ARudj(bYW}gIKqf9v6Lt}k}WJp6{w>1Iz$k-9fx9o8Qo$hz};Lq9xYZ-Z9Y*7wZ z4=Y!-TOq#7$at=e_X=XAEg7}~$m@X?tpVWPwXKl+*(q8OksK1Bghel6LIDY-%UuyO zf`T}gj}ps2Qt&zGC-4LRRfEJbj2OFrX(}p_wf)dHf?yhzy$(u#my^-Go)Dbp(DcROc&Fq$hR-m6rH@yL}%^N^#Erc`DWu`edYk+F@s;9Me0qZg81I$-~v z{fZ(IUxfdu78p2*;wZx-l*!ug?8nS31oSMMY6{7I47+u+keqmXwoCB?BI9F2cPwOgSXO6E^Wu0Qew< z_*|Vq_Kly{rzYbjEx&_d#EPK4C;rKBrfGPb`aJjgFBl-8aW(W`CEO98n+V?zUX!&m zP)`7;iR-7l3lUu?UZ$wB9e8R^1$4UC=j@-<@#P%)t|nLkxfJC*JA9O+mSSqx60s0E z84JDH_bTAHWz#_o=L=sOyt`wwWs2XiP2uCl_Nu%9#aZf~sDZ6mvcmyp1@ZkQnOujb z58}1^El$qY5&AtWgleD5?_5xUxyGD_lCr=+m6L_vk0!1hcq6)O>X4N+r=TK&crd!ZI1-+CTq9vrf@;(_X2lnkb8OS}kTP=I$!9%f z4UXOx^z!V8&gaKPnIzB*R__DN(|LzavFs$;TC!px?3r7!d+uQ)t@lkj<^C-A1MTmi zk%GQ%$}QP*;J0ifCYJckTg1>uFXvHt{T;Eqxn1k!_=(e*Bkp?RprbeHj$nM&W zNF;s&0`I3i82zxUX zo?T11d74aNa+x#h%d;=FLfUR8`2hX;38lYGp(g(}k z;5NDly(()k!JZ8q!CNDdNr%9S;BeVHQB|z-)m0ig)B!n(WqD9bT_jUwhFY+7)1n7$ zKKpgFe$xfY@TyQuS!WnN5U;1BD9(@Lxv1U2FcQ|GSXH0zGb}a^5o*US#0+>Z2K%xV0DYMq6F`Gb4dV@{G!L5b z!_Z#v6{ZT-MaJ>XF$_!fZt1?lfUNBUwD-Yz74Iu05QvQ^;JFs89k4K6CO-<1bTYIJ zs<-aM6LAj*m{iD9^d_&YgY}Tl&Vzn>4>(WLzArQMTktOQtxz2nm?+$OIx#J z(FV70cC{)!>&kiAIwZNG8F?M>vm1|jh8w8lcNniF#FPo2fRX1-hci*L3=9~x{@WJQ zvol}SaJuuNYguu$j-DFU@082@3kFHI>a=MZ`G+zQdIQKHWtNT0x~W&4ph(q*0c3#; z!W3q#DFzUqI$*wZVUTA*=}D!YlozdyA%2h>ivsUMY6-|to-@tFet<9?e#t*#H+FqA zX3Uo+Pg}KcK1#!YsG|v}G2DC<&@BQst3kH%dtQ5d&5J#0Oxt~5ytDwEboFv;-SJVr zTLS2_<7S&wzWaMa_&jLUz+fKQl|;Uz-|`!{$$H_~V5whxV95q7%CTe{zuiDndV<+F-QYxV#(0fy^3d z88T_PHqmPM=1E_VK&$*y5xp`pKc?@k-Bj}3+B*-lRPKt}p~)-g%C`OT(4={xNIkp` zrKO4YTe4E4bJ+w5BX|{k^7m&sqAR<+Lv&<{zng~Io`62#rq6ygIdxYA^0LEO%T<#b zarew|aaF6|WDwGvsw=qmpBCynS!4uWCs>9cL{BVnSR)o`oDVuI$B2%*a$3szvH$l4 z7e!?1RooE1TvikoqYdHO^+6Ui45nzuZYH?C+rTZUW%`~-a>48sETDoG=oe|e9rg>T zB?GysRPNPm^K^gOlL0D6Hn#}zG*nd{F$aghOrucz#b~95+-D3rxc$fvxuM>Jb*=w4 ztGL-Ziao_|bxGu+vAVP2s`%jIt)!rjsQSU9FXwv;-5Z<^2RFr! 
zbK^n;e00$!`Mz7G-}IOTzFbAJ%s4<^1PgPZB9n{FTV173S%15!e@P3bJ(>lq(;4!@ zNs2QQZ+3KjtdH3Q00kM%;x?17*>U5F=CjQQTBUt_aGofYj9PGhGk!ogYDpWg zA#~~HRSdCt1RgV>%TIb^5_5XDnxS#I&uA z1z^}6>{<3`uH`Mk_saq70;r}tmYv)CF5a}eAmBTbr)%14lf51IFN zchnH|lO%dbf3lZ>vYMGh)gEm^1O2w=aJKt~L50l!ya@$F!rJE}gnHY^#yl+8KTpr> z?2uNC-#xfdW(9XeVaq4Kq|X1Y-ZD6)8Cp;crj6~LyJX>!oYf&cH>{^dF`EV3SAfhT zGm~JG6LmYSKMVhMS{1~~kND*9N80$sEY(k@z+e}eeo`*zf)TCd0+HDtBig`dhC}i} zz-1;}&Q8PpB*97Xt+zJHLdz)Wc*;4{e|Fhc3EgbvswGe{?}<@^%J+{MZY9j2fJgl( zEONzh0pn9d4SY~6?#KW_T|(NI5^c(HMv$N)v!5@Nfjbv#hx5m$-GH0dK^iYTR~9d~ zPrRK}vA8aBR8WIpuvQ0c*`yvt0aYS)1M^*>-Sc^T_|}!l{C3Ot%@W`Q3Z)Mi1|?*o zwa=xDeAC?Es_S#eESKTMD{^}RQbms&2=*7QKxBPOE#69xXEILl=3uuTouZ7oyGXRS z+uwy$lqa}xep{gWFSG3bbMYD~NIOt;ujbv02gR#h;EULC!bKB7E{kdgLjfF;h{bn$ z833|tMoJ)?(ovOK73w^v!+$hCZb(M7r7}Z20{^@tNB{k8M704pkQ&=U7!cG>3WRpz zJbZYJ5dT=coy|A;bZHfP-x$I^jlKJmtJ2kD3Z@VLd6x>XUNH)k{rwK^!;eWg0D!aC z8(VUN;l-yOtZ!fL$-5XiB-1Y!x33R?=CxqrtU#BpB2ZXt%KoRK%D4{#wDUqXcweRm zNPM?{3x0>(H&MZiy(j?4gcOq)s-sQFw3Xs?%w&sJOR_aW)WH4ib}{46$U^RdCRQsX z+N-*N`JTsaVJdOGV#~YtmIFvOzVX|xC5sJm7Xa}ZqxN_;RjqS1H+3;VNs_cVQ?8hh zsjOQT`Tf|92*p>TJ@16iUafPo9Sw&7J3zF-PG#6}7=y8uf8=!7#@ka$mqzNv_E+X< zkw%`$U&NSdbF8;YHiwUL9nhaOPeeVG9Y1>XW&r;?r8D8n*jI`0kWh0^5AFa)%Tu)L zA3t{_mo>itj$|S4zO2mZxwx|i+NuZK00OKOEZyfODgW1KoawTN+j5;Sd7Gf+acSrW zf%uvA(l>4(bC6}2Mw--KsW`Zv0)~ANeV*8%Zm^%5c?}@ix}>~hcf0FGMd7%u6A2Jh z)X?hs{oW`V#GEog*@55D_X}+Q8|0J)gWcnKbH>~kJ1`1Zwe}>XociMV&R2SvorbSq zMz&yML70`#Givq8Ai`DAFWsLbv{D1e3YtwY%chHy7O$&p#+#!7UNnY&!nGqkS=c&5 z02$o_3ERgc9q-;XM+ReLqd?&Osgxxsi#)GnX%57d<2MAk+oDRZ+t5ZeA`TA~1#Y#} z@94f`b+A3mDbpAKTxE3M*TUZ(4}QY(D9;+A09w~Tll}b!>$PT#s@u${A>kcICCB~f z`PEW)7xU&ho971$p!|Ht13>iJ`Q;Q|7&u+$Hp|lGgxJcpQ^X-T9?iY*W5uIl?OBVO zedh&IdW^3#iMs|NheaQc`RKn}#H^s&6;Bl!4Cp$7j|BK!TN#{4U9=v>BXXmTHgrrL zP(P{1KJV7lf&P7TW^sUC0;1|Tu4ht{YX3>V-(UTNqjoD$8YKlj^$=KVA3gK3p(FuYxI0@D9i2L#Fd30h#q@mGdvp9h4dkArrNdIO>=Z~`^B8|O*ob2*yOqlC4{C5WJ$sE zFA72F$d|8yowDBtF!6Out?%8r=E?@Sc-=VR>EOg_L{Cp1t3m!LQv#Rzh}pR8FuYRV z4H)lAa7P5w4e!#vCdu!}*1StnRqO`a+8O1|zyj*@l9#LM9p@N&kz-1^LX7UT9Z6zj zeqk^d_7*K3jDQscPPlN-;Aa=leV)_FZd$kxF<@ta-4GZ0UIcu%c-0$lxYSVMm@M&4 zo{=`g7O%nhXjQ*x4^G*Q8<-@K)buX>QtxXWwx+aes|Fhb>1lJ4^8BQw`W7(S+X>GD zj<0CIPyLeP3WXU%BMyP&cqhLJz{r&uD%2CXe3DEgOyxj}-OHdtOFF#4((QZmk4u;h zv}_X6^C%lwn*o0C=HEQyk<44cNDfR)mwJdJ-b3D$>-zoy3ZXTuAm&z-`3Xb-TAm24 zsr|Ni=SZr=>vz2@Vqg?LbrpO9cd2#{lrl92f#y*D2HWkFc?!JyuPi5S zJ5{?lNzO`y`JF;1>lz4=rua8IN>1GE!e#&q#_Rpy6=_rzCH(?v?qh{kMtTgXJU>)x z1{&*O2J0Kjv)$#QiluZ{fg*Ufo{w>EC9t2QH>X+?nA2b_;eAD@_}|K>d(zv3s$4HV zQhdmHb2;5K4gY$c=vcYLT)09wBtg781#X&h^S3;%&$(ExbxwHjJMgvo7Mng>d2!%7 zayy?(6)z2Sd+4GG1tO+zE+O(xEfCI4bMNnzV}~)H3b_W(^3oUu;6y^xuNpe2h>D5+ zX68ShA&a;!;S8f`9eTS<_;oImoe)k=qz67lE6@Y1T*)^_zbf0pxJTM|xj{0*QTbKI zTzG2MeWs+HP(sAag^Uc2Op?pEqpm|GP*wZcS`!pONJdqGaEyE{(1)_i(GprXBIGeQ zcvg!k`ZJaQSO_PIeC1Ij!9FvxcYVUUtwr=cV~6{<;X}-Vf)~c8?+mSgoJBk}Y-`+A z!3NkHHNT?;9KpA->C|pAY{z~_?8J4h=wPa`BGS*0xlEm_SFF<5!FL4D`}5e(n5TPz z_MEB$hM#p0)TV4!Tj+C9^JLDu)UW>rz9#vyHcNR!NPjS~9Lv1+K&vBS2a5+)k^2ho zfdeUeU&&nC?miMMuAy+r0u$gr@UQX|HJ%M?>#-@2WSYe)HQ;w~^`iXNzy{N&JjfB8 zq+|cJ&R$|2j~3dKabyM+S!DU)YpG5%e@jr!f$xTj^JKlBCizw)>bF18(J46e=$+x- z1zNvl1u&OLyo@JSsdM6dI3G22?T$8AMAFa9k6)5?8{3)iT>5r`xlT^`fZMazKQ<<( zn-{-8_F|<&498oI2)B50`MS!VzR5IQ*1fho1;=!kmY13EQ;o=b#;HDiT}$w6NAW$P z-VQV}p;9`a3GJ`D+!CWuux04!+uTz^`$f zicRine*m0aRQ_FI0fl>!`9pCJq>A_ws%+VF5mE0l8qLNX_UzG=TDv3QT2crO^jT#q}eJ-AuS_P9j zC5A#xb;F+ujYI2hTk}M~xm9$UG6Gbyutnj8lVT}4;CQQ9bK3Xu@C9B=6I%{0&#@_2 zIz9i+j#Bn@_BPVVliJq5Q!hebr99!siC3B+S<6v8GjQa^3Z^JFAqdRk{y_l#3OjzZ zLyT!h{PiTD1S9EV3&~Xfi(c-CpyHUUm%h+U77AF{qErAGlp3(s`eKkp|G}Bxfzsk{ 
zoekBVwD)u(Kl@H9&1IF>zRz@GdfH+hqNT{saj+Nap!4^^TNb(RJq0Jod1(XTu#v?+ z`R5H+k-Rqv*C=J@esTg=lOiG_GmSm}AuK&14LyO8xv_dD>U6p=L5%Cr8~qtPpk(30 zD&)y8-MfgA25y*fftD`HDGU1M&e%d$6^k*X)Kp3R9f|_ab1~K7X-EN<3XSCT5idh! z*aLQB85IRxMj+v%+%@+zD@PRo$QxYqCM6Zon@d@h+vv5uYeg6Mdr?J8hcwG z$yG1xU<9LHK-QL{B(?@f8#$R#C2w7j=FiE8%05uPZUA$ez6;Z^8^xaw;){Przh7x#kqrE zm)XtEY-L;|ry0K)mjg%)wAEg zJ@dFhAG?Q}id53DUKCzdUa0ZSP`12y8cAN1Ax-LZYI3!)3peh-fb7S8?x~gAK%4C- zqBi;T~kwS3pQdCAf^h8Sq-huJ__s;7N!pg4p2i@v?7G`>0cAMKM*>$*din z{^)JK$!GvEnI>yeso~9V;yBx0<@N%K@Oq_-s1&%H0;71GBYZc|Ih-D(LZD`8{yn$s*THpPb70EDf?hLG7B_&hMm~I`4Lr zL57=U>}25G7Y-c3=`fRMb%_H`S|r1XOhSo0RXH^c*o6a+=6QbE~;$vN!=KKeIo@Z>0fh?c|DO}|_)O4o^K`(+-b(U5@ z4BM%%Tf-Cb^BKr8Vgs_7=qXiOC9kI+!}5tHtacf8kxC95 z9H*O{nFte{N8vz2I;S8+x=-0_unCKcALdf_fY~O&Mw>f5kC)pIsOn6B zH6qVg_ikp8_>RnzwRHy{0x~rZ?#B+;;iw@M&iVg4=5TH?z367C%*vhH$oF2puEkjP z2B5}rX|vSV@qypn_re*(C{+(vkb!RV)M%PYqqc5pQGi{ut4mPpx$3m4d@Us#&>E!^ zGRU#AX_fyj+FXQ!Ap`6W3*WHJW!dYYgCTSX0iX@>mK4BUZjO$G@pl@#JtPCucI$^-j#@;oG zTVJp&Bo?q{D{tN3EI4NIcwq!6&(M%e1#v7US(7M;u)K-ViI6c#^M7>$|Bwn!#V4#v zzz;Nt^Dn#f(_mM)TNaQ>YxB{~Y%XJYL;Sye5H401u-REhZXGK(ALC*f>Os{h%CgHq zb?4=D*gK3%5&P@f)HFH>8Ck1dD&jHqyUMa+Ex5fei{-IleR+ONUR8Wrurs3|6AJzxfE**~x)KbGfqYZXwGz83^!PV=bdv!ooylbI>&b8xmQZ8J zdloex1rNxMssV%}Og9G0rNJR$MNnh!?5YQ)d_v9KzfV(|>}j`gSDn&iX+IiT4kFS^ zkjYkDc?-@&ew->e6o_wdO-Hc$dd~n6(0G*q615!|!e_;Th7#M4wXJ*hhUP+1l!=}%Uvs+y z`9N?H0zR6Fhor4W<@%)YV2YFrccyuA;`H*)swLYN2jyx}1C8&fxl7V8_2pTczpYc3 z?#f)>R0X8PU>gtiG0NH*W!l+}m^ELmCEB8L=9obpGP*j3B1Ql4@SWq)z9WQ<>cuNvK5VL(#F!>txY zNmDfgncVry=RId3-&r$Sq{xA-*(Enc3FOfx!0AwZ1J}_lw^n1Z;P#G~Lz9kR3&;k< zGJP1mCY0c4WMx4s-&!vVa;+*6~8dHUO!$$ zBy(yRkn|4-$2g`j;wn~5^Gg+CSxZCWIJ~;X|MYb$PBcJWuWB=zd11=GA55jx?UHF@ zVf!R;vHBJ1lg9^BdciF2GZR5i*0%^p*C_~BOoLXLipJ^M;Jk?{tl$Rho;Dy9eE33x zn7lO<3WW8ooy^kC_kwu?9Djt++<8Mr%cKSbn_s8UamO&J_cYt%q2Mkk!XZf@MklHlRg#*t6^EK2o>-_7u8;s;H7$ z3SCy)!W>v8;fm(}>!lP`6TRnnNT^T1EfPWTy8i($*-7l32uCaRvDvbc+&@l! z-Y+$+pExq&`mLDr9ZMz-u!uaV69PVvX!9^E`UR7tP4P;g+yG1Gek-^;o|&3)Fsv(P z+BXFGH@e}T8ql`I(mBQ^!4+B0uTejY8FwE1Z>GPO{V^n7B@7gXyii0_T>7(2sd~;gT@wkw#a{`BGfjzR@GEa~ z3{x~GDAOA{i7~9|^&wUaJ#lZ&#(kCbg=!!=@P#4(Te-o#5a2>A1b~_w7+*NXE*x(k zbARXxNyu^AX-x^VJ4&ax56h(EzEwl%Qd#EdEr&iiloU@o4_cADPprc6bg8?=bCaiG z=jqyd3j|D!PlLn~e!#xRi2Ug8 zVizNkcKd$sQyabHj^jRqQ{7o-kHAFUe*_nAhz9G1l!ME7S&hpvL-|bX|EOou@3WIQ z0fQ}N&4FLD+itNh`y_OBjLm0g+j||K4`pRSBERSr&c);GD+-&p|ScHVJ2dJg|>2oG{G9AT#<(8Y(BLBzPh<)f~- zFRw$L?vB!aGXYwC%TW5$Vr<9P=9{W{(ghN=>E^N24o|Bn0nb(gfM@4VkG|-xomx3Stk0PanGpCW<_4D5j3(9+g3w3W55e>hW(Yr%wcFoL3%+z*L%rf|R_ zQxzr@PdhlHe!<D@|% zpj|BpfZc5`$UlHK53cEdBWFM`2f@>;E2kfrHN&(v)Y3wwho{YW|G;VpS*%#&*$bsx zB&`9qI)4|*9`1L**oVtZM|6py^Hvhl5Zwa7LU!Q)j98qr_R55M>!}I)iGC^)3Ei-z z^cbr15MVA4;qqG$NZDOPl)P0dT#oS}-!`oOdA$TX&Gp)8WDvi*zctCFIeht)Z|qkG z8CFf%Djspx%e|iSSI5-HFX)!>o1IlS2w>`Twd3#Xz*_)R6;$IReNC<^Y-R?*zdwcQ zHIEiJZ2VB=+VF-W$OZx;AZtc7Y6qiUN`Wv>Mc0BY2f%!uJWl;dI#T2Wvw`1f)1a0= zb?IxueI^rmAzZtN&TXvWvw>ThlaQ)|v`vu*a>UCjntIDt4aqAo|45GEE4~j!)EdE| zC-p{k-7MJNdxf|5Mj_rmvO-e?E5;+1Zky+MH6;i>o2sk*3MRNjI%Ve*3pd~1(GndA zHQr8R5VR3UB$bk0=@u)OiJ9CabAl)Dcx|%Rx@__qrFK4}&20QU(o3zI8Ui`e*}Tv# zhy1XV@_xj|_r(9}YsWbVlwbwx-oQT8$6CW-AU)gfhdaJ>^E6pS#aIIO~MMn%knD z4Nkpj7Xvvd9e|Ivh<2du4MkFEp;f%MpPNc&>?kjydW?3=hQh!!tAOhxsNJ2ts7`Mu zHlHyCTf4UH0EHC;q_IcJa3#@dK7Jd_f8f5JrxEYxDHDR~x~D2dxHBCZGRC}V%}XdA zngR}i)_EO!sQOL~q!5*E(oUtTP#=S^v2nI_&yZF=rukjeen%6dkfz|Ba1!QPwDh!! 
zTI}Y>trDT1tGg_D(fN+#pcRu4A%@}SuE%>;3TI-*AjbuGY0(GPCaSox;RXuyQjC{S zNg#U>PF<{>Ar`@a6`zn;Hlj`syxsw zWL+4qXnJ)7e(#@A6jk~1T%&Z4EiKa_+$mI1Jt*|eeYmj}M?Ci}D929jQBXHU!8<_-U8X-wX|u8!}P0*kaP8F^b=y|MgbxBgFjp zoG>I$@wEZ=$iXHp21%d^pt1?wgP}CR`tYc$1)8?y(aMjB_jA%)C3%oR;^W_KY{o`C z4D7fbA5ewz%R6=5N8Y9f6HpGINiZ?9*=0g%&<*l2^j$)W2<(wtOp&evV6gCn&+*+;JS7ok^ll8|}i zzui$uU%@+j5r)Zpd^<*5o?mhzF~$xhny+-pRTvo8CLdhCt$WFLgZ%|1K|R3FRW=~+ zz6F}wjg*s8oAHN1zgmXZTWQk)@VcvTJ{$8sN^<(uGJB(`FP1#h<1nk01@h836&wOF zUt_YGka(gXXW}dh?x4Cp$;+l(^^3S#@=#ASZ-z3}D81>;=s_9GPCic2qP&yD*#nJ` zCfEs-wAoVFr)!sLmb%Pe#sLQUWeME+2p_pjwF>Q@9l%a1x;3ew^AbTzsSWTi|R)&y@!R6=z z=ve1)Qe40|tgdWY!6&@&GS6;OmOXvtLN^`%S?UsFm6`TV<0?i0LzVfiI=Hd~JbY;0 z2p_Pi-^De#kF^KC3QZW^?&w?3)f^I1I4N^)oDSQOiNZLJ&n?%+fO+rt0nRc#Zm6eR zcYq{@3w2;bp~-+0k3tF)&wvN<6$H4g=aq58UHcL1^Y{RmP+sGmG9np;;N9rKf3Lxl zmyZN~vSPX)+YduzIe~pJr#(o)Nk9xn0ugT!%=};5SK+G#F8h)Cpvca8P^Vz=T>o{K zSUMf^^Yxk)P~h1U=lZp07kVTJ4!8zci~#=~~GEW_<0z@M+R{Ks^(PP!t4&r4>zfxSH zv*b#seKj|*Nsu!U)l$Z`XPBF-uyl<@35n^ehaYb8I@BAn+ne}3fTS2lC|xtoW7mc+=jgPfl%r7X_vBNrMnI|z1${GKd!p$Z{;j~idSPmjw8|kaPZfRjWLj#I# z0(VvsM9|je!%nP3l%ZmMt~BZZ5$qh5 z++b?wyHg4t--)xmK~z^R?y1fdMl@@|&>fH`o@~TSW;K@(7!`$|bF^A|#^ZV}aA8!0 z)7Ko*cdsV1TGDn(BW(OfS!T7A8l?}5s7&DGX$2K8xlWwlYy=CqQ)AmK>ozxjXkX-z zPI~tB0K;Sg=&*t5IZvWk{Dv>M#&GS`|p6$T(J zww*OX;@+{upf~$!Fc6E0x27 z4H&kD_x3~3KrMbLKPHNJMq7)oPSN;5xKN6p(MRfx&&;sRPA*^&W!B1Q+L3r!=2IYb zAv)h4Tuz=*tI(x)54<>jkFWAWL5x4$-YN5N#n`e3QQI)FD5`Y?_V%J!zfT8Hjah=| zv25Lu_YUu{4`vW8D~88h<|HWUfd4^J@ZEIYmlapFWq1mER;Ccm7{BBg`SD9@W+MKb zLyCXC@h%*RNc$zfdChaM&HWz=gN6{^IPVgW=Q;Jm;0+;uX)P4m9d-+CYsgF`9G-6R zGPu}8YD)=mr&Mblbtm=1zm3M&2G2n@q<5gULGhTwYTWdQGP$Ay8Xe#}RY5@T#zDgt z3Q2#lzdmY4#IT<$Y)2zOq#q%9MQZ~l#jJI}9cZQ2Lbl3@b&OjPz$IWY1`nG^(bv*q zo-|pc0$4<3dUIHY`d|B`CHhRx*#d(^;;N_fa; zsQh^M%3#A@EbKziA@mFlZj{HkSZF%Ubx}o1qTVb}3I_RT>ibp+zu~YoSFbWO3Q4z} zwYFP$w)<3k`{DnQ<@_De1^~?3NuO|n4usa?=Qj~{qEgsJqsm=tI6wyA%3^zh_q=4sy;a-)h zeYl$XQEGi6!|dCC=>p}4btye52&`}|mHruCc~2Z{iDPrf!X?Fe2ZpC&Iu;Lq;H*{Z zlbm;R%|hMJ*)=Cez5f!?4Gjui^ZtrKsrn!^)exXwJ5uQ-90o0x2=}RI(D35Bun|-) zg9CJLQ=h|_D!A{g&p-NR6V9(fN0%Kj*nn?fmy!x`L(h{sZ5NF3@jKCxBSmh2XH5a~ zm~7TS`|c)hn_7k6zF>Af5AwW6=(~as=VE!;&QEOjY$L?At}?L1DVko2LCswyPp-f* zFYtsez6ffq!00gSm`-Y^nRqq*vycPz%3nHs98)ezQ6T9*u)<>~KGaT|v39-wcFDj~ z<;}E~Ht1Wes$e7Nf66_S*aah&To#%db$6w06r{*c4CPQ1Tlh16>j3r>S^-8c_*;`5 z^BCz+T%yMZS2*`hYpikc%I<&c_`xpLByHUS>H*@46SwNIHOyB}X9f^NO zP_1+3Ql2`C)J&f;kVtIQL*Rr5jV@?h^Gj%;E6b-Xmq>;IR5RHVdso-`Y1MSkWR9* z*kXAJ3WbYqolQK;g+)Hkn$E%y8bzUbH}zVrF#)9y!)We>FG|Sn%Nw@C4S;Uapv$F= z{W^Z(T6x=NgkB6rNl$JWt<*Px3_h}yF*xfa)ZGb?=_b)8Ttt9M1jGCZ(!t??%ZgAv z>1YP=%vKAW-Qv=xN&(zUAzc8+tL1*q{wSn6{nqlFCe{u6d`KsoWzJjn=y;1W7cDlK z=OqVaJg~@w4ofUKL~Zb0J@K9+sS*lI4`W}WQL3(tOca^mKcbxORb*k>+gv{G!otLf zxs+t7)xHkA7(2TW!dAlkINwJN6W)9_wxm_D$v0($b%vpV;0*Ea@`)_#7$7w}6r3X9 zXmUk-@gR|DlIev%6Sl+&?u`>ayOpyvcd7{Ek@ZSWOd z$LUqc zlws$Ir}FX}t(vWVp9jbI1_;rzy2=zEWu4MBU}GE!5N0rbC>8?2IHLAeOFf_A}rNendPQBC9WhN_ION- z2dl%)26{=j9Sl5Q{}>L%_uQ-h?>YU+x5AeKmhRpirSnS)kelL|M=(fUtiMavLn9p@2+A~d9Hb)7M}g>CN>CT);{z7gJr|K^b@4hCT@kG!egfF zRr0zf1JFU8EhW@^ofL#pM1z)*I2KZZ(+Xm@LEA{Y@oM*yYhxi^A1Yq)-Jebgt{^Nc zaIq0bdra%41gzNA7Fq|;au94AMC`!25)GsD_>HM~ozI9>`ecl9UpO|F4jf+oJ&luO zpD^6RyY`t;p+z~A*P7s~*EIYPEO7X!D)tBYwPs*Hr(9n4N&4LxiW%}Wh`SE`OO**yOAgx$hWu<)dE0&ylscKP+!>o&l)8gfyDgtqr5$4A5>FhFv{>AeH)=8jt2VnoNGF8WU zbm5_YGQX=4HX>>1oc*_R07gK$zYs+fEF`?W^DISjJZhw19+#i)`R~V)0;w4x@WI(h zy?N_3VE72nzhrD8cgp0KYHS)KdH_=Q`uoipF!EMrnBUuy<+rVcD!Uae@(BFdOJ_G} zhD%(9_wUTe{B6I;27~e+xoYxm#9-r!zp7sI%NIO!K0P|$(IP1zA(fs+V;Hx@3aIU{ zJ#exwX2?XYJde4>RKoaE?-RdYr^9zrwmxs*q${fzCtyOmvxQE|31A#&1at657+R); 
zjJreXtYdOh8#hDlECoc#$(20SvVBcDMqMgt_)j8%xGYKN8wvVZVC%?CB|?8g z>yR9 zF3=U}yB%Hph=3P8;sL)ee#7r*+Se$A4^vp=gejZGgJ3hb+JBJ7{6Ip$zSE_iT%LCu zJ5zi>mBwP3$*#>3RM@#^U9OUN6R7g9i#hbDS9yS}#kj&{=?u9A!w}GonBzPE(0vc+ z=MCLZ7cmyo#aWHIQ8gPk_gx}Wha!mun37er>0%FV)_6s5FJ?bhXQdGDPB=ZZ4r<54 z&$onFTmFW|D+Adkp`Q_qZ-!x7-Ct#;qs@{HDcGKFWY)LY-s;BA(1kniXT>EEobdUi z(_@@Ju;_f5Ot$fNZvyC?ux`|HhD~ta*&(aa3xsKef3KP}KathY&PYYXkLt`7%V-^ z+X$U=DOvfNK#fEm`)qcHv|Ix(==OaRIZ;VB!;EuJF{V!Wi>R4iA}8ku5mBaCCFs0s z9HZg5Rj{~lU|gjXM=UfS1jMnB^-rx-I$K>YD*trxFDl+vyLPHxFg#&DmEq22O8Iwr zPxYh9;d~9YTDn+x?n2VwjM)Vj8+;@~HIcqQT*;&kc&A?6JyPLIbbn)(hPpOKhcAdG zUB?%ozWVTUBbl`dPoAQiW?p)iU=Hu3X!nkw7%Oj5T&ws5yVaukG-+ea%jCuz zQXXT*0|y_bO_@7J$y!{f1?|Q}Ec9hu|IT2w7wPZ!L1GWoEvgB-VM}D)h1n+8(W57N zK9!)aRC#kXd@Y0J-ssn!N{5;8Eh|sd?{7fCK3bVG+1edWuPy&k7n8J9;DJ6cG6w%H zf8466n=k79gZCQ(6g(J>^<Wmd%>F>GOKfPU_m^UQ)WyHA-I?vAW9YpEEC9;Yw zp-KV0XXE_$wuPjioc^#^g5Q$DZ(iaGHPV{A2*GUe`D4?Gq`K2ou! zHM>@B7iDL5lo5B?CL;lMUX^tE;xisVxSl12-Vpp^>rpLpmt0g`;VJFj7#QRw&v5aKe znU+jE=BM0l8Fpm8uS7@NQb0m-CermwE-xl#q1idEY=nf#tumRgnquDFLszHK%&PX=XT@Txn$_~+xHtfdPcUL!9 z0AsZJU49feqFxT%nYH@h2BONJxb9AOF}9bp4Oo##$eBASMk|r;0B^80+qVyzb%Tgq zFdsIw+fJIm>h{A{)-@KosRHT}-pN}XpQC^C?&!%}Wm05HeMf{zbJ0UpijtJv!I#75xToV1&?Wi?bycB_e{nXv0T;{;4SPVM<~mNCO<&q8zY<+V3Bs@?h@&M*B9XVw zD5?-fY(ve*f#zLSG#c~|p97Y-tEP~~-8&_uHSEf3Yh%XpHS!R&9MBX;$D=U{eo zkh=n!xn8M3L$8mQ0Y*g#{59;?WEDzQ?ZcfK5$()Q z`O(AdY{gucATH!K9pJ}qduw%r8?So*@-+mv7`8LgZ9X>-zB*I)ix}a>Dw2mdPUMzN zHBJO$T7>(MFAj5tHBrjE1mi^Uk-_*n?S#(?{H&KDAq`N;vF17TVb3-z#t2`ioR6+B zQU&9XWCy-8Z@)hlJ+H4l7R?+P7Wcbh5+@aW2}BdneD-PT7?|p40?)Y}Nbt)|uktUw zdY3$gKs7VGreF+_8sI)ify_v<#E-y9v39=9|7syb(~0z9ipa(}=WC(|mmjbr_+!r6 z^9X|Bor^xVlz#Knix{)43gonX+6J^Rbx=kPj!{+(MYCtY`*P*^?hsa6zcku4frNtH zQtkRTg6mT*${0@%5;gXLS7Ox`_YL%uD1tTf`vNvDadL%Ss4$iOQgWM3H<{W|2li)8 zk&&V&)YhAAP&#;b+r5Rc15p;j!c}Jsb50Mmiu5^vZD#yfXSKrz?Kv80t$lX#l5$5{l2#OrSk&R?(e$FGGbh z*kY|ctok|)>S;;NNV%YE{bnzI-~|jh?}+#cAYuNBs-e6Kmu6yzQ;Q~APC!MX=z%B@ zQJH;J@`;&%k3$)ChI*U9i0yLkRzhF-AK+0I*>lWtq8Ik~sN5ydC>%Q*#82}Atc1-u z!`MKp%Pmxw)IYQZ^LR}#TN2HDLF?mv&Y|*6yMUNBIK!u6!QNJOGV!Sn0IK!sUnWYZ zt85{QxU)6gb(Yt8qivzBOZLR>X2#4BJ?6vO!F&k@X;m>B{i{W2c=Axn{mjE&tXMB( zYYIhJ)NqOS^d|Q!l9lAh8w zt#MrX$*!W}r{8*6(GXdyr=g!~E9!4K#6caAJ+1l>Jj03uJgs2~B+?KVwO zjaUIJys5t#@h~ih+E?lkDsZr5lNA7EA$Ay_k;vVU-u(j2C1HpwNsr2}iUgcPg_z3H z=$D`5@&G)4l0~^y+R!<5jKJ#0Pk5Q&FY+yU0dnNN+sAiW$3>IbRa%M@#ME9DFF07RrH=F2oLA%?p9!VnKLv@bSt`58H8(;Y4((&@b)z$Z-PB#bPhV#R&oP4G z3&l?N>uFVuIxXSpjSzZ<+%G^BlrT_NS^##W5?Z((6OmJbPU~Cv@L?*ZAaoaz)*#55 zpXg>4&K`+l=#@bA$#8fJdhm)zqOjj~{e5l>A~@O;XE$0~?V&U~hCzm!7e(Ki7CJ?e z8ICr{ltujw{~FPQRR}M5f$#Ft7+_HW&e{$a&xa=FG;a8Ylf27d3DJf9>{uKbeoX2K zOE~=sLuk~b6>;um$eYFn>VCcf+FS-$fZJnhu)ve=xB$i<8!WQ>LK~pu4)|mB$?^4V9P#BG3)89IL@-xD-Q?- z*>QS3&R}GuVw)V>)z+;$BX96yct!mjoqPGS5n~nsS-1y%c?hdq0RbShFLIrwXXT*1 z8&M6D1BZ1tWMJ!YD@iDFKOwUaVGS?U^O2j%i9|A%JmUyj^T`%~Xt317b*^8j+fU7b zT(onmT~DK~ww_fw7{pzYecX%7BDoIkMHhawL7Tn8KY?E*f&hp8qt~O*5z_X-%J_#3 zX(hc2cRP^CNU{ec73g;TnyD~sCJ?Gb`P-P7+RO|Zoc}b2L<2t&zSlut3d;G2OCs># z7tb{pYGFd>Mmlb~18AxKypt6F+4qcjzwYe$8oWmD+t;wi%^(?qg2SFgrkc&=iWi}Z zhy+fubmj`rsjk|gm;+!)jl>CcDM)7kJ0joT#6Ty?M71xkC?NVyL_V8O;qAxOgbx*lmB@G94GKvO zDCQT7aw}Oh_b1F}JC#MVB zzQ}8GiKsO&*rAJueet=-cdJM?%Go=m7h8RWp=7l}8zG9m68ws=jiL80*nWnZjq5i* z;xmhw4iXR;I}cVP4pMDu=l@LG5Y<%Dm5r;7+02UlZ$3KUl)exLa#>BH>LDjGyH{881AdF>#nl~RZ>=j$&TMIBo)@&zKzzBuTb zmMO>xke7D#5V<}7SRpoC((kQx+hJCA!c`A9UUJb_Un-ZRu2kr<=F2d0pu@PTBq^`b zCJ}BL^tXgAh!p37eR)K}4$@?3DIyol%qyG@97WIQwU+@0u5X=uW#Pi;~HGR2P z-s+vqrob3h07QaLuHK&WFy&$_zi921hY{hWw(2kM<>mV?UKH@r!LW2B;iVa791%5To5hvbAm{yLHkATpN4Gnrq_xEM7No)t(a_ 
zQu?_ra4;4jo_m)=va5f!3=)u_aQShy&D!hJwKc-nfNYlBa%_SyVT8| zFI%$ZdXzuD-DxHM`<8*fQ7C8Q0(|<(x28QhDpQz@3p(y-J63I`;kLVFG9-+n0&A9N za4>vle62Gqnty6ZhF`W)P|H2OTS1S@mAB+J+y?L61922T1Ij^j&l#gL!GVvECc?J2 zmZW7JItlT%Q|{>}v*8ma$WNE>z)RINWLhqzDMC}8uXJFjMLa`a)|RB{LU9s0(QTXy$3FCfHBwp7!Yy4-EPoj$pI4ts zy1eLAoY7ku7!I{>+t{ow?T5Pk16bD@=Vm~1$=xV`Oa3d$=7*fZhOuTcHt-9&g<~I^ z-&_Bq{-bRqglE+ZZc4l7P#xZs{@G)8EjgL{(&jumje-WtS8%HxmG>aj9o0Q0w|k>Y zbUY%BT9auGhKuR~bftyi`m~Ze+HfHq9RkF{0Z{onL1LL(q;L3_tCP!;c7evovhaq4 z_@{P*INmOf>8TB$r4~XnKkZuE-5ct`l~qfZgKMmXtAc2_4%)u4O(}YPM>b^OBl0u- zLKjitf$cDiM}LjoZ5i&TG6?q{RRL8YQx$6rqyBh%6G|_ieF#U)WLvxx+n*nv!|ktC z5PHIml9Na`lz48PN(6mJ=JHb{3?QI^HFS#`yIa zad{XWhWJZKIjw+WjV-P0^wJAQO*2jip>^LV*NmqRO*Y^t=YU1W+%R3%5|lf83nq-6 zn3PeO?=2Cia41i8tQ)+>)}(8f-))6?v|`h%Tz4tkH+Wp8DptmUSiM zk+D|Dr1Gu;sR*};ex9Ig@%e+x@5k+y20T%&QY2FMo#k^DyxIdpp*pC-RxtM@M)weH z2c+e~msDog>(nOtCxwo8ZNnbh3**Q7ZsPYz_Lz;L>9KcFT#og;ReXcW5x6uca3$@e zQ~{>yu<_RGr=t=ndzDgXQvU%r`n>-V76_QNOMCcf^%y?YQFHx7U+H++FuS=__c`|~ z@o1Ak?R9EH>aV`F^V{LXLn_}wc%dyB- z`_^EPe{0nAlpIr=pn5)L{V70QNnL+%-JQ8BbE*8wch&18*=Zy9wdH`lA8~gIu>pY2 z)kK`9t&a{|?XM_ud(WBg{aV)Vi%E$JfopcCErB_|T?XfdmoQy5bxusOxv$i3<6F1g zQnp*)k9GwOcvfG3fwBOEP_zMg5Ql^;v~ELgdWmpiD#q==%RGe`zwKmffY}M$TIv~2 z(nT4G=u4^~HIh4t7M!%2kR~u|!C!jHgq)C?fescqr3lR=znoGBAZ%&2uGrYOvyL+} zI-wsN=SLByo}>(=JKm=xQaj+usMuo3!8dDZp&-XP=9gUS^aFntPkcQSsK|xN91(By zF9A}MVHG4Xcjv)62ZG(_QWfObl*jK&$rF9+uo3({}kG=R999EJ3e zAu)o`J=hr-fdpS>NTseU&_tx=(f{(lP1w(hR3$yTS^((Vygk{T1HNUjaJbcNr*gcB zid1l*7c(Y%OeDM)Y*XWUc|-K=Psc=2))H(!|XY-WQ?FX>Ub-3zv}EQ z`Vt_+tWk133+7j`Ma`H`3q=pUW=!nQ(f9hDRpxPWW-2^@Aub=Nz$|*6beQ8#jZ_2$ z5vs?=?v4KVM)(5w^2oeyuZPX&XKfb}C^rSup8}~R%4VYazEe$k(AixPRE0;n1)k^8 zmsyUBJEkZkl+El2g5+nEq2@s^U4be4Wa`(bS|9vFjyM~+lPS3Xi|9wcvjLgP=6MRc zL;^w=%1oklzJ(BkoKg>k{a7|8{X1gG?5<&Z=t)igCoVmU`H9)vPuRL$XW!^v1? 
zcZ~{;G~3VSJiiXf3Lvp_AAVn?w?khC47Ww#jG54@a1s!GR*vO6f;ec$h|53tc4u+km*Cu*Fs%a3Okdf^6r8b3{;DMmKZR_6k zOUCUx-w|{D9#T3C8kN_4JNC5AYhy8_8u34j)o6(HR!uPOb_9?9kgo07;=D1$9+kkb za)%3UB$<#c*IeT`1uKnKH1IPTjRF1zzAoog>sPiW#)#?V-AdS>zb!fw56$x9v0F~%~{;r=I_%;c6Q)^>+hbA}j3s#3s5 zAZt-Ib7iMkLU5(~t2+RqgjdghD0mY4euxsaty2Dk79J^MG41|l)!>$>#8soW-b)i4U*HHq(U1pSQu05zhx4MBFCj3OCzfD8t=4RSq)jie{n({pTs1Mix!Lmn6qe8u8B$)m zTFWB6H3zE+aARI!zvUbLC58tNg9^qK{i{`;%ir0;Cw}YhLgonr3|o&1&dc(%MR!(z z7~1D?*{qFSm$_4pKO+|u^nNe>Lju;Zx*Oe__xIQ23RNYHyMNspcT;u!0%U4pjUH~5hDV4-=o?BR^Q|a6Hj%R#C&ssqy_Cmy-WrKkn`*}mOnHE% zI==KkC1Qb$dGu`&7g(>BI2^x=*OSc~Rcs_VvHhhNj7#F5Wkjp(K~9($V+rrUgAfZT zd4M3gQ;jQJrOyr6K)tz9tY1G}HiLvJTzaxSo^=pC$gZt!kpQw9B`@P%r>^^IU>E6xuo8;Z(%>&XOV*| zb)6Xi^P&=fb3XBvkts#rheFhYj}WP5qPL0JCuGd}6pGOTUFFPQ0TcApp+RB|0*82mGX6wpFLN@y}kQ)RTNsT+i0bg{6D!j)KPFOWUN0k z5CWHXBiW|BWnCX|hKAX5kr}XFeXK*RK>zXYEfZBc^54fn$+hBA^E*JUfRheAWZyAR z(Pe!R*HaS@LUOuqb#wt20g*kis~~;6wU-XA=<^$Yq4wiK=-yDALyB#bCS~#ANL~dc z^8m8wB;~o!kkr!eRwHsDf`cRH8ppjIQnmZaS1Wh7rT<`{i92aMhu|@v_JgHPe z9J$@WZ(?0OwHZ_NqvEv|(~SsWwp;Kj#|22i#QZsW)Ds~9ZA@vWD(`r9NW{ilvZL2$ z7nxA)+ZE5^=-0R?y`OZEZrx1btSF|t)f>DJ&7Ahn3`>PBSBOwzW^-YLKK~QnA43^x zhFfan#%zeCq=MP=%4eVj!Sseo7<0Gwx_b|JY1$!>>bZ%@AAJryOrG!esZs?qHDx7U^8U@~Yp~y}QL!amXdh^Qp8A{1cw$4>K83hGc;E{A&2iZ)8u& z6gXw4Gp*iNw(zOTN0`>Sjw2?m>6N3?{6cy;JA8O|X#HAUn| zrX3S?#{Qf)MI1!yJ6N9nEK#lzeK?e!6rdymGR!{xMV85CIv+FE`_BbiXO%8X)@ZYe z3fY`h)|jwIA`y5pTV1D`r-JLBX79m%S=};#y!v2cD=B`6l?DI3IPxWdpl|unZ&R%| z`wjoK_kwX`f+664fC1U&<9s*JDrR*8TbbntTqFLwhhgQ#0}j5T%mEzQb@0>U>%Xcm zIIm|uQoK7b!YxdMBF-cF(wC|)&RGo5ks6$BDng66xU}CT+=s@Qu#>FrMf0C^C#Ic2 zLnJRu0S^Ct0&zKjzw<_!l#REYPVu07W+$EfOA-w5Uz3zN6FzsmQ3oFhu)*^?3v=L5 z#3rO00pM09e@HB>Q6sX>uUB4zjZAIQi&3Rw{&<+8224CHrw?{cDz%e2m;P!4|;^G3Or zzB~R30{|y~nx7I}vnQp!@hhbj+Q+0^Er{5(M8A_0Y=8al0YhN_1h%Vyd2S6GpivV_ zp&;~j2^n0e)6|OttPIf-tlCpPz-Q@(uchK=MNCG+H_HV|g184h7sk7cStqm4Oq#y- z`U$`n_c48WkQR1Wr~q}GJ?{o9rKVY{SRk!N^fca9akf5zZY67$)Uz)y%SZVaZ|3Ut zyA5>)NlUq!WROT~jN9)&Ga>p%&HSrpO5HlUnqcbYwo23CH*=7NDhl>!Zvn-|b-Li)A||t)qrKx|^I|?O+64AT^6<(}5HXrjxth zxuY%j@AweXl1yIIv>!dMwJBYwxVI-LR=h?uUezI1(yo^Rs8z++O_@InN~m}G)F(b)7V!UDUfG`x zXLDyVw~g!~5p8!}%A{B;h5DjekcMq@Yydn}=pxnw35-B#_;V<(5*zP>11#%}^NT9H z0!7^OJMR|Bd>Cw0-p_&V9U>Ac4NqN-!}XTZF;c>&73O&@6ij^-!MyA^f668vUTF>d zR+=)qb>@K4=j5L9fPXuWo6K4^m+s|asCk|M4x%@e|A{xq*7sHt*XZA`=2g!Mg4G-) zrDSVT`>^w8&cZZZ!*)d7e>$wh$H2;io5j~)C7|gE>E31v!0*`DOZA)+CrF#CufwW4 z$Co)-0kujSET}L{lvxoefZqe9xcYF@>7clZ^2s>1t|4~A#C%rTu?S_}A|3SBux$^T ztEk|D-YyBggy6l;#dBq48ji87>v}=(a9OexGK2h$@DvpP2&FSo$>P65BrFmUGov9k zGXhW~+lt`;yq3)xkaKHvVQq@t)`tb3GYf}*$&d@XU0))Oo)f3ChaW*v7^W*ni=nCW zB~nyxwurVd_1@y z&vOLa`*?00Gh_P!9`s2+;{yH0(3X_)n7X%38oR6qnA>UxIa{E(>UMklXL#+t94)PS zdxslId*V2boqIj7&j{IW4p-J_@0`(^)4_x4>&^}p@AW=>7yHyxBQ?}@YMgnNzgu$l zltsAB<^t7h5;mSTF#7i}Y+0!OF`d(^ZhZ|Z(SH09pzf|#hXv(jm|E3y0kH%G8x&Ca z+ku!izWljFUq)@;8kqF6wtuF4Hqz4K76!t@Ttt~;8mROKb*cBEv!${VVP9bTFYoMpo?N-KH;z*vc zZ8>>WBE|&Xp1q9v>|Gws0}XyX&)V=N9^%?m)U$anapJVB@580dHPm)1!Ga^NH=Fq;oV|k;< z_{UiYj>$85gc0LR|D>SW760NW?0^>mL|+H+#M}v|D@I&SL*yIrBgXhe-~ZAaJajFy zxsP6VOb4+Ux=0@TQXC7i^gC}c1LXsky&BlIeSuCmRe1POAT!&beSaTT%m`7?f8#o2N<3~xJ{e&65L_JO1h&-Xtn%K z4Y3`rjoO~*h;dnG5V@SqfpjMmc;3kl4JLLqWn-Ly@;tDBdt58T^v^uW|I&4U6FedT zfRyM?>?rF|JIH5A>fP-&cC2e(Y~^27cz~+Cp*cx~hSby|*&8$ah$1l)-f!N5s5BJw z=`@oyYEnIc9}5K2`4U#UI8{A@DJt48g5Cqed)#zfeVA ztKF{YolpnY9DiLSGuj3hDx&Kp?Wh^;p;dz=kcDRafKZ~)ryJa(&b9jvo^?TW#)5EP z9p8R4mh;2(JkbMXWh*3spLjMWpB5QJ`w=P_`cFPAI?)elwA-4lx6*SkK%liXgf_1T z?6zESZJ>4ckrZ2&CY0%@wnv5BY?+Xj{h&Ij2+`-EMJ`^BR8-X9_p~!V^OPYda5=3`R8z1x*YQIE?Uefy+@F4xXg(9Ds$}_X@JO&lUX2E!ht!b3eZ``II_W3IOKZB! 
z!1{&IA<=q73dtbo0@Y1&A0PKJLIo?UZk(5)S!d%VDW!<10LcC0a%I=ES)`Y)jfk#oGUi=f0OQS00>M@`${zqMHhw@fl z-N58Oc4b!Q|F>%=M(sWkhw{I4-5cUV+#ooqDbo)f+8ggYhDGBjcSe9XYxb4s3ATPJ zeA|4raYkt(EwXBIWEp4!_y=?4sSrD@8JQZJ%e^HJ(N+|77myoA!F&?EsYm`v5N6bq z-i0gY2V2H6gKu}s*uc^9Rc2g^Gp5zaT1U@{K1^)0z-MYaVLJi<)=?sxIa;1a2Au?p z3T7SQ&kP3{`_<+g8?n7?fuR+x@3zFVBWoh0cesiEg20Wz8bq11LOSOzMYcj%9ADY? z8{W$zGJc!olY;!={>a_4t1tye&M~gYP@#~>x#FM9*%}l{r`?TRTo4&(Pza7N-g^^&_c1(g)#WWA2;of#@ekl@dV_ z*7~hfcFqZT%|R86=IM#8%Y)-=m;a|FO^IDt7Ah2AB21 zq3pZo!12oQT>O~9i*tGt*|j63NR-U4Pj`J zSZdsP+IN-*ovIN)yhdKC7yR@rCCL+kAPB9?jlTX1K0K|Fc(Xpkz{at)(j#1OjOhYG_`USO9&eVxUZl1STpUl5~q=2YGVRu4;Uu_%!>=por<+)FCpzG z_tB;8#~i2)_WI5*Y?r_J8Mh+w(+ksnd8uswvXfBXEd$nS3t$*gy=Z(K2g<*iOFTD9 zDTvCuz*H_pXy8t@cWksV_XfDFOJjP{&|{G-RB=l`^W8RJUT=~=&cPM^=fk-Kn$bw< z@r`rD-lpPDuS_G~=^Tl47Kpe%#x#0AMll?l}Tt{cAs94%n*|%tJg+r(0;g!OjlHXX~uS( zBo6$5k$dBPwQ7EWVmV_%aPy-9)`BPX?4##r0Cp18cY%PlN6|3`wP_%@nqXy`#F!xx zq+6W6lkiR~QKpPYUFjfC*E^hP(Lp(P9}gUI$=??hZ@5euAC-%}bf8v`Kj(SOK4OCGNi zRkw8e?>wMYCM2_Q7d=`-8ic_zqNjj_cf;QX`#_cSmL0qludCPVzGZ9dOSVe?tK}g} zDt=c^rSK=)0QkcBtH3t$e1=8AI7~)UszNw}%Z$&-H1pE+7BtJPLAv9a&Ey$BM*MwE zi+A@4kR7)ed&Z_+vsXv`gQ+v{d|^vr&u%s2MCEWuj>c5{+5=p=FNaeZWL1;gE=%C)oSPVycY0B7;$F(HxZFaq>a&QJ6{8=D z`dI{(R(^2g1>xJ)^44&H@CW3ZApi4R98Xl!JBE)rn}f_eNtlp z!ESeG)-N@8$L+9M-S}Bq7>B)YgfJ4uCn>hGnRA30yVJ$$=|yCQflXcj0DmmoM_!_%+$Y}mA2 zKhiw&=44f*_ZBhyM%k#aGfJd!p%|Ga$!}{Q;_v`oOTQWEB(cF(VQCp*Jf(xBXYOvI z3S!j0PF^HkX|0wT{H*Bjr5 zi?GgZ=JpWrb%5;FLAPm`fP|FGvuso87DX>h*4HJPtJ}o87E;3(b9QF;TP}{VY;^Bl zVY=5Wlck9QJ0OrD61S3#eOqfLDT58?`c+n_O0k3STT6`CqzmDniE0h;Z$l3n3MoO; zZ6P#i{+QP7|1If`Utq_5yaA9XF*Kg9KG=9{B|NOmNAdP*0PMuy|JCVnHo35W9>pWO zAZPvr!4v3?2GwA1O{hH0(o#y97!6P9ZyHkg9tY_VOo1%^x6*_HE=j>5fEsJlzifEc zSZP%SkYwnAV3L|Y>s+%V^zni&`0HlO2tQ>vWOrV&SbpefE+*R{DAa^AoZ8($X8PKV zx;|a@@GMa3R?QDmJV9;uD7-@Ol@^aYWlSJCi^J}4X$dzx+XvOF=iECQxkR3ITyarNd>a2S@O7u{gJBX{*K|v8oqS- zke(Uwd4~FyM>ZfjKVu$3IlNF(n1OGwZUPcM$`_k$Dw~h-pdufahTz4}L*k|nXC@b@ z@1xyA`W7Z%my4E$-WnzgUz9y`nz?$5A$JQqAySc3nbt?_cJj@)O@dHM8`$P*BfkDy zUU@EcXR~KP;e&{ExtzF9*hSJ*@a{&mEjOhfSUsqZ%s^f^Yvc>#zO(j}lL$ZR5S)5o1a=OR$F*dmTV)rG}s+~=|dUU*Q`Y6303HKY~Ip5jafqiHlQHY zi{SU|h}|}!RL6}3aLul9nU z0J8?p*Sl@co6~SaO?&cvEipZKepQhMbmbrW*+q17;00fAn}UOstYpl#zd~Kr8n6Vf zZ2avGNV4f~vpBjG0FC{QXx+e$Qve6zae;r?k65NxzftS!gLP{~;P(V=RmGVXwdXHL z7uT+Qg!kO<@HNk10o?{WfT{Dr4HhI%9CI^CxsoE7-TQ@ORn79(VwfhD`&0(`XtRps z!q^Af6n#TJ!8@{Y_3EFK0g6J;JWlrij3D~as)eT1*r|~}&SgmsRjw_aA@EL5ROTrA zINF0Bb+M*N#`YGqi+9S6qJ^r~IL%dREjByPmLNa_H^yhOJn9r&&FBr!%8L!vltAhiZ13vY z{EGxyx=u2k4QmyEODVJIhx}F-NgtMFel!j2;>nya?E4s3N~3-0RSL5{h;xBz>jw~O z5RajV6Qp%nwk@IxTPhyYx${ig)qJ)JroW@&Q#bNeef9Ln$1GB`T#PTUsm{H$a72Eu-4kvh zBu*y-+D`6FfLJ9VVIw!+FutJq{)`EXF(B<;kA4hTNpF_cjBGhaZvyu55P2>XMmwl6 zVMZ);=;;`Vn|MHipJnyJHP;BvWKFp{QgEijU;|I#r!b#N=uVArA(Xa?;8tdRP- z=b8oO)khLs#^eOjV_zn_BNxN_?2UW5`Tl}>m-WC3+!QD<9IWnh(T%d%!=m$|6ZZJX z!o-Tg8lzdyUwid2d0!+<6MZ*=gz<&r&YHl-gyEi;Kl$6$52cj&Cab}D6d-ACtv8>@F!zZyi7`n4JV3+0ZR!Tv z9pEnS#Yoq2LX|MWdWYlM2$p6E@S>XuefS@wF0V`wo|OS?C0H${RCjZH-LM(q(0g78(bnpb@a$N9y$RSLuzT!ZlIHq*6mREJ~MG%0P^^;$qy%d91O;7eU? 
zq<(bFo${Z<_tHV!5`HxDjHl^faq)kUeOH(jlUk$0!X29mIK z=m%SZGd5>v4^kR~N!KM*ClRNdZE9|&j8pFk>fXHEd?&u5&tMQ#RwcoX^vm|xv<4V( zm}Z}^ImchN>q1}4>kzg&3nmss?C3LKwGwL&)q2&RcgLy1-53*F>yTddbF}(YaCE%; z13~{)cJFsIdKB;_wyE(bx=ZylGP!7^D4$zYq^NxJu0L`cFa=mHC3U7QosHM(D^9sZ zGR4;B$~p4T#n^1RThiKCVnoD6^Q}jRDeck7XVA1-siP>NJxCemGSqBTI2SUt`RHc$ zIh)8#>5q>g*KsGcXDD5tW|onVXsN&J&OmZ61KdNI9OXReAfi&#@Qw1l1%3lLJtp3{MtwL zimEiD!umE?9e%*YhoBnnv~0HMzu~!I5em?Ot1%d={z#10Y{&I2201djuUA;7C!07x zASwFx`*#7sjdk7aejgvnPE3>hWsCkMvnM|69!-wl@mtD-d%uPR&xsnv(sYI8uucYGToxT^+B+-W`g%#p+K%;R< zEYMp?Vdayk8dZv9r24}y=lEAe>l#o%P4LDA{T;xR_3wh*AVZlo@zg(gAR5&i?jjH4 zUro2x_VNvFk-c*p=v`KdoU$fq(6JL0)r>acZ}tniua>)K#PBE)=8SV*ti&ePeU6D3yq*Z0p=AB!1b z@-ElQQvu_tNz7sAW!&YQiK_h4AY-y0I?Qh$%0>oNG8aZ`K}42TPLhhhkah#Y7M{=V zYNo!n#-A_GtemdKjuNz*)L>1+*&3Y-Up3+U=rR-M0az*}SI2Y{UW?G9{F2c_h^M2a zfgsyLI?)qDp;`TJT(4S#t8))9j;DzOkVti-pv^xM3KFAta7{dMBJg}yF&1LKuiVw| zn9DL6!_uYHE&`3Ap1Z&v!E`|e`#lAosdzTemV(481OKQhD5cst&dv7dKDf?bTW1Db zJ6Te4sa>EMMuxzO4i28Z2&KXs7qszZG@7Y%g$5{Q4R7ul6$Inux*8;O6&;-7|Gn zoV{#)65MFe>96OA_PL{tf>d*asR zi&tOYaPvBpWurUFXX&@eGKRmE3Z1v+0|T|97g=&x%fzGYR|!S}U=~S3>S3=O? zS-JAX52iP8?s01{-X89MMWECj1ISrD5Ub08;6!XP9iaAvHunX^gO{sbVCZg@J8SR6 znTs3f(uB{UEiDuVxJstYRuR&K7^3BDg)RZ3kv!;IwQ-sgv0Seo zFv@(~+eq2GSpQxr7JG+g0}X@spThvQ@MW_7+64tJCzGUkb8p#aL5w( z*Gcg8wj=yRZmC!6(FdM6?vQ?VzwuB(gwTD@Z73iJRD6?q#YC>M_!}fGd!K`U-Fp@WwY0&dIo$v zK^9OE1vh=&Nf=q7j%D>jJUf!9*ZXw8^C4ihLhHP^>Y+NxTz+#W0{fwV|Ge#E$%TZ1 zurH_!wX=f3s<52T&0Uy~4SM|EJ&sQI_S&$1BRH8LDpK-~UOzyRyI5En9WW-|g^&3R`^ zlqOl#N09hSJiJY<$C)MA7I!^y5<*BrLjQ=!$rXe*W~NOX+SnejXFDzQ=CG#I|IGsl zjeKbc9)$=zCI~+_IBfU3`k@C67*e3e$bYoEo!AS|SqJ4B zX{XmlmG8^M7fBobTf%N~#ziqn6{~0<>yY-Y-;y(L_uEO0X_hq8eRZLmAaO`Tqx7z| zD%-F5Q~m>gaMj>HMM|%tbR7_Yq<5q?W>29}l}Cea^ZrMGT+?_Wp0ocMD8mE5&$NqK&enocDif{a#Kux>H0=GuR={}#1Wmyfb;E=(F#)e3E=!EGX^-`#B zD;{o0C}3G5p<8d3pVG%d?uw5OGpluK_9?axxfFuBM{QJ6cJx|_ex?BwQ!tlByD$`eX%bc+S^1Lj!$OEkQ z2vwyI!@Kwn}Ts+BHJ8f^`We$THcbIwFW^T>(no7(nW@v;N=UFWZFB*&| zr1@*|qc`ydfQIKbGn;YG`egv@GuWq5Amvd_2kfcC2any$1`4sFuRj?PPPLj&E?rrN z>P>>Hh`(?+wemIwp%$QK1C+1@?XsnN*Z<2P-Y}a4Dcqj$#xmy{_ngq^-^=^lAJ3p; zp(T+wxA7F!hMP>Avf_|u(79ZkLN*6f8BKdZ3a*FWwtGG_W+}x_i7lB z&&bcR1gD!Z>+wduCk^0p&=3xcEIxAfRYn%A#wuSx>ZqEY_qLP$MdnYOS}cb{$ew38JGEZg}FziS3uv>9-_tYd@-Btsd$lp` zhj{WnvbRT8(Rb2P^J;FT#KYU^AO<{WAl6dWjRN7mYIVP#{$vrUM*V>hUD%5pNB~8KVUKqqeKirtMTb)edN^l4qXsYD$?`poy zh!TX7Cx`&@m;Or4EY1YuvGWr-akvIP+v$(8W3&e0bQ-^c;pg?9}L9`!qe0+;R)bFVrlgLMFl+nzd>ew1I0zz-e9-n%P`oRc(Is*=HXfI&i}S1&SPZMC-Cs2F#nT zFhsrnZpcG9H1E2wyu+OOdZaYlQN`GjB~Avfj>qaC*JsrcKo^vtD^bd!lIZx#Sf#86 z}^O#=|<^> zk#xZ3p1a*eCNK%v8?gsPDg$^}bc%bz1+PXspQ?t8$g+(wh8|gnm`)-jH%o(#lFT`_ z5c8b)GrIKb)$G^mmMs*gO2Cz~7%BS33kYf!v+6YDWr*Vp%_O+8rLdgE2Q6xMGE@T$ z-TDvUT9ZGXKo<|ossXM)YXCxX}` z2(fx`*M^4I0@^(7EGrxXKChJWvev>m0>&^`P(rj%K6L#|=sr!@1c4idn$6~3A?OdU zlkkS^ndm$VwDOjk;gM{$lNGyE!xXenklhY7yoDqRD$0sS1mIlZnkOEqJtlD}f_K`$ zNCVID(7ft_3h*t5Vwy7qhpnkt#>LaD* zNVdI^liZ+JFAW!NML1Fd9;wte3)XLZ;*!C-V_J5eSb1W<^#H$dJSD6^MmAZ-k`6R#RFte14^Xz2{jSVIqeg2}a8go4m*{8e* zGrRU6suz&Xjya^uVSFLBq)~Xv3hrp`*ryT5c+B(3fudPWS`^v|}IUdkO5?Nimr{uPWyN7n}&Q!A4R{ky+6;vz$( zl<+^8>5H7XxN5mLA_6b?EwtJB4s0vYMp%FNXR`i!(BSj{ISC-~?_)Czrm8yW23RAr z7F0-x{|c%b7HS3>Dwlj2Y>`z1kXKE<9b~)__K5inFV#6UjRF^7LgLa<==a5z_tHsU zykrZp63IZL5hdqu;yuK5NPGZbhAw7U1m#ejDHP&AC2;O0UJoG1DpH|T%D%efjvM0A zm5@U{viya!MGm7$slfL$SX4JFM=PudAG7;~;C@rqogn;tCMGtR`wdf_mNJOO~lHB{!0w8|sE^2}NJZhxW{iM7ctTHWX9E@p2& zXOI#Ig0Q!x3^NmZq_AUPPKP)FPxv)WxP;4iL$bPYY!9me%imS{Ys6+m!wXGc?{Mf$ zR$f#k_xb2`3AKZRb@Ycw8y!}7gFEXGcht=rB@jZlo{k^NvMED+8I>ZZ1kVLBflhWt z*Li0@N85hn0X}^2eH0C0&=^H`J2#D2gRQZ&rAt>o9~yi0M*7BWit6a+4&9Ef56h&> 
[GIT binary patch: base85-encoded literal data for a binary file omitted]
zAY%-=^0u3+0sF(cb_VTFQLfR(hJ1~jx0(keg66E@D!sOn0TgyZg=9|l?lMjzK3H1V zGs-m+BwTZ!fkq+ulhHA)$;dO7QToF$c`>M&> z9Xkg~zeu^(O=+4A)Z*%;u(3D2V-@7U{mE*UcM)+*2JG{d#DKaTO zq3O9s2-`Buw7Z%&#}f@smC;uyEr&b-t${#~bG!0$A#z;of^+(4@E;)$5B4brsC4m` z?FiaEedZOM1NlD2N-Y@*Q%0!jV3?~__Urz|PEZy(JhJLX#D^LTv5UzbsGC|0Sp@-i z)Z}GjI5y1YC^aU`Nh9U^Blcr(ZG>(JNgHog3*mr3ii!|ARbpp) zY=4EuApHDXHl4KeSZlR@&~@bs=7SUom>u5D>TFd6x58UaI8k_4-h!ZY7nDH9yl?Cx zMj6D^-9l)zA{lxamGk6xjfvY5j%~rcr?oEB=)PqtM z=%!>ZdpNJskk0IKb{`HauamD=p!`>C(k@ZZM2EsFxO{hkzlmL&UvGT4RjUxRJ^>DY zb$W;}Ye{ERZ-WLKY<)_;EOsHt%Z>38qT}-Ja2iyif`;1$xnX46Pq~@~mj;{{`w zfRNOVJJHsIYDutf7}i7S{c!EA^t*JZ;^m#WQ@1Cp1(%LhkN)jvcb`6Z7DdnohfqZp z^zTnb6)rcd3MmY8Pkpkv7=>41ajB4ZqsvNM9xe80)tvS6n(i~Vyjg#;XBq}~uS=q55l#o6W>0UNUiIm32B_+zz&PC@FfHL3&0%m*Aky~v)xEu%y>Z?&$ZtjCzL2pL#Vi2D>EoQ zNRN$BQ-A>~J?r9H%o=~aJQ>j>1!50uk^vHR>$fAWUb=~-=2pB%^3(z{8{kl44%Rmb z|JIc)C1z$z`eMcKq z()!30YIyrAiDY~&L8p`OQPe6j&7qNhlC{08Qn>6$_R#>_^XIPyxK(|tHV zYbAnA^#0%GDpy+Cyi#_Jz_qwbZ%$ZzH=x`pjQtuxasQb%Xobyi+g!NfMAjq?mE7y1 zC7sMzxsWs>Xx~=7qYMRrUr|1r3g|P&$Ad|h8$HV+J0PM*CM}Hrn204?4idF!F*$F~ z{p5o-!Uy3^3`96X4g6UKfU9FuXYCSqUYg_M2Qr{{zrT&%l4Oip%MmVe()6cZm)C0k zNETknKxEH8=UTGx*mS&#(R4DHn=H&ul2+c1gs<)MlBijO5aZa61}g!P=R4BgloY`_ zpa6HZJ4=b^LGkV(9O6UeQAWY_nQxDYH0cR*5iUB3fh3fAxS85MbFpshQB;XOxy|zH zGM8;+QeS=rR^)kdTCWF*q#?{+Kx&VuWj$zc1Y*6+p0$@)eW(df=PZe+`30P?oS4J+ z?=(o&{&Pm)FLtyf0slnAIF{J0&aVgUlvtW>#Wg}B*oLT{s8>=E63cW1(O(yVX!GO} zW+=CO88(j<^Tl%lrZ-di;Ur03=X{=wD%V#IUKBI>iR`@)`ZP!9^yF^fXwIv-!$d~_ zW_?@o17~w4kIy;IJ2KfV*2Fx<#lGpUecC?qjB>o^q!lb65{TZewZk3?rf;epaG)_L z9^4+do}pJT*I_t%NSQ)lfQuk*S_k%hBySx23DW z-)=ZBx}ro~Y6;>n_>&`#rpMNDyRBs8zjd6n1NBlDIqf|tGLzz3P!yAq9$~>}y0UdH z$s>}P3JILotl0`Y%ZGhAT#YaPGLSuY4|Vm?CmFC0cJ2sMsop``(nqjn)9$p?TXzba zf$9y9VLUZBW2U4_q?BT33!OP2Nl7GBr{fp;mzn!x3(m}KHA)Ian};$bpY19Xr<7%N z{9o)lhBRcjYjQJY8Jtx6&ur?5TZKy=MxGGKMchB+m?qTz3Ujdf>1hw?7krg{ZxCPZ zvAB^46P^*9F+SgkBsNXWz(dD|#fUH)mT3!ivbTBbX&G9G=vkP1@SXUHm`qt(#yYPD zDreU#k9!c#2eCZ)SIQFurz*e?L={No$Gg93m{@u_-9#fU`D$>|on z>~9P+dS-Xd)NsuD*fuCglSc0elK0b2&z@&w8DxSlCgW_TCu#?90QEspS*L=^A&Ye9 z2g)B8S6g9k1D@Y1S#F%uMDL*McFyxmlfH?yf%4kZhNu2U-TyKg4a=l`_rUJ=(OjA> zq1!#Nd0pl%;vd{Jv_C3xVYhC<2valU4+;~$)X)b;uQFjPjk(2phz_me*Z??;r*2mB z0dONLEh_5%C!D7ZH78zyREb@(0$fnp9FAOrNB?WH+L-}?%qHm;RTMUaob2%s1`)R~_SY=ulxcR-^4-w+VhO6?i3WA0jz&v`)7r0vG*~0(gPg?4 zoo0*57=2ze~GXl2-QU7L$ct1G0`X+@8Z!$-+OIyYPh7Nmh#7dGPxW zcuw;i1i+<#M&PedKTHse50YJbNOMvNyET>RUJkfohk*wfC`s}RyMgbpO^L;z{VSe7 ze;|d?nEA?CKwFs1pPDNMS!85wVwvz_grmww1S->-z~nf=gUj}! 
zJ%}|Y#^N8$w{M2Zthene(hSEnl3J`01mCUzs=buI75#GI0v+d{a>5D$q+fz{-C1|` zJ@-YnDopY(J%AruVsA43zXVy`=0`DJ{l9kt+3@})(gzhqUx|AaUw&>+RLoyGGjWXv z2Hk;oXkg1G?`)s>0tRh1xKK4_d?J8Lh3#@??@gXL;=bP4B$o(oxT}_OldY|0_e7fA z1@%wTj%5t!Lceqrw>+eu`wIO&mT@n}g0CQ= zr_+w}!UV8iG}zP)E#TBAs2sYTr*W+QT9ST{XgFQ@G3iW$?euQOe$wN|0+h@px%NRP zL$)awtyO43JAW7U`78Up-;Ldo;$cK{12kRezCyll?xSTL4AiIrrI7=pURx>I(YTbw z#&z!ab1`NUa+@`If7o_5E09Es(&g95qv}-@+?}%-XvJ^G`emzJALF|8Z6aTu)ZrZd z4{;F&&W}~3daBSjt=J@*oS;^K+=$1nCpPae3Ag()^?DS%MF-8&ORW$Ypn>d`o8(j} z&-y;juI@)!gD9kPcfFziGdq_Q_=Kp1g<9R%5M-r)B1KeV!P`Nlb@?&(A)h!~)h`@v zFYrWjm}fUhw4t1L(OQ~3rxwq)nS6x#ofK>cC?*7A&Jpo}4D!<3k4@X6v+(DUKI_46 z@p?AYhBl@!U_jJW&aFs2{zeugQJ}d^w5GpKNDq< zja5i91{8OExoT&Gzc3(wH`uar@ki$E@LHTqGbb+(F zi`kD`%&||iAf%;pq&B?hX#IF7d^;!07H+u2eXjRyDQynvWC3kIz^c7ieyH>Bw{hyV zP&>>G@*zm8Mm{41v#ek0REPOY8eHCLrd@Tmx|U|;BV0krEC&yfbgFZ>twlL`5w*^xfK>?kh;7Py8==I)n6GVHWM?Xh9(K+&&i>hC zF6Io5UQ!7&kn`*m$bV5*jeftLyE?It(T2j&=DcoZB}8(tv`$UsyXHlqgwh743F=D- zaa&bvRDS_KT@Fb>SSX#_yj-GGROHZ+PdNh4Y^JSEJPdXF&@vIcrTZd4qPG>jS*F1a zs6Fk2KEou8raiMYc^bK$Rx!AjFf_;r%+!30 zAQx<3U`}KB7!+{4?HlJ3fuY6@`jJ!(ljrcaXj1T1?|Es25`m*H^keYH-4TR;4t2Ah zA*ZYrX=oyzu$eg-i}!ci5$qZKr-51bK(jYVIryq^e|!O|Gr5CH5OCqm>u9E@SC|cs z>U5tCk3X2@G0&$emfwePp93PnCTRKRG|F{iY1_`@xzFh}jw|$yY|w2ylCpZ~Mq#W( zhcdm3xKs=R^dhq7;T5W0QvFN`CyKJwm!sg+&JP~hJzv%+s&Qo?0X`F z|MruL5y9{Qffxm{(2-H!&*4a6@1C?{*z{M*H?R?4DQzfW;%cQe!is0*4X(QK`HobN zcPYs0gVm&%E}Z?arJCs5ZWuuPjAg6UHdVu@%}BwQj8dW<36tO0Js?L&T*hWZaYr@` z|3Q(QwV!Wl+qB*nuRA*W6A|L{^Ca@}kIbVneqym1L^2r1NFal#kv-A-tt$~spfQDd zlZ0|&d%9A0IApTwlV>BG+OeK?<4IvDR(Cyv44lQm-U7*yRxjzR+)2}9h&WgO;nVK~ zmh^g~9*V%DD5;k@Gb10agt(jJh zJ934oC`yL#69gs3<3#bX8>Sf|4Y@BrW06 zOp)r$txs8_My|mda%rwrAVFBst*mw;iyi|OCRRge&Z&wLf*ZE*4Jm!s3cpOw|rArfehT&HFESQ8Chd{MS%%LK76>cZtLG1LhHl-hrH|ks_f59 zRR0IK9v_*tR!)kYm;Qtaxu|PBE;%GtZyzJvCY9GV(zPcKc^(=D0%%Fw$7la7D^Af`c9 z7|{!LJrhJ6sRSv>jz#bBc94+Q%?9)Wo8P-MwkgUI*tV>B3gS|fTCh8{fvVWr8=x@$ z_!`4udDfNEe~qw&KPHIBnv1G&0)9F7BEcR|Ix_JChCJ`a2MRb_dbJUI_uQByXSoCf zu8^AX^afRG(+lQkUpGgy(}sGau^r>^R#P+LbT!REw%(~ZbGU?)+Pg2ZfkDA)_mi6@ zPUdA*H3*B)(^-3UYFnOqeDx5T){hNuQz2*F%hC0D`nOE;Sb&}RZ8?O=dT&&FFN*R* zwe%eIMWuN;D*}j$c3QqQA0YVy$0?!VC>4wZI8s;#-{~^SSiMt2>#n8X(n6csjjv;9 z>V&I&pJvJRnyh;h^1^l}3p5l_?ZA^{=R^(4Z|(Yhc6X*GJtHwbM}PC+8g42)yhDlG zqJ3Njm(eRAtzQ>@L6QNqOO}reWs6&UneOvQ$>MZQ`r{`p+MZIHz5!Kr_?i7e9UPoz z5r05(-*jqiZt9~PgqMfq=t^7UTDeY3qVs*s%rYt8d>o3Y*b(!VYx|aQ2cFmQbq2Wr zQTpG~lu(7bF|;$`7o_!zqvQ839h1m_C%ID1Bz|M1&pd!^V3kEZEFu2wh7#LY z`NBcc+);ZZ5_4$btXsn1I-X$4Hw{%G@of~v7w$Lq?I`QW=%SODPRI_Nj$*GIiWP8A zp3B@4oRsD149JURPuf{}Kb1f1S$}}>%{0!;WBU<_G>xu6@tTA7A7+RD<@}nB8A7S5 zu>sO1VYfp0fQhg#0nI|g+y^oDxd{DZF z^)3b95V*v;YnXYsB9vl^>AN zp6z2;76KzU&1eKj;uhl)19%k%iE(|Fypr8-2jy0Em@$gW%j9QME(QB9Ny>V)HFTGQ z%TCTHVaUVVTDNB~*g}~;|3D;a7pDilKY@v1LZ#`cWCg8_w1!kFO#erbw3x&2#i!g* zqglsf08K!$zsk$^ospH_8i_#vQhrhoqden0kqa&eEpSL{|6hLKtGBV29VWsdZ8-(LC4&2F9IzCAl;DF>oZV=`K|;sXdO0s;6`fNCfkYa$ zvr3((>oJ_CeYk_9qud^bMX zU|9*?F~yA&#pZIFqrD^)^CvhP^Lnmz@TJMwEs3Vn{RFW5UTwjw{A$0G-oQ=b%t;6M zf>m&05=h5bXs6X?^shQYf6TY5HF2y^fwdC0xIA4A=YwLvwY(JP7vwG!{XdQ9e3%29 zR|Gz6v`+W*&wz6}W~%HAX=JdP@0TsM2yGSY7dd>UC5BtYO$Dir^;lcX>Xil(i9xA} z-MD^IUpmY05d7sSCK$`O?&W208z3BBchqfUZz`<%tIM;>YO#9T8*SL@oihn=)Z?`VR#v=1 zxO@|a>-PdPp5tC?p6tZTWGCBSF|9E0S~(xv*b8I zd?5d;#M2^(54?_#!;;fOuMZ%W%hdnz&sfzBxl#jbXJf!muA{5zo71IGHOkIkr5CxI zY>0Z^n_q_8M3i;e^PEndWg27nr!bdcBL(!fED$=OS#OG$5idtbpnpcGp6IfeEE}c1 zShF5>3m9i`X02d<@GjrSt<_}k9K(2|i0y*6%5OajBBpdCEgAa;AZGv09R~<-fJxiq z6HkBau`&# zoGcPB@ML>U&EJ0XQ5I>-xze)h;lNG*lQ8nyn+Ns96bNL0ag27F11MJC$I0 z2OqyMHnr8cNouzt#|hLo{>H((BJUGM92p|$vgW8IN-N1==-`#4b;S@K&mIMva4i|O 
zyKf2(zfDwg@nFen$^`iuP5bsJ5QL$fpOsRdQ+MrV{#3c@QIOI;Ux;_te28ymsYkG$?Ydj}Hy93>%bomQ^aLFuR zRaZL8Xa?V_gGTG*ZsM&XSF>>;?n)t6Df3{VpBHsv#w4ndmRSo3R|XWw<%d9C2Jf}$ zGF(-Jvb=(>?HoBMCCv@8F#Oy?F+#iH$_nD40GG`Lcv`Mqn?*_Ky5t3yZ)8K_kV*ev>IictMJcTZn0>(ygCc_I)Yxy&07X%85lCZaM_V`P+T@@6@ z(AYDCV~=e9<3#eYGQe!4?;F7D)N9101wUf9_yAm|5i)feUzBkm-9Co!NXH#7$jpcB znnS@8!L2ROkVT1-l>jI5P0} zD|^{)4-&gc!j5_uKjPkU%5K4@;Y$OHnaNDiRj9T|VqZk&)0xKK8yzAj!|~Iu4^eXy(HBn=v@YWe82F#XP6K2*dB! z;i(LhJvz6}dlVVn6J4(QyzUV61=B~0DuiBOfzdkUKIx;8LKtP-Bl*Q708seD;t<|}P9>^kP$?m}gHTi!%kY9~+GQ?Gr00kRw?vGi>Ko3hYD4x}qvzT7} zXiUm$ZyFT7*?k5f{v7=Th%1ISsQh<|Fz+gnteoe^|ud ziC6XW*~>8e+GCSdV}~*&a_zchHk3w@xA_Y@E;kmeKc|ePPF}(lD;o1GymzKS4jVG4 z2zmg7M&tPi4#(Wk^4ivpv=v#Ie*j<<0nYwHiCZ>2YGQTHozDSI8Uj`gQC1>8HSqA? z6!QEgv@kVo&za?J^`fNR3IuGY863nJ>{HW0?iggt&5InVgE%t= z7ryxf-CX|_LqQ!nGjdIoAG^GKX;a44bX~P|0V>>h(YOk=`+yPA0BTy@>~@vF6Q3e6 zD-4X~K~d%Uk*piwXoAZxZgzVu)FRzqG}ASC&%64Q&FRDVUfDT3f7HoM-_~BU+L4vW zrNgkj+k_5xyFiR(+Pe*Eo28Grn7B1*W=iqWN%eKFDSLNqVE*l5_TqdAnM;}OhI(NK zMTxJJpBz401A|TLudv>mR?$rrmrC5EnLR%)4-ARFSU(v*%jthWC79T(@K&wZHR_sS z6C3Hz!kPHlaoJdDhd2)O98Pl)8?{0K`L<)z=IVwjpWsh7&0?={UuG=s^TNRt6=id0 zRqrT~`O0G_gPfD|djSr#zenfLo?f?Y(GuL^qN$;d2@^lKZK536Pz=Wdb}l)TTg9I^A;Q8W zZ*6WrF#&=fI53n{e(dh<@YAN~gz^GfB#sU8J4J0MwoR-x#l;%MQIv_)u?F=A@#hlB z-QPFBw@$J-$p8CkI)15+k0DLa=P~K-h|I^afF83@;w|rpwE^pkb`6He4vIHN9xjaW zn1Fu~Gu*C-IRf!y73MB_fz&Kak!zoF+{7r?37=)np029Np8`6w5lWSXv=Ld`FnZ`T z!tDp68sYd{SNTg8{|I{^M&(J$O3sl%eoC{1_;}&{+kjeWk`ZO)UNoJ1uyB2rAALIV zTtJR^pe9?fZ1F};=H(;TZZ`h2=#Jh*aocFHMuL>`+DO)+i7-3gK^-CSSrIU3E5vF{ zl^F*1tUy-`otJ7C*6cNhMA1@}J&By@&UI&8wafwRn$+5OxL&Hw|A0mg!-@EDj@bK--VmI-2@#RR3gD~h-YoA z3BzzA5zbss7eTC#_EqyTkmkk%>m`^q1GznQ&Lc9p+{B{t=z*L!ZsOLFI{g-a|dFl@o&FT2-02^ z5R4ZhPseOkMOwG|4)?4qhTZAvt=X*7ZBrlFLy=T5vRruD;s(AyrkA4gsCe_*=mr(dm^Q18HM&2moT<8p z4eFI4w6ODwBl~99MVGcrP-g^^sW{|T4~U#ls|)J<&lP`&k_MGBmF7Jzc}}&1q9QF@ zdby76C8kc`gk1W693t;iw9wgfnK_Lynm^fb{M2mFw+qNl`~p}XeQCnPPw(P@&`C|=9;C3815 z2cbJGWc1@^VpR`N)QmFtam-ywP7S4Qz{oiJnL#UJbsXU=z1*T`(3&DN*S{n@EpqjV zbua{nGRzsAwGWcj&5=73J_t(@N?6)oIf^nUmIEwdmux}8kZbaD)ElH}U4(o+&(D`B z8_nRa#K@X3`Vpf)(Tkhiwrfc2gn&I3j1*}e1paH92#Wt_av?P$Uz$cr$mEkppMo>q7<)M;+$Qb`0J3i-t7EG3vU=P3Bihop6MLpyU~)jAEmAOR4oTLx0(G| z9X(?rH6GWI!8j)}KuLQa&*DAP-vet*!~1k5F>pQm5t-(bE2$2ao`^yy9w#EUh4^%t z)C`Wraa;_eqw3^TXyCo~P?mcIoM+3+fxv!DjUu=jh3cKIXPcdVtfra4y%&AL)l(LE zAB}1pT9#kI{W5=?0IChurwSSct^}%?n-$_iDz7_G7s1%ZE+NO+IB&n1!`}H(s$2WP zG4xy8P__o}DWMq~!uSQd;cy{*{A36=qk?!fA@_AsOJ(=3)dJ25kDSVM z#MNHp0_u4^g>n2*u>97UW0+xy$mX{`w{xy8V4N%0ors{QDOoR^;6@MmAfCvy+Tf4) z1A;)M)JgFZFFmCNq*#l@khGk6oBD#jrz7C}5ajT)<~jQsDX=<$uGGlZk54?g)(vKfgpTh%O4?TX?3!ZqUiV(7Alwyk?6`wBCA%j6Z>aD zu=X_liy?szX0fh&@M@@`KZ3j1+T|uc#VWEB@zf=yTSo;Otc$PdKU1t7gt#E7zFL-X zS>djH<*l#+_Xd&Eo$=xF7AB&f$z9r;hk+K7a5*=PXFMv@L>);0ag$BRaSBqF?oa^P zt*y5jvU&GNsY9Cf=y|BW@bR}A2rksy-05=+@XV=@Qwt!#i-XNXqd+0GQMEPbKbtYR zP3EQRGufO2t3HMn*Y%T;zh)0bJZog&fb1RISg@h|5lB|k8mR{HfcV7Jxi(hTt^!O+ z4~1o{$qmH=)nGA(8*_MdaQ+PO>RBbmi=3a1J@Mf-g7X>MI@$h`+VW!4mDqX@ikYAj zB>sQI>2tUV5>E*nP*?vj`F5oPN9ks&w$#E}=(&r|yrK%lO-@6R7XorfXYswoonTT# zi~lU-EN1G6&JF!;IJG-ajiVElt}p5Dk}J^|@pby_%|IP!kB^^TK-T$iRitIp!2SP- zWVyx`$^DDgX&GDE%KEi1pbT8$$=1aR$17fVW+3ypfLAPWarWpM0>Mia_m=n6G7Co8 z!%H&2ibgXhg^}3d>Z*SXXUX*75U2xB?qB-AgeN4wKn`rU&LXBn;#MX0wrwQlTQc$l zbcF8Y9ZXx$wtn$qZ>`b=3M528b#%qQwzWrQ6oaxvR*y>NeAV`UdrVyd9VD8JCuRk< zK6r@|Z@?>`-R+_TIsj9kx0ohDuSWe9I^kDtq%1sc^CPTQ#Hgq3)0l^k+Yn<0oXa3QC`W4u$P zK$;-d^!{Z8kqR#t6s4g--b}u`^OL&kw(MG3dq~~{O&+v(5;z9%$Gq&6Oc!?*09%p! 
zd$RH_C0DbiMZg9AkABb95bnM_e*zc?;+f_1ubuIX(m8Uy|3VeqmEGcd{qnMAe z=QM6;G_$lyH04r z_eYbLQ^vdQ-SY+_=eB1c6{U2}bp9eSA3p}{;T#{-g% zLgy#@4p}4lG+VbeR+EUKW7+*EzI{wD?{j^J6`9%-D|a0{=kD#f?-fc!y?GVijjt|2 z{^S`XvFp%?uK^|~d$zmxqQv~245Hn!2UB;5d<3R-a!Ld^EK;ZsfL zJNF%PwcI5*PZplD0C4mCNW#arbM# zoKw4o;%zpjd$Ay0@|7fSelLqDxndZvk6w1-PX|O z4b3^j%WkWnKoRYSYI27&IA?-{BWTi8@_GWzQuw_tDVu09aUjx_wn*{Mo> z_uq52i6K-rPc4HCFx1v_zESf$LScX0h=E%WQ=`F#R(=8lfEGgZRkikw zsAcyPdc8){%J%3`-zj?&-#kWY)5;H_{#VG+i zf!SD@VX{qF0)>>J>%2_wK>rp#v|IypK6c6<(Q^`OYI0aHk)6o4ycdx7(hB=iS@Lds zZO^{wMPcK%h(c-*gXB=$EO#GOu=V|x4k}j5titR7MOm$uYj;tHdbOW@)W9)tQ*ZErO>V76dR{ha*)FP9%jTJ|pkhQ*S>&FHSSTw5{h^ z$%2tby;gImp1S+{B-tbsz%a-w%6x#pN%fchgBG_PX9t%*$7tgl)0PBja004-Q6?5y z;&5gnk4HW?%Ys7LtB^(HQd_Bz)zexxR7PtJ%lV(Z-jxqaV&!X8ie?|-gYfUId<3OC zSE(DEi14?oLraLmJuL$IcYjU-aC99$+w=>;pEHpxtqr;wyaG5)aqZ4y-FfMY&cv7@ zD6X(Tt6xB}ah>6RuP(&XVP5Ejt4;sTNhSb<8a!mjH~ zzqfMM&X^=Hev+B2*FVF6Y6lE5V(bghV`2#kbNvg zm7^trE4{3a2Sx?hF(6^rGt}NtQ|nHrS_eK%v(`^C6V+v!%Y$PekT6m!9hmtpKnd@ihu{HnVD%4705LN`v{8Y$mbxFRxeb7bs$96spfy)qW` z>C@+F_1&-(UZB=I7Zs1{rZTfyB1KHae#+~B=Rck#iN4#%G%MYvpsZgBZ#%~(txR6z z0yM}Sr|poa?sqN}GtAP}tPAnzcV^XssZyUWqvgSo_PT!#%a-Ac1C02jRRx2-D9q{M z5FcBkRMa=D2F4AQNPZw1Mp$Q`s~fL&p6}{q08eY?6`@Dt5BK$C=Z}B)hJr8K8{kb; zm0X}GexmF)X#4$w@gKx^ibx*c_h7JkPu;qAykm3!1B{SA!Pfm@oS?S*qzVtSa#=Po zeS4)9;&UTSF>Jo!5~q8&tv|T{7>X&&b{<1lHnL*n&$Wk-wZz+_d_rJ4X>iK+JAqsU zCICVaEtysJDDC$9z>})UA6cOq?y*j5vjiCwKQ*wgjB$dar09=kqr@N9I@zGF)k~e~ zV1o-a_$;%E#}7AMcCQQZ@g-J-84h~=F1@wQ8TRa1+c89X#QJzB1Q2JVzG0Tt#xFUL z&)kzhtP~@DP%Y=jT%ny`p;@fXrEtEM0Y71{`)J;FsbKX(o_lhC78nUPK`$E zxk75I-5IdLy6DE?g#A;Wlu?HI8pNL&EY&*$q9A4{W?Uj4RgAL$_5oca1m)AqYR-1A z6Hx8MA)390!{eebGla~pL^6R$ox>t(cruU`5NhMMTAW=zVt07n>SjI1#)=C&HBQ@@loE%EIJeuJTR0AK>&1o-@yC&ds$L!Wc zFO@PLirDX0hM^#6R+H{Wb#(&Jv6;)D3yMmY5hOj9U_e|HTV(Z+FQQd+0jgMp^Ab33 z$zFoUE>LP&Cwro(CLhKVIuA9Z6jAgLO}_fub}yiN8quq)SW*M z9#cQf+xf(+c|MZJo+%;9gI=)t#SiyWW#6+fg4y@apP`Dc-yWu=d%z4)-PPtfDJ(MN z*!x!};eJVswUdI~CgwCv-?RZPd_QnE54V)^Qzn3^=-*@1(nNHz&JaH5uT-qIgoImF zcxC#JwF_J!-J)TxTm1tucq|8^6qp-P`B?Q^V9|wI60FI}_(Qfi6gt=s699_z6a`d_w4 z8bi#o*vy|~#rOF%EA?QA9_tAfTZi3x)^RANR;=D6`&`?oI=x{`kVgJzah9G66X59wiZrnrId}u802_;+I^oeJD&-wGXQ|un zMEJr!kaHnFl$C&B-r0@0wY(_YWk^nQJn7`QN&eK{3^EY>di%>MNS1B2wXkA7K9ml= zmRB9dl4p9eTlQ1#n;-^MVV8c|rz++nD`doDV5Yp!NMa4LcBw`PLDx*fpLv<-B-G+8 zf)ST!!Gf#Vfz_AQ)JSa9A~~0J;j6S(+RbYwF3h$E^{)K~!)G*8_3HaLhHnKW+A41z zxvG0lqoya_6a4Z#oN4^!34eQK4hll>sl6!$N5Y2vXOcl08k_ypxTd-|C$~A7jKQjC zosnBwD-OkxMo2`YCcdlUCQqAFu76@MsH!vlT1!w{S{7f-fINHA&+BALiaH-fi|lW8 z9K`ma+!jO{=xqwR5)c;na{}0JA)}CY7ias23fLJ%?}7iE7~uBjXew(SFbTBlmd>|q zgh&tddEmT%UMK6_8U*`i!+%@}`#w)9k|7I}J~1#SY~#Fw#aGV#)rtCP_sdkvC$hZoIc+}BZw?Z>i5stt+x=x z>Z7O_Sq2G+G{q_;a{v^+sS>nf*pMeMb55|(S@Zrg_Rdt&g!&4uGwmMdakvn}_->We zp^=(V66-quk{a%6t_{Rd=d4XP?3Q1BC5K&0!%fey05TXgp@k^-clE+E++?w}^Wi;F7K_>~0`ATQ zqLz`x0ylCxZ0L`fzbyxYB`L#^*jufjPMJ6(L29J0#SlEz{quh(q6{&H+LXKY`CiF= zhCk#<(&NvA`h#{)?%QzeqeWZt1z@HI*orH>cCL)Q{|<%m!Q)*XVa?ik4?EpZ6xdvu z5v4*7(E&PJ*C8jY5iIPdiG7x~B3wmK3z(c058MNfhDx}si_49W*|Opi+xY3D=g5ex zIxpnqcYV4ZJU(RsEeNZaEr8UMC#Al|@is1O)%H zw7-4nY!X9OXF+RND?c9eA^gHf-opG6SW;)C*9`ksOnV3|c}%f63lN(hHimwp_kJ^O z6VVTUVzaQcUhUoGFw%<-&VBAlR9_Z{6*r3EYq`k9Qa?TR#D|LTvteG?%ys!6=l@qi zQQaP2S>M!HK#99uw8gks)A=2S7fcwXcF*;aRCz1H-0mL1-wSOOU6%OfKShS_gLgV` zGlWJz-R}4a&|R1jgb~Me-@~vVT`CI0fWz3khN$O536)cAcPozzQgjjcwIuIo%oz*T zj*?0Fw$#5e0HY>>l~8u3>J-QU!h*ZHuOp>wXRyoK34Cw(LtNh*66w8WOyaZ8J0zs`VHT z&ZRSD_4!!e&%4A1`DGmHv8Pe{l(<r-e*PrRwvI9pi1%27q!_*g!q#CMlaen+PP9(_3Q^E*md7 zufK?-!0iGNObJc?QxNwW76J4-cT479HW3>yIVifB95<+CwP$g$-vG-sl?L8M)ENGg 
zQ@3~jbu&WO?!E8n|3c3EF^Q-Ptfv3|g2+KR(Kt4eS3P(`uErm=B5N3HVeX(E*C=2| zuZIsBfJAdA8(wwukF;0pubSE-vOj(uaY)ZEvE>0DMdBcjXZ64Zmib<5x)R%)CR3Q!&F>;FccW!~zeo_|^J$GjcWh z<+YzVr+B~^-vsL8EcgFCJy)*FyvKqd7C1vZHDlMKr6kn_Pr1j9xe%W2#=%#;@KWf( zan^$dR)x0Ewlkg4TDx1p?WYKP6IxIz6PtM`NjO)gC}Kq&5#p=c)-OXFm$(O9(s3Q2 z$xl8qSjK4*Z^cNmOVUxdyYBn{0xIzw6M??-JI;2t-7D+l>jWRyo|I~sWutnFUDoa!Exd_c=GAsC{KpUVQQ`^*9>`tD=HC!NE!-;)1G zK5Cq7ZzDvsR>)g*vh>g+R$8{FC*hX$>%*8fQN{u!D|X5GajZ1`A@mb& zpr3~MGl2;jzwE+>;j3(s>uk1YtX4YCOXGl}lm&jiE4cIkZyQE-S#be^G$A)Io*04j zjS$=1EKs{TGH9DGzB{7iYFB2q_|AuUpVEu3YvgP+2wo)!Tf3`&SgmnJ^D_3Tf_h>5QIm9k+{OA^`gPukz4_8j(qM z2^+I36W4H>YX6ftCzn1_(W6?J?a+>=YA zjAS2&85|vpxk(kRI{hwWN;c&R7+EA-#-?aA%^qw10)Zn`=>_SAx_ zoj<8K_~nMtgg!sxJ@l|X6m8F%1a6uz{LvhhSlYz4!SMfQktQ#l9MbztCrRgG3xEYH(vaXLv5 z!@yn7R?D1&rd-^~Y+u-nt~?r|PJ5p0bkj}CqICq38WX_|Ws4m7lMYLN@5n7&gYD%Y zX=luGjw{~Wisn`7AUxeKO)MX%giv5<49-E6@ZJ`+H|S9G(>PQPa7SOYFL1xAJ@(H&c6-JiXMI+Pn-aEz6G?fg&}AFXHQm>V>Qej1+EyMv#k`Lj z;dPVZRY~~IY3u=3j2u?#vS{@9x_EY$|WPnAXwUrrXjdj&@imG9_zI4V~ec8L@SBG3DqecyIQh_ z$3L*sO9Z^}YKe3igKMaWB?3!Zw$FTeFc}o{xbdoV0X0a_fb60d^dP9_|WX%QH}q7UaP7T#%IaNN&rD+NgZ~G~)sD;h=Y6T8F}r?mU*iPrwTP zfK@08JQ+p&28ZQ6#dWE`l$<2pOzh;GV%G zG@^V-n@92|q5T1f>W++sz~S9pO}WfUlAaS-RxR;^7Y0ygpDa2;VSUMj z!59j(371uy(PQrrQD)7@yBud7KHvnf_TXnageY3aFzJek8Z5l3SFtrAXPnsgLl<0n z)I$M)!t;KNL+CMf>!KT0EXazDHOcy(B4|AWOo|8DM%_*Qim7XWRuOsLjmg|FEG$<2 zY~*3QjW;7Cz`v;Z5yfTB?%W8MG29*8#R!uYuLru?mV`o8%9pmZZ1A3tpkPPv=g(#K zJYYBY@;ydRd0^$Clg4EnwTJE*9P>BI_JF{{_kvqEJn7P|$JE zXhX(LqM5pIy^V32&{gDJnmpwy0yRI(Xs1aDf3s^7Z{GfB`zjbl0($buz=OT{UUIkb z4>)2`gf?jugCj4=?c$F9Nahux;=c=0D%Gn1VNy4gr*$N>Evh(dU!2iWgvA$&Ctqz4 zW6?y)zMq)BPD8E)WZfp26UM<)^=%JF#P{YUdx1!|TziO;Ubsw~nGWW-Qsfcf$c;W& z-6acQL^qW2dlp3@)G9;&0wh?_6X_KLl35u^WGa z2bzjxa+TrW$_YmHU#BuRM%id_sq~zM14TVoZR55st#jZdY;FwoX|n&$yVbzP;DDWF zVo`?nW2!^svVeu2XEPCKfIR=$%r)uR8;aA8K=Z(VoZ!<~@i%c7O^X+L z4x_>q4RQ7;2^{68Ca(_(r<(mBpd`4DeK>CJTH7Y_K|R+z(K(I?e*RK!Q+vP4u0fy= zAZ+JT-Y7`G zA9Bw~wE9nvMj##51%4ykJ?+>C03B2mRI2OJm<;)0pUtXy4k9gH?Yr=GLrC+5ewQg? 
z>Kwm4&nmOY^;k@6X={QkDEC!9W!XbY&3|>wz3P!3ZJ4$7R?j*; ze!mKV*93^7evu6!9Zbk0l?b$ z+phb!9Q5Qq&6GG_3tM`)gYldF@7!yHK4_*3GWWxl_06FOPcnhMiJGpHpEq3LFfe3e z-b?@h00001L7JTyFaOK5fDn}}?P4P}@3|QTM2Y6#cViU~P+c`PgLb3(2$KJDSXl=j z5P8I%F{GcO8db`QWhXRD*tqsqji&X2qI`F3x(`dN7tH`>;q4 zNCpvkZC<1o?*i3eF^O6XVCyluAUZy4(N?Xx7nKgY@tF zaehgHJD3zoUy#>_p`^Ylr{Rs5xrv;=BCIVrgH7C~R@&;+-}h{7_5!e~ z)*^P;1M4vN9m8{Aqo5kOvd!=Om#<;Cf`U1iFlHs4{Wfswo&}M@%m4$w`rK%))6Z+6 zk8owIK}Z4V(!J`P{B*qFi4<}+&lyvPq|5=%$4rF<3>6vuxLrRxv^G;CVr43@Qf?Jj zQbp>&kX14pP`rE2@h?2AmEMh4fW`lwd0>*TXiKx0fg6~x?8b?itIk==u=BuZM5_o)+xRCyH&LkH|9-KN|<4$%+LMFFw2sv z-xxFyTLYeH#_#NIvd`dr+r^`cvA+dEL%2Nqj`xHFqE8|L!-@C+=69w4FQ8>apP<8F zR1xQQB6j89Ag<~w#;PP?kkA^;#QO!N!I2OltjLa1{U~g;?&0XuWTpM%KSXZi;PX9Y z$f~;B%x#GB&Klqt0yU?#g|OXAisj7j2}7&3mpxBVh`-aVOp8WU$K%}C0%oteN@rH4=vSG-6+r zWlr1AJtaE%zh8bwe-_smI#-sVJS#l+!k1~`E-)ZD4u!sR zu*s!N_NWn27-}RQ^fpr3Q5+QT3VuGMq~uBAa+3deUx)!nbeoTrE*KZ+=|D5^CpZa+ zBI>k3{QbPB!6z2e_;ZDXUBS@@jL`+Hm)hQ=;HIX3!v7Gt9|hd>PPDy7xn>IWs2Y;DDW%BAs|A*W<*fYr?l#{e= zI6w3i=deAz4YEK!jeEIdtIDV%gjge;w<1=8r-AMP=~>1zD6Xew|DmJ=t4@N00d-!U zUE=Dw!07GiK>NRZl@1wyOfPz(r?4Uni60et+^K{CHgJgip^{xcu2O7;j3bQ(K<7R; zl{tFAP+n!fH{utY!rC;g0TgiB2c5e}Zy|8vG^XDUVP{zWE^ukY34B2Cv>*P$_is`G zPpeZYwogQ+$yyFYVzeAzw`i;OaT^Q(Uf@S@r|O7&12Y8YEEt0gLF!<=q|%I)&XHK=F#Vt#u8c8ING1MbsRO%UBl=NIYRw7)B)p6 zP43i*zp4IsMm!m@&U&Zq~$ZTHJ41vFh*SDP{ifw*N`uZC2GnU2L* zvm@osv30BUYxLBzqf-mU%AEM)haRnd+VM`Zi!zFBYbXpFTX=T9Sw;HF%6$GA;qN!T@r{9Hhdvqs=_|CnmF7D!NPWm5pFw$J5E z%9&BGV|AmYUAzYp-w2<^$L2Vzgg@VydI0$??`T7R-fm?1RIx;j(PW0v@<3F%#Br)2 zcY-X|i);J)>M*?eENAMjb!2>#LhJ)szjmA#htW#i2UP?JhnPdoVcJvEEuP^svu}QdYUn>b?EUvT6_UN=6Hf`7kjWT-H`7 zA9I6O10^L) z#4_^(am!{%fEi*x3R)0+K_{zOo}GurnKeMIu2@qo^K|*7e2vDR;eEoDU~47Dq4_I} zG5XE=olH>4kFZ)}pM4^Q7&f@erG&<#VQE)esOI(TEDNb*8$+?soWxwIJIdgAx<{I5 zgRu|zoM@;Y#Q}-doRioJ$KvkF3Ef^`GQ_ddzy|1O18z;B+Q-}>1Q{FRzBv~B{3ZOlamQ-M zCkYNvG%}8&%1*mJ!-i{6*VzM^OB9NoExg15erpbS)+2wGRSN!Ey@73OUHz@6DI>V- z`S>Mwk#6>yX+D?xit3vi1o*+pDApND3 zC%x|vfq$S5XSECKgy+QQ{1~jCfs|`7w`ArQnPwWx55;hd3eI4_F1e(7Dfl7y;SdZfcj+Sr}O<@p7le=RucvqFgIKbn!50v$5oIgBk!f9@Kp zEP+cGW;+4oS3nQzh&m^mC6Tn`uri5wtBj}?&h$-6;)hvD6*rA7a6TfEE67(;XrU#u zaW(2SR7AmtT2=7~?a{f!FwCu7>-SH_RS~^_iloY2iu|AjH*H{8OjQ zG50A|dvD-`_)(3J70dQ!YU{1Ly{yNK;Fcp$S^Y9yCLRl94IjatjKazTmFtYmtQXgk ziSNgC`XGeVafjkaHDZw<{{+t_K_U

_Yh4n=zSI#%I)~CBfR}#exp!PKo!V23Pe>c0~rIjK C|h}QzjvZ_E=h8{B07oC>lC-1)7EI zm)uZ+n?&!S?1^V%KOQ=K(UE7&y_@!u=I%5jT)aN(^gJ@s7-~WIHSDy+OI8HkSFDXc ztojYU=)WnB5CV+}s3P;V&q-Oyk+H+>Jn8c^#ksR`mT19S3*d^su*mchFk(H|Z=QWD z_O@By&e=NS3E+?YM~n4U9rVZST`A8>j+*rY?^sgQEFN#jCXKFatHv*HE#bL?q|B@+ zO5PJC39;?L(r4_vefom)e>N_4oc>2mLn3$s{ZhLaIANU>lNdOm5A4UBt(N)5s>t?V zoiR0pV3zY~=*hctDD@f^tZrks6Uq@O3yAd3QlctkCjL?yK!74g-{dh!^1Hntyvv$@FF&Tx5vtxu;Ru*U$rwnxi~VpjSU@5| zDTSR3_DM*~9b=JaijQg8_(!qBOS>#n&FPo3UhyvEAA&esn(a&u~F1l-1?Ah7*d;nWb=uus34(JHHFWCw| zHKUdgu9g6>67IYUf(CpK&2nO~f8K%S4(8s9p(7@v zA?HmJY(F~vKtb1B<%zLY$OtnXEV!CT-Ss5dZBjeG-d{f3G|4(q9b-MnPCklsproMk zyj`AxsZWK^*#(-D*`Nj1k!7NkIVQL2PLst$|3+I>Fo=p86?HWk+k)zlX`KA7`^i0N zUQ_o_5*MtaUif<8&}(r9oe$IyXRHwp=Dobh?-}sC#}w57>C;#_gP;ch1zs0g-<$+8 z&!lJ|t{TepoG#nfi>8+kkMk;`IgcX$<&*(p563vC=@-D6fw0;bw>3(o{ZI*=&^vXK zKa}EKulp#4ib)}=_Wd{NclD?ogkH189oUW0P|c( zRNJH{PPQl8T?~xl@K3fzcXv9qh94Npp3p{fbw)JQXh6awV;Te~JFxD^nzK?s=b4Y? z-apewEO|TMHT`#m7riaa9+1KPBZbpThn!Ep|Hzv@uY7ekEN!?p{E(_-XeO76HMImD z3(JII@V7$kd4JC`NI<6`F=}biP;Z~LD_-lk?2;!i2w&NmWpTZ~u!q^K2t6d(pA(KP z2JzOs-lUQCO1EY-)y9tQ)_As-iplMGoK1%NZe)5+)*U>;1_g6$gx5S0|IAi^%f{yi zKOAQ85(|1!gz648d#ym>8+ucLk1rr^#0m=BX?K!ujj9XA8Pr_V&&!sDC^+wCq4U~+ zABTrz9t^jaJ!RL9_5!O?dl!qvo5M)@;1`MLDP*hPh+h$Pre9J-mOiOTNf@2AbJ7tEWHp`>Kvyi7 z810bd79d0kx&nS2NPH{Z_$#dh83m&#J))>juQ)sRIt;Uq{BV~u^f6yhMvKadFYoXlK3RI=?-qIb@oHsk~fB* zVW-ruVPQmFHo4E2fmX7Yx_4(8^Xc=fRvb!wYfdiN=2<``~ zd>CRiHe__|_H%W6IryZbMn)v2R)PS52_>!5J3^Uo2Vxi3rTsM4!RQM-PU56mWr8y3rZ|G&iTPC%lvyBS70JtRVDhc7L=7?m8M}b#j@Z6 zI$Y*+?7%T|%@b4dy0sctVlQAQ3s>@8Xw zXg0mH-1FIg5P6Y?n47+#&Y=`uzBM1+NtYkZstKl>j>38a>M8h$_qPIRXNPt(0}5%5 zQq7^8O}Kw@IsC{Q9vH_}7dI)OYL3)6n!W9O(q9JVvJPmaE#z^4bI@%}o_L=e>tPI?8jSQE2qW4RBi~#Tkg> zacL8S&;{Wxm9Vt%PBA86f1+}X^u=h<7ML)?F;V$Z%$FP&-+Bq((>=}8SB@!HJ8Sxb zB)M$zPEX8t3ie;%HM^pPIl&rs@2&gCKE|)LzI3TiWyWE@Fs6To{p3qBtC6lbg*T;7 z>(b;{)6doC<2>tL=0szwfs%bR_&JcuPL;|7*3sIWQoCszA}1RPrTZ zA8%51byKargPVzd9AzO%3*Nx*J(Jj_zk(P4oc@w(*t@<~MOr>& zK7$P3Lj0?hBo~RFA0kFfn#`^g!qwpQd#Je3|3>Y)?Uu>S{~rHTTM3<2J^kZ|18V^F zm3@sI5#kKq_yqr@5A+$Tjrmi8`$oC&IU0&Wn$jy%I&_wjgL>X`xe0!$%6oPwLX9&D zz^bD{gVX^%iOh*aW2dR&yX96Hrnw@NHfiB<4HQg5QGI|*f*F1CNFtfWddEw!5?OoI zdgD~EBT;vP)t-8LAc<9m7dQ`9qzvZAg@k669V}G@TV(V4G)@+;nWY-;m;`vvp4IX#$@p-P!E9@Ea0-i& z0E>CDvnp%RAN?&-(c2#zx4I8bI|_{!6p8y3>&@#Ii35x|6HCW?RCx@$G>7L`uD)xWF5fC?}b z^rZCVQ$QXCCA#zoo^U6qA#~JxYif-sshMUH! 
z0YOy}CgOCxTTJ)KMuf;#)!B0I4!EL+O0x66$!-dT1{|Zn8)t0JQ zcL|W9zV?2`$gqT4(XSE`MY!@vLMQsSrPTjO&v5K^J;{> zO+p+3-VXFT=Sp5Qfgtcen*lF`GRl@}gCOG=gm=-l!N=a3n?K)^7C|KfIwX4r2%pGK zZ|Y;G*j7lc*sa%XErw21hzk`}ksBa}kMhOqRQ6I^E)5}Jalgh>9%0< zxh&vTsC)WpdxNS~(KB>`99Gb6icfX`iMaznfn1DeOL+T=$FlzV18}vl3`_^K>o>op z+e@aECSqtBrf-R&HH_Zk%s+espF#4RE=#(h-{oHyWnb62oy808y@l&#Zwvs$UwfGa z`(PgENNF#Bh6;XXtoBYHEy!$>5Aw9Qsq5SRqENE?xiD6Y`P{z3{Jmzp>2%q>Z-gNl z?*{LtlODB2@T0wI_ro>cZW^hFn&N$eu9_S)yV4U!~!QUFj{X|oI(YM^K9X9 z{N&u-Z!x>{V+Ut4>~)3Gl4*+5>7FRv9=aeKZX?*Nf?mx~z{ps-a!5Eeb6zgZAdIZc$63V&>w%Vh3{F!}qu@=Zwb zESk{62E|`QCRmxn0Xs=5a}OOwZLKoo5*zlCU#?EQa%=C3f|KvN196&vUl zdORxaR5k+0(%6FqkhD6Ah}l;4UAI|r`xH1wGt=p9EuGD$J=$;OLPXo=z?*1w_d3pY z5m0X=E`+4oo7`~>5K}YeJ?Zs69BcA90*j+hbtpNf%ozqkr9(rb1=XU{W;i>&h3}TB zF8!WWrfycD_v!S3LZ;{CM%CHwN-G`#GD+)1Hz^KQ znlh0p`VS6dF&DjT(hc0`jfQ%{KaPLl6eE0ZmnrVap1HarpTl?*r`_9O8>dh_umorv z4m~S=0!*$VAX@B&XS96+u}^DpE@YvfTg*50zRB#hG_$sXQXndq%VZ7>0WGU~?Bf#hc^<$24!DW`Kq#yNSI82D= zM3)MAeH>t+Z5Y}KES9x*dpisZ-rY4syi_%UI9Ed?Mwo3Fy0NBP?5R*1XSSzGA+o%E z4|wu8%x_g*p!g4}a`mQ3s}mCo5dxAAC;Y9l($p^1NEDWcU~HAb7rMnf!K`DJ3?Gk# z^Wz6If8-sj&2Z_0low&wNcMU-!1q_fAXMaib{SZL?URWa&A+4U(0U!S`M#Ieq)aVS zqRb8@l731+C?*uB>Q>sh;2{)9^)Ni&HK68XFVc5?%mo?Q_U5D%7kopLaFraou;%7< zmQvNq`(L>rpC~s9xXdd_yV^KV^`DhO(uk>VJ-)Q8?M!yHF{rY(o^H{zI~2BWwEB?b z(G;qn^Z5nnkc&xm-kX>@PCKyko0Vc}*8KzV;E>!=F5*~(mGka%(w^16XBUf^LE5V& z`E+eWRy}`Bb$x2B99;dF`)Shs#!f8MG<><_ct~O3qEo{lTsHaE6&dgqg+1Y(!~{CO zQpjYc8uu(+o+8^1?(qk_D2x}$*t^&LChnX!CtRtOP@93_{ZgWnBylimkVi*`7UGEO zY+Sxk`rW+?0U@}4V#4|HSHxy-Kh7nVd*< zleV-zDCjT<<1T~AU#9}y+_dRr6KER@X8YjNgDEY)#&?Hv3-wG3g+!<3fsVY2K8d%>>Q5@*(lGttFN`5pZ1>nzzp&YT1O!eEo-L`}Lg zR^O(sqNJM}N8o55G1tow)UNW##DQy-tPav&)50y3;ct;@hd6b5DXrB;1t%6ExXK3p zE0#0JQj8JBvf9}HD)TgrzQu67OLRqjWEgx;=`+g@ti7w*;k4OJUJ2NnILKoM?9FIf z3tuicE0saA;ZLC$^farcOL6Fu99x(3mV*MeLznSY^zrA@Rcd)lq1LLAIL>>gJtIzc za_;v+VIn8&?h`O_%vFyBt1p3ZGUSo0E!uR(moh9$YOurnR?uiX4ulU|c?JkRT%#W{kfD4eSb{Ub6hL-K2%+R5+kowk z>co14CwP`laq(afVOz-T#fmVXi~4%2kMzv?jO|<}{?L9=qWUi+fGh@~Si=OOqu$|w zxmW3vw$9)nyJBh5h6v{dY{rlm)7FRB9l7i8%m!L}!($m=0S!wEs?H%mTZiZ`M4BzeVjegUzzFAO;%42QYVe$5F0iuklGj^%e( z45g?1`)GwZ>U$hq)6Qd6qhNu~dd&JUNE4+DC@I>l7rbfu`B4}1w8(}mYFai&krDm2 zd6H8y7l*yL8S6IJoxA$6Nhft7`zY)3JS*{%p+`YR_gP8W#)UdfJ5#lmMk1DexR(qu z6BYkv|Go$!NkN~~Qwk%0r1IL5;-R#qQWY~FQt%+sbbAMPQ=S;%eUY=HB|v!uZVHTf zUa!S;6Y1t$+pSPK6GHA$Aw{$tHjnYRg^=;Q(SN^FQVma17hNMq<9O9MKruKq*-+CE z_01u8_m0F%$Cc)}r*bljFxONz#a!woUJ~ft%jH@G2#*;S*^sT|jhhb#JrDpZrHB%G zR`Ee@B$89~+MtiwEby?+0GllU|DJvt?afx%?Tm7Zquu13|K9A;m<$>_YJN=0( zAhEhsF^vivn`|STuOm5=ULrNddkmnH7CA3SON*woiPQ!QVi>fEET z0Dlf1e$s_ME#nw?X~}kTp~N)-uc$?{v~jONLKabk0c<{_HR@2MvMboOMHVI28~~}I z!5FOVs|gAV%y(w^Jz7(a)}9bN#5DhV?;{QMmJ z>1Ore0+T_o9Bz@0#_US1%SJ2rk)P6LSX zaM=};Dwd5-_WA%AA^gsX~M+E zzs8hA=HxCixvIOYV=Vj zevalNKHigPKB&qD6J#BAhMkoF+(cZMOY7NR;Ku=7PAgP|FDw%3~C}_pW@F6&H6093s#9GHCV6Ng#W5 zA^9u~`89vH%ICnzk8G(@kc#T=KmH=o3+C(f{K4g2Rq*{Gb7=NDXHu^K%z}&c8|UYz zJigarS0jzlg*Or&eGj#*#vRh&4qF}nZ-^aGBG)lf9$i$GLx1^h`@ z0UDt-b**_Pup}kx$b<@Q)e&7U9I&E}SFg;dP~7Q?k*Ys8_ma4WNu?#wATRaie9g#j z`_Ta*t-@t(^@JSGTxT@HEVa3?m=xov3N#VA4dXg}Ww7#@WRMu?q5!1n+Vb=4nFc!t z+UjA2Vq+=l#I^p6kFDQqOx9oCwa8Gs_9db>u+$U?p9x|FK$gczUF&`+*lrQ_Zs)H- zQG#^R7Y)pww>As4Cv~I*o&|6~vTAvjO?L3gzfhyG0v=Iod~;}j5LOwzq<-l4Y+i&^ zippNCwh=mZCM!I*7g%Hm2w2tB3TGjiR-6fe@mFAfT)PP@LG-*LhAg9G=}=OU`P`oM zADnu+lwB*>XWR;5#V&94VB#-UTifG3*g3qMUvb{VjS)9&Q}QNob<-L&%$??FP`|KV zX&vbdfw&28;Q|Ok%I+|W4Ur_*3;CRw-KKKv4QhtevsTbKrsXeNP4er}rjF^o_n-j! 
zR;-%a-{PYxtsa4Xa|3xn*w1n!bqIRG3fC{L4AaSyWCJUTE5-$@PB~{~6^)hxcP{R@wlf^`rdXiv0fZ>bJbl z9xr>A{FV*Kl?zu?B*RD|^!-j&WGm5ImW!S007h(J<$a4E1hxXsSq*u;z$^JsqJ3T~ zbOx)K#_rHU4+tdqTBRe6SN5m)A&on}@)mJv^uU>oLIoCzMW~Hlw>bgTP$g&{+#KJS z&BWLhpUque8&mtjtL1EBZ9Ql0tTIltbs9adtM>~dXX1QOC1j?FetB21^|><;L0fGX zX?G0B1<#u40V*0D$Y@1(<_nOPj8JQRfk<=YB)iM%CmFKGgN8(f?js(-!23mZ&0AzW z$PW6FfkF;?c&sv2CDo$(@Ufo!W(d0p_6!H@so@SHH2txx8C)h8sf$VI;k%<T|k}vMn&c_5C@#=XQ^yvX9i!b(BEQnJ+=~+%Vvg zQ`SB6h3sMwcprEiLRan6)gxFgl5*fdd6b)QJ~M2P)J+2-D6D{_D;|{qc(6EI$cB{z zJ>LM#okxHLqkyziUVec<5?#x`J0?7ZB(AlBv1+co;tdoz3cW250#N|&aXBK2%z)__ zB%>IT^&UYUp@=m1dOVc^Y%`Ld3HE}+DLs5i*_6UJ4Kr*2%3Cj59Oy$wy!^M+6lL8DBp?BF0&f?$jd zc+wpoIoM=2ZLN-CKt7LU)2|z*CwJ%OzF5OpO(jZhmoD+kR;h)Ve=&ju&~a!oYdS`UfB5E>|;nQDTCJMHoq&!Ai7 zt3@!b+nb}UljwobnmSQoX$*>|B+~| zW}GJl|L(sl<1o{}I$iqG_o!$3#JXi56@heUs3f=oogwH-Tb z0o^ytxj?_RHvQLIzqMggO?v;N`~&czyS!-aAn5`7Nw(92s7InC*VSsCUE~ICQEh*H zQ+7P)v*W{7=m;UR#78J!X!^VYz{rBOul6aU8DcdQKb{>i(EE07QiAJvczyUQbN zV>7k6@AG|+_)Svw&Z$Q<{rFm#1@2a0oEnc8b3@uI#kqHeZYVG)y%k}kH=4`T!!^EA zEopVuTh6TI9kf*zJn+kyd5N(rCrpv zQpS5*hGX=)58)Qb5QO!q*BL0u^9r8OKK&QCo?d%evUMgwXnAx$G6XW&X&J$dyU*ai zSJz@p{j7wvh5g(k~XGScUi!gQz67RVDc2IwjW?!X_R;HTjFsctlr$GbaQt}(t2W+ivnd%v#r$F zaj4U}6l+O{2>EQL8EsmqHAggvf^jEU8lY=VRPqvDSk4N*I}S;BgkV@}8T}nai@b)< zOt%pR{gw5EMSjy;H|!Nr;2@mV@{ z6RQR>4^p>~Hc3m07wGOW{tpSyh~AQDK}LkkKO-a-x8(NevPu~{L#chFQCSP|tDOQ> zz2%Ho`X~YS5^XY)ZWi@iT!M$Y@%91rx8hZ-4Fd_yw!r{MoP%fy?G=P3%G&P*y-g0?Xyd2=pt(E@0yrHRx z)e;Fxu6cpHUhr68et@TqjEYkXOTkl7ETX&P#gJ&J z*fnXORDozTn~?Vj3{HvX0|5dM0)G_Gh%`@WfwTfnR863af}#VO+!?>(*1$msRmci^ zX}D6C&0U1P^E-``iAo};PPz8Eh^(m57Aa`<(-Ir2BlaBzBAdxB=$(OCjUYyxbja!o zYikxn)%EL0p$W0dlaK;hpXph3yxsOgpo7}8TX8Tg=I)Vc%ql5R$K*NhAKm*zIA44aelw+1EJX=n^82V2Tf=)SKYS>2*0ha5^7(E2f`|ULbAWTlDlig(Iq?a&7w0 zF(6k-;VgTr${n7bJy;!dzLj<={kM!H>|@JtrN zrn^UFoeF=Ylfh-WI(7gm{rYz_2)NyOqIB>Us78tbNoQ*OH-jfzc|7l7b0!Mpyc%ul2G13yr+=4AgS0bg&_J3Z#mIC9J^=PZcXA5 zs8RM}o7j%?E2&|4O^gcep9@`Y6;>EBev(WR?N)WAuYeBMk~$rhm$sWn-^h!&wmfx> zzHk4pkNGZ1EwXtmIm`J-o9fkjs9Bn)<3AFkNN+gKyAQ6j{%r{Pn#n;01L&WS3FvSI2lj&?K)FNxZ-y%hl7$_t4a4^NSIT zsVDzfFbM}srixl>0vWzA=|cfc>N=+LiAaeIn6XF!+f0Ct275ghhXzfZZg9J})io*S5%(nxH+t_M;Sf%5i5^<2P9}4;yW~Zl z*bEbJj(wy1Sh|+yMPxmEOm=&v!&*xa8r5RC#)M?PGISHI!z&Ja5l2B8AA3iR^zaol z5+|*9#F~cDjr%eUg&ZqoIH(1*)l7i6(5A%IC{7FHHa5It<~Fih;wfl9g#Ohk0=S2* z#M4<~xWx;M!~;n&c@Ox4%xb3ume@)#HTsAH;D@EV3=8Pf7F`06oz1^s${-S46zLlR zs7Spg)NfGMzRQ=a_phV-bcgw^g57*Q_Z<(ym3&-nDTgB*vDRhAP~^MbSnNz&NtSF- zb)Rg`)Y^}_$bu$i#A(tg```so+DCw!GT#`tLaX5K6!A-IQk(%;1KQP8c5`|QKZ!`Z4n9!ro|uCFi<+{elDIOis-x(Cvwc=U+cwyG z{iE6Lf2BNdOSr@UZ1{L;kj8L`rG2J39Jz z6l>&#NkLW^Wgl5vqv}Pxoc9rWHcHn6sLv?MNOg18g6MBUZ+faxX`D4+xe zm-dE24=I`0h~BD|d8$&5IEtv8D;13?+xGQK>Dm9O`Ct=!{OTm>;d8A16CyZ=#^*Vm zArGVxzkWK(}c+n5vW6LIN}Ih20xys<%h?NcjRia=R1{=4INO~o(?=C#cl$*CQy2==Pt}I)Q z&2J(j99u=@(zVqeKBTBovQa+n&esCr#r1MKAW;?ZgoJmPA5!eSAYpDq8$WTrWJK$rxktJA-EFPyYdD1P)8K{$GAn3EbYi0tjx zL8g^~KXh))2W{omW~L~9f*%A;Iw1fbm|}PxvfKJh2eN69_xTJyH$vLF<=pR+8MBEea8+J^~)3lvN`u(ComXU%HVeR3T1ut8dB zNT$Dj|8Gw0``2avla{o!*uU=Tn=&Yr#KYl@EY(YVjm8a`zUE&T&by~Z^D~i;iQ4p+ z!5ndw!vRspf(>C9bPAK9A1?bm;25cc}M;qx`h$eA=^j0 zZl%Xfqg%S}eP_zO9r!`oNle-VmQ+qxY+e9bA%I7#ET*z$vuJ4t|DB);hJum1P`4@v z&Cvdzg`Kj>W! 
zyXt{&8EG5vkjw6Mcp9mzCnl{v)3^)z#|Pb$0?-bn*5IkgE3Iqn1{JJ$0tn_shUXoXo9aOSfNm07opav^i7abNw2kziVk8x1;Tcp6wW$*H^OM{ zJj%inkAuXH+GM9>XF;3;GF4AsF+sDQ`YXW&j=l;m>zRJ7vo2ibiOH;ga?q==c(B&c zsh?-48DuGuEW7JY<&a}NH1d}J6x0+?C0m1E=RX{P%_oY9^4Ivqp& zBSaxAyA)<{DkAh5i%YB77dSk(saD8WV;?j9?)4XO%PkXBU1*-CLXAbZ{yZOyr0Qjy zg`(+>4{4g^7w;i+1f%JEc0~tf@>e^Uer@s+N4fydOx}Wfod7xL(VvtRY4zzwq#I+s zC7TkiB145~Re6`=QfY$Qf+P5`&*kwk5wSRr{#c0}Z(g}3|B)EMWSgkEe|VPte>>0f z<5)C#s+wP!8I!LFyT2$d#1+_FW07wbVu)x*AFh5YT%B7KsyYa_#?e=ftmV7#nKxnN zF8Pne13AqfY)nYpOG7JiNjkPI9mJ#N6h+edY!GzQ){2&^UD4rX;k}}o41T#e+2!j* z$UCo#hQe?HZxl@S=7e&!nB)zWcu2G^!nBFwcfjO4i$11?iD9zYb)MsJXLLV18zd1Pl&- z8(DowB2Qhdz*K9*`ah9W`OXC}@n%UQ$G1z0g&A2I`*%9VxyQ#9xXjp4ZWTu#WUko7 zz08RZtPFirAi3N~8)3iT4=z8)?@Zb^se!}p7h~W(GJ&c@FI8=^TsJNxsl6R_-8d$B zMybq}`eoXwH93^dUzo+6O22ag1ry1?ALev5pIZ z&sL5_P8liYa4X}n&MYEluC1u;893*J;RM$OoF)1S>Z=4QG1}WcS6wHX&tSSRl*>PR zxcL?!3DxKUTT)b%Oy7UOzT1YI73_#$B7E{{V->-$&Fh#$9F8J`d1>i4d6OEzs`?aG zYOyH|Kp&-?XIdkD!2Z`hU-OXF0FtS~eoednCW_l>19M#G<}K%HJAn;^&dmdm>wy1UBMNiNE=FB%zb~#FM*- z|7>|re=mc*gv)s5q$WmW^angt$45qW=`6V6z19>#q)JtX9_xK(VcJJ%_xb{)KDfe- zXd~dJr%*f8ER5;_MF96{>8CZ#yK;aa4uwxvF$Xw4!2=_}OkIw`7kq%hV~Nnk7ys*ZFY65DD#&a>#`ATLn6B93@geW`0bMh!%FNqcj#FWc(?u+-d z?|84|NRW)(Yj0 zn=xOARYe4YpOh<02k3Q+=tXlU89oOJS)NQa7Qblek zfKs7W1X&^Fx82qEIiq|M@eIlQp-d2Ugjf|@hr}OEP^Iy9Ii5?edClzFbZhFHVNNs} zik^;=;%7dl7V(lg)s1&?2eAi&c!5W;d2`Hzmx;FX;Jtn_W`$S_I6vEr)=B=4c)&&w zgDTuy%O;5uwxVK{ul=WWd&&nNR;z4Psmx4rsfkCp`;Ltd(AT?j>hSPECC@?V?d<9? zJ*=<@;k;eCT!pX!1I|bgvslh*@HEE7Z8EfF=qMPX6->GOPYh(-3M#%jh=K@t&d|TJ zl7Rs%oBsH*!x?^mtsWEz*@K|X3*hJMSKF}oAIWFEI%iVAxqhG0d$ECVJ1`*#$ti}$H4v_Rbt&e% zwTJ%(Z^CTl6Y*nA+1&}&zQ;}YUtXm_p_zx}CT93OwT2<#vQUxnkRI3M@UL3x<1$~5 z$mXiA4`-FXzcJj1pK|Vn9i`E|5MSgTRxqFN7$zRe;k!U{Chu!W00PCmTz+UIiR-k)jp2u?h^&e zc>a>=&Yi>=q3vw`$B>hM^jx|NX7*y$uH=a8MRzN~&4GLQUn+lJ2q3VV`#9&sp1?o# zwD`E3C3$^>x0(E0Y^SCFmP^#SJQ5)CDhh9mH&5Q$1Hg4Lb*P(n+|00XSH%lAdFLMU z*J@6aisGM+K!Po%3E;qrE;$|^b&dNvB{XF%-M}LJz;e}wRvXmNMDIHF7MKAhwZmC1 z?pZgD{Z5Jq7STS3KF@jEf*2kxr1I|X5CM6RJG*q+DzJJ~GHVRtw5u7}E@a4^1$rRUCB1R!roJ&1h4>J=yC3g`U%$I`*ir zy^v-z>7Q4~Gg0w|_X(}h?1b7te2M!5-n%cM=v9C`CIMs6NVc*jGV%sOP;K%su1>278izqD z!XjJefKiB(TRV|NAsUnM0AnfO&!^<>xyXsS#8v=R3t~P5CKX={_7v%Vnc8e}eC6ax z1%$jBT5>mLX4abmM?ehz+KM%o7GZ&Q0_DW=QL#tz%&lLcAVZGpZ_Y2rBUU>aA*Z5m zJ}P944rv=L9ZU~ewDnsK(o7ONqVODiY660eJ`1quvgofrtegHMphI~Ia^rqevBLZUuDqR=U=@9QCV3DR4nZFh-<&|-mN-P5 zDiL=e2vsH8A;n`SN4ct~aXZCqf>3BL7TANK>d0AX;&6PyS_kd%!*dGeft^RCI_$SB%80!?vs0(L}_TaTa|z zqE**{Lb`tA!x^wPi3Ra$4DBtnf|& zmyZE_j;fx#Of-|;YyXhhw@|WA-|ESz?(&$&ScdC&V?|D#7e%n!Qnl`S3rX0K4Up$f6Q0sv?;>22SBKEsavv?dO+ z6q?3dFk=rR+tL;wdx+{OEo}&DagU#N=KAm@cK8?|Jnqk(u@MyzqO(foe>yJ7SXVq| z;NF(zmS|XG0t!{1SGi_D993LpGAEML`x!<$)g)UX+w*#7v_IO40NG4UMfR_+${@nE z7-UEW%X4WxaLNU0be|9M>GZpYA=HY5wlReaA`N!tNHvl(By9l-5lh9%Bk9bBj~XSl z13IGbo}Lryc;#n0>sCgCh@v2=MlY^hR^z5_`k`^lTySvxp}WBY z@Dcf}Q#$%$AZs>842JEoJTO5y>AZ-i7&MZG7!&paP_QfKLXZrFy)!7rQeX z_+od5sf?DTca^{r`(-u}twmQH$S=K$!u6!ybL zk6B4&+50vOKw1Jg;b>Y%;j54Z2Z(*9tn%YYUuk-423kD$J?3w?Gaz{EIrQ8wj3*7wrKzv%>D4D+bg!D(? zA2iYZHpw@UqYPEtL!E9#@S>qf3LyFl}S$ULDrL=gkFeYF(3Y-{T~#l+lE)un^*R97AK z5_a?a3Dw7pyGM9%zD7Y8ec#*|7m7avhwuE8)qyZcw;ipC`$N|VN5i^(!ixBLJ$9(x_AqHn;Kmj9+w-qEuZnno&pKQESvhm9 znH=_Qb)=NdYTC%w;mqNb_Z(Io#pVdzUr3OwIq_5%;su$?qHXp`>9T99p@}#?u)e=? 
zF)+VQlfpc6FvfAgzElFO=w-Dh+;OBdfEtC1h0^wl!L*H@IWO~t z11j{FB_BC6S?+JJUO^6TRXvU(=TW>17Q@~kp%f8sE^YYihos}~Zw~u_f)b3#8Mv*TBTl6^89x*W+$m3;)81SAS6m7&7NVHqPZy-oOk3J9#b84A_LY zIxxp_*#I6z0ka4V^5{i&c2ZU~)nlQ_b$N9zSru_GFl1xEE&u=k0003&n!Xr6U_$u) z?Jej@8BYL4K)Juqw;dy9zfwNuBs0GX!vfwJ{#^)0GUY&<@43^p&3u6E zy|qc6Pel#9X_W-wGAm|Z1PU%wB6aQ<<{uG{*SzqyuWi)e!}JCvS&BfBBy&EM?&hL~ zTh9(`@p)tjGYq{);|&e91mI=6<4#4kX!}6GkHdj%WP+c)<7+-NEHLZQ2wt(h+$oH&vkFpCVGSofr$#`^2VIQ3+p;lLI5y=P^dOsN+i67_O7}{v?bW zB2{(8qH!Q}165G-EekEZU6@Yve|!|SkBzvEA5u*Sft25&2UtQZJW5eicczD=YdSdO zJ=oMzz(7{7b7LNOQh(8Obhl2g$zMLOg{HN*2GK*8RB`tLGZjUKak!|7*~LHDGl=%o zF0+p9JraxL>AW%qRuv%=4hb+cCHACR$F|2*aP?!lSyHk%(N+7{`u#xS?5;^m;z;Vg zCJly{$mRr{QjF;Vv*A#krM1hYFw(RjKH`IH{(eF$$+8r4s{;|9Ebm;ly#EWJw`%w{ z$!e18Zq~7Yb7<^aiF8} zrgXbNXExRss%lZBF`XZEe0zt*$+AP!6-u4q-Gd#jZ6Sgcna-reXc;ir?*t8*la`b> zCsi=OCT6Xm$PRrc95vKf6@=QV-$SYjDHl!c9-UhVZ4S8#6OF&;8dJqd$a_11D^oC* zdKcU{zPPZ={?cQ%`g%*}@FvPY9<;AG7j+|>tiEb=cWa}$45sRo1oqj+`3SZVgA%E@ z@A=&#_G`CbKBU|GEnxspixnvkUE;bmZ;D*sTSYf}>_yLf;HysExGcEhl-Z|nf&$5# zJ_@^67Wgn1PI`^W?gdd?5-bx)!QO0eDn^Si2l*>~8~wd>x)V=b1Z9P?kwy={nT^8H z=iRZ#Ug=;|wB9Kau)9Cw85%Lhp5p5HLFBP7DIv8E2u0w=4gc^x+ndJ1k8mvkT(T6g zr5{VFI=pADvvh$Gro;alK0TrDRug`t_gbO%HfM=ICc6DjjD z9^;J4-QP~2Kb$#V8N&2xOZ~^9j+BqWy_U0-P`(oEZKD(Rg`UNm*d#SXeh zf%5$o++1Z-+C+X%^1sJpuS2^Od!P3&lsEK)O$-@kseOfOs&o?j}mTR%3*V(#Xcx-gvDE21Uv{&|ClupD*l1hSwC(%Vs5j zj`chXX24O`c-#mW?uMjJ(on-#P%n7t4dr-n2+SW#;(>_L`xJ_iXKhbOLMUqpf!6N_ zY*S9ie`pevm?*`Z-3wGET^1XNfVf?K7_N1IG&d&t(J`CFjn zZn_$@_Vin4f;DhFmtPTm26bYm3!u#xuxbzakS_!LffqI*EN+tr1moN;pizCK z=jeR;-CR0A32|3{w#3d_PCF9-?dqMqew9QIaFP8%29}BjH9VvS(-&T5)y`Ad247Ei z=&m;61&6_t=Yuhj_EY%7yJ#}5-b)xyW+aVbQZ(N6(YTHV*NA;{*WZFweOmjB&U&9z zewq1V)Pjp}xO2nSFC{1Osz2W-OG*vUr^Ujm28?y;CHi`D$RvxI~lSiWN8su_kigiLRb2eR^)d*^OrxRZ$nPeen(B2)M z${mM+JOQ}ak$pXm2fHbV=TskS#sMAlKv+g87S&u`2&AQvpWXlJ1*TA!f)?xh`{@qV z>wZ3>Ws1_VpOnI6yt65BVI6Kbt2*PVQ;BC-^p(znI7EWI`=?v@+2P3N($pTtbVjbVQTMVW)1p7ZUQ#e2S+`oVBt z6Zd~$Am)6hx#;jb3xu#06G9Ok&J+`zjs8gU}IP7mUouM?A`lQ|+frs7S zyFz=boMl$FFM*>ukK-xt6)}D&8*^w>sZM&;hw5^PbtMQN;v8pU+eVWb6~TLNBqGdJ zAEwsv4~?!%F*i9Z+8)!oUS^Re?kIrCt$47&^P?QSCr8L!jeK^#$DePnXs(J+^U!0~ z&(k;HVbAHl(m$LY1a_aP-qHX6X4!RCEkQtj%5VFS2*ww})I!E4I_dZqG;*+aikoB%Ud)87V zDq>ne_UOaUq9|<8*bsZa0<^;4PRo*R8}A4iCbOC%S`#T|6u$~uKW*&H+s1;oaWk;ob!9H)j{oh^0O zQvbW()Jp@fiL{BxIdVJLsxL#WaSOy`UaBz<|2zRjrlXJD_Ci|A*}%ti$C_9d|6W;L z`C>0(#3JwxoN^stHb|7jC(D5Pv1}ei%H;nBHZ&2@#e9x;U7qn9e_cyfcVhIFvx#7{ zL)lbDgxLG?e+?wz;D;BCSx<@sqFPQXv%y4N9RXQp5M|tuJ-)8@9+B@yf4J;e4Mea? 
zswcdbo7m7@f}Cdk%=w^hW2)Jx7nRO-Ek|$diT_~%vk7o4BqZy(J+7z)ESvAfsB_C305R1!Aj?;pT=m;@wX zvqShC0HX_gs=qNdQ+;BUk{H$LOs??xT+!s!IR#qjOqYa8wd~LKE-xB>y^cv3ArTsz zn&r}t_5yz+aG#=Fn2IJz7*xH|tQ&FsTyjk!;?DoWVL=-s89+JxOA}eK0Ikd86)y;D zS-k%aEzOq8a9F`t+knkt*+M#FLUX$i#4Mc&5*V+mHK-48bJ>ZJU0Deb+^~`CLlo1Y zJWU;7Jw`+_Ke+|iA`rkZT$nf2RAsleL!t?!n z^n-G*Wu!pnV)9_^$}7fE5aE6&pWv-;u#5;5EQcrFNEPtb^I{MI*W-ZKLgYSb^2_EC z=50LWcIc4A0BnJa1g#E6mhQi2mKK_?1gGM9tV`HM7s<2hg|H7?A*BEwpGP25#Bm4U za21>?AO$B@4&z%a|3Di<{G6r!H&=MIT+QhhXKQP2K3%}^C{g&EO7l|8I>1YpY(gHa zQ|W2W)=@BNPwnVcE82z4d}E@PH;(wmazM>m8o2J}v@k=+2;ol(XT~+NjH>On@~hH67^PG~_5HfIc+o zjj;$jVGsMouZo;bmn}X_P5*ojXfHr*XHTxiE{KvV-u^A*6b#mRR?XY94({!U6f(?u zJDM;GcXDLDRJ<6f&T4qixHapf73ihU#oktdM}%5&7{8B0qNkW&HpzIPTln0HK*b}a z<BL>&ls|~~vaAO;kca@0t zAo!kIFM-1}S%@6%edlrsA&3iO@V(n^8?lrqh(Eu=LYLmJ%HQm5ZQ{7EbzI!7oll_` zj;%`Vl5D3*l-=sWdy*@K{VL;&|(iz4yV>{7B?bfVyIfdW#GrO(f z&m)jt)-buaqP5C_`UCrSQrG*8YI5ba4=lFqa8?B7p#smQqZewRP-RNgEjwhy5Fx5r z@+)sL;HIc$(jDtUE=;+JSm#X)Rr+&^refd*p#mIm+!ZpY9LIQGiqHxLu2jll?QCtp z#R_&#oRzuf%m*DRm9cXh#O%s`;+&*Ly3?|S1m~zesV0IOq>=os$m;zer;jncSa2X& z1A-tpeyWYGO2Ea}-kY@omz^trS*j7tEn18){T`@e>gigPl-g!D1^RbwnWJuiagKhy zVrw}Ep;1I@bw$SHmxay2_o{rJPT2p~u{ahIcJ0KkLS`1(&1}KnQU1{{nyxATmarF9s7}YV9C;kii5HO{b^ZN9@unW6>?cgr*$t7 zP~t#tY3k)PZXZwqIs2Br3BymMPp`Q@#d(pmj1jBNevE`MGA1l-sw9?}A}YZQ=Vq@> z7Wj_q`QA`6yg4{0DbM12>OjUjuGh&gqv&nZk`+97#0E+mE)0?N65q0kO+9Lv;}I2e5Pt)vE@7);+LvEldKX>3i5I+QDvi! z--NDcX$&;~?Opn|UjG9>@VX0~@K;KdaBMdg%w(Wk7PXsx0S^_$9+ z#_IAL`N{7OO{_>ElH+cYD*_}~?m879r22VbE=9*~R$#qqujIOOhK?Q5#B9^;+E}?o z1Q~&V*ZY#I8{Q|BMNh;<7ej@(fhSCV?8ZfLJypWXiaz1$UPfLXFONH^8A4aaGWR;d z{nO=u^0@HhLwFS>yrqvDgtRX4()-Qx=-k@1W3GMm&fQt`#2|!1Nxlf>=17-&YlG5N zn-XRM@d?;Y?0LZdDes-~e7>>mrpRMnHgZS19BdH}5iy|qa1$n@t4|&m7II-Gf@Zd& zysQHu)=RRqcr7QB?rFv}UKr$9QR^}Z8-cU%{G_f@b0yDRK=r_I#{lX2Sl4R~H%5Su zY@k@S$@c{F7}J8inX7aK@EHv29M#N$@iNN^`vYk)W#q;cA>AVW-`38lDNrgqnqF*k zWRkn}tEk=%3Yb$lb6ybML{4}kEkqVUX4OE;9t9$BzP29R5U=hiE(UL+k6zpmMg87K zxvKPwyx#~g;opbSA)}@7=y?qFHc269Fk+qh1g7xA^J2dtMDc57hiY-x5J2Z6iVe$! z$--m^n;`Uy71w z#1+*;fuyAOs}wi#RsW}>T$$yLQk^2+6sG7i1ug2`D6>Y4G!xWCz-8fx^0Q?R#e}Ah zwu$rCBVV-159AY)I!U%CMc)R`?_{{cY8RD)qsEU7;eBn=i6++aw$8z?BxWaW9E9u0d9x1pp_wk^_09KJ1#%$N@;-Ck|> z&>W~#2`eVpH@9(_K=>Kd458$V~Diw+oL=_1`frUde zs9Pnl7hpi6nkP~>X@7LQJ{8U9BEU_=!DdeWTXXDjkUG;q;01B?@4jP~v16Ueo1E(3 z=54)U9$Bp6+I3^!S{9Y3!aemu?79B$N($>{_HXKVl}6^Nc16k$0Dwa6f+Adb4z!P> zV`XYxB@g$ZZu?zuP_n-rIgDX7O-AstY2(XCP1~o59of^!^(2W;pDhu;PIVT_3|N*X^( z{M?=@eX>_&rzs7K<;0%s@6Z95*Mn~KB=)vNO}scL>U6C95@^t6pY#T?_r(SmOa(ZR z(~(ao(DkNo~=6NC#%Eaj>xUKz9P7qcK>$YtLbR){7G-l6*p}izR~_e5sOD? zTZ5AyDW%fm!hHGw&fXWwE2CRo5^2XqxDXZhj~1`xjTQ!GIYwHK_gike_ahjQ-5;t5 zI=tk6@ymPj)7d7RzM>{5=52zdR@}aVSwhG>3IrB-tW>$M;Obi7gz^p?FOa|nOn=I@ zjFPvsj0JmmXLsTYy>ioU!?&I{7e4;MjCLs{&1Zp`0MkZCXVv24@}-{Xdj z_`(?gU_$%G0f;ZVI7bno|46XwxOn^FO^BgY;`LaYpe8~nn=Qa49$0i=15;@b>+i4K z{9>XQGx6Ba^RI)%p^M%Vjm4MARDFw{m>ln5?~=S7f*8QrIrK;QX-`9RFNykWMr{q1iFyr@wyyMcHa~jq z#CeR6*)jEcQ2pSS;4W`(+2RLAqgUq5^=FU(5u$Do9%R%0hngKJ5cDi2JB-`vSD8QN zJZGEo*acy_6}f{C+)!+cZ%f5)BZP_4g}G`OzUS1(j_$y7OCXoellW<9q0PEQ_;7itKmbb!pBz`RzSL?aJK5;Hz;nP7rL{o)7pI@C zXV=(}vwKa?BmqFn*&VbL8tdrSGdmWl8|7whm-ate9L%IeSp_^KjF~PQtrz&4-@N_g zTPBgg5mnK9Ifk0WlcuFPS0v#ibZ38@Xc@&KrV>xMSWkza`1mWPZxEuG}rR>ZE z+ungt&hb^%tSqGoM{(|rUxCqVigLI~iNC}U`7X4l#RuD!p@vUO&CbK-G^F$RR!Cq; z^smz&YrFBRi4S-KaZJ?DhpCa`uJxp>D85u`rVb-`G350~wG7LKzr&jo6wPTTQd|mg zN<<|L;Z7qJY97M)&jPwpBPdJV)Jp)>WEkKx`hm^)tE0l!(Ey}vQ#a}brb6dSy*LA? 
zvHcz24J~=ifLXv=%SBJvHH?|T*NNi9#3U&&Pli83JZt+;CPUmK5Ni)}8E!Szo5L-M zs+Fz)>e4?}zZgk?J{oIbI;-K(krj9M6d^B?3E+=a^?`Dq9;RdzBMNedpht^3Px(Dg<7KfB8(B1_;{4g+^oxt_rsX+_h|CuIjC4R|tk9GD!8xH3#52T4K0xa6 zEx%~DZ6wWt(I{1wBs-41#Shw=S|ZDA`g~ zmh~MWSD9y-tC|+)cqTZQ257zj5`JJMqtGhcy~S0$mg|9`qIv*}Ygg`|r zAIdiD4K%{3bV0t-{(nI%NKTc{^&)9Je;p5VjNrZ=KwL52I&sV?N_xan zzIM<$wceFAgDhavhsZ>7pV;S}F_AZ2Rv6Mqil9%2nR_eWCUXI{xj)bx5qF70!a|&P zWQ81Sz+8o3CRTzsq3GlqWM&KbJN()rf@#FQ# zwk873XfX_~-l`IvJxBSlNg{Wo{6-*zUPU933-BR2GiZ8mwC6TZ$M*Lk8ojeuZwA}W zlSQ{1@JNsA{0VRww_eq=j*}AMKm=@!@}c4l$vl|EEgJIL&qzR#AT_hd=`PMK@QCS= zPYeIvIzJ84x(()otw%d`>HI{M9umn%b1+6$Mu9`Y^?9OT&_PC5NM=_d zjrSSlKC>%`69znW7Onwa33-<@H4 zSm+XKYCj%~{`H)vG#z%zD-R?%D1~;Gh-QUjP=eHk2~fymcdr}?vmue8iyCZeeLx5M zV0#3zHd^-m!2(m}{R`@Usz|!7j836dk?+La4aS$a92L2egOO5v2p9Nhh*e@E3pYlq zv84#B7Kd09G z++8dile1iTrc4Q1ZbKw`Ie&vW{uLtG&FAz`gq+1?5hhH@FR$1Ne=C?k1D^2%4Gz~= zhgTwjtd}B3NMRkFpnh_n$tUb7V;N&^(%=Aj7^Q&nxCn6!{*>)k`BLalSqYCnGiWj{A&ASa?gaQ(75!LlD@448MmG;s8HHtj*f=D&-LBm74{W$(mK1 zC+}?}&PGiaVW+%|m;e)4!h~ZGMhP%x#Bj4=@OlehLr?$HobPHb0LCw#G8GMfBpxD@ zSx1a^=X}}?yzNB%lW1pmKLNgtv>_gjzAKL@rnA@uW@{f3w?C)gmQlnTQ)`apH2f`E zhQS4XcgAN&V2S+8J9s{M15YJf{(38cp(?vKfbo|y^@Ctyu__0OGvv~B#bY6M|3{R% zlA(~?Q9%^LW<)di*AnsJFmnyAJP7B<;vp)W!|NGcNBrG@P-*G4oO0!@RJO+UJ!1Xu zG+@H(OT)v8kt6u258&l-?^Z-p{|BZu7>l$;q+dYWi!(8D#KHL1pUthAr6}>MSURhD@TsQyN_KB=Oh}>7Fn55-AQ|b=Hm=Rkm0p&jRBIR zM12f3wNck*m)wts$1uXpEuTwn&t{||%NAA;`>6Ruk4$(_gCNzW=*2N|L)U`fnE$7k zq)PCVh1gu5LubCU23sQ9ptj6^qjXQV$VhHc=xN2=hb0jSosu)KA|EIEbJ8qX^>sY75DuF2axhWQ|QUHH#{Wcff-Ix+Akl7 z6Gc^x&zH?;Q@54m>N2vR^iPj%EqqUOhFxj0H9i^e@fmu(+ot;beP(tJqA$8~``2dd zHD!;}x*TZKy4RmiV0M~Jg<0?nA_pZ@zsl_wIiMplTg(eM2QMx# zdG1L2w#0(bXQ6J-(=Q@`uLoGSuh6N$LqUVDV`Qo|_B@aTkoCp?x!yp+XmQN-;eBUD zKv<`Mp|YBy!Elbg2B%jhnK{<#Uj~&9B$F}5Er#KrowF4FvQS4>R1|KLVAzDRdVdn- z+U)GUjpG%K>p(?7$Z zk#*IVtrYS#6J{3TRyh98QtvRQWLEYj7c!DdHL^JsvdiFZ@3U#}2|5g`al3hxg@z z@|L@q8y!zDSE8hSujoqzrj-9C6IzXR3Q5&}TQgBcOoCOEAOb8o0TRGk0|OYY$>G5U z!T=_Hst&#xM&D~@$nHb=Hi|o1&e1reg*iiRLYRtfZ9k!BU68y!*tzpyDt1l+)fi7Y zh;|aza7>GO?*CtLky`=RjElv;9a`j^<>VNZOu4sd$}FZMC*Nq zoaM1w2a_|m!*^#6dI0vH4XhyJQTjC%CQ*q%sTM$C|NE?`l4P*o53Eu@LKj@S2{UCk zBlVje-%Iv}`hvQ*U9_zKU6HJt-%6=c~#CNK~OERiWsGP_|fW?HFBA|6pmjjk)r zoqfPo@r~I^3&i|p(eZ5S276%6h0P2(A*PxlU`-j3i>7aOCsXiG&FuxCuxY@f!v}+{ z4TkDZfVq*_FpYbc*>^3+>U2O%yOKQy1vUituI=vLt!66^t%j3|qcRsMA@@*S`4&F> zx0KZu%uI`;;(>YO+QA!327jiWc2^c*{@q@2uu-J02`>3*Xm2@=#;cE9htFvS5T#6_ z0`cC%fU!?`)XtMuqck=-63ihXUEDdg$7|}l`btnznD7PvA6hLVni03YCDXl5kC9+d5?bBCxz$6P%6q#UWQUde9 z9F?chCb2l*`lC>pHUcH>Qu@kj_V@5=GpCm`m24LHaz7f!vfS*NhoK`eMnFo=f1&xZ zUBtEX6~_3h=ob>Kz4y*w85Jr43ZsfVSkm1+Ke#4f?M72 z`vf9NVTN)#{JaXcWCViCyW1j{5uoftS$ZUyxNXeBNcrYWm`)@aAoD|aN88Fcp;AZx zXa;s1N>GFX@0$SW_JI6DhEh$(TJpKmzx~oGkV`AX|8baou1l`}KJT}`b!E+#T>pUZ zz|cOEOPy_2>SjWviJ%C5z6s0*0>XNOGN= z09RTJC*7pU(lMtkv`qJU6j$}60WHq1h4jLI=;*$zFO0Zm54(DvcL+uy)eBb$wBivE zS26>B!WY2oPbX0f97@}k8;!f0RbvFqrp6d21;YbrAn2+p@;U}(CS;NxZL z>8#=Jvj_Rlm^DcG1PNqx69%3M+?t^ZnET^j{(`+Y+_lAk&{+!kusOCn`Ld4Oy?2bf zo3{;N$#0?7grdW+LN|+f30E7|8drS4EQQfpdkY(mv(99rsQFX&!gS4D72^|UATYH} z6A{i_z&kO1260}@7rRhPvDcpt4>-UbvPV1g-q2$CxWVjA{zXw<{Ylrh#+BAZQiCGg zt}6YQy|~C?IvBL`B@?=i(40uksE5*YOt6e3cr;^XxYM9I8h`a*s{+V3B^11Q(n-vo z2et%Q!`)u`$Cw6ADz2>U6tDSzSmsyQ82^dJ_p-T%2WMUUYq(q=i2osK3VGD>01ERw zFfe3eJ1zhK00001L7LwfEB{|&o;pb6+uUJJpKD{b`HXq4xZXwx{R;4 zlu=&)x(T6!kfo-d)^ICAU~8B%cDJr|kieD9;AsyR{8cy{@T?!5U%pCg9n;#k#gG|ns&fApT04%@#fAwS<&Wq#Lu{2$z#<1 zhWHLiCsDInF->1&fMfU69DV0#!8f4AIiPO&8K*DmC`FV1CjdD^X!K$Ki#LS6o@=cG 
z(e-(y#N!hYDe$)n`etP;hGbHqaC5%vJI5X%Ms_(bu+ZQGMZ0=E=zq=)o4)C#Yu&bv_F6S%z1`l8jtsM@8rjvOkBp>7<2;GS^hFI9{sO7o@~%1onAUC8MSjI8!a4Q zrnTko<9bIJ7Pgp(^IjnPzr-XLQGzd>40l6{9PO<`|oM zBtk4m8#85wIftaYfFSo@*%zVVE0g(RhZlYR^06^F){LeFbXdnecO!f`g7?M3@&h)} zi(fbwAFex(gXCRe>w18N{Y!BHg)RyL_V(QF$jsvjhuSFZ-* zA3C#yu9Eux0_KK5M*%vx%BO#6_hR-N$(*|9!%R-dgw$4k6*wA9Jv({HnTETeOjR`a znCHtJx+mHLSmrTaF0g|ySVC^{Zr|-c&wHlp(@>?R!xh3QtMMVifpWFj|6E29hWf$H zn{N~Y%$0#HQ1~TUy@e#8RhEiJJLlbAZ)SY!xWRW?AGx*1xf5r(W#qXVZ#=_ARCHHn$sxS73*3C`*lbJ;NGv3M%;Q&DXXW0Zj*^)nlHFfd_VN3>lOMu`ZD(f*9E ziJyTL^(M4;3ZhseCkmOV<=3Qt;t7ko_D8JmA=&kE4ReZ$X+_h{9>c zUte*uwr3*}v0l@6uW?#aJO)U}`q52YErv6lSj};+d~kD3vOT)!%a7flb2*F7f=je&UUi z!XK%dR~XO{&J9D;QBp=chJ7`{{Mu~uCj>U=r(kztw3jsSSBu3VvL(IzZdLM`_9EJw z@Hz}$I>+n%)RSBAjI*@CzeiZ}aKdc~c<1%dYT*&j4;po8U5RFrgp>n^Fx=*6uRIyl zhfxGT)ofH{4tJBD(_J=JSYo$>D^2Qw&3(SV8JfmL*y(ej0-f8YByAv}Mb5g}g?`W`lmO)VCU|Mdiho#ID#Rm>yuhBn$X$-+!k;?8-?SSo2w2{3=@y zn`UiD+@Z7)0WFVyz8u&FKO^<<|9eM&hCBKA%<@u|kvjga6iK|N&vnaG>vBq_WMVF{ z58Yc7-Rs4okc4=bZN7lSsyEy0M=A~AlLND%H1#rZ< z7lPTp;g1}cz(rI=JJC}Surcd{C8=Z;vb1MPefg(mXkzKPi3geRB!34Ms{Qb5sxYCi z;P5`G4s(v&utT!k#uHr`>o-I3PICYvf19Lri8M}uYkFBYH)g*}`%P+*2P0r&njmi??LFuIujae?VJ?R| zCtO5-%DElO+JA;+LH_TjAD_V0P#s`9Z$eO_D`00wmtdc){k6i5OCn&Zp_=? zBF7+|mR=W{o<%^3E;b1l78GyYH!L;#;}BJF$|q1EHEZ0XySZ(}=-9bNlgt{$4@=$D zgu#CG&0)jhO;epJH^N7Jf4m%PVdzJt10rl|U3>Cdp7SxI=T+hrWv}{9$zSf7=>u;# zjhyrSvXLi%X))XW_hFpC%V8b^4`0SmnJW_oow;YOaet3A3S(T!%lQ)V=gxymDgra< zAeH4bDL1*Dz(~lD$r`V4ySOJt<88uN7 zI<3RbWv#e(Lg$*YCzBP#buhCCqQ7<_XxX!O29*7r6J{T}q{1|RPbn+}wC7A_JTD}Q zl6q7IF2zp z?qR#L*%ZM?u%j*B;$jQ(n6R{xXWI2QvCKuQELJ{w%?S%QrGqCT5|8t26LkKM;-o9P z$&o8L^}{-(!wO&Xb@Kje8}{)0Q3gAy#MKD9@W})?u^p1;vBix^ngL%|oCxG^m6V>T ztS)L6iGY(;-340w2lIr4nV~l~|BU|L#`59(Fj}g3ht(L$OgM}^r#4*3=Vg&nfdk~FO~01ntiv8=yy*4vRCG^Wi; z+Brj#ahVT0z5mgrKKMXYB-OswSsMMQgg#f~;vfIcY#DcQxbY z$QqaI<o!Ahmmw5gK3PSTcx=E^R7l z;kxeXsH8unEzX%`$ZqS)Lx@fPg6vKttR?c*z2LT)*{q89`agO~<33N^2GdwP6!1l> zR)Vs05trrh4Th~kH?sAFq6@gbGaxhaxpTZ{M{bX5ZzqbOFot0syan7etd2{- zpOBLZyFe>;ij-IA`fXv%UlwBri z%^)*#=lCqaek(M!hYc)U_{SRCHD;aHJd$))Kmlxa(4)9;BDh{K1oV$7+{=*S-b^zptQ}UqzL=Q66);&HgG>jULwGk5 zs2jpCZ(@7}%rI>uc?5aH)!tNX$`=TQmmgxr@Yx9O_GI5>93PlxpcMlNoQT^+!Oh@> zfDRjn8EsM7eW-km!zB3kw?Xvxb$e*f3Fg0?UT*>UK6$6V>lkD?cPnoeOzYr$jjDl&w0yu7^`%gQpr(&$VSc(8j=9otM%mvpb6 z{@XdJ$`+tWE64;58Vs9JKZHt}(A_7$5BGe!< zh2&z{$4}vsT-0g#O}7F5MlaZ?#iQjEN{IEj*>?VqEl4v`r zZWH15w-Dw9D`by4#S?F+!Q2>3nw8Qh-%#o*hXHf5H%pb=H5q_4*5m{M2=90Pm*{$W zAE~j3vz2-24_0>wN+S8qshuy&7zuxMVehX}+zKJI0|NAjd+dLc)OCCzP0lVQ@I z?M*@c>a!cQZd=k4kNVKUW;ZJ9<^wAMq9I#r0h1Ls2Qy@Dizdo}Wz!WJzPG=$Gj7;? 
z?{95~TZD>QlgId9CW9ipWuuG`~jIq{BEvSWh z{DGO&)d@kR_zKrc+6|%z-DWljrc8OF*~{U@Uq?Sw!oMbZNr4OQzz(7tnILD8rBXo z!0-jXv7OCRd_~#6)^~-Rh14}V{-fmgI^RN^c!8JVJA`9|DdOdPWVY%P z^KR=qU)8Z_MQ06&&2kTjmw)iRIjUdK+XMH#7kyf0_Uww*JmsSBwC%2|@r4UCL)YTQ zh6fJ^pw6IP)*~xc@6#W(=Ki0&0klND=6*|N^d86zzmIQ;LkyXTq$u1Fi8<**K$4yx zTz0Vy+Jxq3-x8FKAY@6A0#i(gc_$Q z`c5ypCZC0X=P_<(Fajv(CjWov0FI;QFAB2iJV|-k8(Uh9e;wz#VO;gz`8-J}EC!+r z#T^_8bXU|z(sLSnTX&zWF+$X*d{2Mjo`zQ;NP${8YXCt&zQ1dyZ_W9=c8X;MOLV$* zrA=saC;@9u8drv^ZboigvDPU2w@V6|Bg3O-`{AwQ(e#Ddx+9KgK!pI62R3v zYSwez8)%qd z?VS(hmfl$0zVD#=6W==N(?dhFR(SA^AOCAA@x>4zHBW}UlL+I&B&eSTHv|)Dq#scj zrEkgq)XM>LB#v|jX`z;Q^SHg5u6Q7%#I*e@b<9Pe!A1Uv(l6gVp#3O1+7iXggKP~e zH7GpG_Kzo%TYQh1WwcJ>;KWk!6?8o9D0PDUxbz>D`O@HZsi#{y4me((f&0tLD#5QX zB>}X;O`e-YEzk87Rn_WjPzwKaG97&s<2P&vqp}7$>Ebkot)LNzH+Zj=A+(ZaOhx9p zQH|PI)%)5}ggzKM)bJ8zir%xNK_ur1S!&y;nZ7T1v^;r}NTY4%NZNDvhHE z5wa7%m@g5vIn^SU0NiQ@E4^`SNp0*G@IGoa)A^MH{?eBA5@*a^`A93CFY>>yrA`!f zv)ms@$Q>XQ9=-}E~DP0fYNgF&;r6) zFa#G7r+I7Ei{qH5rXIrXz|EZHV3F33APu9wED$P#%6)WD?wZEJ`!e3IcM)AdxC#%X zrDj&QD#v1bGV}LajhAV0$@~I1$A3 z`=qpQbmofWVYVov0F1wT%AIXcwdTkJu8tRV5_p2o2vfSLQH|4qm0G z525SSJBIHP@%*s#wF(u(4A?Z+@$T>Mk!fgDt!bm#yO?#3QR8FguF~~ha@k$*udzL~ z<>34<`%ZXwujwQjG_=i)JTaWQH+GuKIchjb1*rnARyrfcnC~rCYNWv5^K5MaBaqY; z{AoU>QR=}t1B!0K=^XOZnKA`*8aStXL2>IWO)1h5Te~kQy5lEAR-P?aE$a+uN1+u) zW#7Y)>g*9yrNKjTls{a&<$Bhcp->;a5x&)}uC0q`v2T=ekxVriT~gz8O!uAc2anLI z$Bjxqfb>MK=^G=n?2iQ-hvBI#&ieBJYyv19Qd*bx7qBiks?(aG(f1hN>O%r*J+J&=(aZRK+%$3?PH4IJv{ld|tPF-u*!q5#zhinL|Uo(((IEs1@Vy5t5Riemwx~oU}9j9#i&` za|-vl&LE;!gwJluIhRe%v<($F8;}=`rDrXPHp32dCafBOH+&J2K0Yr^r$bp1zZ;6U zJ>Cs7mualpw3okXr@2!!dc+w-P@*Wxzlp+Jl#d6hZsTO6zHo(J;B9ifsR2b;9uoAP zDutS22?e0ngRFOYWpuWsBJfpf81BBO_6usOz2?4MM{ptNa*BNYNALAMwCb?hd1Jll z?TkG(M<_s{!;b_szb14dHyUMaA6I`#&{Oiq2KBfzQPm!^>w-Gf);%vW0fr_y04#Uu z{(d|d2JThub@g3WM^d-7E=uo9q)3F0FLwb$C~yQ5Q!{w8UwUm?1T#|AAMqJD9*zs% ziMZiYc!d+#cNsy@x392rAUUd#hx{HHi?QaY^4Y&Fis`cw+PosIfct=Qv1!hSI)cT! zqv@X45n{`D252nns3?0q~_|NvQl_KnA!MCHs+PaE1Q(r`>uUkc531HW1AQ; zAIEhmqc;y+){@6q;LvDkcH=s3U5AWc)dwN5rL1D8MLw8yCVU%zZ9#=$5^bSHZfYsw>G*I(Rab5}^78L)m zDES402eX$KK#beGl+rB zr)P(s43fSO&VgH!@xz-FwzX1v)g9Ej*YJDKFui+3{EK-e>;-`#E$X2IlyM<3Wae^7 zJDts{z&8yNGxYt>CvD`19S=owYsB7FGD=@c$g~`MX*AwR(IZDPKi{bXp{@{Q_l2iE z+c-&=ANpZf%WhtZ!>9>9OdI3sd#IwY@YX1u)e;C+B&9A^{zj3tL7)CmNd4ZACf%68 zWj${~i(WLEINwT@U2Rgk)izi|r;GX5yWg`|I^eZnbdVoDXdZekw+SQ(4D6ACdtU0D z?fm=!K(}yYwS5Cex4)E4V|qqeyJG3M*hwlttkkA!K{Pv$7~s_Z25g(8htlN^S0AV6~ zDP$3fV+}x0w_Xq{Nb#y39J>C@N*BBOqn51HOMFcvFdfXYH+=nOs|r<=`%)g{0)pH? 
[Base85-encoded GIT binary patch payload elided — binary file data from a later commit in this patch series; not human-readable text.]
zOy=tW%D{Jlc%)IP!GT}<$iccduuyey*y_DfWXeLIlbTT+jT3oYMIqoc6?@U4&GNS| zUxUnIY`pB+Fl7K(_#~|dn|HmoZ}0xwz@Cn)(-P1l*U9T8ql<87jCr?W)UFBSIDliiVL96~8!_y-*W+IoNcnh_WGa!lAbGBRJyu7NWWRg9RN+ z@U`0R_o}<7fEXfG74gA;pR#K_^MUiTsDnlUE=cWwN#2b@dWcctQu>;7;saG}nc(t) zw0=+l&E_AHogLT|=?&Pom%O+^6&?dHRt+g0P^=P`Bn&6on?W${OuyUJsz*v|d~ zC-Sc1+USDO9_^;L_#-=UQZQLRrzS+ z__0GBep}|4);(`NTR!wx$A^53a!VIv=b%7XIGd7Wi&srcReK4p1rf_pft9Y5k$;kg z4RkiPz|c=hMxv1jy6j+5rJdR3-TdY4l)PoHyIM{VnxGrLU~KFz6#E#Fe(x31CUrB&Ilz>|iInRE4^`JhJRn?xH4Ko-uH9X?2y!i+CTXVIh5s)Y7x2WrNz z@-zud`NU|-CF-1DYCiN{j`*G}lA(Nm=qyRCauE<4{^q^4zX_i zCmXKQWZCXihnzBZ(*`|^n(eO?mDqd~XHM^oBvbCpyi zUF^d!{=2>f%h=4 zr;r^kBYwR}3`)}pw6h|-XRBy>Kgr|GjczUWkJ z1XW{lL)5iwtH@iIC?V{K$;{5JcQ?8f+(lSVBqOHD)iR+u9yvjgSiw^ppN?0>kK0j& zTU8@}{2O|B;VV=Rdv}Kc?7Esa+m(*_DO~5H1;NFbDCbL|Ao4WDe*nD)^&Oj%1;xB; z`NX~BT(#hX_08tsF0bC?C%m!->bOWv;nJ6^*gh!ru(%~N;h#_U3a_r#tnq5WS@sQ3 zB0L|o{a^UYoixmhE%^FaG7oxY7HN{TWMm>R5fz?1IheS!pT6KNc=n(CPv2uX=7_yi`-9Z;5 zq3CeIwuX07oMaM0FRcWn1U1^HkAq8Se=tcXXHMo+%f{tjWjE(4Lq3}RhP;T;X?%@j za5-E$z)1pu@L8sH0#a|{j9qFa}QL7M5#Z{Y7R!hpmO z9N(d5G5W4TWVEs@5frh`IjE`+c*8pKvC#CSbRLoxq@b_L! zp0wT;tm9V|+b9C&R(6+6WQ)?Az?7j~Ety`!ZB8D@f&J;GC4Y}m$^7BaUr7rg9#FS^ zB8D@icvtD#R)2P&;-f?7dBg(qz=47*lpn?ZmJ1=OGQRG_4SM!q40L*ahXe0yo~Hq_ z;@LG{fTc;kbtbpHi7_CFzAtDvyZ=XL)UUaL|<# z6r+5>Sd*cBp%d6>D5;1s9_&lXx-_QJgWNsnD$AvGP~+3nGow2Hw1HtR zue7`2yIYb|q zx=*PKcrG9dV|o%&Ud;#_o2k7NdXk&%6fqZt*N)(IWYvHur)o3|s45hs@pUTttPgB; zkSg4_QZ><}W%7=@`^!ueosQZK{LI1YGNB;abs{SqKXg*+>86PmiG+@E-d(7T;mG2u zj6r_Vh0uAGI$voS9QhCKWSNy56QQ+Z=!S|Io{$)J-s*jw)qbT;g$n2EcJj&>$rjIW z^OXC7iMz$9QXAn*?PDMn`oDQ6+n~L1a>r#I>HF*drPCy8m!!M86k0ORNiBtMDQaa+ zP*IA`YFo z(}OH-uX|ji`FfnBxTDUF-x+J7eBrDPwY1-0w{<{!LR@Xr}=Ce0Mw*TE=(oLNu~dmKYJ^8%0Mz4Y`PSre3K3T zrTYZ|9iyMIg51ji5v2pGQ|attK*%xj!m~<6H|MXwA9(CjnwO16!_Qlr0e0+}o6i3) zP%R!AD*fe+%o*7q>vR~rF+XVI7zfkp@^&1&@WcWobfS{}rIkV&(i zbwOQgsRhXPX4_$DK%SMv+P6~n1xozuHvZk-5OAv*$Ve^)fCcO3=^^EfA_mQ!xFW0M z+^#w44a1Fj@gHA(Vvi@EC<^wE0P%41(bwSmVBQ$W270Y;6Nrv6kRE&l{jv3sfUSTR zG_n1}h=$w8+OjV9MX6kbukV~gwG@Op9?)Ycbblz$Y0+DmcF{=Ktpf(OW#k(TT)eEc zFfe3eH7Ec800001L7S&OOcQ^LoNmPan(D*ld$>6i--uqsZW>Ykcan2-#-U?elh5#D z8hGpQ4vC{VFudbv2+ca%LqtN(m-+xjA|TTwfq9&$W$lMvwCd^D{%;2ZsIj}(zt8@( zc(>b!=bj2;)UQd20QZ``<-D@fNKph@&zA2Vw*tRG#n!5PxYsAR&Cx>>*&ZV_dVw)+ zG*A-|ZEw>BL~h4d?!Ew17#X3Gb2a+v^pVyHdMeaqu3t%{}psSmUAA? z55so4?&{ONvvJwmYzN+SAY|04DpkCK5Juup8AWxye*y&^0|IlVYkgg@L<9<^2NZ=WaiI{={|v;0^RNb2^h*Y&{_ej zl+k0nwl0;67-zqlGgQrKM0Lb+eCYIo!e?Ye+0bR z7bip{?Ad|Lwp~3u6btrI&pui8xjBfi>#QSrwAi`*kNasl97=uy*xrAF0NC;i+WtmM z8$_0F4%`CnINn<(PDp}z&sEXG0#uRB7}kZSzx{1SGzbM`*;nM|#%vxi_w?7ABQ5HF z0Jkf)S5g7nJn~w)EyETg1;Gg=tQx?pBgj`kOnd~rt|%X`1VLC63otkj3nzVLP3H9* zd$>iQ+PYzUZLzezZjCTUJ^C98n`SCP4r6I^qFwJky4HpokMP*KIx95D$3U0q4>tWI zOMbOU3Jex0SQyS*%k2_6$dHrRBYP59Ox4vh(cSB}6Ydb1eiM}a08cU*dEl?Th zx9=AkAVZa|F1pEy6)m$^ro`!a9(MY`m-*@EC}K2oAJU>cHxd~FgPFndkC3~=|JzqR z=u3B^>iTb2{F4{)$G6zq9mcg_HUF=aC|Z7yl68c5JK7YH^?MHP>&MX+EcD~F1XA`- zrxjw3bu)@Vg$CW~cN|f5?FvaC{EfIUvrRR(L^eyH+#N!t^@U>XCvP{kD5`i3Sf~Fp zz!o6F25mhc`iBWb8DWZiEQuld^nhI{X54)Bk0jY@OR_|ImBaTMO|ICAl4 zs<;ou=`D<#PPz?;e8$7qu&@cojoxDH^P{B{)7s7ft@w->*I0}*Mwwuqgq&w zzBDw!UwW$HJ6Cb2|2`DreV%D)LV?>ub$tKpi9JBe8d(A-m}v#-&4pznWcqLrP%U7! 
zW-$|bKl;$?@?Ez1x!1!L#hPI4y3;l5QhVH*vC+fuT+3>j5IElRaa_TOD&~>?oQr4r zFL;gSJe))gC`CulBH@#2B)^$!CS_L3f1_5V>%16IY4aVwWQWBlLI30NiuN+{yNH%* zcP6TWR};>jaOT|DKgTT9^6gk5hje7@?9z{dk6h25EF!QWE9td&Gb-?Dt!amnq`D+6 z;of1LjMj7PCX`ye!A)_iiGm*E%1x?lDaE78>Y`R_!58r?|bZoU4OOo1}8$sU0` z-PWqc^lEc8VadZ1JV=>8F0&i(bQ`=M%RZ0(7sc+SYzNUJcj(rsLR8T4{1P>z$#x16%nyUIIs z!|6qWOPQ9y2!#%rhPk@K*MsqE?y9Rt`@@{BG$dRGKYwMt=WUg~otR_{KIsH*!MV{n zd6V6DFp@tfQ-cSc6R zQTIi2fM@Je#jm88m1Vt1y-EQUb~?SD79AV}s7##~-ZtZWl$Q8UZ|u9t(g()#C*1uj z;f-`v(p9E9_2ul`s%F?6X=m3!icgn1;N=poq)KY*-A$v_GB0_BFR|Hmo9JH|wk>#<)Wl{k!N!Dhs6e!oqj_iUDCrkNb6!9(_JI%nfC$ zk7y0j{3OCjEDrRr)!3v=slw*IZfP811L%2tjl9pB!IxcV?x`o-k<~?hN%rv{FNE90 zcq^GjP`bdsB#xTAdUzn5;lbFz*TF)SIUbHDp`^7Q+)E!1r;buNpn?K^g;Rr5Ao>V> zfvQj&UG(Fv%l9@d4Th6b>kg~}dB05HgzWi?$SL|_B|PhD)E;uy$1&~!`%L5k!i%Rq zB7r8cSy~C(gbc22xrV%OLPev1bJ*abW&FHS+>|>$g3#8fCe_;>S|Z)SjR&9Hb7|4O z8`WKpw|S;1;`Bd+yeQ7P(0{*HKX-%&KfA8?DKt@3lV`BA+VT)oBCAuIwwLj>5^7Y2;rPFx%8g&& z#WN5fwRDA#a)l91*95*GaC+-9I3KFSLvGkF9OIq+hy`9G9C<)4niQ^Zk{;eU5=P^D z|EAM8zOkSBJjJ*MG{ycbUpsln@0fAl>B+C>OmZ;}3_D5X>G`M<$zG|I+UOJ^ms>7& zPTt0>295@G1N#*Qwj=&w@rLg;B|_!$;8f`q4?En2Al~smF*D*K^9QI zp&_X$FpztedD0VePk$iCkUDPnAzRD|v4*-7vRfOxLH*-5J%fjpzz6X|?>+4lBMC-y zEp8v$7XI_M*7+8r{~g5}Qi|Qsw*xDw;(0G@t7rtm4>RzF#xt7nWN$w}W{156;urh| z#bS_ft&qB8pW*uPaDzvi#w)FiB0yZg6hGdqcl|*y6|B(W{3H?DMe^_|?VG>TnEq7+ z1Oqq|_CM2Q*uxmqYZ>@nXfNeWq;Swo^B5h8DaUv~Z3oA>PTBNCkC3Nai{sAMRb>g) zo0D7SC%_-x=48+1t?Q#dldiXljMGmSHVyElb2|nZc?%eEO#>3ZT;luMo4+1_^NO#5 zd?Uo77CW-bj-SG$&6Yde4-CN`;ZG7Ks5j=1uY!}6F9HA~eC~)0kTe+2Z_bh9w469+ z_qB{YNtR7_00eyweK($)VxSsM<)54QFFXn6QVX&oXTyVUygMFbuW7ODrrw68T9mkO zs?c5ZfGy+sFyZ;?yqsHAPT@DaLg{_=SdMNL$fJ&U@#09By=%SeF^GqxG)N9Kq>{(T`0EMS0 z6Q0d&cPaJ zn)&R6$%fjX$GCcggf~G|C~b8UThWdvjQR!z*X(<_L8OkB^MCeg7nb2U^9@KuTUUw3 zNQIfQuAuu4f=$&p_{<33kd)qv=)O&boL6S{pfgh;6l_q~Tch;*WqEsC#ezX{fH<`~ zW5~@<&|N%b%@pfW3hy1v#=cGjrIm$#p-ElHDjM1zO|UD|j^`x9J*I{Xn^qY+?~6Ap zYv5%mbyh_DD*Fd`&Rk^HM`GmmBE5Xbd@&;pvpJt|(frIOx8m6G76~jIl`=a94rke=&fdUYEb?;r;XcKL^m1*Vm-EsmC_iq&G2W)n!PGjmqI=l!=pww;gB z{`WFZ)y8Lxm!Qx=h@n|*EYZvvAmPe=ctmqU62Z&_(4@*6C8!L6URM&r`;r+s9Tt=p zOM9XXht8s8$7cVxyw__X&2e&xAQ=yItAUkY&BFd_55E%x%NWRoxl$=@lldYEjvpFt z^5^uQIGmlp4r)0o50C;49@4t{#j(ncNI1hpZgtwaDFLmR+42^MDD-1Vv;OD|P#*i; z&1};rremBq4oKL3&+B;M&$$^ncnqxc2hz#32fG!u3n|K5q%|5MFe^kI-z=aso(?sT zKTe{k9)XYD-fCTil7|JUS5lPd$vw4)z=)%lw~82%555RjcTt$&)dNujrNF6+#Cy*T zgePAe->1nLzKK<4*;HUDOkQnusU5S5+wuyEl*B7}90oQ3EDbZltRa0x?4q>^Lph@<}YG}SRC z_^1N6lSKi;wQk7#gZ2g%O%_pdsl?~u6dQ`y8#oo(2583GCN6Q~~{zB&A%Uq3kSiQN`OVUpFBRzYT z6fK6MjJ@hxHI@)&E9lzLwU6=FH_dVRxnf1wr27;7XO-CF&3SR@(M2gq*=kz_B{T6&C+pm3C z^Hm>ds{Fcd{BPymo3h1V`wZ~~zNtKLDQ<0PVjV+?yM6%(-4&xfCe#m&Ku&9GL2>g5j5Q62|aX_v^l@iCPp^^wf)t8x`}m7f(he-N;1@C z7@Uo8@jR~$V>%3*ZngL+^amfuqZ=~b8?uel3X4v7dEuGixnYA@tvZhsR)ezkgcVp6 zVy-6}oRU%H*+?nG@TJ}eJd`Y~6hkhm4_vu(E_4Qnb|Ve7-kXe+zSQQ_Kqa>=S{C=; z+4~1A78nwv0KpwEn^8p|Zaz4HI3)f~!0Oi97!auXWZAn&A6C|%I4eSWifs{;CzlCi z=Oxv&OlY9YxZawCLwK7xoAjeSB|?y>0l?;Bx+wkpYAJ9jk0Xe9jzS5MS%D#N&Y)4N z0(Y|P5C_cSHLw%p79Fca&!=Fdmk>*L8^rQl-n9gkMGX(W5J!4zcj2;!m{ zw)m5L;6cS3n=ia4_Eb4uH+ef)TTt;?B*xJJsS~5{Q+G=%YBjZ|lK}xZu>v5Tjt6g$ z>hSr(MJnp`XNspk8oYB(V1G&F{mnR`)rrTK26ocq;L(U}z3D=wbIe6u>bIZkQDczY`V!Jcx4ojj zHyTsRl7tHdpdEG;WQ&|i08*o|`jcdGV1~qfcX`SGNXUEVO*F>nmK)%W^?JWZ(KP-Q z2gI+jJKi+|&vC3BW+w(|Jbt(iT|k|$(zD;Tfph?4l+-gWdOm7UR=^?YbK~&y7qAaQ z63scf?^81!Ul?h0dTeDyumgVi3`qev-}%vOT)i#lE*0`i?4x6+iQyb zx@}L+8uYCP!q;C37{rp9`*2PWpx~?z7xTZlfBmwx}B|>HG-gLM+Z@Auz2d zrmTTiGSv;+e_yP!Mfw7SGlCspQ(+6mX*k3Bb{JdwLp^$lg2gl8ktd5zV!%tQbi+f8!?zS8 zrL44nAqx*_h};8at{LIZkmHTu38LH(6Oa%&uN7#Y1K*qcLrv^qLqWFsb-_9^WRB>E 
z)&1}2eg-($)qEuN$xW_wP>V&*ea`a})n&^t?Ifof`I~gnZEX#`pxwAn($jnRfK}0X zlTm{;(jCOBX(25(Sbs;t76oynL9q6CL|rS8cAiQ-TwA;b>-e0-U|evo#v$OL!YZFu zu%LBTkDwf~U<8QWSbFge16C0eP;Sv53kbz?Sk7(%)nS4QG?SnKyy8h1rzw$=Z%cH8)D}nM zglarzhEvJzIX2@GNRk-;P%PY&BjMsnA{rxlivRvZU%vy*JSBm>)nUlY%)MXzu|eO5a?bz}(vpU{TzyWNR+woII{? z?O&+n@&k5!P020-GBrAA&VV_@1l8OT_u{@sUs?T}K3kjnMrGApMf-_ekA<%Ai4 zE!C8-ztb*CgSt=05Ljvl9YmLeQitwRM2@xP9%oges!K-C=wV3kyZOl+P8~LbZnpg` zGK4Q1zXK1g){;9r1tCZot+yCLlyV^K$BrPzHFy8lWvf)lg#gEkuTl?Q<^2?Ae6;C- z4Iq(4$hbMTzR0+#TR#9B&ZCXvP8X`3P6~>S$^=HQY9+ z39F7B8|0{vPMq2)Z#YVT_$4^4A;`3N{5yznJ(Ah00Qb0#<3IB_RY` zA>KvQ@pATz!y&KJ%Mb?LyevrQueCJX^?(VR|4Hk5i$oBC*SMpuHsb@x=~TWHxO1KV zV%|3IPc5Dzj3BB@)nXoG-+n!-$`JilH5CtmQr@7w{7f!UR7)}tCwIbyzgY1(w zUbRjy=%~T`z|@osMMYpcAwnDqH(bf?s!0^gK^toEQWv%j74m5=Yqv5r4@@XK^{d@5 z8Cz8GuKul;&pBz%rU$d0LV}B%?G8KuXB&vk;uubseJ4|~g z0(QEqQ<;8I^$h-KxEXqU3iclOq8)iB@ZY+F)(lgJ}2uWc=`b^lfGd*fR==(n?2(aW|?H?7#BWVnZB~ ze`dS+hb_w>1t1F@aP>(35i#RTn4Kbnexbn$A{+P-DOjDX0_J_Iarx7*fY|pc zU0&YoHGYB0PzFhCC#7B*u_bbBq=y~fzAMLSO!bov|NG<;#k@4cB>Vp#C{rNf%2-=` zOnl&O2>48G%5bYf<1l7@X}4ciF%`wJEx|9(_A3p2}l>5E}M!^`J#IMVUEeYfbqvPCWkPODqf`F2`j~15xz_i=P){}D%cq&cZUB_S#I#RaoxlsD z8HU5>f{f;ybym!w5D6;yRR|I^&kxZlFdI?~283b~;b!k;7v{uVZjM{x(e}gQ1c?F- zqbaLTnBez@X66FmSi5VsxKv8Hn6`sSFh3Z?;Gjs&@!010@E#;WC=yLTZk8T z6)SEmlFI|7^HnjT5=eY?C;Yy>^a-xjiEKb2ZLH znJTzV%Wl+X?8>?Qi$xiYHYJNPI_aRU8B^J+PpCVx@vwCVGO)xE&WN`$^b6~1*!@@& z*-3#IBtDK3V4P~DRa|utZ_>$qy8BZh=CbLEUkPLMhK>!CrZHB2WiZ5BJ5&tr`k0*# z!~Q7@2C3WARpr4RHa}5S1>1<>v!qX5U7w@XZT{10@^iq%h|H zM#0RTr*YIVPrgZDMj*E(g>9z9a#XUU;_R zOyX_>VyZ-K-(4s%OZF$3UXQAsKwz0_jyOz;D*37CahhyeZbrt&g3`XY%4W_Cwzi8? zN=}cu9wdVb6uOlGM{tLnYcf?%dosz$1O4rING`eXW_SsFj3X84YsMxqCfJn^i4Jq<3nU05?X^J7^(N6$J*^r z(?Ib%12~?ok~c;|C62*wkI_S|*3nGP*4O|a`=o%@6YhZi>-3%VE7c$$5X1$?EnDq_ z1IxSHy+F`4u=h|jzCqz1R_Ws`uXjAPWnU|{9kmMPfM1McS}hjv2jl6!7-Zkx@&ya? za${|!nz*d$RGfvOLopp0U2q%8--uE0VX3E}|9378@r*MzbZZ1F;Rz3`LRw(D#q>>L zpWOOaDwgxmYIJ%yPy8or;7kCZ7yV>ODv@2nc}Ycv?bO2qhnX*cii}#G;ps?{g|)2P zIW;5mRK*%(PrDxK#W*mg;{`fhVwLL*3K9lvyc(w?*`s%kFT3U1|2<5MtOK;h#w9BK zVk}E!S$f%A%(?dd<(u^a)t6PWk7N>z_*S9}1u-ai%tg288Q{VwWpNb7jtWKs#cDkq zJul{GMyU)B4e|MGYQxU6QoaO}7OHPuBXdix2q#Vcg4i`1-w7c|xz1r6QI%egREw1pSXD>XnoeY|59V^K>_PaNrxRsNlxh z_;_bie~)o$lOM=8%O0a6(?O* zR=#5@Fw;%@)Y09DpQDAk;hPXyGigxFF2S%(z;z|rcfpJXhUwk}_ky25l)~dPVPnkw z?PU5j+!Gyh*`o)rWRK+_8|rTNG<>;?=BCNXo@A1ckC%0=9*)h(;@d*H5XN-K)$;G6Q#9{%N0uO4Cc)A`Um!Tb z4Wp5>>=?^{*gI+GaKyqJFEe`jLF#P3J1V0V3abTS-0%gKiX5U|!VF-v^Q_dd)e9S! 
ze$CipG3lAK2IuSC*n`R~&J#J8df6J=oB1u1)C3tmHh;958#+ zY#9ER>a#*A-=h=k74FjOjk0%4o*oB4y?zW~!lt-vUrSR;1>wh`q941*)-n)J#E2{j zFdWt}QEUYIGV##PH!5%#szYVIb>ss}%<Pt%x~;Akea@SRnzsnkZ914Ah$#yX3T4D;sxv;sJo zSO#H?zL{o|N8*u79&(mM478aoonjKjCURyvjcy{HFLyq1sjT3*ERW0}^Mkx9bctk5 zl1U4}N6Tf?WWcYnFF6^1#~-1*rbm1ciN_W&Y0o`p8Ms1NOHE%|!Z^p{T_Rw}s0+cf zEr@~i1oxTEpYc>$JFeO(t}(F~Ec3dwCh*=$$ZF=wV@M!3JaV(egFbILF*%D6UPxR(bnqX7n}n_cF&ll9TUE!b)Wx+OX);UIkLo z;45%jYdJZ`Jrdw`9i(7ySY_6ras$?t|3v2G`N$Y7fPEVgO5i4SrhXghJoyB~(24T+ zms~Iz=H6Wkr2v01P07?-AVSg-F2k-kyw~{@1qMUxVA>TFgL(SnVgpP8SLf~c3}(+u zUK+70FOG1Q>GE<2q#5NdF5EsJ!!^&Sa=ip8CJP}*mndJ1 zRs3Fn#4nlX!FU?QQBn#QoYxWV71KDY!$^=jAM7>83Pm53wok(?!>HeX*2rzn4=zsOXfOL z19E{}QPMZpk4tuLR7m5WFsbpov0aw4Cm2L*LODZ-dTJ>#em984=@am{TL~6=bVh(N zB7YMJ@#SIp$(6Z&M>Ss86_6+`FBH?MpQ^P!1Q7fyNm@G^!bjFd6NAa|&X^ul(y%@Q z)?wZy=2Q!bSp;)KCf%!x=BWoh{bCIu8+rZz~LW9t(l$`6JLFuesY-8Q3NR zw%6VvfIL1Re}MonFl1vZK>z>%0003&o5=z&AJIfl5&z5c9CuA8&*QpcKn(wg#gu+p z`y`u5h^J#iM7&A_ub zq(tWAqQ9J$w>WXDvp$BGfsQGdt)%mp?3Y)wkD30mJ<$=Yh84_O80B+vG38%D>nimv z%CspBv8^EqwV>TUXSG`;Xf{^c{k7cIoWznbXat6TrS4Tr^@{G$K^!^kg(v*UmtmTv z=%JqJHx_ehVl#i_an?5E_c^`M==9&m%Pi?zH5jWlKNst&MxS@tx~0X+Pz4Aa0T<23Y9WuV(BNKL+{ZZ6nI)CAD5r-*kSM#A)ubddQW-Ecey3JED)x|#B3wj^)C4AAPUB}+ z3m}HmD^=B`3C|j#jAb-%63Jx{;X;kofSCJ1DjIvhVyvJhL0uqfQ1MD}J0G~_oB(UN ztG9-e`H4}UG&K$)Id_q2x|}^4B8@_I{WJj&K#PGek=@IUiHSsd$Xe8+aSBbubf;Lr5J4YvOq@)&P*GOtdrD=M_sYLG3IwNUBYgvJ;TXg#is(Do z(gdD5wS5!;!8WrvDprdIk3$sPD~n4>Z`aLn!FaF(yJqvL1mS4eT#VN(_;c#l!#q}S zd@2R$rgxO1OHW(^wvQ|DK3pA%j$1VJ0oXZs@tWI(_&JUW`+(Rsh%2dg5b(Hlbo5Q^ z{q@zo3*j{Q4Xf5STf8&u!pTbH=2+N#UkEfxHI7-enxF*s!J{Y97^s-Ra_)D_DG|J9 z9AY#2IlB5~h{Rr2&qRApNkvi)$*@d^3$ABKjVSH0AnaaH);x09v{!qXQIId$Fmqy4wUeUTJ(*d^ z4-936rK%*$qTj`$XNL=U!LYfO)FzU7PpUuFPRamA*}L+k=bKbaPZe? zXxv7>2z7ai1c&GWkFgd4&b}5F#ymZws!(5Vp(;t8piCaOTEs-S_Wkh1BZE9BSTAIp z{Ywm`PPi{xFrA7N?9(eUV_}?^)#J>qydO9T6XzQojACaTnQ)o?gD;LM?mBMTqS=&I4Ml>3lApe^Ri&04jwdd;a$^bA?w~bATK{lf zL=6yOh}{7&?FDa~e@A*?fV-SA5+yYmoOG(`d!}$nr~K%FKUf1wU{?Yhf_s{nqpUnLVmrWzoJG1NIf06Z$&fN2O|FaUx zq;MOn5jKftu^q1(cpK7>TO3-V7k_EHG1RD)gC5e%nz$=Sfwj=^n+IqF8|1@7bR|Ky z5Hq;6Akyoa&h#+Mycw(`LMgYQ$=McAjJPE;XgTP}-O<_C*2(`vQnReq0$=gxjY9u! z@riH@zsE~vliEZS zd9a^7PCY@3tb?dkMTt_u?ZA71@MGuIDA!C z-7r%*SS11TTyQOopM})mW-1LhV&`LZb7}5P4skG=J~?GV+Xb781+kmYqKl9Nv|4q2 zGD@JWX^@Nr`25}T<>ZIL-VddYo(j3&u3E2lOf@M1~~~au+TZ^}MuR&XAmi3j)2)r0y|Gd?N%W%%jY<`2 zYq=Y7!XsQG)Y`3kHA80fQWZb9pYw$FGWv& zc>6{Ks}^{wtP^ZE+&|=9v6fC5G@I=MKp>&0y;oGkNEk`|oeyKDq}dqUR+?)M-96wT z)^W837>O|WVuTPKB_P+X7Xn8>jgNECLqep6PHX(@(czhBM8i(4eYs$9w(?2@J5#w6 zGa|HD+AeI@+{w@cW6J4@LI+}q%d^U9EC~jsp$Qkear$K_gK`-sMAa3~wLxK_I?UMh zc8XHk6d)rcWwbqxT5T1=Qt!V~QQ<`XJ3^zXR4$rfVOjY4)x?HPCH9aVFw2ljY%}xC z07Fm(as0<}v^kzxgfToOpIW)wXZW;Xu z?n`eFmV%hVdG&m&&bMGlTC9lT-#EJxdj=dv)wsJtC%L&S@TOqLrRIBvPzUobL;}*V z>nvep8_e&}M77YA3?%$kf`mR0_%;P81nfa72KMjFTYc>cKu4aH!o?n64?Q)K?gN}No_>GlzSJph4?AgP|EWIH#uI; zNH@kq>H?N*{g9;{@b95SN4q)5Ig>O{_bbxE3FOlSE{jo^Hg9m8#8mGEC+PIi(Aqg=!5Tkryxm2O{d+jDlg+vy>$ zBrv<%{YkK3$t2md4sr^8hX00QVtp4v-rhroRw)_rkUlJbJsd?3hgX{%iDm1amlq)i zIT0{1C#VxJ+c21vE!+biXeml`ycXkUsH>!_CCBXl{5;FVj2%Ddehl3TgJ0VWRylYT zc=A2hTIfmpDqxz!XMQ(>*y%fU%Na~!)uFd?-C*a3k{Qg?zOt6nwXgfOL6C#ZGjBrsqAqWK zs;qh9cOiPi46w10E0ZQ;CEaDW=l$;q2tqwR3PQ*9wr*7siM2*#hVJRxjSE0LH;_ch zCxmC^BM27x-V`wOb33OJB6^0X@I~cQ-GX3Scdmp%>;84rbjX?^dDIzl)nZ?9~hdGLX2; z70%}Xdj#juo2DF?Vn=bMKZ|9Jonf|Dwe<-Ha}p?Y+Y^62fL6q4vY-jf={wk4oj|sO zZZ!ne7FDY%@GsYs(GID!+pGdmbuM*bB!&CK%fijTViDIav@@6-e&bb@*~d1rIcejo zb2D9qQ;ia!QE6No!mu;Nq;}R7FeG)i(e1R($N*)(yvw}iixv11&^ebv~oXoe8nZD~bV+qw|uIZY}u-!+H<4l}YOOGkqWIiA!SQ)yn>AMrbwVKK! 
zV36Up;0}subjn}O3Ulo1+O+T(c+AF;0gqQjjwYlIyMr?83F+rz4^4~XHOl57v8S2K zFE38v8{_}qk4Pw)|NnlG3Od9t#vUe|=2h@+1mOu(W!B!39;~q4YI>E!tS)WVS3D)+ zd%Z)&h2%E|rS9rJ;u_n1gSg;H+a!S(u?tYG)B|&qi+ZkVV~UR9bkdvICUQktFO{^? znCYty>?Wnk%yz0!X-m$#FLS76C}0rhY)eHogs@a1WL^rAUUYz z&wF_68Y||W;T;~bx8Tpqcg9Q)=$uNgpV&pik5P|~9sK|~@i`#Vf4aJ0_9f)8eC#Z!P??}I`WPtd%?_X5Ksu{3f(aN!yD-Vr@2S~>CY)KkjD3f38?$uAHiSHpD{QV;W3-P(3|f9VpC7)e#F+Zm)1W#G5r2rf!UZ(Z0z zld=DU*X$~D<2dTK#i)#9WC zLcp;!1TCP!+^8d~MpNSO&5hv4XA=w-cC-}#f*Km=pvO#UJ<)a_$g2rCu;@!g7m&+= zwioH%pzG{%EESW0wHsk32YiympEOn$UL|!lQ$phQY|TMZ$-AbrfkR=9CH2dG?`OM@ zql(owl-YSC?Js#5?xO0g-a0T@CA5F`u2NHQf-gD1$2^s(C(KnjX2iK5+jPW2)_Zq{ zLS=T>H(!v(uQ6e%lTBPCaq1HHn^Mfk$EDK+{%8d_wiMMz-ZAbZ~WH#fq!$?w>iAkqY zk}7Rcb6v!Tf_2|;81;OaVR*x=G|E;@Vi}SM5?T?gw=bnG*0guF!LE{<2QCdwKvjM{ z+f^9=blrL55RZWwF2(24mu4SG>j*3Q%k#q@S|i0tEs_gND4_FR+I=Om&f32MJxl$x z!PmkB@B%sP#iX1i;B1_o6?lqS8LA0`+z0Of8rvbUJ9k-&+^`Z5e`;SXuOx0k&cNxV z5nO}l0F%*Mb1m>Sg*Tko0*U?c!z}1}Z}_4n`;T6k7YEtnCm`3px>d+?+$$Op{|9^S zV5MKk?qIj(Q<>4Knl@9l(95Ls;rTJ|6|ExAi@B<9#Bj8)9FhMg>mWWa>%7igj1*Ou zH;V4dC*_y0cKVn`1mY>!b>t@sRRg#nDnKazceH5ngocMOt^(Tq&7W!PE|-K@W50Xf zUp?$|Qb=?^hT=7u*qcTX>4w9tw}}NGJ>G#-d3mczZw-+pqK1M%J+?s0wVFsZ>33IB zuIpilI=hm^K=}3_v*kZ^NGc?Tx&j;Bb1OUC{ck-OdZX_UkuJu=5E*s&z;7=?gi1D4 z>)wq?S!WM(+2#63dk`mDl71N5V*@k$ey!A1H5@$ImgGpp0;1@?Vky18u_(EUoa>Yw zIUt~~m-7@jOp?Z)#4Xf;ly-4%ISP@=-4Ez7kJy?s3U+9`+D%(9ABx3C^S-_ zElhG`+F~)@|02(i*6RwAh<^2XFvkyqn`x0H2eKi=-;{(LII$5N1D^b4+%QiN7-(L7 zqnd8?`lAd-AB?j}2YtD=7oG8-Z zfkUz-S4D|pZw%)O!z$K{&h*a%NEzHFAG^eF+SBZ6hE|_O(ILesu03A>CVVs<>!&uG zvKq4MDC<2tcF&)M1^H-9mk0%W@Y8`^C5=be1f2%_$6_Z?L_Pb~=2%)fLNfhqYK`J} zM^89wD^2UQbpU1wl){~-lpi=x!ZZokQ$| zTpV1nY=au)gNATszV0GMkg=Fc?mZB{_H2}>;BL&ORBU!fR9ORniw?$~>U8=~qzMwg zUesz!m-}dQ7IGX^j<#_K-o6zf{W(YlvrDYuWjrYNsX&8O)L~TkJ;xtBRuSoKkp*MA zm_AJ(JjMRs7GB&@WI0eLCm*pft}tsJNeDfO6*MZ_@u)_>(zlvCspsg9UiYg<`VZ zAin3tkGMgFM`_}g0Oh{KP8j&E1W;>^Vakp7v!nv?dnue}E?^sCNF0^dcYE(ER4z#j zyByFb9wOPI#V&|Zs#j-0Ppd-WIk4A}b}wIVY0FwM?$~=f1q3+gfg|O7IBN7OFl7|h zHi0(NmAr+@4DN0T%hh!XkfaQdk{IkN&@>q~b*5bbVU4 zL?h2+2)!;ZKs4}Wkmxfc#Q;qu*F_#V+H&I(Z050+&v1}c!>J#Fq|8h5o>v?=xoc76 z=Js9BSk{}H(XZ?B7Z3r0z%+gDM30Je(RFmG^<5w@A`oi<`u=f#Og~I&1u5evX#~55 z2yx2o5cM#voP3=srUw6IxXD1`^x<_6Dd&ahEcR78Qpq|}&jM^?h4F5pX|leqk#kVc z0XjyQ+3jk?B9qq35Y}!n2IX2;11dGg1&MU6Wv{@x$ZrVTmp9SM3b?stofHt-P&6gG zxriVInk`t8&7SI0o05qg1u@CgBAAUjElE%mQB|m|go3UmH(Q1*I1}vxC^C)5jNw&l za{7h#Ott@Qey|lYL1(frVoN`*%g){kw zvl?M~w1ZC&q7p98=~idOFVxu=QNAolOh!A7T4C=^?hTWteLDOzlk?pt*Th|ZZI%bz$TgWmh1 zdPdeO!oQYfM%1NGc&XZ2t9h`(Hsc}q^PBpWr8NJ#8F=esxmA$0>AU)d++$dy+;dSY z3Z`7=@i+FM*i${PWkYxLZMt9}q2*5t4?&M*^}^g5O&P)4@h+#J#8}%{nl-@-Vx6D) zs~6#}2`vQ@kS9qH_YHSxeTysh@tz^>DQ@tXn1}e$PN33QEAlrk9;0vs?Lr)aDSXFx z%W(+I{rsyXKPRR!Q-~^8x;8+;T{jfb9*^7#+|_+vV_Zv(i=gw~QPM7Xe*A;!y$@>k(29k>VLXN(`YGWP`oM>)(!KGl-T5IDreLVfQ)F~|H(ZsmV{CT2IZe@acAdk zU~~>#xCjya@0ODapiu}k1zjMA8_#F!;jq^>|IPX6xgC->L!;w!G65EN*kC1{p`kWbvniIC`yM&T-E(FrX{h%`mU&pu-j(k-Q}D?u#b7u30zsB%inVF zj;oOo0vyevs>-FVs1088R6h5M`(cZD4vq6tP7g{xQPGW**}#+<9A?C;6<27j+K&6j zTP$3r6|9QaY9~70ft7202uB|Fg%`c$XkL}(^(W7#uQ&;$1oAnhq%erZ{5pvBQ|C6% zjBw7egnOsokz@6!?g4^-js4|p+`7QWUr)fR#P~gwVF4f}c_)s0IrFCj5A}~63c!bk zn$Os}X(`8l;IZet{JRzQPNSroOS6$|Jjrn^=`W%a+`8r6APQrNB}+=j9_KPZ4}^&~ zS#yOXk}cRmwT>&&XJ;o9HYFgjk*{hu)T=?bR^=Y?0I+)YLrxmhR!%) z1;#jc1YRjTAeh}l708nW8%2eZoKi3_sxo>YCBtwF60$NaeESMK!V6*J6!EuqTR1_l zoQoT!+&K2)PE5uDjUOQbY{4fVFzHwBG=1I`8Ijm6QN9_#AqTry&B{h$2w8}SESMXc zh7&2*Nd|>3=|v%gBcVyox&ib(476H?FC{v|;M(>~cf?!FwGa^vag}6e((oyW4-TvI z7KA6+o1UKx#ox*IY*&y5A{CS?ov6lP4>7(sz;=wGEhElXQAD3dNA`;{`@iI0sigRY 
zr=g}ob9~jHA1;u*4)Ud1ELwXIP-hWpeA$4JiJ4RPy!F^HQ97W>yksi@*C933N98k2jvd2D;J`z~ zyvSGdL5*Ez%@G|l3-kMlsC_nWaNA%*Fi26+o zFJo_oU>LrJ*Uk6fTcu=UaVv8p>O)``U+~~8c9SU1PWsjC7Bio(HZ?9BP@q$sGk77d z7mKM47|UlDA^a`xE|rq_V_qj;G6Z7Jn42oBxK5b_R|5BJ&p(tqCP{^@w7p8cN~+#A zHciIwx%q{APXl$eeAjj)5x!s=p4CzbnwfFHdH7ui z6d% zOb0sk*2HUwc>epU^}iCj9OTK7zN-B|alXwXmdxOokhdbT0!;sd$FO2V@n5DnScN(8 z-e(G(Rxq_=cx~<`suKyG3{?7sGK9g)SL^zbrsXP3Y10(csdici<#n{@#u4fbNL-vMZKWF?qC^`AKJwZ%PYuk?Yy(0loPbJrX)HV+HlMB@&Ps zR}7Dl`^~+O0_WO9RIY$Mqsi*Vs?0+vD%rG0Q~9n1>dSjC&id=??SW`@YFVy7J}%Om z_8urJ^;v{)lfBY3{?%(xULfs-0L`mCQnG*9ZCzZU|8H2zH>_mPOh*p<2y|Tsq3Mj! zx+&m`;2D`_9D*gQ@U(3trF2lhSTIt$R;Vlh2JIDzwIa&7H_Jo^f+1C&vUxN>OAP7> zg@bAa1NzKEg$(JVZLrs|7ZqP3Y}_O`M|?`JGcV#M{3bzsc-}nL-zI5ZW-cR|YL$^V z!FKytA2W7)m_2`K%w=+n*%6lE`f3p=z;}s8O_H6u)>pUhK=D{u zme%$TJfed)B3&YF$k(-85WW8SB{DAic3P>;Tgh=o0y#ZeUQ=!{z0OnL6KL-aRvL`k zsyKd@5@=*Hn5?tewYV-eGpMT4wL4d`5Fx4U+%B4%*Sx7;=Rx>ADDT+M)s+KZ)Jzhp z{}ExVnL{8w>5FP+&yX)*wHsgN%~4}I!pN5|Il^q52@f0f0;+%M`LveYP{|fx7a5&& zEA!Aufj07xrSi;Y+P!dCmM~EpV20KJNXK;8xE*qFhC0piz6dVd^ zq^mYb=c>=7Q<%JZnQ|3~!Ph?bn02i;Wh2^mZ7*8`=u^~Bw3C@{mf#pS6$1%;>^s^P zUbziYuMlZ+;>kJv5DmYM#;T>{e)uSXUNzJwa|$+C*1beTDIRt!gC?}RWOJ~2-jcd8p{UAeEk6$m2#oGuUT+;a32hSQ`M zl0`;;Y!r~|M3tgj?Lb81E9k_!uM3?p33kN7IK z>OU=|eXa|q0mIdKP;!=295@B+G+f;*P+BVlF>hHB5cv7`)7=qyC(kD=@*}T0H4}6i z0iV_qhT*rc21UU}W<=qPQTk5ne>F%dA%1=~P~L1kbT;eZr5Q;Phq>G1lA=~%?q9K!sbGKHP_pDX>p?aobPYwqCP zFM4|@@3nWO?ZuC|eNon~Dr?=n=;xxoy2eBE<8Fwevk!ikrF?^Utp7V4s##%9weR)W zm}r7h1;6q=$G^G)_}fq$-)0D=0*jO(+0VuT!5t}~|B$bk=@~}lsY_D=# zpTG_{GHyb!*%vK2IyhdftKBNe7cI6Yr=Oz4TX|+3Sg|lgi@WJ5C1Y*PUa5A$xMy{7 zWXI~I0Nz0&`yOUb7KmX9;UId8m9%HU)q@&eP$R55l&}jICBj~tj<@}`;8DqK2C+@| zZzThj*4Fk@{I!kpJan=?{qqHiAclM}hbflBII(6osllz;*{jCWObQlmNA-Z>cU=8( zjPv)DSyi=0+i3BO2K1Cf7gR1scI)(KvT=rn2?Y}e2tbZ!emyW(%g9YfNCKl$B(*LV z1M~^bP!0JocOx6hlqde&RdKGV0bhN+RRA`C>Gc}$RY{r}P1d5rYb^nrNmz>qc(s znm*z7>mz$;+%nb|>zIAD*dIkKlM?n5M@0X2=?xnzJ>AWw2UaA)k`5?>79k=cka+oX zNE%Q+XVp>gcqwq|7(@;D zMR)d}vd{#le6ENgwJlqyfVe8;=c4`aXV!O_|G|+bpjbLFoo&Kil7WYY4u~nVUC}87 z;#LxbBJ5`9HIP{f`-?)4`xMEa2`6~fJwg6ZS}0hlcJYng*=BWBA=SAgch%nF2>f45 zk>BjPO09Po-RhE6Kx7xxIV9_L2>Rz`h@;A(L$=u&j!60Q|BsuXpyx(}RU0dTpQ%c@ zPnt&f7bly7$9O?}@7bV7NGKBh55xhv(7ASQG0=v**RUEsW;G3S_y4oV#*(j5&fB%S zhO3;|15WS=pj>IacQ)@AD_m^KUx>T`nihUScH+q;lYxc=-j)s&k(VCJut|DC9^EeiBCf})+xPCqtu04fmAoQ&N|yJ|sxW`DIPhw%nsLGp0QHS@$nM$ZTa3xV zQ|zb+ySZedwPX-JUF(?5yUy_P!YCKbytd5dSEYjvZ4_N5VC9Bf({6Q?#KhGWW8^Zeo zlq^3L1Vl2P)U%pvT7j;Sl^aY;9dQmP^7(Ss)k}z}g2)5fK!YAD8JhBJ3EM@R@oRZ= zld2UKsaEsD;@B}x^T}T-i}BOab~%nogYiMjoARO+2!lQ6%x?R%6;}X8j#^^;Uy@qb zcHLcd+@0tJ^}Wz4P>%(o>QXkRXj=%t^x+5+7f0V&_KIA5@o@Z*_Dwq?QPay@k1b>N z1S=lP2yd=73XPpDssF^s2*zrD(2zUDj)cOjQ870};Fv;^Z$5k^<)<{3U(wfD z+_qCt1IV|MbfYPMx*~ywQCz>92~(|ygV2JOrPVvf{QmedeCD8!&|=ke@#mcyuwwU+ zT-iI6qI0Py=YS*=;WV(U$7iBrs-a7hf52-hwr!$!DxFWW%5rQ`25l4@|9K-ojw3AYj*`^I1!8a=%rpnQKH z_bBy&Uxt;sY=9kh@GE@n{{?po==eSGPxob-`p2XA!=l;?i@hs8^xbKMvTH38+_#zZ zNJ-}{n7MA}5{UO=yYKt)jzs8BeN`$ymN<;vX6eN@j%wCNCl~jbmJ9LRoBgwRd*~xI zrP`?XAET}1qyFVR;2<)aC^3aBb4&6juyNKEE~jp7GTeDB(@_YZ#3$09117$n4a@8X z58ekkV4-YFpkR9lpfs%>ekS5{pXO@@#n-Ghgqx@zUCTfi_+)pmtCH>3bE8}jn^7hYT4>YZgBXlHO}v5 zYq{aF22_Jx5H&0%MG;T>beM$Xxnd`cLE+dI?`z!PBdsXBtBuVtq_yabS^j*7EuJxp z2d4Mt|KZtaNE=Vm8~B$CRCS;D3rxO9T=%tF4w&?!QZ?%EbN(ovczC#l!4Q|jdDoiP zQO=IZoX0s9c5tq{kl>H31&5c%06sv$zg6nHZ*dJI)bJ)Pp*dE_n!h!xEzfyZPJ{b? 
zST?<@$tM{RV+giGvia$+{~Yf2dfbqBF%%iQ9niKRu;(K8U@awHYe7sA{7d$RX~wN1 zi~b_UCet%N!dF52D&7GsrM{|$5cr^GS66$3Gd!+AImU^h>v_25)?wGTcDF%A&3dN8 zg&bvT2N$s_^(nZ1bz=KZU3PQ~X(;oh@vZzqi6BD7ae_FQ%A8mLniSo@f%KYuN!V@v z9}>%|0qg^f!k@t{_p`3EgI@yIrH=~a_ck#B%q7!*yGcX9REvOYh%f=P$}*zK9+Q2r z>+e`@9w?i|Od(wW%>ma-6~{iiByosYVIh+Z4_mBM!$`LXF5Pd2PUz@GWVNuV#I~sW zAyo;AbK7$3n#axDM0O}t!9V2X`cbCufVLpTgAeeg0}2|EFI^oo&I%BH zR<3jk%G1rn$SqEO0R-K&yMedD7*%I}t_ZQu!+$x!kZ7TEmt_$tshFtFOSp#!&#rMe zF#CN`?72!m2{&xuC z`7Vx{o0DoaLCgR~iNh5lPuHg0d0JAI4%}L5iBuayUcP&naE8J>m3KItLl$m^+d#)&9-!@Aqlc$T+}ToJQf-Y9I{$8R&{XrWI+KL z{4qu5IBhN4uEZW{p^@jVAOOdxf(!DLg`E3#UYY+p`S7VIL%ORZ^H&D9O7Qmlmj&j3 zeT!wBZ4vIAwq_>b;M0svmf;dn>Z#euQjO+1OvF7y{iH-l%dd!)$m_ zf&v=FITTZu8pU(D==VIcVJcx+ ztjH{(BW@kpVF-i49x|zdsApy3cnAA(lO=O_N7;_fa<&$%XN24MQ6;U;)d^zX=aFSf zOpZ!HXV&+Tg|4lsUlB$onmXd1DK*WPlx!|Jy>|mY2D8Bgsc2o1CHaj?@e{*upZ3^6 zTN>?0Oy2e=jLR9+r{IR?$#@upu~slCvFf*K&!K zAwscK_0d25gR{>Bf-yt0dVPYX!2}%*|6W>Jt*;Qduq+sI}xKH9)jao!}ze8CF+q^OD4AmADN$w)7Q69qEkWHbX z9nYg{Tlt&kLg?5$!a0uhBjCo}KCv!WsWkVh~p-lM&1iL#kwnl9kLp0_4S(ofU-jK<~u zMYgK-XG_ZN?mq`FiG)-1-?d{H$tv+ShRVtq}zTodTkr6Yed zqe4LMY5qUH=Dv#t@N&+pRBLBIETp(BcHjZT1ABhx(qF126p`8 z1V&T+!cqU=-@xw#f&r&G&h}#RH}T9 z;r`;nCl=lnFa2)^lcHH=As1&ql{IiGlIfI!h~w~ydUorr8h%XKO6*{Hakil2m~;cw znng5#5C#)I|58?t%rEJ}=e&k2kM9u%7vmW68pXBcbmAR}T!L8cdv*t^v!b!bNOL`*IU3eDK2mJwl^o+A9xLrw}cs~xh}<{khI-rM7PP+X!PT zr>~O0B2EW0_C+3$d*AM(Z1eLbxx8)$7EkWY-%#%@3bS_RsQedCMps~3ppTf~$cKIx zw;iBA4W{7YgEmGP1%qOpmjfES7#<1++A-*5ao)(1f6$-CNLXy5XB5slrY0=C+J&w9 zhh80NE}wcbXvxX>e=xyZJrE*F6))GLOscZ?=;r5Q9Db|qi`^xF7`AWmz$rmL7a4)f zxslF$2XHN7%A^YUSV?(6-y=ap8!h3#F%56eBz~FePZ)}@YZqyQhwDHCxUr&@+3E}W zL1eqiM##pmy$5K_Ij_<}1GbtYfj`gwM^dq0L+O*cIKpy&K_x;st5N4tMN-T+J|MUc))3 z40V-+b>FH6f2DY}cRC=j|IWi}r=gDC{WvIk`VB;xsk{=;%l}>=)brT=Ztld88a*8BO;@&O6P|6jnI1CaC2e2FpQqS?mRxPs$=d2Jpgwf=s>==$MmL-~4 zNxIErbGPJtx%xT|LMe5OfG1hzxp>hK|ss>+!GR68$;e|y&uded@ z38zJPqut6)%UJTctex@vyiY+fxN@X@SXpG?g;uj9xK5#vvY>UWn^i`kpHnX>)`dGJ z$e{Wrm`?htO|PH_GQdI#0Y6>B;J1{Qw}bb^;cyw&K$W3n!!vdrhtuiq*^+&l22i|5 z*^o|XMoM^It$Db#;Ka|UA+UPGR@PNiH2Cx1&7Mh2iH@D}4EcY;$jXKdu74sJe!sQJ*cu?78euRZ|Bez2g9SG8L9y%^fF@6PASNgbCJgt-!@p1{o z)$csQ?-y;9(&+6=LLtz{FN^s{8w>K^T(MuST)A8n^O^>qrKjnjmL={WrKYi&W0WM3sz7NCebzAdQ$3q>&lfnf)pd7fKjMpGm#UCJaT_m1}-ID${D1rnq+UD zbPRW2WfMU2m-|+ygJ!Huwi)20B<>e8rGjD`tLPT3qE)wrPXzO>f#yD=iu>9xoRawg z3}+YI31IM&JmmYmE~N9>z#ZuIY;ON{YD4_s|A?EHj7}-+{+PlI#KOq^CbrE?g|rZX z7)I!EHm4@@o6oeWU70+H`BBAUVe6@EA}?B` zg{DZbK^tJ6j6##%X-!^o58J4cTR}2Oae~ZD|0MIEofSWKz~$i6;#Kn#*6tj)i6Jgc zB=9B8KdK(%>Fq0Y2Cu3c5T+itN30dD(E_Zo4A432_sSbQSXXPpE0FXC1<12O5C$NS zxTi`EfvMna6MDiWw^Y5|=Z5}S^tm{?`*AMgNsa|<*v>JvO$c!f2iYem%NnjK;FOzH zqwq#hiC4Zf_4xfQe5{-LkQlc29sG|fSYx&XEndSmUd0qTS1n{^9`WQI-JB*{`kMFM z-=dov9SPlxdQ&EmIs8ZLR%<_wQ)E~Wbu`;eu8bWl=kd+dZj;T-2Ov;VFoIJdm-PTJ zTgt{oX4yCcE7lb16D4Ad`{GW!Yykycz34yP*^laR$YT zvsDVUy6H6x_Q*kbDw{yx^Ph9*&%g0l$q_X1JfipJBFU;`EbOmvsD_Wx<@%neiaZo1 z8`BHR8E)2(iEO`C&sbkB(s@6*mdH2c*_eyd>}RyeS)THsnXy{WhG_^^h3)^Ew-o-% zGvZkXMwJ?60;)zt?WPq8|L92Ri1^d_eAEjL~#avF4 zz?386V@;RjU!2xWWIG3+pwpRh%x>`-2@5>He|8r0v)`%g7&w*gyK1rwgSuwe7AT?z zd=2>RBAYbEua;XT|DE|Y3Dz(bUw z#X`=HoHHl}f2^#xybutiYHRkEiIxd-7o;*hxAR6GraQI!S+UN=KS3t$_f?>3%YN8q zmFwfb!I8R-gRx+y&A#nvHV{srZs^^)j*o$QwBkxAZG*JWDh`E z5PQY%S3yPJpDi@;tw{p@{Lp*3uC{O21bPIvD_V9gKVzX|$UAp|y?1?p7rFUl(Rg9W zIBFRbpBV+d(k{fg*_rWaTvwu4)N)-W^b|sDX1+ArAxqmQD5)2kTw!3Bho84P%mx$-&!8C_qY?0dmR66YZZ1LiXz$#3|`+`5h{ z&3kPoT`UW!-~^`jh4p??Nybi36u^KygIu_caN-`k+l`pVI%J5HSCn`v;8C2j20{fj zyBxcvh4jtthM_PJj8Z4!>Ul%%$5k(yA@n3iF~1jgwWN`Dvi1>1>uxQWM0y%nO3vp) z+AOg)flE_-KD4_FOc~@AbCGHb#aIT;qj7yRA%v#t7Enp}FV(;&9Z>{7;!a~iprfrK 
zG98+qEv>5rCJV9c!R@pN7l9r0r=kY~zr9Y<9zWl_yZXEyS+;icrp98(_S@Ni41M|1 z4^K?*mIY*{dqPS4Y=je3l-@-Whb75?PVpF)la}2K!+F{p#gt&mTAnXZ#wE-{Ofe<4 z5W(v|hKQoo?uLegg9W_kU^jG9fi8lB=&xnstfSfAt*&TT+V1y;i$aGaS}2SlvenS> zDjjATlC-1|Je+M#eE;uUbBOdNQWX6-M5k@2qb^F%S3|JK+7PV0?}FRGD6vL8-jAK=-p@ocD-p$>J(AHly}by*(u3#sW@bdlgFL^_ z1ulIRBqOi=!KI! zs>0KYmEqTsw_cSUJ6y`TQq9&u>6}&2H~Z86dC)@QN#{FF6DPTzCvAMvS6Gyhq8TGQ zZgRt+3QfF{sjDCm7Al_F1+ztnt^(k9w z&f3|Ae;oLFk*J86+IiprFfe3eR388U000010iWnZXSSEaCS#HaZ3IL9TuH(u`(~sA ze2(O|;Q(j#LHgovJ5*E#Su>R}t&5CX!4CkevA#ISpRLXc;t|Ph@j-k` z%VR;Ms*&t7mtsaO?Z?dW$r^bObyxdd{2J<_oEkd^CY%$<$)k`nq8QMG4x|2*8XRBe zf)OWI15pLP7~ib|z`$hcYua%Nu&mV4l(w0(@c~qV4;0pcGkKx4(7mj;g!ZFo7cFVj zL{siTPbvxXk$t?wsGWE(k#tFcK_zdDySKBMIu1o6lHU3}bk zHj-QJ9u1r<7g*4zQpB0nH@xq{8fAP^^^uQ$NKqX~tI+}Qm4hL?~&*9(1p|v1RE`(Ui zRN%Oo;9z*LC!WRSZ_R)lB%J3~yD)8GBLmJe7qJ+cf#?;7>}~n>(RRM@2=pyjuU$-vd~tZy9!fpxIACuZkzmn5vNT;Uj3QAH*7Z^MBDkkcdogEx`tt zYHQ!jnm>o{9unE&Xe>Em_fPsjNlyHs;3|Z@c0>0`ziu7J)S+HlTSmtHk>nWF;eXAz zz&`p!A;hF}3C6YLQq#wdJ&c7;eMaG3mB2p&`qJ6wj{=qg9Bo<=JL zH@#q_(n1|unj5kC9@ekxbVjUPiWo6kRtogeKU#%@CE>!hF8w5+Xde&k2KmbBUUl)m zL5oneHp2J)&|~{uoXRpIF|7C}NT1tfVZ2OhLLzPbBY9qM(sqRcUV}oiz^a*j4gJkz z{A(iL6%sZfu@N9TjL&rm`gcC`zp9+3BjS26$Xt`=-e4@#GUdQ=Rg3VF06#hXG_E8Y z30gKu3oa;*rl#3V^Y;1@g&(i-xn&5~Kzi7y2xy4B$d)#5D%ahI7CJdN3P zz6d+woG|bPQS%a1xC_R$1L|LW2m3|7Obe$;p0i=PS@oZ_qOa;Xm!}nQgB2014pc~r z1}~Cxtcx3?e}0&C-x~3W8P_dFj;GWNe@ZfdGIdL=6reHwkM2w&&s{ z2ufZHZ9MUVqs}c(dbG8Pwf42%*4?o|jF@mxDw-}&yXw=Q%p-AD=;jLM++*Jh#cJ*V zQfzmNqc5|hgNA*{ot?CP*B^0`epq;f2&JJT0IOu+fKK*Ga|lZlYh1FKk>gJ@0wuHm zje%Mwq>?ecJg2wbOICz%1H$}&T6O5qoqUe)`-nN&YGhB1G`8lU zf4>B!&*Py0!0C`59x>SsAW2k7`PACtxT?uAuO}U^5F(fdZ&vO3OvBWKnA0u{xkxqB z-n1h8@pdzsYS=}h{|(ZC1ODd%!4IQ3o5R-Y&%7@zU)g6ri(2X_7ExK+=;03`{nv8mXMSlm?3@eW+Y^lQ(5g)E9V(~n!TWjF~i=yQo%tRm3jJl^br zCW+n9nJHBKx9C}C&!iMsM&zs(GuCeS{VLwnEuki;0{S(pC7KP$rwA;w(wf#-sgyol0&XKmeH@C;_Zw`lm%a8f z$;N^tL%brV14*WGa4Nja_=LXQ#?w+Eiv{(hoVWTQnNB->=s%zKKSXw5E9SDf_Tk=j z#kKy73%KI(MfWp$1lxQ!ZyZz|&MXBTHq(%T?d0W43D+)=5#E!br|(J(%zRI9r_b&( zq$fy$d9_y2?1J~W|6CAR5t3z6zpzd`-AXY*_G4cpskhgcpZd4he_7`N_YAo3f$bh6 zouInYcgKVQtVd+A9eCcs2DI&^`zKOj)$F;!UrXj*O`@v4IzXSMB0o$Q5*bz2vM)k$ z*{msXcYE(W{D%*g%FSDI z-(;lY#wZqtg35Y`{If~xxM~Y&pPCoxfOaivWNJDbf>ahAWM)&2%lOkGZC_aIE!y#w zX!XVD;NxDpqm^43oqvgbIeHh}YH1OvWa{^|4am--#bKMC=9_wKOh?nQTU>@?B+Yqr zp%BjpJ8;SmhC12ybbA$`GJF4B0MD%o%C-HZ=2UxsmLPsYDULS3rP+(lhMtHRl_JZ`SG9}dx{0A+oo4G+P}zgdXV8$i8CCx<;DqJ=H<^rVTnmRJZ*DG z*;L5-Fj;ekHWcg9!`0&CE)xp$510@N_m!*l8VTSME88t#<}-IGcK9ILPsq@9`%UZq z=_TNV?rsPbS7N-=)t0AZkwagua+`o(he%+n+c2IGY4nyAW(ubCVt>vhmz)DxFq(kQ zJ8G((?37Ya^DW!|T!sRXdV+cSR8B%w)uu%tb4!mu>GLjoE|rqmtG#HX^)ExF%`;>v zn)S95)PG-`Sgo9b8mD>e!e?V%^kK(+0SvUFA4C-jZ@JeGJl2};D~=M}k9GP@Z%hWH zrx?43%};K_X!u2lq^N(d9JVqQ9hk_kTVqaLA?ZU4ma8kVXN=3KT7zQU8YO|4;DE3G z+~CP169%!51A>hf$+;ycppY7JJ4!!FoO< zP=E0v_V3SQ3wnND566T!h+6(;}FD{kY-pRCN?Mp`@BLcp>%obuR1a9m= zpxLH@9z(VBO#QT$KX^BQr=o$%A6L*SX_|(JF5gU3hli9@y>B<5X~SI6ywI< zAFF5r+>s^Wf|84=HXJA@>;|77Nkp9hSDP?GB0KrY7Sxz;XX!}-rXg2!HtY&;^kwoUr9gQTePX5qIt^JyF%*Ni3ZJ2lpeDPc-?kN z{61Rr&uONg#S9&tR48BmfL!-VslT(qITGC&UpdbpGf;-em$w3VJc{<@Jg7yyJ?Gm- zJPk{^r_0cu*OzrUD;A{ zo_ireC;^+Fls}{_`M5lbS@E=T{Ygl%Y}&iXwKtkjVS<{ckYq9c!+nDHS>c*Jf|)YC zB1*|a&p~&hk;>3XkjRjMzJ>(aFE;AT?PSy$PwhIV=&nV9J9QJ7i)h3zVCH9JEr3dH zR#KQb)D-IWG`u%Yvz1P((Yp=aDTg5*fY(-gLoX6!ma2^v_HCZGurs(TA(N0wDzx5i zT<^a^8-;nKNnm6Y>w6&IC;L5j;IVUJy*xne&?uLLC&iYR>1qw*UttxFX}L7Q9gpPG zd`kRuC&zC`g<>sUe52{%b9Gik|%ii-k9s1wV`4y;=wmJmfs#QF{tt=CqmnxFE(St;M| zOQLcTfuYWkwn*d+z(C*Dmu#|Q3&+x_zRfZ_wNdH;tRn>c#1CVF-+C9KFRz>n66P4+ zFsaKGwbal?lZQ&-y`($_Bu1Bx)ePQQx%Gb>;fZ|1Rwwm2a@7+T)JMjST~`dEjKSd8 
zP}?v^1g7T9{S|oe&Rju0^<2P*K3%0qOwP0LIRchR2FJzGpk^)Bv>xXB(8xOqvrEV?iMCrne1{BtR7@LcDWwFR7NPDFp# z@j4r)|NnClwTFr{@fBM`>;jQyLhtyk8xa&92eu6xm+pLFRi4Uc-Aq+USVk*}5)YND z_nIdIBjc77OcclDt8VAP!e0gFItJG|OXE(x%S%F@u;Ia)oZ0d=AaEe|QXJ*$#D@N< z^2ep`KhGhmeEI$sbXL0T2jb2` z#4=F(qr~m(VKy>_SWAHP;_MksvSmInS_@`773bEkOb!pIrYA=@wg2s$)*)8?obI(& z!NBQ`=X!&-m5Jh6P8mt-gj{XM|B!-Z!c=7$@HE4Rv2LKSb)k@Ehwq!_{{1!WLEe2ROeO3+fwJHgues$8oKLvQ=@CS(UO(xR?(h=O zW*>%z;kb7Ccu+4&n*N1F>j6a@?lEOIP^oJ?Y&k`E5IL_gNPGhzO?N0{&vQ2J25~nz}GfyCuxlu zp^s})a8{CLYBE%z59c{rIGC>xczEi#n^^ZVv6tXjK3it{u4~MP|MIU>6h;)F)RY^tLtvteIb}y_Awi}g{Ao7Yh&^5U3=xA@PO1d0T8UDAt zqPiy$w-;+Xi~N)Y5iP%XhGS}M61dKBakVD2X@n7>Xx1M)PJ~r@f)}X{YUkVqt?0yX zlftE35hh&`#M-cIcequ`+0MWKkGY=*_Q42VD|z&6buNk-shmpW<^6mLG;?YR*deQW z%ADRxBT9sR$8M*JvHi;TLF4D*PK^LHvRPm^ln|lgu?*TDyJIzr*{UTxd3M#*CSUu> z-18#|aD|HyUHotpv?`Hc8H|2<6Bz=bLu3`=tX1@p8~;Rz-6ge!knHi#dCC%VNE5vi zV|C!kv{p;XV*YlH+q7;;lI99~{&9UYwU5CU+MNC@dz>F`+Q2&lk%`3Ytmvo`J}nT; z$cPF7Jf%X2<|n4_h9u^qI$SkdL68;PZJxaSA+YO_MT56522}*aQs5N?tUb|9%GiRr z@m@TmW$J;mjK5^DcuqI4{f$=ayqHdbdARFW>21OzHs&828r7f@UVFg}Vs9+t!-g-t z4MwoPQyCWnn#jN$G<{H|=w(Rjg!Y9GfS}f2YflE_iIiN^>LUFzg9C>oOEP3=Mxw!; z*>&KrbO+>YD7s9~%DZRv&Eo7o9EoR|T`*641DmC6EX6Zxxl`z96e$LLkhTA}a4Z)+ zIhWUSwr)!I%QG=1RvQbGatbUroeMiWsMyNinr+4a7H7nBD7tgQb14Yc2F&Bd_tQ?t zWaG`P-I)U-Siza;<3|38_&mSHVr{sW1Ic{^41Qmy)9~JK(dB1G5i=ipp?BTph9lvG z(+6^bc%kA^gS65$*_ov08PAsJY-Ml%tEH{1C^{(-kxKcAvNVg@lDcK>x4%SB#=%er zn@lC^{|vx=+DEs>G56}V1!N7mn_oS42rEuFX*662*Jxe(oa|g9YMdW|%N2r{$D2Yfu%8Zz>8W=iS1e`G?*3H!?(NHm# zKGgW(XZHVioc``#wfHsQ9xk2x;pT~@c=jo&T`w+0QFqQDna z%VeQ_;L+IGa>@PS3>?K*I32s5V%^28sU;=$; zz~h8qzJ9njMuzBDODk8~n=_sjhk1MAUg2m{pUD+S&v z>r2MAj!qcmPX3X?P8R2YmK5i~aw7*{(j$Q@$gMkO7tq6cL4{@;Ti zGlbjw{Z8vp;B=X({{f|)#qV~e(ObPG@kn7BUcbfIYZdp12~EuAJJWi-m@zm3mJl;X z8*`G|`}`4yAH|e`^lbsN7hKz*%8W%woJ$sDmzjkI<<-&#Zg^b)^(#T`eaj4AXyQz! 
zraL&R$&QM)96mQA{HA(jSgZB9{0IQHAtY)ViK{~B(93=Qmh34g=BV1jLkN9flOvn8 z(jF?teGAJ0@rnJWIe;MbJLta=IN_hmiXsVuG`slh4ocTp{k+O3&FIg%q;*ubyqQG~ z(!a!$UHB|Soz;T~2$5$M-3A%GfDIN(DSexcEFjAsEqf-MjExkJuz#7OviU}HDjMOs z7b8P0`3D49jHuteX41JF?}4+2W-$sQxPNS}R*#dowoN~-jUBbg{)X3T3x3-Lul;>> zx|9@ev%Mq#kcb<)E@zXtcO1DLuwXH_MwY7}I;TCR3_N)KZ{vykDQ=Tr(JI4OvPrPu zza}7kfUKr;u`i6ul7RXrJ5D9K=0lqZmr+)(MoTc@BBSK~!aAmAtK5RgAgaZA9|S0T zA9>2|{XWq|y6=q8N87gTolfWTJ01+=O6={0){>es0X1ufGjWJ1g(4e{+3})7mB#I+ zTN(5-|J*{i+UK;Oy{+bjVOrPJ>)APD;g&7BTEXI&#dD2h)Uz{h56in01%jBCO}#|P zue3GD7Vu!i`6^<|h6S;ddlgeeTxLwb9&}x7l&jEHCmwj|;LO$-g4F|k8Ff$5dWzc* zjDnc+gE}e?el6lrFlo(gAm&CDriGrA@dYY?a%xf{)0AM?x9f;1o7ZFe)1s)k82B~vQ8uwrl1`kPIGN!yd1mtQIu zJeC={fM7K>qnYu_VwACdZD4BKA@&HyKo~$RCXo9`H6!LWY>lk;^I;jR)v8plZ96;b zNs8aYq06BBoUbfB8Ws{lxyw>@^Vis)6qWJ()Zj2SIgJ?LJYmm4rqt6Qc{FW<9_RtEN$P%sjQa(NL#+Di@ zF7JKo^0$ikB&U>NAAdN)!ZrL#ayWPeUAGaj7f z0H7z!A1e^!&VV%o?6SYzYEdo7O=aM_*#Gg@dGG}R5hF@$>%S@{UypMP51*{$TTDW4+@o86OGB87@dYgG%_=`f2G(N>ZF|iz=1#Pqv1R*X zh2r&fef$Ef!o@w|mB3)zU)5t=Kl;t(g{E(aTzueMDzSr~QCsU-rVq9BZmL~=-5F`$ zTD)_Ts)olP2c=Pv-7?aL+m(2cg^)M-1ptYB3pyUOO!zOXj68c&jEN!yUUI9eS$~+l zSyye0c6q8e?2{1GF8Wi{=SagMi3O}6sGw}Tz^bw|D_2#+K`4m^1;WBPVFKwaG0+V{ zrFD;}rkyhIP7psN2@6a6U1!hIBDf&=X~02hsco!}Cm|VEc5zQrl)~?8ZAr{N`0-j% zaNDFVsRyxd{R2KN=rG}!Lkv;6WA;QLah{m3H@n71$R~QwE9T8LIHmF`cr;C<5kmY| zr#00jyNNz9ayETVSjJg)Mr&BTg_{(->v7O@qsN(Y-Ff-lAV3{2dm-}C;^-;D zDI<&Z%DLH}5z{Wb6u1zp{gF2+tMG;*YkVM=>$iik1$3Khk7yi1-1{@F803)K}oQ1?g`FN!ROC; zpf5Yss<}U+A&x(7oG5Zvu1yB0sjsZKXh0;4Swwxu7k0NPBc=)>bf@~dbCHHKN8Pu0 z?R|Wb&4v1i%o~yoZ|f|o);9@52EB}Bu2kH2ufotpO8ZF#K7jnb-A7b1xO0eq4Uw5C zE#Y2Ul}pNLL)Xne*kRWuFMn|}*6ZG6s41_O9VUH~WO~;pjeViJ_uSujWvOqQW%ULU znLOesw9!!@IVIyFBH(q9g;(Cd7oI5FU4~Y4CLNi0zvw$`o3xUx*WSJKU@52ddyR3N zt$v*>GT_bBilL_wY*;ilUoin!lHY+^wWBbiE&R1b|sO?1GNJB~@umr335VV2Mz@0Z2` zST1v`l#ChmgWUIM5WTxv4YW!;JAtHGw3Kpm<$>MwDAsqd@dioP<`aWnbVl&FE2hC% z(xUY5MRiq*By}cH02ngv+g z7oYU=sJ|f9MPZ#&78Qb7YaL1nOTq#@~*10rU`hrdjyG_e{YSO}ahJYKFMQ90i$hP6up|3*R4daE&*;!3Z$5mattydB&IC5K>MQ3F-2twn zBF{LCiN|*M>)7>ESV5g8B={FvIL_T2xH&R4_HH+Q+k?{I`8c~u;6U>Rfb}{Za4A$n z%_gi(fd?FCm{2k#&Mxd}O0qQ!8xTNhZXC~Tbko9-3ASuE$Cfd#Lc~k`Igg zTDasmxC3v7&R*y=uC@pQV0%^9pbSY1r8;bps`5kZbzw-{P=IlJ$h`$m9DKGB8^2Kd zz7A_q%0%RrFs8AN`eQliD_2o88-QDDWO}%rT4SqYFqT~o-GncP%ZkVmG7^ZlfDtHx zoGRaU;1!oM5y>83X#V>fAN(f#oxw%1+9v>|j2Dv5A>(WqXw6Mx?PfPc$OXGNVxdLz zjF0A?&c0ZTl>p`4$pH-%CCHa|DM9=`Nq>V|UiLOG9$Acot@-aCZv%gsu zmA#iSI0`Z(-;Yh3!?(afJmD=ZCWApf5IY&eP_f3=nuz{6T!M2_cwo2fNSLLn7!zIR zirJ%*Kkp%G%!G4*AjzikjV@gZ!rK>io&dB({)_hs5oT(EQV#8&^bEo;1;i-|84KT2 zpT(~KKvilWNv-ZXMW^|xcz>zp8#j@wj;);=Ix|F_Mtva!(#HP_2uJ%w%6)E=3h%#_ zj52;5c&8X?J4qZf&Usie(ZHb6Mz6p4_322h&e_bDU}%k;p|6;J5q$ESy>jnA_m3yy zSc*nx=-=}W;x!vb)ReHBMqEEXBrXBV zFf1*ECuS&CoB>Q!T45dVpmv3iazuegDleF{PcH*aV?80{QeWHgt|UGL0rSil89CWc z7+i9R+CK=@SPK|6_pkm&*r9lOV&lP3aKYj)$st6~PST7OJ;xk6P6CX2PU_9@?)pm> zKEmThMiXQ<4Wrl_Td3V4LfT4HU@cok)m!8;tX03C{g5IhE!)AWK|r!&JdDdfZV)6_ zfkos8$}vJNfiuhM)<~PbHoLL}asBM=9J4~XH+Ep-^Fw+uyJEf*_zK4DuLVx|$+h4h z(Kn%O6FxC35&`3&(JW7$b!V-r~h;FZ-SQC8ps+3p6O`1H0vW76@`%Te`ELSY}lo_Zo924})7wXZjCJ+d( zx%+yD79H*%FYM2nlaJGV0hX*+>v$ovdfo?#$)iu}$d658;0``x>Q{Cwz934rpK_%Q za2{b!BLgIgJ&Fd%0`2A--*f2e8UHo$UH|gsJbPtmG8%nO(hawQ;hqe9#7}9lwwgqCq{JKsrS+ zf0}ErW7C4(`sWiW(^8Tiq;K=R#APb7Rlu+n>mJXh#!xs$^FZLcW&2)5abY4oNrH$r z>w^FoWR?V>I5xk~$D#e4ka9|v-`)4#GQAPa#_l-bNy#IRr+mOwZu8Z?0@V(Z=R?HQ zFPiS9bmB^By*l#BdQ8~tzyf+!Lqk4bEmImFe_e#Z6yTL`CaBttQ0mf;#mGtjk8ef#+UdgPzTJ`z)&&MCYM{%Z<& zc)9fRhvrV(J>;L96XxKEvDi7EW*XvzQJQX?Wt$#NMPBaBBkZGt`W zx;sgek(;?v4Y)BFE#JMVsRY|u!&(#vlw0AA$%?4s@R}cLTBb!p9-1WkD{J1>BR?qN&&pZsk 
zE%mWmA6WU;g4xFa&Z8m+Do(kSuc1@n1rx5p8gM_w|D*)GRdT)g$&Zop7A(tX=3u8# zJ#u7*(wkLWd8gvRdl zP}Qy%s(J51Bb$0cH77Y^*YGA9a}anUD_n*+C<)ayZ~Y8qgC~GzM$ne%Txg^zGCIIG zBBm|6Md{9vXpxAWThUuY*v>C95lI-dU;96G(0-yvi?N@b4T#&BK!0>z0I&$C%mH$@nK}TXS0T~1s8egcugJh4)F72px%YSYx-`v&M4NE5NPQbxR8$D`D`{^E4 zBbUkE3X~mTacUi@OB7_HKBdlE@O1MCF5^Uh+v6={Yo~Z#NMiWrV4!1C6N zk>>Lm7Bv(B0jt;tvH!r~@bTbRz&e9EC#p8)dY=Z)auDvejLn?)MSE2Y4Pp&6MzVHK zoEfD6KQ$S!{n!Qv&qg6Z%g`-fwlyG{i+KU>t^+u@50$p!Q2YTZPZ^3FFaL9T-vYnQ z7xy}LOaxck#Djj{&om_Y+27K6rHtKzn|0tZR$5oCn=d0hT!V*?eLYNkygGEYEk?i+ zfs+S$RNSpt)(o(_*6Mq9!?bu0WW95;PZw&l@eEqkf$UCo*E(sWRVp2lAE8H6jn{k^ z$w38yb~0L|^U_**NQ$nzr{}knTBoc`o!RWDJntKtZ2E4IZE}48N4 z=g=@d6BZG-DaM{K>t{Y)8821%c-w+}y#R1<%Z%PTW5s!AVrjPc7DMi&yzn4XCETqR z&*m4t^yy)Q)-RKmm%eU9ub8UGwJ-(r#U+J~gT-uYQx<0_`r8!lG)+W8Fi*GYGd_GvxULdC<&PPhI zd2-?`SRm+`R!{fA|Fn173-$5(%W#JqbT8vnY#0B0Kzsr9=ZwYJNqKXt-1=j#YA$T` z>ZhBg$k-JTSjHW(ZKv0_gFYh09RiN%Q;**tHhvycMHZK9E=#Nn@AoI{ww)cO4b@Yh zz+AS#ZB%t^BN|GkfSZ_oQQb<<9y#xj9ep$d?F6}Zx6gU(a#ile)&hloH*GiAKbk~& z{ADMUuYV^7Ij`Nggg&*S_N3^p9IfFwk9m-#D1#i%^MK-3!Q|y1y5`9ZtC~X};jyT; zif0})*)&DL+md)pdUOAaTw8U1BIJn^;?ac%?Hf`PP9p7l&0x(``Eg>_=XKP0j0#cB zr|CX{W+%CG3D)+*c=G=PFN`Fe-kF5zAP{_Q7*7hn^vp0#DO zeOLAz_s_n|tK@BBK!`o{wz{aCB7{k&U1~ZWRup;!BGpF(3PYOf1ocn}4<)$28rSMM z;pXo2?)Ax;2D0c`)$d~+2*cvGRPH!c-o>N|I6Aeru5923vL5gd47K*+Ilu6h8KyD` z>~|ru*WY(SwFIm(s8lJ2!zF&(SXWfZ+S($#^}e(=+47@LUolwm$@$OFj?y?eD$Tf+ zJtgA?z)^luw+y4nYMDkm00^RblJ0|4;n@m-=ccz^V(_1?c(Zihj7mbzaAmA;Is%RL+HBw9+@*ZE7v-O{4dXWKv3M7Ty~$MUT@RbAs?s0?QCp;WNq zXad?^Caou~3XLF#^$0XZ#U;4e?fnl%&FR#EP zKki{f$??||0HhC}Vvi*CB^oJTnBBabqO(xV_q$mbC;AuI|DIsQj4rdkb*n2_+$@NOb} zt+TAqWw@t9;lVi-1pN#JPD_RDgZjzVl9CmyV(k~)Y=a-_lB`g?$8V$&>ihm_p20Gx zi^$!rycaiA+L?mA_sO26!W*m6L~(j1w+{*5PR(Xut96XICS}P$v=~ibOnofHx!OU> zkW{x=#1sTE<05I66Oo1GZEC9G7GG8k<{pg#k60drUT)d;VhPZ#evjx;2)PTDX;3|K ztc_aLVKtEh{hu3+x{JHqE@aL8lDe*#)6!ppr{Wl z6M3kKI%*c|@~_~|MnBBzCAN5__yP2VCypQw%IOVf;yC$+EyWhw5dA4>pXzjTuq08s z#sJG!KyD`i*KQX)q_32IYPqHf8Q#3GI zl%TB~N7)sQy^I?Bib85OP8AgVDVS>INtZ%O=eAWUrerx(wvcc16-H^(_n7S)Pn3;<`(45qW*$yun){(WK!;G z+Sr4j$Af#MLZ?)0?ArcG(UKELfTF9(7}EXhJonpZBU*J^5f$Qwls4Dj6O>=5B}w2O z>X*cJ4E#C$;J}XdBRO;vezKWMNYR)|9a?e?IK83|ja^0AYiUT#lUC@#^lr_bi?VblwJOR(eu(d z)1XxirlTqPbx>-^Z+e1WAGYJ?`8!)cZ{Jbn$}!_led5Qw{Ss=&LV;W)^M*MEHURpH zIsCa(Wej3jKtD@c`9N6eW%$Rms3rm0$+!NJgn2xXbSb> z5bXF0WS^w}Q$p}(EJcih{9`Rv98#gp2U|G+;eFHM zad2A0P_T6X?f}HoMkB30-7*gLO$&mz^jAI=jk5*^;Fe5^as{54=Zz(UY&ESsgxvKD zNo0AtLMCBUeHPQCTde3fZ4_SpNwK#(%8%%i=H$!FzRfsZ`Q&%qO?Qa#Z z=EyvU8Ma${j}Z@I0q3)c;O!#y33`s5P-j%L`z}2rM;>ntN!WG!NrzN{B?s zE8zZb7P#Xn;V(r&!^`qbo}S1j24(KWRt|oia(kny-Q)PkM=Je zo8L3LPr@Wn-NChckao)hgt6x{)##7xz!>r29B3B#Y={s2%E9+>71iII%7|YVfg{2F z9(jAzUwfnUbeCJGyqDelTN_2$U{6 z+{ivVvLN-_H6~)!X{b1HAH245ZK!HG#%y0cv0bjb{@T3FrxzTZMlO0LYiK`rOt_|}Z zZO`AkFvw2z+>X=mZ}8aLJP<=^4vc8ra*1nEMMvgw|Yjgm(((QAhemGvdo+B1Z zytk|9{+ah=u5S|>?~uUxRa}!ATzBX46H8&>)2MnslejdtAlIYPB0TqodSnZP>#c2W zdyW&$B1K`gM$g$2#n!jZD^%E???HqOoN6qJ?+6Em7pdTEZ(c603Vcd1I~9u3KjY4l zP0C1>T>}-q?7AN$T_|%jUK+c6Fs>cUr^ME@@T|9a`Tj{#T4Fwvo1go=c`9J0p_)`Zh*xauuRv=H^9ygrnH$4~bOG8rS);1c zbm-44e2&o7>lQK=wBcOxL5~g0Bcy*LUw6LtsMiI*TM4g*2iE?ou3mC%$txzDYKxVa z{0Wu61r@Uv)}06G!_710>%y{%_@z}cSszN$gjeNiBBu|=wjyh^d*+H4MM#Po@?gRW z$zz>cD-1kMy8;e@20EIXji^bIH}P(2N5npQBm}-%z}^(vnt0q>&KuGtW^(}F4#E@3 zoy^rot?|kIK1(Ei#5^O%e6=Y=uggss5hh6oCJ`6XPFg!`LUL?LGUf_-iitjwMcuvm zsLmAiX!goix4umWw3V?gy-LatTXPZyms!TKbFiT1+UNv)sWk5wT+{O&@xXUgPdjQ^ zP|XXeaK3l`Z_iIF+Gg3DkhaK9H5no9QICL9IU*SWjGWONr%|vXv5`)n#He2(-v-92 z1_Kp9QSpDg&va`mRy~)9Cl;yX+!RTNW#7LwO_SL{(UhVO{jgCpBK)?t{nG55lfH~# 
zeyp9{5z%@!i&*OuG1z@%V#n*}`_fZc`oihzfvL(G2`HDPt_c-Ct_k7I`EeK5c-g{POs~8x zYW<{D_SQG92uFvr$F$nPv)aSr$`g7Ut2KagFg2Zp3A(*Z&Ua%Jv{I2mqn&yE`;ZP! z^m?ao*iLRVwq~JqBtK&OZKvP{endu^#S)1XlmH!4av>b&gkwO_->im1{MB>v8Tz$X z*i+%rp1GPqVS+(3D4y7u` z-+SS#hbZELFYevxP>Qi^OqN~D88-ci1_Vxv(z(Q+v_;Q2gM!`kHV61?6KK6WwHYsf zjO-56(?zr@p?GX9h55^5v~T8rDWrMT*qRx;OEzTkjGQj*USCOT)i|W0@;5Bt)(7p$ zu3Gqq$GGp*?>&HJyuXew1OFEYzzRd97Xz>jyNhh-Xin|Cq8@fQJ67oh3hla^$!C@6 zY*nH-m+EM@ilQ+s)X>+_;rrNjT+g5slS{Px(-NsialEkmLg_BB9ngQpy=S1}{$JZx zvlOYQHTA4_J_5nKeif3X$ImC+XbXTsh?uv5HnXh$tp(4~Y^Xw>k95g%RD;8%pvZoE zrcL3p8_TmJ_Qh9{b-FP_xgCZ7mX+m=2k#+^C7<5Ps`MX9vpRmmb6GB{)QEmsKWz!5 z?furQca?*D>^^?-Jp7=vLh;10VZw{#!hz(2dn>w&7)pBJf;}d{mVnC|3)uC0-joXJ zmi?C4Ee9$PaKRG)wQvTSzWBe3x|@_FD96!A=3tWCdgVMSs91XuS+w;ko6W`cwM{pf z+74)9l9Hz6aX>Rn2%X%?QeryYuc;K9@}?C~A8-AdWc#+{HVNDTfvpQKpgz>hr5@Fc zURh)*F&TU-n~2K9mav*6)A@~K-t<|wgByPBeDIAotfrg3dH9sY{Py^DaQ?-#E%#3h zz?^Ni>@<5UocV-S6>4YlI&u0(G?&+s}R?OPS)yoF+-)8!rz*`c&ChQB{_=1Bnepb^EqsaK=1obUkQb(B!P1!|ApBC z?9Kf-5rHU7k7KVzT>ZR;5jPMsiL@NCZzJVBIBMv6dZeHFt{+Lkca}Nl&oDhY@M1?& z*@X@QXZdK};q&x$ne8lAZAwnBo}Bm?`P5&lT0KkNUV5l=k|A@qAl0kJtM|nyWc=Z0 zu&v&&lEp7d>;~LXp>z749&TKFt?Lqz{rnpKP)hCwZ9OxuT>M9GN9lUbjzh^=j@#zg zxS{llXcsBB7MsDDd?NHmLS`d8L4z^O{4ju!c)tb#ElnYXY1V;hK{DiZW{-g6@4Wxp z63td{d=9npw+3mKwRKZg=dN_=)o)@)EuD~>@xlS>@OVLQArQyxfkGkBWdn4&>=RYN zAC+mRRleZOXgcVEgPH`?0_K7|Ll@q5d9IN=`H=HhYCm#U)^C#pu{SwGv zO0`q{;^{`Qdc=L^AQzxJNxkzASOY`%O96nX7Z=BDG&o7gSEai=4bX1PS;K@|tV@3a zLU|XrdX*AI=Yxxh%l41RbJ&bq~^P`4CkxTNeNaAb+mM zWQCAF_U0yW5NG1a&zH%%OkG#ou?VXt{&7ZzMa*to!$)sIbmuX~^bL z9JJEol2xRw1Z@O#j>w=nF_}h2R8e@3c1Q77LLia3o$>IKF_pibttfN>K^9_K08s^B z%*0%KSpfEn<1;~ncxUKyNrze<>3UfUOSQfCr8{x7A`{diH#~h&TAM#=Wu`C<3iAoL z3q@NRKI9uOlPVfFYb|%)R{J%>p9qPiUeG0)pU`WUjZ7WXR2tdOG&6Zd7?3lJ8uSGN zmA&&_bgX>orqdrux}8;D_Yz8sEr=2>tFz_E60F_o;a@Z-RP1HIu)K?OJl<8LOn@#P z;z<&7Kr}_xJ?ZG5q}s|GHK!&0To5MXiaei^M&=%=sc2QEBt=HZiT$W}boo;?NKVDw z&^mxL$g3-F=dD_bcZD9=2#A3qxUa!LMBcN|tw}m$&Q*7c2TlMl=H19A0R5^WpzpLu zDPmyKbtT3}Nrhsx$+zAJ4Ayo<)v1yDr;U~?{wtXV29LVB!O&u`z3zQDt%x)ha==j$ z!WXH*c}Dz^m)}kWEWhF;aI*^K-Q#S;@#?r7kC{kcoSroS9XD3XWgVU}##BToa)*2% z9=J!N$z%)tXG@)P-2(((8E~)O=5#@A@K;~u){)`OhtpfmALPowpalsXgUQY? zh#kZCyLrq{{mHI)JTN*qAsiL7@1m{_JUN`VcGh&D*~@|Kx!J(Fqp2#+^_Ck(iG)&L zj9GH;!i|SZK%SXiEk4P5UAq3yKJ9)$QcOcea2zF;?d5gM(jxE;QMtbIx22S$&o03BMGW4lgV2@2wQ(JD zde2r!kPh8KX*>5wauZ{|BnL1)*3K}y&1z>5>kaDO`B(>qRzE)VEmOiKs|ws3$B&}V z$Q>p>M(|#T9>2hGMtf0@JHud*H;_88zMIkBTN}rg--?LQP)C!?>8oINcxG=KD+Gfr z1@EV3L>gM~YoM4K{NPcoSnsfeB|d@RCVZI+^S0qGE?a6ftL}cf*~#piZHj^9aePT~ zE)hxviFVPq@=dT=Vb0+DTK&IL%$+sIN3_bX+jPWZn6_l#Av{Bxuau9|q?jBR!P5o> zAr5qbBu=peq0Hf+54AX-MM}Y>yU4Un(1qjmZZoxt>+JMd{J=2x=Wcqs<_&8GcG!Xo z066TYOS&O<^+g#?Sa`y{nV0+u*1RH)@k;+&ObO`~Hbgn*WYua)H(W%m6X_P! 
z$_if_j0rO_ckk1Zy4s&GtCz;{PPkC+;Pk%HWu^BU7Qn?me_jfyBdZBb?MJXk7vd@u ze0QI>*E$ShNPDF{plTMob_6a~DWj~|KgNIpf0GmaRpPP}kF&t0m~0TMcpi{y1X2i3 zs&w90xM7uuLO@}b6o(abLx9BC3lj=hDkVBxSTjBLNaaq2NG6EnxHhw4X zB}wGk1|gT1ALAZ2H~@C0dCi4T*6#}r)2*O6lARaUzy0LM_mrdM(1;JiLr+-bjU{kl z2Ci|(aW|>nl%*A4m8?pBDo{%h0)(HU8J$Q<_s+&yNJ(Zm=#2$h{a-T3DF0+=N`}iu z5rP&P=k691wjP7z73!PrE1K@4X*=jN!U8NX&3iNv|MTPrf7SXY%>#u_)Qo30fWvo| zQGeLYa4n2~n=ti#QRusudn=qG==1?`Q^-2sR{OM2y8S~kcdo7HX8@gOLjT+c7HTIXsX;0gXTlA4cZXLI zcsL6=TgAoCPWBV9jm28D(EC0HUqm7xB)$C(MCeQE{O0l4q%kJRi6T@s)J)aWsOZdd zuUOm4FV7g!`-snDm6veG$Tq36?-DmlNu7ooXyTF5?@!(7N8<+g+s%>k`j7c($)#jp z{quqTKpbx2vTBmq`^8;)@jmUkbG?+t!itYbj(^~#Di}HWz0}bVd88-xu3ag>J|W)n z2Q7pbZm8X>-LR?Yeb~5MXC`dF3`z zs0?wyZ@k^t>rlAuvPC%Lgq;g>;AjX&jK#GoXuGufez$} z-ONhx*rAS|a|!MAZ}zHTG+3aa5(Z;yWUr2#43a`zNGsX~|DA>|%dwpQ0}#_+-jFTT zHY^>Nf;qe;G3ns;47pF`gRS1`nE>Jt;-Q{m}V>D?!OCmo}E(@7E7G+~GF)Cqlih zG;`E!?EL&kz37K}CcWrfPkn(XcG1^gqwJJXSyWQ-2fj6g%VjAS@+^@vU)Twp@nyYg zG)tH%GvOMHR=bkq-DveWoS}1uuB$4|K`=06V=N~C000000YRDz7$U!WjRlSMZZZLT zb>Z9l8wF;H!i)3hQ819yTC!mTYaKF8r8PhxY;I$~-y!3EipBo_8)CxopRko_kpzd# zUPY;Cuj)RmS!H}hmQQgTwzATxpBxZ%%g})ub(%3NS#Z(I>MxHSf&ufJpe0G*+rQd> zhY1;h31+Aq-qmT`o{cV3leT#n>pk3O6tqt3M3MBy-gm|)?qN5!s3d|s5;6t|9aCBp$6dElyUZ? zk3Xv?piy;oW8N)2@DkeJ1G}w|p~+v}RwJ~@+}kv73G*jGXNxb_5R`xO>PnL{R**)= z=6n0>n2uq+7QwDwH1~ZdnP+CP&V09}-SEQmbZvsTsB#`QW>5o|eF|D?6_(l37AI-K z@p5b_qPJ4&zjX8x+H!wvRZ&s5m|+mU)09sKRYZaI)k~`pV(7-dNl?+)oFl*as6Aje zE#C4-q4z`22@^($TT{Zfi)UKDKpJRXtcB)kQiA?>o$>Vh7E|rjvfT(vlL``K11Uhb zRE$jROdBeVmpCw*ceDU)oTks*18_Mc=M)puZ7eF46dg+yg8NH%xKHsTKSDZqBUKQB zT+~38UyBv1%Ys=^m45zYm#VHO=|H)v>sMrUx)*kYcfc?px6#1Vy8-HY9;tf-OwsJXVl^ar>5HMW~Idd0dPIskhli zAvs-Rz5g=1-Wb&>u{noI6`*i}-lOGVW-Nk|nKRO_!UBASx$m6-7`hkM%Ya+H?<SZ1YZ{j!ob#G*?cMO^^0Vl)X8%kr$Q$os>qW#=190%Hp z4{uxY#S42;3nX5*#09m!q3L~4y$?QH@`LU&DVvXN%)*_oY(DUY;(Tt zE~T(3`g&vwTyIh=d-`_gb*}xfR0Mk!JB#D=`D%UC>3QPBWp{DeYXG7WPS8#5A4?%J zhm)*&YwqxBZ;tNTxO5VJUcHM#0Hb%(+NdYtN#)GW^X3Wo0T&+}yhBz(v={k$?g6ne zaU7gNRJ|VFi{cEVxZm$(!+VNDj4FDi#=Dj$Z8P9$Fx7JqUjWmw20-DU*)8ozdt%7 zp?8Dut7`^`Xku~_V5&jj`9bYl*lZ8=PE{Esq2ijaTn%6!gWa!iR5$iC;=dL@k)qbv zJ!bZZS_J&R-8ohNj|Jozd)H@cICju=o|3@%T}sa1J6F@20f$|^Q3UjBdOl+-d22+C z9&ReymmZDku6c}Ygx_M9r`}bhkREScjkR>9M}blOZT8w6-y^(;lS>v3Ud-0~g=>Bm|QnKAxGdz`n)Jou7k=8Hvls z-c=;;_boFLk3xUD0dnlP&$py%F`vfgE3^wiv8$11)t$l-!t?Otm*^JwvEUplAHN9r z?EKFMkb#qr@WgrAE_chlEM~{1JbFx>IPb7Nt%Qd7eBNT- zvZ!Y|FcoNhy^j$(M#A>g;QOsw(-G^TxMJN_aqLtGxy)ix|9(#*+V_z-(9IbDS980Xx8rQBro>h@CUr1)573 z;Tz6e$Z=trca``0`1#2`s;)Q~^nxDV8-37mPZT_4Cyb2eF7zg?Erq`#iS5}|whELI zsuM)deH!B65@;N3mA-cEDf0#aFAQX9-4oy;UU465LXQ@i-{-oQN2lUX2S*gNdzB0iEcYJ~qrv6cY`o3zVR#7>xn_T^3NFrw=GQQv`HnP++Ttv}{{Z zYUh1I@U3s$SlJ6n7UpZju4>~p&~##b)5mCycSb-e&9;$7f#$2eCJ1?Hyj*p6Eb$%F61-c%6pzVy-1EeIze)HY`BD;167imA^m=9!`ZbC5_coM?yJ1^ z?0A(Jk+sGh1`7$Q9)NINB*cioSRu`-yj#?yQ%4R!Wwwb4B(50PwwrK8~Zc zDV1v%^u~P5)Iy&IS; zNPLXXOd1z!UIs#ZW*vS1>V*~CTR0)D?8R*E?1&(_>zI8et6t}Gr+|TL>xBGWXd&LM zEsa+4_D&3(ZJ zGEa9vx`em+Y;5SRB%rNfSid-c*&wpq_|PX+O|Eaw>sKePT<_oJ(xZ^#r@}KQDF~Es z_~_yBc-KevTwb6HIK-~>3WyL^El-w(g1emzL6c!=9jM1ZbD^qLx}Yf@BR)dRn$vK_ zH~u^2+fB*H$|T^=xs#g(VQ(^V!FoU!m)bxg_h9K4!JdNd##h~+B1=p}-g))Sy$q<3 zK^6V5XxAPcCw^sz*v9qCB^-+Ug8Z*j7c9?_4mm?xRG)Za z5l=)K-SQpAVc9f)(}Dt$eg{QafvrmTk`-+r?QKqpQ>2?48vt`D%^k>`!Mh)lyDaYt zg=OF)oe;L{n%~_8T+R1W4mD>AA4X4(dA;S+o5;c*a*g3BodOp6QXtHhoSU|`&*EeF zQSSlz(}rF@x0?r z2>;@|KdwR2PPj(m!*ifvqqBZ==_S)fIjzi--+P2Z6ZzKWld8(2jEbmjDSIK)P3{3c ziY5bg38X1_4=+%1@q2feYmj(yGtl;(c}SA$O{niiIj&rW_9-x(Ti1)FP9&Zx?_SX?Vl>GU3_z> zE+ z*^~eD*qXIWv5VM`J&3X4y;f#vL(&?OVg@rdNWs)Bb}mv{;pvv0MtE}VbRT^?J79&) 
zXiUlF-0xRIe|Y63arA7m!=goI&`xcppoB<$c^TR$SMdM9?jFNzV$v5+e}R^qyw2&? z#c}WMZWYMB0-NJ%XgMK5z%WW-*`~BX#vyMg15(0r;{hll(Z#T3+h=PIQsXGp{X+vC zIFqR^ldzJlr1ba)!NTggc4dN+Q#A+;c$w{+W&mu(wFDJLmmtSdRr7Px8p(-@0sEe3 zAm6gGbLB+UIO$KPU6@14v@SwhGwP+hCx})7MqaJX?~}?7{*T>raYQ4?R-Z`PS5X!C{#4=rcM_`RZCb8 z4X*;G$c~>aI<#Nx8p`ME+c>K%B^(5HKmMS@YHOre1D6y{m={l-hv}DZ*=6Wz^23!l zuY^W;L=IMjg5anAF6pP(J;JyLmiM2yX32xp+fg%2CWOk&$=~zxDyQmc1}(QfAKF1c zX8342puGHXcO-RGK{XFcqxX@SK?7pQ#%4JIaMa5mj=ya-sAQj2pe^o z)Oc^lmhhlRdsuv|ye%qkbipcKPbvj3IYN-FYY~a9u64q1K_|as3StUxe6U7h{QBWnKc*#r3l3+ENuj0 ztMUN&i{>uttrN5Qd;I(tdhtFT?Fl+NssOnSR?>lx)7j#O&$T@DNBsmp60B97_E6M3 zlsUzav$``1Z)F?6Qep`&>`O|ejQ54H3D4$bIKiPKs;lzyjYF|k_w zUgWBL<&aEl+7u_mlor6-aqSU2SK9oh~qqc_{0!4FJFf&#&QUt z=rTA)T(AX1%%X;;&&nKANJ#SK*OdeD>IY+)SHd)O-_dSM!Y@jE);l)o=RLN`EX!5E zfml5Gx-c}0`s6V#FxFSvQxnwd8Wf$_i=Q(&Cr9i!s%Wra-?mk6WG(!^-j!=h)3YR^ zqZbk1Uco~$9i@Ba>-w%7-GnF&n~qDjskZhbI+Dhu=Q!Qf}C+-(@2 zm{~-=WID?SxsR~mi*2-BdAcPwYcNv12nP1zR#*nn2O$}!QQ}NBDqOs(fgX)#t2iBP zLKRJyE~ZB}ZH60l2e8Jh3Xhq5@M>>CMLFwC>#o??3FJeOAm}OL))Xc!+-o0{Rp8MC zBtL^3`iL9o%S|Yb`^kvvrefxEPu>JMo$ALiV?8v#Ph5NCzOOjziQHL!K05 zWBUZ=5JGN}OQOwEktCu$Ba>fTpzFU~ut?NZe;&Y}h)d9ZD$OlxZPnCsX7-nNa-a2|N8utUa#3d%D=TDlimWZ6ZWp5d zU5D$r=3hc-64I8B_KzSe^8@py5|0oshJ0t;$Td(j<(sS)70u$#NR*fiZJ3V`D`B)Q z7$ftcm}1Soj^0@}$VvC>*T~M&TqVr|l@KJ7G7P8uVeUhmkesE*<87pSPA_nJ*i{Ty z{?qMg0LIv^kk&wK5i|J|c)WZSoyLJ}yNu7R1-O%GPW99vT#V?19yF#WjdcZ*m&m>d5h+>z3hO6UF zF6z)k*XA1pDC+IELb+?syX@~!WPq8vEW@kfTcxDf`3nV}n}m$>N~$l}WYtx*nT1`dVEe$umH@68}J}k`U8dKwmiKZa4{5B=Cu`!-tz}Q8SBzbI!gElG<)J725L|VC&mOC z41Lc!yCgXgecEP4jdwPe=&A1ojQ@@7$s~>dD*g`xB?r}1WXf;6G2Y3y+ken{cc9La z7r16e8jcPcsk27A{Od44!NB!NF+#DTg=d7^NzP?6ps3$y5D+wro=r1ttfU!StmqZKO}MwE$I z8QG`m#P)?=9cxCmUTp%J)hM{WeNd4IB&WsR~Qc>O*F^Ca3Qe=CivL2o7-w z&_$9_LEcx_O6z&UrX_*%>-zTJJNs7tVU^9lEj17yRNJB}z}5Pn>rW$40(s+;{a`+@ zZr&vQN$}{$Kh}D{QY<;jU@jwn@+0Y`)9%?fCj%3z{XJBK%_c-iRQVXScU7haHVu(o z$g<%woGK=^Vk8H})%+sfh_R;(EEV9TRqPWlBVl7pC6neYga8-DTut?3tdYr>ju8w_ zc`MKzug-5u8~NIbr>^DL#2j{yfN=?MwEcq~q_4gIcaatF;3ZM%b2+P8l12vbFr5kjejwx&aS(>o zKY5}Qoi{ixU*S29A4&9az)b!FG6t*Eil(UTH`s99?)PQ6!((B^)cQg~mkZTi!36m| z>RGzhhCB>7SNf4*q^AtNCgDDUfBQg3s9NgB17WJC5rS33+W&nt&i!=4pCkVZ4{yWg zb290*8uE3U44f(A6E3=uv5iz8FW_#o>R)fC&+ar>0B-kE^Py7NrGxYAfsuG{P3WIm zXg|C8?0N-+GTc7pp6j(#gd0iSuVwhC@pZ6R(M=SbrBxCPn_gw%jz{|62vSh7)R6!{ z1%VSRgJy6JgoTs6tBI)-y>Ea?GoM6Sb0Pcwg@0h7#*s6b+Im`hM`}8` zQUT$Gvg|RM`Uhyzms923lwX!;iyx+{PRY0rOxHF5lM2}5B}YS?yJDZ3uT-XbkC=Gm zA@3n5zxY-{G<)DMk%Pkis~4+(Re6OV<6zPLj4hpM2p8f_a;!){o`S3DIC!70{^FwG zfcpDZRiKk!o=EF*Hrq1#cYl1f0XBN;Nx+1O1IK?6lfN%JL*vZ7PAJr+!}|BKQ4n`7 zBZEysxXUYYZp+~Xv?*)Irudu*nwD%9y6plXIwG!VTAa8TYBHm=P5kl*Lgr#~`?37E z6sc+jWrIGcXAi(H^B(cMuSO^|gFVXsoaB`@t;AGrma z*&8sT+0!r6aC1dv7eQuMxwiuoPWUTt2T} z46P7!0Sq&yaj-S3XnP%mSF~v)*sy|p3d2AjW_-#@zN1$?s>EViq=?IfdH`f-#~1$! 
z-wbLKGl%lWaRfI#3RubJh+8tVx+GtDW;2!pk>$715&!f5Dz?YlVIQLdU8k5Z6gGhh z&g<{YmmHc_H`eMMV$;gdt+1ebcvO9=y978jm4a8uUgN|{u~!&_#E|!Ua{)NsC@3aR zF4kP|^ZSRA8P@RuDf!57)qfx<9r%&{QQG9DEdq`v6=mef*f`p%5EQnL20SZ+xgu7o z%^V$V3K5%KJ7UH_&5jq`Gq@qzb2R2Iyi2;sS8PS~MqXq{vXqxuVc^V&EMfW6m<)+_ zVlcm__yg<1)YQH<3Yd|7c+D(YtPk67D4p`8%IY!?;mKRjp3e;2(BZ+Inv6*KRS(R+ z@~s96fBJ6K|2`4imC7~%9nr#=OL~xd$K8LgPZvX$-VS6-&E^5X38T-Lc_D-2*B0X< z#C4x&ks4!@{8Y0d^pJN~bsI45W>wIbr-D@2M~DFOf1~sj$i9N#bKe z+=*s8Usm|+Ah*dOGVb}0Z{-@XX2r+|EkkOMjw~7hI(orSEtN$`Z-8V4(h;Br&K_=y zR(tn){B$unm}?^uZcWYizMb_t1~Uv}t;Ginnp1xBp(jI2#bA;PzJDb~DVo@xDRo(Q zS!f8mO-=b6=&Ivbzu{r}pg)IwBqZV`E3=)V56RlN`-B15cOSJq_+%&Vq^5AcQ`@FB zBR&`eO6wd27-e65wvHx=rb^j)wW2woLeL|Kgj~+dIHRO1X|A}dS8vsl?Lhru^l|z} zw!XlCWdW|i0MpOVzUZB|x!z{lADAa>_#S-|mqsCJtQ^8GGzWg=yAOi((}JQL=?F)h zPvW4*pw|9=^RJA72edA6k9q|{S& zPY!pCSPDMiJx~*&%mD=MDX|K=L(l88K+mJ~i`@ zC12)j{=m?g=(7HT?m>K)&pGg^#o6As)|`*}+ndmp{x2CBTLKS#S-@bAl_VxiIUE6g zFd?`A&z66z3s&LrgugcVAJ?`4@~|k z+CQi9o3v37v0)8WINl?4yj5EO+Su7rdoN;4e3-XlWPj5M7ETB)F*EcYgZ z!m2B>ze1)aY80{R1(*{*kisKH%Wfr)4~A#@4X;wAgejwu?EgAO*}5OPu$Zzo2P%Ei z(OiU{k~Nk#QIx$d3^ebS1Gis7+2u;EKA7@bK%$Ix%cfD6wl*ZZj^R7H*OiYnLW!Vu z$8aK&6>?QRYqX2!vyoSd>J7Pki#D>nd+NgkB5w3D1!!PGF!>0h&QajoW_M`v@?W6BZ_1PUbV-oVcd_`^S}$o&9fs=u z>d%5)jBt5eb33;@HSSvDE;u#_zb)FWU+e=CyESG*4-J^@o1M|WO|_~(U|rr%+6ZAf zwPQ=#A^}O~4tNHG+BMwrv`E){V&|h70k)`*#j6gzX}G_8gemh+=S!^vMqwolL_(M~QjEQGHJ z=?MuWtrE~`JRtj=q!U^??t;XMtVNkWOy!yw)!3+fF=ax z3<-9d$2QN7kDNY2A10d>DV)?yv&tQ(QO{3ryxW|^di_>z?R~fH^+#6>F1Mj;@ME$) zK$16V)Kra2S1?ymzIRHJ6Q{H11F+{25SjSYc_kddQT&hQbtpvk zqb)mEDYL6%F4aYuv>L@qGAvU2s}v!ZvbAiUzo;6mKp{Outn6>_&Fc%u~bjP-VgLk*S{EiQ#~~t zK|oX)ul!B%IiuaDj(_@-*d<_Z5GpA7+6m*+(j|0f&hx$c$f+RT=&A4)1oD->szZF^|zEfofQ!SpGLA?#iF zK3ie^pSLCCFd(>a*B?KRjoji20e-By7rj2()r~ zu~%vXSTE#-e2VRo0}~8mpyVAwm(E)gPc>C7hq=v13?qhjP!!GsH4pzz|?#39NdD(LD4F4o%V&Hj*Juc*qPLOu1$u za~F1aR614jBWhyiGZC_;voXbhW|0*BsZ}NglTq78ZmS@M0;AdIUGA^satS(PL44B{ zWaR;~^u``EOW_Mm5msX>(&@#_=9b#qLo3DP*R=B{7D6(?`w8fr5?OGYtupBjY{;3a4PnobmDibmzXo8Jlan4Dr$bPwMB$-WB*i(Dq|{oU5iF*Dj3 zRvKoFwILZ@7$tcr#{)Y0Ug-atnDjva($bo8dl(Gf`}33*GFl<|2}UqOe^Ag^0kro>hHJx&Tyfd`ox$0X4f>V zmllYvq2xa=^^S0GxHMHFX;TRem3NJa-x3MereOBL{l`SSaPAli~oXD-BeAY&(^4HsBC)ZG#uBP7(L?5kT z@P&NrJ?r|c5NKeqMBxFgAZ5X*oibxd1{zpr6nu_vVuQw&^#Y_1Mg6F|-C2^9B7G!<1Cx`*jxYRo;jFf>e#y>vH?xa4 zBx3BxvNZ}1yBH1hO;w6g8^i!bK)S#9OJfh{kG3Aqy-;zFMn2PG1=WIom4prEJ}a=G{^W$f^E*Fu7@MqJWo>dr z1rRTOafArrk%NNkW9{e)y9qX*cT@1{P40CMahx-Pr_I%gOeW% zWcTON`?9)wW69TgbO9^o+$^?h%-|Y$)noT*LztHsly%K&ecx0cbzMY&)(I3MZLv1! 
zy8|2U+mUA8{#h!4(|Y=?koW=T=vb5h(2_l%oUL3n0yhnbDLBRTr{86LLm)P9dzz1= zUpgKGy|e*Tubco`())jyEkB9mZPGz^RhdAq@PzVVU@lSyldcsehGdGHaEmTjGG$M) z<{hg;eajt;Tqa}XA`H|(zaK`^+(ccrWiY@Aw;06YYXxjFi1>I(-+Qr5%4(O}Z%rJ= z4b{#@;e?13vE)j2>Y*%8h)^T|7ETRoQI5wpfw~v}D<|OnqDd{F7L$^HsYkKHgjLw@ z367eeMd+_T46~VzUulX6ul@o)+dEy42!4V5buTIY za|mQy1voxzY|ud`UY1YyMKk@~y`Kke$K;P_@|ibdXP$U-cYma$&*H|Bn{Se|F%b-B z)E{+Nfd~SDaJ$%Ge8yH)Mr*)=2U%nf*2V>YsK=-rz^srRy+CkQ8kT5Nf@gHW1*xcW z)IX6;m*z1~f1(>A5d|Wy{U!n7G1Yau_jT%aFWMpX@zqos)+^pIpvt%QV9hoLO_QiF zu2m(;?BkW*$wblDaFetuV|XMwzNS`BtMiYe#q&Tod5Agi{k8`1N6HHqTohc zDI?3q>02@k=%qxb<8S9AvuFUvA>c#bqi`!TjRFR)sTJewQThtM*Q^9@9m8gpQCaZf z^)bwj-GEwM%G2^KXG#z(lkgPGng@o<1P0s%OUtmC@IN6W=uP`*T%W~@@wX>x>et05 zS?5GNeqK`uzKqVN`o@cs8RH{5a{6qL?2XG7PE&q-E?j9^dIMg)kHxmi(BTI!q&am31gA7usaMnc-)t=B@9!Yn z8>u$1#jBF`K_?Ty zyvtwnt+PSglDTu(#P|R}wy+8yzQ55w9WXh4@|F7Neiw4V6lD!iIC4T>h}c`ZmajGq z+9n#PejI+6Sf(99Effa|$5Hp-+v1W+ISGUcJ^G(5aP;i1a4XUTJA0Uv#%(=8r6{5% zq#ZyLbxu6PiDD>o^@D(;p71?a`KZJ3d<{Usfd}XY7ZvvQ%8>f2J#$L z#+RdaF1hL&Dt%`kpj<;y2q@eBQBcc;4C|Ns%9A?UvaGz0-hT)Lj6U!S2NLd>x9UPm zYf?>#AjCxYVuUmt0u1Vsceu}tikr$D#|ennro_n!)-HQZ;;zfH{?;%@r3|iEh*iT* za9*@n<8}oEP{s63>0Sd&Fr+CeJh6O_Lgj5tv;o|zK(I<=IsAVUYQ=}Q16=eylY7pR zCh(k<%B{Jr?Eox-Dt?W(q33d`qdOANAmwogMMLT!Xi&5jNIhWlaVIrod|f231EPwI z)v(04b2x1ZZKJ@_iv>%}aOdy0O{xQ8)kO2>eBgl%k#x$QD#t6k+alb|(h6~VI*1Qg zup>1N&HZfG8)b+J7*)+;6h3tQ&X(_J0CI^l+ciP?>Bw3~{!FZsA+0y6>2t1gTX@NT zm`;0r3iqLeA4Px2>@+kRaE#AevUTHUt&C3RWdg9ojZ;9 zR$O`Xj3W_XvI|m`vjsDP90^tD@Cc}YxKVj#{~EYRm2rIkq*F6oLt8Nv=hPV~Es4KC zOUVg7{BWOMF+FJ}HHiOwcqw8DbVVXn>hNmq!68ZJ%5>%6(7Rz8S<+fc1T*)zM!WPR zuE@yJA&a?J)J_z7S}HoP@=FUBoVQ$vodX2l3yEE})TPBI|e0lkJV(A3QZ4r95@?N%Vq~ zVaB*n?e4b146+Ww6|hn9t$vfgb^>nA^uJEe4D*^*jP@5_Q1?`jWeze#Hi=M@EDBt$ zWsSs7vXab-r~P>^cTE8zaHRhS)!1ohm~1oKu|;NPQnf)baeG*(Q3TC+dIv8ff1`6^ zgkensgq}ZAcau44dHOFhcRH0P+D%kA=Uw5ZZlpX(7MTxS5lZTc*(f#bD8M&F*6`8W z+h7%ToDIr5Y#k@W--@UT@G+yd^?tL#cWJu~?Jaz3!d_fPC29B}I^Z^cPJ`#Fgyk6u z$1imLZ32_fHCu&$U$84rNTvoe!zlgch?!9_-MI_u7+-IYy(dh8efTKRhN}rCSA2-@ z*{=AMh3xKD( zd4rtb(*9F9SL4OF@TKLl8_?H@VwagsrH!Jj#==9Wa37xz6siZPO!2qK1p-kw1MQLN)E3b<-8Z(=qYk(WRGgqF@4=# z%4mbx61UVOy?q7YhWT1H9b{>H+#6we51+;zUL{7e6vT>AU?*Fx`iW#yZi1xQGU z;*M*}<`mON^WRkXPc1bLuW)*`oNDY=Q!rTIEls=y*Gx!*sZQyl0Kt^WE9bJjOcSbN zh{anZ4}2=XP+*153tFH+T-p(&8BJ+p`{7EDmAz=BqS5AG%US3(+FY0J8^zotwNPc><^0|$8;XViU$RbuG*7hw%2dEI%Uak(JaAxA-+bp&Ax^6mB40G} za_BS;T>RYEjrZYb{P9S#){p@9X$y)!&hQCvNKjmx&6~Ki?cIUerXlg<^u8BYTuW9a zdPb|OTyBs3f(eORP}+G2@Gz@9?QgAL*VXoV1$np_$tZpXlhX}9zKc-mIwxxLKD)$7 zV&rx?;S*`?gKkw(08wnS{x8E_=yj(GOqiJ8JK%AsW;yvIRlm$();%Dg)mH*u_nxxW zfE4sjxH@X|J*MI(&MMOJr6ZF8YY(%JPi6n54fYQV{wMmh(2zb2&%NRG@sg<14-D*U5kCQ))Zr6EWr4p$-3M(NjV!ZfZ5*D!yh z?AFI(nWGc^pJZ4znttCO63gwD?p)Pp;9F;NL3~qg2Uj738T6%(uT7_AP;5p<FZ2F}(7GvSANGG;$1`|bEU}{^%$r#8&7_hQNW!La>N{!w zQ{uMg`)*KVEd#1&fc`g3saE%+9WioN0Q)Mn`K9sidqIy-BfBe_HMGn)O6KkFSnYWS zuRygfUaV?oZ3Q9CcgbfUjn`pWQ4KZ?^X8kbfk}gyX%|x80zY$5oW@d-Oni9EBLF-n z^$47iy50k$LAnQ1IaiH@RB&f%7npu^*a$>0U`B*u06skApl$Cr+NCWLbOGrl-)R%0 zrAsdL6W+dXXjysakfz#-hjQu5hLng<)*e@k=Tdn^_3%3-d9_=E3!hk?6^->?I;cgQ zRvbtxZ+RH%#2;H+O%0)!=RCy)RkdYu$A{tW#{KEEG z+!@`#0A)a$za_m{0KxO5GTF{9PA91c#6a&`?Ywx|23?rC8$|L?(v&S>64iq_Ra9~I z?&AOJ;OzhAmB8oZ{H@I&=6%FQ;Oj^PwpPr9K+N9{d@K6}tk8>ie~59Pu@+7{7;4I^ zIf<6>@r|?$2!3X;^7+rEx4td~rp#aQ@mE-w)QCl`!kD19=zajZhoed{29g|OrMm2l zwKI~pfoZsC8j5P9>NRym$Y^DTmPcPXL;L{|J;K^0n^Z7TMi5polHwL}4or$1%W^7Aqc6iP#IxxHW;!<5Kcs7l#6B;;izi2;WDVF%0Hwj2YCg8OQlE1! 
zeBdQ1tgtE2lR&~Q-UkVWAIor74LV6eJu>=82G5Kmu(a%)@3f`W!Qs|s$hVi~?xoEM zwk`In*>Di6kbdXJo8T9f`{AtdCUS$T7v!MxxS_0$q zU{i%ro?5*qj3oMIhQv)POQ!3Eoc%9mHpQ!J_A(KAF@0AoU^$Q&rdB{MIb%|tBFFCk z5qz>Il_=4}D2Nv#U!_D0NX2d)3e$xPSz0>IMHl*gS?EwlK&}~HBmayfl=gZWDMmcK zaLxq-9K-4Sc1O_Nyz>0EWn3(Sb5trWn6CAi*_Gkab}Dw71p%{wg7)lA@QpX5)kl~-SvyKCWE=wI5UM?cp__W~$ zBy-Ya#enal_GfX}iM$eh2T7ribqU}Qp>0qHZ3mk)-B5j&k02yeYR0i8YP6kjKGY_E z6x1mP@G|AH?f0fprIn8mE3(QC^J2oLAAb!JIpB}pc;n1F7do(C89Kvg?rQ@F9$~vv zq#Q%~5cxi^0P`s0P2XcN5TLQk)h_5=uKXh6#HgS*0+yUV(^X!Idm7rKb}SU4b5T$6 zg#pnBV$iYT-}D&ElG@EkICP0@>|e-&@OorlTKX^&;DJMhLY$E?UK-E0>t-bUflgj3 zZ*GKgKRovzE28o9%DDmz%~^Dzql=@#a+xX?*7H^ILbg+#+P7XgyGuf}+8H+xH~pKP z0-G->%iIVPwRYXpITd=3-VC#T?TrdN2P=aV_0C;Zh_^VPAFmKKkCWRjdmP2($3==% z+X6@49XleDI?j|i?!u;M2=w+;x8ZI9fmjC)<8saeELwAE~|@A_f# z$1QV8CwrDK(i<7vNMhuo!AFg@fdXEUJS1xFj>{FgxP;HYL$}Bu&9GTrJVS+qXg{fj zMV)F*Hj+FQbnP|OH1b{a^k9;Z`sZ%%N95lWaE5V`nUpngTJhoNlFPFu0vzjZ9rT28 za1mI*vQeMA633V#h@5dyoB1i_DmEcPhFiq0r@!L$nIH2HY9omsm3)9swbZeItRyIl ztJ%(BQ3^Sp-`b7dcInBc1$0LCK=v4}Ozz~31Da2zuPdu_)j3{c9$ypTSiim-Ef_R6 zg~45@NsPz$^37A{Jof^`muEjc3wRWoZRhb#XyaMWKPna?Csf#+Mk7tg8e3c_ErUpO zRpa10%1be%#>%DC&<47&ATuH=j=-x=w(>s+7a6v6XRdX3_=)x-kKc>qcfJ2!1igOo zlILMME1nB6MWM_%;L2DPITd&^h26y2q5Arpj9N4FZb!G;^YXmD<(jE+aqfJx&4d^B z9Uf6oR!C|TRcU$#z07$9PeXVx~Z_*j34>yr_N+N zbRTvi0wra%Fhry{{_dzLwFR0d|8CZdhSd=#_+B-kf@^DD#GVUwLjryX7vPIUBPH?` zg+#{Kh(N(_CGZRU-w$_!e0CgL%@+|R`S}AYX0$(ybQWCI^u8GZCqd+spzEq?#}a(? z$IsD%X9lYXrHm_iCbhQo*xb%hTIbPB=&CsSgZ-D*v}@vRUEOObc$E00SSuB!(uucK zAdd)?lfyY<4mB1!Rld9BasX)$IDW$2%v8sJmwKuyZ7o23u{}41R;-!A<|hqwTu^x+ z*3NynBGwZ`f3!21<2vU{15i%?MJ?{nMvjG(khjA@w@*Y4yvSzhau4~Z-hjYe=LK%EO#=- z%Bi_xsF(m^L|sRzk-pg?Ivh0h3misc6R(^}hM7`k>UF@tG-}yexV=!a{x)d~QTtL? zixbLSC3*-r={1gU9DVB&AjD*(VS_jjbM9pPpFP}hz^Lpe>|2sBQ z4p>`c{Qhn?X=~-Kw`cC%b+rKtYhWk^!~(Oj6xq-2YAPfQBQ$Zy5bcH=wf=}W4e*(h zpADF~d8>5&rvRefYlrVYA{FBbKc@}i(ktLngwE|lnExzzU+ITZ zOBFyF@p)3{ViwIkp!9s3*42{s^W1b>xR#A@31ranY^X9mam-W=1#6V$nq);dF3Kmt z)hdCoSBM3O_dFxH350U7C@60c^2U1l7UanTHPT`+NlPqj^= zsGVu;kKxN_WH05m*%^tGLR@6axVV_x4=TIGW1%=;#H=-nUeFVs=xKh{b>T25fFAma zJ*ynPgzKJYES)@GiRggtKxmC@DDB+c?j`q43lOD~1l9(upCdQyL|nX5taxrEMq4?E z>RTJ^lAjbV~geVe4B|X1Bw8D>oGrcc{dH7eOyOd)SC?n6`3-pv&H4tO=*rb z^CAw{E0dB!=Y{nWviFW^xX%ltlhIwgC;JV!gcp-9fmyN)UB_$ChO+rnN`s7!Xm0aU zXfK95#lbAT7s96vFrH2J_zxFPGpO8KlTt|-4>#`57EfF?9#UeT}_OBK{fzhe{;^uVx6a2)B zrhiXlmMq=YEg`#iK@f!{Az-1qe@ZjZ?p!Wg1IbML7C!Q06uw!pt7rTn2zSAt8Y|usJ?mle-YU)Q;5_(^m@E1< zUBwxLlevi&uSS&Li{EfoKmqI=n1NPATgpZ|;0VgES&TVi1&1$MNbgJ1-~Q0>ktcs( zzg%@t3OV$vfw=i`YgLM*)ueFD93pkXBF?4Ln`hoVAeJ?m<&&Zd?%H-Ra{UY)DwpN= z9ApFA!NlQf-|0-O5@+W)x#34Th><|XZ1K}Gf@?*UN59vN$Es^J5EW8If`%B+feSt6 zHjz52KIfLGNA(pbP3}<=SHrPi%WyglPim_&-ovrqk3(2*MB8&+9`VID`dmftc)OXp z?Dn}p?_v1G0tG1s=ZBo8!Ki7T#l$@XF3HqdLH^iVJissgfSB`V2Rc5=VKYj@2Fa@M zu!ve-Ud-2tlw`3=ad{`K&znPXZz}FR(B{s{V%R|S%uXR6A`NYL4*Z7WBUi+4tAjsM z=dAcX)wKh~D=DZMU3^*gZ>ubI$(V#N-S1qask<{_JRU#=IZ4^fd5Wt9CPnNao4-Hn zbOv&}IUcCL(JSXy9!a=ajy~+-EKLRGmvJ#BtlLZ+e-;AG$x07z_E~9m zs+Vdn3($A5$tiG|(Ey3b4K5{L8Pi0pt-P;z+HDSewV~MMzD?n>r(FTXiHoag|9_`8 zbL3wn7V+crvw7IDF3eq?X|JMi zAjbO?T{r7ESph0mbbWmFKi#WMlsm00000009A?6hz;lG)A0g z&6;tdLt^QJ+7Z<*l_&uwG{o07F(23RaUM}Hh!Jl5-xD))r`~l|Kj}ag$*J6{P0&@b zQV`kiL)Xsd&bfU#Q5YU3Az>VLV*^#5c8FR8OSLVsvFS8cN%IL@P=%oxV?!6JEP@MD zead{RTA=@H@7c1 zQ)Szi%sUCzGCGNW^2Oc4RZ)$jpuL25iylrZu=6;S1u%e_%%#AD>t{Aq6!Oe;nEcou zJA;B3&;%ZO@`_6ovP7|HWa6PutZtN!t=kmtnvNGJH<$I7w#|bugh*^}spFQ6vY0)I zzNvJNVjP>`iW}y-Jqr50KJwkW=2sDjnfk`eHD;XM;I)+H*BZDlLZgPh#wRxr54uuR z^BTn*Yrbg97JW{k$fK3*=+Yge>uGR0_} zT8dE564CNMwk|br5RwB1%5SB0aV5$`%f)m+RVI>H;Nk&6HtSr4TH 
z7&q6!wW&5Lx3pGygNpzyD#S~F92)?QTZVyd z*kL!FegWiXa1U!>1CgGX^=Rpt z3?&76YS3xyu)p`wsf^r+Uw9TBu|M9-jcFNojIvz*y~i;K4IPNHVKiqjZx&xk6L`uZ zHw^Yd;U9aFy>aKbD9q7Rr(-`?)Lu&cnq=g>Y~^X(lD#F<1vl`k99fRC=D4d?LaK97~Wp3@nJ{a7EptjydQP;6F(G40z3 z1Rr&vOcTlYG~s_aUVvG zRv*rXu4TSU;m`65?SX046@Uf9s8@&Ml!SA999Sj8#O>${*$N-nYnnkvo&Prgf2JLW zlD8*}f7?fe-1$ugpM#9+pTD6Zo03X>vx85WORMs`$=9X51U8VEwqovh6fHN5-UsSd zVqEnJl&Ts;-LIL3V)ui}^L1!v*wR9-aNtIlCq~XMvpmaO}1bYo~}$I?s~|1EXa*ba&p!K1xyJrdbyw)(TeNSLg3DN9+?Pf(;sc4ogH zp6Sn|T@!gHD~baW$G9#gf3|u6z;}3e`N+2a8dKA$D{%y#Or+mROuB#IQ$zWzaLfXsZ2WyG7r{htFzOU~S@m)oihGF>$GV%|#RSO8;Il-N#!g6ch`xALg5 zLh8q;D%<**4SZ^FJRj777cL>zV?{MWH)w5*;veg=eK*R-%w3AMTtRn!++swQiXk?? zGRz@NH5jc~igiAsjRz{~*;v99p$|EGG2geI|r_@HlSiu%RXNbi@p{rqW+Uon(x z$}qh4Pv!leB5|2+QpSmAm~Par0t4G)i)~YQH-!AT06k&&5t^KI2^yPF*Nt#)(rbn% z*62Dxr@Box>N5y&11ccI%v8`2R`*R-(-8Jr=U`WC1)T1=GTgXD2W$!e*VdS{#L|le zg`$fddPb?94DFo)6w1;&V%6p+z!K%Gew$28U|O;}ZtW!g?5^trF>l=~#agW6W+4Oz zld$foo?M-Ih_U$z6HPbfV{+fCitbGdJ8|R={4Bf$Ey(`}Q?}}|1Pc7g5WJ|hCKjm* zE~8reVa75pp>9qr1zBTMFb$Dgwpzr3$7{As2qtsycozaO@@dy@&KBdb2XaTh3vse6 z1P7@reSSt0!Vy!`xNim!MvOJQlYJ2j1QWp4pvCDS)0l5b`_}-vO3AUCnh@s`QAepr znHTOg8saE#X^3r$&m2#`n#l|$C;i1#`ILc!%}LwUc`p`B$u8)6o>^^&tdPkcZ{svtraHsE`ktj;Z={+tb-w zEJ$Vv+$q{O1l!2tg=Y#Wgl}SdVC%<98L(G~=IL}jCEkv5e9}68@K-B$DDQ0JjyH#BuX=ZvYm=H4Is?KOr;BMsW2 z8@RaMlWAsXo17@xf4umQAI_~-s()_Pi?A01SXji~eyq=?e@o%M&xM6JsQEaVG=iA* za&m0BlCz(tLLHd8gHzL^U4?s)SIHG)CUTKx8&W7G z#Z|LS0Jds14&?QP$&6#Q-}5` zUT?sdZk1qd2`%VnQlNLEEw-voPYfqepcv1KEp%JYcdAfV5*ltKED?iC#dMa zw6|OAZ-`<-z<(fO!XSZj7$*xw5C%o}@|4oaQ*VILP3Mh+Xdf1hcW0>?M?%jVMwzNa>=l5Z6pYr!MsgHmCM?4&=G9$8?boANRvK(g|Jh>xHKDL&WVl6B(PBFk zF&BNh&6@&&)ig2v!}1Vmd{&BaAex}hy+ZE2iPLeM6$am_&nXlTO@$#PUY88^3m18#VTCbPm*g? zFZ#f+M8lkpgj)>LWK?j;G5#jpb7!wU!<(K+4|_&=4>X~c96LO5gnkc1HbMTbrI2Gy z>6-fz$P+7aLP(=^vw(|7$fwTEfE8z*xe02gh5pvrcq9bxGcnyQPB+%drEcLHBmy)^ z2D9$Ys7BxuWmTg-GLW)GcZT|$s(>$oL=a-R0RB;V46|rKFL0`$&aN%E)&NIo$P?MJ zx~XZDR_!M_Z#_YQ`BqlS0zKf&<~-zc6OtyGnpajm72p!_Lsx=CkAWNJ$y63IgQ^tZ zY3vI04GBtqEaZXT(6UZsbud1c%^Fi(nLGvZBC8=8hr5!<#cS=FFiVEBW6MtlP7ZA5*HSrs)5YspKsG%F4ds#27fI#{*HI*f6|@~lHP+K06zU78E*P5S=&}8h17lT4aQ({ajPd^fhR+?>; zPu@@Nw003a>gPzIL1vQl6}3b?yUWHF*G>fX_6+SSzy4ZRaDxw|35`F9zm1>4sb*^m z6{*d9BR2>%BVPxa$dD^LRM9jO@)@-B3j8qXSOkDSqg_{<(L*L|>B08V!XTpq44cIa zwl9{hi8ly^K#62ww*ZB!(bp3nE3a63NG(Bq&L zA9W?}gkDsPow8lG>0auQzAeA&6M17L zWf0{#`?AhtViHZv;*|f9TNkkXXrfYHm+2AeTAM0j!mBXa&JEq2Xk%i7eQhM$4njfARj@94RFvE}JY7ISUq4F3PVHLUYB+Jw-?gzQMM{6Du zzIEP(nZdX>V?E{82@%@I$8&mn!z`=O40=7UO(P zem&Q(S5q-{xT&Kb3b@bkVI2pIMR0g5Xq6XZvpwO_zZ^drKz;xMq5GS6yFx&|?#n&m z6B72k`Ca3I6mxADw{W4L7lB`Cuu=&g5*U}ns6|Q}2#D)K!HwJqpbPP(U_0&FUu_K~WF(=!6IIwnWb=)llk?J$V zTg%{Mcqe6sV$Anjsq}iWS`W_kpJxd#Z}OXztmN5L{BK}j>wcO=$)9vS-fr}h8L*+NzB5xJ0 z?hsMjTxRurSBRar%pd!(-URZ)T;TIF{2sX z^CoHD&OeS~+@C!XMCHoCuk1xKr$?!lZGPd~!_=CTsfz3MNa4Pw^*_a9G}b-;VYO54 zr+1!atx}*+fdDWtWMi%|0000000BXp7Z_9j^km=bpWv6Dk~HM<68tLo6=N)(Pq;&U zrRCsm$3`G>jzNEv+ zo(xtBx%Q98V|cTAP!5;DC&qh!=5=3D%U(F%XSKa>>{6aJ-v|YVh$S1ju=lF8P)Uq~ zUt5I%4;xlW+d$;=kOrD?EjmtZHCy#B`t&46(M7#VPHKAbc^E(yGZlO(<7+vawSMby z#HzRLYy7exv!bN>S<2zN=-a@ujnn9q7Abg2rv_g7Bal(OD@%<5dhv6g+0AjHPwmK_ z2uUCf*A`xtnuelFEk?%VB(?>fJ7NcL^B~yOvP68H|7%hXfKv&l^KJrPH`l{w3v(#2 z7Tj=gUd^kpkTKE(sYT*pQiae8<%|zYD;6@U=`25m7q;=*A_LhsGqW!wqG$CH@QCw- z;UMJ$Lg6yCaIxRf%JuTb6bibJIdFG{?;&p)v7oepK+dhr>Lw3yd4pYnEK>f|GWJjq zRmd$`j#$6Qr(2RHR2H)UCmRR+ z9@IwVtmggM$c7S2xbnVs^aS(jRlJkdAdnN*gTU;h_ncakvOjZlzf5$osb3H7F95Tn zh0)=`L!Af6B&zfONIc6Bb+Yx{mutO6s7L18M#$Kpes^5*cu#n}VR~a&2;flB(YP!A zY$PW$m=;7=o`)9zNi?83s9I&DgL~EAAn@$_&?4gKfr~&+rr_rgR**Cwpd+L1Mis$SDT<{-dZD{nhRJp^}BfQG-r7pTo6aq 
z)wR4Uf&XJ4sX%{1#pV=PgdWY0_b)s$!~`Y=WdrW}f;^e^@Lge)A;Q zxoe0ds051_?REDsIQ~{j} z+Sizp9RC_&OTU^aQTseSrUVRg`1kHACqAaS!N`>6=Dyb~M*jZy`f*~tO z>&tm*)*{eB58TV~@6xh?6i23@K5u{^d@Bq)Y7 zac9Z&v%2BY-nuoiYPm0iC(^|E+8IS)__RlzzuMl5)LRIUrxzFz%h@^DAp#IMm{dJ# zq6NEj%k$K0bR|DH<^&>#dJ(vy)F1S+yw_~A(zH0SY ze+U0zva|F?GF~N>*o$IC+ECc$MF1>-sGMOX?@uskP;?k7h_u+<^l#g@1KP$t*t>x5!TIw6TUHXmlfAxSSM{~+?zh(y zyc-4}cIx+MG&C=*1wzZb361)@iUP1g5Tg&gxV1TOny(~Uir*V#-30p0UH9)1aI}^( zq_8^^c`Y3|5yD{TUVze1eKk2;jZjTx>N6hN)d64^o(9!-TPwF;ac+28<^)DBcr@D{ zH^@P2iJ+q;9>-6-_njom0M(91w`pFJk<+?DEdxHDvyn}Rwxdkol~>mP`H6)7Otl`4 z>F_@&;as;P>`t*9wlr)Cn}2W6i(A1x?i2JyrZ?mt#_(Y8xBL?xB0i})rKqABbP|Y0 zSo2w*>x&@(>%`Rsl5;`EZg1Z**M&*w{JuB>iNG+)ByV0Y0w>A{L(8-r00xj&nvk^4 z%7BB4A)%5)53v05D-)Utl1@%L94?w9)u%|-zftMa1%HF}($aB;wX*-c>IHpFY{^bc zu5m`OE+su!YgPJowR%LzNrc^^tQF6TTv#H|0$BDAjt4wdJL;^JF``+UpqA8>h2AD< z``HlhAr94#+z4^*oHT-b+Mk+Z%1iTHm}`^@=I8j!0!!SvA&R@yNVAqZ?%rXvjdwkS zl7BI@(+5nKX7SZ=P>*E4gOXC!y9Rr_X&AvX?{v$bIg)ScsT6}t5E>)(oXqE8sOwUu z(L0e?UpG10SI|C6)LJh+>+!h#;QdgS#J%h%rH(t5I-iGQH zA3R3Cy{lgQK9Rmnxtn=v3?K3uQ0x}d5p)LgFh63ms*9q-n_Hzij$8&s^arSC*DGS4 zx$r&XdjnrGGxwx29Y3gkyi{rz46!j$2XB&uY;QBNNa>2sSz)~I+uoqgXUCta*uX%nGi`yGUo?=EFm zgOxHRi@3W~2-kpXrSPlZ&7w@Z+$^rS%-6yroQULweULP+fCl%eT zBhW5EA8NGU^P8K}+{m6ka_Q(f3~@O`F2lENG?%u)mvT&U`Kp_bI>-=^*8-1N;Zk~A zBUA|2%he49QKhkfRe;OYB;Q7gmjjMZGq3@Dj18{YZa0wE!019w zR;vm99S{J34&YNlWHzfDX}xUBQgEbj zSFDX_bV!xR*r3FB#_YRusJUO$e6j_~vC5^(I}`nujL3h22OM7KGLG{QQoTmU8Ng^>@ME)GF{US13`}qJssx1up5!FL5w-5~mzzS#`U3 z03q*6|Eq$$GA1@QCYxR*-p1MELeGU(OBxl&X5(Y`m9))t1?#8}%|Tg*K=ay>g_cGt z#6mqi;4EmXoN;6Z%!WGE#}b0u#bQN58vdqc#FR|pohg)PCnwqgJnuW zxf&eDwAZr4#9*eRm%#7TL^H%QUNPn&{Orb8TaC|*>%fZw!<>?oY0OiaR>GMVlA;5@ zCY&)GbMpm=QYzHt4aPz$nZh#gD)9`{Gv-X?pKCZbT#@{ z_uj>ZD#N0u^$&{?$@uvFPt~i?r4@W)OeS0uoKNBtBcjlHHBHo_6U$@aTu@K=49~6PSDPgQiII+w zR!O{-R~XXe&VLOXqesVOkk*R8FG-&J#*j#vgUEBPWBhTM2A96++Nhq^d1vdZR?Z9b zV1{H0>)x}l;+2HsJ|ew@HTbMu;|9ckwM|r+*4j*+BuB5!F34BlD7LS}Cg-7*moRdt z58?Rzz?}6tr{ahZ&yDHk7famTd1*LfHg`ad-$0UH3x z`TrLdcv;OQ|9rgcQe{|7Sc>}4aXgBAJLzYXxape66vl>Qa@~!d)I-zB_>vt2<&!~u zPqQo`Of5*uM-^EIs4EaL*1cNkjYU)vQMvzBM!fo`Jt+21@S1b#OK)Fzqr&yiv0Ukx zN2pW>KCHK=-tq88p^$>8O$SH^$}~nD^N88XYCfF`h?y*?JfihmfOdQ)g};hn7gw97 zvLEx-_U4M{R&#AXxB1#W2I#C}>v{mfXMBzvxDf7G7SzXP6Q1ym5{y%-nHqzl_8T&k z#UBEY>Vy6upeDqFzAn!}Shhyouw4J;`5MrluFklNh1-dTHmEG6a5T?&<_n%v$wxY7 zk8{x`kRVI!3{EDddw_qnR2&>JB_3XpMU2J#INOl2FoZq|o3b>; zmx=1}!r)y=8XeEJSIwD&$G-yH-OR@US2jHn63`kB1uz$R6wLl^EBf1Bs`Bu6N`IGF z2&?hzy0aoYNasJY-`?aLp%hc&)n|)}bZ?c2DwRe##J@apXkTMesuRH$>*|9xF9~(?iJLaKpu=i$mq8$5xK!e5SPPFBVfE zk4(wN?Xfj-q@a~Tg<7{BI~i$%Os?^PflkeKNY{x2yM5xQaz=IiRrn`vsAwSaC7 z0xqdF4*v9{V_20(5lD-j4pms)hkzi@wRh&Te1ap39!o-XUf|vzCUD_1iAC>OwOlNt zCL}#6u3Jw{!~vhHvt(v3k7X!n+s=~XxNWr7A_%5AWGyiTM(+ORqUsYR19RW1%c^Xi z>QC`mAw%F%qff<6 z@Z*Nk^fcC^kq1a-_%E$_x>;v&fLls?9~$?$IfO>|B>~Y^ z;xM(<_VITJi`%#tV=;=a#txr)09h;&L~3fSR^4ce61SjRESnJufmxt0@r)px6h2bC zL#1o|kMOqYJ~sA2HXukh$8|5L=jKkyPj(eU4=BoM%wx(2Qm-Zz92TMbrNqj0BRE4L zs5p;p3brqnYN!`R@I6j;ij_1&NHhhzZs~&{HEz;L5wHd+8`1N(fBx*8hjr>X?1u$A z>Qs#S`&a77TU~b_tZ{cG&FOBI+!GNcsK80S^twf#DM9o#x2tNOgMYR&dORE3=?zQo zape-qEF^xxpfvIYD;hFgDz~sHDlHe1K&V4JI{yClXJ?7 za34Ed8Hw_1T_;taLUHHX-$WWfcS~Fj6SzpiOPRp%rVEQp70~fTA^eKv`YTx5yNLlk zOQk*)uP07$SLv0?d}@mlz-{5SQtQA(yb2!}uMQafc}BVv=bDkM9RE$f8^6H2{3Og_ zgp<_+7f__%ez@^qS2N;B-zyzUI>}p~gFe4X!uy1X0DsacUOz-z=L;nfqjTs!bG5>& z^kOq|U+0ojM$ALFl@k(dx7BDh6is&lVbuQ9=9P}9&zo<<#Ze}h2=uy7cmho`GivYJ zMm>$E_cH4AW2hUm+DN2zmr;$PANyLo^ansmIR|J>I~K(`{8qwO7GRSXv*aONhn;Oh zXgZA-aw)Fly3T`)4AAOif|!6F`4=NAPAbcFop6d+@n4^E2^7r#8qfN0Xv^G>B|rNjBKM_Q1EE zVY10yvGfTj1YLRv04Z++XYtaYa 
zfm45;icvemt+1sO;;itXw?m5lByyoyY^|j2m7U?lR&4znkXo;7xDO0nqD`;2`_9xq zC-BlTUx*JT5qwgZ-PY64r~|1hh=#Jrk#U%aZvx-;mwMV562)#yj&zifrB5J*>zWayEdHO$>l}j1|qwbUtO5 zT@*H60QwLPZoL??FiQ7Nj%nq;%h3M$F(Wu^{PqF?n!=@;hWrFRz;-*Cq)|PoQ4g&_ z6U7?G$x0e=$~T%UnQj5&Z_EPp`JuGO@r|w6Bl^*v&VW}#0+@K4DlRspyxUvYcH`ll zLa^LfB5Z$eH|pK9uQkzf+}E&%Gj)>oJdTnwL`CN#V$pcD0ZVPzmsf8F4ivSP-u%n` z_z+ohvn-*AC^0B$n={@n*PJ>K29&PmfV05k8X|Z;B;B%_9w~4F*KPxtmQ!=vZbVhX zjU-W;PsaovGN{^%!yd|^y}+vZmo8JR}J2SnMfIpbk?mueOtk^(mF5#3AXHz&1j8uB*a}2fCmE5{hE>$ zM+mc(#rL9Bz=9GXoN_^kg2EMd>lvv}4ilD-JZQ56W0bfmVOG5@!Zx9223FPwrDif5 zuBjyQW#*C6b7Dbg#F{Jd>m}rXaMI+uLQv^#fZkoAM`VdUqpY6$FuvzKN_$mBhj5?F z^WNNFNrgsxFx!9A^epK-SysTKbWakdNZMh zY8AK1KK}9u+-ck#(3I9!C&+TCEc*y4HnLt4L&ybu^tM`hrG>>e1Cf&Rz_l@`BN;aVzjVDM&{LF5Lj$*jtrlr7Fz&FD>k)k2b(%4N2KPs#7M= zV5uJ^#Uo5NiYD%}z9xlU9y6GL<R`3~CgPwt-X^<>2-ruEi(b($EPe;^!Bdx#pWN#woe-(#C zO3n=9wK#V!6YsI74rcNyUu-HXSECryXVb~)JRg1 zf&iKQYA~b8EsE39N)eGvmC?1jnFDZg+ zpc}EKA2z@=B1AcgfKwmjb{{}A{^XjPZQ&-Eo737yVuyN#7zyG<%3Skew`^GicA+g@ z*N2?RS9{-aLSHJIs*)0Y0|X!vprmYq8>9KtrDZfy7SQ&Mp9{NwV^-p-Rt#_5J4yUc z#-J(8dUl~ER7^t=yEpr+yiZ~g$+c^%HdW~-&uDESc+!FMV@!nLX~NMw)UFG%qyC;H zi7Y!WeB@DrxGoz!YegU2aJ2Rg7ri5cSuG&`W#n?@gLaCR9&XL$!9Zb0``52r)b`jm z`#^K#tU;C>uc2VOBWaH~i{LnIP!3k_GxJ^hnM7tkWuGvh<6AERnN@RQvShOSK{{-l zwdy5kg?O_?CoSmY(d08K&3h5PEmudDo&7+F^>d;T1E0+J9b+~)!UW~UaD8q~D#OrL zd}ZMTPe*!=3DJD`0j;E`dLuDrg_0oj4KK@AW4Ztx5#&7rnx14D^ExEuc@TY9rqg~3 zgX9ivmW70<-7L@*W}9DujHQUzGpk}?7$*C)r*YgzPo4zgGSgB8=1G;%s9aNY?w*nA z@;)-c^`>{(4Nb>KK<^ymPdC%-Mcy^Cp ztuC*tpx(f$L2%!aTf>3|$2A9D&7*-S1|V06E;oo+FR&Yx!lmw~JO88sMoR(fg(=eX zk%%D9h1(xu9D$u&#=TiBk;u;d&cDRd7s(X@h4oC#GlV|#Q0GsbwqMw2`h$J0Nn(6; zhwqji(^8-sC@D)ZWTlE#6oOe5qm|k_8P0N!q8V zk%hYjLnO=H#`V`sRs?Jm>`%#oUI2&1Q@#;T?`R_WOVFl5DVd3|fShVxxecI|-z9jXc*aLp&qu1ugJ6GGskcZfd zVxzev3$JqpN_g=}`6I6f`@+y691g9H=b@}K7zd%_NU2nAR(^L(D3rsu9)FXUs@mn? zu@IEEG9=lf|NFiCx?L+U=)LAxvoAydO@1%yWY8)g;YQ?jD{WZH_I-csdqwSP+A^5k zmr3>#Mo}wKT(;bAgtk13$uX+`9K?GHn2Z{`~UR~G2gJb zq??NDe>u-qi7a*rj(SB6g%lg^yjjG0 zfvF-jt=B)M81TzSGfsF*N^qMH#W$xd6ZyDMH|A~C@oY2NJ?_hC|0~3GlOP6W2z{#5 z-%!UtWkjC%DfUiqZ!1>vpX70~+BD{r)=<84`!6JEg-J=badu-?^cM=+t27|4--$pgWgD4U@WWfA51}l+ za)^rfVne%}QZw{E8*9XT-pr3w4y#X3jq{fpf>WN?W`l-~rdUVH>j_u@ZuJu6(X}yO z0XkUx+~b1g#et)op=^6yp)uhMvR5b%P{rN%M{x)x^I{~jgd%p)X#Ka){ax4K3=7RN zEw+1GtYFY)?4*3X)0SKHWC0tanJJVZnuh!)nChD-pNtIE3Yn%@3e7xSM*3h3{aWa$ zDDz-i6rR?$LkseHTFEhK5s|9Kg3GwY-cT6)+u8v1OyGmlTmJX5?W1)J1XcK9tRUIvZo`q`@#m$P(1o`EV$9UFlz~MnNk8i;(9zv-G5O{Kf4>vabdRD;jV$OP< z(6THUrb(EUA}yg$$uiL2Zg1&!9l?kntYFe%X(97#_>xb$TY^dBYnLl~J2W^! 
zg``ZO`$bi*gfR@MpP&&)7n!qc7IOb*jX&nCs}I-cs{NIZuRtdCrdSXzP^iSrM5NSJ z%s4*%yqm*u5S4p1`7WXW2?c|DuJ)tXXi=;-m4_iOIS3%N_q{7AUrt%G`^*|LR$F=X zZw#T{!XD;!uUoi9Y?9kXYe;UGl6Ej(w$6_<-P}KzZr?*yo8h_)943l=Z$-Q&K*^BB zb`{K5e9}EIYOR<8LCVRO)q!X-jlKjO1=-opoj8yms(T1pPDt&IV_Fe$#A>GT&(Rzc zqytVdwjuP|e=T=hQ+aq@%TTW>a#C6No7y9Ox6)Y{?T<@yAv?jO7wZ2{OferM4)#WwiXgXg^xG7A@}n8(mDj-Ln_{}}k={V0G=JsHBSl~9Z^9SvhfoZ* zb-3X~hP{7VG(X@iFn1Elmiju=Ko=$W0z?Zm5ZcG*K(jiFLT74`P z`_Lj04jH|2AWThoo{vLy zFptDStabQE$dW;nnV5*v)FW(X#s|rA#mTAdG`lNBM zKp#y11pxYJw+`pKHk23caT22aDl-^iy`#EV8^uG+qSE!V_lzX;0Ck;uJiEVh@h3>b zLwG$;H!{0VR8ro^Y}s1aUq+Bi+)ZNhg3%hnnZk_p z9}%`fClJMs=j0Rsx|V>ps_rQIwMz1Afs$W@>QFIY0H%itK}dKUeO$WkZf(z!Jx4?L zh{NIEyO%%bY_KhCRka6L9B&KwJsaAS-ZBR9ON)Sb=hsf~_y%^YnuS%1mTH1b(6dVm zZe-l$_Bs5qTmb+IEdlL`p*sfX6jLTYuC<_Y>z&^a0he2l6<6)M99dU`xIrC9w0t3O zGoUZDT>@uL+5G~g>CX%}%L-LRL#q4oyA~8r+5MsB8Yr*yWJij5)O0SZ8=uT-``t#$Ou2SnnA>F>qE1!;#-y+7z*XJ;b+bz$~bn)YRZOJB`D zAgUggM8S^n?MPjhfN}eRD)G_%t2LeVs~Go0tr}ho$vsYF3{ITr3ENudv3GMtBkCzq(B7-j`idaZHIH}i;!^_-Wxj_$yJ6%+pui~IuHe%J~i6b zRxo(sF@|Q7IlCskqNZ_plz@IRXJhdIiu-z&z2WRDacI>V8-TJZfi!82w&;u;mCjo@@e}Bp@?Wg4NkwKFpU#eH(9?nhGHQK5EzQEF(T>cWbcH%r zsneK}>>Zq%yF=*xW{$!BzV-v`@KJp@kfQcha6H{z^It7=N)ybqz~6!nNhWx zhKaNRBri?VRG%=qEALMX?*5XUXP*7Jw1TnzNnnilsQ!;NLB67Bk1V2D9l&aM-`Q?ELDVD%UZ2^Of{jT-H zHbBGY&#XqE@Pq2vKdkf?Gwuqi*`Ab5HD&@0PlGTtV|4eDtsOoSA+`3hR&78^b3?Y# zLrrM3k$~I(mp9%ZtUi^wvsU(?Xk7u_A)MV+!7N15x76NJP;kP9XM$m=1RH}N_%8-_ zv`{pOa-qfxn%@kqjX!PRu~7;@c=Zp-U=u8Buk< zNO#nRbL8M(lMl|QC5rilw2i%bW{kotoN$XtBszxc{6;)`#lof8%4@r8Q@t&c92}0^ z`<>Uf3|ZFr5JVW#+(*Nr!A%DPU~+Q>7+w3Dau zo0SeU@(LWo>zMM>%~MfwgHr+_20c7+h`?HAqZ4vC@$5i$G`}r2S&o&do(x-fa|n9Y zf|D`XQM!vevOge3E2rw(C>!A`J#mf=|73t#8K$Ebc>ZkwiK6)VEad|RM+P#s##D_1 z(c}uJUx%)SLd?}2fgxBT+nuA&9AyrscIN-*Mij-VimR-b3}5Jo?`;_P<5#Agda)SN z`1i(^YYp9na(6jlbk4lXq|5I10y|p6LPbrmD>X3^+R_WYd5`$RoU1jM3WR|D#sPD8 z$`tSy6rPRo{O}>nh{3WF?u;L#6DODv3NB(3Tv?V;_-K%!ZzWheFnKKrFImvf}D^9i?M1MghACN#9{(5 z?W09tvWjT;t4@s*aaI}kiN$(TrT-i&OoM?`a_!2}L_oLs{ebHY-M|g)-|!K=r_DFz zxRFV~2~HIRykq(-YgODP(UwWIs&>(Lq*=vlIb26zG@u5xmWRg^rO*jx!m z(>hWRl-pLr!v=gCd9PZMUM_-87_LN_*QqJ z6IkBHNtZ5ao4Qt0P8ck`Z7c+zC;{qD3809gZJ3E%eZCn{X9&lL6OjbvHYfPWnFlAUwwK=$@-U{XW3#I6;8;FllX#?c4X$$ z4fL1I^=8ZGK{dZ@CjB4jo9Qb2D=RU2$g)3 z;lP7s0^912C-mcAvR+vNH8ssL*s=&dHW&!>W!fA_lg8kgIw4JK{p5pKYJntFMSb+K zXf%vTiXXJLcbmla_r`LM*3oPC_U>YXF|ra>XPyA_2dmQP-Ur$CZ?$5liq0_C1ZyEe zBC?nv7i`*HERn%#{5;wmJmUD)z#GnFDKVj02pr6y75iM-G=EOC002NCUCdY2DWmf2 zf)iQP|5%~z7raJqeH9#SefhI6MS@KZ##A#IDARXEgkyb!UYb#@wOw;Kx$;jghs^Q0 zo~MQ9IPWvGHN|GAiVhkH2e;O{X>TgC6V_1*%8ixq-uN{brRmaLV7TN5YQ=u2?zZc- z&PVFlkKZ&a>%SkE8jrhw5`hr0xt{IG_2tQ9x`t3PR)wg+H?l)QS9UEKzBO;*Fpa@< zOSqN-8a;#U`B%=4XUoV#PM?B8*%~q;ItuO5jNLy^P2a`(FZsmMdoFZ_o_w6Z^6d9l z$L;Oi^*tx`TKeSmy(IIP@TwRt3T4xgCk{uWxD>K=PA(Ay>LHOO(wf}dUPr_$4dKzb z7o$F)JaE`XehD^WmRm&hVEylnrAW@|Vdrl#n^L-Al{a_!fNvBZ+c)(ASG)ohJsjCi zU*PkwhXy%+16@F(?sHoc4KIB8_K-LPPVt+y)-t^*uvlH`8&g>nYgX=kb3G<7j0c_8 zszdrYMyn=*f^C>$&|FrSAHr zWr&(_ED6eTU*PN-DHXbs?Q*yQ@jIDxCZ{cUPR z(_8W%XX?cie_UN1xD$dQ`#^*N!->{!GR!Yw;id_|CJ{qf4bks;eHH3K?|T=}2MVO` z%w`*{s*#+>JO?k*&=!cL;iu3#n1KwZquIE)2Eb#`eA7@W^{-I4n{&`VS0Fm)hR_e` z5-er>WmAGp*1HP!SCu5@8nX}GccbolL8tGN6v^%6AqDK4>mFZtyx!-S0+8gYxh-mJtlk?Z@SseCi?XQsR6 zf3{p%tQT*#DQ=zi*EF%HZPU0ABTP6s2W`Qi73VP8!9{=gTkUs(f2dPsWrezl+h z>UMu$;^95{X1{d$u2}JVfBq{fg4)KaIUO;#>xvRlxQpO4X$}{FbvR{hM+N1}^I7c~ zk|^}F+;Wt}XUTK>*!0}NimspYm{g#--;mAtWzbLF8uDlCP?dica_@My%nS&}M`X%m zV$*+MWBHi@+Q{Z%sjm254{w)W(9h^V$!pZy?#-S0!BA|;no7lTa_iggI`8)i{V(eO z3~D)W62aeZRj_RMtn&I*nBrUI3~U1JP^vh3vd{{PR>gRtzafg$UDNs~QC}IU!NvDLTKu^}KOyGla6ittE3j{(hUNNBj>y 
zwOl>!F$KW6zcPI>!{G5%K^{20PJukddRcVBh?f#u+uCy(*MROWB(z*}05UK$qY=le ziC{})JA1BOVbu;=)uuI0-^5b$&A_v6BEUc7X=|~|w|b@~zbl9-SR*AS+#Y~&F9_qK z%=4@N?A%QG(36P<-qFt+G;60txc4L7aIWE}8j9zIL6GtKrzRenh|uL7_dF1V2w|9?WwmIsr^j0sd?+~UxVhXa)Qv>wUa{#(ATY%pFSZU_+8Vlg!+y-7}`K=Rx5)Hbe;-DmIjbuC{@fx*p5D_x+O)e(RyYP?yd{^#+_FP#*K@ zV5-jAKW^cGNd1g)+G4Af`#6Qv(AzIyv>h)=Bjq{VJ}*v_)! z-}J0MDhy7Bn%Oai^Y+bDaEZR{yr_+PB;Ow@*4cvK7yy&=+sDOOx_Qni)$9XNzS@B= zs3Oo%BIR}`S=*1SqLNqFLcq-8k4wUoyFm@MR$|q(BjW&agZWz7TCIi@=N($*;i`Hm$`1sq(${jP5m2D9b$C(bqn3=`(b+wu2kaX}Rs{MCp@^=kzOC@Xl=k><2hXi@1 zycW0fy?Tx5X#{o-9|&~K?YRXcPK@I*UUnFV&mky9r1XyL6onk6#j-ZEUY$+Tf+p4o zaaVK*-i1lC3I#)SPB>urWA3X2ltGKso}wg`7s+w3`VKqBZto7_@Mp5`>~C+9LBCQK|%J2HJlcpPTV*w?t*7+?F9BH>t}$3X2W_VR}J}H z0`*><@mV9v?xFu`(-)_g4{3XNbGFprZ`?h0s}^qzCE5eS((m$jaSnqB5WP#d0cqu` zi2MX~v4xFZWx|^rIaPC1a5#`q?jf8Cd&6)xHD~8HYdRdixrc(u@~}BWjG9>Z+3--N z(KZ-O4X0B-nOM341|GV{;BRxxm}r|@57-!`1kkX~$D@Jr{uH;OrPgfz%*RJ-9i+kK z6taoArdh8Ot7pwO;&XPQr_qzBiQnzB{khL6PJ?-XkGN-Th@@#wHoFDO2;+l+Uy(An z2aI9^q96J~f?MFtyN2=ElQ1ou!r@!EPB&P?wolt~8=|yN4PmknUNe%vN*J1~+!sp_ z4x^t~L=`U2MUP^6+o|bt8+;W*$i3@+-K7H!irN?%UIuSYT5tk;P)Vcu+P;GeAuTE23te>kwzBmmQq7@H&h zi5lcr8^O;Sax*t73_P-^qg6#r5@mg@c!ojS50DL?_A5WUf928rB0A&;%rG&OWtf*i zIU+nB7z|Tlib~V8{629iVN{=Hf!4LQTsxAXToDz+Y(OX-*5GFWpHw+30zkBf{Y`;8W^hfdd zQh3%ZRZGFN7z`O?LOep(Jyd<&B1N27H3f9cCE5avwtqm(z$KzN^%8rYa_pi=na!85 zo8z`XNKNsNWxcSj6ubWSbwbCdA4FkL>;(uDs`mUudy-gU#E}uL9R?@s!G{HB>cqYR zg;wS7d!U8H^dbmI5!#WlWciW4VhSzb`b#&TUGo+fJsi?QEGa9ZwGbm_SzCK7JytZM zji~E2_i<0xe;}8A4}}zJkF|ft=y#j^cz{?8!B+KygT{b?FuikhjcdLfziTpO64ToQ z1-5WFUl7}CP`Hz~pQ+6jN?1SkVZN^`Xu1;4s=WXk$!IVqf6w-Qp0=bGdS4+ZS3>LJ3FjXG%@eR z2nNZdJmeW*hX_X^l;LDT6~8R%(frNw93cqK7`Ux|5Z_GW+3BG78;%!T(~5=^i|pwH zHbkIlsR)a-$m(tr>Lr%w1tE+Y%Kpv(RO3N)v_dBuvgRSH{!dKqgu;cc8dZ2_Nr?8} zWHeZ&klwA&m$h8S_+Ru7BuiwjVUXWrEl14#j8RPhp?;F@P{)+n&`l#MKJ@YbMXOLQ`KtNc4)e+Um^ zA4W?m-GIWw>Z-M7*2Ykuydf1sD>% zhRnpac*Y1*tz=(3*W(Q1M7yTmE(Xn=_8X@!s=oOgl zb22#kCR!rPDDW%b#NzE~Ge1<3_S~N!1`zt*hmt7R#%J9t;kb8Yy%|L^mhm%GqL~>} zt?|YZYG`}Wj`Pd4VE9w#eatygG1i?_#`S26b57stPs0UW@rj5OTg(Ti^~0Jc~UH$5_@3{r9c*&CfdX?#@zWWgfipE>H8 z&SZCloJLUg3Q*|ek>?SS%-US9xl`pcc_+X$unF7+Uvh$;U-DT&gr}7nuKyQ1>$DP+ z$K|JO6u|WvHC*0<(hlfq_>P9>L6Ced;tMET-xuoLpRFDor*+}rS8=U2;%_SK7X%P5 z{F$&HMPA|1hW40VyrJS7=}9L7;WdG>wny>K@d2BKIcL9Ga=bMdMME}Jn~VwKyAZ!j z`NM5`8@>?>3dItNe6{gVDs~z`th2dNd)sZFV4}cM@FtRG`@vq}y1FT*DnCrY*0{}T z)N^R60M`1ROmk|TPNXa4VpFg<@$c+;HM%Ya`WV+MIHFDETJt=D-==W-=d9D3$o@i# zGk)P$+p*2~A1F)^-IgEDV3UEB2@EpL?a$m;#!w86Fyx4ED%7kJp?J5pr93}M1uubM z(y;-~}0oOFaLo~~Po(zXjX#hw)33P8mNvRC}k&MU>I!w?K!S4G@{Vai* z`NYpWE<&Jgnx_>N1Qtk^nOp)wj#PTp_|B>e0Q}_KZ?Vv0rKCbk}v?6O? 
zj2nl+J=P78JHZHmdFNF=dP?3xh-Ptl#X4bVm<8_o`h`4?sqB=VRl{Aw z&7or$WO9eFZt-oLbS6LSS=gF?B4!;!OusSm0 zD^W2RzBD&B__#~6o6aHaXj$#itNT{M%Ntea>Z;^ILhW05Z|-RO;T>m-o-~i8E30+y zNJ9LCemAv5#{~Vg^F?ia?WYL(y$Rp~(R)LtjJ%!BH8*E5qAv;OEtl}O5UyoRSti{VVX1lbOjYqQ-Q%p5{M@< zX=E?75?0y~GydZ}|9J#?M`B~5_me*-o8`17MxUvqJ+S_RQ^-FwF237RF3|by_Cq#k zN9id!a~i2<8ZP0x87dAr3(O~WkpUQ;awhfaiZ5?1s@yd+x`8{S=gSGU)MM7#a`n!^ zJ%`^?7fPJ5fToS1a{4y_$LsCX z-#v^k63W}r%NC1_7yn+Fv}{ITbAm1H3#k zhNXv^h$1}0L7?VrqpWt9HI?s>Gy^9uW}NmatJUa@TB(sQsHC%g3FOW2VEA1+X1wuw zGD>^6S2rb9TvyIqK4ijP`kjC7^Ct`lPM8|XoH7JBds>+$3C)YA_Pz*Zx6SFurpcI6 z|8d%y0LOGNx>S@=N(MG~?>bYgek36%k4R8dBa-Es?9e4`WSr54OV6huIx}Th2=i|z z4I!KN6h5oRA&D54!|p{QSsa6-IQd4E%s}NKLO+4(%aiuGP9Rkqz`AfM!F{qqT%1h$ zRE&$TgMb~p64ZhzO`)nb?OVMPllofh0|&Yl(#>01aA&qlfP03toX202NhcNqY%l0+7}86$lI}A_viTZGRv*pxB2T;Ovc7;AwoaX;uZDz>Vd$55g3XCb zC`+_hH#RB+3G%J^_t>5sozB$CaV4_$KZ4kG25W9hN4V!R8tWu#4z5D?!VoQGgtB5O zXzVBy#C{cUag}TJ`^|>1?giIAF$#0(CjX%QB2ot`r4P+!{TF&|e_`=|q(fclNq!FA zVvX|5S(K=G<_yd{I`rOr^;6&CKyG>CnLY#u(_M`rk+YPJAOdBXe#6jvLA#Y6v!x>uDtU(ljqYZlr-H1CAzQov@izh z#DVA2{olqZ?OPaDN2{HJ&ygHJz3sY<;)q#a3PLUW7(~`@_9o&pl1zV-)1CmclS+kb zNeqp-$yoNM3XABF?E0`WAL2%qyBEr?J4Egi#Nd+}6fN}q(x+gaWKAUL@gjiKbG)Ft zS=q3LH@(L(y|87V&zjNbcn|dikd}Ox$$z4C+dP>*XAHD`TQ&>oP-V-qSh3~|SL^&@ z?J9_=rwYh?D2_Zi6+=* zC9Y1}6RBqQfAYJI2HP%&&iAyh>D8$9!NRM52b<;mIhrJRwR}Hcav~ z!!4J3CF-XGfrp-`yF71EXzw!QnHieBI~q-uw(^W80Tpv4i|yvv@g5A5lb%S}vp@pSK-g4anP z)Vz<2% zfaBGKkWn+KPeA&Nwj(;Kic&6Zvb0gupMpu3)8?X5fFjG0K>1zam##!M#XTxa^e`2U z&;Tb4Ab)PhrFb4FUAfaT^sI`zRHa#98I+CURb*-p?TmqNJaelG%?+v1wJF|a`YOrY zop(Zgc<0Aq>0}D(b6r=1Q+g^(NE(U#aa;f8nGO@+O=-y>WWZH z5f)PEhdbVvIbprl;>~c(qrS;B8-@Gk(`Gb0X-u`qQjmBxqZMr`rd6Z++GeA@a}{y* zS?YhjejC%sfZ?nYG=z!?yjm2liZ7(+hMt#1eawlhD{HtCLihfa0D3mF6$DN>uNESQ z4o0j2@A3w&_;0RbCFKfyS8Ucp?K4L5d~xccH|8EBNr=F5lHo2H5Z*rXp1z3>R{YLJ zK&*d*=()P!llWYW5ae;ljU z^hi_=UK5@Iwfb&H0p!_2!`*oWT%@SxgZ#ityRsAh3tWc2Z99SBHMZCh@A-`8!eW)4*H48A7F=S4mx2eZ!{vMec zFnKm^KoEbeNZd8bzMI{s#%#({}fke5unXu1tMFkG%E3I%YvbaJD%8&q)F zMj#(91~_n1(KMv6I)}5h`q<);pQa8F1~dJx@_i{^m%|^C5AnU~XWE-kjd?bLqej){ zbC!8QX#^N!%JL>%Dw_lh!GCmMZO1`Cz_ywc!$sY^d^xN|q6s*$U8YxmM$LmCn8AS~0s3OSK{ zJTFP-=i|(564_gb>#gdLrj_WS7=#&J=Zeos&bK(XwJ2Ai%lPz;MCi>Aicju{qKwS3 z!XvSuyy`k8iv01qn?7f`B^O=;$Q83w=H$-hy)odGrI*JuZyyJ=#nZv+6AZv!3`$$U zQlH4ym#Gt*Rd-t{!Siu-YZkR&UO2EqfhyUQK$z?kDS4EFDj@c`7lmksmO7UTepuVK z#eM&>^Pj6VJuWE2PRan zbKQUaET}9CcJ;@WE^m?mMadE{Hb7g2=wK#t52;2EpHPN{5yWu9in_}&bTs}>k)T#~ zZZiv)Fb0OP*zcF+(k-Z#qq~%fJ7!-QxI(fFzr5@*#i0ifMiagMc5tqf45~UX!K4A# ziT#PLc`}*4oA+(o&mQ!1u8Mm;te9ZKP_K{w2HuvZarRYt$qyP3A z811TpPP_Ahr@R8Y9_VGEYIY3*9&s2?VzQ66;n@4vHri>g;mbxpUnmt zUgN6kpl{6HYG0|G_bsEuwSZZ`Y-|q3^odN`N1xrm!k6*yT+?-@!~w=KR!M{cpq^na z=V&6(HWM|3OT7-fonO1$>*g~3O_y&;IP9-Han)^2-)K3ei})v2tr+d9Ij?-{E9k|Y z4pluda#E?}us38OQIUn$w+cBe%B${(woU_lDHe=)V)&YTztr0KtBuz7t>|Ccf`1F2 zjkP=LjQc?tdz3-S1X6q+RBoU(^xeNk{qJjtc(Q-sroYt1?u;qqLmckM$4!E&jF~6; zX*8S+O&JtxUNNTMeVj?}^ej3J>8&9=peTcmLa6wb1T#8*kv&tlRm6RYK3kx6B-o3w zX|IfAWuqibzij3AEYUF}4uUeMo+-UVNJPJg7s;EPVb0$GPu4@E4k@qdlB;DRmMU?iTZ=*C5cS!TlddHyV0Z4pb7jA~8%%Bpva{ z00h*0sS11^(PHIvjW`%B2Us{2%N{19O5^_1sLRVjc3Pq3Fi=T8H;T0V!2jp6d@S}x=MX1)0z^B1 zEjHI~^udxm<1UsG3z8X2FPWm2ULV7PQ$5_XP8~y@oEg|U**OjqElECaX$($%q&6)? 
z^|Ylh`A?Sl*~|pyc^h9$-hkuMDrUwHvea&GL}63yH~<=lfdE3IPWM~4CnC$F+vfln z<6I@q1??fW6SVjeC>ZgQA67?IupDudv)oV;l2I!t_HV^EsM&#?Aw8G95-9w2OYS+j0n8@d`P zbK^W$ncijFVP~*9@JZSAxndp&#ooo|50|=oXxoYA)>jTJ3=YLez7e4M zIP7nA9`cM3d2u8s0bW&uALFfT8kiGp0Bq{?t;T`AmBUVMBhpWBAYMkJIxQUVm&1e8 zozL%5q9fdmx~fUJU$e=_1|7(lxqYJqTWfQCn9`ePtYfkG-($6KIywbb1QK8w?oDB_T&N!vh6?UZ)gF<-$SW-r>X##*#pQNoiyi zJZQO?Y`g|e^cl|xXXAA~sABON7Bd>grkA|b=akz;LM}kwmMc&uwQGg>q7g=%aAW9Rx8vymrbILm@A%GF5i z8)wTvP`Zw`B;@KG3opF+2+;W@Uj%8*2XuZcOuYB#+efBgBW$eX!BrUu0g1cT)8pSY~iTE7gU;RYZP%kz*)2(0dEj)mxgwcS3c(>_azOh}F;KxQYEenv6+^dWmZ2T=r%H~i(5 z)|;8EwIjNLMLxUoBAYA5KW=cSa?iRD@EBa-eJZsisHLeGi7k0(6^H~)ogijVQSU4y z>C8Y&;)XgEV+&;m%>{_#B_FK{MpTp^KJnk&R-bR}B7Q|W6j6Qd6U?os#`^i9ZzhDD z#9$lB?;LbVC$W&h!ag|F`lCn&d#GxO2|a$Kz<6bpN7C`Edr0`Z`2DN4jbjr@2xCk5 zZvnyt=7gF&}_^d*=;O4kmr}(4?&y6ftlC!1REi!lDXg*bKPU>QwDM?=jws z+DL6>6>Fv=tI&4cgj~XfZJSC9@*C@JtQ#iaez$6Yt}W%vNmIgyUn3s^eU%L?w~E^? zj=)b_&6q?c5WH!TEycTqg0Aa_U^Tj7)+d3e;ehgKJCCFN1$W{cbmO~HM5uvqlRens zZYg+1!18J7T%g+6&iK7}20Vv&m&l_Psj@pf{ssdvYH8Vho>aQUh&MfiwzIOC^qS8*}P?w}`GxZmB9 z+RyuJf@<;9klY(uVsTx$t|Y?pb805j;UvcYol^xd<#w#X)*)i_z@y5-$cwi4`!~VrK(2(?rc8(SvI4m6`&$ggAeC8)!{YOB}tmSO2krHk~p? zcU}3O_#RMD8?3-ksFwkbdWjr%b-~`h81cicTvqH*>Tp1|M>ptN!1Y zAAx5WjN#sI`k$fXD*lHa3^#!cES%w_nW3PJn(_hV|KKwKG6N|agaFrihUwNSgRnZA zOgpFveS>|rjl3|_Wp1jH+-Y^>dT5OfPn@Lt}_Q?xX3#clCu9p8HEbz@vN_9p?D1fFu#PTywVIh4$ zioI)}k)pSbpz-FjB!H_8jYubPcE)wsghemE*)qS*I=4R2_9Gs`Xg}|paGnH;(sk~* zJM8N24_3Mj;GHQQdlJ|=I|>BYLWA&`d|ng%3xF=H7u=g5u|w{Hf``27IfKMwo6}Xk zXwch*-qw!Nd{Q=T*J7}boO8au&WRhnUER|8WAan@6&_K;cw&{;XeG=0p~}GH^;AtE zpJb?M1dTtSEmweTICTMuOQsJ~6+|5~sP`i3q#zA(sTy-l=ndn^PZE$wdUuzcm<~*w zuMC{f>;YNK`E{3N0Vw`I?TN7UHCegf4L-Rv*po1rwmGQzTCx~IAhel(WnMj- zq`>V7Gmu|u9p=%K%iJ?r%IedjqNjNxdA5qSTY8cuY5v@-i{qWD4WObHy+%S-l2%?R3sIf!=&6hoCM zR0@ zPs0A!fBG&IluE2Xi*pFm*8fT7G+2CG2(PC}Dqy;J7Y?61=4$6}t|>EPak`_`ioCZ> z<6*2X$buph_EN-P$*ZN{Mw@{>W2D#kpPhXfL^?I7epq^9j0YLB5nd@E?1SJGHOoS0 zYWtw>sE^QmOR?P{A~Tuvgx+!9s$^#CN9{`a18 zT9JC(lR=ldr+GdMUtKoNeMxrfqh6%FqV>9K*EZS5&@cgwBT zHJ;;H!wHGMP4S!s4#2h?5$?8;4M1s%Hb}ZT;9*Bqu9!B6#tHg5-6Yy2tl6^J*dlzG zHMHk6F_pEYYkJ;XQ_u2a(mJ4P-AxWbVi>zgjc%|-OZ#qPa@3D06wZ_=;`QW!5}F?x zy&Z2VCKrMT;_ndjYhLN0mu(YZl%01`6Nxebu6?dIGIL%M#=iYf?sMBWV5^hDk6vq{ zP$<2rBp*S@&;-={kNJJ3M~pwEe3SN`d;dPxU*9yN(~l%AbDN5wu)17=lD~%0{F^5J zOMxt8g+rb#Z#eBejxm#@tdC|}_IoEOMK?s~^I(JX_zyOh8i|`Ms~A14YiM6v1X?m^V{`8kQ+G$gR2{hZ&tzAy16)85C51n|YNn^DA<(hFiaqAw<_ke{ zhb>zRxO4%l7=J@nv2CEADaZ)jIot!>Q>E#cjGpK)b}Ql`m1)g2)I3i0_(CrV6A7!F zV3nV4A%dR~Hs@ap`sNYs<@%`-eM4!=xw|Kcg=HpB z#On`6352<@1Y8dMau4tS?SNYG*;Jp93FDuTk zXMG(%bs#Fx+g|~NP=mnC6uy>W#oO=kf^XTD{iEenLGW7<<5)ix0E#SpO=^bo$gHWX zWSPz(?ny$4Id)crB@={7x6blI>CuEd49gB5KbzA$--$#mOr16ERyZKu7)q)a(4ZN5 zCuX5oT;mzRY!5ml!~U$$>Zj$w%RbEtDgEf%o$WImJ#W1w;$%Euy$gcFI` zh}kpyRe&Br%fQ+=u(Lc?{4W%f^u-J__kl8-q2a^)Q;#(_eX&nxkPNnYiWr`qv0GB> zfB>kxZIO)&Jna|q`t8KOxY2HMoM=ZjE7{G4CW<&HsxA72kRS@(KRX&jk3WksFs?Ba?&Fi^q_tR!v$6NMBg@+t>er`M z2f5_29l{pciOMyO3p)ua-DJOCgHo{jEyoVcMN-#SLi}PVWnjOA8$0pZH+ZpCRyGJz)=_V%yoLp3C=;+ z2P*n$iF{@o{YKK1G_nXA@c5`30a``A1LaTjw&5UD9KEpIshF0uCE@YCVd)oD#~Pr1h2Vx1q~0# z9KhbV3Qk|G@R5sNkFjUQo~!QbTo61&KG+lihmkYu^2KGv{ozm-=9K}GgQ#cj&*3xx&7TLseCeVtLGBtWON0C@zPt6yJJk}jkch=ht(@F9ys z?1oLE4+QlEZ;F8*hLWGtkhKonxM9#8s1Pw|575Hs zD?A4Oq+)@{87zS_>nver62HKFfZ;m3_C*WA8T7v3^uPl`0hU&&Xmc1%Q`hyr|HDe% z#@q(*fuYP;!FJ zj$L(xO=ukcCJbOyu5&5KnV9XN8UNrncm8g7k8MgA%w(}~F8$m*N2eVb$));3|MmYo z){Y(#6O;@`%rLaNFwt#Jr#u!atF;Ov(RP8c78Hu~`bYK^XvYWM<@15LwK?QgxNkNmg7%LJWY-kD7B9Y-Ze!0(*fvLKuGR>YIWaZ;U>mEI zJ2~2ejbrxOf=_dViwhss;Ha7Xn}+A1_tE3z`o&Kk1vWkyfH32TaTRY7X#LHcFR?#p 
z1ZVaiK$Lrf_Z+POf6Ok}iRpa3lr=gx9aYW5O~XNa_QGR(O=1(QtmBwKF^Oj#1b!Ol zy_{LoO5p9YdK_z4cl}oSI(TUdUuW7KXHPtFbRj4yHY{8#TOsK*mxU^|V|K}J(Nqvh z(YsXyLImlRbXDHx%C=dZ(V!W7DQKfOgPYVcqH-x|M%KA0A#@EJ=A@-bNPSVvAMjQ8 z7at7@!)_V;=!nni>r1Y6lULwgr)UvO-%T|C(HR}BYkHWlIFBOBAQl8tw`?p-H(4r9 zuGvBI0tv=CZ(W(Kx_TaX%3ZdUhSG=r#N&rk{?q*K!?^13FQRb1$?4MB$=ISX%r5L9 zPygxNE-%BKDJ+(bs&P}^O|v+;;sdkNG}~8pyNOi~-P!tuz~x6CDseA+;D2dBem8c# z%h)_vP=t+Ia*&)?KHM-?_c!lMJF8`4(paC-RgcU0QCxI&88{#@l+L$CC zesR2WsTN_*(d>JPG(wXvLtAU?J{bMuHL3};y3ejRO;s#Y@l?HU;!Q*Jg?wzo@GdEx zk0pwJ0T(CneSZD2VI6g^O~hwhjf92kR5B*n=>Q$=Z~Hje3=IG%NV;bE*uELp1^keB zdO-c+z3;xfJNxdQ7!YgI0t<|^G}@2ny7qJyoOw)Cy^xejkM(a=9MTLOMzo%AA0MbH zZ)SCx<1r=GIJ&|GlRi7uMa(dYz{V-9N4#)O(_BUV`AQrZm$*zT2X#apr1Ca8rEL!s zeptv*F!qF4dN!u=ioybc&oPmWD5Lp1Lt zM57)u;b`U>OYYW#3anFH-MkoVX!3q?k;C;mng*Y4Tn|7(H{Qz=xq>Hgx)TNiqlKfv z0{z7eO_)(>k4L!6vIhu$?(aVK#>y?FU`pSonAXrdCi^OWq$^F->tR~g5ckQrPaqO! z02iW{uRM>Q;_wvraB+F}gYwK@{cp25?h z=j#gd3i$m|Ly}7-yL(zK6>qD6oS9E4S^xB3N%7fE^01V}2Q^KvuiZ9Cve%(&e*M~- zz{kh;67s&s0DJf@zJbu7oM0BgZIq>7Hui>J;k`at{XnL0&(5}C|EF^*tE?F#p zc$NPGh6&CU@VFrsMpO-g^3i%V9KZP$ts@XBVmFJDR5PASZ2u}p{U;}yyGt-}Z;DPi zn6+ww|GRLO<+RXQlnb`=Z&WC3WV>zwm>*ekW;4fEHy63@OhAGu&EZ^a%qDyVcq<8z zqB(KXbb#CuBH^^%M6>p?NTu^mN@^_4U$dj<{Dnf&HxjN5t3Yn=pkkiPsinL&;=tzm zauhbM_SNJUZZTc+m`+7!POOmLUphV7{G5P?mCk-Yr~P{ zg4g=!K``MW_l41Kv4eGR+s2sK-BHH$?G*!8CWT@}(^_Cpp61}QSnq71AYR6ZS+Z^+ z=VuFa&fo(q4r1aMPE}m5wh4_|@0*x4$2qc-t_^8o&B*h7_l9hf;KmIzIjH6?^={98 zv|QTS!5!*yy-?1W9#SI}P(T3NiMUtB~CerYKcHVVGYgLlSQV_-j2_;lB#U+wZlc0U{b zxB4R-psVh$>ovn2w@*zV_!ND|P5Kp#5@;wv%@DzA!lSOy4&Qcg?|^w2p}N*QD{I%Z zTWg^TT2j>qq~T=C00oh-r?Fe2RE3@CH#WANR~UnDmi>Cw9SSU6#@l`{8eJ6Dfqy(N zho7nPjrBoxl^kiTH3H(*jq#V~VyJBn&aq&YAYe$x%T2YuoD@Bv2*&QA(K9oYM1H|{ zNN;jKX#LYbNK0K3efy=+(HRqQ$DHE*3o=lT&DUuy9)3rw3;+@C9F5~0p3Ski$!Wv) zlj3%sX*UVW4tJ_Y{ys6;URoTiqHV*N6=CCe?_A#~=U2)golE$Q)srWo!*q8!&0Pgm z=w9SLm>!n>vXbz;W+pjYY+3TDP$c%K8l0E}|F2vJ&K`N}h+)o*S|JX?{?%5C($sXc z(}_Q-pSvzLiZJlLY6E_b)7ZH1v}PP9wZfkkE&Y%B>B$^0pHKgNp{8MxxG#lzne~AW zkZf3gf^Z%j13pzM(S`&q9OW>^-&n_P3zthNO!zDmvq!#R)fL$#Tzh3q|5L;lH4(>( zLC#U|shgUbh8hO)lhJy+b#kZ(37Tr)VSeKS>iI1|Eviz_Jnq>>%qzpwPG1dmc zTqr6Ss2c1Fq12I-0M~Gh@za*l)6|$v38Y?`OwiB7hRZ<_E~48B8kF%@pY0Ohb-*j2 zI2qF%tqtI3TjLY+K|khV?0A(5RXSxZV0g%3C0R-&#+9g#O`LBLY_bFC=ggv@i)yFa zygZLUJ>uF@sWo>DSzRR)5+QPHi_~AJ16|ajvS@}3 zx7L67eY23o0d9?cFil}baj9~|;I5V;9496?l!d=lnw5>X5C#pOv#3-ixdn~ali8Bg zuiREbn($uB7J!e}3Tjj-OFaio;8+#cpcbCcdN&Z?sZYHIuRZ6Y_K7zqF8bl80o=?Hzd@+BQ7!uk*`j2Q~jyk}$*9w$qGdR`>?BW5de ztzf`_-jqiRht|j@cDUg!GY&;C;7Sb*G}qNcF)O{4m3|G%=uVo>{2_|9c@Kg;nnh@< z(zTEuUSEU;a*g|3TQW5s_Sk$M#$+_dp7i^geQOclMnG^9@Y>l`Njj<%axt5wJy5US z!!&7x%8NET>~MO}RLULIn-Bav^%Ubs(r6xApOdhcPmc}r_Z+bX;DbofChv&Gaza`M zH3ll8V8Ac)FC1azH&RR!50*FH#V00&1q3GAL25hG{}4^Wb@IxG9(x>C)_bdnbxGD6 znNgY*)I?os*p9QSjefhMskjw)SgiK$)2`-50_E_Nr?S`W3{A}s(m-b)$uN(Db4LYs zkvf>=@|ugfneSV41ism%esRppw2xoBHPXZXaz?kfeW-3%W6Kr(9cvS=v()f6EOIIX z$zmZH>sM+yrp|?ARCR`P|t!#I(8?$2)&cM6@4l_8kh!yv(WG~!d53xfXqf$oAM(Y4; z()P*pJn$^%k#AUug6gDLoSqVc_nMkTp2{Z_2%r_FMq_Pz@u#PmXFZ#q}nqP+w8CBO>m_7@XErdGvz~i zx%ypIlx0HI!LoemC_-1XnJtTODA)w*H$Im9vkDPDbZ0~Q&(^zB}!~_sV7f% zR)ItUes$qvUfUzWGDf0r&d}J(*}2-M@xKQPp9zF*n9Lo9MT~{qyu$**%yBI4yletceNT{P-)0lrgHC0tJqy zG;@*Z6R5t9bDPp#vIYkCt?LxVkz=R3cTu@kn*z9UWA7DN5T&qwciGa@rIj3y+%p&+ zY7Y>R`>1|v^_jYrFy2cgBi=kadvDOh)wi&yrG3JUQ32MAo&L$l`_qdKq81OnviL1q z+IH8JPkwCU?{G1xS>~-r=wRZ06ifv?W|XgTgQ>rIr2K9!M|i0t7f-g*3{Kn4d5P4# z^dP6;=09lzy(sj{C}AcqNNu8N)QCf@#ugLcBvUKeVu1?32(wJ=Eee!>kMMt`g%6*< z7RU;t0%+3w5K&?}^`09L&HUDiUgoWzB^~h(^Sxmfl?ov2hqp|1jh+Jp0GxcWgeykk z@?gb19<6jIA1`%Si1}3=XSM_FkmcJQyl@he)Z_#WQvnXgg$`jqv>?b~i1|Yp4FiA& 
z64MC<@bD6Rz@eD{&)?_SI}b`~wUI}BYZl7C8(}JLpmoYw=I-?$#XO69mY$BQZ8_ro z>H5crhTdHdfdDWtWMf{7R|L4H!v*`I@~?cSum0@;+9JEk%#({+))IF%dRyYNUJJo(WeQBiSTn^>)1M2 zsIL&`ByPPw45%tu$izxugN`y6O}I_A(pboVM6GNuOS}wS6vk#^d~idLBA=0oMmhS}FkaYvApmWj&*YvVuxgi&%PwWg&@=4=kj7Lrn4G2FhQN(+aw|X< zvJT0e>WzF<_x zZf^HiE+`ayM9fta;ZE|w16y~!dX!2=e||PSpd`DpQ1O`!b!U;w2F3$>KYopUP85H2 zyuI6We8z5GVk~~dHV9mL;fufaIWIhld75BmePhYsk*%zeT%eq>_$=>}AbpBhn{x?q z1Wi&gJj7QElZ<1grAj|ffqfi1|MWo2jjZ{nOFg%k2Kr0X`N73mw&$sWqa?(eIS_kI zt+;p6PIUIgJI`P-eZ$2uN;L@{mpD1W4Fi9@VbJZ4daTq?BcVTZXp(4gU-ZEDSP zve3`!4Iej8br}9V%Zc>xIL|`!PV#LFBTO{VzY;_-$96tg&~2zPvIGjmO`jWDou~DZd7t->S;)4- zP3gCM9iVVMW2n7WKO`aIb`46z#fE7PaMcocTdGkR$(hQd@lp5SHW)Q>_sO>V_Oa%J zP@f;&)F}|rvRigjXr|~#WP?b}7r_K5b!8<*H6oZN|414v})fI6{ICgvy{X=uj;`G@-8~hOk4Y1HTGt`PeN#KmPV>x2TL~K zM(p_BCj|)qwkrgWKkqYQk4*BEvggi>2(;C2a^;A6X#QYTzGaYSLJeBfMhSf;yfBb7 zQ>{#8Xsw~{d0D;H5h0r-EnF_@fI18N;LDlfFaG&HSWbOQt(FELlgVnCv0d=%#;tHsGY7NR_*JU4Dtt?3CXfl?Fl*QEKdzzBWSRNx%d40`K}l_6#b0HXc~g zGj{Yk{Y5x5x$O9LKI0Iy9$dc3JTC20yYPN|b5zW>JioQs1 zlfZ}UE+)@WE<@|1*;v(LWJ@iSbSmE4Ss}4QOwNc@H%swxF(LQyh3|dt3i$u+H@m15 zbQ`qx9i8j^GO7Fr(>YZ{*$-vJF6kOf8>Wa8r}t|*EVaiBNrem2Q=JZ4C)dgJRHyXc z?)uslo4$Ns84_$`Y*H`-85D=Y)dw^jgCs^%yGpyLRM9`>7w%5W2F*FHo@N$M0}oSi>lgof%:skG z0FlcK?OjV$YZNH|L(BD{kZb!Yj8m~Z&-0Co)8hno9a`CA~ZqQ3R z&73v~7WVS?#HmpwQt*+I_u|SyIbv-TcrE4gsR0l4Z zyk+6rb0+U9@G0)(%{2sc^pQAg2faKOA~aq0fjGVl-TGI3!wNV7RE=BhLLvi@#c;*P zPV#qxNyk+iI1vJB`wFkJ2!1%J%Ag?=2(AZC&?2m^F)HDS(qc`jwf|mKFJ1IDAmu zHh^HG&g+;khM#_~30IiE{qXERoQ*(?8$gep^Hga94MmTeNhZ2OhK$x?(~tmr z1%HqX0e9Lq1+lif_`pM{XD&8cKPoJKr@uV^D_r~9r==>ov*>I`hueO1ovArdY{oKa zZAVN`EE3{bFdyEx$~+HFUYEH-vNLeOcK#l(*mz}{-A!~GE+L5NHdZ)#Tq()AqJZzy5x6bNB@gP6ia9c=%Ef!VGl{*HiyOyR9S&5x!`3kdq+QYqr8d z!%MfJOug!C`UCSscd+w3c#^B;3M7KNKt+&ea9%L0-Z2S*XOBReK%3uqQ1MS*=qNy7wC&}h<0Q5SOPQ{ef*d(2*Iika99 zwWtCXgworWPHUAHM2bM_pV`mV$TlGO9#i_usfTy=NG-8Yb2ii>X~<{hTTe6i@&w!4 zGoOQ?Nhwvc?0?+u77n+t-QPSLUYA+kqNg@+Xs_1JrH$a7vdEc?oGEAFeI%6}*Dtnb z!OC@FrL(iz4^KD$H9nId36#e(rb3kXd6?1ob+ziMFoLsW!%2{`?Em+r3>d?gp=Jz$ zS&F%U{tfcwbskSki~SWGB-U|Y#SxJF@;CiJpD!&oR5VjqmoZu&{Ky}>#^E1jH@wk` z%&Jhh2>&eQT_U>JsXt+_@olh;p~rOD2g5??YF~%@O@(T&_gBm41UnegB0=mziOlBD zFu}3R?~Z0EBx%0~DB{ew@n|CM+-42BS(30pIniI+40%f;T!e2(*R@6W_ zkMqSp^sfqj=1v={-A_@w?8v%z=k-DSDaX}#@uZbgv=W3=>bF%Xa%6jLkJgbqy)ITB zbp>|(4^oB#7Z(*qt<8celB>og(8CUL7;<~(bHY%^1ne45rX7>-T*3^ttEk*B zWHDNkLbqMAv1N@F>u?@mOMPPVpsF4)ae=Tbq66>cO8FN04V0s`D$QP2H;GiO?@&C0 z=O*!p@i>vyu&L*ON*eF7h_GTZyW8W6zjStRW3o5uHBAU&C&X1ZNG8y>kD1+5*tFAp>VJLhWH`f1CghtW&2eyO;^eA z3SRCLHkvKr7Zz3?IGQ))M0n!fP!q{#g?{4XIf6==dfc6dF}H&QAlmc8lJc5 zT4;JlfPIF(eY$y!+Ue)1t%L=3_v@L4g$pgpA^o&pgLzq?=Ec6QeWzcS8T(>OdTX!S za(=hAn!QQHrxoAxkgnB-lsnsxrZ}5Hh9*{jvy2t46VBD`fxYZAVfBPKUkj4f@hVC< z=64GYhJM>!h8JZ58a-qXcR)bkiAgLN+NHy~7xZ6NuO9I`{=4w{#2KaLF{wG{RXouy_@0bhi5Dh6UUY z1Xawe+;v{wwP(qx6{*LfXeF)vomt8R80K#jdksaX4l69B=ZH@j#Sv02m9=b{F@LGd zYr@k_Gv-$>*UVi_yTK76}IxJ@q^u!q6wMt zk;-h?&QxH8!c+WYhF{H4arLcGXe*7%I*K-Z*AjQjBGyx-COzg4%MV@D{4UVyw#wYy zBW?%v89zXzLxJ_qKtPj;jPlw7kug(K_81@FsUU(xh^iZELN_x0N`mVzN{TM)g)T{| zwrI=I69YAihe9FZWX+_Kx4|DGU5V;pTxd|@Ok!URC=KJywt9^o30+JjYcZ4VA}AOa z4G>btH@;H)XTjTr7LVsK%IvhT^HF(iQ(rvL=byGEg|RbCvmm%rLY68tpLDZJt!Gjs zXLIU*Oad3R#ruJaYm8JoHQ4%Y%*5?yia??%;F2MFLlrV4#3Oz~u{p_;&mgg3(VAD`OzvU42ey)6h*p z9m?@LWbUQNY)CzROe}JvHqRt=Q_GOIEQF)bBeaO1nyL#@daPSeI(RpBel z1J)hOot-RjwZ`WuFyIPhTV{Ba5L`jjTpJcUE65@~OrEmEWz+Z@1`9S`EQgl<9(Fp$ zwYhN5g)J9XN6n0ztp&K9$2_L2lRB|^`xhV*@oQiAHE_Qe<3hwoVa;FzTA#l>UftWn zB4nv{Yh1~{YSFIjK0E>A!XK%QF+aw8x}K^$?AOx1Aaq7jl@>+GrIP@dK; z02|uYC&p_3!i=pOo9gXiwux*5rMF#*J|~;dIZt){flGy!!ftx&;*_3VVU^vy&+Lj* 
zG>}smjGX$1sBze^Otmf;+u>k86GN4nZPK41SVF631lg}AgaF^V#*nK!eB@O{E+Pnn zGPAnQ&nz+)Tb6}MG0Rx)E-VPL35=cIh$yBeU~@vtq_Wvpc7QN2WMd{}0000000BXp zTo^C^$Rf+w3k$l9f=IvK)_r+?%oaBGDBVVZP~;Sn>8}G9nZ}{3;t7g5R-V!ji^2dT zgP#jz`rW?Zc^ua`MA;@j`rDtmY!)c?z#gQpP)Ud;Pw!9dFFHOVIA-YBUYpe0)G5H@ zAxsehLc6&Uv%pfvkZNa_14C?;*>KXT8bWh*<2<=q*M#Q{C&O)W+q)oGfyQbUCo}d= zCr^vxZ^yzF&xy%TSm2*MfAlet;7)#ic#>%+^rc9&=`>mO4gnbeV)ndt{Q|iix^2rA zBeT#Tf*$tC5Wp`7nOp;|)T-tZA{%;vH*Dcff!iL&k87^T{RxuL2O6a0cb1JP+FVs zTlRwU8mXTOgYpl@Ql$tyI*}Xhoff8QswP=@$)*?Q9Sl$U3=vc5QN#~0T8Bn7RAOT% z-pxa43J^+%tUs5;e1An{NS)j?RLxS+JYAf70VOMq=o@FFc)8v628pB-q=q;9acKjT zKrbov4y7|`RUMlQS`yi7i-15S^&lbDtuJvqq@w@W?|?&oqI(G3JogQoc^VXEe=W}crM+`vjxybh4*rD4uyrXNIT#F8w_v}`he6eiV$4RScZu~ zr^KS*HYXg6*7V^^vnoB1{bWWjgbxIMO;~Owwm6_yZNCXAY5nxg;=8}aetW|^Zb%ej z+Yq&)gP743P-3~+znzJYPRA=vkPXRq;eYP+>5Z9jdk<8+`qG$;;;TSy{RLQ}Ga^_W zEDO)N``lP^oymk`-_#MzDn!7*oN3IENiu(I?-N0vavH{6^)WUa`_G_^06%$4)V2QU zOl-6x)kDtJxT$tQ)M5|1N0o5>CSX3aaw?AtWbt?>Pv-L_Rqb+ai6`b-Ec@jC zoecy;x$2vt@G6eNtT5>=nRg5UOV(6XGzEO!fSEc{IIm=^w4nZ;IOez@5DsTGD(WVB zu=G1Mx}k(D{T)3HTh5ao$o(0e%N8v&xqI>MvHh%5_V%PR4*nz($E6zr+6Cquhh17T zdE0|ay)u4dEbpOjR=BxFQmPWPc$ib&jw!%R60OkVRTINESjn9CcM&GhFVya>#hE|94k2eBbY=KkX(u_uD}KTK}x#$X4(WQ0EZdKhk-dHVNg9oofcHti{ z|NTh3Q~`UwlT`F>KDNv4wE?EIp}Ok9-RhA_d4(BX{|jaIh-yHD?SWMV4fy>8J^Dp9 zfzoxi$x*I0HE2H$K5#=~vKrKGq0opnw8N~!YU$7x(PELv)%vEJBPw2?t@-oE@1Vj&MYg@>Q;PCWVmB?A}7c)?g8AiNTC zJqf6VJbP-_SslQ+Wb*5=O4wy*A_S*;w)oXuB61I{XtEkkJ)XP$-4*SX8SYsJfvQMF zUBSD~K-R(jWh=N^xE>P~Z}vWx_tGxgxvpjRN6AS*N8yp6VAylRVkAScR*bWktx2s? zyyhjlqv|GrqBnW0%^^{w%$;AoYd{s?xrdUu85KGE00S8S-6R*g&Tsm6lHnOAk(|T1 zW;Q=GB3H2|^f7Mn&Jbw0M79;+wXhE>{Rj3Rz?2_|#Wm6ih^F~;yIgfWAbB0ca(s-- z%!6iM^lBlJ85^&ntX{LIWz%Y$sW;%*WP9pwD)sx2{OBgH4xihBmI$$|?1|ITi2-#0 z#?ead5W>arfRvRRf4!)D3)rys=dY~JhCB2^rqs+77L{DiEAZWH;ZX87PYJ2&C)k}a zy#e|m-`x(v^^!roWE(&7>>Qno!Ny(^^TYBu1Be_ljf=0Nt)P38 zF4D17$`9>!jx3tV9@avm2jO<`xhfOcC+A_CvWwCB5HFk-^1z;s5ev|#Ro<93-b(Im zZY_}u2RXo3qxqP6=RA;oOR>_UCr3c(RYMCCzmSvkcS^zfMieV=8QT-uI?XkY|H5yB zdgG}~KF{>!z04MT#Y1NyuK{-)I*>O>+=c|dbPkziQyDQ5mD?N-mno<;Q4j*_NtH=R z0dIZbCU_3uG`Qo50d_xvRtR^L5Q~d_$o|126C_seW?8=Il2^KU?V;nva#8tEOztRh zu*l1D=TcT`o#*e>&Q@_80#J;;^#d-B@2~yAk9Oe*)`VeLTh_s>&YzBux`z_YrNosFsLudZsKV!}dA(k%-+m0kA!A+J4C=q(Usw$;i8 zF9*amnbXSbyU-J6G8S*x-4e?i^2axdR&0-J0o4ESI(vq%{J)_IEMRJ3StRQ_N|MXl z#T%K$h8r72*pReUi9H2wmXPnSQd&M8i&OZf&~)|p9e)fxCFWn63UjNEp52fQ{pdu^ z`!$GeR@R%xwojZeqUTB=w0Gu*+w8}+n>NNdb$m{TVGKeYvoV;ibDi+jiFHTN=+C_Z zOZ7uX2Nv*!sSn*Y4{9`4b&r7=@XGW+fI69bx@sYHpd{e>$U9N2?&f=c0#^Nx%H4$C zn#G={BCfY3!o44@rOcS(|LYbBwL^_+<|#4Au$W4Jo}qaX{zcQDx9id;iVW5<25Dzt zA7BeR(G)j%pqV$=XitncqOdB(X!wR_k;owch*y}Lh&|Yj^ngWmn2y*LkySSpSXEk| zCqhXyUJc_Nl{&a!rQzcPR+X0!A}DkiKs0UrP}UeJFuJUDoAp=eG6?5Y2-C6;4xf2TY*PJfZ}<@j3I?qV{RuURnay zuVA^KGV&jDz@3;dVBwj(=d~RFzVjYb$thw`u6!&;iB{kVAp805Dg;lQuW5hIG4{$F~m<(|}P9xzS#(m zy68(s<01@=ySpY)nZpIvKFVYKw&Ed%1Nj?SkmtXJL>ai+$u72EaEwb)20rkBM}KPb zb`UN+h{1P7o*!63Lbxh3k3y1#A0FkM*f~hPw?5OcJ-IPI5*}}U3)Q%MxZIQi-K4Gr zHO}x6aQbjg5jtpD{L{nS{RU)pH zWm?=KB}-b0D{dWdy~H_2a^({?sl*h^ajhfSp{;-c1I%POzR9mkH#{fwDP%9mc_>r$ zR+^n*;0_7emU*TSkOqQ*iXQ#WTxM@3hg*;e@L>IIF?=I-<+fHGAxjq_z65q9A9}!!xPnKe+PGESWcaqpy>~LAi zG@7qBB|gY?x?89{C>jj8;sadCvCy{Hq0*x7xh6D~gKyz!s*YrI+*_5FfGn&DyFs+% z)zoC6$(UIFy^RPXiH}i)^8jgC);@M9$ah|l%>;=+kSDA82}Q=2T=JMJ3HNtfL%*ZK zO=%Y^vA4{f_WC2Hdw|0ymlw>_IB!;&pEub=ci?NOdR;7Vib`R62SuqgiD^e&!e{h} z(XI}_lUbpbma*$~N(AThuN1@)i3KFua1ENsG5hl}lUSaV^eG2E8*rK31SWJ?p$!2s zJttWCXV7p=WywHnrf6|;Kf^RFY zquachhUhT397XID1BC*r^^0~ne4#DX?+?nCVQMxCKfp((fjO3`d&?ECn-!~3WBm|^PHRcE##s8_s z?G)bJ%tm9_vN3{GUoF^{Ciy)-$~}om)xd7{_CLOFW#-aJ($vH*8ayb3 
zSr51HGdf-DWItThw`C_tGUg6+AcygtkGJMW>tBF7o!y9MRzCET-g8T?Oui$}>qsv? z7W$uLF@Nz~N=a$tL2kmq0vV9Vfyr^Jfa9qSIdO1+CiKp{8&6$bJd9KHZ5IlLf~3-P z;c|xw6C~2R9CBCfpkU^cJbH~-iA69&kwnMAEd;!rIh@GT-l~kWKZj*}bzcwbo{fUM z_bvksYp%KWM8Bl%IKSG3AOt#nIrA$-%BFwPUNM)wTjOg#YM-RWdL+&i@Cz0%sW^wD z`N6NJiT{9KV%QnItjjvSU$sS=R}O!Fq#D^8em&F%a0#5uq!U*YIpfw$Izh%hZ7ly) zXOxKjR)nV@lETaDl#WyzCXvA~^=PNiSJi`N#MlCsU+e62QW(~Z`ZoEn6e(Yt<&%xS zkrhQIfTcUIIU9011W60a&Rb4C=LlxL6Uq_z4e?@7G~TXEdu4~@12s=IzU{Q<7WOlC zIT4)8PCb5Tq;Sp+tQH)#Gc+_!WofkX@GwtShDUR%F)mLnCeS^>s}!P!o) zJ}At*(^&1I+zhqgw<8J&+k^MpTWM!?t%!StP(~eWH^|@DEnZ4XeF&Say4UwO#1vm4 zda~NpgNV}g~()G!3OY}N>8c9ll28S_}Q*1 z54(MdT>25~J%10Gr<0&k4kTtPZnHgbf{f^c0bnV$aFWb(A?=o!j^R50s#N;vz>T;I z>Wj4^B?JJ3!Q$2uH1aG$vO|o8wJYYvO;CUWboOq33cnw3{;xOBX*s(CQb~Li-iUx< zu&ohnH&5gNe$`zUA_`0ASx!N|%=vjIK5#?M?u|arME?27#MAyR+6;60@! zps4d^%Redz<8hJL>6j)PlFs#$tYndRw%eW#CVe`O+_~C$F~wO+H2yW7vxLt|s-X+N zH2jc3=WplacU_9eA0$5rQ`x>OadQm`vHlUclm2b=Q7iB+a2X+~0M9n^CNeW#@&=@% zn^F|e43LA#)pRRD`qlg9d3Iv00V}KDkjTsO%;0fFi*&!A`B$ro>QD}QbA{4|iL+yc z7f;EVV*Tf@UK@9gh<`c-CH04lKrK>0`NeAKH)9IB!X6IF06*Q-dOXin&hGZ>8^00= zhjcgP$~b;P_58YynGb+)-Fg`=eV?nUAPV2L@Nw$rJde9#DF|ZHegRCGl@O$9>ir8g~C<@hfk7myoOmA-19)#5}`^(|dK3$+YmV ztxf9}&zbDi2VdXj!@FjiRfuhC`lwYzshE~*d$SKkuw+SsjASY3@A7`=${R;gFbW8a z3c=RxutGm_YIZ3q83$u0S)O;H)@<)bQo^54v18VJUIz8yYCij57X_yA?CBN=5M4xZJ4bl49f0xZ*PCSGq2d+a`nC$@@|@yP{}2tFPWef{pO9KquSo z4cljl=oT0!3uy)AL+l!9zX01tntONqgN+t2?}83L;)KVOii|8(50_35Q~7>j&0|4w zt?f<)+B&gUPvyCC1KBJN2xisLNExlm?$?*e0plg0GJOwz5=BDz@uYoJ*<{|sy^#LQA8`Qg$Bnw zaQk}nPZwo>S}o@)wH?x>qn;$6MB6r+mi%dQ(pmdZ?5m#mzZWzj@hH-_#p^`G1<~-t!3SovB&od#9zpiPj`hK7%oe`g|zkcC2 zwJ7w+6<&ayiCLXU=$4B7A&hv$ahF>?Uk=Sv%Pv0#IL>Y}&QBQD?@Ubc<5Z>UYi;acA}&V<5ZtF;KAJ)?(k(+D|00DUJ!1!{LZ)H-z;N zK(xg0!Z@;&pS-T#RQ|p0;!!2z%8e(Ud0?dBYqPYHCGTHoCpuoHp~?sc^qA6pjPglj znV=nI8Kg$flK0E#KZ`Lujg7lnXxfSQLbB17z>$$l9Sb^$7HoSAK4f2(5^)ge9wSv%d6eKB^1;B6! z6w$GzgI`6rv5RyZ!!CGgLmVLTKd!)j z3ss$R^n}HR+;ElnO=w>`Ly2GAt6#iN#Tn*v_H?jS*xl&90eg%(hS6uy-ry3c$ei;4 z#X#}d+!y^a?~AYEKijfj_Dcr?^?C-GpTH1c`y-XOeS5UZ$V8S)O+ooA?=BT^orceK z^WJTvQ|93@WExdg7}fv$9Q=&x$w!_K{V3l*b+3x5&A07`Vn)^86Xo+{cD0)HhiSmk zPT8^6rysLb$wnAl(-01=gMs2hQJhNG*}RdoLp7iLpZR7GF7#*;$Dwbl?XMG`S$;Q| z^zj~CO7*)NsLJ1w`Fw{myT^*U!*3Z@=<6B^DIIsjl!ut<{(qA7a*!jePUAAy1A8J{rfO%hN3zw>}G8u_qn;qkn=v8AG`(`VUi5<+s%s>!a+2lD$29GtvQ)gAb+svmq%xQ_sTL|8)1|t)kG^J#6QSyFiYZ?6bhXiQyZAWQ2Uo zH^1Ah)lZ+cgsKKPmkU@R+!a%{lfeVymz<`6T}CLMImZc+2(`hnJV-2${z{ys+&CH- z+r{(jH3{~5wl%p}bQ?GtN3um99RU*m&3az^cK2Ku6(3X2ellpH+zDr z^KCFO<3`teaNExL9t~Vs^hUUP7$!sPu3^o=we!y4VR{?9 zh=xb%be?V0YkuS`Ghb7asf#!Ev8g2`7oE5U{a(RBTc7OHCX}6gIA`laXWVH;nez$4 zihHHaH8>@mKQcMAG3(iy5e%e^BeV!I&SykGR^&{2_cG_(Pspp`$VcBSnKn*-gKUiu z#nm;MG5NpjD?4c?idUa_?O4q!>FJ_lkHh zJb2&(o`0fjkf-*z6;)m3s&3!j2N;LjLqR=__1oOqUPsWGY-N8!f$QMLEiQ79)!rTP)U* z5`k8c=<6=+f!Voc3cQt>*G-{(3YbWSes|UFSz?FRD-TFa8IFHs`T-JpjH`Bb26=&9r>+5EdP^mv<2Qc!*!*$XUc z1YmL9Fl^b8d^^A`Ui<{{2_qh zQA{#`E%RlVA=6JzFK8zN!!oPLr#1+N!ea0_r(U{izIhM+Fv!3SWk$`@x%BzdP0oX_ zlDMUBtxGe;N;2L6Xyi`RdCqNAK-fS@ z9N>_S;m{YaOscip2K-K5?r&-b`KFgx0+z2pAh+fprbI32?8Li#qNs~ZIX11w17ub? 
zL_$u>7{8zsn(ar_8z%b5^W9p3^z{88_idYu35CCMnY*!8CA~D4ev4Q)oaVFPoYu6@fY^~YHXBCKzAtFpJ zLhAhV=;U|o-m6WhIHvawLQ<%(WBPVFSDL-|*>U<`wMCccMo@fK*Nzc3^=ckfem;ZA z5UWf~%bXE*6+lT!AVON8a6YR+YfYQt(_{dy8P5d8*x%yBt;F|o7Cgf`1&alUVS1(*bQ1t`HjEu?MBQ(`0c{#tA?TX zmn-jMJmr+Db-ITTHsqmDBPkRO8G5Ir(jGtZw$Wqd=$H{_gE0eG%4bC`R)#X8IRW0; zG6X3u7}o_@2LwO({7CEN0BwC$XGvs$cq;#;ZyQxQ6xvVpptbl)i6jSp5&U7g zQg2EWi3A6eMy6-1kF@-FwT!3rwr9M1vs{PZ_F=C%>462=CjYJ)L7SNA#DeCM_U0<1 ziTfadz7ed8**C*hZNCVNK+F7LUH^2V+Nt3*W%3kg0Y&0*v@Uu#ooG%_eZ)B?%;O7K*?AMu-SM4Xta5jt(AJ!oJZEb|yY&_}IB;SeN>C z!RXy=^7_OHUx>6qFa8xxnH~JY!ggj9{L~(85>GA2a=#Ub&R+Xa%Xpb5$XgW%#@y~g zhK%Q}cGFN98(r;n;kd=a#e0NM`2ZTMKdY-bpQ!!3AnbQ?YmCrOWQ7@z?ECmWyqMaI`%k~w z`}cJdE`s6;|K4=>eBX6Fo-4F?{O4mr!B*ecROKm41ou~`=XD?T3DOU!dp^hxidMYb z#n!8>9?=P?iPOzx>L^88I;efaF3c^o)FYeD97*oH>DGcjFqbALkZ4n zM=rwO14)|pQNG5s@qCAfXkor^>q#$!q!(rr*yM{M6vpU%Gl3sAZ5($y>y%8lyUzosU%7%}AYv6gKy8j@IzaQx=Es%2Mt5b;t0={Je`Fv?y@9XK5*>(U$?rRSqeTVy~1rY{iU;;>m2dEu1#x3<_NKao{-vt=@2eMyV<<+$?v?%&#U$buuwSd zb?`>Q)9s@P`PlKgAC(y+-t_}*sIwa|L0yht>KR+KEM70Bf#d2Mg_!%z36jYL*4O)g)d&Tb!9Z!_Pxv2hXwS zHLcZV{BvI$I1;>KYc=ASY`EhcWs+?|_zjA6mc?|~WIKQ5IG)M!lv!MG&76s6N^@gy zX|n=834^Ymxn8E5;boKwJZ4fTg)??jaTF3IJmyQm0s^V=>^aR~w6$48y!ss6## zKy%J?_Q4xFWFPJfk0058n^kcc?FnzLaHrRodUEQ)7f)X5JUb?Z2IEhv&Zyj`GsFBV zp$j=3u{a3tEO4C~dXU$q|9>xOCKaVGtC*K3kSRLIuhQ4;GG%zLi-P&^CPk(>KUss? z8wrAz=5dw=5UP?b>Jbj{JzQRiuv#;~9nu%@b~&{clWVsVt@@u}=SnTxR2G4SAOmo} zDkAQx_pEA@5L+G4pdL{OvLsuGPW;f zz|}&6P4jX&8r*NP7;j+Y9~CF6wIs&A=<5pfvrHT|REJ2j#>SBWxT_2HNNhCCVt?b! zxT&30$~9~rV!%(O#+H?%h@_urw#_l<3=E>`V&3{Lb_VD8dLR;{xQL%KGZ z>Ljl(u6^)M^ZB?_$+!>p@5fbfZb!IO7LB6)mLo^~&}V_PY}#C0x340rNyxGSgyKRM z!l2J`_r@>8)^^t1I10Xk;AQ*e@CW!*(WU35l3`!dfGGSvQ-$dC;Bm9P8|!Uq85O(; z73&ib&p9ykyaBL!`C3-DNdZ97%&$SV7cl2`sRVc2;;?4Tr>?q&g*u|E(~(NofO_hh z4AHdFpCu%irHAa~A3e4!z4I2^_QL`@s`!PI1Fl@)ubYvOOcU);*P(k06=LF&*7kxB zl-IdS=PV$b>bJO>baABEpLF26ciHx~%PMru`7O)ejLkPx!0z-*4cd;JL? z_r!)#Q^inPfad?=S;cdSyh6hz+RysX7G!(D#Zf5>rezlxRGeM}o}qEKso8>?SZds3 zyqM|k+v$f>LRku4QOW3)fb`>5HBh5ipoHSegzt^i%poJ6t^mzcUlvJ~LIh%SrCd+A z%M$TOg`(7Ix!=6ubcWaxu}6Qs~(7Vq=f2d<|$El&EfwrR7s!&G!( zZC0~WqpoW!ne(dmDQ7}jw=;eF$s8PHeBeua0`3e=BRO9MflMQ4zY<|RgnDdNYr}?gQ zOs#HcTrVBhLGj}%9govF_V!~9YMRfDch{4THX+M1?o`v*%$`-}(;6w%vi46`P3-_} z6z>|O7uC&pV#2>3O1bc>$lyQh2|97$qP2PV9ZUVc7H%m=ALz@aAk#$2*xp~ zWV^LJydJNUzpl(+`m_9=-G)^)@ES3Qz%!z-@3W1-dK^afCggj@DNNhT z-xuq@2iJXD`7>E$UtPMsHyi7hm4jl#D_}%!pCV}f$4w|;hV}B<)@|}SGien5y$*{fY1!nKuWWXIJ=njP`v|O+fxJ)5M{gNDW z)b>Um&S=Q;-p)qW7N1VsLxstC+*_lXc2srBiiYjulU-&e>Mubed7{~!J=xzDgV8VRx+LfQzia;8 z4R_?shuwM^U#_<)Hz#eiHM4%ZQ$5f4{R-M(GWwhkN7yI{-w!~qC9l($e$KVD&cS~r ze|njKMOT6he&O6Li((TUxC07hZcXQ@i)o=il5x_BnQl9onmv(OBk|;Z3RGq?F3MO$ zZ;0xDtnyaX9a1_=s)y?Z^67Yw#Tm6r-;>cuY?usp-Yj=Vsvr7Ne^zY8X5;o2<-1WC z4T_lVbI7xfEj}z*U)1_+Bpy@~$?JKJ2n)7$;|H^*Bl#nY!)dR4jkWcryIx1-h$B|~ z4A--LQ&w@)fO*1jtYy~px{xO8y}#XTLWe+yho)v_i(%CrNNtVFRtnK~IaFqGz$0Xz zi2%Nssbao|8a7>qo;Q)(N$;B;Xm&0*S4tBDzivY;emLpl9L{8S?1^!-5`C8VvM*;D z8Zm-7eWy_HR4;0+3v+E&xiB=H>$u3a9A=a|feweW*I()Vm+#u0w%(gd!6(#0-;wv2 zZx>RR7l5oS2;iWD)G-IBhWOV1KusoQy46f^QT&pT9RUMi4W4OeRgVWfZ-FRD1GJo}p)EaL7ao$=~;oOlC`mG{%)_Ghr< zdvkLRz z8{OqNp(D)6h|BtsaIWkon^6mU;M1G-g!XF;3*l4dafk6=*j<97`8HCT_l|}DTAglS zTlPOZU2#*-sqT6X&8!q*s1zoLfNSA!U&+}MU<;sR0`TgP?D5u)qry%j`q zt3#KUzk~gEl247C2 zamZEOZ#HQSE+awr1&qaIc$8IVAX~9`u$ExoH!*P5pOEzCR5Xx3hW7&fs1(G?rMUDX zULxCFo4VZROuuYnrlgg)rY!({l^hbN%~h!lnG|0&lii~c4sM?gRu|m)7(6>e9Lgb1 z?-jrhS92*!rb?syTCDx$FudGAU2z0r7(+GuCQ4n#9DRBt8^kJkBwcWe1B`5Izebh+ z+*P+Kf?B-T9#Buf&fiU$vQAt+Xyok9ZOi1%)~;$E>@E{Bb46*8^DLJs}p`}_mt@? 
zU_+^WG$nk&>~KZradAD21CavKY%khvGv)lx53fut;a6lFqo?N@$li@=!`uHDJJNm= zHyLvH=^S?2un}`@XT=vTq#cP7xoej~b9Ku*o*3D;Ckwsc3#upR87S;`tH4UeaW7P- z;I-QZTpAD-%S%_+K!iAWqD`+I8mn9Z;b#UhXYhp}bw`8poLC6xeKC`P;v~Gy!C)gy z>nuz;QO`On_CGUzL~AEldx7IFEi07Ll#!GhT`M@)Q6jxJ)c^CN$ufSjizm6*8q4 zdo%3X{DH7*qUASnc3En^f54@!%p0``gH8bj}XSB+r zy{2eD(B;@EX1ecq^S15(SOm1zn{3=+7?+pmSD&cG2Fi{DPXT!z2JH!8w1>Fds|QsP zhn1a_GZINLcWuFPRpO{MycELg3|zR|Oip4*wEnLIS^lNSQ>auW3=&c@!9HZ5@YXcn z#S0U37Q7Y~a>t6dstX!2PP$e>6`NtW!J8;;`DqK#@~<5-e`P;K51hyl;{Ad6KN;m6 z7_LNn_^a&zkYmPiYeh*4sm23qZxEMKcYv~_xkOOubq)5oYG1mAJj*QRTh(L7b~7s& zST%`9Mwl!6uaxG(n1q*;Lw70S+1w1;3|mHq(sY|ea~Fr9%7g{l23-MmNoK&d<|eS{ z?pl+%i2e2@+%Ay{`u`Q%4z}xI8NTwl-jq@>BtX(TA7kS=naTPsXdgHIlB8obXtn(f ziXPl9&}f^m;MjQpoY(y|3AFeAAf6P@Z&?Rc6S3#N6AK(*U86`3b*;d3Ap7My1q*{H zA&2re2L_kIpx$E9J7hLqdaB!N?1>(CugEIr>UR11S@>Fi%&OjA|-CL4LB@vRuR>B+Xp@h9oy7 zvIXR^`>=7pjkgov{&^zXcKrq8pA~jHq1Xeifxk+EU&{wfaA6pr+dG2}DXmVL#u$;L zi!W_v)3Q_WW!3@egcKcTU!_%;g7Wz@jBg@5G936}!7?_5y4+97F!$;k1~*?mtdXG? zA*ng&UB23ToD*(4$7Sp^wvGfYYjapNurV*4AEtc3X}wX}@0k0~XJu+aGoPqC zuwyg|6lP*Y8BvSJv6Ky@93w;R>%7M1l4reOMV5na$|zY+GwgZ?su>Rpnmz0 z)S>Ww&rY-i(H{D4LRMN4V*{BY!=1o8xGVC{5TY$5l9;`tiE6*fWSmms;tq?eoONvd zJjHlN`S^JgCCRK#0oqI$$1om#;JFb0Uw&<*2a=cGL$cYlRN+l4UhbQXuZw)0%m}O`y*XGp`d{ciuK~II9 zx8|jX`#bWw!U_Q})P>N)0}SeH-Gd%-O8V9X_pjt{N*0BZaAkcb^p3h9T9p!_{89)0 zP#mahjNVS?5-eT4035!(M-JMtZ)W(iD`owPvUi67!^Tb*ah=Bur;h0B$;>ebO!ON# znt@4M86Lw}2%`Tp?3HwgX8o7G{TXBR`Hy66^}wSef1i7U<#tUL;+UnO;9znIwG%yo zr=IG?@zh80-7MR?JLG;?rplr@XADQ|8e4fY@?_6Z^aodhjknPM9&Rt7z4K_PNZw-2 zQ-OuP(LnRS?eF?);0xs9Ej&HZ;VMG083Z%dfb_LA_@sd*C*eEC7JfKOecOCD6ZZsu4A20F||0G z`Ah^mrG7jRllz8`p72c&9w<#Adzq2UqTOY9qkG=W8 z7Kel8;*riJ-kD1od{!NR;7WLTLVq#@aHhIz8Mv+*|KAf=bvt^(B=I#s5p+oi=Y^JD zZm~@rJ-{WUnR(L(=;^1@B1B-@D!Ot27A`DXy6z+Tz&?$Rq)rBy!zJ04Dh*g(C2Y`P zKU7yYy4IZ@@G{O3XpIN7)9V_0Gtg`;$noQQwrBL4*oK@ckM2lRB&du(ObIw_i_AhG zx&m0+=FAf2Sq7xnu2R5EoW=sV$z0yYXveoZ=j!!S?%lx6K74~J((n|bl#?vCfDR6y zjMno`CmZbqa75@-5IAms(3YRdU) zT{%YMXDk@djFF|#b_;n89vO8!hKt5iPc2D<0Zhx3bFQZOL?u_5q#M4t?-p~u!c0U1 zN3D%EKh8XGkozz94~S0>o{zSBv&^eC_ZZ498_-H&&lnqkHRUu_62Mf%jl_a22)hhe zirennZ0m=P_9SonL|ayt;G~7OF08gn3AzaikFB*YucY}^k-Yx$s8(|-4tdC^9YREq z-FO@>>5onFMi}AkM$9-yPi24*9_OYCO73K@uMvtyh(Xy#RvrF-!{bZn&btPuOE_TS+bN*LDlut|50urRL79*u+IJpTsqS2jwhic`%Sd zF4%3TgWW3Y2L8<`%B@t`+>{#t{GwBF2vl*nmMT89Gwc23Oaj>0K!oN zj1qd|1%?AjLa~>pcr<*C+pJLNtmqkn)-`)0g+@xo@@2Sgbd*i)|H_(x0LHVnPCCTR zwCEc^LK4xzU;*p^T0o`0G3rwOr(FVf*!*d?s;!Dr6zp&bZmpx9);@_EE03Umb_;uH z$W~1yb@#b8?J~HahJ*3D=CZ1RDQk(a4^q$E}V9b^TivvGGMwsvq<@y`3MpYaWIwf#v;V#$x^*& z#$ijGT>IAD{;KPdIiL=J>rJn?sYh0k==*2e*Kd_SL>)mmL4>`axIYC9w9R`}2{pcN z0wPd<mdUS18FJ2P|K?OXtBBuW$-0N%!+fTs%82y(i1*s`#pkY;?tb=0 zbRwGGPC-S!|0?mb+6bBX{J~W%aRtg z;=oG|UPplZo;-i`Co3iyoX`h7{B17^r`)^vMaMi@MPjHi9W$;ElAZ)vC)k0E=uYz# zjvaZ$dz+hL11a~-n)0EWxPqh6WnJm8#d5z@xYe>L-?R7#mT*7&X|3X9`i#`-)QAtH z8h(P9(!0H7e9g&Gl>Gf`BdKjENjN8gzSoJPEyly!M~j3d6jVCaeoeiNr+~!kGh!fu zjU%3iOlOh!$Z=(*nUq|l+2Ta0ST%Z--E0H4ur|}9S-B`3@V%J!YW1LGe^?4I2=N2- zy3sB>?%DK6f(WUj*hr}aq#1QuutUP`hmj5>M}VG-VSdtUy(0tH30|84r)i5v)b z@X=TX*jwX(T@6QT{hp(ECMoH>uKGm37+(u_42cxA1V&4s-U6|n7|K#s(rNA@b0(-> zpH$rfYzX{A9wwn&o+m!b$tJM%On~)-&KwR*xsAW7CZun|5OsoLX}QS2Dc1v zoI}iW`B~nY34(_2+p0RGR&#HGx(RRR7e(jAU3Cnl1QRRctd<3wKl(k-I3oh#KL}z{ zo;Nl6gck$MgHy2B<~JbFB*NPAhwT*lD7!D|BTEZDplHxNJ#SoF=WQVK9RD^^V>Jb1 z@$xUg@RYR90EI}bpJ;%_l0us>b_-*Mi+VO7m86f)>Kk(iR>y*ng&l3mf@em?QmB@l9#Q+TopqYy^ur7&>40De1PwiH6;j&x<^)*pGV@xW8-N9UD zXfotNgc;gr{xD>SUlY2hz-g9AP*U&{Odlum9h)l3USsl zmR&T+u`Geh#GNRt-J@qcX+Ml(_c)56Q5qWndoY$k(X6`S16X)xb;kx3Rozn&Okvx7 
z3fJqjjuuL-Dj(ZYb4W_nkA+E0@k1#Z$*;xQ8T<+zD!0@pnN%2Q0E%fVDQ0-6iY*i*4q3 zKK{<*glg<`j-HZwKOMmbFT>i-+}Fs}Jy;?Wxd~CWBZYi^5*!&{R!i|Cf5gEk@NW;9 zL|&;tuO@+A<>6mL!_c^1v<7gIKSeFJuepvYcLzHG)G9_kwPh{co?2)tnHc9iaY!E$ zFE{?gLMx~IF43I0?DmbZJejZf-@15~myy`UdEjG$jFZ>89WkkKqK~P$o#FC_CcL6G zWlzT%KsI$UV7?PbKBgSpK730^7o^QvnC7+E(ODAXo56bGQan}miM_IqnDMH(qveyX z$ht^Z_OPym5SLy@BK^k)Hli$iFI!M(I?n z!2fylpWpZ@uADU?b;>X@Yn0HHX)TY(X}iMh)kYHClAm z2fyY_+)GXwdrTt7i~|`8GmBF~Tmfr5A7-}}&kGQp0rG`jpt$B+Zj%7Hp?#NZ+S^8D zgy!1uAzyY5Xf}~OIlx&0^eK}lf16>jA4Z^5swx*0s686rCzdwQFSlr*V~b}gY2s&W_l3B(4^ z+t-JqHX0b8AmKGT&#Tt?CWq=6zJna*X{qrgJD`=wgDZ*77xV|0_uOWiaw|5zXV9t zA|`_o%LI^2WQRWIce zMqHzdw>>vHjn?CuZp^C&ffh7?=Y7({U+{aQbHw9a9CU@iuZ)_5MCp2!0kJpbJTsiIEQjkJ{*)kh&@R~39+74IAF+j6!Dix(#NAek*m;5o; zki_ap( zgX>Vh4&(&l5iI?f71NM%)yvAu ze8>Qww$*U?V4P8^xz(#uKA+o{rwnloqMIzzI@Dgw`?E*Av5@gh|g$ijR`F8O!2uqq@YbUY3f z_ehyRF5lsX%ZtGvdX300$eHv|C|X1cO%H!QAX+1n^qKN8nVy+W;&5t4cWT8#H??>a z00EdxqzF<^&^FpW*2RAwvY9=5hYqz)a1PtU?RYWs;yS2U$#A*n=$x_wXx96L<=e=#GC*Oo8+ieq{@!+r-22I zrWx8I{L#bWUg7W1SfORqS^m+L{K)g=jZaT)P$UZaKAJzUl3Hh&<~O{m-8)Xb@5Y3+rdlQCe-*&^n)o$A31kr6hAJ^? zH_&c1R$<=7>rt|(>1rEI-FD1TSgfFySggSR_w2qSOFN9(Uei(j47A28>DEuY@4$8Xty=Srp|NiG`;Jh%U|^|2Nwlub4F=tp36>mFko*z zcSV(YUOuFfy%<|}js`w9U22tpA`Gg;sNYaoMF@}?c9~MwjmK@W6M%ge4oTqDBvooT zKtrH>>aEP{KFN|%n6n!Dc`ko=!_3+{DYV49i~sHck@QxnCM?R{QNZ)F8oRcqJn z;<4t>IY7pFD6V3Pdfb=;DEmsjW`%#uR4mxEk%fvr zMvHNki#u2Ow@94C5K6IlAfPZ~FW6)zRRdN-{3}d-4lAWMSxhbEFRke!SUyDqSkzXQ zHYNy&z=}%~a39X9kaK#S-P-o+6N+=2$i2mU8F$?r>EkO93qI+OIm~OD@U{nr(rE*_ zuv?rKz6ee$5Fww~=8Am40m47eayu8k%W`BrtvHh7^r2RCR-q#OICQ5p-y4QcDqtf{ z>FjP7`Yiw~qoqykS80#go*J4J_-uv1F2WNRuqqkhm>R&i3?ye#S)^!?QH32zV%ykk zO;&cXo;uPCaKGOk8W4RH{md2?BXa)6kP@!|8FMC; z*yC*PJd{XTXoo=7!XwjR+2nQN`1?Tsm!w9xW%!$1-4DN+Fb;z?1bm(VAc{=g) z$6y^RWZ3Y)>-@R$gz~rG!m@VWm#X7J+6IRgN_@=oU&=pJG|wHAz9tGhhd>+gpdgXZ z0P<16e#y+xN-)dei7Cs1N-mr=wFdH2pFDwuQaqIqQN*lzXy&nW^v@z|$r!?A!aKz+ z;R`;uB6eMbLOA0$4f-QFd;Nhj&HBBqlJLJ2Ybb#D~W8?r((+V7l`N^R1Q_+bxeAu!ot>QmW<80uB% zLy35RLbjO0L1UTX$yjc{xoSP;fgF|r*G+VXjXo{}IxN?fEYTeJ5oZDh{vjWA$l{+NeqBXI_pa9Noz3q3>trRnB_iS9@hy}L>lwz;*@Ob>v1e_;v4 zun-W#Pv1C0e|j5k4I14+7(6r-W>=|d_`wvksfDlRr`U1@`hO1T$X1cBtCXq=>5zqZ zd~sUJP>@;v^aF056K0ncpYfPVa#Byz>?Pu4Zf0q|q#XCB`o zrVBFp#I9no!SGs_7j0zd^+&3lSE5WW6_%DxetR1eq%I0#B;_2tn+@xW+8tN^yXM$> zLO6MYlGb=Ur>VJSpO=r}!B#D^QovdYyUU`{cUm?@Vh~aM3=iPtucep|``aSuL8>Ja z3oc8-)j`8-E?N5kefi9kXA3Cp5sDD_}W$#L~Xq$%^q%ZBh_TdfkH~?3N=1?6+@fHPKw< zp+&Dnx(ZVP*>g`+G5+74aM(DUBFMxTT+xzXzTdetW84))w4uKLNANXsO@1Y_eR5Pi zC}VC((ZTgbe$pKA3(mVdK$S;#sv%PA6ERxB05C9QV?h=G000000Rf%@M4#%}eVs6` z*Fhbv$wrI{#X@DAT23yT#(k@4cf9uu102B)F+acrY_E@Jwa{L!A^bslj&LZvo0ddM zCi#p)=f+`}iK<;vc?k^qbF-{;2M>p*(VYq=uVbeWviy~$H}S*Jf>s-wCZTA!WY-Al zjtIn>&h1K!6Xw?Gzc{Qq#7vE!LoJi$R}1#-_WQ@kQllF4P5JWZZ+g3OOC$Q?qsyIy z)5HQ!*w2{uBvJi=pKY1TAZuGjNJ}W(mw!6wG5Btw>8;&P$t`_8$f>`_Ll6#+SqNZ7 z9UxpKps!KkPb;vja%jgFrk9P)?ZhfpjFCMzUQtFT1t&KUjT z0x^|`_T47+vDh94Pi9=h_qzXS<525*KRf|R>j0?kY};lc-EHtHzPVf&b;b>00BO_< zUSaQE&_RisZ-d3t?LTbtY5QFifJkDPv9vB&_GGf?5kT8}TUrb7h@ z6ZkHA%cVujmbX?9%ZrnpdCK82AB3SZPEWA?9~|J_Pz2%3#IEyy1Kw!5FnI<0s-9F% z1n~;q-Uq&o0#hIxl#y+3^##57VNF5~GM*{6=3y&n5izlI+BDSQ%40Uy>Hdmg4;D{- zl645@W{b}_Kw4B)~Z z!WcqiuIAs~lW`E(!fxKQw7FkA4R1dhDZT0yXjfNGF@mtn3vT@{!P5oG!dcn%0}-;h~OMD+$Zr1+}rN#F?UT_ikM z^jk5OsT`{X>^~?{f7d)5j#upm`*|-c?~#8H=U18u+DnB3>_P}MehzMzbWa>en>*pmTcJ;v{Xpl7$9rLqg0OM{ImXKbd4Lodr z$)A$Hwn1>5)J11G{*B(?Ikug14cXy0g~2H^!3EB#K+y4-@@K#fN)Bw~koPbl1t7Wr zUR%w!G{jfjb>T|;53tVLDDjOQAsD$BTJY?@j16}!CD?4TZ^>K$e;@81e@NI9_9p8* zaz!u$ZmMv2v2N2j(5>B!;yCJ0V68AYAO+l37B6C@z6f!^aATq8GZ(Ph4+2|UEsOI~ 
zop4f~-WRASF1<-u8nG;_=`V-5!+}ZaJim(q7kp{$vZJ=DfcS&3^otQnABf=jJI9)u z9z7~U%Cjrk=388D`M4#C=U@yab}(5}Y3NC~a&a7ZfDpwE+3DtC&J`0ryJ=R|qlnpp z0cFoCotXe6)n&>FC)VU=Yf3gGidGa&(5S2wyrn@a`5q?4sP7fo3Hq@&H7(k}kJssG zd*XvDD5zncO%Fm)e*V{L;1`QZUOH75j6f8y9sK>NLsktoh?{T?Z&yZ3Q;{g-DHYIv z#>D|FUr_d8j6>9Y9WAw#LfyI|lK@M38XQcOw3z#6-SIK!*ZA9a7P|JWL)$yLu2A)% zRz)!`V(#g$AscED`b0*K&552TlE^G@(DkH^dmGq%kMlyNY!xIY*2jYU?8$DYCh z-aib8$uqAMUsj06XfUa7oxkNDcmY#Jd8Q&5K8A^cinl~g{yk{x+2Ldo+A-czGfrF} zU3pb)hOWktX^`Ab?GPH8kyrue+$bP*q%gPTfTC}1hDGYO`!Cli+6B@gOIzD2Y$47| z1gL$Cm2HtBOR0%>p^h;Va7bU^gL%+1iEv&Ud!YRa4WEFn@m@`(U+&~f!YDK{$7I3U z#-6gs_;?^2|H)c>4u4oWu+i=uSBTUsqjtkWJx?fu%^Layt~Yh|jP7$;AF?0@FtkTe zM;9**FMeOsuxM#yd%ctlC4=?`m4l>g{7FsvlwNIf`EhztlQ|)He zz1r>@SD8Mh>X{Guk5)R!b@q`KP|?+>kwgf^@2c5u7 zy<3*b)G%lzQWkN@y&E#Wi_teiAW}zC+Eh2GUsdEO?L3?PXtX{n4$?B9;Ce^sgd4OR zRybnW9y0cgebSee_9}&J(tV~I3u5YMn>MeGJg>~~7`zoLWtSW5gomHZPOY%$q?DqT zWO3V27SpSQENwe!wzSUzOhXdpuUC^qQ~9)<^jiMX^KZ2fBdFP)SR6^buhCr#($#&& zkFRq#%Zt+b*{GcnzX111dj;X6q)>UMduXR80vgI*uPFm}*#99QMQk1tN7%Oic z;)Uh5uO$uI9{z0aFeb7Ib9+Bb`Y1hy;K}KKRGR{H?8dnK*x(v}R@PYo%i_t4V$}(0 z>YCdA4IDaX61Fda69I}T5!IF|;9Up9pq#9heY0T6jla}W-NFwoTxgJfVcVRV8Hc-D zY<>U~gx>2LB39qj<55Cbr8bHe17(3jA8QO+E&+_d)vsS1`_pZz(~6nQT`zNCj0XMn+jIr7JZ=!0)k)A}>1_iTA+ zt~35$i2JfAzuYr`BvfZ$A=31*pU^UbQ$i-53&DCZ5rdUSqU8eRX*R;4^%&ksqBNI< zT`e?W6RE0<(COC_VHdLWNl6IouZMydG1iW3u$z zA@-D%9E~W0_GnnbHKuYV@Fg9t3&4Rn94JmYw?ZG@M7;h`6$f0=HP#`tg|U5!Q48=a^ZchdEH+FVKbkd zvbIHN5_(o0P@HzAj@QmN^`PAKLQBO_VvMon-^wqbcuNQ&EpA(#>6c0OsTPLYag^{T zu3TmtY4ijA14ofh=sZxY_HjCC+IhS$AB5aES}e3}upsj2#OY#cEsOq?G|$IXYG|Wh z0HbPnrG^L9QT~#>|HnJ)i4oyFH38sD&>lo&dAFLXMH#CB>fAfL4*ZQD9AC!!2CD({ z326~>Acb+vWcMkXG5+J_mMi9KIWlG(E!+NAW=mx7L7*tS6;TOA+G2=-Fy&li8>k@G zEaqlTr6>Em2`{cotA7TC-Ui+CgVmmtbZ9keyz#oUJLbU!o>q@#roH}vX;kalYseB9^gz{m+Z@=G5l(>k(8&LP!`-;g z-gV{Cyu}>YOCbA6e)i2k8-Pv6=kB^Di*@F=3?vQE{Gl5fd3usEN+|Fq4+y;ZztS4T zk(gP9Js%_We+4EXUe#INsT;)NVJAysUrQirgdPV|m6K&nepkKL|IdCj9 z=)vvTi-;V7g&)l;KFaav?%$pC3@>WA#T39 z`KUygAAhMJgP6V5??ErwMhOc`l}LVIkBMF=Z9B$tF2V%3FRlUunNiA=2YAV_oT%cLem{sE3c zh-~QGtOP&V#`_a8_5M#&cAvD*Vp#q`b^^-d=--0Cs0$D+v^XUJI$JuvXt&R4T9^5b zg2;*pe4~(?P})M2QtJ^$<3%LlZP5{J0B&w>dexP2MR_Jgk9tPHwnGp4WDnP{spt9j zS%QHKgUZa0`AQ5x+bmxvcESdjFXj{}+ngXHn|vSmS3iVx-!Ga}v0= zzi9tB`Q7e&J#*69L}Y?wF_H}8%@C$EK^7$cp){+G=Hn0HI@6pg9={ASn@QE*E7qvM zK=9h4K5f{EHHl55#3qG_%1p0VJbl3u@yF!YSOP!(3GFcm>-tjh5Fb3 z&ZiAArNaSASY64%4A?pNO{9PVKL zG;q%VIn9ov@WAria!cVWIzG$288vi$zQA@_HHN=U+Ka}kXLMII?pOOzAJ_$2h2>o3qkq*%4KN2Ox&T0$5E#?foL=CrsYnQlIhX z-kx-&1%>Noj>Pjo3gIyQJ`TX1)Si)fL;Ja7WgH>;nhoU~F|i(^V2K;%eHJ6nfN8E) z%rc8JJrL+i<8T)cbi_^3G4L+`%=ha;Pf%F+TJZ8e0s(0kxv8_sP>6h+4#Ig5vxsbS zbLHr6i|$EFPcgL665Ye zEnt8SJn{viJU%}~ixdc$0sJ4~4krzraWylbL=X*1ec!1|@vh*qjQ@BMgBeXnb>jdS zm}c~kV-NY!ubd4zfRqd*9C)uShr=<$D?2d}3iW^-g(W1_PilH`1pw6o?+n7j7daMG zt6@Z9xF2%2#`*=-Y(sQm!w&7l%0pW|G~7ZbA0ff>zkVR=F#~mId9Mk>zaG4jaaO)k zQrpFMYV-3CSOIG`Vu(#EDvQVTQcG$Ei4FSJU6KDrm@MY`Qf@@E1j@blMisLbA5)lC z_Z$Cdia979n>FAZU=<7JH(Gw?X>{wt^9kQnjy7mS50ghGvj4%0tlN&tL-F$0-ALsD zP#|<%>H8qxwwhlLm5mwcgd$lQV;3w^;p$nw&Roy|pwi_7HPXz0I*)jJEJp`zDjm7~ zew;T}s0M#D2CmrdoOuriDd*q+r&Lb~Vzb@#l?~h};q~oi0j!OxhGcOdPO1AHFHsV`sib``Je9-Luxv67|JEmnDsmmDo~yu-MSFxaN77Ig|6Ihl{yOa2Zk zU9782`uj(_0o;dt3JEoedh{qYK=HPCgc+5UsmW6qd`9A;w)_g>zv;ZBZGGM^=!>g& zXZ?do8B{UlncQ*liX++zJ@rd1XwMH3PEr8zdL|xgbrSCTel$JaJh&~v!&JOJC-c8_4o_a(-CgNEA33;O(KW-`H=?fgjS-KwR{@7W)uio_jt= zdRIc^jOItE2canp<0#0;zcOZir7=rb$3UU#&P3saJ9G zM_!;~PxJfOy5bYdGiMD_6Fe&r%96{JH=QyCIZR_=FxNOI#SbdpUDWq?+vL8kT1W@; zVfB;NDm9R6&#TICoI{Il+#|h|urbe7F=mnLNhawcmz7FSV|DQ_oXbRP0^`=eqzsQG^6#TE*zb3DX@f&PxC 
zv_*;1lrx~I^8mnrn%C@ML9?_&R(=%-wp|Ys)h@5kw)Zc6RMX|*1@+!)(0x$)w)uM5 zzRO6WQYi@E5oQkj-IDk})QrC**UH0lRU!dm4kBPJxZtQ)SG zFVt?FZ8`O9E*(~r{AI02nX2DoS*=)$&420bH@UfncA$mKfmA;c97((;#@P>7^k-4E zDfQkG@wo{STn+t-@{PVA;4g4MCCEzzLR9TOV=Clc}cnPzpZEAm)@53%i*_7 zqHjWX9t@Dq6S+N;dH=(bSnG44gUm!^=v`~M3dvco9kt8MJyl~D{Nq7I9-#0x?>B0T zeS!_`%d|HzC9Fy3`#vj*!*HNjQ7C-I(#f|EX=~K5=QnA`i3;91Z{KDXr_0wo;@dVt0(ApmzKHM^TyIV5?IY z($gZ@anVG7!}>b`+@sf`13H{o1AkxFf*MoXT)sXoAzFN1%SA%`oYby7SVr$Qnn6el z=c&cq#zG>-|M=r<4ut%el}C{g?{!n~{w{{y?@cbDT8RY$Zjs;g#eny)B*~-6Zm*XvXu0Fl3nSU#9uNr}xQRNVuvxSf+CPzUF%oi8(ecxUj@s*9mPXq3PNTm z;`L6li%i0rPTzuSf|J>N-*i^#i-oJE(fxBY3*wT3+To1wwGc1UlAz-1J6akl-aQPxHD`YeA01$CZeZH= z(xF0~;}55S#&>m~q(G||CeM7;0uP}AK$fy1qBiDOkuGV8=U%7dDslaSFna2L2|EtV zuB%z6ct6h5wpoJwihq8dXrXG~2P*9RC#xyem%XxtKTxp&Jm%%L9SLfh-b!C#mr+%q zG81d|XQLG>2nbU!|HY@~uolpgE3n=-UWIlfs~@uFA@wsBEsoUOqKnyK&a111WW)^{Ei_wNSyZX2XL$u$qdZl9Iy40 zW`5>R$VamQW(XFf;G=@jvb;0x8O~%{)^h(RGsX;R^dPH_7;uHR`-Q{G>o9aHf0=;Z z?_jS3lbB1^M(*3Ze8{5oHcORy|91!M9K6H{UXz4B7dOKn7z4)v8-U$iV569U56rdc zx^QqB8OsO&rVZEWt?(^Ca@FSn=Wcp=p#Pi46xy_m`2OF<)vSWK!!WfBcK3BAUH*+nO2)P~M294Jfnin@D${d&K|sqr^)B0$D_laG>C$w6k6Or4d)MX;(2z2%{Ml5%xf(W>Hv6Ne=AzW(nB9_NJTm9- zSzw-59sOZ{%`7#aGlN?L*BWqU2P{pwcoqM704Fw$Vb4YX4I6vNXPAJ}?@{uBL;AJe zyoD|MK?GI`fFRvYl-cnvh>1)z!hloh#^HO@j9BB|^MOSKAefceYm_}meS5B9jaOWg z4G2_&wsa^{1a<6-#Qife*)) zDIhU8){87t1bl~q8!eA+aUQIlfXId0@J72#dp+$cX+NEz8=jW$s4Uw_b?kCi#><~n zaZ-ki7qP0m^iV4R9B%FAD^vJ~zQzOSF1fn#jZjE+WA6_Ii?}j@*rTl*2e^5vmUqYM z%;TjizO!wmSe0sKY_@FrsvwK-zC#RBD;E3BgR;tXIehOSX%5QUZZRoHG_lD2PHtBx z*{Q=_{-9LN;Nti`^^)Sf+xQby16MgNkYD*Kod(-3E0Ls6T*w*=W5Om(mGOB%7dZ#WY*NsL2t1hg@4qM#AtUb3mcq-3O1BCczIi!8 zygwd8z-(b5sh==&5<>7M32ls0S_$k6Gau0q7DDsJ!D(xC=8ZXq#>yB}Mu=)u?Y>*L z4BgMYBK;Lv%@D6$!i*!^l=6Jy3saiVA5Ptk9!T{S43CkT+T86+9R2%L_$o_mEZT2V zqr+hSruqESdCtq7-_343*lmI^188=;n2#I^j6r!=Dex^pDwRUp$$vA$LK0`cOEc++ z#0eR|QpD1?`^c)6&O9$W{mx?G(a`xJz0iOuQ8%g>v&MOS2 zz%>=45XuauyHw9`PN+`Kk*vZ&RZrrA6>Lv{K^>*-PdbtK#$(Fm$Q$ajgYuwv zcM(t8^@&%@@qO`U{Fe*RYyDoaJ?hp= z0`rx1h})=Qu7Qg|favAA)D?wQw#0uyfzi0_&r=+0hKFUmwmV_1^@n^wfgPMd8$97F zM|#do2>*5(8X-^aY*!JeAAl8)_tcbA>PVN?H~xiDgP^;b&hxMhy{7Vgh>LLgH?Z~9 z?~HBXwFPEY%Xu?K;QF<<#CsIKegGTzu}}B)xMoA#kU}9jX=n43943`D$+dkOu`*K; z{|A|mK~Hp;Nr8ZaED=ne0%X-7Y3{Wr%x%a2!KwhFQS~Jcx4yFj zf$(hd@OISe3@=2lPJn<=FA`DQ;;}6pflXV>H7f73}d2l!?qO^7Vpk@1( zR=2JsVW42BML90z{+K*NfI45eUIZWWu3$XMeH<;h(+niBexfs&#Z;lE2$Uac+RbKg zr}(5tYwh0m+}=MFW)iaR)}Y`)K;D1%4<$%tP^`j}+RQIzX{!mIPfjE=!hq|5A0oBg zeDz9TBo-}MTgXt}Ok9^2QLD@X9Mb>LoOXty&NgSqAkF;o*WO3YTLjJMu~uJq?SaHf z4 zJj`G0@*vPnfhE1|jNhn4tPy9bbYceL;4=DH6OWcnMEbVAk-Cn-1;Ipt+{L6`P@Cxu zZ_$b+qG_C(jl&z|zKqU=GIF~?1|HL9;w{b7`>$NeQ|)*CzgJc!sDJaFppN*r6qISlY(8}b$SuBD7;3+C(%L1O!g#k6dseve1$KMM zhB#*Y6VjsN!V&`LWvv9(QN3k2YC#aPZ7`h%k5c!4F3PPlMC7L0s4Fe2ig=9e@IZ_> z30RN8Kfy+)CpS%Bs3Yu+PbEU1Jc4mgXlMOr66(^yV6;n>bw3Gy+!(J%bd}h}>)=#B zleIKo{}y8OBjy=CU4;mx=$Eq2G2uy3crqA_5Eejz9^rG>VdDM0Y!0;EZ_J~Jg##UW zG$@osq9&7I2X>STlVDteVSX9*kJ|kmUjS@DrG;qbkC!A|8-UG(2v@Q9mAt; ztA>(nldqT}rgRZuXGP)F4(9fdbVOMScUe|g=nA=YU^_%b^8~WyF6Lbk^yLlVMre+jX|bQCI2@I3ZXy;6*dC(KRu z_;M_T#>>0F6gu*n9(=F7aY^``B=y|Z2Ms%*6z$K3TU9_#xnV4L_S3}1nHTeu_lqo4 z+8J9_>1f2jQ4<&gcN=B$3G#XhPOqebek4$AO#&A^0p7mVbXuxH_#bN({$UZX$(yQc zXKs`x%>D@hM|s?T{+s)zFujFq!rpS^K+B;U;nE}J$3w0@J348&E%e-)*N=0#dzv^~ z_q6MBa0My!*_uI7$OUNlvAGg24od!JD&kLAEvtBzV?13!xIy(#&!08-0I;2rQdS^Iet0=lh|fk50wLv342WDsgujq>!~$kk%6^lZ@W$`DMzBF;&r z8BVfoTHm-O05W2VD%*QGyveB(cXO)9c)$DiPytd2nkre*@-Tlcd$q#aQ4QZ{-B521 z;?_ciIu@_ebBxk?+s&I_Jj(dfNy!j&5>)rNlDAc960Zj)pr@uP&(k7_@|0VYysB%F z)(qH(|HZ?_wo#}M?l99?UJ@+B)v!5QWDQ&s5htQH>E~nM)=3^4;C+wp6joQdpK>np 
zCqxz_#?j7V8VmOfRccZYc+`1fTPXV~9!ngPp$;yHdK6jA9SdhVlXcF5#dXq!K=0#j z!6V!TMrq$?sR~S?Z~2zz@UGp;eOK9DO*l$UKS5zrRGjmz{o)p=l(e5I2ReCxAHY>yBVF%iOC*Wir9gBgzFpX6u)}Dr-)mt7e5Tb{fB7hZ-?* zlr?rEuj2?Zz<>u5`Jp@aaD4j!&x1YcNxf!~M`6@Df+ z>0`$9z3|$A>fYYP;Vu_j2(3bW)h}zOhH2}Hv!?*~8z(e$yr6|-;Dy2auy0O|)Rv+0 zAVOV(=Jp-AJO6Y|g%CC}j%oLXd>((1Lbr2EAU?ejy zFyi51Lc|~YjgHLzMwm(8|4gEo)Oh&lhpQ4r_mzF^RY;sL+GK#lh3g7pov-1K6@few z`pPhA^SPtjj$JFe+rRo={m0e)|wWnD28`;PxNTK4pp zQ(Vl|7O+81r0D)LX#cVto4$pxt{V>ql~mu%JMQ3>n^N&ba`}+_aS?``Ql@$6SOLU+ zFIB9Z(IP#`No01>%wiy^^{qCDlvIl{D!kZVe@mw=<^mv1tUS%lyUepRSc~~jl%%p8 zi)sj>%In2FQmXtJ=f0r;Ss|aH$)YzIpVjKx9zuH^Xx;a-h*G5W>CXMfluV~X1A0_6 zVkhyV=ajW?mmG%TE@hJ6rdKFkt)WsGt0QQ%0zvjKefCGQMGR`}C6i>jAyCr;!C6Q1 zn8@PPDlm~u%Z#vDHiUk(VzbhTb=bYI|HYWpW7mFygiaac%#TJr6mtWj(+I)>r8 z%+3)|oJ$b5=EI7-Xa-9uk2p0Wulv(AyZ`ZabO2e53cL!C0o4kpSI_>1l^-xxZ;I-g zQ3r^m2Wxo%8x4%g# z<~k*t1ec_i*Y<^dDU`CZ;G&aE>elysT6U2Vb?*VT9|NyAQqJy(ec+S@gr}w8_rjSP ztyO$Ec(WqJ-6Y{Q7olNeHP$Nd3;t(NiBq2oI=Ns_d_f$G)hq<2FaD7ZQ;&9arm)=| z{ddd5Ft%UJ0(#crGCK&`2VOAyNwf$X{C|t>dm?uD4t(`ay-4}v9!g8A`p^&1f>g|S zoYsa`O@L}Nkx9H0Gi2_I-+K7#FQ+CxVy~_vKVG;wD%36vwlQuy0vVtyK9N5VtbTCI zkG2_EvmE5KPqg+d`+}2CmRjaQWRk#QI&I=R5pN?=H}`Y7uq; zZfBPYA170#4(CqsOctwy{f)OTU`3bWQS;Cq4=`#E+vrpX;~aXQ8gO(Lz^L2h28G_s(cFUXzM1qquH2JR*QPrXOd z8}%^aWH3r87d9b~IAv{exT%tnHmL`UxJXt0F`Gdz?h@fIH}jAQ279HVEeag653;g1 zi}jzVm~BHnZ>pK*ywEd*7KP~?W8?g zUjGAwetz|+mq;{e9*gG)uv;ivbAjYjw0vprxqns;bIf4cRt;`eDH^d?^CC48$i645 z38w=rtacXraWhe#)Mp(E;dP_{YDUsHn_ibt?a!zB>DcSR_+3ncBsGc_Ng?u6V(;XH z9wGKDR-@d1j#4T|BJNteZoT<9Ie|stE7v~*GzR4e+`_Y=v|rL@@Ed7P3l*V_fV?Kl zbYU__Wpnb6uiwXJOZU3)n6!T159=#-zNl16XkV^7IOfgnQv1#BhR_tWTYy@T=27O1 z8=$Ngwmcfu(=C|l=R;Lx!o|@`nU?5ewvw?3(7%E71Eqoi@C7n1F$`-tdWdFQy0+nd zX%S)*0rsQ9-$XCT?`Nv8!)4o#a8QT^7nLjeIs^SbFJ7=!{NN@P{+-rzKCFEa|Hwbt z7V|TTeEj>0Jfp7v3~)R>dOO?Gy&oc-UY3L8+t$*bgM9* zM_pfdQjhIS0@p7J@>VUE5}K(PKLc0Xn~RsphZQqajEt@Z4ma11feK}cAz!~SGjDB- zy_FzGoP1mV15(K?#{r2#Q8DXhR6&)-gLfNmH}gFnETVh|N`5zAK`01~C*?`_-W~b# zc|PNlCGFv}Je@<-_a)ALwqhZxsIHs;4lWIJ=}T`YKKrKJCf zUni>)+GjTZfwjN2_Q<~B$%WM&-j{P>b?LpxviI}u!5`H@XDUDTUs$f|a({0DRel*g z;lpyCBeVNr;YhDSbk-$Q$_E#o?V(ezjST6b?~b;pqMIX;vIIk^Q5kP62eR9HYZDnx z%v95?sC&<&zOah>SDLECMd+d!o^o>>E;_am${WDGK<=S6FbRW^IQTNVe35>Sc0Ytn za>EQ}q5n_)pN+-_1AiNlilL|0eVQD!S3#Fs9*gwbXiDYXIs~!)B{Dwq&N!@ftdeYK z1G-RWuw>ekmkaE+npyS@XYZWoYS42mnGD))NE@$E_gNp?W2UX$EatXvhVIVqVN^8O zGv@nA4wt$j`p)kl5nY+ceel+VA6VP_|0bUtMKf@a$Sz%@lS|Ez-`0shRs8QI%G6In zx!ah|R2fx#ekU{%)`VA_ykV^grIl{HoCrZCxr)aj%jUU61b?~RH_ixyrL*#R9EZQG zw`-C1C)PARfU8JV#u3oAE~4b@(=+wcUEN*L)@(mAVP6QTuUI%s4BPOi)X~*?7Hq-8 z2@bG3(yU<(Q0p21;d+5x82jb>oaDA%n^i5w21|7x5-*GU>S(R}>jV6d0%l4pps9PPFj&*m ze}(?p?pxt4z#eS_N7WxEG$Hg_m#;HD&FPeC6JPa-9{?ai2bUALbY z%cP#uIM0bfv}^n)K=#$e2F7hc^x!CEzHK|7)8C(N*yWH65&HtBeDg@?&lRMCxm|$J ztk0>qB$se;l{Ghk0Lf+eY!L+#9>DuOV>J7}@&3H5`wd7vMHkBnd?XPFtOCsz@^VMF z$K*6o1WMwyHd2Sei*f4j(6?Z;Vo&$U0T`o{!-FBvh|mVTkVXY$Z+{m`|0P~kgPR}d6=nhZd?u7NUO91 zXfNKs$=t2tEAy9gRks@{4YEb!_@6OCm~^z=8POy_f;0szjFX}Zi3pW#0UVg2=xm08 z)aG*pcenHQ7L%s5M$PD4F`PSW&Q(5z;==m9NQRDtR^CP4q_qu}%@Ho@n?ZeBLd&mU zU|m!l{=U7F9S7X7$M9hpk6yA)fkG#{-5F#)*fBS)oTgNK%uyjxiAult}%m$K^(9@jlH}sq}<Av;K-8?;=O%r-*Wq0!{e)eO7T%U+0EY<1iXo>=I}w|H`T{TQJ~D-e+kL6Ug6o zO?N(XTf;1>EO>`Bm8BhwAsiy$^||W8h_cs;kXm@e8op#}S&j)k9(ZuGJyk~fGAE$-_?XDWOK}0B?V+Ma$t<#X%d^`Edspse z`|@(VRKFOAaJ=U&Ojh;`+5pE#c4{73_r_a*xZ<(SQ&^YaLYfG%xEp!ZE}xal70607 zV`wQoG}(>{eQw*l@NVMvmts{N8SYM*yh~~?NBA^ei5h&QMUC(MyEHU4m+f1Y7#DH< z-*I_HnjWQkZM%@c8(7Shpq)20|B@CrYc%T2auZAzS|`X^h>H)QJH?T?(@LX5g|B8H zu84hd6hRZNL+4O7+g{$p3gQYAOiS3EJNR&=r`8ytp<^{JBQN6PoOM9UMdCbv?~Po#R*Ogh-br z3pNSVc`NGk5B2&0uhHVcuVh@}=hsI9F&i{ 
zNXD9d@$IeEVo!t!^`VOHHJ~Q2c_2h9WU16wS`+vATLHRAf8Z6*4e>#@FCFN;IJsF z=ykC>l?GX9nCEU^yfW=$JL?WEYQ)vTQ}pkzWW5jq)dNV+JFB?ts630rnwL5!dY!Dx z3UUHkt)BPB{FCkuP9J9>q#nkK?iqK{w*ZF~T2{&=({ww+PG@~ZEA;WUS;8=)00`0g z(Yzk2jQj-}XHjm2O6A0?YYhszi}jq_5YkmKZEeqrP-~feyB&x5m^z|WR=`r=Y!6p# zH|{PGEt%%$w+ycfmTlzvU1LA9tQKK`177>$i8z$ldl873t)od_xKZ1NLG_DhSPLH+ z!=r&%O=18t_Npso@A@@{B(=998G7}Dr*#--RmIt~t zxqB^Ixqm#r^~8d7A*ysvY?P_095o10#8l!O(2(?&IGvt5F4)#<#D0=cDfyPLcU@4l z5mL4?Jd+G5zTcc!hQ6Q14dOy#9)eJd$&dsW>$>h=L6Tn&yAm$F4Z4Ab`Q{jo2p~dM z?N5%2&qie|(LJoYNnAeHlIcu*w(j>ay`Q>)mYx=;-`wR=v5XM?um4VfDr}r>3>?hX z7_dC-LS3fSSy1f_y-yfEl$*rL8((R5;7s2&JEO}K6-B&Rtu}FDK;^?2b^tfvkt6L^ zK3so6>5GZKOoU_atr48&_jRl|JP#4kGv1>7oAkkxGP1EQ z6+2-W(gh8O08Bev{?+W)osU1`rObw8_&Pr@A^3R}P&#{Tt;#4Qkk>TAQ~sg8U)v1N z-R##3ULlqa4hL$QQz@Wv`7ar^m_4ia$ysV26WVqCTUDWi+p-Y{X2pF_)vWI!b%wbUF^}{9pCOv#xv;A3on@ut1*XrZX1D{BgKGs(zDq zXYNG{OCa6uroTLLCR(EMaUZo!xMc{d#MtM`8RriYZDt9IaSPQ%)ItUjJY^Ikvt z#I*Yr?0~Bw+rVfSs1Z(Cx)&<4)6@s!sU&ylQURK0^h{+~#gFY(KaP)s zJ_t^~b(bN}o|O0tRCj)7>ICs(T4kp zb^ZvSaX-`WL^5Gh2MJ5plO^9!zB8G}1j})iP)2?WTUUpZ^)`{A;U87C!m&pX#zYJ} zP6$~7>h_1T#I?D&hr%3v_vCNui0-wXPg|8Ge$NGu<=dP@>*uJuOGew+5Xw?DT#)AdsR#he`rHKIoa;ottn~CkgBNkescasd6Op@E^lbVqo&+EYpuYvQd)G8 zOc(3NMjm+l?nm+*B)7{bC2B8e%aNj3w%i<8%mYB`Uv5_IfNtRPf=gn#FCUbByodH9Rwtq6aMOqI5;^@cMX+a8 zc>iV{AGI(7%T;s2uxe~@z>|x0`-5anC7dc+SLXM4Hm=0qW{FclCU*ShZ9X(iTv5BZ z=yn|D*xF{42ycc*rcK4BmY^3?x=HnL*bAY~@(D5Fyq^J#1p-$-&@yQ(?9$UEe-h+m zxNlcz)LK^pLQu{^O<3oAqQ0a7@2~T%DwOUhVs#a5wGzWf&2{F$XN=sApLZ7GQxF>J zW3shm(gr`+DJ?ij@y@sfm~3W!2OqI7sc}(b`;m)1RZ;uO=17zqW9(@52z*%?Jxp!W zcWq}E(MFBy2QC)M)b0JlCrj^FoX3Ix#)2w`=tjF}p~?*!eNj|Epf@0BF|v(aY;+2_ zNXL8<8C`uU&vop+n(U5gKej4VB^PXh4@k$i-E2qVu$Em)4Quwv*wd*6M7hT4-30FW z!n@?F!bX4RbBprI3^0$%v*SJTgg?Zh^w&Fl(vv>k+_OBXvDBn)CjRc2ikD@p1IxPV2RA4}KImI41?DiGKyLZ9biqN53*;B?6Ocxf0dnWL4JtmPti>|M<}lG0XxAJQ8TH*SDB2>P~MG#ZZ6Q#(AS!)N;gUb9P5I-o8@DXd-~Sl6YV{ z|7`es!j~(8x)hgRA{z{(uK0E-GCi9>a)sbwuz&{hT}PKdE+sb@CGN!v^M^6VL>lB% z>51Uad&&A=T{*Gbec$Dtm{3|@^1w4COco~oHHqkRaisfH$v(qm?xjEi`Xx{}>=D#u zuVgQD4(?QG^TE+OCQdPl=yyk#N7#oF|00VD4i$Wto zWiiTiGEvPlrHoaERYhQXrqQ7b&i4N1*rUA5U!=2_WnOFvon5=<~@$VXadc)F~OH|Ua9fqFcTY&9bMprE^syk7E)FOFyA8&&?J1yO&S;^s(l-~0S#*L~xP@wL1m(1; zYG^bDZTa%AbtfQi((rVB=LQnaOB8c7zVaq2chK|KAaa;7iZHi8|2m^`w&EnWsQ=8*nNP|pqsHu78oJIjS zV=D3Qq@NOym$RZUN81qg{!dN-cDOnu8OR$89*vhnfmuY?WIC6@U*XMgQ0NJ*tsE9O z)b$w-#Fl+N2JY9JANHw@6bXW@uS9s=7g_KpXo#LU^67ji<4qXLC>u$_ZHzC8Doy^O zs-2rOedNbNqJ?ASm9Q3L)mIRPMKJo5$htSUlW2ExLqXEM+w}9T*pikTKIOaffi@PM zmK$LImnFT6isF_?(cX(sys}@`c*~Ly)=)Uc5o!8j_sQ<8_J)jqf#?zmUc#r@9k#&K zOpjeSxz!*VTxmUs4Ss0>6RYU#0UlsCCoUo0QJmyv>euT;OVb0cLQ|e=gc#{2C6NT< zz6`DDQr^J9>c!g^<`bE`N8 zWb>C3a;{6{w}}52=s-GaoZ|bO>pRF6?dNSvrrR+%erfz^lYu_YCv1S3S)p*YZX>`E zTXF7QCy2R+z0Zf%$_RONFllFVqJF6c#`Q`^?DWZA%WKbxylP_}K)pEfR7%3Wpu7ii z-@QfKdbw5V>A>&rpb$;u#0XwmMUA0%DIu~CVpErJGnI`vltor(;7Po7=sHkexJF#_ zx{o!hPqi<+c@(mVL{&G}G;g4cZh|c}S6)X!`g48I*RmHeYU#bL@Pwv3@$3D+XPuNE z(6P{k_Dp0}CSb;?4Wf>Od-q)RO)qoZ;#hKHWF0zWg#|X26zS)edrMe8&2ZAg)rS#2UR3Z$9IC@jW|a(Nt+kd8?1u zVM3YM5mn=cvX`PFzI97+Y^!Ye0+$ItiYWF5WfEj+36 z=-fIcHlZdohji%U>b_PDrQuR<*pF=lxue+a^vAm_+%A(Iut8J%35tdp*n#qDeZ*Y_ zCZ_nUHq#Kq&5+IfrJFXO?5#}_K7B3)7r*duJn}RC82*MBns~KAN-%f456hB>(QI=)oU{9LgCS)-K4{AinJd4PEZ+ zdK}CMX-;(Y_8?j!vDcM~!iPf+nH^!vda?EQl#-AL!~TF+-gQZ#4Mb?rX|AW%hIL3R zqJ`PIwME)cDjq0(a9{(H2y?f(DcP3 z+Sg$KdHnlFiUF?%Bm(P=VsU%5u8r|>GLsjtKOqhOZx*FiGGm~Reu)0+|>{-`i zCy5hs zoY@h_&eHnM??}(f(LCZN7`#1D{VJHQTy@_=gXvWSIuHS)Zr{~k)^Fs0{)hr64cql1 zXpyM;mM@21E$SVo46ypPE>cXy{Gz^|?lTmDxGT{Q`N62lZcgmjoWW>%6o;O>WY7?RPIK7we|MWx~x6J?;svfg>X?+iJJw1-! 
zd2bs*nux#gl|&a#)s8Mg{&+a55Aw~5d*AHRz12J#iJ+SXfQeuUu7_~q;n$s<&A_0T zEqlr`sZ-XKnEg#CvagjO3i)pt|51rh&r>)7**G}9f(-ePQZePeneCoj!^eXkCzd91 zHSwrHIIL#0?dggO{{Mqx{XmoLK1Z9_2wq2;QLJkhET)nNK^T!GTNyx%IgOa*fzwaq zEPMW@n(kL8bUE|s`82voxVtg=8-N!*f8POV$9^j02ePgy0@69=2f96l&rv7&oFSzf z%G`YX)fZ{Eb2t+?qJy;T);SnXSaXpstLn&p_&X}!!YRZ}5&iu{t*@f!cq1RKf=Zz+ z$;;|RtZ9(0Eb?(KJ8VhDDGA;}nTF#{&UCaOYU0sZPEvREk4Fe@5^mV34WDIHDlW{V zsLVFUDzxsASRNjy4CX{2(^ISlM?d32>Q|d=);D+pY4Y#>@0(3 zNnAdjwx$V0>vt^Bh{{Ctg#h*Se~}P=s4{6Ed`00{EYR(UbK0_BI{NPg#9Qk`z)ZwP zAkZUN9wMzH%XD{N6BJOVLPGl~9;@arhcZU_%xVKU(8pg>hsF74z@@3#)#)$C9k;xZ z%Qw;e#E2SV-IcCWHi;&8<9IE7wbZbd)J0A5nZlvcLnh%ak)SwT@p8XkJ_XovNWKAK z<=+}#-u)Xd8Ue(gUMRN^RcCy!b~zx3Z+Ajd%29!v(x8Ss*^;_Q9Mggd7GNa-QgXrc zn!Gm=a+7rb{F`H;HJ}+somFjE-RoF{jAl;6y)WtPYqn;9&C!J$`_9E#67A)Z(ArnXwLU-A5&DYi^uMe5M)@ zMgwAZk`c7Ai(5%2R?z@GwwWPg_LL+~NR9P_9SWvBWj`#?t#wa;CbdMPSF36BusA;B z@62yztKhtlhZ}zpg~|V{jX)^j{fx+R9yHGveFd01v_vTobX-(Kf>_C3sCC1{p7zw*47=)<{S9quhK;Zu2<(>s`S}& zn?`{Yuha?b6*rMVTc8y$;g}WB{pDWPu8IhO2T`HoabVINgKUbD$OwU~g&3G73d#OEivpRa&>H`6!p9Vr(L!MY-W~Cm6 zoT2H?T#kBE!FkXgc~qF^k#%t+qJS(F6p~!fedZmJ{(FE4rKly79;x@yNJe{S?xf4+ zHf?2F2Xi+#Q|aAlMun%KkvGl4rEoEqUz7mQY`HJ z=oRD33SMgZhgjK3D`3j0MXXCDzZr z=aqHvvzKD2?+%Rlf4gGRExrVL$|qTD0W4X zg|CE`OTHM<*GrXpW!q_>lvbg6Xg_66HTvl*z($902V`>v&nj*~H;F_%;=TV7MDxc2 zzjV0$6#@HDu(Q-lykHLx{GTvCF>RwL2ORFKNbvUtK9sR4|2z`U=_X+^C1Q%OQdds{ zolRa}VTYXM{)rpU!J~3nncX&h2wDBA6hfalYeOxCI3TsiyuEpQy_f;b^?CEq6GV`{ zthp~%w~pAZQ(*$Tkz%4NiR1UxJCXAYg5)krf^^o}`9<7B=w_&X$$t}Kn-X?n<1lWS z&$pBjrer`FZnd?sJ-{bpp+AQLIr23F%HoFnXt9WdWb5R8%C&MHo8-=qQ5fMArv@U- zRqwc5oBS0PyEIoGQnch7!SjU%%*o|N+rK4xH-!Gjr@A!AxLL>H0jGsx{9|il-yh^7 z-9ci@C710!CZKosU8`k=NRluMPfz~(000-f(Fa2y3EyWd^Q44dh|B_v@1>vpFI&Xo zc=xHZfyRUzXmsb6!$lD7g^2F>U%SQZduwcq**#cUnMFXXggA(i&ERbyrF`mDmf_J~ zZ7?|$!cum9@7sYOgd?hdeDbW;G6~GN$4XKm3|!Nj?0}Ui5AQr~ zc0qpbc;bwSVSbv+eXsZF^W4pj&Bqrw2`D{EP6Az#M^7AjH)7ZeLRf@w`>14L1BbeH zly!%%AehoIjCZQpz`ci|i3b}VZ=QmemQjCWe&9r@1SKcNWhqkmpD%Ey#Vg5jl3L)S zOt-IoN@R~2);DC308&WB+zn#d!ZcS#(F%`oKC6DN;6T!vaX_>p{&^(8^ek6enw? zw~V(5$;W@@?DI_d(WI&2{7(M~HB#D5dWh6j-t*jc>`wK?PH}S&cWXj!EWMQ1)*|+9 z7*ZEY#73l<`hCGlh^Z2$H8-&Cb;P3{MokTG-J&9l<%ON&&kZ2Akj#`U2`fS<%0EtG zV>x}RAEw?$s8v8R2 zDA&M6h+UeHgJ0Brlb)fI7weB*FaeyL(Q0x!7Zd+ESP(Ie7hr#xyU+S=4hqoraM9|Q zw`cm{rH}wXZUPg5m~3VpTVR^jP7xmH+%?8VhdtIQa=(fWb5|c`zP*9us4zJiZ}`>4=1AU`mcN-Hw^vS(A-DF)m*FrL6>oGkmpdlAo)VK%B3A| zBB_W1I*Y^_feFvT%N%`f1d{JPQ}H27B@Ear~)C3~JoYF_;(HwbSj-<;FHK;&8dDL7)mx4DbT z8OCyPh>u{4f<8SCln-V9feuyY)Wz1MmlA+4^?v%3xSP_Z#EhnpEz_jzlb@uBcM>VRNa*AQGwq`N3-TjH$%+La9B zD7hPvPsqZjTC6_+7Zea?fD6se>4%Bc4ao&{&l7b7`kizaYxub!sdZ2@m2iM&XzC+k ztUR7jQ-C{i%pM!JTW2_52sh61_=eF29nGlMI|%c$TpbCCN%>b^TuH z+3uF4fw6&1RYQ4)v!+b7$^O8N1saQG1&A8}t!a7(6;L;%Px>TFWn%a>3n#wjQjU;C z%^c{E>(KyYe|`oMzY=n-2k@?kyFRE^23ub?bKquzHTax<#{r;^`cT*~>xpTaHoQ^! 
zmTe)2eGN&q!>Z07cKOlJhF-yF^d*g`e)v6hgz3y<^bk8$Ly;W=3oEee zIMf#hobGw2-;Sp5L0{IUJE(o$(FUO=8?zjx{OX5E{_Mab0=GI@=m1CXY|!YMQLQ-pokb zToWs)n~>)lQf;+jU79?5hSQ5eb_@XCsOB5_gyzd}PZj8^?dj^5@ks`Sp;g7R|u z|MwEO+Xwi2`t3G{Tn`tsx-IV>lmESofp)>WvTgCa#v$^-nwH5{Q$6z7hN>eYHxy$S za<3L^1nyeM6He!yVww`v4SXb>$NhClBHL3^4hV8={#xdoTvqgyo>LA+U;mq^q_p)x z`&W+uy0-9B)LC}0vgAuFxO(cw*2HSpZlfcm%8>nCRBa+Bgz|eYgF;gf!a>E zwtoKIS+YOgmrN714YJXeLmU#7?g3@vu2NpkJ*3^EwbBshW4D$dLLZ4!heNgA_KE&f-wH?me)rC?ME2v;7w(^gTpip1B|Pf z7KxIse}tJX3f?W{w#f#1*_mWph_5%XsF`>?O@7$$6uR<5IC03uRsHPMtfoAKZ!2qJ zL6d$-3vq8GY5`lTGQag>H;>=NE!;oZgN@#Cqpg1n(0_PC$t^H2WMi-)0000000BXo zCm2)B|K6w-Ighf_BfvcZtnVhV%nN(LLxn%_j+U;)0H%y9K5v)_8zHiwH2$Z62s!|x z-znT7;d&<)e5-<={SRKMQE*N=pnbAlF+tuF5@li94**Y%7<*KYpN&bF=;8Na zAJgu&*#ynadV`jJB~A4QDf;09Txk6s+9+d_gz2(5%BtMmw+*7@0w^S{Y3YQJSSKd& zZcj}Ug}rTRNqNCR<1K;(*zs%P4Eby|unyqlh@YjFbFsU^4Ee6l64~q>X;yL~eMZhm z-miwdb^LoL-XyGjfHJX!_x`t)HdAG-EMei|?nO^V z{eIclsXTxyk;ljtYqp_wzajkKSh$R>|f!GxaWtlg5Igvv`W5i%!{PN^Yg- zMewpObi04A#;?7W4TfK$Z?(!|W+rJ_6;q*x3V<|{?J!%z2vpjcj4ynss6eDyQ_$s+ z0ap9ZuvuVAMScgMwM*W=T@eX4!dJk>TWI!#K6v6hq_j-K0ch|mn5W85>k~SM$5Fq0cyEgje^I)2x zA+Auq;(gS0$5wJ`oeQdQ*{ruYd6YwX^r2`vmh(U#g>U6CV@sWj=bSUFir}rC zkttuyffp(fyZ;PVz0r|!%8WvExx`MXXIw(B2YlOm0c-iSFi^1m^QjKOF8;#6L;*EI6A>XTe}Hka$GeBU^?GVa$Q_hui%Yf=7WErb=w*KMvEQ zd=y^D3vKW}fo{ero`N)~v2qFCF_>z;lA)7o5AKlu1$iuSvNjs{OpR~wDX-_bTzaA{ zXByb|=qK#Nu2S_(lGhyKuZ8A-r*H2-*P4o7Ydk74?m`&V(veRrS6Bsxz*J(*SNh*>0C zc~kJe4yTJ@4r*m;$$*Dpd8fsPc*7!!V+RWYUMh37`UMp9cD4(Q4NflNRuohMF8_YWfB6rBZxsol0_A*beeLUIyN_hrDxe5a~nvr=^7F+ z2%CSmbWlS4JY^1L*m&2P&LnaQzJZa6PI<*Fqv+AuqXz--(2Bn&JN09DuWA>i>;QQ? ztSo4LjVI)=68dTTR_})8qbxvm!kS3Gc*y|CMPYlJ%h@nOSUTh((i%h>$_l&#+c(Vm zEZ%DK9p|1ppW0{ZbcIt!9svQR2)}>q+{YQ}Yv0x_{y{VZBDqxh2&pIxXZ<@z0*pza zN398n$}JiNl+OwoXkjFE0^CzcCPp)js`+KrdxqhBmTMyVI?00KFE_t?>(;I@YMtD& zoZW3d=`6*C49i@GR z$p0Kc%Lg=i2t$)fgrF7BtZ)^~Isuyd3x{7vQtx{dwht-(@=?RzasVG=jp($3ktCJC zS(6zH01AO_(IuAs%2YKqTJov2@9HF)FTWX#5hU*1F8XVnKVYhXY|Xn0NATCXP6}Lxr~ueRnv)96uEK95MEExrY>j@hVR7}Z-B12k7 zO&RUb@aed@thU*C$i}`>i8OBk(q4iM?i$_v2`Dm&NG;%#MQU;}wS(6;6}`DuM6&e^KJ65fhFjWKJwnP$@#wA&F0TFIbs_?MO>sVDeovTX z7LHU@7D(LCWVrMq^9qUavDJDy&b#7hH*lZIYDQG;Kn@E~T}NYsenNx>KD>M*?a;8c zm*jMT1$B3IFKeXX&5J$sdpDiqR~#hGw2hJnj|hD?e^w7 zPO?`6XQ*N{S9@}RVK!C-**y9X2QoTNj0OZz&z#6Cbq zHEwLRuNoBRVXP9k-Z%3ZB@Do?{fZ;h;Q)6(?9MMKrI5SsnV&1<%AUj=2DdU!JP)VJ zkY0E^+%DvuzniqjptbEuiht~bRSpDnfDNy5r>keg?m>`yZ+g-%O~X1s!BNFXG`Hl- zBmlqCm-S-vphVl$CM4JhIL3d|DeyH0ErF3Q0gXB5u}{|KTrArE2YUJFvJ8nkryiY9 zG$B@NCoFTU>*&M`veZ)EK6^HcT}@o;kev~5z^b1^$|oE}_ucjSS>gMx>K%vuzsm}* z=HW2Lwp_fadrNmXs0h> zK#*%LIXZy!ST23gHCu}jBAE)y06%MiwU24!G^TAv%^BE~L9l=T$3=#NSUA57&G~~a z6O`-+UtKJe>{^Z9=tBV7y}?Mo4xs0sG$iqA=c(|<*w3|~^1lVOEZZoFP`y5_#fTtl z%?;#=_FX$It21eh z?t$wUC2Uvc!fC8WcIykbe{si)9at0OC|KPf@5-A96V?-jQ+>6yhcmWui5YLta2vC5TkRk@}`S@u;@Vm*( zj?r}157qIZwnB*^_6Ob)Od0HJ7n`SX{QPNC{f}`r+ZB8Kbho*65txhHjbA)@%mP?p ztUhRyuy;~)GM7#y#ZkiB%7{obH+-EK9zAC7obr!N!t}H9_0>bWvkX!XpHWS5eUEF8 zJ2Z}7Vcm`O3F0hRynA)40fi3*AC?lsc%$PPhUloWYvHYOB^kD)u4&R4?OdM$wdN$j zw@jQGrjOiFSDtRJ(V9GMtDYhAMdN@fI#}pt5wVUCw`LHXZ>=>i*K6VT{TnnjW8beJ z!aBVKA^}6>Uvg0Gl9@Vo87iXR)$JbyZ!|Y6M_zYbo85JP@yPcEQ_LRodeQ9K+;x4! 
zKHuL7t4d$F^bEAe1?%RLIX0+~IrfKlR*t;G$UXXmU166A$zQ8Aw22B#%}qKUUSzKr zzUXA|mo0OLT^ghgBJso3w+{nLxqz&^%V&c1>)M;t3qIQb`6!Tve; zhIqgUO3V$#_l?mU|Cxj0x9Hy5*w8)~jcLpaA32~v^YW@~cT)yqzHQj1J5b4%_^D+g zO24Mw=H9hbl}U@b6?s8!BMuf@>2}jCyg6Nf3(J!H(BY_IpBSW9C6^b;N@Shaobo1XV8 zx#DuHaMKH5e3Bfaa$^c)DM2{%pg7+YfQVm1$l-#c2DU30En5Jt`QCt5#9EiEb|>CA z#iOL?Oa>Zh9ykCU1T`(?vw_nucZVpiy2aKdN8nucny1u)dBcwfGufiBdFr8gc`J{;P&u(TLsUB=JQvztirLAWIRIUP2FR3 zA6>LC@PBL@jcwbuZL_f(+qT`x=AZCc93*Wn>_8&ZB7hI!QoMj9hf| z1-6yNi~h<7XVojNQ@Se)(ku+-(i4(l@ik}~Mv*YIZC~t}AZf_1qB+2H2Njz30ATPfo<+w<Ke=%C#Gz zgGx`u*-|ygP2T3*5##IAd$pr0+d(_?%#t3>U7qhluSK^H7cJE0oz2BKR^cvX3^Na4 z@ItXq&hH-_YaY+31uYwpC+G0zd?V-!?6#@+o53>+`4kIWJgF2LhSh6`BA^7ni$&uZ z2TCz;u_9*)n;~`SV1`FLe}onf?1tNT@14Qv#|{Gz0d1qKR-RM_6b$Z3xy_=!H=Fj( zTr3U3a8pA4*0;5MWxEwe21C@EOQaSgW&O4@k{LDG3AWh zR0OQ$SgS?uP)sRajfF1`&|7BKhPk75)W;g!w11(ZX*O#5A}S?UOXZ_N+fDU@Yv3%w zvYNcQNgcLs()QOPQ$+DzCXWu^R_dG82jKyP)<_9!Mp)zZE>uRO>4a)+WwWrGu&Y+t zZaJk$@LXZP1j32up}jMiM&({)9gfT!X0fu06&$yf-IHqeuie>W8x( z_UNdT`4*CMXa}8g`~gL4^7p1p72_5d1(n6C4>PmxT0eCKV`D}J)Q|<1`ALe|LKX3E zBEvXCbgU3CV!HvE zubc6sV>$1@41f#sBcnS=^;C_4mvn(v5$2Vj-ihFG46!fT8CiF!p=7q+jcE-uOrk)#|Poy_n{psrj|o!HK2b?NPbrn-Ag zOrkJ?7B^9mne#%ckNDYg_T}%1tF}Cju@teyo?g&XiPddgb%Az^Drz#CR|2bZPa)O4gq`YNuU_U2+IWPuQ_@|9SG7R@5I+mF1 zRcf~OZ%?#p+T8de+sEGM|1^;M0i4C^Ada;xaVAQDa&I6)1L0Xc4x>ICBN20{4V)#iRHn{u;W#H036+;+DeD%ElVGddW|B^};HDl(%k zf1AOG58&2sS~9Llci#WTyTj${Efh>>%EOsIU;`!5Xy#E{sT7j&YN>Cp-~q7ErCq+f z*l@-y548UscaDjy9@cQ(WjdxFI(ekZ_KKen*knnCF z=18T7I|Rmc!#_T=SVqu?zS2mVBEl?reg=)3btG3o`7KJ!t&-*rjHEMA3Tr?`UC!u; z&**hSdWIIZECpE=XnYfTG5)shAnhqTf18n%a#EewEu_v_q9CUnov1P16@6y#7HyLh z0cjHA^6RmUx34E{aGv`27N=d#9ZT`_mYi=|nkRMa5%zj1n=KQZ0e5e2W)M3!O3F#j z2}0lG4nm4Xg7WZEN2O=8Qtia=zP4`0Z{2XEiPDOsgXEs-6Grz2L@*fAlTYMiuH_Y~ zqlhcBmG&ioQnRAV1V#wCuzA1)CJm7KyC{)Zm+ zSm5S{ZVCNtDXWRb&d0&j!fzO!q_MK%Lg>91!zgU^Q=qD2nT5y+>Z-;A-}NF$aG8zX zVOk!-n~$z&nipI_!!Vwk

9@rw)c4@QBfCTi*=659 z;IXX4Y{@dR2tGi|ey54-$LuaVXTdC_&x>jl()d~i`5!^tE2aRj@IvwO@H$HWR*76X zwH9fgG!Um!4Dt=d&}2L(U~-22miDvdp}f9cn~x!?y9L6$J4g` zDMmk7IEhTrVbC-HFjG#AjZOev_joZ+t>XJZQ)_Q!bkm9hW+@1(po%pOmuOUgQaEqb ziDOQJD^v$PySouZU25ysK7ZMTV%50QVq34a!DRLRfcrXqSF>Eryf(T{rda)p>Teo* zMy*TIAH66WsNNlFi(v$(4!STcs4(TYMd%zRQ^1ttfd{P6}H+gzR zd~KPO#;{l!IC}2Py3d*3q%k_Z96q-(2J|o)7GJ*?cmYjfm4J8?J8|&JU3&3Nv6Dnq zrCv;HR1M|68g%uIikMTcA2x`$?SyzRLSEMzxW3V`K3JOT$4u^4N`tSe5JHWol6ZhR z+}xfXrs2*Tq;#bfLblLOxW{-}>!DvUPeRl_>)Op3gbmn8_3rhPM@j#LLl#v$9v7l#baNRy!-=K@K{jH%xVGo9YB4Rr_<}+%AZpV77aXLv! zrF$fu!r8zH)eZ(rl}9?gD5`QX8?Er5IQ1tA(VklZW5~GsY&1go+ef4Tex^Q3^-6$3 zCpq87=<;!pKplR0MDC^bwRtNxgS=Pg)WJqJYsVBkX;OI?fPx0y1xm}SpE2%+h^Lv{ z-|~U;>Q67!aQJqX5N?&9cNwUY>Im-9a#I6Z^V*I&57UHeJ#-;aABhrm@ss@r$*V=ce#IDX|FpA;p zU!Mj{oyfs42&zH>G#E8YRP&Gz-~D=g@o$KX|Mt>har3a(RLkt`E)%GL=wlr~b$sWs zzo6tdi&O&{Zqy7O!k77OLbVU4=Cp%mfDAf?s5@bg=4IgqM?$Q8r`K1$JknGm?uJ-U@x#3e{^vLIU0UP8H59H*#XS#VP(lNesh`~#Kj<3 z{%7-uJ?pM6q1xroN10|2J5_yRXUbpCFbWE6PjY>>s%_+?vEios&#( z=!w&>!*2tC`pw-0uODivR7$2xrL+EGQWH)85l)&Z_q+iju z$MaypY@dJr_92e!5gP2 z4Mr4Ce1a~Sv&#uU09#Z(9?x59({(zD9)0Ol#|Ro9fQ6$r$gQta1UCDta9i%r&_GUX zIm_B)G(poU>Rd(~+1ifo$lGP?O+AHr6tQ<%u!kHpJu9>Hi?Y9K&F8F_AIfmmG-RZGj(lG&nLlZZrcjShZ!jfv zPPK{3NaRGMiKtI~{jN%SfXxRU$jqz2w`4fx4)Wf$+(ClSuR4951ynx{!!c@^B3x&8 zo8mz1oQOS>3Wr#ofo&>r8W~&|BL@}w8a??s-U!MbAs%2TvDjrO9nydCBm|B=aIz)H z6fCH03OLJ%b3#}qh*soZhQvD)rdd3;e|vQTL^homJZE7M6G<)Zyf!iRE$b8GC-=ZW zSWhi-A$st2Vd_lP9w0;7fIFmlmE9Nv1BH7Jlvsn~?IB3ItLFtr^jC+RENajdv9ew;wP{az<9yANCaLCjdpZx%6#6lD= zzg)Pp@BA7_A^4ScO(ce}!qTNw~rZ^GfFMcLLE#)$z#R>eK*&_FO3?T6i+E2$oC1M;>rZ9lYLK@2+8TF@hYxM zr+N=cG}6C+8d-kx08_Hc-l!L$YrQrWTJ&rz4*!*4-(7;T6R~P;fGM246BLp=t~7B$xYB* zv>u1K_!dNsnr~#<$l!M&qa8njqtH#HsktSIE1RDPg_Nh|N7RNm79VVQyWTqpd{R(C zc}fO;Px?sk>A5|L#j^RQtBd)Rn`^y_MkyNQ6&p}2IYE#U!_$^pZlTNzVgWvsbuf0Z z8jC&Lx-9h#-r|u)dei-eK_^8%j+Lz)L(!lG4q7kasBZ1h7Ha9x`yYldB=0ep{me8# z?(>fF#Td{upU|L@X|=FkPS@r$fujD5=@gR@4hv4rqrlz@Zk9IP*+XO$-N2AgHsI@z z5Fkl~-*58qth|FcqBBnGORq(eGQnG?hEKFh^Ms4au0w(kU2%M(s?E4mK}`{u7UZnkHY({HLEv-r;MD1Qb) zV+Y;#UyQ&Ftz?h#0sO@*QuS@wk;b$e3y5e}J;>cLcwDV>jW!6Ho7q^#xSl$u0gTL9 z0P6A4KF2V@5ja7`MZ3k6s;b|tNAE(P$k)ZKMv)*A(iXDT>D{3}mqlk>)?j#EZSUbU zL(0MUEWhb#t<irj})h zkq#hZo*I+gPkK1Yca zJpPD>RRf!}BBpG$rIkV3l?uJ(aoYSTtbhQ zh`GuQB5838TB25QI-*&H=71O1bI5^GPvWq8&NC__05Kc znuqn2!-f^yBH1@4Id65O>4M12QhSKxxNz_{UOQuL_C5&;{H-30?T%54dm2&t5#DM% z2lD4aK=CmAZaF^Ax-N?x)2H>Oc7PNQBy&sKaa)|u7;sg#O~hCzRX#d&WMO|`%Soeu ze@Xeacw)y>FJ3n$gAIs#qbs|Z7PA!>ue--f__iD*OV@|*lL zlTu5GGp3!|uP#!9&5u0fV~|y#uP6i^Is?h~qF_61*Ul1`<|%xwN9)^+>Z7Z|o~X(a zO>v4L8?r;R{fh6skG^L(Ft5-AU~jZY56ZF=ReS1<)(Xm3(;KL&oEFitt*Uo>jZ05G zvvjG}2Ng}lzgzlS$!6q1Yg!IRPk;P=_Rirg?W=kbE zEGv=r&i8L>M+hg9w+qfU}$(MXs^Cno@hq* zLOX&E{W^YCZ&G)Z)M-?8wQ1H-hDn&#X!hP`%?k zMpZs{U&nfotcm<&(MJhQ){p}JSWJ~K9})#zN{E^zHLG+6jt}RNX&9wT*X~v zP|&%Q|IL4SSW+UMAU#0QA>YtLQFz!)3#+2}82bb?6+jCtgW@(R56OiQM=!S{&4gIS zc(kKji&X01(S}#~4fkicd$Pobwpo622w-UOy9wSz^1GT@aDL_$a;c-KR!5|M;9w7LG1{qel4vDXD!f@>JS)6`zK>J ziq9^Pg3qH(U;Kl;4$n#zzt>Ts_0`M|TC&E~(g_hq3-j}wr&_p#i6aTL6~(g*Shn%L z3UEZHrwQK>e=W%ZRhp1Tq<-|%Q^g8b_JVb_19Nphl{jGMpuqFBkVpwnjrP}&H_m;r zpnlAAFBR3z^rNzgP~q$=i(eSDOBp!2^T##6Gg@DpVdpQ8cJ9Em3XvY3PG!JF*LICo zNsC!}{k78%7CVsFL3LF+nbyn0lI@OR1`XWf(XH{OBL>cYs=CHM)nT*}ono(da1TRQ zre1m~W<_~IT97gL@CUEX!7syLn9-GB@J1XxbR&iz$z_#V{eEPMnN!gfiFJOLtHHfc zjeb!N#S~4C^$)1t*FL?sNotJF5%8*deg6>y@`}C!mXNtt-wRTHQK&29_ptjO*xya# zXyhoC-rdu1$LwR@VVnqOAnUuJMish<&;I+W(rx-~qPy}aBql#CyOkI$cUB;CTV^$1 zE3-HEQ)cg#G$ELClG}9SqL;~Q*$y5cSMzBM{f3D+&sC!Jl_8{m|ws072Vo+tbqdy!EJNp&77=GFLd 
zq+^6iEJj=>NIlu*+9A3#{N@Nr1bbA0@%=~K(Di%7XmI38P9yISs2ho0Cu(rewVH2$ zT4pm>2##mS@gLXIT1AWIis)spnZhUaR~H*AzJx_PtR6;%Fk!_wb#g~~T-#q=eQ0{( zlbpQEO-}+?(2B#TQVKvSvXot` z!9prIK3T|mx^v`>ZXozbf;lXU=^DZ~ww__77CjIq92=h2zQ^umK`6S_Vqu^7 zdNRF-_^4OM;LOBpdU~AP4;b{ty(DQj@kdRFi=8?R-YXQBvY^q&+Q8;en3u>gXTVu0Q+6R~-Wwd2KU=gT-LPSuQ*1$GkCD!t0JS!p zh{*d8f7@e=UXG-uU7Xsm!OhDx4dE&Y9!-*7CmfHs;Msj{@x%e!E(iKIQ}vh zNVuCeOw>Lv)U~^%Dg88(=t*FYQBlRC}*u6KEE%tuh(SIRH+iXHT^kR1sDAz zH+yBY5Ek{$Tb#=GrA=UK}pyNV}?+aI!oUI zVh$dauo)w;m@(4Uxg%K{Zt;USrSv2SDumY zHF2(7zbhoGG_dY>Rs$iQ*>H!AWu4@STv{B<9(f@`jd>iarN)(&I0to2E%wxzE>pj6+#(@lF(|=)D5nh`($k5{Q6O*e zVb_Ko3{RVm33f(I4oc`i4Zi_*Q9Um5H-jwiy3(0^#@ zo-vLy9`KTD+BU4VnV*J!=WA2Rygtx@c{P|C(X4pB6~q|Ivm5VI#uKSp4yG9nM*S15 zv?cIzj)gXSo&@f;)}MT#s$Ee{aNND3V1z4{V7Iz>27O&k#Rk2$r=g$hW06bKQ zu@@bA$^PfhkMSFG{1^VQT#ys08jV2dnhWXPHS-O|253Ihnjhc-8H?w(ZMS{=ZqZm_ zsuVWIpZ0~cyWc~-mOP^2HAl1VC$2u7-*QZ)FPg-$1UTb3^2~XRLSzxJ63e{}pE}v6 z-um_QQABH1kPcHmoSyS3qJD|c1yn+Ybr?3215jjC3>yEoGj^Oa@-c6oPsI|h+(E&f zymA;E7E{gYF@Ex{faJ(f&)qZg?@3V7qtfeWu5o||MS0XA?+AVC?TPg;`2Njx#$PZf z-R{GQeu=D0%%mkjuA2B_;aQNGiQj+6iaqjS@>BZEWexDel1%!TP7U+8uqs&`MCTl% zUUhdGO_Zrv$NP54PAbK2q(9t{A)= zDrj*Ja8dA08Y~Ds|H!x%xk`~o-)J}j`IQSrG5U&!1Lh*(69dgajsr2`-r{hk$!XjN zuk~9Z{~&M;$`B5sbk`0$FjUVKg*lpDGd?56B!Ti0t>5uY4RkQK?P)UTjM>Ngc6+Ao zkH=^*mW5YL>gE%`O2G&VCrDR&#dY2?7TB+>}YLJO<%M@dTLDf=V#ZVL(`K%4_)eb#z8u-&5OJNS== z%<(=`9j)7ARA41aE>m^s8wfe;eTw8kg*QNt7rsXu@E>BHvcg#zlPr|zi0`2{FOKeK z&TkuzP}$|xitMU4>@L8`oW5N7A01~z#A)kmzRcN?$U}lfO-X~7=q`G#8~(XAyItT= z$^%~N5>Bway5-Sn2d0M*Dtk-VWp-TVJp9l==+4}p@n>vZ2gB@W->#j@Dyc50`h<_E zrKBUdE=Y40CG1vUgpw(BOzAuqhlba-*MmJoQJ=i~)M<-`ow{JI>oaFr3E$|v=3Za#Egv7j)R`53VkAG6UNySJyn49(GYfI_=f|x@ zbs03scc7!pJs+opS};U5l91vax<+`Mce&@0#uLKb-=iLkZ{mph+o1ezvBM2$oh2rehCEqDJSnwDKt>u48(<&r!W}xm$$$)3>)NOu!r)q@f8q^)YKZPgJ|`aN%43qTt%uVRSsN()2f^<-aW}RL>qOp{?6mLY++& zY0TuVwIo;DX-~6hL`>2kPmV|YDu2BcrK%?@(TSN8w5*-XksL}fbL%@<2mU!fDGWbS z9=AY3d$uyvU*5S=-0bb4=B;CZ=vSNq1Kvm;2%n;0U^IGnrynoZK4QZWXh)`jwCri9 ztm3H<+&?rK|DX(P)0}RW83_1}k#)7CYTCX7yRwhBJZm7ejWhU#&w$n1l8p5X%v~EK>x_4p*T%WU&$x-1VPp$#U0d>fLknEL> zRwi(G1g&OCc&#$zg}R`Nd$RK;GR-c&Wc7e1C``EAB5{C&*X_n-&r!;NzquXxg;1`C zt)j*Zcw%sujf2$Puoo`|0~~~6r1Z=NA!&>KX&A0bk`HLw!dZES5K0)&-n6L#jamqI ziii7%o~A58>)r$>MB9;L&K5t1*b>nR16;`^scWUJ_T|OdXqQ+I-wvPE^m7H6r)!5t zBnw0NXdLrY$slZ%zicAnJeXxUFXelP5VeanZpSiA@INe(q z4ZS(^(d*G>jG;$&n8CiJo&@altPIzbl2SsGF8urL69BN`! zFEgzAd!^+ay+{rDEr=Tei7L?UE$nra+F;xp9T1qkh>izb z>KAm$%c2`S4IXV`u$^+dMwKEi7o2A0AtKN1$$)Da-GtA=0q|bueq);bBrJ4k zQ{`-zB9Ck{&ib4_AJ`n6AWH_sNjA8?cs)>e5v5Cxyg84a5x{EjT`6%Yk*U{$ulRr! 
zCST%OtM1sNaZ(WMD4jA7e20RYL~B*p#6YBPcJ#o+mm&3q3jy5TP8AoT!Bi<8u~D%< z>?B{Wrcg9N<`E}5^IKI%TXXBDMs-GhbN1T#OT27z|zE>(GL{xzunOh|E3H6Jp?RY{6GJfJ$Uf< znA`cwRK`Gn8=o(e>;M4@7{5G;0t`^H`Z9$NFkqSN%h1KZfEK+kkAi>z%!$9uMGgWG zRrzw5CkVig=*x&XAb{6`FQW>A0zR3)jOzjlSflwepa2xmtM=vLCs2T%`IqSozyL4Q zUuK8`1Duwr3OipM3_dnJ#hXyeIvwkUw&;UWoukl-*(0~QwFLx8b0Mh>Enf{k=`rrMc zCjg8N86J@N z&we}=zyoytiR(1M1ISFj#z$Zy0J6S)nT`Viz=Qo|M0Y$-}kQn0Ua RL6T literal 0 HcmV?d00001 diff --git a/docs_zh_CN/demo.md b/docs_zh_CN/demo.md index 703df0cdc1..2966a97ba7 100644 --- a/docs_zh_CN/demo.md +++ b/docs_zh_CN/demo.md @@ -7,6 +7,7 @@ - [可视化输入视频的 GradCAM](#可视化输入视频的-GradCAM) - [使用网络摄像头的实时动作识别](#使用网络摄像头的实时动作识别) - [滑动窗口预测长视频中不同动作类别](#滑动窗口预测长视频中不同动作类别) +- [基于人体姿态预测动作标签](#基于人体姿态预测动作标签) ## 预测视频的动作标签 @@ -396,3 +397,69 @@ python demo/webcam_demo_spatiotemporal_det.py \ --output-fps 20 \ --show ``` + +## 基于人体姿态预测动作标签 + +MMAction2 提供本脚本实现基于人体姿态的动作标签预测。 + +```shell +python demo/demo_posec3d.py ${VIDEO_FILE} ${OUT_FILENAME} \ + [--config ${SKELETON_BASED_ACTION_RECOGNITION_CONFIG_FILE}] \ + [--checkpoint ${SKELETON_BASED_ACTION_RECOGNITION_CHECKPOINT}] \ + [--det-config ${HUMAN_DETECTION_CONFIG_FILE}] \ + [--det-checkpoint ${HUMAN_DETECTION_CHECKPOINT}] \ + [--det-score-thr ${HUMAN_DETECTION_SCORE_THRESHOLD}] \ + [--pose-config ${HUMAN_POSE_ESTIMATION_CONFIG_FILE}] \ + [--pose-checkpoint ${HUMAN_POSE_ESTIMATION_CHECKPOINT}] \ + [--label-map ${LABEL_MAP}] \ + [--device ${DEVICE}] \ + [--short-side] ${SHORT_SIDE} +``` + +可选参数: + +- `SPATIOTEMPORAL_ACTION_DETECTION_CONFIG_FILE`: 时空检测配置文件路径。 +- `SPATIOTEMPORAL_ACTION_DETECTION_CHECKPOINT`: 时空检测模型权重文件路径。 +- `ACTION_DETECTION_SCORE_THRESHOLD`: 动作检测分数阈值,默认为 0.4。 +- `HUMAN_DETECTION_CONFIG_FILE`: 人体检测配置文件路径。 +- `HUMAN_DETECTION_CHECKPOINT`: 人体检测模型权重文件路径。 +- `HUMAN_DETECTION_SCORE_THRE`: 人体检测分数阈值,默认为 0.9。 +- `INPUT_VIDEO`: 网络摄像头编号或本地视频文件路径,默认为 `0`。 +- `LABEL_MAP`: 所使用的标签映射文件,默认为 `demo/label_map_ava.txt`。 +- `DEVICE`: 指定脚本运行设备,支持 cuda 设备(如 `cuda:0`)或 cpu(`cpu`),默认为 `cuda:0`。 +- `OUTPUT_FPS`: 输出视频的帧率,默认为 15。 +- `OUTPUT_FILENAME`: 输出视频的路径,默认为 `None`。 +- `--show`: 是否通过 `cv2.imshow` 展示预测结果。 +- `DISPLAY_HEIGHT`: 输出结果图像高度,默认为 0。 +- `DISPLAY_WIDTH`: 输出结果图像宽度,默认为 0。若 `DISPLAY_HEIGHT <= 0 and DISPLAY_WIDTH <= 0`,则表示输出图像形状与输入视频形状相同。 +- `PREDICT_STEPSIZE`: 每 N 帧进行一次预测(以控制计算资源),默认为 8。 +- `CLIP_VIS_LENGTH`: 预测结果可视化持续帧数,即每次预测结果将可视化到 `CLIP_VIS_LENGTH` 帧中,默认为 8。 + +- `SKELETON_BASED_ACTION_RECOGNITION_CONFIG_FILE`: 基于人体字体的动作识别模型配置文件路径。 +- `SKELETON_BASED_ACTION_RECOGNITION_CHECKPOINT`: 基于人体字体的动作识别模型权重文件路径。 +- `HUMAN_DETECTION_CONFIG_FILE`: 人体检测配置文件路径。 +- `HUMAN_DETECTION_CHECKPOINT`: 人体检测模型权重文件路径。 +- `HUMAN_DETECTION_SCORE_THRE`: 人体检测分数阈值,默认为 0.9。 +- `HUMAN_POSE_ESTIMATION_CONFIG_FILE`: 人体姿态估计模型配置文件路径 (需在 COCO-keypoint 数据集上训练)。 +- `HUMAN_POSE_ESTIMATION_CHECKPOINT`: 人体姿态估计模型权重文件路径 (需在 COCO-keypoint 数据集上训练). +- `LABEL_MAP`: 所使用的标签映射文件,默认为 `demo/label_map_ntu120.txt`。 +- `DEVICE`: 指定脚本运行设备,支持 cuda 设备(如 `cuda:0`)或 cpu(`cpu`),默认为 `cuda:0`。 +- `SHORT_SIDE`: 视频抽帧时使用的短边长度,默认为 480。 + +示例: + +以下示例假设用户的当前目录为 $MMACTION2。 + +1. 
From e74ffeef424868b66fbfa522e66d91a1c9f36416 Mon Sep 17 00:00:00 2001
From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com>
Date: Thu, 1 Jul 2021 15:43:49 +0800
Subject: [PATCH 179/414] [Improvement] Add FLOPs (#966)

* fix pretrained for slowonly flow

* fix

* Add FLOPs for recognition models
---
 configs/recognition/c3d/metafile.yml           |  1 +
 configs/recognition/csn/metafile.yml           |  2 +
 configs/recognition/i3d/metafile.yml           | 10 +++++
 configs/recognition/omnisource/metafile.yml    | 16 ++++++++
 configs/recognition/r2plus1d/metafile.yml      |  4 ++
 configs/recognition/slowfast/metafile.yml      |  8 ++++
 configs/recognition/slowonly/metafile.yml      | 19 +++++++++
 configs/recognition/tanet/metafile.yml         |  1 +
 configs/recognition/tin/metafile.yml           |  3 ++
 configs/recognition/tpn/metafile.yml           |  3 ++
 configs/recognition/tsm/metafile.yml           | 32 ++++++++++++++-
 configs/recognition/tsn/metafile.yml           | 41 +++++++++++++++++++
 configs/recognition/x3d/metafile.yml           |  2 +
 mmaction/models/backbones/resnet3d.py          |  7 ++++
 .../models/backbones/resnet3d_slowfast.py      |  8 ++++
 15 files changed, 155 insertions(+), 2 deletions(-)

diff --git a/configs/recognition/c3d/metafile.yml b/configs/recognition/c3d/metafile.yml
index 8dabc63831..3e7a6153ea 100644
--- a/configs/recognition/c3d/metafile.yml
+++ b/configs/recognition/c3d/metafile.yml
@@ -8,6 +8,7 @@ Models:
     Architecture: c3d
     Batch Size: 30
     Epochs: 45
+    FLOPs: 38615475200
     Parameters: 78409573
     Pretrained: sports1m
     Resolution: 128x171
diff --git a/configs/recognition/csn/metafile.yml b/configs/recognition/csn/metafile.yml
index 31dd84f33f..d3cd21b877 100644
--- a/configs/recognition/csn/metafile.yml
+++ b/configs/recognition/csn/metafile.yml
@@ -8,6 +8,7 @@ Models:
     Architecture: ResNet152
     Batch Size: 3
     Epochs: 58
+    FLOPs: 98096676864
     Parameters: 29703568
     Pretrained: IG65M
     Resolution: short-side 320
@@ -30,6 +31,7 @@ Models:
     Architecture: ResNet152
     Batch Size: 3
     Epochs: 58
+    FLOPs: 98096676864
     Parameters: 29703568
     Pretrained: IG65M
     Resolution: short-side 320
diff --git a/configs/recognition/i3d/metafile.yml b/configs/recognition/i3d/metafile.yml
index 5a32517893..711d57a475 100644
--- a/configs/recognition/i3d/metafile.yml
+++ b/configs/recognition/i3d/metafile.yml
@@ -8,6 +8,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 100
+    FLOPs: 43564040192
     Parameters: 28043472
     Pretrained: ImageNet
     Resolution: 340x256
@@ -30,6 +31,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 100
+    FLOPs: 43564040192
     Parameters: 28043472
     Pretrained: ImageNet
     Resolution: short-side 256
@@ -52,6 +54,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 100
+    FLOPs: 43564040192
     Parameters:
28043472 Pretrained: ImageNet Resolution: short-side 256p @@ -74,6 +77,7 @@ Models: Architecture: ResNet50 Batch Size: 8 Epochs: 100 + FLOPs: 43564040192 Parameters: 28043472 Pretrained: ImageNet Resolution: 340x256 @@ -96,6 +100,7 @@ Models: Architecture: ResNet50 Batch Size: 8 Epochs: 100 + FLOPs: 43564040192 Parameters: 28043472 Pretrained: ImageNet Resolution: short-side 256 @@ -118,6 +123,7 @@ Models: Architecture: ResNet50 Batch Size: 8 Epochs: 100 + FLOPs: 43564040192 Parameters: 28043472 Pretrained: ImageNet Resolution: 340x256 @@ -140,6 +146,7 @@ Models: Architecture: ResNet50 Batch Size: 8 Epochs: 100 + FLOPs: 43564040192 Parameters: 28043472 Pretrained: ImageNet Resolution: short-side 256 @@ -162,6 +169,7 @@ Models: Architecture: ResNet50 Batch Size: 8 Epochs: 100 + FLOPs: 54334488576 Parameters: 35397840 Pretrained: ImageNet Resolution: short-side 256p @@ -184,6 +192,7 @@ Models: Architecture: ResNet50 Batch Size: 8 Epochs: 100 + FLOPs: 48962109440 Parameters: 31723728 Pretrained: ImageNet Resolution: short-side 256p @@ -206,6 +215,7 @@ Models: Architecture: ResNet50 Batch Size: 8 Epochs: 100 + FLOPs: 54334488576 Parameters: 35397840 Pretrained: ImageNet Resolution: short-side 256p diff --git a/configs/recognition/omnisource/metafile.yml b/configs/recognition/omnisource/metafile.yml index 16ca8a1896..7f60b1aa47 100644 --- a/configs/recognition/omnisource/metafile.yml +++ b/configs/recognition/omnisource/metafile.yml @@ -8,6 +8,7 @@ Models: Architecture: ResNet50 Batch Size: 12 Epochs: 100 + FLOPs: 134526976000 Input: 3seg Modality: RGB Parameters: 23917832 @@ -31,6 +32,7 @@ Models: Architecture: ResNet50 Batch Size: 12 Epochs: 100 + FLOPs: 134526976000 Input: 3seg Modality: RGB Parameters: 23917832 @@ -54,6 +56,7 @@ Models: Architecture: ResNet50 Batch Size: 12 Epochs: 100 + FLOPs: 134526976000 Input: 3seg Modality: RGB Parameters: 23917832 @@ -77,6 +80,7 @@ Models: Architecture: ResNet50 Batch Size: 12 Epochs: 100 + FLOPs: 134526976000 Input: 3seg Modality: RGB Parameters: 23917832 @@ -100,6 +104,7 @@ Models: Architecture: ResNet50 Batch Size: 12 Epochs: 100 + FLOPs: 134526976000 Input: 3seg Modality: RGB Parameters: 23917832 @@ -123,6 +128,7 @@ Models: Architecture: ResNet50 Batch Size: 12 Epochs: 100 + FLOPs: 134526976000 Input: 3seg Modality: RGB Parameters: 23917832 @@ -146,6 +152,7 @@ Models: Architecture: ResNet50 Batch Size: 12 Epochs: 256 + FLOPs: 54860070912 Input: 8x8 Modality: RGB Parameters: 32044296 @@ -169,6 +176,7 @@ Models: Architecture: ResNet50 Batch Size: 12 Epochs: 256 + FLOPs: 54860070912 Input: 8x8 Modality: RGB Parameters: 32044296 @@ -192,6 +200,7 @@ Models: Architecture: ResNet50 Batch Size: 12 Epochs: 256 + FLOPs: 54860070912 Input: 8x8 Modality: RGB Parameters: 32044296 @@ -215,6 +224,7 @@ Models: Architecture: ResNet50 Batch Size: 12 Epochs: 256 + FLOPs: 54860070912 Input: 8x8 Modality: RGB Parameters: 32044296 @@ -238,6 +248,7 @@ Models: Architecture: ResNet50 Batch Size: 12 Epochs: 256 + FLOPs: 54860070912 Input: 8x8 Modality: RGB Parameters: 32044296 @@ -261,6 +272,7 @@ Models: Architecture: ResNet50 Batch Size: 12 Epochs: 256 + FLOPs: 54860070912 Input: 8x8 Modality: RGB Parameters: 32044296 @@ -284,6 +296,7 @@ Models: Architecture: ResNet50 Batch Size: 32 Epochs: 100 + FLOPs: 102997721600 Parameters: 24327632 Pretrained: ImageNet Resolution: 340x256 @@ -303,6 +316,7 @@ Models: Architecture: ResNet50 Batch Size: 32 Epochs: 100 + FLOPs: 102997721600 Parameters: 24327632 Pretrained: IG-1B Resolution: short-side 320 @@ -322,6 +336,7 @@ Models: 
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 256
+    FLOPs: 27430649856
     Parameters: 32454096
     Pretrained: None
     Resolution: short-side 320
@@ -341,6 +356,7 @@ Models:
     Architecture: ResNet101
     Batch Size: 8
     Epochs: 196
+    FLOPs: 112063447040
     Parameters: 60359120
     Pretrained: None
     Resolution: short-side 320
diff --git a/configs/recognition/r2plus1d/metafile.yml b/configs/recognition/r2plus1d/metafile.yml
index 70f8aaee85..53522a50aa 100644
--- a/configs/recognition/r2plus1d/metafile.yml
+++ b/configs/recognition/r2plus1d/metafile.yml
@@ -8,6 +8,7 @@ Models:
     Architecture: ResNet34
     Batch Size: 8
     Epochs: 180
+    FLOPs: 53175572992
     Parameters: 63759281
     Pretrained: None
     Resolution: short-side 256
@@ -30,6 +31,7 @@ Models:
     Architecture: ResNet34
     Batch Size: 16
     Epochs: 180
+    FLOPs: 53175572992
     Parameters: 63759281
     Pretrained: None
     Resolution: short-side 256
@@ -52,6 +54,7 @@ Models:
     Architecture: ResNet34
     Batch Size: 8
     Epochs: 180
+    FLOPs: 53175572992
     Parameters: 63759281
     Pretrained: None
     Resolution: short-side 320
@@ -74,6 +77,7 @@ Models:
     Architecture: ResNet34
     Batch Size: 6
     Epochs: 180
+    FLOPs: 212701677568
     Parameters: 63759281
     Pretrained: None
     Resolution: short-side 320
diff --git a/configs/recognition/slowfast/metafile.yml b/configs/recognition/slowfast/metafile.yml
index 475e246294..ac9ff8526b 100644
--- a/configs/recognition/slowfast/metafile.yml
+++ b/configs/recognition/slowfast/metafile.yml
@@ -8,6 +8,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 256
+    FLOPs: 36441296896
     Parameters: 34479288
     Pretrained: None
     Resolution: short-side 256
@@ -30,6 +31,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 256
+    FLOPs: 36441296896
     Parameters: 34479288
     Pretrained: None
     Resolution: short-side 256
@@ -52,6 +54,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 256
+    FLOPs: 36441296896
     Parameters: 34479288
     Pretrained: None
     Resolution: short-side 320
@@ -74,6 +77,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 256
+    FLOPs: 66222034944
     Parameters: 34565560
     Pretrained: None
     Resolution: short-side 256
@@ -96,6 +100,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 256
+    FLOPs: 66222034944
     Parameters: 34565560
     Pretrained: None
     Resolution: short-side 320
@@ -118,6 +123,7 @@ Models:
     Architecture: ResNet101 + ResNet50
     Batch Size: 8
     Epochs: 256
+    FLOPs: 65042780160
     Parameters: 62384312
     Pretrained: None
     Resolution: short-side 256
@@ -140,6 +146,7 @@ Models:
     Architecture: ResNet101
     Batch Size: 8
     Epochs: 256
+    FLOPs: 127070375936
     Parameters: 62912312
     Pretrained: None
     Resolution: short-side 256
@@ -162,6 +169,7 @@ Models:
     Architecture: ResNet152 + ResNet50
     Batch Size: 8
     Epochs: 256
+    FLOPs: 91515654144
     Parameters: 84843704
     Pretrained: None
     Resolution: short-side 256
diff --git a/configs/recognition/slowonly/metafile.yml b/configs/recognition/slowonly/metafile.yml
index 80a66339ef..b67b9d9ed8 100644
--- a/configs/recognition/slowonly/metafile.yml
+++ b/configs/recognition/slowonly/metafile.yml
@@ -8,6 +8,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 256
+    FLOPs: 27430649856
     Parameters: 32454096
     Pretrained: None
     Resolution: short-side 320
@@ -27,6 +28,7 @@ Models:
     Architecture: ResNet101
     Batch Size: 8
     Epochs: 196
+    FLOPs: 112063447040
     Parameters: 60359120
     Pretrained: None
     Resolution: short-side 320
@@ -46,6 +48,7 @@ Models:
     Architecture: ResNet101
     Batch Size: 8
     Epochs: 196
+    FLOPs: 112063447040
     Parameters: 60359120
     Pretrained: None
     Resolution: short-side 320
@@ -65,6 +68,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 256
+    FLOPs: 27430649856
     Parameters: 32454096
     Pretrained: None
     Resolution: short-side 256
@@ -87,6 +91,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 24
     Epochs: 256
+    FLOPs: 27430649856
     Parameters: 32454096
     Pretrained: None
     Resolution: short-side 320
@@ -109,6 +114,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 256
+    FLOPs: 54860480512
     Parameters: 32454096
     Pretrained: None
     Resolution: short-side 256
@@ -131,6 +137,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 256
+    FLOPs: 27430649856
     Parameters: 32454096
     Pretrained: None
     Resolution: short-side 320
@@ -153,6 +160,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 256
+    FLOPs: 54860480512
     Parameters: 32454096
     Pretrained: None
     Resolution: short-side 320
@@ -175,6 +183,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 150
+    FLOPs: 27430649856
     Parameters: 32454096
     Pretrained: ImageNet
     Resolution: short-side 320
@@ -197,6 +206,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 150
+    FLOPs: 54860480512
     Parameters: 32454096
     Pretrained: ImageNet
     Resolution: short-side 320
@@ -219,6 +229,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 150
+    FLOPs: 38201098240
     Parameters: 39808464
     Pretrained: ImageNet
     Resolution: short-side 320
@@ -241,6 +252,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 150
+    FLOPs: 76401377280
     Parameters: 39808464
     Pretrained: ImageNet
     Resolution: short-side 320
@@ -263,6 +275,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 24
     Epochs: 256
+    FLOPs: 27225128960
     Parameters: 32450960
     Pretrained: ImageNet
     Resolution: short-side 320
@@ -285,6 +298,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 12
     Epochs: 196
+    FLOPs: 54449438720
     Parameters: 32450960
     Pretrained: ImageNet
     Resolution: short-side 320
@@ -307,6 +321,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 12
     Epochs: 256
+    FLOPs: 54860890112
     Parameters: 32863896
     Pretrained: None
     Resolution: short-side 256
@@ -329,6 +344,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 12
     Epochs: 256
+    FLOPs: 54861094912
     Parameters: 33068796
     Pretrained: None
     Resolution: short-side 256
@@ -351,6 +367,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 24
     Epochs: 120
+    FLOPs: 27430649856
     Parameters: 32454096
     Pretrained: ImageNet
     Resolution: short-side 256
@@ -373,6 +390,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 24
     Epochs: 120
+    FLOPs: 27225128960
     Parameters: 32450960
     Pretrained: Kinetics
     Resolution: short-side 256
@@ -395,6 +413,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 64
+    FLOPs: 54859716608
     Parameters: 31689819
     Pretrained: ImageNet
     Resolution: height 100
diff --git a/configs/recognition/tanet/metafile.yml b/configs/recognition/tanet/metafile.yml
index 5bc7961afa..1271209772 100644
--- a/configs/recognition/tanet/metafile.yml
+++ b/configs/recognition/tanet/metafile.yml
@@ -8,6 +8,7 @@ Models:
     Architecture: TANet
     Batch Size: 8
     Epochs: 100
+    FLOPs: 43065983104
     Parameters: 25590320
     Pretrained: ImageNet
     Resolution: short-side 320
diff --git a/configs/recognition/tin/metafile.yml b/configs/recognition/tin/metafile.yml
index f67be352d9..0c8bf63c62 100644
--- a/configs/recognition/tin/metafile.yml
+++ b/configs/recognition/tin/metafile.yml
@@ -8,6 +8,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 6
     Epochs: 40
+    FLOPs: 32962097536
     Parameters: 23895566
     Pretrained: ImageNet
     Resolution: height 100
@@ -30,6 +31,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 6
     Epochs: 40
+    FLOPs: 32962097536
     Parameters: 23895566
     Pretrained: ImageNet
     Resolution: height 240
@@ -52,6 +54,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 6
     Epochs: 50
+    FLOPs: 32965800320
     Parameters: 24358640
     Pretrained: TSM-Kinetics400
     Resolution: short-side 256
diff --git a/configs/recognition/tpn/metafile.yml b/configs/recognition/tpn/metafile.yml
index b091904d46..c028d1c8d3 100644
--- a/configs/recognition/tpn/metafile.yml
+++ b/configs/recognition/tpn/metafile.yml
@@ -8,6 +8,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 150
+    FLOPs: 66014576640
     Parameters: 91498336
     Pretrained: ImageNet
     Resolution: short-side 320
@@ -30,6 +31,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 150
+    FLOPs: 66014576640
     Parameters: 91498336
     Pretrained: ImageNet
     Resolution: short-side 320
@@ -52,6 +54,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 150
+    FLOPs: 54202822656
     Parameters: 82445724
     Pretrained: TSM
     Resolution: height 100
diff --git a/configs/recognition/tsm/metafile.yml b/configs/recognition/tsm/metafile.yml
index 059e2481b4..98ba18f9a2 100644
--- a/configs/recognition/tsm/metafile.yml
+++ b/configs/recognition/tsm/metafile.yml
@@ -8,6 +8,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 50
+    FLOPs: 32965562368
     Parameters: 24327632
     Pretrained: ImageNet
     Resolution: 340x256
@@ -30,6 +31,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 50
+    FLOPs: 32965562368
     Parameters: 24327632
     Pretrained: ImageNet
     Resolution: short-side 256
@@ -52,6 +54,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 50
+    FLOPs: 32965562368
     Parameters: 24327632
     Pretrained: ImageNet
     Resolution: short-side 256
@@ -74,6 +77,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 50
+    FLOPs: 32965562368
     Parameters: 24327632
     Pretrained: ImageNet
     Resolution: short-side 256
@@ -96,6 +100,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 100
+    FLOPs: 32965562368
     Parameters: 24327632
     Pretrained: ImageNet
     Resolution: 340x256
@@ -118,6 +123,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 100
+    FLOPs: 32965562368
     Parameters: 24327632
     Pretrained: ImageNet
     Resolution: short-side 256
@@ -140,6 +146,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 6
     Epochs: 50
+    FLOPs: 65931124736
     Parameters: 24327632
     Pretrained: ImageNet
     Resolution: 340x256
@@ -162,6 +169,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 6
     Epochs: 50
+    FLOPs: 65931124736
     Parameters: 24327632
     Pretrained: ImageNet
     Resolution: short-side 256
@@ -184,6 +192,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 50
+    FLOPs: 49457811456
     Parameters: 31682000
     Pretrained: ImageNet
     Resolution: short-side 320
@@ -206,6 +215,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 50
+    FLOPs: 41231355904
     Parameters: 28007888
     Pretrained: ImageNet
     Resolution: short-side 320
@@ -228,6 +238,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 50
+    FLOPs: 49457811456
     Parameters: 31682000
     Pretrained: ImageNet
     Resolution: short-side 320
@@ -250,6 +261,7 @@ Models:
     Architecture: MobileNetV2
     Batch Size: 8
     Epochs: 100
+    FLOPs: 3337519104
     Parameters: 2736272
     Pretrained: ImageNet
     Resolution: short-side 320
@@ -272,6 +284,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 50
+    FLOPs: 32959795200
     Parameters: 23606384
     Pretrained: ImageNet
     Training Data: Diving48
@@ -293,6 +306,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 4
     Epochs: 50
+    FLOPs: 65919590400
     Parameters: 23606384
     Pretrained: ImageNet
     Training Data: Diving48
@@ -314,6 +328,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 50
+    FLOPs: 32961859584
     Parameters: 23864558
     Pretrained: ImageNet
     Resolution: height 100
@@ -340,6 +355,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 50
+    FLOPs: 32961859584
     Parameters: 23864558
     Pretrained: ImageNet
     Resolution: height 100
@@ -366,6 +382,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 50
+    FLOPs: 32961859584
     Parameters: 23864558
     Pretrained: ImageNet
     Resolution: height 100
@@ -392,6 +409,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 50
+    FLOPs: 32961859584
     Parameters: 23864558
     Pretrained: ImageNet
     Resolution: height 100
@@ -418,6 +436,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 6
     Epochs: 50
+    FLOPs: 65923719168
     Parameters: 23864558
     Pretrained: ImageNet
     Resolution: height 100
@@ -444,6 +463,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 50
+    FLOPs: 62782459904
     Parameters: 42856686
     Pretrained: ImageNet
     Resolution: height 100
@@ -470,6 +490,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 6
     Epochs: 50
+    FLOPs: 32961859584
     Parameters: 23864558
     Pretrained: ImageNet
     Resolution: height 240
@@ -496,6 +517,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 6
     Epochs: 50
+    FLOPs: 32961859584
     Parameters: 23864558
     Pretrained: ImageNet
     Resolution: height 256
@@ -522,6 +544,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 6
     Epochs: 50
+    FLOPs: 65923719168
     Parameters: 23864558
     Pretrained: ImageNet
     Resolution: height 240
@@ -548,6 +571,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 6
     Epochs: 50
+    FLOPs: 32961859584
     Parameters: 23864558
     Pretrained: ImageNet
     Resolution: height 256
@@ -574,6 +598,7 @@ Models:
     Architecture: ResNet101
     Batch Size: 8
     Epochs: 50
+    FLOPs: 62782459904
     Parameters: 42856686
     Pretrained: ImageNet
     Resolution: height 240
@@ -600,6 +625,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 50
+    FLOPs: 43051352064
     Parameters: 23864558
     Pretrained: ImageNet
     Resolution: height 100
@@ -626,6 +652,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 50
+    FLOPs: 43051352064
     Parameters: 23864558
     Pretrained: ImageNet
     Resolution: height 100
@@ -652,6 +679,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 50
+    FLOPs: 43048943616
     Parameters: 23563355
     Pretrained: ImageNet
     Resolution: height 100
@@ -662,8 +690,8 @@ Models:
   Results:
   - Dataset: Jester
     Metrics:
-    - top1 acc (efficient): 96.5
-    - top1 acc (accurate): 97.2
+      top1 acc (accurate): 97.2
+      top1 acc (efficient): 96.5
     Task: Action Recognition
     Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb.json
     Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb.log
diff --git a/configs/recognition/tsn/metafile.yml b/configs/recognition/tsn/metafile.yml
index d17f50e036..fa1d690c41 100644
--- a/configs/recognition/tsn/metafile.yml
+++ b/configs/recognition/tsn/metafile.yml
@@ -8,6 +8,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 32
     Epochs: 75
+    FLOPs: 134526773248
     Parameters: 23714981
     Pretrained: ImageNet
     Training Data: UCF101
@@ -29,6 +30,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 100
+    FLOPs: 32959107072
     Parameters: 23606384
     Pretrained: ImageNet
     Training Data: Diving48
@@ -50,6 +52,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 4
     Epochs: 100
+    FLOPs: 32959107072
     Parameters: 23606384
     Pretrained: ImageNet
     Training Data: Diving48
@@ -71,6 +74,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 32
     Epochs: 50
+    FLOPs: 43048605696
     Parameters: 23612531
     Pretrained: ImageNet
     Training Data: HMDB51
@@ -92,6 +96,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 32
     Epochs: 50
+    FLOPs: 43048605696
     Parameters: 23612531
     Pretrained: Kinetics400
     Training Data: HMDB51
@@ -112,6 +117,7 @@ Models:
   Metadata:
     Architecture: ResNet50
     Epochs: 50
+    FLOPs: 43048605696
     Parameters: 23612531
     Pretrained: Moments
     Training Data: HMDB51
@@ -133,6 +139,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 32
     Epochs: 100
+    FLOPs: 102997721600
     Parameters: 24327632
     Pretrained: ImageNet
     Resolution: 340x256
@@ -155,6 +162,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 32
     Epochs: 100
+    FLOPs: 102997721600
     Parameters: 24327632
     Pretrained: ImageNet
     Resolution: short-side 256
@@ -177,6 +185,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 16
     Epochs: 100
+    FLOPs: 32959827968
     Parameters: 24327632
     Pretrained: ImageNet
     Resolution: 340x256
@@ -199,6 +208,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 32
     Epochs: 100
+    FLOPs: 134527385600
     Parameters: 24327632
     Pretrained: ImageNet
     Resolution: short-side 320
@@ -221,6 +231,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 32
     Epochs: 110
+    FLOPs: 109881868800
     Parameters: 24327632
     Pretrained: ImageNet
     Resolution: short-side 320
@@ -243,6 +254,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 12
     Epochs: 100
+    FLOPs: 134527385600
     Parameters: 24327632
     Pretrained: ImageNet
     Resolution: short-side 256
@@ -265,6 +277,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 12
     Epochs: 100
+    FLOPs: 134527385600
     Parameters: 24327632
     Pretrained: ImageNet
     Resolution: short-side 320
@@ -287,6 +300,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 12
     Epochs: 110
+    FLOPs: 109881868800
     Parameters: 24327632
     Pretrained: ImageNet
     Resolution: short-side 320
@@ -309,6 +323,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 32
     Epochs: 100
+    FLOPs: 102997721600
     Parameters: 24327632
     Pretrained: ImageNet
     Resolution: short-side 320
@@ -331,6 +346,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 12
     Epochs: 100
+    FLOPs: 32959827968
     Parameters: 24327632
     Pretrained: ImageNet
     Resolution: 340x256
@@ -353,6 +369,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 32
     Epochs: 100
+    FLOPs: 134527385600
     Parameters: 24327632
     Pretrained: ImageNet
     Resolution: short-side 256
@@ -375,6 +392,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 32
     Epochs: 100
+    FLOPs: 32959827968
     Parameters: 24327632
     Pretrained: ImageNet
     Resolution: short-side 256
@@ -397,6 +415,7 @@ Models:
     Architecture: ResNeXt101-32x4d [[MMCls](https://github.com/open-mmlab/mmclassification/tree/master/configs/resnext)]
     Batch Size: 16
     Epochs: 100
+    FLOPs: 262238208000
     Parameters: 42948304
     Pretrained: ImageNet
     Resolution: short-side 320
@@ -419,6 +438,7 @@ Models:
     Architecture: ResNeXt101-32x4d [[TorchVision](https://github.com/pytorch/vision/)]
     Batch Size: 12
     Epochs: 100
+    FLOPs: 255225561600
     Parameters: 27355600
     Pretrained: ImageNet
     Resolution: short-side 320
@@ -441,6 +461,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 32
     Epochs: 100
+    FLOPs: 102997721600
     Parameters: 24327632
     Pretrained: ImageNet
     Resolution: 340x256
@@ -460,6 +481,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 32
     Epochs: 100
+    FLOPs: 102997721600
     Parameters: 24327632
     Pretrained: IG-1B
     Resolution: short-side 320
@@ -479,6 +501,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 32
     Epochs: 100
+    FLOPs: 102997721600
     Parameters: 24327632
     Pretrained: IG-1B
     Resolution: short-side 320
@@ -498,6 +521,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 12
     Epochs: 100
+    FLOPs: 134527795200
     Parameters: 24737432
     Pretrained: ImageNet
     Resolution: short-side 256
@@ -520,6 +544,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 12
     Epochs: 100
+    FLOPs: 134528000000
     Parameters: 24942332
     Pretrained: ImageNet
     Resolution: short-side 256
@@ -542,6 +567,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 16
     Epochs: 50
+    FLOPs: 32781541376
     Parameters: 23864558
     Pretrained: ImageNet
     Resolution: height 100
@@ -564,6 +590,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 4
     Epochs: 50
+    FLOPs: 32781541376
     Parameters: 23864558
     Pretrained: ImageNet
     Resolution: height 100
@@ -586,6 +613,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 16
     Epochs: 50
+    FLOPs: 32959365120
     Parameters: 23864558
     Pretrained: ImageNet
     Resolution: height 240
@@ -608,6 +636,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 4
     Epochs: 50
+    FLOPs: 65918373888
     Parameters: 23864558
     Pretrained: ImageNet
     Resolution: height 240
@@ -630,6 +659,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 16
     Epochs: 100
+    FLOPs: 32287070208
     Parameters: 24202643
     Pretrained: ImageNet
     Resolution: short-side 256
@@ -652,6 +682,7 @@ Models:
     Architecture: ResNet101
     Batch Size: 16
     Epochs: 50
+    FLOPs: 51249301504
     Parameters: 43141497
     Pretrained: ImageNet
     Resolution: short-side 256
@@ -673,6 +704,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 50
+    FLOPs: 134526976000
     Parameters: 23917832
     Pretrained: Kinetics400
     Resolution: short-side 320
@@ -695,6 +727,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 50
+    FLOPs: 134526976000
     Parameters: 23917832
     Pretrained: Kinetics400
     Resolution: short-side 320
@@ -717,6 +750,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 150
+    FLOPs: 109881459200
     Parameters: 23939784
     Pretrained: Kinetics400
     Resolution: 340x256
@@ -739,6 +773,7 @@ Models:
     Architecture: ResNet50
     Batch Size: 8
     Epochs: 150
+    FLOPs: 109881459200
     Parameters: 23939784
     Pretrained: Kinetics400
     Resolution: 340x256
@@ -761,6 +796,7 @@ Models:
     Architecture: ResNet18
     Batch Size: 32
     Epochs: 100
+    FLOPs: 59483309568
     Parameters: 11555619
     Pretrained: ImageNet
     Resolution: short-side 256
@@ -783,6 +819,7 @@ Models:
     Architecture: ResNet18
     Batch Size: 32
     Epochs: 100
+    FLOPs: 59483058176
     Parameters: 11303736
     Pretrained: ImageNet
     Resolution: short-side 256
@@ -805,6 +842,7 @@ Models:
     Architecture: ResNet18
     Batch Size: 32
     Epochs: 100
+    FLOPs: 59483790336
     Parameters: 12037326
     Pretrained: ImageNet
     Resolution: short-side 256
@@ -827,6 +865,7 @@ Models:
     Architecture: ResNet18
     Batch Size: 32
     Epochs: 100
+    FLOPs: 59482966528
     Parameters: 11211909
     Pretrained: ImageNet
     Resolution: short-side 256
@@ -849,6 +888,7 @@ Models:
     Architecture: ResNet18
     Batch Size: 32
     Epochs: 100
+    FLOPs: 59483790336
     Parameters: 12037326
     Pretrained: ImageNet
     Resolution: short-side 256
@@ -871,6 +911,7 @@ Models:
     Architecture: ResNet18
     Batch Size: 32
     Epochs: 100
+    FLOPs: 59482991104
     Parameters: 11236533
     Pretrained: ImageNet
     Resolution: short-side 256
diff --git a/configs/recognition/x3d/metafile.yml b/configs/recognition/x3d/metafile.yml
index 3f7e9393d1..8e264a3875 100644
--- a/configs/recognition/x3d/metafile.yml
+++ b/configs/recognition/x3d/metafile.yml
@@ -7,6 +7,7 @@ Models:
   Metadata:
     Architecture: X3D_S
     Batch Size: 1
+    FLOPs: 2967543760
     Parameters: 3794322
     Resolution: short-side 320
     Training Data: Kinetics-400
@@ -26,6 +27,7 @@ Models:
   Metadata:
     Architecture: X3D_M
     Batch Size: 1
+    FLOPs: 6490866832
     Parameters: 3794322
     Resolution: short-side 320
     Training Data: Kinetics-400
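[Note: the FLOPs fields added above are plain integer operation counts per forward pass. As a rough, hedged illustration of how such numbers can be produced (this sketch assumes mmcv's get_model_complexity_info and uses a toy stand-in model; the exact tool and input shapes used for this patch are not shown in the diff):

    import torch.nn as nn
    from mmcv.cnn import get_model_complexity_info

    # Toy stand-in for a video backbone; the metafile numbers come from full
    # mmaction2 recognizers, so treat this purely as a usage illustration.
    model = nn.Sequential(nn.Conv3d(3, 8, 3, padding=1), nn.AdaptiveAvgPool3d(1))
    # input_shape excludes the batch dimension: (C, T, H, W) for 3D models.
    flops, params = get_model_complexity_info(
        model, (3, 32, 224, 224), as_strings=False, print_per_layer_stat=False)
    print(flops, params)
]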
diff --git a/mmaction/models/backbones/resnet3d.py b/mmaction/models/backbones/resnet3d.py
index 75febec50c..b8d8bfbb06 100644
--- a/mmaction/models/backbones/resnet3d.py
+++ b/mmaction/models/backbones/resnet3d.py
@@ -1,3 +1,5 @@
+import warnings
+
 import torch.nn as nn
 import torch.utils.checkpoint as cp
 from mmcv.cnn import (ConvModule, NonLocal3d, build_activation_layer,
@@ -666,6 +668,11 @@ def _inflate_bn_params(bn3d, state_dict_2d, module_name_2d,
         for param_name, param in bn3d.named_parameters():
             param_2d_name = f'{module_name_2d}.{param_name}'
             param_2d = state_dict_2d[param_2d_name]
+            if param.data.shape != param_2d.shape:
+                warnings.warn(f'The parameter of {module_name_2d} is not '
+                              'loaded due to incompatible shapes. ')
+                return
+
             param.data.copy_(param_2d)
             inflated_param_names.append(param_2d_name)

diff --git a/mmaction/models/backbones/resnet3d_slowfast.py b/mmaction/models/backbones/resnet3d_slowfast.py
index be1ea1a2b2..c41c2b6936 100644
--- a/mmaction/models/backbones/resnet3d_slowfast.py
+++ b/mmaction/models/backbones/resnet3d_slowfast.py
@@ -1,3 +1,5 @@
+import warnings
+
 import torch
 import torch.nn as nn
 from mmcv.cnn import ConvModule, kaiming_init
@@ -280,7 +282,12 @@ def _inflate_conv_params(self, conv3d, state_dict_2d, module_name_2d,
         old_shape = conv2d_weight.shape
         new_shape = conv3d.weight.data.shape
         kernel_t = new_shape[2]
+
         if new_shape[1] != old_shape[1]:
+            if new_shape[1] < old_shape[1]:
+                warnings.warn(f'The parameter of {module_name_2d} is not '
+                              'loaded due to incompatible shapes. ')
+                return
             # Inplanes may be different due to lateral connections
             new_channels = new_shape[1] - old_shape[1]
             pad_shape = old_shape
@@ -291,6 +298,7 @@ def _inflate_conv_params(self, conv3d, state_dict_2d, module_name_2d,
                 torch.zeros(pad_shape).type_as(conv2d_weight).to(
                     conv2d_weight.device)),
                 dim=1)
+
         new_weight = conv2d_weight.data.unsqueeze(2).expand_as(
             conv3d.weight) / kernel_t
         conv3d.weight.data.copy_(new_weight)
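[Note: the two backbone changes above make 2D-to-3D weight inflation fail soft instead of crashing: parameters whose shapes cannot be reconciled are skipped with a warning, and SlowFast lateral connections that only add input channels are zero-padded before the 2D kernel is repeated along time and divided by the temporal kernel size. A minimal self-contained sketch of that inflation logic, with illustrative names rather than mmaction2 API:

    import warnings
    import torch

    def inflate_conv_weight(w2d, shape3d):
        # w2d: (Cout, Cin2d, H, W); shape3d: (Cout, Cin, T, H, W).
        cout, cin, t, h, w = shape3d
        if w2d.shape[1] != cin:
            if cin < w2d.shape[1]:
                # Mirror of the guard added above: refuse to shrink channels.
                warnings.warn('incompatible shapes, weight not loaded')
                return None
            # Extra input channels (e.g. lateral connections) get zero padding.
            pad = torch.zeros(w2d.shape[0], cin - w2d.shape[1], *w2d.shape[2:])
            w2d = torch.cat((w2d, pad), dim=1)
        # Repeat along time and divide by T to keep the summed response stable.
        return w2d.unsqueeze(2).expand(cout, cin, t, h, w) / t

    w3d = inflate_conv_weight(torch.randn(64, 3, 3, 3), (64, 8, 3, 3, 3))
]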
From 8a86bb857d09b39b886489f2752875bfbcba90df Mon Sep 17 00:00:00 2001
From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com>
Date: Thu, 1 Jul 2021 16:07:31 +0800
Subject: [PATCH 180/414] [Fix] Update metafiles (#979)

---
 configs/detection/acrn/metafile.yml           |   4 +-
 configs/recognition/c3d/metafile.yml          |   4 +-
 configs/recognition/csn/metafile.yml          |  32 +--
 configs/recognition/i3d/metafile.yml          |  40 ++--
 configs/recognition/omnisource/metafile.yml   |  64 +++---
 configs/recognition/r2plus1d/metafile.yml     |  16 +-
 configs/recognition/slowfast/metafile.yml     |  42 ++--
 configs/recognition/slowonly/metafile.yml     | 114 +++++-----
 configs/recognition/tanet/metafile.yml        |  32 +--
 configs/recognition/tin/metafile.yml          |  12 +-
 configs/recognition/tpn/metafile.yml          |  12 +-
 configs/recognition/trn/metafile.yml          |  16 +-
 configs/recognition/tsm/metafile.yml          | 196 +++++++++---------
 configs/recognition/tsn/metafile.yml          | 152 +++++++-------
 configs/recognition/x3d/metafile.yml          |   4 +-
 configs/recognition_audio/resnet/metafile.yml |  12 +-
 configs/skeleton/posec3d/metafile.yml         |  12 +-
 17 files changed, 382 insertions(+), 382 deletions(-)

diff --git a/configs/detection/acrn/metafile.yml b/configs/detection/acrn/metafile.yml
index fe41d6db10..f0d40187f8 100644
--- a/configs/detection/acrn/metafile.yml
+++ b/configs/detection/acrn/metafile.yml
@@ -18,7 +18,7 @@ Models:
   Results:
   - Dataset: AVA v2.1
     Metrics:
-    - mAP: 27.1
+      mAP: 27.1
     Task: Spatial Temporal Action Detection
   Training Json Log: https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.json
   Training Log: https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.log
@@ -39,7 +39,7 @@ Models:
   Results:
   - Dataset: AVA v2.2
     Metrics:
-    - mAP: 27.8
+      mAP: 27.8
     Task: Spatial Temporal Action Detection
   Training Json Log: https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json
   Training Log: https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log
diff --git a/configs/recognition/c3d/metafile.yml b/configs/recognition/c3d/metafile.yml
index 3e7a6153ea..5d9986c4fd 100644
--- a/configs/recognition/c3d/metafile.yml
+++ b/configs/recognition/c3d/metafile.yml
@@ -19,8 +19,8 @@ Models:
   Results:
   - Dataset: UCF101
     Metrics:
-      top1 acc: 83.27
-      top5 acc: 95.9
+      top1 accuracy: 83.27
+      top5 accuracy: 95.9
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/20201021_140429.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/20201021_140429.log
diff --git a/configs/recognition/csn/metafile.yml b/configs/recognition/csn/metafile.yml
index d3cd21b877..9d9df53974 100644
--- a/configs/recognition/csn/metafile.yml
+++ b/configs/recognition/csn/metafile.yml
@@ -19,8 +19,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 80.14
-      top5 acc: 94.93
+      top1 accuracy: 80.14
+      top5 accuracy: 94.93
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/20200728_031952.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/20200728_031952.log
@@ -42,8 +42,8 @@ Models:
   Results:
  - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 82.76
-      top5 acc: 95.68
+      top1 accuracy: 82.76
+      top5 accuracy: 95.68
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log
@@ -63,8 +63,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 77.8
-      top5 acc: 92.8
+      top1 accuracy: 77.8
+      top5 accuracy: 92.8
     Task: Action Recognition
   Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_from_scratch_r152_32x2x1_180e_kinetics400_rgb_20210617-d565828d.pth
 - Config: configs/recognition/csn/ipcsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py
@@ -82,8 +82,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 82.5
-      top5 acc: 95.3
+      top1 accuracy: 82.5
+      top5 accuracy: 95.3
     Task: Action Recognition
   Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-c3be9793.pth
   inference_time(video/s): x
@@ -102,8 +102,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 78.8
-      top5 acc: 93.5
+      top1 accuracy: 78.8
+      top5 accuracy: 93.5
     Task: Action Recognition
   Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_sports1m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-3367437a.pth
   inference_time(video/s): x
@@ -122,8 +122,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 76.5
-      top5 acc: 92.1
+      top1 accuracy: 76.5
+      top5 accuracy: 92.1
     Task: Action Recognition
   Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_from_scratch_r152_32x2x1_180e_kinetics400_rgb_20210617-5c933ae1.pth
   inference_time(video/s): x
@@ -142,8 +142,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 79.0
-      top5 acc: 94.2
+      top1 accuracy: 79.0
+      top5 accuracy: 94.2
     Task: Action Recognition
   Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_ig65m_pretrained_r50_32x2x1_58e_kinetics400_rgb_20210617-86d33018.pth
   inference_time(video/s): x
@@ -162,8 +162,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 78.2
-      top5 acc: 93.0
+      top1 accuracy: 78.2
+      top5 accuracy: 93.0
     Task: Action Recognition
   Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_sports1m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-b9b10241.pth
   inference_time(video/s): x
diff --git a/configs/recognition/i3d/metafile.yml b/configs/recognition/i3d/metafile.yml
index 711d57a475..07dbc8cacc 100644
--- a/configs/recognition/i3d/metafile.yml
+++ b/configs/recognition/i3d/metafile.yml
@@ -19,8 +19,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 72.68
-      top5 acc: 90.78
+      top1 accuracy: 72.68
+      top5 accuracy: 90.78
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/20200614_060456.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/20200614_060456.log
@@ -42,8 +42,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 73.27
-      top5 acc: 90.92
+      top1 accuracy: 73.27
+      top5 accuracy: 90.92
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/20200725_031555.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/20200725_031555.log
@@ -65,8 +65,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 72.85
-      top5 acc: 90.75
+      top1 accuracy: 72.85
+      top5 accuracy: 90.75
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/20200706_143014.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/20200706_143014.log
@@ -88,8 +88,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 72.77
-      top5 acc: 90.57
+      top1 accuracy: 72.77
+      top5 accuracy: 90.57
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/20200616_230011.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/20200616_230011.log
@@ -111,8 +111,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 73.48
-      top5 acc: 91.0
+      top1 accuracy: 73.48
+      top5 accuracy: 91.0
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/20200725_031604.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/20200725_031604.log
@@ -134,8 +134,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 72.32
-      top5 acc: 90.72
+      top1 accuracy: 72.32
+      top5 accuracy: 90.72
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/20200612_233836.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/20200612_233836.log
@@ -157,8 +157,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 73.24
-      top5 acc: 90.99
+      top1 accuracy: 73.24
+      top5 accuracy: 90.99
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/20200725_031457.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/20200725_031457.log
@@ -180,8 +180,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 74.71
-      top5 acc: 91.81
+      top1 accuracy: 74.71
+      top5 accuracy: 91.81
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034054.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034054.log
@@ -203,8 +203,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 73.37
-      top5 acc: 91.26
+      top1 accuracy: 73.37
+      top5 accuracy: 91.26
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034909.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034909.log
@@ -226,8 +226,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 73.92
-      top5 acc: 91.59
+      top1 accuracy: 73.92
+      top5 accuracy: 91.59
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/20200814_044208.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/20200814_044208.log
diff --git a/configs/recognition/omnisource/metafile.yml b/configs/recognition/omnisource/metafile.yml
index 7f60b1aa47..5561e2aa69 100644
--- a/configs/recognition/omnisource/metafile.yml
+++ b/configs/recognition/omnisource/metafile.yml
@@ -20,8 +20,8 @@ Models:
   Results:
   - Dataset: MiniKinetics
     Metrics:
-      top1 acc: 77.4
-      top5 acc: 93.6
+      top1 accuracy: 77.4
+      top5 accuracy: 93.6
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030.log
@@ -44,8 +44,8 @@ Models:
   Results:
   - Dataset: MiniKinetics
     Metrics:
-      top1 acc: 78.0
-      top5 acc: 93.6
+      top1 accuracy: 78.0
+      top5 accuracy: 93.6
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030.log
@@ -68,8 +68,8 @@ Models:
   Results:
   - Dataset: MiniKinetics
     Metrics:
-      top1 acc: 78.6
-      top5 acc: 93.6
+      top1 accuracy: 78.6
+      top5 accuracy: 93.6
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030.log
@@ -92,8 +92,8 @@ Models:
   Results:
   - Dataset: MiniKinetics
     Metrics:
-      top1 acc: 80.6
-      top5 acc: 95.0
+      top1 accuracy: 80.6
+      top5 accuracy: 95.0
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030.log
@@ -116,8 +116,8 @@ Models:
   Results:
   - Dataset: MiniKinetics
     Metrics:
-      top1 acc: 78.6
-      top5 acc: 93.2
+      top1 accuracy: 78.6
+      top5 accuracy: 93.2
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030.log
@@ -140,8 +140,8 @@ Models:
   Results:
   - Dataset: MiniKinetics
     Metrics:
-      top1 acc: 81.3
-      top5 acc: 94.8
+      top1 accuracy: 81.3
+      top5 accuracy: 94.8
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030.log
@@ -164,8 +164,8 @@ Models:
   Results:
   - Dataset: MiniKinetics
     Metrics:
-      top1 acc: 78.6
-      top5 acc: 93.9
+      top1 accuracy: 78.6
+      top5 accuracy: 93.9
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030.log
@@ -188,8 +188,8 @@ Models:
   Results:
   - Dataset: MiniKinetics
     Metrics:
-      top1 acc: 80.8
-      top5 acc: 95.0
+      top1 accuracy: 80.8
+      top5 accuracy: 95.0
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030.log
@@ -212,8 +212,8 @@ Models:
   Results:
   - Dataset: MiniKinetics
     Metrics:
-      top1 acc: 81.3
-      top5 acc: 95.2
+      top1 accuracy: 81.3
+      top5 accuracy: 95.2
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030.log
@@ -236,8 +236,8 @@ Models:
   Results:
   - Dataset: MiniKinetics
     Metrics:
-      top1 acc: 82.4
-      top5 acc: 95.6
+      top1 accuracy: 82.4
+      top5 accuracy: 95.6
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030.log
@@ -260,8 +260,8 @@ Models:
   Results:
   - Dataset: MiniKinetics
     Metrics:
-      top1 acc: 80.3
-      top5 acc: 94.5
+      top1 accuracy: 80.3
+      top5 accuracy: 94.5
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030.log
@@ -284,8 +284,8 @@ Models:
   Results:
   - Dataset: MiniKinetics
     Metrics:
-      top1 acc: 82.9
-      top5 acc: 95.8
+      top1 accuracy: 82.9
+      top5 accuracy: 95.8
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030.log
@@ -306,8 +306,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 73.6
-      top5 acc: 91.0
+      top1 accuracy: 73.6
+      top5 accuracy: 91.0
     Task: Action Recognition
   Weights: https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_imagenet_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-54192355.pth
 - Config: configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py
@@ -326,8 +326,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 75.7
-      top5 acc: 91.9
+      top1 accuracy: 75.7
+      top5 accuracy: 91.9
     Task: Action Recognition
   Weights: https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-2863fed0.pth
 - Config: configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py
@@ -346,8 +346,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 76.8
-      top5 acc: 92.5
+      top1 accuracy: 76.8
+      top5 accuracy: 92.5
     Task: Action Recognition
   Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r50_omni_4x16x1_kinetics400_rgb_20200926-51b1f7ea.pth
 - Config: configs/recognition/slowonly/slowonly_r101_8x8x1_196e_kinetics400_rgb.py
@@ -366,7 +366,7 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 80.4
-      top5 acc: 94.4
+      top1 accuracy: 80.4
+      top5 accuracy: 94.4
     Task: Action Recognition
   Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_omni_8x8x1_kinetics400_rgb_20200926-b5dbb701.pth
diff --git a/configs/recognition/r2plus1d/metafile.yml b/configs/recognition/r2plus1d/metafile.yml
index 53522a50aa..5a61cb3a6c 100644
--- a/configs/recognition/r2plus1d/metafile.yml
+++ b/configs/recognition/r2plus1d/metafile.yml
@@ -19,8 +19,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 67.3
-      top5 acc: 87.65
+      top1 accuracy: 67.3
+      top5 accuracy: 87.65
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb/20200728_021421.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb/20200728_021421.log
@@ -42,8 +42,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 67.3
-      top5 acc: 87.8
+      top1 accuracy: 67.3
+      top5 accuracy: 87.8
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb/20200724_201360.log
   Training Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb/20200724_201360.log.json
@@ -65,8 +65,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 68.68
-      top5 acc: 88.36
+      top1 accuracy: 68.68
+      top5 accuracy: 88.36
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_8x8_69.58_88.36.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r21d_8x8.log
@@ -88,8 +88,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 74.6
-      top5 acc: 91.59
+      top1 accuracy: 74.6
+      top5 accuracy: 91.59
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r2plus1d_r34_32x2_74.6_91.6.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r21d_32x2.log
diff --git a/configs/recognition/slowfast/metafile.yml b/configs/recognition/slowfast/metafile.yml
index ac9ff8526b..cae7965667 100644
--- a/configs/recognition/slowfast/metafile.yml
+++ b/configs/recognition/slowfast/metafile.yml
@@ -19,8 +19,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 74.75
-      top5 acc: 91.73
+      top1 accuracy: 74.75
+      top5 accuracy: 91.73
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log
@@ -42,8 +42,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 74.34
-      top5 acc: 91.58
+      top1 accuracy: 74.34
+      top5 accuracy: 91.58
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log
@@ -65,8 +65,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 75.64
-      top5 acc: 92.3
+      top1 accuracy: 75.64
+      top5 accuracy: 92.3
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/20200704_232901.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/20200704_232901.log
@@ -88,8 +88,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 75.61
-      top5 acc: 92.34
+      top1 accuracy: 75.61
+      top5 accuracy: 92.34
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log
@@ -111,8 +111,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 76.94
-      top5 acc: 92.8
+      top1 accuracy: 76.94
+      top5 accuracy: 92.8
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log
@@ -134,8 +134,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 76.69
-      top5 acc: 93.07
+      top1 accuracy: 76.69
+      top5 accuracy: 93.07
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log
@@ -157,8 +157,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 77.9
-      top5 acc: 93.51
+      top1 accuracy: 77.9
+      top5 accuracy: 93.51
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log
@@ -180,8 +180,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 77.13
-      top5 acc: 93.2
+      top1 accuracy: 77.13
+      top5 accuracy: 93.2
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log
@@ -190,21 +190,21 @@ Models:
   In Collection: SlowFast
   Metadata:
     Architecture: ResNet50
+    Batch Size: 4
     Epochs: 22
     FLOPs: 132442627584
     Parameters: 34044630
-    Batch Size: 4
-    Training Data: SthV1
-    Training Resources: 8 GPUs
     Pretrained: Kinetics400
     Resolution: height 100
+    Training Data: SthV1
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: slowfast_r50_16x8x1_22e_sthv1_rgb
   Results:
   - Dataset: SthV1
     Metrics:
-      top1 acc: 49.24
-      top5 acc: 78.79
+      top1 accuracy: 49.24
+      top5 accuracy: 78.79
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/20210606_225114.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/20210606_225114.log
diff --git a/configs/recognition/slowonly/metafile.yml b/configs/recognition/slowonly/metafile.yml
index b67b9d9ed8..695d808051 100644
--- a/configs/recognition/slowonly/metafile.yml
+++ b/configs/recognition/slowonly/metafile.yml
@@ -18,8 +18,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 76.8
-      top5 acc: 92.5
+      top1 accuracy: 76.8
+      top5 accuracy: 92.5
     Task: Action Recognition
   Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r50_omni_4x16x1_kinetics400_rgb_20200926-51b1f7ea.pth
 - Config: configs/recognition/slowonly/slowonly_r101_8x8x1_196e_kinetics400_rgb.py
@@ -38,8 +38,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 76.5
-      top5 acc: 92.7
+      top1 accuracy: 76.5
+      top5 accuracy: 92.7
     Task: Action Recognition
   Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_without_omni_8x8x1_kinetics400_rgb_20200926-0c730aef.pth
 - Config: configs/recognition/slowonly/slowonly_r101_8x8x1_196e_kinetics400_rgb.py
@@ -58,8 +58,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 80.4
-      top5 acc: 94.4
+      top1 accuracy: 80.4
+      top5 accuracy: 94.4
     Task: Action Recognition
   Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_omni_8x8x1_kinetics400_rgb_20200926-b5dbb701.pth
 - Config: configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py
@@ -79,8 +79,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 72.76
-      top5 acc: 90.51
+      top1 accuracy: 72.76
+      top5 accuracy: 90.51
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log
@@ -102,8 +102,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 72.9
-      top5 acc: 90.82
+      top1 accuracy: 72.9
+      top5 accuracy: 90.82
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014.log
@@ -125,8 +125,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 74.42
-      top5 acc: 91.49
+      top1 accuracy: 74.42
+      top5 accuracy: 91.49
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/20200817_003320.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/20200817_003320.log
@@ -148,8 +148,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 73.02
-      top5 acc: 90.77
+      top1 accuracy: 73.02
+      top5 accuracy: 90.77
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16_73.02_90.77.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/so_4x16.log
@@ -171,8 +171,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 74.93
-      top5 acc: 91.92
+      top1 accuracy: 74.93
+      top5 accuracy: 91.92
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/slowonly_r50_8x8_74.93_91.92.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/so_8x8.log
@@ -194,8 +194,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 73.39
-      top5 acc: 91.12
+      top1 accuracy: 73.39
+      top5 accuracy: 91.12
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912.log
@@ -217,8 +217,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 75.55
-      top5 acc: 92.04
+      top1 accuracy: 75.55
+      top5 accuracy: 92.04
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912.log
@@ -240,8 +240,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 74.54
-      top5 acc: 91.73
+      top1 accuracy: 74.54
+      top5 accuracy: 91.73
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/20210305_152630.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/20210305_152630.log
@@ -263,8 +263,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 76.07
-      top5 acc: 92.42
+      top1 accuracy: 76.07
+      top5 accuracy: 92.42
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/20210308_212250.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/20210308_212250.log
@@ -286,8 +286,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 61.79
-      top5 acc: 83.62
+      top1 accuracy: 61.79
+      top5 accuracy: 83.62
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_61.8_83.6.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_61.8_83.6.log
@@ -309,8 +309,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 65.76
-      top5 acc: 86.25
+      top1 accuracy: 65.76
+      top5 accuracy: 86.25
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_196e_kinetics400_flow_65.8_86.3.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_196e_kinetics400_flow_65.8_86.3.log
@@ -332,8 +332,8 @@ Models:
   Results:
   - Dataset: Kinetics-600
     Metrics:
-      top1 acc: 77.5
-      top5 acc: 93.7
+      top1 accuracy: 77.5
+      top5 accuracy: 93.7
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb/slowonly_r50_video_8x8x1_256e_kinetics600_rgb_20201015.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb/slowonly_r50_video_8x8x1_256e_kinetics600_rgb_20201015.log
@@ -355,8 +355,8 @@ Models:
   Results:
   - Dataset: Kinetics-700
     Metrics:
-      top1 acc: 65.0
-      top5 acc: 86.1
+      top1 accuracy: 65.0
+      top5 accuracy: 86.1
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb/slowonly_r50_video_8x8x1_256e_kinetics700_rgb_20201015.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb/slowonly_r50_video_8x8x1_256e_kinetics700_rgb_20201015.log
@@ -378,8 +378,8 @@ Models:
   Results:
   - Dataset: GYM99
     Metrics:
-      mean class acc: 70.2
-      top1 acc: 79.3
+      mean top1 accuracy: 70.2
+      top1 accuracy: 79.3
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111.log
@@ -401,8 +401,8 @@ Models:
   Results:
   - Dataset: GYM99
     Metrics:
-      mean class acc: 71.0
-      top1 acc: 80.3
+      mean top1 accuracy: 71.0
+      top1 accuracy: 80.3
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.log
@@ -424,7 +424,7 @@ Models:
   Results:
   - Dataset: Jester
     Metrics:
-      top1 acc: 97.2
+      top1 accuracy: 97.2
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.log
@@ -433,20 +433,20 @@ Models:
   In Collection: SlowOnly
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 64
     FLOPs: 54859765760
     Parameters: 31738995
-    Batch Size: 8
+    Pretrained: ImageNet
     Training Data: HMDB51
     Training Resources: 8 GPUs
-    Pretrained: ImageNet
   Modality: RGB
   Name: slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb
   Results:
   - Dataset: HMDB51
     Metrics:
-      top1 acc: 37.52
-      top5 acc: 71.50
+      top1 accuracy: 37.52
+      top5 accuracy: 71.5
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb/20210605_185256.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb/20210605_185256.log
@@ -455,20 +455,20 @@ Models:
   In Collection: SlowOnly
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 40
     FLOPs: 54859765760
     Parameters: 31738995
-    Batch Size: 8
+    Pretrained: Kinetics400
     Training Data: HMDB51
     Training Resources: 8 GPUs
-    Pretrained: Kinetics400
   Modality: RGB
   Name: slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb
   Results:
   - Dataset: HMDB51
     Metrics:
-      top1 acc: 65.95
-      top5 acc: 91.05
+      top1 accuracy: 65.95
+      top5 accuracy: 91.05
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb/20210606_010153.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb/20210606_010153.log
@@ -477,20 +477,20 @@ Models:
   In Collection: SlowOnly
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 64
     FLOPs: 54859868160
     Parameters: 31841445
-    Batch Size: 8
+    Pretrained: ImageNet
     Training Data: UCF101
     Training Resources: 8 GPUs
-    Pretrained: ImageNet
   Modality: RGB
   Name: slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb
   Results:
   - Dataset: UCF101
     Metrics:
-      top1 acc: 71.35
-      top5 acc: 89.35
+      top1 accuracy: 71.35
+      top5 accuracy: 89.35
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb/20210605_213503.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb/20210605_213503.log
@@ -499,20 +499,20 @@ Models:
   In Collection: SlowOnly
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 40
     FLOPs: 54859868160
     Parameters: 31841445
-    Batch Size: 8
+    Pretrained: Kinetics400
     Training Data: UCF101
     Training Resources: 8 GPUs
-    Pretrained: Kinetics400
   Modality: RGB
   Name: slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb
   Results:
   - Dataset: UCF101
     Metrics:
-      top1 acc: 92.78
-      top5 acc: 99.42
+      top1 accuracy: 92.78
+      top5 accuracy: 99.42
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/20210606_010231.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/20210606_010231.log
@@ -521,20 +521,20 @@ Models:
   In Collection: SlowOnly
   Metadata:
     Architecture: ResNet50
+    Batch Size: 8
     Epochs: 64
     FLOPs: 53907910656
     Parameters: 31991022
-    Batch Size: 8
+    Pretrained: ImageNet
     Training Data: SthV1
     Training Resources: 8 GPUs
-    Pretrained: ImageNet
   Modality: RGB
   Name: slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb
   Results:
  - Dataset: SthV1
     Metrics:
-      top1 acc: 46.63
-      top5 acc: 77.19
+      top1 accuracy: 46.63
+      top5 accuracy: 77.19
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/20210605_235410.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/20210605_235410.log
diff --git a/configs/recognition/tanet/metafile.yml b/configs/recognition/tanet/metafile.yml
index 1271209772..7f62a3509e 100644
--- a/configs/recognition/tanet/metafile.yml
+++ b/configs/recognition/tanet/metafile.yml
@@ -19,8 +19,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 76.28
-      top5 acc: 92.6
+      top1 accuracy: 76.28
+      top5 accuracy: 92.6
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.log
@@ -29,23 +29,23 @@ Models:
   In Collection: TANet
   Metadata:
     Architecture: TANet
+    Batch Size: 8
     Epochs: 50
     FLOPs: 32972787840
     Parameters: 25127246
-    Batch Size: 8
-    Training Data: SthV1
-    Training Resources: 8 GPUs
     Pretrained: ImageNet
     Resolution: height 100
+    Training Data: SthV1
+    Training Resources: 8 GPUs
   Modality: RGB
   Name: tanet_r50_1x1x8_50e_sthv1_rgb
   Results:
   - Dataset: SthV1
     Metrics:
-      top1 acc (accurate): 49.69
-      top1 acc (efficient): 47.45
-      top5 acc (accurate): 77.62
-      top5 acc (efficient): 76.00
+      top1 accuracy: 49.69
+      top1 accuracy (efficient): 47.45
+      top5 accuracy: 77.62
+      top5 accuracy (efficient): 76.0
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log
@@ -54,23 +54,23 @@ Models:
   In Collection: TANet
   Metadata:
     Architecture: TANet
+    Batch Size: 8
     Epochs: 50
     FLOPs: 65946542336
     Parameters: 25134670
-    Batch Size: 8
-    Training Data: SthV1
-    gpus: 4
     Pretrained: ImageNet
     Resolution: height 100
+    Training Data: SthV1
+    gpus: 4
   Modality: RGB
   Name: tanet_r50_1x1x16_50e_sthv1_rgb
   Results:
   - Dataset: SthV1
     Metrics:
-      top1 acc (accurate): 50.41
-      top1 acc (efficient): 47.73
-      top5 acc (accurate): 78.47
-      top5 acc (efficient): 77.31
+      top1 accuracy: 50.41
+      top1 accuracy (efficient): 47.73
+      top5 accuracy: 78.47
+      top5 accuracy (efficient): 77.31
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/20210607_155335.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/20210607_155335.log
diff --git a/configs/recognition/tin/metafile.yml b/configs/recognition/tin/metafile.yml
index 0c8bf63c62..ae026f9009 100644
--- a/configs/recognition/tin/metafile.yml
+++ b/configs/recognition/tin/metafile.yml
@@ -19,8 +19,8 @@ Models:
   Results:
   - Dataset: SthV1
     Metrics:
-      top1 acc: 44.25
-      top5 acc: 73.94
+      top1 accuracy: 44.25
+      top5 accuracy: 73.94
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb/20200729_034132.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb/20200729_034132.log
@@ -42,8 +42,8 @@ Models:
   Results:
   - Dataset: SthV2
     Metrics:
-      top1 acc: 56.7
-      top5 acc: 83.62
+      top1 accuracy: 56.7
+      top5 accuracy: 83.62
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb/20200912_225451.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb/20200912_225451.log
@@ -65,8 +65,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 70.89
-      top5 acc: 89.89
+      top1 accuracy: 70.89
+      top5 accuracy: 89.89
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/20200809_142447.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/20200809_142447.log
diff --git a/configs/recognition/tpn/metafile.yml b/configs/recognition/tpn/metafile.yml
index c028d1c8d3..8707ab0426 100644
--- a/configs/recognition/tpn/metafile.yml
+++ b/configs/recognition/tpn/metafile.yml
@@ -19,8 +19,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 73.1
-      top5 acc: 91.03
+      top1 accuracy: 73.1
+      top5 accuracy: 91.03
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/20200910_134330.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/20200910_134330.log
@@ -42,8 +42,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 76.2
-      top5 acc: 92.44
+      top1 accuracy: 76.2
+      top5 accuracy: 92.44
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/20200923_151919.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/20200923_151919.log
@@ -65,8 +65,8 @@ Models:
   Results:
   - Dataset: SthV1
     Metrics:
-      top1 acc: 50.8
-      top5 acc: 79.05
+      top1 accuracy: 50.8
+      top5 accuracy: 79.05
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/20210311_162636.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/20210311_162636.log
diff --git a/configs/recognition/trn/metafile.yml b/configs/recognition/trn/metafile.yml
index 10890f14de..45f69f2099 100644
--- a/configs/recognition/trn/metafile.yml
+++ b/configs/recognition/trn/metafile.yml
@@ -18,10 +18,10 @@ Models:
   Results:
   - Dataset: SthV1
     Metrics:
-      top1 acc (accurate): 33.88
-      top1 acc (efficient): 31.62
-      top5 acc (accurate): 62.12
-      top5 acc (efficient): 60.01
+      top1 accuracy: 33.88
+      top1 accuracy (efficient): 31.62
+      top5 accuracy: 62.12
+      top5 accuracy (efficient): 60.01
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log
@@ -42,10 +42,10 @@ Models:
   Results:
   - Dataset: SthV2
     Metrics:
-      top1 acc (accurate): 47.96
-      top1 acc (efficient): 45.14
-      top5 acc (accurate): 75.97
-      top5 acc (efficient): 73.21
+      top1 accuracy: 47.96
+      top1 accuracy (efficient): 45.14
+      top5 accuracy: 75.97
+      top5 accuracy (efficient): 73.21
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210326_103951.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210326_103951.log
diff --git a/configs/recognition/tsm/metafile.yml b/configs/recognition/tsm/metafile.yml
index 98ba18f9a2..94df124ed8 100644
--- a/configs/recognition/tsm/metafile.yml
+++ b/configs/recognition/tsm/metafile.yml
@@ -19,8 +19,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 70.24
-      top5 acc: 89.56
+      top1 accuracy: 70.24
+      top5 accuracy: 89.56
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log
@@ -42,8 +42,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 70.59
-      top5 acc: 89.52
+      top1 accuracy: 70.59
+      top5 accuracy: 89.52
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log
@@ -65,8 +65,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 70.48
-      top5 acc: 89.4
+      top1 accuracy: 70.48
+      top5 accuracy: 89.4
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.log
@@ -88,8 +88,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 70.25
-      top5 acc: 89.66
+      top1 accuracy: 70.25
+      top5 accuracy: 89.66
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log
@@ -111,8 +111,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 72.9
-      top5 acc: 90.44
+      top1 accuracy: 72.9
+      top5 accuracy: 90.44
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20200626_213415.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20200626_213415.log
@@ -134,8 +134,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 73.38
-      top5 acc: 91.02
+      top1 accuracy: 73.38
+      top5 accuracy: 91.02
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/20200725_032043.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/20200725_032043.log
@@ -157,8 +157,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 72.09
-      top5 acc: 90.37
+      top1 accuracy: 72.09
+      top5 accuracy: 90.37
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log
@@ -180,8 +180,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 71.89
-      top5 acc: 90.73
+      top1 accuracy: 71.89
+      top5 accuracy: 90.73
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log
@@ -203,8 +203,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 72.03
-      top5 acc: 90.25
+      top1 accuracy: 72.03
+      top5 accuracy: 90.25
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log
@@ -226,8 +226,8 @@ Models:
   Results:
   - Dataset: Kinetics-400
     Metrics:
-      top1 acc: 70.7
-      top5 acc: 89.9
+      top1 accuracy:
70.7 + top5 accuracy: 89.9 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log @@ -249,8 +249,8 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 acc: 71.6 - top5 acc: 90.34 + top1 accuracy: 71.6 + top5 accuracy: 90.34 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log @@ -272,8 +272,8 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 acc: 68.46 - top5 acc: 88.64 + top1 accuracy: 68.46 + top5 accuracy: 88.64 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log @@ -294,8 +294,8 @@ Models: Results: - Dataset: Diving48 Metrics: - top1 acc: 75.99 - top5 acc: 97.16 + top1 accuracy: 75.99 + top5 accuracy: 97.16 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log @@ -316,8 +316,8 @@ Models: Results: - Dataset: Diving48 Metrics: - top1 acc: 81.62 - top5 acc: 97.66 + top1 accuracy: 81.62 + top5 accuracy: 97.66 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log @@ -339,10 +339,10 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 acc (accurate): 47.7 - top1 acc (efficient): 45.58 - top5 acc (accurate): 76.12 - top5 acc (efficient): 75.02 + top1 accuracy: 47.7 + top1 accuracy (efficient): 45.58 + top5 accuracy: 76.12 + top5 accuracy (efficient): 75.02 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/20210203_150227.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/20210203_150227.log @@ -366,10 +366,10 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 acc (accurate): 48.51 - top1 acc (efficient): 47.1 - top5 acc (accurate): 77.56 - top5 acc (efficient): 76.02 + top1 accuracy: 48.51 + top1 accuracy (efficient): 47.1 + top5 accuracy: 77.56 + top5 accuracy (efficient): 76.02 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/20210203_145829.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/20210203_145829.log @@ -393,10 +393,10 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 acc (accurate): 48.9 - top1 acc (efficient): 47.16 - top5 acc (accurate): 77.92 - top5 acc (efficient): 76.07 + top1 accuracy: 48.9 + top1 accuracy (efficient): 47.16 + top5 accuracy: 77.92 + top5 
accuracy (efficient): 76.07 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.log @@ -420,10 +420,10 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 acc (accurate): 50.31 - top1 acc (efficient): 47.85 - top5 acc (accurate): 78.18 - top5 acc (efficient): 76.78 + top1 accuracy: 50.31 + top1 accuracy (efficient): 47.85 + top5 accuracy: 78.18 + top5 accuracy (efficient): 76.78 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.log @@ -447,10 +447,10 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 acc (accurate): 49.28 - top1 acc (efficient): 47.62 - top5 acc (accurate): 77.82 - top5 acc (efficient): 76.63 + top1 accuracy: 49.28 + top1 accuracy (efficient): 47.62 + top5 accuracy: 77.82 + top5 accuracy (efficient): 76.63 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/20201010_221240.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/20201010_221240.log @@ -474,10 +474,10 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 acc (accurate): 48.43 - top1 acc (efficient): 45.72 - top5 acc (accurate): 76.72 - top5 acc (efficient): 74.67 + top1 accuracy: 48.43 + top1 accuracy (efficient): 45.72 + top5 accuracy: 76.72 + top5 accuracy (efficient): 74.67 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/20201010_224055.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/20201010_224055.log @@ -501,10 +501,10 @@ Models: Results: - Dataset: SthV2 Metrics: - top1 acc (accurate): 61.12 - top1 acc (efficient): 57.86 - top5 acc (accurate): 86.26 - top5 acc (efficient): 84.67 + top1 accuracy: 61.12 + top1 accuracy (efficient): 57.86 + top5 accuracy: 86.26 + top5 accuracy (efficient): 84.67 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20200912_140737.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20200912_140737.log @@ -528,10 +528,10 @@ Models: Results: - Dataset: SthV2 Metrics: - top1 acc (accurate): 63.84 - top1 acc (efficient): 60.79 - top5 acc (accurate): 88.3 - top5 acc (efficient): 86.6 + top1 accuracy: 63.84 + top1 accuracy (efficient): 60.79 + top5 accuracy: 88.3 + top5 accuracy (efficient): 86.6 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210401_143656.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210401_143656.log @@ -555,10 +555,10 @@ Models: Results: - Dataset: SthV2 Metrics: - top1 acc (accurate): 62.04 - top1 acc (efficient): 59.93 - top5 acc (accurate): 87.35 - top5 acc (efficient): 86.1 + top1 accuracy: 62.04 + top1 accuracy (efficient): 59.93 + top5 
accuracy: 87.35 + top5 accuracy (efficient): 86.1 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20201010_224215.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20201010_224215.log @@ -582,10 +582,10 @@ Models: Results: - Dataset: SthV2 Metrics: - top1 acc (accurate): 63.19 - top1 acc (efficient): 61.06 - top5 acc (accurate): 87.93 - top5 acc (efficient): 86.66 + top1 accuracy: 63.19 + top1 accuracy (efficient): 61.06 + top5 accuracy: 87.93 + top5 accuracy (efficient): 86.66 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20210331_134458.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20210331_134458.log @@ -609,10 +609,10 @@ Models: Results: - Dataset: SthV2 Metrics: - top1 acc (accurate): 61.51 - top1 acc (efficient): 58.59 - top5 acc (accurate): 86.9 - top5 acc (efficient): 85.07 + top1 accuracy: 61.51 + top1 accuracy (efficient): 58.59 + top5 accuracy: 86.9 + top5 accuracy (efficient): 85.07 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20201010_224100.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20201010_224100.log @@ -636,10 +636,10 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 acc (accurate): 48.49 - top1 acc (efficient): 46.35 - top5 acc (accurate): 76.88 - top5 acc (efficient): 75.07 + top1 accuracy: 48.49 + top1 accuracy (efficient): 46.35 + top5 accuracy: 76.88 + top5 accuracy (efficient): 75.07 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.log @@ -663,10 +663,10 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 acc (accurate): 47.46 - top1 acc (efficient): 45.92 - top5 acc (accurate): 76.71 - top5 acc (efficient): 75.23 + top1 accuracy: 47.46 + top1 accuracy (efficient): 45.92 + top5 accuracy: 76.71 + top5 accuracy (efficient): 75.23 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.log @@ -690,8 +690,8 @@ Models: Results: - Dataset: Jester Metrics: - top1 acc (accurate): 97.2 - top1 acc (efficient): 96.5 + top1 accuracy: 97.2 + top1 accuracy (efficient): 96.5 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb.log @@ -700,20 +700,20 @@ Models: In Collection: TSM Metadata: Architecture: ResNet50 + Batch Size: 12 Epochs: 25 FLOPs: 32959844352 Parameters: 23612531 - Batch Size: 12 + Pretrained: Kinetics400 Training Data: HMDB51 Training Resources: 8 GPUs - Pretrained: Kinetics400 Modality: RGB Name: tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb Results: - Dataset: HMDB51 Metrics: - top1 acc: 
72.68 - top5 acc: 92.03 + top1 accuracy: 72.68 + top5 accuracy: 92.03 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/20210605_182554.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/20210605_182554.log @@ -723,20 +723,20 @@ Models: In Collection: TSM Metadata: Architecture: ResNet50 + Batch Size: 6 Epochs: 25 FLOPs: 65919688704 Parameters: 23612531 - Batch Size: 6 + Pretrained: Kinetics400 Training Data: HMDB51 Training Resources: 8 GPUs - Pretrained: Kinetics400 Modality: RGB Name: tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb Results: - Dataset: HMDB51 Metrics: - top1 acc: 74.77 - top5 acc: 93.86 + top1 accuracy: 74.77 + top5 accuracy: 93.86 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/20210605_182505.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/20210605_182505.log @@ -746,20 +746,20 @@ Models: In Collection: TSM Metadata: Architecture: ResNet50 + Batch Size: 12 Epochs: 25 FLOPs: 32960663552 Parameters: 23714981 - Batch Size: 12 + Pretrained: Kinetics400 Training Data: UCF101 Training Resources: 8 GPUs - Pretrained: Kinetics400 Modality: RGB Name: tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb Results: - Dataset: UCF101 Metrics: - top1 acc: 94.50 - top5 acc: 99.58 + top1 accuracy: 94.5 + top5 accuracy: 99.58 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/20210605_182720.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/20210605_182720.log @@ -769,20 +769,20 @@ Models: In Collection: TSM Metadata: Architecture: ResNet50 + Batch Size: 6 Epochs: 25 FLOPs: 65921327104 Parameters: 23714981 - Batch Size: 6 + Pretrained: Kinetics400 Training Data: UCF101 Training Resources: 8 GPUs - Pretrained: Kinetics400 Modality: RGB Name: tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb Results: - Dataset: UCF101 Metrics: - top1 acc: 94.58 - top5 acc: 99.37 + top1 accuracy: 94.58 + top5 accuracy: 99.37 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/20210605_182720.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/20210605_182720.log diff --git a/configs/recognition/tsn/metafile.yml b/configs/recognition/tsn/metafile.yml index fa1d690c41..05a773364f 100644 --- a/configs/recognition/tsn/metafile.yml +++ b/configs/recognition/tsn/metafile.yml @@ -18,8 +18,8 @@ Models: Results: - Dataset: UCF101 Metrics: - top1 acc: 83.03 - top5 acc: 96.78 + top1 accuracy: 83.03 + top5 accuracy: 96.78 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb/tsn_r50_1x1x3_75e_ucf101_rgb_20201023.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb/tsn_r50_1x1x3_75e_ucf101_rgb_20201023.log @@ -40,8 +40,8 @@ Models: Results: - Dataset: Diving48 Metrics: - top1 acc: 71.27 - top5 acc: 95.74 + top1 accuracy: 71.27 + top5 accuracy: 95.74 Task: Action Recognition Training Json Log: 
https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/20210426_014138.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/20210426_014138.log @@ -62,8 +62,8 @@ Models: Results: - Dataset: Diving48 Metrics: - top1 acc: 76.75 - top5 acc: 96.95 + top1 accuracy: 76.75 + top5 accuracy: 96.95 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/20210426_014103.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/20210426_014103.log @@ -84,8 +84,8 @@ Models: Results: - Dataset: HMDB51 Metrics: - top1 acc: 48.95 - top5 acc: 80.19 + top1 accuracy: 48.95 + top5 accuracy: 80.19 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb/20201025_231108.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb/20201025_231108.log @@ -106,8 +106,8 @@ Models: Results: - Dataset: HMDB51 Metrics: - top1 acc: 56.08 - top5 acc: 84.31 + top1 accuracy: 56.08 + top5 accuracy: 84.31 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb/20201108_190805.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb/20201108_190805.log @@ -127,8 +127,8 @@ Models: Results: - Dataset: HMDB51 Metrics: - top1 acc: 54.25 - top5 acc: 83.86 + top1 accuracy: 54.25 + top5 accuracy: 83.86 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb/20201112_170135.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb/20201112_170135.log @@ -150,8 +150,8 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 acc: 70.6 - top5 acc: 89.26 + top1 accuracy: 70.6 + top5 accuracy: 89.26 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log @@ -173,8 +173,8 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 acc: 70.42 - top5 acc: 89.03 + top1 accuracy: 70.42 + top5 accuracy: 89.03 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log @@ -196,8 +196,8 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 acc: 70.18 - top5 acc: 89.1 + top1 accuracy: 70.18 + top5 accuracy: 89.1 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb/20200627_105310.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb/20200627_105310.log @@ -219,8 +219,8 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 acc: 70.91 - top5 acc: 89.51 + top1 accuracy: 70.91 + top5 accuracy: 89.51 Task: Action Recognition Training Json Log: 
https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log @@ -242,8 +242,8 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 acc: 55.7 - top5 acc: 79.85 + top1 accuracy: 55.7 + top5 accuracy: 79.85 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow/tsn_r50_f3_kinetics400_flow_shortedge_55.7_79.9.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow/tsn_r50_f3_kinetics400_flow_shortedge_55.7_79.9.log @@ -265,8 +265,8 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 acc: 71.8 - top5 acc: 90.17 + top1 accuracy: 71.8 + top5 accuracy: 90.17 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x8_100e_kinetics400_rgb/20200815_173413.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x8_100e_kinetics400_rgb/20200815_173413.log @@ -288,8 +288,8 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 acc: 72.41 - top5 acc: 90.55 + top1 accuracy: 72.41 + top5 accuracy: 90.55 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb/tsn_r50_f8_kinetics400_shortedge_72.4_90.6.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb/tsn_r50_f8_kinetics400_shortedge_72.4_90.6.log @@ -311,8 +311,8 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 acc: 57.76 - top5 acc: 80.99 + top1 accuracy: 57.76 + top5 accuracy: 80.99 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow/tsn_r50_f8_kinetics400_flow_shortedge_57.8_81.0.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow/tsn_r50_f8_kinetics400_flow_shortedge_57.8_81.0.log @@ -334,8 +334,8 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 acc: 71.11 - top5 acc: 90.04 + top1 accuracy: 71.11 + top5 accuracy: 90.04 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014.log @@ -357,8 +357,8 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 acc: 70.77 - top5 acc: 89.3 + top1 accuracy: 70.77 + top5 accuracy: 89.3 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/20200606_003901.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/20200606_003901.log @@ -380,8 +380,8 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 acc: 71.79 - top5 acc: 90.25 + top1 accuracy: 71.79 + top5 accuracy: 90.25 Task: Action Recognition Training Json Log: 
https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log @@ -403,8 +403,8 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 acc: 70.4 - top5 acc: 89.12 + top1 accuracy: 70.4 + top5 accuracy: 89.12 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_dense_100e_kinetics400_rgb.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_dense_100e_kinetics400_rgb.log @@ -426,8 +426,8 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 acc: 73.43 - top5 acc: 91.01 + top1 accuracy: 73.43 + top5 accuracy: 91.01 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.log @@ -449,8 +449,8 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 acc: 72.78 - top5 acc: 90.75 + top1 accuracy: 72.78 + top5 accuracy: 90.75 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.log @@ -471,8 +471,8 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 acc: 73.6 - top5 acc: 91.0 + top1 accuracy: 73.6 + top5 accuracy: 91.0 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_imagenet_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-54192355.pth - Config: configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py @@ -491,8 +491,8 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 acc: 73.1 - top5 acc: 90.4 + top1 accuracy: 73.1 + top5 accuracy: 90.4 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_without_omni_1x1x3_kinetics400_rgb_20200926-c133dd49.pth - Config: configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py @@ -511,8 +511,8 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 acc: 75.7 - top5 acc: 91.9 + top1 accuracy: 75.7 + top5 accuracy: 91.9 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-2863fed0.pth - Config: configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb.py @@ -532,8 +532,8 @@ Models: Results: - Dataset: Kinetics-600 Metrics: - top1 acc: 74.8 - top5 acc: 92.3 + top1 accuracy: 74.8 + top5 accuracy: 92.3 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb/tsn_r50_video_1x1x8_100e_kinetics600_rgb_20201015.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb/tsn_r50_video_1x1x8_100e_kinetics600_rgb_20201015.log @@ -555,8 +555,8 @@ 
Models: Results: - Dataset: Kinetics-700 Metrics: - top1 acc: 61.7 - top5 acc: 83.6 + top1 accuracy: 61.7 + top5 accuracy: 83.6 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb/tsn_r50_video_1x1x8_100e_kinetics700_rgb_20201015.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb/tsn_r50_video_1x1x8_100e_kinetics700_rgb_20201015.log @@ -578,8 +578,8 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 acc: 18.55 - top5 acc: 44.8 + top1 accuracy: 18.55 + top5 accuracy: 44.8 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_r50_f8_sthv1_18.1_45.0.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_sthv1.log @@ -601,8 +601,8 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 acc: 15.77 - top5 acc: 39.85 + top1 accuracy: 15.77 + top5 accuracy: 39.85 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/20200614_211932.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/20200614_211932.log @@ -624,8 +624,8 @@ Models: Results: - Dataset: SthV2 Metrics: - top1 acc: 32.97 - top5 acc: 63.62 + top1 accuracy: 32.97 + top5 accuracy: 63.62 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20200915_114139.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20200915_114139.log @@ -647,8 +647,8 @@ Models: Results: - Dataset: SthV2 Metrics: - top1 acc: 27.21 - top5 acc: 55.84 + top1 accuracy: 27.21 + top5 accuracy: 55.84 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20200917_105855.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20200917_105855.log @@ -670,8 +670,8 @@ Models: Results: - Dataset: MiT Metrics: - top1 acc: 26.84 - top5 acc: 51.6 + top1 accuracy: 26.84 + top5 accuracy: 51.6 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb/tsn_r50_f6_mit_26.8_51.6.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb/tsn_mit.log @@ -715,8 +715,8 @@ Models: Results: - Dataset: ActivityNet v1.3 Metrics: - top1 acc: 73.93 - top5 acc: 93.44 + top1 accuracy: 73.93 + top5 accuracy: 93.44 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb/20210228_223327.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb/20210228_223327.log @@ -738,8 +738,8 @@ Models: Results: - Dataset: ActivityNet v1.3 Metrics: - top1 acc: 76.9 - top5 acc: 94.47 + top1 accuracy: 76.9 + top5 accuracy: 94.47 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb/20210217_181313.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb/20210217_181313.log @@ -761,8 +761,8 @@ Models: Results: - Dataset: ActivityNet v1.3 
Metrics: - top1 acc: 57.51 - top5 acc: 83.02 + top1 accuracy: 57.51 + top5 accuracy: 83.02 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow/tsn_r50_320p_1x1x8_150e_activitynet_video_flow_20200804.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow/tsn_r50_320p_1x1x8_150e_activitynet_video_flow_20200804.log @@ -784,8 +784,8 @@ Models: Results: - Dataset: ActivityNet v1.3 Metrics: - top1 acc: 59.51 - top5 acc: 82.69 + top1 accuracy: 59.51 + top5 accuracy: 82.69 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow_20200804.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow_20200804.log @@ -807,7 +807,7 @@ Models: Results: - Dataset: HVU Metrics: - action mAP: 57.5 + mAP: 57.5 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/action/tsn_r18_1x1x8_100e_hvu_action_rgb_20201027.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/action/tsn_r18_1x1x8_100e_hvu_action_rgb_20201027.log @@ -830,7 +830,7 @@ Models: Results: - Dataset: HVU Metrics: - scene mAP: 55.2 + mAP: 55.2 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/scene/tsn_r18_1x1x8_100e_hvu_scene_rgb_20201027.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/scene/tsn_r18_1x1x8_100e_hvu_scene_rgb_20201027.log @@ -853,7 +853,7 @@ Models: Results: - Dataset: HVU Metrics: - object mAP: 45.7 + mAP: 45.7 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/object/tsn_r18_1x1x8_100e_hvu_object_rgb_20201027.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/object/tsn_r18_1x1x8_100e_hvu_object_rgb_20201027.log @@ -876,7 +876,7 @@ Models: Results: - Dataset: HVU Metrics: - event mAP: 63.7 + mAP: 63.7 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/event/tsn_r18_1x1x8_100e_hvu_event_rgb_20201027.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/event/tsn_r18_1x1x8_100e_hvu_event_rgb_20201027.log @@ -899,7 +899,7 @@ Models: Results: - Dataset: HVU Metrics: - concept mAP: 47.5 + mAP: 47.5 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/concept/tsn_r18_1x1x8_100e_hvu_concept_rgb_20201027.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/concept/tsn_r18_1x1x8_100e_hvu_concept_rgb_20201027.log @@ -922,7 +922,7 @@ Models: Results: - Dataset: HVU Metrics: - attribute mAP: 46.1 + mAP: 46.1 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/attribute/tsn_r18_1x1x8_100e_hvu_attribute_rgb_20201027.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/hvu/attribute/tsn_r18_1x1x8_100e_hvu_attribute_rgb_20201027.log @@ -943,8 +943,8 @@ Models: Results: - Dataset: Kinetics400 Metrics: - - top1 acc: 77.51 - - top5 acc: 92.92 + top1 accuracy: 77.51 + top5 accuracy: 92.92 Task: Action Recognition Training Json Log: 
https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.log diff --git a/configs/recognition/x3d/metafile.yml b/configs/recognition/x3d/metafile.yml index 8e264a3875..0e63bae550 100644 --- a/configs/recognition/x3d/metafile.yml +++ b/configs/recognition/x3d/metafile.yml @@ -17,7 +17,7 @@ Models: - Dataset: Kinetics-400 Metrics: top1 10-view: 72.7 - top1 30-view: 73.2 + top1 accuracy: 73.2 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/x3d/facebook/x3d_s_facebook_13x6x1_kinetics400_rgb_20201027-623825a0.pth reference top1 10-view: 73.1 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] @@ -37,7 +37,7 @@ Models: - Dataset: Kinetics-400 Metrics: top1 10-view: 75.0 - top1 30-view: 75.6 + top1 accuracy: 75.6 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/x3d/facebook/x3d_m_facebook_16x5x1_kinetics400_rgb_20201027-3f42382a.pth reference top1 10-view: 75.1 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] diff --git a/configs/recognition_audio/resnet/metafile.yml b/configs/recognition_audio/resnet/metafile.yml index a13b5fc594..1d7625a9f6 100644 --- a/configs/recognition_audio/resnet/metafile.yml +++ b/configs/recognition_audio/resnet/metafile.yml @@ -15,12 +15,12 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 acc: 19.7 - top1 acc [w. RGB]: 71.5 - top1 acc delta [w. RGB]: 0.39 - top5 acc: 35.75 - top5 acc [w. RGB]: 90.18 - top5 acc delta [w. RGB]: 0.14 + top1 accuracy: 19.7 + top1 accuracy [w. RGB]: 71.5 + top1 accuracy delta [w. RGB]: 0.39 + top5 accuracy: 35.75 + top5 accuracy [w. RGB]: 90.18 + top5 accuracy delta [w. 
RGB]: 0.14 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/20201010_144630.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/20201010_144630.log diff --git a/configs/skeleton/posec3d/metafile.yml b/configs/skeleton/posec3d/metafile.yml index 24c6e30c70..d2c784d00b 100644 --- a/configs/skeleton/posec3d/metafile.yml +++ b/configs/skeleton/posec3d/metafile.yml @@ -16,7 +16,7 @@ Models: Results: - Dataset: FineGYM Metrics: - - Mean Top-1: 93.7 + mean top1 accuracy: 93.7 Task: Skeleton-based Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint.json Training Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint.log @@ -35,7 +35,7 @@ Models: Results: - Dataset: FineGYM Metrics: - - Mean Top-1: 94.0 + mean top1 accuracy: 94.0 Task: Skeleton-based Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb.json Training Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb.log @@ -54,7 +54,7 @@ Models: Results: - Dataset: NTU60-XSub Metrics: - - Top-1: 93.7 + top1 accuracy: 93.7 Task: Skeleton-based Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint.json Training Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint.log @@ -73,7 +73,7 @@ Models: Results: - Dataset: NTU60-XSub Metrics: - - Top-1: 93.4 + top1 accuracy: 93.4 Task: Skeleton-based Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb.json Training Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb.log @@ -92,7 +92,7 @@ Models: Results: - Dataset: NTU120-XSub Metrics: - - Top-1: 86.3 + top1 accuracy: 86.3 Task: Skeleton-based Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint.json Training Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint.log @@ -111,7 +111,7 @@ Models: Results: - Dataset: NTU120-XSub Metrics: - - Top-1: 85.7 + top1 accuracy: 85.7 Task: Skeleton-based Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.json Training Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.log From 5ac0587f4e42928eb9a2befcb545ef99a220c3a0 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Thu, 1 Jul 2021 17:10:15 +0800 Subject: [PATCH 181/414] Update TSM modelzoo (#981) * update TSM * update meta file * remove deprecated ckpts --- 
configs/_base_/schedules/sgd_tsm_100e.py | 2 +- configs/recognition/tsm/README.md | 7 +- configs/recognition/tsm/README_zh-CN.md | 7 +- configs/recognition/tsm/metafile.yml | 101 +++++++++++++++--- .../tsm_r50_1x1x16_100e_kinetics400_rgb.py | 7 ++ .../tsm/tsm_r50_1x1x8_100e_kinetics400_rgb.py | 6 ++ ...tsm_r50_dense_1x1x8_50e_kinetics400_rgb.py | 7 ++ 7 files changed, 116 insertions(+), 21 deletions(-) create mode 100644 configs/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb.py create mode 100644 configs/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb.py create mode 100644 configs/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb.py diff --git a/configs/_base_/schedules/sgd_tsm_100e.py b/configs/_base_/schedules/sgd_tsm_100e.py index be54a66c66..dbdc473987 100644 --- a/configs/_base_/schedules/sgd_tsm_100e.py +++ b/configs/_base_/schedules/sgd_tsm_100e.py @@ -3,7 +3,7 @@ type='SGD', constructor='TSMOptimizerConstructor', paramwise_cfg=dict(fc_lr5=True), - lr=0.02, # this lr is used for 8 gpus + lr=0.01, # this lr is used for 8 gpus momentum=0.9, weight_decay=0.0001) optimizer_config = dict(grad_clip=dict(max_norm=20, norm_type=2)) diff --git a/configs/recognition/tsm/README.md b/configs/recognition/tsm/README.md index 78036d3c04..6a53454e3c 100644 --- a/configs/recognition/tsm/README.md +++ b/configs/recognition/tsm/README.md @@ -32,12 +32,15 @@ |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| |[tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |340x256|8| ResNet50| ImageNet |70.24|89.56|[70.36](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|[89.49](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|74.0 (8x1 frames)| 7079 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20200607-af7fb746.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log.json)| |[tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet |70.59|89.52|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/tsm_r50_256p_1x1x8_50e_kinetics400_rgb_20200726-020785e2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log.json)| +|[tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |short-side 320|8| ResNet50| ImageNet |70.73|89.81|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20210701-68d582b4.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20210616_021451.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20210616_021451.log.json)| +|[tsm_r50_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb.py) 
|short-side 320|8| ResNet50| ImageNet |71.90|90.03|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb/tsm_r50_1x1x8_100e_kinetics400_rgb_20210701-7ff22268.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb/20210617_103543.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb/20210617_103543.log.json)| |[tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py](/configs/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet |70.48|89.40|x|x|x|7076|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219-bf96e6cc.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.json)| |[tsm_r50_video_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet |70.25|89.66|[70.36](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|[89.49](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|74.0 (8x1 frames)| 7077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_1x1x8_100e_kinetics400_rgb_20200702-a77f4328.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log.json)| -|[tsm_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py) |340x256|8x4| ResNet50 | ImageNet|72.9|90.44|[72.22](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#dense-sample)|[90.37](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#dense-sample)|11.5 (8x10 frames)| 7079 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/tsm_r50_dense_1x1x8_100e_kinetics400_rgb_20200626-91a54551.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20200626_213415.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20200626_213415.log.json)| -|[tsm_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py) |short-side 256|8| ResNet50 | 
ImageNet|73.38|91.02|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb_20200727-e1e0c785.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/20200725_032043.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/20200725_032043.log.json)| +|[tsm_r50_dense_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb.py) |short-side 320|8| ResNet50 | ImageNet|73.46|90.84|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb/tsm_r50_dense_1x1x8_50e_kinetics400_rgb_20210701-a54ff3d3.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb/20210617_103245.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb/20210617_103245.log.json)| +|[tsm_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py) |short-side 320|8| ResNet50 | ImageNet|74.55|91.74|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/tsm_r50_dense_1x1x8_100e_kinetics400_rgb_20210701-e3e5e97f.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20210613_034931.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20210613_034931.log.json)| |[tsm_r50_1x1x16_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py) |340x256|8| ResNet50| ImageNet |72.09|90.37|[70.67](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_16f.sh)|[89.98](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_16f.sh)|47.0 (16x1 frames)| 10404 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/tsm_r50_340x256_1x1x16_50e_kinetics400_rgb_20201011-2f27f229.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log.json)| |[tsm_r50_1x1x16_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py) |short-side 256|8x4| ResNet50| ImageNet |71.89|90.73|x|x|x|10398|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/tsm_r50_256p_1x1x16_50e_kinetics400_rgb_20201010-85645c2a.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log.json)| +|[tsm_r50_1x1x16_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb.py) |short-side 320|8| ResNet50| ImageNet 
|72.80|90.75|x|x|x|10398|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb/tsm_r50_1x1x16_100e_kinetics400_rgb_20210701-41ac92b9.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb/20210618_193859.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb/20210618_193859.log.json)| |[tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb.py)|short-side 320|8x4| ResNet50| ImageNet |72.03|90.25|71.81|90.36|x|8931|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb_20200724-f00f1336.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log.json)| |[tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb.py)|short-side 320|8x4| ResNet50| ImageNet |70.70|89.90|x|x|x|10125|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb_20200816-b93fd297.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log.json)| |[tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb.py)|short-side 320|8x4|ResNet50| ImageNet |71.60|90.34|x|x|x|8358|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb_20200724-d8ad84d2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log.json)| diff --git a/configs/recognition/tsm/README_zh-CN.md b/configs/recognition/tsm/README_zh-CN.md index 51de257f79..92d59486de 100644 --- a/configs/recognition/tsm/README_zh-CN.md +++ b/configs/recognition/tsm/README_zh-CN.md @@ -32,12 +32,15 @@ |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| |[tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |340x256|8| ResNet50| ImageNet |70.24|89.56|[70.36](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|[89.49](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|74.0 (8x1 frames)| 7079 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20200607-af7fb746.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log)| 
[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log.json)| |[tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |短边 256|8| ResNet50| ImageNet |70.59|89.52|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/tsm_r50_256p_1x1x8_50e_kinetics400_rgb_20200726-020785e2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log.json)| +|[tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |短边 320|8| ResNet50| ImageNet |70.73|89.81|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20210701-68d582b4.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20210616_021451.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20210616_021451.log.json)| +|[tsm_r50_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb.py) |短边 320|8| ResNet50| ImageNet |71.90|90.03|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb/tsm_r50_1x1x8_100e_kinetics400_rgb_20210701-7ff22268.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb/20210617_103543.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb/20210617_103543.log.json)| |[tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py](/configs/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py) |短边 256|8| ResNet50| ImageNet |70.48|89.40|x|x|x|7076|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219-bf96e6cc.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.json)| |[tsm_r50_video_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_kinetics400_rgb.py) |短边 256|8| ResNet50| ImageNet |70.25|89.66|[70.36](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|[89.49](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|74.0 (8x1 frames)| 7077 | [ckpt]( https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_1x1x8_100e_kinetics400_rgb_20200702-a77f4328.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log.json)| 
-|[tsm_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py) |340x256|8x4| ResNet50 | ImageNet|72.9|90.44|[72.22](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#dense-sample)|[90.37](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#dense-sample)|11.5 (8x10 frames)| 7079 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/tsm_r50_dense_1x1x8_100e_kinetics400_rgb_20200626-91a54551.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20200626_213415.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20200626_213415.log.json)| -|[tsm_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py) |短边 256|8| ResNet50 | ImageNet|73.38|91.02|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb_20200727-e1e0c785.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/20200725_032043.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/20200725_032043.log.json)| +|[tsm_r50_dense_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb.py) |短边 320|8| ResNet50 | ImageNet|73.46|90.84|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb/tsm_r50_dense_1x1x8_50e_kinetics400_rgb_20210701-a54ff3d3.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb/20210617_103245.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb/20210617_103245.log.json)| +|[tsm_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py) |短边 320|8| ResNet50 | ImageNet|74.55|91.74|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/tsm_r50_dense_1x1x8_100e_kinetics400_rgb_20210701-e3e5e97f.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20210613_034931.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20210613_034931.log.json)| |[tsm_r50_1x1x16_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py) |340x256|8| ResNet50| ImageNet |72.09|90.37|[70.67](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_16f.sh)|[89.98](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_16f.sh)|47.0 (16x1 frames)| 10404 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/tsm_r50_340x256_1x1x16_50e_kinetics400_rgb_20201011-2f27f229.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log.json)| 
|[tsm_r50_1x1x16_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py) |短边 256|8x4| ResNet50| ImageNet |71.89|90.73|x|x|x|10398|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/tsm_r50_256p_1x1x16_50e_kinetics400_rgb_20201010-85645c2a.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log.json)| +|[tsm_r50_1x1x16_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb.py) |短边 320|8| ResNet50| ImageNet |72.80|90.75|x|x|x|10398|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb/tsm_r50_1x1x16_100e_kinetics400_rgb_20210701-41ac92b9.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb/20210618_193859.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb/20210618_193859.log.json)| |[tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb.py)|短边 320|8x4| ResNet50| ImageNet |72.03|90.25|71.81|90.36|x|8931|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb_20200724-f00f1336.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log.json)| |[tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb.py)|短边 320|8x4| ResNet50| ImageNet |70.70|89.90|x|x|x|10125|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb_20200816-b93fd297.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log.json)| |[tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb.py)|短边 320|8x4|ResNet50| ImageNet |71.60|90.34|x|x|x|8358|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb_20200724-d8ad84d2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log.json)| diff --git a/configs/recognition/tsm/metafile.yml b/configs/recognition/tsm/metafile.yml index 94df124ed8..5a18a889b1 100644 --- a/configs/recognition/tsm/metafile.yml +++ b/configs/recognition/tsm/metafile.yml @@ -48,6 +48,52 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log.json Training Log: 
https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/tsm_r50_256p_1x1x8_50e_kinetics400_rgb_20200726-020785e2.pth +- Config: configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Batch Size: 8 + Epochs: 50 + FLOPs: 32965562368 + Parameters: 24327632 + Pretrained: ImageNet + Resolution: short-side 320 + Training Data: Kinetics-400 + Training Resources: 8 GPUs + Modality: RGB + Name: tsm_r50_1x1x8_50e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 accuracy: 70.73 + top5 accuracy: 89.81 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20210616_021451.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20210616_021451.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20210701-68d582b4.pth +- Config: configs/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Batch Size: 8 + Epochs: 100 + FLOPs: 32965562368 + Parameters: 24327632 + Pretrained: ImageNet + Resolution: short-side 320 + Training Data: Kinetics-400 + Training Resources: 8 GPUs + Modality: RGB + Name: tsm_r50_1x1x8_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 accuracy: 71.90 + top5 accuracy: 90.03 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb/20210617_103543.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb/20210617_103543.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb/tsm_r50_1x1x8_100e_kinetics400_rgb_20210701-7ff22268.pth - Config: configs/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py In Collection: TSM Metadata: @@ -94,29 +140,29 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_1x1x8_100e_kinetics400_rgb_20200702-a77f4328.pth -- Config: configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py +- Config: configs/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb.py In Collection: TSM Metadata: Architecture: ResNet50 Batch Size: 8 - Epochs: 100 + Epochs: 50 FLOPs: 32965562368 Parameters: 24327632 Pretrained: ImageNet - Resolution: 340x256 + Resolution: short-side 320 Training Data: Kinetics-400 - Training Resources: 32 GPUs + Training Resources: 8 GPUs Modality: RGB - Name: tsm_r50_dense_1x1x8_100e_kinetics400_rgb + Name: tsm_r50_dense_1x1x8_50e_kinetics400_rgb Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 72.9 - top5 accuracy: 90.44 + top1 accuracy: 73.46 + top5 accuracy: 90.84 Task: Action Recognition - Training Json Log: 
https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20200626_213415.log.json - Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20200626_213415.log - Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/tsm_r50_dense_1x1x8_100e_kinetics400_rgb_20200626-91a54551.pth + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb/20210617_103245.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb/20210617_103245.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb/tsm_r50_dense_1x1x8_50e_kinetics400_rgb_20210701-a54ff3d3.pth - Config: configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py In Collection: TSM Metadata: @@ -126,7 +172,7 @@ Models: FLOPs: 32965562368 Parameters: 24327632 Pretrained: ImageNet - Resolution: short-side 256 + Resolution: short-side 320 Training Data: Kinetics-400 Training Resources: 8 GPUs Modality: RGB @@ -134,12 +180,12 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 73.38 - top5 accuracy: 91.02 + top1 accuracy: 74.55 + top5 accuracy: 91.74 Task: Action Recognition - Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/20200725_032043.log.json - Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/20200725_032043.log - Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb/tsm_r50_dense_256p_1x1x8_100e_kinetics400_rgb_20200727-e1e0c785.pth + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20210613_034931.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20210613_034931.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/tsm_r50_dense_1x1x8_100e_kinetics400_rgb_20210701-e3e5e97f.pth - Config: configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py In Collection: TSM Metadata: @@ -186,6 +232,29 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/tsm_r50_256p_1x1x16_50e_kinetics400_rgb_20201010-85645c2a.pth +- Config: configs/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb.py + In Collection: TSM + Metadata: + Architecture: ResNet50 + Batch Size: 6 + Epochs: 100 + FLOPs: 65931124736 + Parameters: 24327632 + Pretrained: ImageNet + Resolution: short-side 320 + Training Data: Kinetics-400 + Training Resources: 8 GPUs + Modality: RGB + Name: tsm_r50_1x1x16_100e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + top1 accuracy: 72.80 + top5 accuracy: 90.75 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20210621_115844.log.json + Training Log: 
https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20210621_115844.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/tsm_r50_1x1x16_50e_kinetics400_rgb_20210701-7c0c5d54.pth - Config: configs/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb.py In Collection: TSM Metadata: diff --git a/configs/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb.py new file mode 100644 index 0000000000..b09b65b4c1 --- /dev/null +++ b/configs/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb.py @@ -0,0 +1,7 @@ +_base_ = ['tsm_r50_1x1x16_50e_kinetics400_rgb.py'] + +optimizer_config = dict(grad_clip=dict(max_norm=20, norm_type=2)) +# learning policy +lr_config = dict(policy='step', step=[40, 80]) +total_epochs = 100 +work_dir = './work_dirs/tsm_r50_1x1x16_100e_kinetics400_rgb/' diff --git a/configs/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb.py new file mode 100644 index 0000000000..bff76cf13d --- /dev/null +++ b/configs/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb.py @@ -0,0 +1,6 @@ +_base_ = ['./tsm_r50_1x1x8_50e_kinetics400_rgb.py'] + +optimizer_config = dict(grad_clip=dict(max_norm=20, norm_type=2)) +lr_config = dict(policy='step', step=[40, 80]) +total_epochs = 100 +work_dir = './work_dirs/tsm_r50_1x1x8_100e_kinetics400_rgb/' diff --git a/configs/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb.py new file mode 100644 index 0000000000..66ffd96ba0 --- /dev/null +++ b/configs/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb.py @@ -0,0 +1,7 @@ +_base_ = ['tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py'] + +optimizer_config = dict(grad_clip=dict(max_norm=20, norm_type=2)) +# learning policy +lr_config = dict(policy='step', step=[20, 40]) +total_epochs = 50 +work_dir = './work_dirs/tsm_r50_dense_1x1x8_50e_kinetics400_rgb/' From 1c2d9c6812bfb1026a7dd68981f4a4f3dd7ec5a7 Mon Sep 17 00:00:00 2001 From: congee <35596075+congee524@users.noreply.github.com> Date: Thu, 1 Jul 2021 17:19:18 +0800 Subject: [PATCH 182/414] [Feature] Support TimeSformer (#839) * add file * add attention * timesformer code * timesformer head code * refactor timesformer according to new version of mmcv transformer * add timesformer config * add trunc normalize * add stochastic depth decay rule * add spaceonly & joint space time configs * refine timesformer backbone code * reduce batch_size off jointST * add comments for timesformerhead * add README * use trunc_norm in mmcv * add comments * add unittest * update ckpt url * fix load state_dict * fix multiheadattention * fix mmcv version * fix batch_first * fix mmcv minimum version * fix mmcv installation in CI * revert changes * check_version Co-authored-by: Kenny Co-authored-by: shvdiwnkozbw <36528789+shvdiwnkozbw@users.noreply.github.com> Co-authored-by: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> --- configs/recognition/tanet/README.md | 2 +- configs/recognition/tanet/README_zh-CN.md | 2 +- configs/recognition/timesformer/README.md | 72 +++++ .../recognition/timesformer/README_zh-CN.md | 72 +++++ ...former_divST_8x32x1_15e_kinetics400_rgb.py | 119 ++++++++ ...rmer_jointST_8x32x1_15e_kinetics400_rgb.py | 118 ++++++++ ...er_spaceOnly_8x32x1_15e_kinetics400_rgb.py | 117 ++++++++ mmaction/__init__.py | 2 +- mmaction/models/__init__.py | 15 +-
mmaction/models/backbones/__init__.py | 4 +- mmaction/models/backbones/timesformer.py | 284 ++++++++++++++++++ mmaction/models/common/__init__.py | 8 +- mmaction/models/common/transformer.py | 216 +++++++++++++ mmaction/models/heads/__init__.py | 3 +- mmaction/models/heads/timesformer_head.py | 40 +++ requirements/optional.txt | 1 + setup.cfg | 2 +- tests/test_models/test_backbones.py | 65 +++- tests/test_models/test_common.py | 40 ++- tests/test_models/test_head.py | 22 +- .../test_recognizers/test_recognizer3d.py | 29 ++ 21 files changed, 1217 insertions(+), 16 deletions(-) create mode 100644 configs/recognition/timesformer/README.md create mode 100644 configs/recognition/timesformer/README_zh-CN.md create mode 100644 configs/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb.py create mode 100644 configs/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb.py create mode 100644 configs/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.py create mode 100644 mmaction/models/backbones/timesformer.py create mode 100644 mmaction/models/common/transformer.py create mode 100644 mmaction/models/heads/timesformer_head.py diff --git a/configs/recognition/tanet/README.md b/configs/recognition/tanet/README.md index 90a9d2eab4..52a2d4e38c 100644 --- a/configs/recognition/tanet/README.md +++ b/configs/recognition/tanet/README.md @@ -4,7 +4,7 @@ -```latex +```BibTeX @article{liu2020tam, title={TAM: Temporal Adaptive Module for Video Recognition}, author={Liu, Zhaoyang and Wang, Limin and Wu, Wayne and Qian, Chen and Lu, Tong}, diff --git a/configs/recognition/tanet/README_zh-CN.md b/configs/recognition/tanet/README_zh-CN.md index eebff061f4..953fae69dc 100644 --- a/configs/recognition/tanet/README_zh-CN.md +++ b/configs/recognition/tanet/README_zh-CN.md @@ -4,7 +4,7 @@ -```latex +```BibTeX @article{liu2020tam, title={TAM: Temporal Adaptive Module for Video Recognition}, author={Liu, Zhaoyang and Wang, Limin and Wu, Wayne and Qian, Chen and Lu, Tong}, diff --git a/configs/recognition/timesformer/README.md b/configs/recognition/timesformer/README.md new file mode 100644 index 0000000000..710d9a4f38 --- /dev/null +++ b/configs/recognition/timesformer/README.md @@ -0,0 +1,72 @@ +# TimeSformer + +## Introduction + + + +```BibTeX +@misc{bertasius2021spacetime, + title = {Is Space-Time Attention All You Need for Video Understanding?}, + author = {Gedas Bertasius and Heng Wang and Lorenzo Torresani}, + year = {2021}, + eprint = {2102.05095}, + archivePrefix = {arXiv}, + primaryClass = {cs.CV} +} +``` + +## Model Zoo + +### Kinetics-400 + +|config | resolution | gpus | backbone | pretrain | top1 acc| top5 acc | inference_time(video/s) | gpu_mem(M)| ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[timesformer_divST_8x32x1_15e_kinetics400_rgb](/configs/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb.py) | short-side 320 | 8 | TimeSformer | ImageNet-21K | 77.92 | 93.29 | x | 17874 | [ckpt](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb/timesformer_divST_8x32x1_15e_kinetics400_rgb-3f8e5d03.pth) | [log](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb/timesformer_divST_8x32x1_15e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb/timesformer_divST_8x32x1_15e_kinetics400_rgb.json)| 
+|[timesformer_jointST_8x32x1_15e_kinetics400_rgb](/configs/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb.py) | short-side 320 | 8 | TimeSformer | ImageNet-21K | 77.01 | 93.08 | x | 25658 | [ckpt](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb/timesformer_jointST_8x32x1_15e_kinetics400_rgb-0d6e3984.pth) | [log](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb/timesformer_jointST_8x32x1_15e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb/timesformer_jointST_8x32x1_15e_kinetics400_rgb.json)| +|[timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb](/configs/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.py) | short-side 320 | 8 | TimeSformer | ImageNet-21K | 76.93 | 92.90 | x | 12750 | [ckpt](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb-0cf829cd.pth) | [log](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.json)| + +Notes: + +1. The **gpus** indicates the number of gpus (32G V100) we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. + According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, + e.g., lr=0.005 for 8 GPUs x 8 videos/gpu and lr=0.00375 for 8 GPUs x 6 videos/gpu. +2. We keep the same test setting as the [original repo](https://github.com/facebookresearch/TimeSformer) (three crop x 1 clip). +3. The pretrained model `vit_base_patch16_224.pth` used by TimeSformer was converted from [vision_transformer](https://github.com/google-research/vision_transformer). + +For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). + +## Train + +You can use the following command to train a model. + +```shell +python tools/train.py ${CONFIG_FILE} [optional arguments] +``` + +Example: train TimeSformer model on Kinetics-400 dataset in a deterministic option with periodic validation. + +```shell +python tools/train.py configs/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb.py \ + --work-dir work_dirs/timesformer_divST_8x32x1_15e_kinetics400_rgb \ + --validate --seed 0 --deterministic +``` + +For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). + +## Test + +You can use the following command to test a model. + +```shell +python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] +``` + +Example: test TimeSformer model on Kinetics-400 dataset and dump the result to a json file.
+ +```shell +python tools/test.py configs/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb.py \ + checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy mean_class_accuracy \ + --out result.json +``` + +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). diff --git a/configs/recognition/timesformer/README_zh-CN.md b/configs/recognition/timesformer/README_zh-CN.md new file mode 100644 index 0000000000..c844917e01 --- /dev/null +++ b/configs/recognition/timesformer/README_zh-CN.md @@ -0,0 +1,72 @@ +# TimeSformer + +## 简介 + + + +```BibTeX +@misc{bertasius2021spacetime, + title = {Is Space-Time Attention All You Need for Video Understanding?}, + author = {Gedas Bertasius and Heng Wang and Lorenzo Torresani}, + year = {2021}, + eprint = {2102.05095}, + archivePrefix = {arXiv}, + primaryClass = {cs.CV} +} +``` + +## 模型库 + +### Kinetics-400 + +|配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M)| ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[timesformer_divST_8x32x1_15e_kinetics400_rgb](/configs/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb.py) | 短边 320 | 8 | TimeSformer | ImageNet-21K | 77.92 | 93.29 | x | 17874 | [ckpt](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb/timesformer_divST_8x32x1_15e_kinetics400_rgb-3f8e5d03.pth) | [log](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb/timesformer_divST_8x32x1_15e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb/timesformer_divST_8x32x1_15e_kinetics400_rgb.json)| +|[timesformer_jointST_8x32x1_15e_kinetics400_rgb](/configs/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb.py) | 短边 320 | 8 | TimeSformer | ImageNet-21K | 77.01 | 93.08 | x | 25658 | [ckpt](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb/timesformer_jointST_8x32x1_15e_kinetics400_rgb-0d6e3984.pth) | [log](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb/timesformer_jointST_8x32x1_15e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb/timesformer_jointST_8x32x1_15e_kinetics400_rgb.json)| +|[timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb](/configs/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.py) | 短边 320 | 8 | TimeSformer | ImageNet-21K | 76.93 | 92.90 | x | 12750 | [ckpt](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb-0cf829cd.pth) | [log](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.json)| + +注: + +1.
这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数 (32G V100)。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 + 依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。 + 如,lr=0.005 对应 8 GPUs x 8 video/gpu,以及 lr=0.004375 对应 8 GPUs x 7 video/gpu。 +2. MMAction2 保持与 [原代码](https://github.com/facebookresearch/TimeSformer) 的测试设置一致(three crop x 1 clip)。 +3. TimeSformer 使用的预训练模型 `vit_base_patch16_224.pth` 转换自 [vision_transformer](https://github.com/google-research/vision_transformer)。 + +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。 + +## 如何训练 + +用户可以使用以下指令进行模型训练。 + +```shell +python tools/train.py ${CONFIG_FILE} [optional arguments] +``` + +例如:以一个确定性的训练方式,辅以定期的验证过程进行 TimeSformer 模型在 Kinetics400 数据集上的训练。 + +```shell +python tools/train.py configs/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb.py \ + --work-dir work_dirs/timesformer_divST_8x32x1_15e_kinetics400_rgb \ + --validate --seed 0 --deterministic +``` + +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 + +## 如何测试 + +用户可以使用以下指令进行模型测试。 + +```shell +python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] +``` + +例如:在 Kinetics400 数据集上测试 TimeSformer 模型,并将结果导出为一个 json 文件。 + +```shell +python tools/test.py configs/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb.py \ + checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy mean_class_accuracy \ + --out result.json +``` + +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb.py b/configs/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb.py new file mode 100644 index 0000000000..f4bd1614d1 --- /dev/null +++ b/configs/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb.py @@ -0,0 +1,119 @@ +_base_ = ['../../_base_/default_runtime.py'] + +# model settings +model = dict( + type='Recognizer3D', + backbone=dict( + type='TimeSformer', + pretrained= # noqa: E251 + 'https://download.openmmlab.com/mmaction/recognition/timesformer/vit_base_patch16_224.pth', # noqa: E501 + num_frames=8, + img_size=224, + patch_size=16, + embed_dims=768, + in_channels=3, + dropout_ratio=0., + transformer_layers=None, + attention_type='divided_space_time', + norm_cfg=dict(type='LN', eps=1e-6)), + cls_head=dict(type='TimeSformerHead', num_classes=400, in_channels=768), + # model training and testing settings + train_cfg=None, + test_cfg=dict(average_clips='prob')) + +# dataset settings +dataset_type = 'RawframeDataset' +data_root = 'data/kinetics400/rawframes_train' +data_root_val = 'data/kinetics400/rawframes_val' +ann_file_train = 'data/kinetics400/kinetics400_train_list_rawframes.txt' +ann_file_val = 'data/kinetics400/kinetics400_val_list_rawframes.txt' +ann_file_test = 'data/kinetics400/kinetics400_val_list_rawframes.txt' + +img_norm_cfg = dict( + mean=[127.5, 127.5, 127.5], std=[127.5, 127.5, 127.5], to_bgr=False) + +train_pipeline = [ + dict(type='SampleFrames', clip_len=8, frame_interval=32, num_clips=1), + dict(type='RawFrameDecode'), + dict(type='RandomRescale', scale_range=(256, 320)), + dict(type='RandomCrop', size=224), + dict(type='Flip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=8,
frame_interval=32, + num_clips=1, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=8, + frame_interval=32, + num_clips=1, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 224)), + dict(type='ThreeCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +data = dict( + videos_per_gpu=8, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + pipeline=test_pipeline)) + +evaluation = dict( + interval=1, metrics=['top_k_accuracy', 'mean_class_accuracy']) + +# optimizer +optimizer = dict( + type='SGD', + lr=0.005, + momentum=0.9, + paramwise_cfg=dict( + custom_keys={ + '.backbone.cls_token': dict(decay_mult=0.0), + '.backbone.pos_embed': dict(decay_mult=0.0), + '.backbone.time_embed': dict(decay_mult=0.0) + }), + weight_decay=1e-4, + nesterov=True) # this lr is used for 8 gpus +optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2)) + +# learning policy +lr_config = dict(policy='step', step=[5, 10]) +total_epochs = 15 + +# runtime settings +checkpoint_config = dict(interval=1) +work_dir = './work_dirs/timesformer_divST_8x32x1_15e_kinetics400_rgb' diff --git a/configs/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb.py b/configs/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb.py new file mode 100644 index 0000000000..66eec25eca --- /dev/null +++ b/configs/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb.py @@ -0,0 +1,118 @@ +_base_ = ['../../_base_/default_runtime.py'] + +# model settings +model = dict( + type='Recognizer3D', + backbone=dict( + type='TimeSformer', + pretrained= # noqa: E251 + 'https://download.openmmlab.com/mmaction/recognition/timesformer/vit_base_patch16_224.pth', # noqa: E501 + num_frames=8, + img_size=224, + patch_size=16, + embed_dims=768, + in_channels=3, + dropout_ratio=0., + transformer_layers=None, + attention_type='joint_space_time', + norm_cfg=dict(type='LN', eps=1e-6)), + cls_head=dict(type='TimeSformerHead', num_classes=400, in_channels=768), + # model training and testing settings + train_cfg=None, + test_cfg=dict(average_clips='prob')) + +# dataset settings +dataset_type = 'RawframeDataset' +data_root = 'data/kinetics400/rawframes_train' +data_root_val = 'data/kinetics400/rawframes_val' +ann_file_train = 'data/kinetics400/kinetics400_train_list_rawframes.txt' +ann_file_val = 'data/kinetics400/kinetics400_val_list_rawframes.txt' +ann_file_test = 'data/kinetics400/kinetics400_val_list_rawframes.txt' + +img_norm_cfg = dict( + mean=[127.5, 127.5, 127.5], std=[127.5, 127.5, 127.5], to_bgr=False) + +train_pipeline = [ + dict(type='SampleFrames', clip_len=8, frame_interval=32, num_clips=1), + dict(type='RawFrameDecode'), + dict(type='RandomRescale', scale_range=(256, 320)), 
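+    # (assumed semantics, editorial comment) RandomRescale resizes the short side to a value sampled from scale_range, so the fixed 224x224 RandomCrop below operates on a randomly rescaled frame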
+ dict(type='RandomCrop', size=224), + dict(type='Flip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=8, + frame_interval=32, + num_clips=1, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=8, + frame_interval=32, + num_clips=1, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 224)), + dict(type='ThreeCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +data = dict( + videos_per_gpu=7, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + pipeline=test_pipeline)) + +evaluation = dict( + interval=1, metrics=['top_k_accuracy', 'mean_class_accuracy']) + +# optimizer +optimizer = dict( + type='SGD', + lr=0.004375, + momentum=0.9, + paramwise_cfg=dict( + custom_keys={ + '.backbone.cls_token': dict(decay_mult=0.0), + '.backbone.pos_embed': dict(decay_mult=0.0), + '.backbone.time_embed': dict(decay_mult=0.0) + }), + weight_decay=1e-4, + nesterov=True) # this lr is used for 8 gpus +optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2)) +# learning policy +lr_config = dict(policy='step', step=[5, 10]) +total_epochs = 15 + +# runtime settings +checkpoint_config = dict(interval=1) +work_dir = './work_dirs/timesformer_jointST_8x32x1_15e_kinetics400_rgb' diff --git a/configs/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.py b/configs/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.py new file mode 100644 index 0000000000..6d859e8b37 --- /dev/null +++ b/configs/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.py @@ -0,0 +1,117 @@ +_base_ = ['../../_base_/default_runtime.py'] + +# model settings +model = dict( + type='Recognizer3D', + backbone=dict( + type='TimeSformer', + pretrained= # noqa: E251 + 'https://download.openmmlab.com/mmaction/recognition/timesformer/vit_base_patch16_224.pth', # noqa: E501 + num_frames=8, + img_size=224, + patch_size=16, + embed_dims=768, + in_channels=3, + dropout_ratio=0., + transformer_layers=None, + attention_type='space_only', + norm_cfg=dict(type='LN', eps=1e-6)), + cls_head=dict(type='TimeSformerHead', num_classes=400, in_channels=768), + # model training and testing settings + train_cfg=None, + test_cfg=dict(average_clips='prob')) + +# dataset settings +dataset_type = 'RawframeDataset' +data_root = 'data/kinetics400/rawframes_train' +data_root_val = 'data/kinetics400/rawframes_val' +ann_file_train = 'data/kinetics400/kinetics400_train_list_rawframes.txt' +ann_file_val = 'data/kinetics400/kinetics400_val_list_rawframes.txt'
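+# (assumed format, editorial comment) Each line of the rawframe annotation lists referenced here follows the RawframeDataset convention `<frame_dir> <total_frames> <label>`, e.g. `abseiling/abseiling_0001 300 0`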
+ann_file_test = 'data/kinetics400/kinetics400_val_list_rawframes.txt' + +img_norm_cfg = dict( + mean=[127.5, 127.5, 127.5], std=[127.5, 127.5, 127.5], to_bgr=False) + +train_pipeline = [ + dict(type='SampleFrames', clip_len=8, frame_interval=32, num_clips=1), + dict(type='RawFrameDecode'), + dict(type='RandomRescale', scale_range=(256, 320)), + dict(type='RandomCrop', size=224), + dict(type='Flip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=8, + frame_interval=32, + num_clips=1, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=8, + frame_interval=32, + num_clips=1, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 224)), + dict(type='ThreeCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +data = dict( + videos_per_gpu=8, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + pipeline=test_pipeline)) + +evaluation = dict( + interval=1, metrics=['top_k_accuracy', 'mean_class_accuracy']) + +# optimizer +optimizer = dict( + type='SGD', + lr=0.005, + momentum=0.9, + paramwise_cfg=dict( + custom_keys={ + '.backbone.cls_token': dict(decay_mult=0.0), + '.backbone.pos_embed': dict(decay_mult=0.0) + }), + weight_decay=1e-4, + nesterov=True) # this lr is used for 8 gpus +optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2)) +# learning policy +lr_config = dict(policy='step', step=[5, 10]) +total_epochs = 15 + +# runtime settings +checkpoint_config = dict(interval=1) +work_dir = './work_dirs/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb' diff --git a/mmaction/__init__.py b/mmaction/__init__.py index 56da91fbbd..c949258a06 100644 --- a/mmaction/__init__.py +++ b/mmaction/__init__.py @@ -3,7 +3,7 @@ from .version import __version__ -mmcv_minimum_version = '1.3.1' +mmcv_minimum_version = '1.3.6' mmcv_maximum_version = '1.4.0' mmcv_version = digit_version(mmcv.__version__) diff --git a/mmaction/models/__init__.py b/mmaction/models/__init__.py index d612642376..2ecdcb9619 100644 --- a/mmaction/models/__init__.py +++ b/mmaction/models/__init__.py @@ -1,15 +1,18 @@ from .backbones import (C3D, X3D, MobileNetV2, MobileNetV2TSM, ResNet, ResNet2Plus1d, ResNet3d, ResNet3dCSN, ResNet3dLayer, ResNet3dSlowFast, ResNet3dSlowOnly, ResNetAudio, - ResNetTIN, ResNetTSM, TANet) + ResNetTIN, ResNetTSM, TANet, TimeSformer) from .builder import (BACKBONES, DETECTORS, HEADS, LOCALIZERS, LOSSES, NECKS, RECOGNIZERS, build_backbone, build_detector, build_head, build_localizer, build_loss, build_model, build_neck, build_recognizer) -from .common import
LFB, TAM, Conv2plus1d, ConvAudio +from .common import (LFB, TAM, Conv2plus1d, ConvAudio, + DividedSpatialAttentionWithNorm, + DividedTemporalAttentionWithNorm, FFNWithNorm) from .heads import (ACRNHead, AudioTSNHead, AVARoIHead, BaseHead, BBoxHeadAVA, - FBOHead, I3DHead, LFBInferHead, SlowFastHead, TPNHead, - TRNHead, TSMHead, TSNHead, X3DHead) + FBOHead, I3DHead, LFBInferHead, SlowFastHead, + TimeSformerHead, TPNHead, TRNHead, TSMHead, TSNHead, + X3DHead) from .localizers import BMN, PEM, TEM from .losses import (BCELossWithLogits, BinaryLogisticRegressionLoss, BMNLoss, CrossEntropyLoss, HVULoss, NLLLoss, OHEMHingeLoss, @@ -32,5 +35,7 @@ 'AudioTSNHead', 'X3D', 'X3DHead', 'ResNet3dLayer', 'DETECTORS', 'SingleRoIExtractor3D', 'BBoxHeadAVA', 'ResNetAudio', 'build_detector', 'ConvAudio', 'AVARoIHead', 'MobileNetV2', 'MobileNetV2TSM', 'TANet', 'LFB', - 'FBOHead', 'LFBInferHead', 'TRNHead', 'NECKS', 'ACRNHead' + 'FBOHead', 'LFBInferHead', 'TRNHead', 'NECKS', 'TimeSformer', + 'TimeSformerHead', 'DividedSpatialAttentionWithNorm', + 'DividedTemporalAttentionWithNorm', 'FFNWithNorm', 'ACRNHead' ] diff --git a/mmaction/models/backbones/__init__.py b/mmaction/models/backbones/__init__.py index 4999d12ba0..53704dd404 100644 --- a/mmaction/models/backbones/__init__.py +++ b/mmaction/models/backbones/__init__.py @@ -11,10 +11,12 @@ from .resnet_tin import ResNetTIN from .resnet_tsm import ResNetTSM from .tanet import TANet +from .timesformer import TimeSformer from .x3d import X3D __all__ = [ 'C3D', 'ResNet', 'ResNet3d', 'ResNetTSM', 'ResNet2Plus1d', 'ResNet3dSlowFast', 'ResNet3dSlowOnly', 'ResNet3dCSN', 'ResNetTIN', 'X3D', - 'ResNetAudio', 'ResNet3dLayer', 'MobileNetV2TSM', 'MobileNetV2', 'TANet' + 'ResNetAudio', 'ResNet3dLayer', 'MobileNetV2TSM', 'MobileNetV2', 'TANet', + 'TimeSformer' ] diff --git a/mmaction/models/backbones/timesformer.py b/mmaction/models/backbones/timesformer.py new file mode 100644 index 0000000000..52e82a62c3 --- /dev/null +++ b/mmaction/models/backbones/timesformer.py @@ -0,0 +1,284 @@ +import numpy as np +import torch +import torch.nn as nn +from einops import rearrange +from mmcv import ConfigDict +from mmcv.cnn import build_conv_layer, build_norm_layer, kaiming_init +from mmcv.cnn.bricks.transformer import build_transformer_layer_sequence +from mmcv.cnn.utils.weight_init import trunc_normal_ +from mmcv.runner import _load_checkpoint, load_state_dict +from torch.nn.modules.utils import _pair + +from ...utils import get_root_logger +from ..builder import BACKBONES + + +class PatchEmbed(nn.Module): + """Image to Patch Embedding. + + Args: + img_size (int | tuple): Size of input image. + patch_size (int): Size of one patch. + in_channels (int): Channel num of input features. Defaults to 3. + embed_dims (int): Dimensions of embedding. Defaults to 768. + conv_cfg (dict | None): Config dict for convolution layer. Defaults to + `dict(type='Conv2d')`. 
+ """ + + def __init__(self, + img_size, + patch_size, + in_channels=3, + embed_dims=768, + conv_cfg=dict(type='Conv2d')): + super().__init__() + self.img_size = _pair(img_size) + self.patch_size = _pair(patch_size) + + num_patches = (self.img_size[1] // self.patch_size[1]) * ( + self.img_size[0] // self.patch_size[0]) + assert num_patches * self.patch_size[0] * self.patch_size[1] == \ + self.img_size[0] * self.img_size[1], \ + 'The image size H*W must be divisible by patch size' + self.num_patches = num_patches + + # Use conv layer to embed + self.projection = build_conv_layer( + conv_cfg, + in_channels, + embed_dims, + kernel_size=patch_size, + stride=patch_size) + + self.init_weights() + + def init_weights(self): + # Lecun norm from ClassyVision + kaiming_init(self.projection, mode='fan_in', nonlinearity='linear') + + def forward(self, x): + x = rearrange(x, 'b c t h w -> (b t) c h w') + x = self.projection(x).flatten(2).transpose(1, 2) + return x + + +@BACKBONES.register_module() +class TimeSformer(nn.Module): + """TimeSformer. A PyTorch impl of `Is Space-Time Attention All You Need for + Video Understanding? `_ + + Args: + num_frames (int): Number of frames in the video. + img_size (int | tuple): Size of input image. + patch_size (int): Size of one patch. + pretrained (str | None): Name of pretrained model. Default: None. + embed_dims (int): Dimensions of embedding. Defaults to 768. + num_heads (int): Number of parallel attention heads in + TransformerCoder. Defaults to 12. + num_transformer_layers (int): Number of transformer layers. Defaults to + 12. + in_channels (int): Channel num of input features. Defaults to 3. + dropout_ratio (float): Probability of dropout layer. Defaults to 0.. + transformer_layers (list[obj:`mmcv.ConfigDict`] | + obj:`mmcv.ConfigDict` | None): Config of transformerlayer in + TransformerCoder. If it is obj:`mmcv.ConfigDict`, it would be + repeated `num_transformer_layers` times to a + list[obj:`mmcv.ConfigDict`]. Defaults to None. + attention_type (str): Type of attentions in TransformerCoder. Choices + are 'divided_space_time', 'space_only' and 'joint_space_time'. + Defaults to 'divided_space_time'. + norm_cfg (dict): Config for norm layers. Defaults to + `dict(type='LN', eps=1e-6)`. 
+ """ + supported_attention_types = [ + 'divided_space_time', 'space_only', 'joint_space_time' + ] + + def __init__(self, + num_frames, + img_size, + patch_size, + pretrained=None, + embed_dims=768, + num_heads=12, + num_transformer_layers=12, + in_channels=3, + dropout_ratio=0., + transformer_layers=None, + attention_type='divided_space_time', + norm_cfg=dict(type='LN', eps=1e-6), + **kwargs): + super().__init__(**kwargs) + assert attention_type in self.supported_attention_types, ( + f'Unsupported Attention Type {attention_type}!') + assert transformer_layers is None or isinstance( + transformer_layers, (dict, list)) + + self.num_frames = num_frames + self.pretrained = pretrained + self.embed_dims = embed_dims + self.num_transformer_layers = num_transformer_layers + self.attention_type = attention_type + + self.patch_embed = PatchEmbed( + img_size=img_size, + patch_size=patch_size, + in_channels=in_channels, + embed_dims=embed_dims) + num_patches = self.patch_embed.num_patches + + self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dims)) + self.pos_embed = nn.Parameter( + torch.zeros(1, num_patches + 1, embed_dims)) + self.drop_after_pos = nn.Dropout(p=dropout_ratio) + if self.attention_type != 'space_only': + self.time_embed = nn.Parameter( + torch.zeros(1, num_frames, embed_dims)) + self.drop_after_time = nn.Dropout(p=dropout_ratio) + + self.norm = build_norm_layer(norm_cfg, embed_dims)[1] + + if transformer_layers is None: + # stochastic depth decay rule + dpr = np.linspace(0, 0.1, num_transformer_layers) + + if self.attention_type == 'divided_space_time': + _transformerlayers_cfg = [ + dict( + type='BaseTransformerLayer', + attn_cfgs=[ + dict( + type='DividedTemporalAttentionWithNorm', + embed_dims=embed_dims, + num_heads=num_heads, + num_frames=num_frames, + dropout_layer=dict( + type='DropPath', drop_prob=dpr[i]), + norm_cfg=dict(type='LN', eps=1e-6)), + dict( + type='DividedSpatialAttentionWithNorm', + embed_dims=embed_dims, + num_heads=num_heads, + num_frames=num_frames, + dropout_layer=dict( + type='DropPath', drop_prob=dpr[i]), + norm_cfg=dict(type='LN', eps=1e-6)) + ], + ffn_cfgs=dict( + type='FFNWithNorm', + embed_dims=embed_dims, + feedforward_channels=embed_dims * 4, + num_fcs=2, + act_cfg=dict(type='GELU'), + dropout_layer=dict( + type='DropPath', drop_prob=dpr[i]), + norm_cfg=dict(type='LN', eps=1e-6)), + operation_order=('self_attn', 'self_attn', 'ffn')) + for i in range(num_transformer_layers) + ] + else: + # Sapce Only & Joint Space Time + _transformerlayers_cfg = [ + dict( + type='BaseTransformerLayer', + attn_cfgs=[ + dict( + type='MultiheadAttention', + embed_dims=embed_dims, + num_heads=num_heads, + batch_first=True, + dropout_layer=dict( + type='DropPath', drop_prob=dpr[i])) + ], + ffn_cfgs=dict( + type='FFN', + embed_dims=embed_dims, + feedforward_channels=embed_dims * 4, + num_fcs=2, + act_cfg=dict(type='GELU'), + dropout_layer=dict( + type='DropPath', drop_prob=dpr[i])), + operation_order=('norm', 'self_attn', 'norm', 'ffn'), + norm_cfg=dict(type='LN', eps=1e-6), + batch_first=True) + for i in range(num_transformer_layers) + ] + + transformer_layers = ConfigDict( + dict( + type='TransformerLayerSequence', + transformerlayers=_transformerlayers_cfg, + num_layers=num_transformer_layers)) + + self.transformer_layers = build_transformer_layer_sequence( + transformer_layers) + + def init_weights(self, pretrained=None): + """Initiate the parameters either from existing checkpoint or from + scratch.""" + trunc_normal_(self.pos_embed, std=.02) + 
trunc_normal_(self.cls_token, std=.02) + + if pretrained: + self.pretrained = pretrained + if isinstance(self.pretrained, str): + logger = get_root_logger() + logger.info(f'load model from: {self.pretrained}') + + state_dict = _load_checkpoint(self.pretrained) + if 'state_dict' in state_dict: + state_dict = state_dict['state_dict'] + + if self.attention_type == 'divided_space_time': + # modify the key names of norm layers + old_state_dict_keys = list(state_dict.keys()) + for old_key in old_state_dict_keys: + if 'norms' in old_key: + new_key = old_key.replace('norms.0', + 'attentions.0.norm') + new_key = new_key.replace('norms.1', 'ffns.0.norm') + state_dict[new_key] = state_dict.pop(old_key) + + # copy the parameters of space attention to time attention + old_state_dict_keys = list(state_dict.keys()) + for old_key in old_state_dict_keys: + if 'attentions.0' in old_key: + new_key = old_key.replace('attentions.0', + 'attentions.1') + state_dict[new_key] = state_dict[old_key].clone() + + load_state_dict(self, state_dict, strict=False, logger=logger) + + def forward(self, x): + """Defines the computation performed at every call.""" + # x [batch_size * num_frames, num_patches, embed_dims] + batches = x.shape[0] + x = self.patch_embed(x) + + # x [batch_size * num_frames, num_patches + 1, embed_dims] + cls_tokens = self.cls_token.expand(x.size(0), -1, -1) + x = torch.cat((cls_tokens, x), dim=1) + x = x + self.pos_embed + x = self.drop_after_pos(x) + + # Add Time Embedding + if self.attention_type != 'space_only': + # x [batch_size, num_patches * num_frames + 1, embed_dims] + cls_tokens = x[:batches, 0, :].unsqueeze(1) + x = rearrange(x[:, 1:, :], '(b t) p m -> (b p) t m', b=batches) + x = x + self.time_embed + x = self.drop_after_time(x) + x = rearrange(x, '(b p) t m -> b (p t) m', b=batches) + x = torch.cat((cls_tokens, x), dim=1) + + x = self.transformer_layers(x, None, None) + + if self.attention_type == 'space_only': + # x [batch_size, num_patches + 1, embed_dims] + x = x.view(-1, self.num_frames, *x.size()[-2:]) + x = torch.mean(x, 1) + + x = self.norm(x) + + # Return Class Token + return x[:, 0] diff --git a/mmaction/models/common/__init__.py b/mmaction/models/common/__init__.py index 603aadf45b..5523e6d7ce 100644 --- a/mmaction/models/common/__init__.py +++ b/mmaction/models/common/__init__.py @@ -2,5 +2,11 @@ from .conv_audio import ConvAudio from .lfb import LFB from .tam import TAM +from .transformer import (DividedSpatialAttentionWithNorm, + DividedTemporalAttentionWithNorm, FFNWithNorm) -__all__ = ['Conv2plus1d', 'ConvAudio', 'LFB', 'TAM'] +__all__ = [ + 'Conv2plus1d', 'ConvAudio', 'LFB', 'TAM', + 'DividedSpatialAttentionWithNorm', 'DividedTemporalAttentionWithNorm', + 'FFNWithNorm' +] diff --git a/mmaction/models/common/transformer.py b/mmaction/models/common/transformer.py new file mode 100644 index 0000000000..edc95e5a92 --- /dev/null +++ b/mmaction/models/common/transformer.py @@ -0,0 +1,216 @@ +from distutils.version import LooseVersion + +import torch +import torch.nn as nn +from einops import rearrange +from mmcv.cnn import build_norm_layer, constant_init +from mmcv.cnn.bricks.registry import ATTENTION, FEEDFORWARD_NETWORK +from mmcv.cnn.bricks.transformer import FFN, build_dropout +from mmcv.runner.base_module import BaseModule + + +@ATTENTION.register_module() +class DividedTemporalAttentionWithNorm(BaseModule): + """Temporal Attention in Divided Space Time Attention. + + Args: + embed_dims (int): Dimensions of embedding. 
+ num_heads (int): Number of parallel attention heads in + TransformerCoder. + num_frames (int): Number of frames in the video. + attn_drop (float): A Dropout layer on attn_output_weights. Defaults to + 0.. + proj_drop (float): A Dropout layer after `nn.MultiheadAttention`. + Defaults to 0.. + dropout_layer (dict): The dropout_layer used when adding the shortcut. + Defaults to `dict(type='DropPath', drop_prob=0.1)`. + norm_cfg (dict): Config dict for normalization layer. Defaults to + `dict(type='LN')`. + init_cfg (dict | None): The Config for initialization. Defaults to + None. + """ + + def __init__(self, + embed_dims, + num_heads, + num_frames, + attn_drop=0., + proj_drop=0., + dropout_layer=dict(type='DropPath', drop_prob=0.1), + norm_cfg=dict(type='LN'), + init_cfg=None, + **kwargs): + super().__init__(init_cfg) + self.embed_dims = embed_dims + self.num_heads = num_heads + self.num_frames = num_frames + self.norm = build_norm_layer(norm_cfg, self.embed_dims)[1] + + if LooseVersion(torch.__version__) < LooseVersion('1.9.0'): + kwargs.pop('batch_first', None) + self.attn = nn.MultiheadAttention(embed_dims, num_heads, attn_drop, + **kwargs) + self.proj_drop = nn.Dropout(proj_drop) + self.dropout_layer = build_dropout( + dropout_layer) if dropout_layer else nn.Identity() + self.temporal_fc = nn.Linear(self.embed_dims, self.embed_dims) + + self.init_weights() + + def init_weights(self): + constant_init(self.temporal_fc, val=0, bias=0) + + def forward(self, query, key=None, value=None, residual=None, **kwargs): + assert residual is None, ( + 'Always adding the shortcut in the forward function') + + init_cls_token = query[:, 0, :].unsqueeze(1) + identity = query_t = query[:, 1:, :] + + # query_t [batch_size, num_patches * num_frames, embed_dims] + b, pt, m = query_t.size() + p, t = pt // self.num_frames, self.num_frames + + # res_temporal [batch_size * num_patches, num_frames, embed_dims] + query_t = self.norm(query_t.reshape(b * p, t, m)).permute(1, 0, 2) + res_temporal = self.attn(query_t, query_t, query_t)[0].permute(1, 0, 2) + res_temporal = self.dropout_layer( + self.proj_drop(res_temporal.contiguous())) + res_temporal = self.temporal_fc(res_temporal) + + # res_temporal [batch_size, num_patches * num_frames, embed_dims] + res_temporal = res_temporal.reshape(b, p * t, m) + + # ret_value [batch_size, num_patches * num_frames + 1, embed_dims] + new_query_t = identity + res_temporal + new_query = torch.cat((init_cls_token, new_query_t), 1) + return new_query + + +@ATTENTION.register_module() +class DividedSpatialAttentionWithNorm(BaseModule): + """Spatial Attention in Divided Space Time Attention. + + Args: + embed_dims (int): Dimensions of embedding. + num_heads (int): Number of parallel attention heads in + TransformerCoder. + num_frames (int): Number of frames in the video. + attn_drop (float): A Dropout layer on attn_output_weights. Defaults to + 0.. + proj_drop (float): A Dropout layer after `nn.MultiheadAttention`. + Defaults to 0.. + dropout_layer (dict): The dropout_layer used when adding the shortcut. + Defaults to `dict(type='DropPath', drop_prob=0.1)`. + norm_cfg (dict): Config dict for normalization layer. Defaults to + `dict(type='LN')`. + init_cfg (dict | None): The Config for initialization. Defaults to + None. 
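+
+        Example:
+            A minimal shape-check sketch. The class and argument names come
+            from this file, but the sizes below are illustrative assumptions
+            borrowed from the unit tests later in this patch; they are not
+            part of the original docstring:
+
+            >>> import torch
+            >>> attn = DividedSpatialAttentionWithNorm(
+            ...     embed_dims=512, num_heads=8, num_frames=4)
+            >>> # assumed layout: 1 cls token + 4 frames x 14 x 14 patches
+            >>> x = torch.rand(1, 1 + 4 * 14 * 14, 512)
+            >>> attn(x).shape  # the residual keeps the shape unchanged
+            torch.Size([1, 785, 512])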
+    """
+
+    def __init__(self,
+                 embed_dims,
+                 num_heads,
+                 num_frames,
+                 attn_drop=0.,
+                 proj_drop=0.,
+                 dropout_layer=dict(type='DropPath', drop_prob=0.1),
+                 norm_cfg=dict(type='LN'),
+                 init_cfg=None,
+                 **kwargs):
+        super().__init__(init_cfg)
+        self.embed_dims = embed_dims
+        self.num_heads = num_heads
+        self.num_frames = num_frames
+        self.norm = build_norm_layer(norm_cfg, self.embed_dims)[1]
+        if LooseVersion(torch.__version__) < LooseVersion('1.9.0'):
+            kwargs.pop('batch_first', None)
+        self.attn = nn.MultiheadAttention(embed_dims, num_heads, attn_drop,
+                                          **kwargs)
+        self.proj_drop = nn.Dropout(proj_drop)
+        self.dropout_layer = build_dropout(
+            dropout_layer) if dropout_layer else nn.Identity()
+
+        self.init_weights()
+
+    def init_weights(self):
+        # init DividedSpatialAttentionWithNorm by default
+        pass
+
+    def forward(self, query, key=None, value=None, residual=None, **kwargs):
+        assert residual is None, (
+            'Always adding the shortcut in the forward function')
+
+        identity = query
+        init_cls_token = query[:, 0, :].unsqueeze(1)
+        query_s = query[:, 1:, :]
+
+        # query_s [batch_size, num_patches * num_frames, embed_dims]
+        b, pt, m = query_s.size()
+        p, t = pt // self.num_frames, self.num_frames
+
+        # cls_token [batch_size * num_frames, 1, embed_dims]
+        cls_token = init_cls_token.repeat(1, t, 1).reshape(b * t,
+                                                           m).unsqueeze(1)
+
+        # query_s [batch_size * num_frames, num_patches + 1, embed_dims]
+        query_s = rearrange(query_s, 'b (p t) m -> (b t) p m', p=p, t=t)
+        query_s = torch.cat((cls_token, query_s), 1)
+
+        # res_spatial [batch_size * num_frames, num_patches + 1, embed_dims]
+        query_s = self.norm(query_s).permute(1, 0, 2)
+        res_spatial = self.attn(query_s, query_s, query_s)[0].permute(1, 0, 2)
+        res_spatial = self.dropout_layer(
+            self.proj_drop(res_spatial.contiguous()))
+
+        # cls_token [batch_size, 1, embed_dims]
+        cls_token = res_spatial[:, 0, :].reshape(b, t, m)
+        cls_token = torch.mean(cls_token, 1, True)
+
+        # res_spatial [batch_size * num_frames, num_patches + 1, embed_dims]
+        res_spatial = rearrange(
+            res_spatial[:, 1:, :], '(b t) p m -> b (p t) m', p=p, t=t)
+        res_spatial = torch.cat((cls_token, res_spatial), 1)
+
+        new_query = identity + res_spatial
+        return new_query
+
+
+@FEEDFORWARD_NETWORK.register_module()
+class FFNWithNorm(FFN):
+    """FFN with pre normalization layer.
+
+    FFNWithNorm is implemented to be compatible with `BaseTransformerLayer`
+    when using `DividedTemporalAttentionWithNorm` and
+    `DividedSpatialAttentionWithNorm`.
+
+    FFNWithNorm has one main difference from FFN:
+
+    - It applies one normalization layer before forwarding the input data to
+      feed-forward networks.
+
+    Args:
+        embed_dims (int): Dimensions of embedding. Defaults to 256.
+        feedforward_channels (int): Hidden dimension of FFNs. Defaults to
+            1024.
+        num_fcs (int, optional): Number of fully-connected layers in FFNs.
+            Defaults to 2.
+        act_cfg (dict): Config for activate layers.
+            Defaults to `dict(type='ReLU')`.
+        ffn_drop (float, optional): Probability of an element to be
+            zeroed in FFN. Defaults to 0..
+        add_residual (bool, optional): Whether to add the
+            residual connection. Defaults to `True`.
+        dropout_layer (dict | None): The dropout_layer used when adding the
+            shortcut. Defaults to None.
+        init_cfg (dict): The Config for initialization. Defaults to None.
+        norm_cfg (dict): Config dict for normalization layer. Defaults to
+            `dict(type='LN')`.
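+
+        Example:
+            A minimal usage sketch. The sizes are illustrative assumptions
+            echoing the unit tests later in this patch, not values fixed by
+            the interface:
+
+            >>> import torch
+            >>> ffn = FFNWithNorm(
+            ...     embed_dims=256, feedforward_channels=512,
+            ...     norm_cfg=dict(type='LN'))
+            >>> x = torch.rand(1, 785, 256)  # (batch, tokens, embed_dims)
+            >>> ffn(x).shape  # pre-norm FFN plus residual keeps the shape
+            torch.Size([1, 785, 256])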
+ """ + + def __init__(self, *args, norm_cfg=dict(type='LN'), **kwargs): + super().__init__(*args, **kwargs) + self.norm = build_norm_layer(norm_cfg, self.embed_dims)[1] + + def forward(self, x, residual=None): + assert residual is None, ('Cannot apply pre-norm with FFNWithNorm') + return super().forward(self.norm(x), x) diff --git a/mmaction/models/heads/__init__.py b/mmaction/models/heads/__init__.py index 4684f3f28c..9040674adf 100644 --- a/mmaction/models/heads/__init__.py +++ b/mmaction/models/heads/__init__.py @@ -8,6 +8,7 @@ from .roi_head import AVARoIHead from .slowfast_head import SlowFastHead from .ssn_head import SSNHead +from .timesformer_head import TimeSformerHead from .tpn_head import TPNHead from .trn_head import TRNHead from .tsm_head import TSMHead @@ -17,5 +18,5 @@ __all__ = [ 'TSNHead', 'I3DHead', 'BaseHead', 'TSMHead', 'SlowFastHead', 'SSNHead', 'TPNHead', 'AudioTSNHead', 'X3DHead', 'BBoxHeadAVA', 'AVARoIHead', - 'FBOHead', 'LFBInferHead', 'TRNHead', 'ACRNHead' + 'FBOHead', 'LFBInferHead', 'TRNHead', 'TimeSformerHead', 'ACRNHead' ] diff --git a/mmaction/models/heads/timesformer_head.py b/mmaction/models/heads/timesformer_head.py new file mode 100644 index 0000000000..d8d640d1ce --- /dev/null +++ b/mmaction/models/heads/timesformer_head.py @@ -0,0 +1,40 @@ +import torch.nn as nn +from mmcv.cnn import trunc_normal_init + +from ..builder import HEADS +from .base import BaseHead + + +@HEADS.register_module() +class TimeSformerHead(BaseHead): + """Classification head for TimeSformer. + + Args: + num_classes (int): Number of classes to be classified. + in_channels (int): Number of channels in input feature. + loss_cls (dict): Config for building loss. + Defaults to `dict(type='CrossEntropyLoss')`. + init_std (float): Std value for Initiation. Defaults to 0.02. + kwargs (dict, optional): Any keyword argument to be used to initialize + the head. 
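+
+        Example:
+            A minimal shape-check sketch. The numbers are illustrative
+            assumptions taken from the unit tests later in this patch, not
+            requirements of the head itself:
+
+            >>> import torch
+            >>> head = TimeSformerHead(num_classes=4, in_channels=64)
+            >>> head.init_weights()
+            >>> # [N, in_channels] -> [N, num_classes]
+            >>> head(torch.rand(2, 64)).shape
+            torch.Size([2, 4])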
+ """ + + def __init__(self, + num_classes, + in_channels, + loss_cls=dict(type='CrossEntropyLoss'), + init_std=0.02, + **kwargs): + super().__init__(num_classes, in_channels, loss_cls, **kwargs) + self.init_std = init_std + self.fc_cls = nn.Linear(self.in_channels, self.num_classes) + + def init_weights(self): + """Initiate the parameters from scratch.""" + trunc_normal_init(self.fc_cls, std=self.init_std) + + def forward(self, x): + # [N, in_channels] + cls_score = self.fc_cls(x) + # [N, num_classes] + return cls_score diff --git a/requirements/optional.txt b/requirements/optional.txt index 34d92709b4..93af2f7a08 100644 --- a/requirements/optional.txt +++ b/requirements/optional.txt @@ -1,5 +1,6 @@ av decord >= 0.4.1 +einops imgaug librosa lmdb diff --git a/setup.cfg b/setup.cfg index e6cdc146a2..1e62cc18ba 100644 --- a/setup.cfg +++ b/setup.cfg @@ -19,6 +19,6 @@ line_length = 79 multi_line_output = 0 known_standard_library = pkg_resources,setuptools known_first_party = mmaction -known_third_party = cv2,decord,joblib,matplotlib,mmcv,numpy,pandas,pytest,scipy,seaborn,titlecase,torch,tqdm,webcolors +known_third_party = cv2,decord,einops,joblib,matplotlib,mmcv,numpy,pandas,pytest,scipy,seaborn,titlecase,torch,tqdm,webcolors no_lines_before = STDLIB,LOCALFOLDER default_section = THIRDPARTY diff --git a/tests/test_models/test_backbones.py b/tests/test_models/test_backbones.py index 9357546fd7..cf8b63cee0 100644 --- a/tests/test_models/test_backbones.py +++ b/tests/test_models/test_backbones.py @@ -7,7 +7,8 @@ from mmaction.models import (C3D, X3D, MobileNetV2TSM, ResNet2Plus1d, ResNet3dCSN, ResNet3dSlowFast, ResNet3dSlowOnly, - ResNetAudio, ResNetTIN, ResNetTSM, TANet) + ResNetAudio, ResNetTIN, ResNetTSM, TANet, + TimeSformer) from mmaction.models.backbones.resnet_tsm import NL3DWrapper from .base import check_norm_state, generate_backbone_demo_inputs @@ -625,6 +626,68 @@ def test_tanet_backbone(): assert feat.shape == torch.Size([16, 2048, 1, 1]) +def test_timesformer_backbone(): + input_shape = (1, 3, 8, 64, 64) + imgs = generate_backbone_demo_inputs(input_shape) + + # divided_space_time + timesformer = TimeSformer( + 8, 64, 16, embed_dims=768, attention_type='divided_space_time') + timesformer.init_weights() + from mmaction.models.common import (DividedSpatialAttentionWithNorm, + DividedTemporalAttentionWithNorm, + FFNWithNorm) + assert isinstance(timesformer.transformer_layers.layers[0].attentions[0], + DividedTemporalAttentionWithNorm) + assert isinstance(timesformer.transformer_layers.layers[11].attentions[1], + DividedSpatialAttentionWithNorm) + assert isinstance(timesformer.transformer_layers.layers[0].ffns[0], + FFNWithNorm) + assert hasattr(timesformer, 'time_embed') + assert timesformer.patch_embed.num_patches == 16 + + cls_tokens = timesformer(imgs) + assert cls_tokens.shape == torch.Size([1, 768]) + + # space_only + timesformer = TimeSformer( + 8, 64, 16, embed_dims=512, num_heads=8, attention_type='space_only') + timesformer.init_weights() + + assert not hasattr(timesformer, 'time_embed') + assert timesformer.patch_embed.num_patches == 16 + + cls_tokens = timesformer(imgs) + assert cls_tokens.shape == torch.Size([1, 512]) + + # joint_space_time + input_shape = (1, 3, 2, 64, 64) + imgs = generate_backbone_demo_inputs(input_shape) + timesformer = TimeSformer( + 2, + 64, + 8, + embed_dims=256, + num_heads=8, + attention_type='joint_space_time') + timesformer.init_weights() + + assert hasattr(timesformer, 'time_embed') + assert timesformer.patch_embed.num_patches == 64 + + 
cls_tokens = timesformer(imgs) + assert cls_tokens.shape == torch.Size([1, 256]) + + with pytest.raises(AssertionError): + # unsupported attention type + timesformer = TimeSformer( + 8, 64, 16, attention_type='wrong_attention_type') + + with pytest.raises(AssertionError): + # Wrong transformer_layers type + timesformer = TimeSformer(8, 64, 16, transformer_layers='wrong_type') + + def test_c3d_backbone(): """Test c3d backbone.""" input_shape = (1, 3, 16, 112, 112) diff --git a/tests/test_models/test_common.py b/tests/test_models/test_common.py index 9cee2c2ff3..a3ea192c3a 100644 --- a/tests/test_models/test_common.py +++ b/tests/test_models/test_common.py @@ -2,8 +2,13 @@ import pytest import torch +import torch.nn as nn +from mmcv.utils import assert_params_all_zeros -from mmaction.models.common import LFB, TAM, Conv2plus1d, ConvAudio +from mmaction.models.common import (LFB, TAM, Conv2plus1d, ConvAudio, + DividedSpatialAttentionWithNorm, + DividedTemporalAttentionWithNorm, + FFNWithNorm) def test_conv2plus1d(): @@ -37,6 +42,39 @@ def test_conv_audio(): assert output.shape == torch.Size([1, 8, 8, 8]) +def test_divided_temporal_attention_with_norm(): + _cfg = dict(embed_dims=768, num_heads=12, num_frames=8) + divided_temporal_attention = DividedTemporalAttentionWithNorm(**_cfg) + assert isinstance(divided_temporal_attention.norm, nn.LayerNorm) + assert assert_params_all_zeros(divided_temporal_attention.temporal_fc) + + x = torch.rand(1, 1 + 8 * 14 * 14, 768) + output = divided_temporal_attention(x) + assert output.shape == torch.Size([1, 1 + 8 * 14 * 14, 768]) + + +def test_divided_spatial_attention_with_norm(): + _cfg = dict(embed_dims=512, num_heads=8, num_frames=4, dropout_layer=None) + divided_spatial_attention = DividedSpatialAttentionWithNorm(**_cfg) + assert isinstance(divided_spatial_attention.dropout_layer, nn.Identity) + assert isinstance(divided_spatial_attention.norm, nn.LayerNorm) + + x = torch.rand(1, 1 + 4 * 14 * 14, 512) + output = divided_spatial_attention(x) + assert output.shape == torch.Size([1, 1 + 4 * 14 * 14, 512]) + + +def test_ffn_with_norm(): + _cfg = dict( + embed_dims=256, feedforward_channels=256 * 2, norm_cfg=dict(type='LN')) + ffn_with_norm = FFNWithNorm(**_cfg) + assert isinstance(ffn_with_norm.norm, nn.LayerNorm) + + x = torch.rand(1, 1 + 4 * 14 * 14, 256) + output = ffn_with_norm(x) + assert output.shape == torch.Size([1, 1 + 4 * 14 * 14, 256]) + + def test_TAM(): """test TAM.""" with pytest.raises(AssertionError): diff --git a/tests/test_models/test_head.py b/tests/test_models/test_head.py index d75608cfb4..8e0dd32cee 100644 --- a/tests/test_models/test_head.py +++ b/tests/test_models/test_head.py @@ -9,8 +9,9 @@ import mmaction from mmaction.models import (ACRNHead, AudioTSNHead, BBoxHeadAVA, FBOHead, - I3DHead, LFBInferHead, SlowFastHead, TPNHead, - TRNHead, TSMHead, TSNHead, X3DHead) + I3DHead, LFBInferHead, SlowFastHead, + TimeSformerHead, TPNHead, TRNHead, TSMHead, + TSNHead, X3DHead) from .base import generate_backbone_demo_inputs @@ -366,6 +367,23 @@ def test_trn_head(): relation_type='RelationModlue') +def test_timesformer_head(): + """Test loss method, layer construction, attributes and forward function in + timesformer head.""" + timesformer_head = TimeSformerHead(num_classes=4, in_channels=64) + timesformer_head.init_weights() + + assert timesformer_head.num_classes == 4 + assert timesformer_head.in_channels == 64 + assert timesformer_head.init_std == 0.02 + + input_shape = (2, 64) + feat = torch.rand(input_shape) + + cls_scores = 
timesformer_head(feat) + assert cls_scores.shape == torch.Size([2, 4]) + + @patch.object(mmaction.models.LFBInferHead, '__del__', Mock) def test_lfb_infer_head(): """Test layer construction, attributes and forward function in lfb infer diff --git a/tests/test_models/test_recognizers/test_recognizer3d.py b/tests/test_models/test_recognizers/test_recognizer3d.py index 31064c7f33..ad26926cf8 100644 --- a/tests/test_models/test_recognizers/test_recognizer3d.py +++ b/tests/test_models/test_recognizers/test_recognizer3d.py @@ -256,6 +256,35 @@ def test_tpn(): _recognizer(one_img) +def test_timesformer(): + config = get_recognizer_cfg( + 'timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb.py') + config.model['backbone']['pretrained'] = None + config.model['backbone']['img_size'] = 32 + + recognizer = build_recognizer(config.model) + + input_shape = (1, 3, 3, 8, 32, 32) + demo_inputs = generate_recognizer_demo_inputs(input_shape, '3D') + + imgs = demo_inputs['imgs'] + gt_labels = demo_inputs['gt_labels'] + + losses = recognizer(imgs, gt_labels) + assert isinstance(losses, dict) + + # Test forward test + with torch.no_grad(): + img_list = [img[None, :] for img in imgs] + for one_img in img_list: + recognizer(one_img, None, return_loss=False) + + # Test forward gradcam + recognizer(imgs, gradcam=True) + for one_img in img_list: + recognizer(one_img, gradcam=True) + + def test_c3d(): config = get_recognizer_cfg('c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb.py') config.model['backbone']['pretrained'] = None From 9867976c4b8309e1c2fbe0ca1472aeef9a2e862b Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Thu, 1 Jul 2021 17:53:53 +0800 Subject: [PATCH 183/414] bump version (#986) --- README.md | 2 +- README_zh-CN.md | 2 +- mmaction/version.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 789866d251..dbf4f478d1 100644 --- a/README.md +++ b/README.md @@ -59,7 +59,7 @@ The master branch works with **PyTorch 1.3+**. ## Changelog -v0.15.0 was released in 31/05/2021. Please refer to [changelog.md](docs/changelog.md) for details and release history. +v0.16.0 was released in 01/07/2021. Please refer to [changelog.md](docs/changelog.md) for details and release history. ## Benchmark diff --git a/README_zh-CN.md b/README_zh-CN.md index 1ed6d4f762..ac6597a3a0 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -53,7 +53,7 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLa ## 更新记录 -v0.15.0 版本已于 2021 年 5 月 31 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史 +v0.16.0 版本已于 2021 年 7 月 1 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史 ## 基准测试 diff --git a/mmaction/version.py b/mmaction/version.py index 32ea1c548d..34877bdff8 100644 --- a/mmaction/version.py +++ b/mmaction/version.py @@ -1,6 +1,6 @@ # Copyright (c) Open-MMLab. All rights reserved. 
-__version__ = '0.15.0' +__version__ = '0.16.0' def parse_version_info(version_str): From 74dc92f0a951e2772573f33390d9a5645a916fc4 Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Thu, 1 Jul 2021 17:54:43 +0800 Subject: [PATCH 184/414] update changelog (#987) --- README.md | 11 ++++++++++- docs/changelog.md | 35 ++++++++++++++++++++++++++++++++++- 2 files changed, 44 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index dbf4f478d1..16ad57087c 100644 --- a/README.md +++ b/README.md @@ -100,7 +100,7 @@ Supported methods for Action Recognition: - ✅ [MultiModality: Audio](configs/recognition_audio/resnet/README.md) (ArXiv'2020) - ✅ [TANet](configs/recognition/tanet/README.md) (ArXiv'2020) - ✅ [TRN](configs/recognition/trn/README.md) (CVPR'2015) -- ✅ [PoseC3D](configs/skeleton/posec3d/README.md) (ArXiv'2021) +- ✅ [Timesformer](configs/recognition/timesformer/README.md) (ICML'2021) @@ -127,6 +127,15 @@ Supported methods for Spatial Temporal Action Detection: +Supported methods for Skeleton-based Action Recognition: + +
+<details open>
+<summary>(click to collapse)</summary>
+
+- ✅ [PoseC3D](configs/skeleton/posec3d/README.md) (ArXiv'2021)
+
+</details>
+

 Results and models are available in the *README.md* of each method's config directory. A summary can be found in the [**model zoo**](https://mmaction2.readthedocs.io/en/latest/recognition_models.html) page.
diff --git a/docs/changelog.md b/docs/changelog.md
index 103fb0eea4..53e28d295a 100644
--- a/docs/changelog.md
+++ b/docs/changelog.md
@@ -2,23 +2,56 @@
 ### Master
 
+### 0.16.0 (01/07/2021)
+
 **Highlights**
 - Support using backbone from pytorch-image-models(timm)
+- Support PIMS Decoder
+- Demo for skeleton-based action recognition
+- Support Timesformer
 
 **New Features**
 - Support using backbones from pytorch-image-models(timm) for TSN ([#880](https://github.com/open-mmlab/mmaction2/pull/880))
+- Support torchvision transformations in preprocessing pipelines ([#972](https://github.com/open-mmlab/mmaction2/pull/972))
+- Demo for skeleton-based action recognition ([#972](https://github.com/open-mmlab/mmaction2/pull/972))
+- Support Timesformer ([#839](https://github.com/open-mmlab/mmaction2/pull/839))
 
 **Improvements**
-- Add a tool to find invalid videos ([#907](https://github.com/open-mmlab/mmaction2/pull/907))
+- Add a tool to find invalid videos ([#907](https://github.com/open-mmlab/mmaction2/pull/907), [#950](https://github.com/open-mmlab/mmaction2/pull/950))
+- Add an option to specify spectrogram_type ([#909](https://github.com/open-mmlab/mmaction2/pull/909))
+- Add json output to video demo ([#906](https://github.com/open-mmlab/mmaction2/pull/906))
+- Add MIM related docs ([#918](https://github.com/open-mmlab/mmaction2/pull/918))
+- Rename lr to scheduler ([#916](https://github.com/open-mmlab/mmaction2/pull/916))
+- Support `--cfg-options` for demos ([#911](https://github.com/open-mmlab/mmaction2/pull/911))
+- Support number counting for flow-wise filename template ([#922](https://github.com/open-mmlab/mmaction2/pull/922))
+- Add Chinese tutorial ([#941](https://github.com/open-mmlab/mmaction2/pull/941))
+- Change ResNet3D default values ([#939](https://github.com/open-mmlab/mmaction2/pull/939))
+- Adjust script structure ([#935](https://github.com/open-mmlab/mmaction2/pull/935))
+- Add font color to args in long_video_demo ([#947](https://github.com/open-mmlab/mmaction2/pull/947))
+- Polish code style with Pylint ([#908](https://github.com/open-mmlab/mmaction2/pull/908))
+- Support PIMS Decoder ([#946](https://github.com/open-mmlab/mmaction2/pull/946))
+- Improve Metafiles ([#956](https://github.com/open-mmlab/mmaction2/pull/956), [#979](https://github.com/open-mmlab/mmaction2/pull/979), [#966](https://github.com/open-mmlab/mmaction2/pull/966))
+- Add links to download Kinetics400 validation ([#920](https://github.com/open-mmlab/mmaction2/pull/920))
+- Audit the usage of shutil.rmtree ([#943](https://github.com/open-mmlab/mmaction2/pull/943))
+- Polish localizer related codes ([#913](https://github.com/open-mmlab/mmaction2/pull/913))
 
 **Bug and Typo Fixes**
+- Fix spatiotemporal detection demo ([#899](https://github.com/open-mmlab/mmaction2/pull/899))
+- Fix docstring for 3D inflate ([#925](https://github.com/open-mmlab/mmaction2/pull/925))
+- Fix bug of writing text to video with TextClip ([#952](https://github.com/open-mmlab/mmaction2/pull/952))
+- Fix mmcv install in CI ([#977](https://github.com/open-mmlab/mmaction2/pull/977))
+
 **ModelZoo**
 - Add TSN with Swin Transformer backbone as an example for using pytorch-image-models(timm) backbones ([#880](https://github.com/open-mmlab/mmaction2/pull/880))
+- Port CSN checkpoints from VMZ 
([#945](https://github.com/open-mmlab/mmaction2/pull/945)) +- Release various checkpoints for UCF101, HMDB51 and Sthv1 ([#938](https://github.com/open-mmlab/mmaction2/pull/938)) +- Support Timesformer ([#839](https://github.com/open-mmlab/mmaction2/pull/839)) +- Update TSM modelzoo ([#981](https://github.com/open-mmlab/mmaction2/pull/981)) ### 0.15.0 (31/05/2021) From b484cf3a522f8779aff7949f7c82fefba8aa2e15 Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Thu, 1 Jul 2021 20:14:08 +0800 Subject: [PATCH 185/414] rename (#988) --- model_zoo.yml => model-index.yml | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename model_zoo.yml => model-index.yml (100%) diff --git a/model_zoo.yml b/model-index.yml similarity index 100% rename from model_zoo.yml rename to model-index.yml From 809a46d37adf9007b7a6e853efdfc3fce7fbeb98 Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Thu, 1 Jul 2021 20:25:43 +0800 Subject: [PATCH 186/414] [Metafile] Use Top 1 Accuracy in Metafiles (#989) --- configs/recognition/c3d/metafile.yml | 2 +- configs/recognition/csn/metafile.yml | 16 +-- configs/recognition/i3d/metafile.yml | 20 ++-- configs/recognition/omnisource/metafile.yml | 32 +++--- configs/recognition/r2plus1d/metafile.yml | 8 +- configs/recognition/slowfast/metafile.yml | 18 ++-- configs/recognition/slowonly/metafile.yml | 52 +++++----- configs/recognition/tanet/metafile.yml | 10 +- configs/recognition/tin/metafile.yml | 6 +- configs/recognition/tpn/metafile.yml | 6 +- configs/recognition/trn/metafile.yml | 8 +- configs/recognition/tsm/metafile.yml | 98 +++++++++---------- configs/recognition/tsn/metafile.yml | 70 ++++++------- configs/recognition/x3d/metafile.yml | 4 +- configs/recognition_audio/resnet/metafile.yml | 6 +- configs/skeleton/posec3d/metafile.yml | 12 +-- 16 files changed, 184 insertions(+), 184 deletions(-) diff --git a/configs/recognition/c3d/metafile.yml b/configs/recognition/c3d/metafile.yml index 5d9986c4fd..781d39a4ca 100644 --- a/configs/recognition/c3d/metafile.yml +++ b/configs/recognition/c3d/metafile.yml @@ -19,7 +19,7 @@ Models: Results: - Dataset: UCF101 Metrics: - top1 accuracy: 83.27 + Top 1 Accuracy: 83.27 top5 accuracy: 95.9 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/20201021_140429.log.json diff --git a/configs/recognition/csn/metafile.yml b/configs/recognition/csn/metafile.yml index 9d9df53974..9cecd88694 100644 --- a/configs/recognition/csn/metafile.yml +++ b/configs/recognition/csn/metafile.yml @@ -19,7 +19,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 80.14 + Top 1 Accuracy: 80.14 top5 accuracy: 94.93 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/20200728_031952.log.json @@ -42,7 +42,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 82.76 + Top 1 Accuracy: 82.76 top5 accuracy: 95.68 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log.json @@ -63,7 +63,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 77.8 + Top 1 Accuracy: 77.8 top5 accuracy: 92.8 Task: Action Recognition Weights: 
https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_from_scratch_r152_32x2x1_180e_kinetics400_rgb_20210617-d565828d.pth @@ -82,7 +82,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 82.5 + Top 1 Accuracy: 82.5 top5 accuracy: 95.3 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-c3be9793.pth @@ -102,7 +102,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 78.8 + Top 1 Accuracy: 78.8 top5 accuracy: 93.5 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_sports1m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-3367437a.pth @@ -122,7 +122,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 76.5 + Top 1 Accuracy: 76.5 top5 accuracy: 92.1 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_from_scratch_r152_32x2x1_180e_kinetics400_rgb_20210617-5c933ae1.pth @@ -142,7 +142,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 79.0 + Top 1 Accuracy: 79.0 top5 accuracy: 94.2 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_ig65m_pretrained_r50_32x2x1_58e_kinetics400_rgb_20210617-86d33018.pth @@ -162,7 +162,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 78.2 + Top 1 Accuracy: 78.2 top5 accuracy: 93.0 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_sports1m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-b9b10241.pth diff --git a/configs/recognition/i3d/metafile.yml b/configs/recognition/i3d/metafile.yml index 07dbc8cacc..9e210b8b6b 100644 --- a/configs/recognition/i3d/metafile.yml +++ b/configs/recognition/i3d/metafile.yml @@ -19,7 +19,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 72.68 + Top 1 Accuracy: 72.68 top5 accuracy: 90.78 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/20200614_060456.log.json @@ -42,7 +42,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 73.27 + Top 1 Accuracy: 73.27 top5 accuracy: 90.92 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/20200725_031555.log.json @@ -65,7 +65,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 72.85 + Top 1 Accuracy: 72.85 top5 accuracy: 90.75 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/20200706_143014.log.json @@ -88,7 +88,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 72.77 + Top 1 Accuracy: 72.77 top5 accuracy: 90.57 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/20200616_230011.log.json @@ -111,7 +111,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 73.48 + Top 1 Accuracy: 73.48 top5 accuracy: 91.0 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/20200725_031604.log.json @@ -134,7 +134,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 72.32 + Top 1 Accuracy: 72.32 top5 accuracy: 90.72 
Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/20200612_233836.log.json @@ -157,7 +157,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 73.24 + Top 1 Accuracy: 73.24 top5 accuracy: 90.99 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/20200725_031457.log.json @@ -180,7 +180,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 74.71 + Top 1 Accuracy: 74.71 top5 accuracy: 91.81 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034054.log.json @@ -203,7 +203,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 73.37 + Top 1 Accuracy: 73.37 top5 accuracy: 91.26 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034909.log.json @@ -226,7 +226,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 73.92 + Top 1 Accuracy: 73.92 top5 accuracy: 91.59 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/20200814_044208.log.json diff --git a/configs/recognition/omnisource/metafile.yml b/configs/recognition/omnisource/metafile.yml index 5561e2aa69..f0ce133aa9 100644 --- a/configs/recognition/omnisource/metafile.yml +++ b/configs/recognition/omnisource/metafile.yml @@ -20,7 +20,7 @@ Models: Results: - Dataset: MiniKinetics Metrics: - top1 accuracy: 77.4 + Top 1 Accuracy: 77.4 top5 accuracy: 93.6 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030.json @@ -44,7 +44,7 @@ Models: Results: - Dataset: MiniKinetics Metrics: - top1 accuracy: 78.0 + Top 1 Accuracy: 78.0 top5 accuracy: 93.6 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030.json @@ -68,7 +68,7 @@ Models: Results: - Dataset: MiniKinetics Metrics: - top1 accuracy: 78.6 + Top 1 Accuracy: 78.6 top5 accuracy: 93.6 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030.json @@ -92,7 +92,7 @@ Models: Results: - Dataset: MiniKinetics Metrics: - top1 accuracy: 80.6 + Top 1 Accuracy: 80.6 top5 accuracy: 95.0 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030.json @@ -116,7 +116,7 @@ Models: Results: - Dataset: MiniKinetics Metrics: - top1 accuracy: 78.6 + Top 1 Accuracy: 78.6 top5 accuracy: 93.2 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030.json @@ -140,7 +140,7 @@ Models: Results: - Dataset: MiniKinetics Metrics: - top1 accuracy: 81.3 + Top 1 Accuracy: 81.3 top5 accuracy: 94.8 Task: Action Recognition Training 
Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030.json @@ -164,7 +164,7 @@ Models: Results: - Dataset: MiniKinetics Metrics: - top1 accuracy: 78.6 + Top 1 Accuracy: 78.6 top5 accuracy: 93.9 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030.json @@ -188,7 +188,7 @@ Models: Results: - Dataset: MiniKinetics Metrics: - top1 accuracy: 80.8 + Top 1 Accuracy: 80.8 top5 accuracy: 95.0 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030.json @@ -212,7 +212,7 @@ Models: Results: - Dataset: MiniKinetics Metrics: - top1 accuracy: 81.3 + Top 1 Accuracy: 81.3 top5 accuracy: 95.2 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030.json @@ -236,7 +236,7 @@ Models: Results: - Dataset: MiniKinetics Metrics: - top1 accuracy: 82.4 + Top 1 Accuracy: 82.4 top5 accuracy: 95.6 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030.json @@ -260,7 +260,7 @@ Models: Results: - Dataset: MiniKinetics Metrics: - top1 accuracy: 80.3 + Top 1 Accuracy: 80.3 top5 accuracy: 94.5 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030.json @@ -284,7 +284,7 @@ Models: Results: - Dataset: MiniKinetics Metrics: - top1 accuracy: 82.9 + Top 1 Accuracy: 82.9 top5 accuracy: 95.8 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030.json @@ -306,7 +306,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 73.6 + Top 1 Accuracy: 73.6 top5 accuracy: 91.0 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_imagenet_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-54192355.pth @@ -326,7 +326,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 75.7 + Top 1 Accuracy: 75.7 top5 accuracy: 91.9 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-2863fed0.pth @@ -346,7 +346,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 76.8 + Top 1 Accuracy: 76.8 top5 accuracy: 92.5 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r50_omni_4x16x1_kinetics400_rgb_20200926-51b1f7ea.pth @@ -366,7 +366,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 80.4 + Top 1 Accuracy: 80.4 top5 accuracy: 94.4 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_omni_8x8x1_kinetics400_rgb_20200926-b5dbb701.pth diff --git 
a/configs/recognition/r2plus1d/metafile.yml b/configs/recognition/r2plus1d/metafile.yml index 5a61cb3a6c..9a95900717 100644 --- a/configs/recognition/r2plus1d/metafile.yml +++ b/configs/recognition/r2plus1d/metafile.yml @@ -19,7 +19,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 67.3 + Top 1 Accuracy: 67.3 top5 accuracy: 87.65 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb/20200728_021421.log.json @@ -42,7 +42,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 67.3 + Top 1 Accuracy: 67.3 top5 accuracy: 87.8 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb/20200724_201360.log @@ -65,7 +65,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 68.68 + Top 1 Accuracy: 68.68 top5 accuracy: 88.36 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_8x8_69.58_88.36.log.json @@ -88,7 +88,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 74.6 + Top 1 Accuracy: 74.6 top5 accuracy: 91.59 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r2plus1d_r34_32x2_74.6_91.6.log.json diff --git a/configs/recognition/slowfast/metafile.yml b/configs/recognition/slowfast/metafile.yml index cae7965667..a3841f2f0c 100644 --- a/configs/recognition/slowfast/metafile.yml +++ b/configs/recognition/slowfast/metafile.yml @@ -19,7 +19,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 74.75 + Top 1 Accuracy: 74.75 top5 accuracy: 91.73 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log.json @@ -42,7 +42,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 74.34 + Top 1 Accuracy: 74.34 top5 accuracy: 91.58 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log.json @@ -65,7 +65,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 75.64 + Top 1 Accuracy: 75.64 top5 accuracy: 92.3 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/20200704_232901.log.json @@ -88,7 +88,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 75.61 + Top 1 Accuracy: 75.61 top5 accuracy: 92.34 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log.json @@ -111,7 +111,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 76.94 + Top 1 Accuracy: 76.94 top5 accuracy: 92.8 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log.json @@ -134,7 +134,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 76.69 + Top 1 Accuracy: 76.69 top5 accuracy: 93.07 Task: Action Recognition Training Json Log: 
https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log.json @@ -157,7 +157,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 77.9 + Top 1 Accuracy: 77.9 top5 accuracy: 93.51 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log.json @@ -180,7 +180,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 77.13 + Top 1 Accuracy: 77.13 top5 accuracy: 93.2 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log.json @@ -203,7 +203,7 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 accuracy: 49.24 + Top 1 Accuracy: 49.24 top5 accuracy: 78.79 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/20210606_225114.log.json diff --git a/configs/recognition/slowonly/metafile.yml b/configs/recognition/slowonly/metafile.yml index 695d808051..34d52f70db 100644 --- a/configs/recognition/slowonly/metafile.yml +++ b/configs/recognition/slowonly/metafile.yml @@ -18,7 +18,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 76.8 + Top 1 Accuracy: 76.8 top5 accuracy: 92.5 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r50_omni_4x16x1_kinetics400_rgb_20200926-51b1f7ea.pth @@ -38,7 +38,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 76.5 + Top 1 Accuracy: 76.5 top5 accuracy: 92.7 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_without_omni_8x8x1_kinetics400_rgb_20200926-0c730aef.pth @@ -58,7 +58,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 80.4 + Top 1 Accuracy: 80.4 top5 accuracy: 94.4 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_omni_8x8x1_kinetics400_rgb_20200926-b5dbb701.pth @@ -79,7 +79,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 72.76 + Top 1 Accuracy: 72.76 top5 accuracy: 90.51 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log.json @@ -102,7 +102,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 72.9 + Top 1 Accuracy: 72.9 top5 accuracy: 90.82 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014.json @@ -125,7 +125,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 74.42 + Top 1 Accuracy: 74.42 top5 accuracy: 91.49 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/20200817_003320.log.json @@ -148,7 +148,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 73.02 + Top 1 Accuracy: 73.02 top5 accuracy: 90.77 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16_73.02_90.77.log.json @@ -171,7 +171,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 
accuracy: 74.93 + Top 1 Accuracy: 74.93 top5 accuracy: 91.92 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/slowonly_r50_8x8_74.93_91.92.log.json @@ -194,7 +194,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 73.39 + Top 1 Accuracy: 73.39 top5 accuracy: 91.12 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912.json @@ -217,7 +217,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 75.55 + Top 1 Accuracy: 75.55 top5 accuracy: 92.04 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912.json @@ -240,7 +240,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 74.54 + Top 1 Accuracy: 74.54 top5 accuracy: 91.73 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/20210305_152630.log.json @@ -263,7 +263,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 76.07 + Top 1 Accuracy: 76.07 top5 accuracy: 92.42 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/20210308_212250.log.json @@ -286,7 +286,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 61.79 + Top 1 Accuracy: 61.79 top5 accuracy: 83.62 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_61.8_83.6.log.json @@ -309,7 +309,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 65.76 + Top 1 Accuracy: 65.76 top5 accuracy: 86.25 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_196e_kinetics400_flow_65.8_86.3.log.json @@ -332,7 +332,7 @@ Models: Results: - Dataset: Kinetics-600 Metrics: - top1 accuracy: 77.5 + Top 1 Accuracy: 77.5 top5 accuracy: 93.7 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb/slowonly_r50_video_8x8x1_256e_kinetics600_rgb_20201015.json @@ -355,7 +355,7 @@ Models: Results: - Dataset: Kinetics-700 Metrics: - top1 accuracy: 65.0 + Top 1 Accuracy: 65.0 top5 accuracy: 86.1 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb/slowonly_r50_video_8x8x1_256e_kinetics700_rgb_20201015.json @@ -378,8 +378,8 @@ Models: Results: - Dataset: GYM99 Metrics: - mean top1 accuracy: 70.2 - top1 accuracy: 79.3 + Top 1 Accuracy: 79.3 + mean Top 1 Accuracy: 70.2 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111.json Training Log: 
https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111.log @@ -401,8 +401,8 @@ Models: Results: - Dataset: GYM99 Metrics: - mean top1 accuracy: 71.0 - top1 accuracy: 80.3 + Top 1 Accuracy: 80.3 + mean Top 1 Accuracy: 71.0 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.json Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.log @@ -424,7 +424,7 @@ Models: Results: - Dataset: Jester Metrics: - top1 accuracy: 97.2 + Top 1 Accuracy: 97.2 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.log @@ -445,7 +445,7 @@ Models: Results: - Dataset: HMDB51 Metrics: - top1 accuracy: 37.52 + Top 1 Accuracy: 37.52 top5 accuracy: 71.5 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb/20210605_185256.log.json @@ -467,7 +467,7 @@ Models: Results: - Dataset: HMDB51 Metrics: - top1 accuracy: 65.95 + Top 1 Accuracy: 65.95 top5 accuracy: 91.05 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb/20210606_010153.log.json @@ -489,7 +489,7 @@ Models: Results: - Dataset: UCF101 Metrics: - top1 accuracy: 71.35 + Top 1 Accuracy: 71.35 top5 accuracy: 89.35 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb/20210605_213503.log.json @@ -511,7 +511,7 @@ Models: Results: - Dataset: UCF101 Metrics: - top1 accuracy: 92.78 + Top 1 Accuracy: 92.78 top5 accuracy: 99.42 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/20210606_010231.log.json @@ -533,7 +533,7 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 accuracy: 46.63 + Top 1 Accuracy: 46.63 top5 accuracy: 77.19 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/20210605_235410.log.json diff --git a/configs/recognition/tanet/metafile.yml b/configs/recognition/tanet/metafile.yml index 7f62a3509e..76b5867a14 100644 --- a/configs/recognition/tanet/metafile.yml +++ b/configs/recognition/tanet/metafile.yml @@ -19,7 +19,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 76.28 + Top 1 Accuracy: 76.28 top5 accuracy: 92.6 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.json @@ -42,8 +42,8 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 accuracy: 49.69 - top1 accuracy (efficient): 47.45 + Top 1 
Accuracy: 49.69 + Top 1 Accuracy (efficient): 47.45 top5 accuracy: 77.62 top5 accuracy (efficient): 76.0 Task: Action Recognition @@ -67,8 +67,8 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 accuracy: 50.41 - top1 accuracy (efficient): 47.73 + Top 1 Accuracy: 50.41 + Top 1 Accuracy (efficient): 47.73 top5 accuracy: 78.47 top5 accuracy (efficient): 77.31 Task: Action Recognition diff --git a/configs/recognition/tin/metafile.yml b/configs/recognition/tin/metafile.yml index ae026f9009..539b8899c1 100644 --- a/configs/recognition/tin/metafile.yml +++ b/configs/recognition/tin/metafile.yml @@ -19,7 +19,7 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 accuracy: 44.25 + Top 1 Accuracy: 44.25 top5 accuracy: 73.94 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb/20200729_034132.log.json @@ -42,7 +42,7 @@ Models: Results: - Dataset: SthV2 Metrics: - top1 accuracy: 56.7 + Top 1 Accuracy: 56.7 top5 accuracy: 83.62 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb/20200912_225451.log.json @@ -65,7 +65,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 70.89 + Top 1 Accuracy: 70.89 top5 accuracy: 89.89 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/20200809_142447.log.json diff --git a/configs/recognition/tpn/metafile.yml b/configs/recognition/tpn/metafile.yml index 8707ab0426..1387c4c5e2 100644 --- a/configs/recognition/tpn/metafile.yml +++ b/configs/recognition/tpn/metafile.yml @@ -19,7 +19,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 73.1 + Top 1 Accuracy: 73.1 top5 accuracy: 91.03 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/20200910_134330.log.json @@ -42,7 +42,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 76.2 + Top 1 Accuracy: 76.2 top5 accuracy: 92.44 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/20200923_151919.log.json @@ -65,7 +65,7 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 accuracy: 50.8 + Top 1 Accuracy: 50.8 top5 accuracy: 79.05 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/20210311_162636.log.json diff --git a/configs/recognition/trn/metafile.yml b/configs/recognition/trn/metafile.yml index 45f69f2099..1f999f63cf 100644 --- a/configs/recognition/trn/metafile.yml +++ b/configs/recognition/trn/metafile.yml @@ -18,8 +18,8 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 accuracy: 33.88 - top1 accuracy (efficient): 31.62 + Top 1 Accuracy: 33.88 + Top 1 Accuracy (efficient): 31.62 top5 accuracy: 62.12 top5 accuracy (efficient): 60.01 Task: Action Recognition @@ -42,8 +42,8 @@ Models: Results: - Dataset: SthV2 Metrics: - top1 accuracy: 47.96 - top1 accuracy (efficient): 45.14 + Top 1 Accuracy: 47.96 + Top 1 Accuracy (efficient): 45.14 top5 accuracy: 75.97 top5 accuracy (efficient): 73.21 Task: Action Recognition diff --git a/configs/recognition/tsm/metafile.yml b/configs/recognition/tsm/metafile.yml index 5a18a889b1..1f0bc3c81b 100644 --- a/configs/recognition/tsm/metafile.yml +++ b/configs/recognition/tsm/metafile.yml @@ -19,7 +19,7 @@ Models: 
Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 70.24 + Top 1 Accuracy: 70.24 top5 accuracy: 89.56 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log.json @@ -42,7 +42,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 70.59 + Top 1 Accuracy: 70.59 top5 accuracy: 89.52 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log.json @@ -65,7 +65,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 70.73 + Top 1 Accuracy: 70.73 top5 accuracy: 89.81 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20210616_021451.log.json @@ -88,7 +88,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 71.90 + Top 1 Accuracy: 71.9 top5 accuracy: 90.03 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb/20210617_103543.log.json @@ -111,7 +111,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 70.48 + Top 1 Accuracy: 70.48 top5 accuracy: 89.4 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.json @@ -134,7 +134,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 70.25 + Top 1 Accuracy: 70.25 top5 accuracy: 89.66 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log.json @@ -157,7 +157,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 73.46 + Top 1 Accuracy: 73.46 top5 accuracy: 90.84 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb/20210617_103245.log.json @@ -180,7 +180,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 74.55 + Top 1 Accuracy: 74.55 top5 accuracy: 91.74 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20210613_034931.log.json @@ -203,7 +203,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 72.09 + Top 1 Accuracy: 72.09 top5 accuracy: 90.37 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log.json @@ -226,7 +226,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 71.89 + Top 1 Accuracy: 71.89 top5 accuracy: 90.73 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log.json @@ -249,7 +249,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 72.80 + Top 1 Accuracy: 72.8 top5 accuracy: 90.75 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20210621_115844.log.json @@ -272,7 +272,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 72.03 + Top 1 Accuracy: 72.03 top5 accuracy: 90.25 Task: Action Recognition Training Json Log: 
https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log.json @@ -295,7 +295,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 70.7 + Top 1 Accuracy: 70.7 top5 accuracy: 89.9 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log.json @@ -318,7 +318,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 71.6 + Top 1 Accuracy: 71.6 top5 accuracy: 90.34 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log.json @@ -341,7 +341,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 68.46 + Top 1 Accuracy: 68.46 top5 accuracy: 88.64 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log.json @@ -363,7 +363,7 @@ Models: Results: - Dataset: Diving48 Metrics: - top1 accuracy: 75.99 + Top 1 Accuracy: 75.99 top5 accuracy: 97.16 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log.json @@ -385,7 +385,7 @@ Models: Results: - Dataset: Diving48 Metrics: - top1 accuracy: 81.62 + Top 1 Accuracy: 81.62 top5 accuracy: 97.66 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log.json @@ -408,8 +408,8 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 accuracy: 47.7 - top1 accuracy (efficient): 45.58 + Top 1 Accuracy: 47.7 + Top 1 Accuracy (efficient): 45.58 top5 accuracy: 76.12 top5 accuracy (efficient): 75.02 Task: Action Recognition @@ -435,8 +435,8 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 accuracy: 48.51 - top1 accuracy (efficient): 47.1 + Top 1 Accuracy: 48.51 + Top 1 Accuracy (efficient): 47.1 top5 accuracy: 77.56 top5 accuracy (efficient): 76.02 Task: Action Recognition @@ -462,8 +462,8 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 accuracy: 48.9 - top1 accuracy (efficient): 47.16 + Top 1 Accuracy: 48.9 + Top 1 Accuracy (efficient): 47.16 top5 accuracy: 77.92 top5 accuracy (efficient): 76.07 Task: Action Recognition @@ -489,8 +489,8 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 accuracy: 50.31 - top1 accuracy (efficient): 47.85 + Top 1 Accuracy: 50.31 + Top 1 Accuracy (efficient): 47.85 top5 accuracy: 78.18 top5 accuracy (efficient): 76.78 Task: Action Recognition @@ -516,8 +516,8 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 accuracy: 49.28 - top1 accuracy (efficient): 47.62 + Top 1 Accuracy: 49.28 + Top 1 Accuracy (efficient): 47.62 top5 accuracy: 77.82 top5 accuracy (efficient): 76.63 Task: Action Recognition @@ -543,8 +543,8 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 accuracy: 48.43 - top1 accuracy (efficient): 45.72 + Top 1 Accuracy: 48.43 + Top 1 Accuracy (efficient): 45.72 top5 accuracy: 76.72 top5 accuracy (efficient): 74.67 Task: Action Recognition @@ -570,8 +570,8 @@ Models: Results: - Dataset: SthV2 Metrics: - top1 accuracy: 61.12 - top1 accuracy (efficient): 57.86 + Top 1 Accuracy: 61.12 + Top 1 Accuracy (efficient): 57.86 top5 accuracy: 86.26 top5 accuracy (efficient): 84.67 Task: Action Recognition @@ -597,8 +597,8 @@ Models: Results: - Dataset: SthV2 Metrics: - top1 accuracy: 
63.84 - top1 accuracy (efficient): 60.79 + Top 1 Accuracy: 63.84 + Top 1 Accuracy (efficient): 60.79 top5 accuracy: 88.3 top5 accuracy (efficient): 86.6 Task: Action Recognition @@ -624,8 +624,8 @@ Models: Results: - Dataset: SthV2 Metrics: - top1 accuracy: 62.04 - top1 accuracy (efficient): 59.93 + Top 1 Accuracy: 62.04 + Top 1 Accuracy (efficient): 59.93 top5 accuracy: 87.35 top5 accuracy (efficient): 86.1 Task: Action Recognition @@ -651,8 +651,8 @@ Models: Results: - Dataset: SthV2 Metrics: - top1 accuracy: 63.19 - top1 accuracy (efficient): 61.06 + Top 1 Accuracy: 63.19 + Top 1 Accuracy (efficient): 61.06 top5 accuracy: 87.93 top5 accuracy (efficient): 86.66 Task: Action Recognition @@ -678,8 +678,8 @@ Models: Results: - Dataset: SthV2 Metrics: - top1 accuracy: 61.51 - top1 accuracy (efficient): 58.59 + Top 1 Accuracy: 61.51 + Top 1 Accuracy (efficient): 58.59 top5 accuracy: 86.9 top5 accuracy (efficient): 85.07 Task: Action Recognition @@ -705,8 +705,8 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 accuracy: 48.49 - top1 accuracy (efficient): 46.35 + Top 1 Accuracy: 48.49 + Top 1 Accuracy (efficient): 46.35 top5 accuracy: 76.88 top5 accuracy (efficient): 75.07 Task: Action Recognition @@ -732,8 +732,8 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 accuracy: 47.46 - top1 accuracy (efficient): 45.92 + Top 1 Accuracy: 47.46 + Top 1 Accuracy (efficient): 45.92 top5 accuracy: 76.71 top5 accuracy (efficient): 75.23 Task: Action Recognition @@ -759,8 +759,8 @@ Models: Results: - Dataset: Jester Metrics: - top1 accuracy: 97.2 - top1 accuracy (efficient): 96.5 + Top 1 Accuracy: 97.2 + Top 1 Accuracy (efficient): 96.5 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb.log @@ -781,7 +781,7 @@ Models: Results: - Dataset: HMDB51 Metrics: - top1 accuracy: 72.68 + Top 1 Accuracy: 72.68 top5 accuracy: 92.03 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/20210605_182554.log.json @@ -804,7 +804,7 @@ Models: Results: - Dataset: HMDB51 Metrics: - top1 accuracy: 74.77 + Top 1 Accuracy: 74.77 top5 accuracy: 93.86 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/20210605_182505.log.json @@ -827,7 +827,7 @@ Models: Results: - Dataset: UCF101 Metrics: - top1 accuracy: 94.5 + Top 1 Accuracy: 94.5 top5 accuracy: 99.58 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/20210605_182720.log.json @@ -850,7 +850,7 @@ Models: Results: - Dataset: UCF101 Metrics: - top1 accuracy: 94.58 + Top 1 Accuracy: 94.58 top5 accuracy: 99.37 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/20210605_182720.log.json diff --git a/configs/recognition/tsn/metafile.yml b/configs/recognition/tsn/metafile.yml index 05a773364f..4a60bd9261 100644 --- a/configs/recognition/tsn/metafile.yml +++ b/configs/recognition/tsn/metafile.yml @@ -18,7 +18,7 @@ Models: Results: - Dataset: UCF101 Metrics: - top1 accuracy: 83.03 + Top 1 Accuracy: 83.03 top5 accuracy: 96.78 Task: Action Recognition Training Json Log: 
https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb/tsn_r50_1x1x3_75e_ucf101_rgb_20201023.json @@ -40,7 +40,7 @@ Models: Results: - Dataset: Diving48 Metrics: - top1 accuracy: 71.27 + Top 1 Accuracy: 71.27 top5 accuracy: 95.74 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/20210426_014138.log.json @@ -62,7 +62,7 @@ Models: Results: - Dataset: Diving48 Metrics: - top1 accuracy: 76.75 + Top 1 Accuracy: 76.75 top5 accuracy: 96.95 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/20210426_014103.log.json @@ -84,7 +84,7 @@ Models: Results: - Dataset: HMDB51 Metrics: - top1 accuracy: 48.95 + Top 1 Accuracy: 48.95 top5 accuracy: 80.19 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb/20201025_231108.log.json @@ -106,7 +106,7 @@ Models: Results: - Dataset: HMDB51 Metrics: - top1 accuracy: 56.08 + Top 1 Accuracy: 56.08 top5 accuracy: 84.31 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb/20201108_190805.log.json @@ -127,7 +127,7 @@ Models: Results: - Dataset: HMDB51 Metrics: - top1 accuracy: 54.25 + Top 1 Accuracy: 54.25 top5 accuracy: 83.86 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb/20201112_170135.log.json @@ -150,7 +150,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 70.6 + Top 1 Accuracy: 70.6 top5 accuracy: 89.26 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log.json @@ -173,7 +173,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 70.42 + Top 1 Accuracy: 70.42 top5 accuracy: 89.03 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log.json @@ -196,7 +196,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 70.18 + Top 1 Accuracy: 70.18 top5 accuracy: 89.1 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb/20200627_105310.log.json @@ -219,7 +219,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 70.91 + Top 1 Accuracy: 70.91 top5 accuracy: 89.51 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log.json @@ -242,7 +242,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 55.7 + Top 1 Accuracy: 55.7 top5 accuracy: 79.85 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow/tsn_r50_f3_kinetics400_flow_shortedge_55.7_79.9.log.json @@ -265,7 +265,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 71.8 + Top 1 Accuracy: 71.8 top5 accuracy: 90.17 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x8_100e_kinetics400_rgb/20200815_173413.log.json @@ -288,7 +288,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 
72.41 + Top 1 Accuracy: 72.41 top5 accuracy: 90.55 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb/tsn_r50_f8_kinetics400_shortedge_72.4_90.6.log.json @@ -311,7 +311,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 57.76 + Top 1 Accuracy: 57.76 top5 accuracy: 80.99 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow/tsn_r50_f8_kinetics400_flow_shortedge_57.8_81.0.log.json @@ -334,7 +334,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 71.11 + Top 1 Accuracy: 71.11 top5 accuracy: 90.04 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014.json @@ -357,7 +357,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 70.77 + Top 1 Accuracy: 70.77 top5 accuracy: 89.3 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/20200606_003901.log.json @@ -380,7 +380,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 71.79 + Top 1 Accuracy: 71.79 top5 accuracy: 90.25 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log.json @@ -403,7 +403,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 70.4 + Top 1 Accuracy: 70.4 top5 accuracy: 89.12 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_dense_100e_kinetics400_rgb.log.json @@ -426,7 +426,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 73.43 + Top 1 Accuracy: 73.43 top5 accuracy: 91.01 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.json @@ -449,7 +449,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 72.78 + Top 1 Accuracy: 72.78 top5 accuracy: 90.75 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.json @@ -471,7 +471,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 73.6 + Top 1 Accuracy: 73.6 top5 accuracy: 91.0 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_imagenet_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-54192355.pth @@ -491,7 +491,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 73.1 + Top 1 Accuracy: 73.1 top5 accuracy: 90.4 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_without_omni_1x1x3_kinetics400_rgb_20200926-c133dd49.pth @@ -511,7 +511,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 75.7 + Top 1 Accuracy: 75.7 top5 accuracy: 91.9 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-2863fed0.pth @@ -532,7 +532,7 @@ Models: Results: - Dataset: Kinetics-600 Metrics: - 
top1 accuracy: 74.8 + Top 1 Accuracy: 74.8 top5 accuracy: 92.3 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb/tsn_r50_video_1x1x8_100e_kinetics600_rgb_20201015.json @@ -555,7 +555,7 @@ Models: Results: - Dataset: Kinetics-700 Metrics: - top1 accuracy: 61.7 + Top 1 Accuracy: 61.7 top5 accuracy: 83.6 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb/tsn_r50_video_1x1x8_100e_kinetics700_rgb_20201015.json @@ -578,7 +578,7 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 accuracy: 18.55 + Top 1 Accuracy: 18.55 top5 accuracy: 44.8 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_r50_f8_sthv1_18.1_45.0.log.json @@ -601,7 +601,7 @@ Models: Results: - Dataset: SthV1 Metrics: - top1 accuracy: 15.77 + Top 1 Accuracy: 15.77 top5 accuracy: 39.85 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/20200614_211932.log.json @@ -624,7 +624,7 @@ Models: Results: - Dataset: SthV2 Metrics: - top1 accuracy: 32.97 + Top 1 Accuracy: 32.97 top5 accuracy: 63.62 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20200915_114139.log.json @@ -647,7 +647,7 @@ Models: Results: - Dataset: SthV2 Metrics: - top1 accuracy: 27.21 + Top 1 Accuracy: 27.21 top5 accuracy: 55.84 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20200917_105855.log.json @@ -670,7 +670,7 @@ Models: Results: - Dataset: MiT Metrics: - top1 accuracy: 26.84 + Top 1 Accuracy: 26.84 top5 accuracy: 51.6 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb/tsn_r50_f6_mit_26.8_51.6.log.json @@ -715,7 +715,7 @@ Models: Results: - Dataset: ActivityNet v1.3 Metrics: - top1 accuracy: 73.93 + Top 1 Accuracy: 73.93 top5 accuracy: 93.44 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb/20210228_223327.log.json @@ -738,7 +738,7 @@ Models: Results: - Dataset: ActivityNet v1.3 Metrics: - top1 accuracy: 76.9 + Top 1 Accuracy: 76.9 top5 accuracy: 94.47 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb/20210217_181313.log.json @@ -761,7 +761,7 @@ Models: Results: - Dataset: ActivityNet v1.3 Metrics: - top1 accuracy: 57.51 + Top 1 Accuracy: 57.51 top5 accuracy: 83.02 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow/tsn_r50_320p_1x1x8_150e_activitynet_video_flow_20200804.json @@ -784,7 +784,7 @@ Models: Results: - Dataset: ActivityNet v1.3 Metrics: - top1 accuracy: 59.51 + Top 1 Accuracy: 59.51 top5 accuracy: 82.69 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow_20200804.json @@ -943,7 +943,7 @@ Models: Results: - Dataset: Kinetics400 Metrics: - top1 accuracy: 77.51 + Top 1 Accuracy: 77.51 top5 accuracy: 92.92 Task: Action Recognition Training Json Log: 
https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.json diff --git a/configs/recognition/x3d/metafile.yml b/configs/recognition/x3d/metafile.yml index 0e63bae550..38a9f69eb9 100644 --- a/configs/recognition/x3d/metafile.yml +++ b/configs/recognition/x3d/metafile.yml @@ -16,8 +16,8 @@ Models: Results: - Dataset: Kinetics-400 Metrics: + Top 1 Accuracy: 73.2 top1 10-view: 72.7 - top1 accuracy: 73.2 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/x3d/facebook/x3d_s_facebook_13x6x1_kinetics400_rgb_20201027-623825a0.pth reference top1 10-view: 73.1 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] @@ -36,8 +36,8 @@ Models: Results: - Dataset: Kinetics-400 Metrics: + Top 1 Accuracy: 75.6 top1 10-view: 75.0 - top1 accuracy: 75.6 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/x3d/facebook/x3d_m_facebook_16x5x1_kinetics400_rgb_20201027-3f42382a.pth reference top1 10-view: 75.1 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] diff --git a/configs/recognition_audio/resnet/metafile.yml b/configs/recognition_audio/resnet/metafile.yml index 1d7625a9f6..3891bd4b16 100644 --- a/configs/recognition_audio/resnet/metafile.yml +++ b/configs/recognition_audio/resnet/metafile.yml @@ -15,9 +15,9 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - top1 accuracy: 19.7 - top1 accuracy [w. RGB]: 71.5 - top1 accuracy delta [w. RGB]: 0.39 + Top 1 Accuracy: 19.7 + Top 1 Accuracy [w. RGB]: 71.5 + Top 1 Accuracy delta [w. RGB]: 0.39 top5 accuracy: 35.75 top5 accuracy [w. RGB]: 90.18 top5 accuracy delta [w. 
RGB]: 0.14 diff --git a/configs/skeleton/posec3d/metafile.yml b/configs/skeleton/posec3d/metafile.yml index d2c784d00b..a9b87a714d 100644 --- a/configs/skeleton/posec3d/metafile.yml +++ b/configs/skeleton/posec3d/metafile.yml @@ -16,7 +16,7 @@ Models: Results: - Dataset: FineGYM Metrics: - mean top1 accuracy: 93.7 + mean Top 1 Accuracy: 93.7 Task: Skeleton-based Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint.json Training Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint.log @@ -35,7 +35,7 @@ Models: Results: - Dataset: FineGYM Metrics: - mean top1 accuracy: 94.0 + mean Top 1 Accuracy: 94.0 Task: Skeleton-based Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb.json Training Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb.log @@ -54,7 +54,7 @@ Models: Results: - Dataset: NTU60-XSub Metrics: - top1 accuracy: 93.7 + Top 1 Accuracy: 93.7 Task: Skeleton-based Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint.json Training Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint.log @@ -73,7 +73,7 @@ Models: Results: - Dataset: NTU60-XSub Metrics: - top1 accuracy: 93.4 + Top 1 Accuracy: 93.4 Task: Skeleton-based Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb.json Training Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb.log @@ -92,7 +92,7 @@ Models: Results: - Dataset: NTU120-XSub Metrics: - top1 accuracy: 86.3 + Top 1 Accuracy: 86.3 Task: Skeleton-based Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint.json Training Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint.log @@ -111,7 +111,7 @@ Models: Results: - Dataset: NTU120-XSub Metrics: - top1 accuracy: 85.7 + Top 1 Accuracy: 85.7 Task: Skeleton-based Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.json Training Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.log From e88e0354f24165330dbaa3fd83245b97922f359e Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Fri, 2 Jul 2021 11:26:12 +0800 Subject: [PATCH 187/414] Update MANIFEST.in --- MANIFEST.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/MANIFEST.in b/MANIFEST.in index a452def31a..3dbdf42ba6 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,3 +1,3 @@ -include mmaction/model_zoo.yml +include mmaction/model-index.yml recursive-include mmaction/configs *.py *.yml recursive-include 
mmaction/tools *.sh *.py From bef4bd65a008cf5dabe12e39ae7a91c405d7d914 Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Sat, 3 Jul 2021 21:41:44 +0800 Subject: [PATCH 188/414] [Fix] Fix API doc (#992) --- .readthedocs.yml | 1 + docs/conf.py | 2 +- docs_zh_CN/conf.py | 2 +- requirements/docs.txt | 1 + 4 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index 73ea4cb7e9..2c98050982 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -5,3 +5,4 @@ python: install: - requirements: requirements/docs.txt - requirements: requirements/readthedocs.txt + - requirements: requirements/mminstall.txt diff --git a/docs/conf.py b/docs/conf.py index 57e8566fad..00802c1661 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -44,7 +44,7 @@ def get_version(): ] # numpy and torch are required -autodoc_mock_imports = ['mmaction.version', 'cv2', 'PIL'] +autodoc_mock_imports = ['mmaction.version', 'PIL'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] diff --git a/docs_zh_CN/conf.py b/docs_zh_CN/conf.py index 3b098735e2..40f17d03ef 100644 --- a/docs_zh_CN/conf.py +++ b/docs_zh_CN/conf.py @@ -44,7 +44,7 @@ def get_version(): ] # numpy and torch are required -autodoc_mock_imports = ['mmaction.version', 'cv2', 'PIL'] +autodoc_mock_imports = ['mmaction.version', 'PIL'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] diff --git a/requirements/docs.txt b/requirements/docs.txt index 89fbf86c01..10d4b1ffc9 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,3 +1,4 @@ +opencv-python recommonmark sphinx sphinx_markdown_tables From 3c61dcd09c024e226e996547d1c351b80bcf4706 Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Sat, 3 Jul 2021 21:55:59 +0800 Subject: [PATCH 189/414] Update docs.txt --- requirements/docs.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/requirements/docs.txt b/requirements/docs.txt index 10d4b1ffc9..a87770c3ba 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,3 +1,5 @@ +einops +scipy opencv-python recommonmark sphinx From 4460ef36ed940a820362038dfa4615289de93630 Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Mon, 5 Jul 2021 15:26:06 +0800 Subject: [PATCH 190/414] Update metafile.yml --- configs/recognition/i3d/metafile.yml | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/configs/recognition/i3d/metafile.yml b/configs/recognition/i3d/metafile.yml index 9e210b8b6b..02f5704cd7 100644 --- a/configs/recognition/i3d/metafile.yml +++ b/configs/recognition/i3d/metafile.yml @@ -3,7 +3,7 @@ Collections: README: configs/recognition/i3d/README.md Models: - Config: configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py - In Collection: CSN + In Collection: I3D Metadata: Architecture: ResNet50 Batch Size: 8 @@ -26,7 +26,7 @@ Models: Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/20200614_060456.log Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/i3d_r50_32x2x1_100e_kinetics400_rgb_20200614-c25ef9a4.pth - Config: configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py - In Collection: CSN + In Collection: I3D Metadata: Architecture: ResNet50 Batch Size: 8 @@ -49,7 +49,7 @@ Models: Training Log: 
https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/20200725_031555.log Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth - Config: configs/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb.py - In Collection: CSN + In Collection: I3D Metadata: Architecture: ResNet50 Batch Size: 8 @@ -72,7 +72,7 @@ Models: Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/20200706_143014.log Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/i3d_r50_video_32x2x1_100e_kinetics400_rgb_20200826-e31c6f52.pth - Config: configs/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb.py - In Collection: CSN + In Collection: I3D Metadata: Architecture: ResNet50 Batch Size: 8 @@ -95,7 +95,7 @@ Models: Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/20200616_230011.log Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/i3d_r50_dense_32x2x1_100e_kinetics400_rgb_20200616-2bbb4361.pth - Config: configs/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb.py - In Collection: CSN + In Collection: I3D Metadata: Architecture: ResNet50 Batch Size: 8 @@ -118,7 +118,7 @@ Models: Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/20200725_031604.log Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb_20200725-24eb54cc.pth - Config: configs/recognition/i3d/i3d_r50_lazy_32x2x1_100e_kinetics400_rgb.py - In Collection: CSN + In Collection: I3D Metadata: Architecture: ResNet50 Batch Size: 8 @@ -141,7 +141,7 @@ Models: Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/20200612_233836.log Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/i3d_r50_fast_32x2x1_100e_kinetics400_rgb_20200612-000e4d2a.pth - Config: configs/recognition/i3d/i3d_r50_lazy_32x2x1_100e_kinetics400_rgb.py - In Collection: CSN + In Collection: I3D Metadata: Architecture: ResNet50 Batch Size: 8 @@ -164,7 +164,7 @@ Models: Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/20200725_031457.log Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb_20200817-4e90d1d5.pth - Config: configs/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb.py - In Collection: CSN + In Collection: I3D Metadata: Architecture: ResNet50 Batch Size: 8 @@ -187,7 +187,7 @@ Models: Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034054.log Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb_20200813-6e6aef1b.pth - Config: configs/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb.py - In Collection: CSN + In Collection: I3D Metadata: Architecture: ResNet50 Batch Size: 8 @@ -210,7 +210,7 @@ 
Models: Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034909.log Weights: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb_20200815-17f84aa2.pth - Config: configs/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb.py - In Collection: CSN + In Collection: I3D Metadata: Architecture: ResNet50 Batch Size: 8 From b38e9ad97f8b30bc854c684f9c87fc457917826d Mon Sep 17 00:00:00 2001 From: Wang Xiao <31362395+SCZwangxiao@users.noreply.github.com> Date: Tue, 6 Jul 2021 12:25:36 +0800 Subject: [PATCH 191/414] [Fix] Fix test best failure due to wrong eval_hook assignment (#991) --- mmaction/apis/train.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/mmaction/apis/train.py b/mmaction/apis/train.py index aa3450c939..3254aa5452 100644 --- a/mmaction/apis/train.py +++ b/mmaction/apis/train.py @@ -150,8 +150,9 @@ def train_model(model, dataloader_setting = dict(dataloader_setting, **cfg.data.get('val_dataloader', {})) val_dataloader = build_dataloader(val_dataset, **dataloader_setting) - eval_hook = DistEvalHook if distributed else EvalHook - runner.register_hook(eval_hook(val_dataloader, **eval_cfg)) + eval_hook = DistEvalHook(val_dataloader, **eval_cfg) if distributed \ + else EvalHook(val_dataloader, **eval_cfg) + runner.register_hook(eval_hook) if cfg.resume_from: runner.resume(cfg.resume_from) From c6b75d02ce6079dd9072c18c906d8db2537bb4be Mon Sep 17 00:00:00 2001 From: Haodong Duan <34324155+kennymckormick@users.noreply.github.com> Date: Tue, 6 Jul 2021 15:08:40 +0800 Subject: [PATCH 192/414] [Improvement] Remove Flips (#996) * remove_flips * fix lint --- configs/localization/ssn/ssn_r50_450e_thumos14_rgb_test.py | 1 - configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py | 1 - configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb.py | 2 -- .../i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb.py | 2 -- configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py | 2 -- .../i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb.py | 2 -- .../i3d/i3d_r50_heavy_8x8x1_100e_kinetics400_rgb.py | 2 -- .../i3d/i3d_r50_lazy_32x2x1_100e_kinetics400_rgb.py | 1 - .../i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb.py | 2 -- .../i3d/i3d_r50_video_heavy_8x8x1_100e_kinetics400_rgb.py | 2 -- .../i3d/i3d_r50_video_imgaug_32x2x1_100e_kinetics400_rgb.py | 2 -- .../r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb.py | 2 -- .../r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py | 2 -- .../r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb.py | 2 -- .../slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb.py | 2 -- .../slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py | 2 -- .../slowfast/slowfast_r152_r50_4x16x1_256e_kinetics400_rgb.py | 2 -- .../slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py | 2 -- .../slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py | 2 -- ...y_r50_randomresizedcrop_256p_4x16x1_256e_kinetics400_rgb.py | 2 -- ...y_r50_randomresizedcrop_320p_4x16x1_256e_kinetics400_rgb.py | 2 -- ...50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb.py | 2 -- .../slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb.py | 2 -- ...only_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb.py | 2 -- ...wonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb.py | 2 -- .../slowonly_k400_pretrained_r50_4x16x1_120e_gym99_flow.py | 2 --
...nly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb.py | 2 -- ...only_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb.py | 2 -- .../slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow.py | 2 -- .../slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py | 2 -- .../slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow.py | 2 -- .../slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py | 2 -- .../slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py | 2 -- .../slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb.py | 2 -- .../slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb.py | 2 -- .../tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb.py | 1 - .../tsm_mobilenetv2_video_dense_1x1x8_100e_kinetics400_rgb.py | 2 -- ...lenetv2_video_inference_dense_1x1x8_100e_kinetics400_rgb.py | 1 - .../tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb.py | 2 -- .../tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb.py | 2 -- .../tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb.py | 2 -- configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py | 2 -- configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py | 2 -- .../tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py | 2 -- .../tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py | 2 -- .../recognition/tsm/tsm_r50_video_1x1x8_50e_kinetics400_rgb.py | 2 -- .../tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.py | 2 -- .../tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.py | 2 -- ...n_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py | 2 -- .../tsn_r50_multiscalecrop_256p_1x1x3_100e_kinetics400_rgb.py | 2 -- .../tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb.py | 2 -- ...sn_r50_multiscalecrop_340x256_1x1x3_100e_kinetics400_rgb.py | 2 -- ...sn_r50_randomresizedcrop_256p_1x1x3_100e_kinetics400_rgb.py | 2 -- ...sn_r50_randomresizedcrop_320p_1x1x3_100e_kinetics400_rgb.py | 2 -- ...r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb.py | 2 -- .../tsn_r50_test_256p_1x1x25_10crop_100e_kinetics400_rgb.py | 1 - .../tsn_r50_test_256p_1x1x25_3crop_100e_kinetics400_rgb.py | 1 - .../tsn_r50_test_320p_1x1x25_10crop_100e_kinetics400_rgb.py | 1 - .../tsn_r50_test_320p_1x1x25_3crop_100e_kinetics400_rgb.py | 1 - .../tsn_r50_test_340x256_1x1x25_10crop_100e_kinetics400_rgb.py | 1 - .../tsn_r50_test_340x256_1x1x25_3crop_100e_kinetics400_rgb.py | 1 - .../recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_action_rgb.py | 2 -- .../tsn/hvu/tsn_r18_1x1x8_100e_hvu_attribute_rgb.py | 2 -- .../recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_concept_rgb.py | 2 -- .../recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_event_rgb.py | 2 -- .../recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_object_rgb.py | 2 -- .../recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_scene_rgb.py | 2 -- .../recognition/tsn/tsn_fp16_r50_1x1x3_100e_kinetics400_rgb.py | 2 -- configs/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb.py | 2 -- configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb.py | 1 - configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py | 2 -- configs/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb.py | 2 -- configs/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb.py | 2 -- .../recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb.py | 2 -- .../tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb.py | 2 -- configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb.py | 2 -- configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb.py | 2 -- configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb.py | 3 --- .../recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb.py | 2 -- 
.../tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow.py | 2 -- .../recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb.py | 2 -- .../tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow.py | 2 -- .../tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow.py | 2 -- .../tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow.py | 2 -- .../tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb.py | 2 -- .../tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb.py | 2 -- .../tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb.py | 2 -- .../tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb.py | 2 -- .../tsn/tsn_r50_inference_1x1x3_100e_kinetics400_rgb.py | 1 - .../tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py | 2 -- .../tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb.py | 2 -- .../tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb.py | 2 -- .../tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py | 2 -- .../tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb.py | 2 -- .../tsn/tsn_r50_video_imgaug_1x1x8_100e_kinetics400_rgb.py | 2 -- .../tsn/tsn_r50_video_mixup_1x1x8_100e_kinetics400_rgb.py | 2 -- docs/tutorials/1_config.md | 2 -- docs/tutorials/4_data_pipeline.md | 2 -- docs_zh_CN/tutorials/1_config.md | 2 -- docs_zh_CN/tutorials/4_data_pipeline.md | 2 -- requirements/docs.txt | 2 +- 101 files changed, 1 insertion(+), 189 deletions(-) diff --git a/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_test.py b/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_test.py index cca88f1999..b9ed3979eb 100644 --- a/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_test.py +++ b/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_test.py @@ -62,7 +62,6 @@ dict(type='RawFrameDecode'), dict(type='Resize', scale=(340, 256), keep_ratio=True), dict(type='CenterCrop', crop_size=224), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict( diff --git a/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py b/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py index 435ac635b3..75d927a76f 100644 --- a/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py +++ b/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py @@ -65,7 +65,6 @@ dict(type='RawFrameDecode'), dict(type='Resize', scale=(340, 256), keep_ratio=True), dict(type='CenterCrop', crop_size=224), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NPTCHW'), dict( diff --git a/configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb.py b/configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb.py index 8b90a0f972..cd96fca866 100644 --- a/configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb.py +++ b/configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb.py @@ -30,7 +30,6 @@ dict(type='RawFrameDecode'), dict(type='Resize', scale=(128, 171)), dict(type='CenterCrop', crop_size=112), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), @@ -46,7 +45,6 @@ dict(type='RawFrameDecode'), dict(type='Resize', scale=(128, 171)), dict(type='CenterCrop', crop_size=112), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/configs/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb.py b/configs/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb.py index 
39e269c419..8ff1e2ff1e 100644 --- a/configs/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb.py +++ b/configs/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb.py @@ -49,7 +49,6 @@ dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), @@ -65,7 +64,6 @@ dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py b/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py index 65076c9d05..894e8196be 100644 --- a/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py +++ b/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py @@ -39,7 +39,6 @@ dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), @@ -55,7 +54,6 @@ dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/configs/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb.py b/configs/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb.py index a4f19cdfce..f1bdc4f4a1 100644 --- a/configs/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb.py +++ b/configs/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb.py @@ -36,7 +36,6 @@ dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), @@ -52,7 +51,6 @@ dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/configs/recognition/i3d/i3d_r50_heavy_8x8x1_100e_kinetics400_rgb.py b/configs/recognition/i3d/i3d_r50_heavy_8x8x1_100e_kinetics400_rgb.py index 3895cdd4f6..b4688d4c13 100644 --- a/configs/recognition/i3d/i3d_r50_heavy_8x8x1_100e_kinetics400_rgb.py +++ b/configs/recognition/i3d/i3d_r50_heavy_8x8x1_100e_kinetics400_rgb.py @@ -44,7 +44,6 @@ dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), @@ -60,7 +59,6 @@ dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), 
dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/configs/recognition/i3d/i3d_r50_lazy_32x2x1_100e_kinetics400_rgb.py b/configs/recognition/i3d/i3d_r50_lazy_32x2x1_100e_kinetics400_rgb.py index 890af437d9..eb285c89e9 100644 --- a/configs/recognition/i3d/i3d_r50_lazy_32x2x1_100e_kinetics400_rgb.py +++ b/configs/recognition/i3d/i3d_r50_lazy_32x2x1_100e_kinetics400_rgb.py @@ -55,7 +55,6 @@ dict(type='RawFrameDecode', decoding_backend='turbojpeg'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/configs/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb.py b/configs/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb.py index f10dfeea7a..968d6c9e77 100644 --- a/configs/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb.py +++ b/configs/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb.py @@ -38,7 +38,6 @@ dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), @@ -55,7 +54,6 @@ dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/configs/recognition/i3d/i3d_r50_video_heavy_8x8x1_100e_kinetics400_rgb.py b/configs/recognition/i3d/i3d_r50_video_heavy_8x8x1_100e_kinetics400_rgb.py index 7391ca9d80..2ee3ff7b28 100644 --- a/configs/recognition/i3d/i3d_r50_video_heavy_8x8x1_100e_kinetics400_rgb.py +++ b/configs/recognition/i3d/i3d_r50_video_heavy_8x8x1_100e_kinetics400_rgb.py @@ -38,7 +38,6 @@ dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), @@ -55,7 +54,6 @@ dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/configs/recognition/i3d/i3d_r50_video_imgaug_32x2x1_100e_kinetics400_rgb.py b/configs/recognition/i3d/i3d_r50_video_imgaug_32x2x1_100e_kinetics400_rgb.py index dfda8749e8..68b1bc9971 100644 --- a/configs/recognition/i3d/i3d_r50_video_imgaug_32x2x1_100e_kinetics400_rgb.py +++ b/configs/recognition/i3d/i3d_r50_video_imgaug_32x2x1_100e_kinetics400_rgb.py @@ -45,7 +45,6 @@ dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), @@ -62,7 +61,6 @@ dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', 
**img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/configs/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb.py b/configs/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb.py index c8e360cb39..fc5514a9be 100644 --- a/configs/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb.py +++ b/configs/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb.py @@ -31,7 +31,6 @@ dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), @@ -47,7 +46,6 @@ dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py b/configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py index dfdb3a6b62..a4c25d7f69 100644 --- a/configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py +++ b/configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py @@ -33,7 +33,6 @@ dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), @@ -49,7 +48,6 @@ dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/configs/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb.py b/configs/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb.py index dd8112cddf..c3744dcf46 100644 --- a/configs/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb.py +++ b/configs/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb.py @@ -36,7 +36,6 @@ dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), @@ -53,7 +52,6 @@ dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/configs/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb.py b/configs/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb.py index 79259e419c..e484296692 100644 --- a/configs/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb.py +++ b/configs/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb.py @@ -66,7 +66,6 @@ dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), - dict(type='Flip', 
flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -82,7 +81,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py b/configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py
index a74dc94f26..dc2f1b898a 100644
--- a/configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py
+++ b/configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py
@@ -65,7 +65,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -81,7 +80,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/slowfast/slowfast_r152_r50_4x16x1_256e_kinetics400_rgb.py b/configs/recognition/slowfast/slowfast_r152_r50_4x16x1_256e_kinetics400_rgb.py
index a581bae143..06a9792ddd 100644
--- a/configs/recognition/slowfast/slowfast_r152_r50_4x16x1_256e_kinetics400_rgb.py
+++ b/configs/recognition/slowfast/slowfast_r152_r50_4x16x1_256e_kinetics400_rgb.py
@@ -65,7 +65,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -81,7 +80,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py b/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py
index 26c8da627c..a0de3fe8ca 100644
--- a/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py
+++ b/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py
@@ -32,7 +32,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -48,7 +47,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py b/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py
index b4d4ed63db..57108548d4 100644
--- a/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py
+++ b/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py
@@ -40,7 +40,6 @@
     dict(type='DecordDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -57,7 +56,6 @@
     dict(type='DecordDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_256p_4x16x1_256e_kinetics400_rgb.py b/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_256p_4x16x1_256e_kinetics400_rgb.py
index 5fbea21781..2624e00be6 100644
--- a/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_256p_4x16x1_256e_kinetics400_rgb.py
+++ b/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_256p_4x16x1_256e_kinetics400_rgb.py
@@ -49,7 +49,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -65,7 +64,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_320p_4x16x1_256e_kinetics400_rgb.py b/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_320p_4x16x1_256e_kinetics400_rgb.py
index 521357178e..7aea6956cb 100644
--- a/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_320p_4x16x1_256e_kinetics400_rgb.py
+++ b/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_320p_4x16x1_256e_kinetics400_rgb.py
@@ -48,7 +48,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -64,7 +63,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb.py b/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb.py
index d326e9cccb..638324ae81 100644
--- a/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb.py
+++ b/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb.py
@@ -48,7 +48,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -64,7 +63,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb.py b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb.py
index 88c2bf45e8..8bfcd77d39 100644
--- a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb.py
+++ b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb.py
@@ -33,7 +33,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -49,7 +48,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb.py b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb.py
index 3565cb8821..5ed60a91eb 100644
--- a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb.py
+++ b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb.py
@@ -33,7 +33,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -49,7 +48,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb.py b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb.py
index afd7326ff4..9a09622804 100644
--- a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb.py
+++ b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb.py
@@ -33,7 +33,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -49,7 +48,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/slowonly/slowonly_k400_pretrained_r50_4x16x1_120e_gym99_flow.py b/configs/recognition/slowonly/slowonly_k400_pretrained_r50_4x16x1_120e_gym99_flow.py
index d6931558af..8ad75bace6 100644
--- a/configs/recognition/slowonly/slowonly_k400_pretrained_r50_4x16x1_120e_gym99_flow.py
+++ b/configs/recognition/slowonly/slowonly_k400_pretrained_r50_4x16x1_120e_gym99_flow.py
@@ -35,7 +35,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -51,7 +50,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb.py b/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb.py
index 830c30de95..e5e33a126d 100644
--- a/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb.py
+++ b/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb.py
@@ -46,7 +46,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -62,7 +61,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb.py b/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb.py
index 008b2cf5d0..8331fdac8f 100644
--- a/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb.py
+++ b/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb.py
@@ -46,7 +46,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -62,7 +61,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow.py b/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow.py
index d7aeb2774b..04c0a25ca8 100644
--- a/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow.py
+++ b/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow.py
@@ -35,7 +35,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -51,7 +50,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py b/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py
index c8ffe60eb8..2fbab6150d 100644
--- a/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py
+++ b/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py
@@ -36,7 +36,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -52,7 +51,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow.py b/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow.py
index 2eb20a0084..5d55e3386e 100644
--- a/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow.py
+++ b/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow.py
@@ -35,7 +35,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -51,7 +50,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py b/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py
index 087d0e09c4..5764678c84 100644
--- a/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py
+++ b/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py
@@ -36,7 +36,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -52,7 +51,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py b/configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py
index 86c948df93..3a89d35785 100644
--- a/configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py
+++ b/configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py
@@ -38,7 +38,6 @@
     dict(type='DecordDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -55,7 +54,6 @@
     dict(type='DecordDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb.py b/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb.py
index 8077819a1d..311216fd9c 100644
--- a/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb.py
+++ b/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb.py
@@ -36,7 +36,6 @@
         test_mode=True),
     dict(type='DecordDecode'),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -52,7 +51,6 @@
         test_mode=True),
     dict(type='DecordDecode'),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb.py b/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb.py
index e6052656e1..79edfb6db2 100644
--- a/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb.py
+++ b/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb.py
@@ -35,7 +35,6 @@
         test_mode=True),
     dict(type='DecordDecode'),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -51,7 +50,6 @@
         test_mode=True),
     dict(type='DecordDecode'),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCTHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb.py b/configs/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb.py
index b41f018eec..9277fe9ac8 100644
--- a/configs/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb.py
+++ b/configs/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb.py
@@ -42,7 +42,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsm/tsm_mobilenetv2_video_dense_1x1x8_100e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_mobilenetv2_video_dense_1x1x8_100e_kinetics400_rgb.py
index e0a3c4873b..b62990f5b9 100644
--- a/configs/recognition/tsm/tsm_mobilenetv2_video_dense_1x1x8_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsm/tsm_mobilenetv2_video_dense_1x1x8_100e_kinetics400_rgb.py
@@ -43,7 +43,6 @@
     dict(type='DecordDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -60,7 +59,6 @@
     dict(type='DecordDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsm/tsm_mobilenetv2_video_inference_dense_1x1x8_100e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_mobilenetv2_video_inference_dense_1x1x8_100e_kinetics400_rgb.py
index a66c772a9b..71b9ed8f52 100644
--- a/configs/recognition/tsm/tsm_mobilenetv2_video_inference_dense_1x1x8_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsm/tsm_mobilenetv2_video_inference_dense_1x1x8_100e_kinetics400_rgb.py
@@ -17,7 +17,6 @@
     dict(type='DecordDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb.py
index a85104da17..f04cba0e46 100644
--- a/configs/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb.py
+++ b/configs/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb.py
@@ -50,7 +50,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -66,7 +65,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb.py
index c501dcd6d0..f7d7360c0f 100644
--- a/configs/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb.py
+++ b/configs/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb.py
@@ -50,7 +50,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -66,7 +65,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb.py
index 8713150fda..5770e50c92 100644
--- a/configs/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb.py
+++ b/configs/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb.py
@@ -50,7 +50,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -66,7 +65,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py
index c53bee6c09..cf067d6728 100644
--- a/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py
+++ b/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py
@@ -43,7 +43,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -59,7 +58,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py
index ec8ef9678e..2e0a0520ac 100644
--- a/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py
+++ b/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py
@@ -40,7 +40,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -56,7 +55,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py
index 5f81caa280..96c050633c 100644
--- a/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py
@@ -40,7 +40,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -56,7 +55,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py
index 803d793223..9b600feda8 100644
--- a/configs/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py
+++ b/configs/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py
@@ -48,7 +48,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
     dict(type='ToTensor', keys=['imgs'])
@@ -63,7 +62,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
     dict(type='ToTensor', keys=['imgs'])
diff --git a/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_kinetics400_rgb.py
index 0a172d0205..9cc6fc34fc 100644
--- a/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_kinetics400_rgb.py
+++ b/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_kinetics400_rgb.py
@@ -42,7 +42,6 @@
     dict(type='DecordDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -59,7 +58,6 @@
     dict(type='DecordDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.py
index f020987996..41f1257364 100644
--- a/configs/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.py
@@ -50,7 +50,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -66,7 +65,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.py
index 9b7674f619..e0d249f63f 100644
--- a/configs/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.py
@@ -57,7 +57,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -73,7 +72,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py
index 815ef38d0f..296aa194b1 100644
--- a/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py
@@ -51,7 +51,6 @@
     dict(type='DecordDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -68,7 +67,6 @@
     dict(type='DecordDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='TenCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_256p_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_256p_1x1x3_100e_kinetics400_rgb.py
index a6ba9671e1..10e74c3791 100644
--- a/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_256p_1x1x3_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_256p_1x1x3_100e_kinetics400_rgb.py
@@ -40,7 +40,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -56,7 +55,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='TenCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb.py
index 801e54aef3..ab6a31d04d 100644
--- a/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb.py
@@ -40,7 +40,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -56,7 +55,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='TenCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_340x256_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_340x256_1x1x3_100e_kinetics400_rgb.py
index b0c75385ff..6ab7806e35 100644
--- a/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_340x256_1x1x3_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_340x256_1x1x3_100e_kinetics400_rgb.py
@@ -40,7 +40,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -56,7 +55,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='TenCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_256p_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_256p_1x1x3_100e_kinetics400_rgb.py
index cd54379cb0..061cc6db3d 100644
--- a/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_256p_1x1x3_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_256p_1x1x3_100e_kinetics400_rgb.py
@@ -35,7 +35,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -51,7 +50,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_320p_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_320p_1x1x3_100e_kinetics400_rgb.py
index e439955240..957dd20165 100644
--- a/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_320p_1x1x3_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_320p_1x1x3_100e_kinetics400_rgb.py
@@ -35,7 +35,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -51,7 +50,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb.py
index 89fa11011a..02dccc7f3d 100644
--- a/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb.py
@@ -35,7 +35,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -51,7 +50,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='TenCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/data_benchmark/tsn_r50_test_256p_1x1x25_10crop_100e_kinetics400_rgb.py b/configs/recognition/tsn/data_benchmark/tsn_r50_test_256p_1x1x25_10crop_100e_kinetics400_rgb.py
index 90b73fafa1..378572d72f 100644
--- a/configs/recognition/tsn/data_benchmark/tsn_r50_test_256p_1x1x25_10crop_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/data_benchmark/tsn_r50_test_256p_1x1x25_10crop_100e_kinetics400_rgb.py
@@ -16,7 +16,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='TenCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/data_benchmark/tsn_r50_test_256p_1x1x25_3crop_100e_kinetics400_rgb.py b/configs/recognition/tsn/data_benchmark/tsn_r50_test_256p_1x1x25_3crop_100e_kinetics400_rgb.py
index df17b6c46d..4f9f39073c 100644
--- a/configs/recognition/tsn/data_benchmark/tsn_r50_test_256p_1x1x25_3crop_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/data_benchmark/tsn_r50_test_256p_1x1x25_3crop_100e_kinetics400_rgb.py
@@ -16,7 +16,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/data_benchmark/tsn_r50_test_320p_1x1x25_10crop_100e_kinetics400_rgb.py b/configs/recognition/tsn/data_benchmark/tsn_r50_test_320p_1x1x25_10crop_100e_kinetics400_rgb.py
index f23941648c..fa84d042d5 100644
--- a/configs/recognition/tsn/data_benchmark/tsn_r50_test_320p_1x1x25_10crop_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/data_benchmark/tsn_r50_test_320p_1x1x25_10crop_100e_kinetics400_rgb.py
@@ -16,7 +16,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='TenCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/data_benchmark/tsn_r50_test_320p_1x1x25_3crop_100e_kinetics400_rgb.py b/configs/recognition/tsn/data_benchmark/tsn_r50_test_320p_1x1x25_3crop_100e_kinetics400_rgb.py
index b7a8986ae0..8f37ff246c 100644
--- a/configs/recognition/tsn/data_benchmark/tsn_r50_test_320p_1x1x25_3crop_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/data_benchmark/tsn_r50_test_320p_1x1x25_3crop_100e_kinetics400_rgb.py
@@ -16,7 +16,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/data_benchmark/tsn_r50_test_340x256_1x1x25_10crop_100e_kinetics400_rgb.py b/configs/recognition/tsn/data_benchmark/tsn_r50_test_340x256_1x1x25_10crop_100e_kinetics400_rgb.py
index 0327c2c81d..eb38dc9f29 100644
--- a/configs/recognition/tsn/data_benchmark/tsn_r50_test_340x256_1x1x25_10crop_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/data_benchmark/tsn_r50_test_340x256_1x1x25_10crop_100e_kinetics400_rgb.py
@@ -16,7 +16,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='TenCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/data_benchmark/tsn_r50_test_340x256_1x1x25_3crop_100e_kinetics400_rgb.py b/configs/recognition/tsn/data_benchmark/tsn_r50_test_340x256_1x1x25_3crop_100e_kinetics400_rgb.py
index 3ff56bfb71..95584c8ef6 100644
--- a/configs/recognition/tsn/data_benchmark/tsn_r50_test_340x256_1x1x25_3crop_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/data_benchmark/tsn_r50_test_340x256_1x1x25_3crop_100e_kinetics400_rgb.py
@@ -16,7 +16,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_action_rgb.py b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_action_rgb.py
index 4d6c02dbbe..067063dfd6 100644
--- a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_action_rgb.py
+++ b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_action_rgb.py
@@ -48,7 +48,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
    
 dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -64,7 +63,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_attribute_rgb.py b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_attribute_rgb.py
index dead102b3c..2e8369a79e 100644
--- a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_attribute_rgb.py
+++ b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_attribute_rgb.py
@@ -48,7 +48,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -64,7 +63,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_concept_rgb.py b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_concept_rgb.py
index 66fa7b6743..4f33d7706b 100644
--- a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_concept_rgb.py
+++ b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_concept_rgb.py
@@ -48,7 +48,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -64,7 +63,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_event_rgb.py b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_event_rgb.py
index 867d548161..be910fa5f4 100644
--- a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_event_rgb.py
+++ b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_event_rgb.py
@@ -48,7 +48,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -64,7 +63,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_object_rgb.py b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_object_rgb.py
index 66fa7b6743..4f33d7706b 100644
--- a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_object_rgb.py
+++ b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_object_rgb.py
@@ -48,7 +48,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -64,7 +63,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_scene_rgb.py b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_scene_rgb.py
index b571775b39..fd088f8f82 100644
--- a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_scene_rgb.py
+++ b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_scene_rgb.py
@@ -48,7 +48,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -64,7 +63,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/tsn_fp16_r50_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_fp16_r50_1x1x3_100e_kinetics400_rgb.py
index 2f7738a5c0..ca96c1b6d2 100644
--- a/configs/recognition/tsn/tsn_fp16_r50_1x1x3_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/tsn_fp16_r50_1x1x3_100e_kinetics400_rgb.py
@@ -39,7 +39,6 @@
     dict(type='FrameSelector'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -55,7 +54,6 @@
     dict(type='FrameSelector'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='TenCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb.py b/configs/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb.py
index 8ec42d707a..f6a938a938 100644
--- a/configs/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb.py
+++ b/configs/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb.py
@@ -61,7 +61,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -77,7 +76,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='MultiGroupCrop', crop_size=256, groups=1),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb.py b/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb.py
index 7d2f8aebee..410dbe6b1b 100644
--- a/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb.py
+++ b/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb.py
@@ -23,7 +23,6 @@
         random_crop=False,
         max_wh_scale_gap=1),
     dict(type='Resize', scale=(224, 224), keep_ratio=False),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py
index f2378c7b6e..3705de2a2c 100644
--- a/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py
@@ -39,7 +39,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -55,7 +54,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='TenCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb.py b/configs/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb.py
index 816bac874f..0c98df7039 100644
--- a/configs/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb.py
+++ b/configs/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb.py
@@ -35,7 +35,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -51,7 +50,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb.py b/configs/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb.py
index b3c6cb75e5..de706a4278 100644
--- a/configs/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb.py
+++ b/configs/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb.py
@@ -42,7 +42,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -58,7 +57,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb.py b/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb.py
index 3bf0c10e08..9b92b5cfad 100644
--- a/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb.py
+++ b/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb.py
@@ -39,7 +39,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -55,7 +54,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb.py
index b23a39e8f9..93588f034f 100644
--- a/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb.py
+++ b/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb.py
@@ -39,7 +39,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -55,7 +54,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb.py b/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb.py
index c01a744ad3..51fb545379 100644
--- a/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb.py
+++ b/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb.py
@@ -39,7 +39,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -55,7 +54,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb.py b/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb.py
index df0dc367f9..54bfb8fb59 100644
--- a/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb.py
+++ b/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb.py
@@ -45,7 +45,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -61,7 +60,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='TenCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb.py b/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb.py
index 8c6e98e0cc..b9035e12b8 100644
--- a/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb.py
+++ b/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb.py
@@ -26,7 +26,6 @@
         random_crop=False,
         max_wh_scale_gap=1),
     dict(type='Resize', scale=(224, 224), keep_ratio=False),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -37,7 +36,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -48,7 +46,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='TenCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb.py
index b33881c57e..5ca6bf89d8 100644
--- a/configs/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb.py
@@ -31,7 +31,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -47,7 +46,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow.py b/configs/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow.py
index ac2a41f5c6..00b18daf9b 100644
--- a/configs/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow.py
+++ b/configs/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow.py
@@ -34,7 +34,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW_Flow'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -50,7 +49,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='TenCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW_Flow'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb.py
index d9a9c32896..b3b341baa9 100644
--- a/configs/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb.py
@@ -34,7 +34,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -50,7 +49,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow.py b/configs/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow.py
index 24b84659ed..710416878a 100644
--- a/configs/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow.py
+++ b/configs/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow.py
@@ -34,7 +34,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW_Flow'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -50,7 +49,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='TenCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW_Flow'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow.py b/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow.py
index 9dd380e517..ffe4bce362 100644
--- a/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow.py
+++ b/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow.py
@@ -36,7 +36,6 @@
     dict(type='FrameSelector'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW_Flow'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -52,7 +51,6 @@
     dict(type='FrameSelector'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='TenCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW_Flow'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow.py b/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow.py
index 823cf2d39c..3d4834947d 100644
--- a/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow.py
+++ b/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow.py
@@ -36,7 +36,6 @@
     dict(type='FrameSelector'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW_Flow'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -52,7 +51,6 @@
     dict(type='FrameSelector'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='TenCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW_Flow'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb.py b/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb.py
index 21405e77b1..c45ea446e0 100644
--- a/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb.py
+++ b/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb.py
@@ -36,7 +36,6 @@
     dict(type='FrameSelector'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -52,7 +51,6 @@
     dict(type='FrameSelector'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb.py b/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb.py
index 26908f1c0c..01e0b6ea89 100644
--- a/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb.py
+++ b/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb.py
@@ -36,7 +36,6 @@
     dict(type='FrameSelector'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -52,7 +51,6 @@
     dict(type='FrameSelector'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb.py
index 1277a7870c..fa462447fe 100644
--- a/configs/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb.py
@@ -43,7 +43,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -59,7 +58,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb.py
index b32f73071f..a177a0035f 100644
--- a/configs/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb.py
@@ -40,7 +40,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -56,7 +55,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/tsn_r50_inference_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_inference_1x1x3_100e_kinetics400_rgb.py
index 00b764a5bc..2a9594e27c 100644
--- a/configs/recognition/tsn/tsn_r50_inference_1x1x3_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/tsn_r50_inference_1x1x3_100e_kinetics400_rgb.py
@@ -14,7 +14,6 @@
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs'], meta_keys=[]),
diff --git a/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py
index c0f464671f..bec1b85f13 100644
--- a/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py
@@ -40,7 +40,6 @@
     dict(type='DecordDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -57,7 +56,6 @@
     dict(type='DecordDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb.py b/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb.py
index 97ac75f0e5..80d54e2a54 100644
--- a/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb.py
+++ b/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb.py
@@ -38,7 +38,6 @@
     dict(type='DecordDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -55,7 +54,6 @@
     dict(type='DecordDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb.py b/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb.py
index 7bd12e3bb1..00e47c6431 100644
--- a/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb.py
+++ b/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb.py
@@ -38,7 +38,6 @@
     dict(type='DecordDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -55,7 +54,6 @@
     dict(type='DecordDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
diff --git a/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py
index 70d46ce44c..862d287899 100644
--- a/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py
@@ -35,7 +35,6 @@
     dict(type='DecordDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
-    dict(type='Flip', flip_ratio=0),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect',
keys=['imgs', 'label'], meta_keys=[]), @@ -52,7 +51,6 @@ dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='TenCrop', crop_size=224), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/configs/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb.py index 0de50d51a6..824df78dc6 100644 --- a/configs/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb.py +++ b/configs/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb.py @@ -40,7 +40,6 @@ dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), @@ -57,7 +56,6 @@ dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/configs/recognition/tsn/tsn_r50_video_imgaug_1x1x8_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_video_imgaug_1x1x8_100e_kinetics400_rgb.py index 04b1901df3..fab95ceb14 100644 --- a/configs/recognition/tsn/tsn_r50_video_imgaug_1x1x8_100e_kinetics400_rgb.py +++ b/configs/recognition/tsn/tsn_r50_video_imgaug_1x1x8_100e_kinetics400_rgb.py @@ -61,7 +61,6 @@ dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), @@ -78,7 +77,6 @@ dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/configs/recognition/tsn/tsn_r50_video_mixup_1x1x8_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_video_mixup_1x1x8_100e_kinetics400_rgb.py index 6df826a9e1..bef969ad45 100644 --- a/configs/recognition/tsn/tsn_r50_video_mixup_1x1x8_100e_kinetics400_rgb.py +++ b/configs/recognition/tsn/tsn_r50_video_mixup_1x1x8_100e_kinetics400_rgb.py @@ -60,7 +60,6 @@ dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), @@ -77,7 +76,6 @@ dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/docs/tutorials/1_config.md b/docs/tutorials/1_config.md index af4e27139e..0133ca9ce9 100644 --- a/docs/tutorials/1_config.md +++ b/docs/tutorials/1_config.md @@ -715,7 +715,6 @@ val_pipeline = [ dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', 
crop_size=224), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), @@ -731,7 +730,6 @@ test_pipeline = [ dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/docs/tutorials/4_data_pipeline.md b/docs/tutorials/4_data_pipeline.md index 463046929b..af143301e5 100644 --- a/docs/tutorials/4_data_pipeline.md +++ b/docs/tutorials/4_data_pipeline.md @@ -63,7 +63,6 @@ val_pipeline = [ dict(type='RawFrameDecode', io_backend='disk'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), @@ -79,7 +78,6 @@ test_pipeline = [ dict(type='RawFrameDecode', io_backend='disk'), dict(type='Resize', scale=(-1, 256)), dict(type='TenCrop', crop_size=224), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/docs_zh_CN/tutorials/1_config.md b/docs_zh_CN/tutorials/1_config.md index 22c1489687..a59d765786 100644 --- a/docs_zh_CN/tutorials/1_config.md +++ b/docs_zh_CN/tutorials/1_config.md @@ -706,7 +706,6 @@ val_pipeline = [ dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), @@ -722,7 +721,6 @@ test_pipeline = [ dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/docs_zh_CN/tutorials/4_data_pipeline.md b/docs_zh_CN/tutorials/4_data_pipeline.md index 21e97ee6b7..611d1143c7 100644 --- a/docs_zh_CN/tutorials/4_data_pipeline.md +++ b/docs_zh_CN/tutorials/4_data_pipeline.md @@ -60,7 +60,6 @@ val_pipeline = [ dict(type='RawFrameDecode', io_backend='disk'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), @@ -76,7 +75,6 @@ test_pipeline = [ dict(type='RawFrameDecode', io_backend='disk'), dict(type='Resize', scale=(-1, 256)), dict(type='TenCrop', crop_size=224), - dict(type='Flip', flip_ratio=0), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/requirements/docs.txt b/requirements/docs.txt index a87770c3ba..1e973b21e9 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,7 +1,7 @@ einops -scipy opencv-python recommonmark +scipy sphinx sphinx_markdown_tables sphinx_rtd_theme From 36f9dcd45cca135b5414c90c697056877e1aee5f Mon Sep 17 00:00:00 2001 From: gengenkai 
<30782254+gengenkai@users.noreply.github.com>
Date: Tue, 6 Jul 2021 22:49:00 +0800
Subject: [PATCH 193/414] [Feature] Support persistent_workers in dataloader
 (#998)

* fix LooseVersion

* fix LooseVersion 0706

* 0706
---
 .pre-commit-config.yaml      |  2 +-
 mmaction/datasets/builder.py | 11 +++++++++++
 2 files changed, 12 insertions(+), 1 deletion(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 8310e7efd3..702fec4d1b 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 exclude: ^tests/data/
 repos:
-  - repo: https://gitlab.com/pycqa/flake8
+  - repo: https://gitlab.com/pycqa/flake8.git
     rev: 3.8.3
     hooks:
       - id: flake8
diff --git a/mmaction/datasets/builder.py b/mmaction/datasets/builder.py
index 267c323b0f..1902b907bd 100644
--- a/mmaction/datasets/builder.py
+++ b/mmaction/datasets/builder.py
@@ -1,8 +1,10 @@
 import platform
 import random
+from distutils.version import LooseVersion
 from functools import partial
 
 import numpy as np
+import torch
 from mmcv.parallel import collate
 from mmcv.runner import get_dist_info
 from mmcv.utils import Registry, build_from_cfg
@@ -52,6 +54,7 @@ def build_dataloader(dataset,
                      seed=None,
                      drop_last=False,
                      pin_memory=True,
+                     persistent_workers=True,
                      **kwargs):
     """Build PyTorch DataLoader.
 
@@ -74,6 +77,11 @@ def build_dataloader(dataset,
             Default: False
         pin_memory (bool): Whether to use pin_memory in DataLoader.
             Default: True
+        persistent_workers (bool): If True, the data loader will not shut
+            down the worker processes after a dataset has been consumed
+            once, which keeps the worker Dataset instances alive.
+            The argument only takes effect when PyTorch>=1.7.0.
+            Default: True
         kwargs (dict, optional): Any keyword argument to be used to initialize
             DataLoader.
 
@@ -108,6 +116,9 @@ def build_dataloader(dataset,
         worker_init_fn, num_workers=num_workers, rank=rank,
         seed=seed) if seed is not None else None
 
+    if LooseVersion(torch.__version__) >= LooseVersion('1.7.0'):
+        kwargs['persistent_workers'] = persistent_workers
+
     data_loader = DataLoader(
         dataset,
         batch_size=batch_size,
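A hedged usage sketch of the new flag (not part of the patch; the dataset config, file paths, and batch settings below are placeholders). Because the version check above silently drops the keyword on PyTorch < 1.7.0, callers can pass it unconditionally and only need to touch it to opt out of the new keep-workers-alive default:

    from mmaction.datasets import build_dataloader, build_dataset

    # Placeholder dataset config -- any registered dataset type works here.
    dataset = build_dataset(
        dict(
            type='VideoDataset',
            ann_file='data/train_list.txt',  # hypothetical annotation file
            data_prefix='data/videos',       # hypothetical video root
            pipeline=[]))

    # persistent_workers=False restores the old behaviour of tearing the
    # worker processes down (and re-creating their Dataset copies) after
    # every epoch; the new default True keeps them alive across epochs.
    data_loader = build_dataloader(
        dataset,
        videos_per_gpu=8,
        workers_per_gpu=4,
        num_gpus=1,
        dist=False,
        persistent_workers=False)
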
From e46018305ac5f49db3a9aff21350648d4e151ef1 Mon Sep 17 00:00:00 2001
From: Haodong Duan
Date: Wed, 7 Jul 2021 16:00:36 +0800
Subject: [PATCH 194/414] [Feature] Support ConcatDataset (#1000)

* support ConcatDataset

* fix unittest

* fix bug
---
 mmaction/datasets/__init__.py            |  4 +-
 mmaction/datasets/builder.py             |  7 +--
 mmaction/datasets/dataset_wrappers.py    | 48 +++++++++++++++++--
 .../test_datasets/test_concat_dataset.py | 33 +++++++++++++
 .../test_datasets/test_repeat_dataset.py | 13 +++--
 5 files changed, 88 insertions(+), 17 deletions(-)
 create mode 100644 tests/test_data/test_datasets/test_concat_dataset.py

diff --git a/mmaction/datasets/__init__.py b/mmaction/datasets/__init__.py
index c4b10e53e5..ecd1be57c0 100644
--- a/mmaction/datasets/__init__.py
+++ b/mmaction/datasets/__init__.py
@@ -8,7 +8,7 @@
                        MixupBlending)
 from .builder import (BLENDINGS, DATASETS, PIPELINES, build_dataloader,
                       build_dataset)
-from .dataset_wrappers import RepeatDataset
+from .dataset_wrappers import ConcatDataset, RepeatDataset
 from .hvu_dataset import HVUDataset
 from .image_dataset import ImageDataset
 from .pose_dataset import PoseDataset
@@ -23,5 +23,5 @@
     'HVUDataset', 'AudioDataset', 'AudioFeatureDataset', 'ImageDataset',
     'RawVideoDataset', 'AVADataset', 'AudioVisualDataset',
     'BaseMiniBatchBlending', 'CutmixBlending', 'MixupBlending', 'DATASETS',
-    'PIPELINES', 'BLENDINGS', 'PoseDataset'
+    'PIPELINES', 'BLENDINGS', 'PoseDataset', 'ConcatDataset'
 ]
diff --git a/mmaction/datasets/builder.py b/mmaction/datasets/builder.py
index 1902b907bd..20d7bf728f 100644
--- a/mmaction/datasets/builder.py
+++ b/mmaction/datasets/builder.py
@@ -36,12 +36,7 @@ def build_dataset(cfg, default_args=None):
     Returns:
         Dataset: The constructed dataset.
     """
-    if cfg['type'] == 'RepeatDataset':
-        from .dataset_wrappers import RepeatDataset
-        dataset = RepeatDataset(
-            build_dataset(cfg['dataset'], default_args), cfg['times'])
-    else:
-        dataset = build_from_cfg(cfg, DATASETS, default_args)
+    dataset = build_from_cfg(cfg, DATASETS, default_args)
     return dataset
diff --git a/mmaction/datasets/dataset_wrappers.py b/mmaction/datasets/dataset_wrappers.py
index ecb7609b24..dd00b9f32d 100644
--- a/mmaction/datasets/dataset_wrappers.py
+++ b/mmaction/datasets/dataset_wrappers.py
@@ -1,4 +1,6 @@
-from .builder import DATASETS
+import numpy as np
+
+from .builder import DATASETS, build_dataset
 
 
 @DATASETS.register_module()
@@ -11,12 +13,15 @@ class RepeatDataset:
     epochs.
 
     Args:
-        dataset (:obj:`Dataset`): The dataset to be repeated.
+        dataset (dict): The config of the dataset to be repeated.
         times (int): Repeat times.
+        test_mode (bool): Set True when building a test or validation
+            dataset. Default: False.
     """
 
-    def __init__(self, dataset, times):
-        self.dataset = dataset
+    def __init__(self, dataset, times, test_mode=False):
+        dataset['test_mode'] = test_mode
+        self.dataset = build_dataset(dataset)
         self.times = times
         self._ori_len = len(self.dataset)
 
@@ -28,3 +33,38 @@ def __getitem__(self, idx):
         """Length after repetition."""
         return self.times * self._ori_len
+
+
+@DATASETS.register_module()
+class ConcatDataset:
+    """A wrapper of concatenated dataset.
+
+    The length of the concatenated dataset is the sum of the lengths of
+    all datasets. This is useful when you want to train a model with
+    multiple data sources.
+
+    Args:
+        datasets (list[dict]): The configs of the datasets.
+        test_mode (bool): Set True when building a test or validation
+            dataset. Default: False.
+    """
+
+    def __init__(self, datasets, test_mode=False):
+
+        for item in datasets:
+            item['test_mode'] = test_mode
+
+        datasets = [build_dataset(cfg) for cfg in datasets]
+        self.datasets = datasets
+        self.lens = [len(x) for x in self.datasets]
+        self.cumsum = np.cumsum(self.lens)
+
+    def __getitem__(self, idx):
+        """Get data."""
+        dataset_idx = np.searchsorted(self.cumsum, idx, side='right')
+        item_idx = idx if dataset_idx == 0 else idx - self.cumsum[dataset_idx - 1]
+        return self.datasets[dataset_idx][item_idx]
+
+    def __len__(self):
+        """Length of the concatenated dataset."""
+        return sum(self.lens)
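A hedged config-level sketch of how the new wrapper is meant to be used (not from the patch; the dataset types, annotation files, and `train_pipeline` are placeholders assumed to be defined elsewhere in a config). Like the reworked `RepeatDataset`, it takes dataset configs rather than built dataset objects. Internally, with `cumsum = [len_a, len_a + len_b]`, a global index `idx >= len_a` resolves to the second dataset at position `idx - cumsum[0]`:

    # Hypothetical training entry mixing two annotation sources.
    data = dict(
        train=dict(
            type='ConcatDataset',
            datasets=[
                dict(
                    type='VideoDataset',
                    ann_file='data/source_a_train.txt',
                    data_prefix='data/source_a',
                    pipeline=train_pipeline),
                dict(
                    type='VideoDataset',
                    ann_file='data/source_b_train.txt',
                    data_prefix='data/source_b',
                    pipeline=train_pipeline),
            ]))
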
+ """ + + def __init__(self, datasets, test_mode=False): + + for item in datasets: + item['test_mode'] = test_mode + + datasets = [build_dataset(cfg) for cfg in datasets] + self.datasets = datasets + self.lens = [len(x) for x in self.datasets] + self.cumsum = np.cumsum(self.lens) + + def __getitem__(self, idx): + """Get data.""" + dataset_idx = np.searchsorted(self.cumsum, idx, side='right') + item_idx = idx if dataset_idx == 0 else idx - self.cumsum[dataset_idx] + return self.datasets[dataset_idx][item_idx] + + def __len__(self): + """Length after repetition.""" + return sum(self.lens) diff --git a/tests/test_data/test_datasets/test_concat_dataset.py b/tests/test_data/test_datasets/test_concat_dataset.py new file mode 100644 index 0000000000..062037eb13 --- /dev/null +++ b/tests/test_data/test_datasets/test_concat_dataset.py @@ -0,0 +1,33 @@ +import numpy as np + +from mmaction.datasets import ConcatDataset +from .base import BaseTestDataset + + +class TestConcatDataset(BaseTestDataset): + + def test_concat_dataset(self): + dataset_cfg = dict( + type='RawframeDataset', + ann_file=self.frame_ann_file, + pipeline=self.frame_pipeline, + data_prefix=self.data_prefix) + repeat_dataset_cfg = dict( + type='RepeatDataset', times=2, dataset=dataset_cfg) + + concat_dataset = ConcatDataset( + datasets=[dataset_cfg, repeat_dataset_cfg]) + + assert len(concat_dataset) == 6 + result_a = concat_dataset[0] + result_b = concat_dataset[4] + assert set(result_a) == set(result_b) + for key in result_a: + if isinstance(result_a[key], np.ndarray): + assert np.equal(result_a[key], result_b[key]).all() + elif isinstance(result_a[key], list): + assert all( + np.array_equal(a, b) + for (a, b) in zip(result_a[key], result_b[key])) + else: + assert result_a[key] == result_b[key] diff --git a/tests/test_data/test_datasets/test_repeat_dataset.py b/tests/test_data/test_datasets/test_repeat_dataset.py index 9729b7af7a..5aa42ec649 100644 --- a/tests/test_data/test_datasets/test_repeat_dataset.py +++ b/tests/test_data/test_datasets/test_repeat_dataset.py @@ -1,16 +1,19 @@ import numpy as np -from mmaction.datasets import RawframeDataset, RepeatDataset +from mmaction.datasets import RepeatDataset from .base import BaseTestDataset class TestRepeatDataset(BaseTestDataset): def test_repeat_dataset(self): - rawframe_dataset = RawframeDataset(self.frame_ann_file, - self.frame_pipeline, - self.data_prefix) - repeat_dataset = RepeatDataset(rawframe_dataset, 5) + dataset_cfg = dict( + type='RawframeDataset', + ann_file=self.frame_ann_file, + pipeline=self.frame_pipeline, + data_prefix=self.data_prefix) + + repeat_dataset = RepeatDataset(dataset_cfg, 5) assert len(repeat_dataset) == 10 result_a = repeat_dataset[0] result_b = repeat_dataset[2] From 5f11ef2bcc84eee03e0f52bcdececb62c93fda54 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Sun, 11 Jul 2021 19:57:41 +0800 Subject: [PATCH 195/414] [Fix] Remove frame selector (#1010) * remove frame selector * fix --- ...frozen_r152_32x2x1_180e_kinetics400_rgb.py | 6 ++--- ...frozen_r152_32x2x1_180e_kinetics400_rgb.py | 6 ++--- ...nfrozen_r50_32x2x1_180e_kinetics400_rgb.py | 6 ++--- ...nfrozen_r152_32x2x1_58e_kinetics400_rgb.py | 6 ++--- ...bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py | 6 ++--- ...trained_r152_32x2x1_58e_kinetics400_rgb.py | 6 ++--- .../tin/tin_r50_1x1x8_40e_sthv1_rgb.py | 6 ++--- ...ed_slowonly_r50_8x8x1_150e_kinetics_rgb.py | 6 ++--- .../tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py | 6 ++--- ...tsn_fp16_r50_1x1x3_100e_kinetics400_rgb.py | 6 ++--- 
...0_320p_1x1x8_150e_activitynet_clip_flow.py | 6 ++--- ..._320p_1x1x8_150e_activitynet_video_flow.py | 6 ++--- ...r50_320p_1x1x8_50e_activitynet_clip_rgb.py | 6 ++--- ...50_320p_1x1x8_50e_activitynet_video_rgb.py | 6 ++--- demo/long_video_demo.py | 2 +- demo/webcam_demo.py | 2 +- mmaction/datasets/pipelines/__init__.py | 22 +++++++++---------- mmaction/datasets/pipelines/loading.py | 10 --------- .../test_loadings/test_decode.py | 15 ++++--------- .../activitynet/tsn_feature_extraction.py | 2 +- 20 files changed, 60 insertions(+), 77 deletions(-) diff --git a/configs/recognition/csn/ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py b/configs/recognition/csn/ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py index 8d352419d8..1a5b54c056 100644 --- a/configs/recognition/csn/ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py +++ b/configs/recognition/csn/ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py @@ -17,7 +17,7 @@ mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) train_pipeline = [ dict(type='SampleFrames', clip_len=32, frame_interval=2, num_clips=1), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='RandomResizedCrop'), dict(type='Resize', scale=(224, 224), keep_ratio=False), @@ -34,7 +34,7 @@ frame_interval=2, num_clips=1, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), dict(type='Normalize', **img_norm_cfg), @@ -49,7 +49,7 @@ frame_interval=2, num_clips=10, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/recognition/csn/ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py b/configs/recognition/csn/ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py index ecc41f1451..8ce5fb5180 100644 --- a/configs/recognition/csn/ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py +++ b/configs/recognition/csn/ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py @@ -17,7 +17,7 @@ mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) train_pipeline = [ dict(type='SampleFrames', clip_len=32, frame_interval=2, num_clips=1), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='RandomResizedCrop'), dict(type='Resize', scale=(224, 224), keep_ratio=False), @@ -34,7 +34,7 @@ frame_interval=2, num_clips=1, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), dict(type='Normalize', **img_norm_cfg), @@ -49,7 +49,7 @@ frame_interval=2, num_clips=10, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb.py b/configs/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb.py index 7e3bab7f59..ebb3d92856 100644 --- a/configs/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb.py +++ b/configs/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb.py @@ -21,7 +21,7 @@ mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) train_pipeline = [ dict(type='SampleFrames', clip_len=32, frame_interval=2, num_clips=1), - 
dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='RandomResizedCrop'), dict(type='Resize', scale=(224, 224), keep_ratio=False), @@ -38,7 +38,7 @@ frame_interval=2, num_clips=1, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), dict(type='Normalize', **img_norm_cfg), @@ -53,7 +53,7 @@ frame_interval=2, num_clips=10, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py b/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py index db97c917f5..d25736ba76 100644 --- a/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py +++ b/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py @@ -21,7 +21,7 @@ mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) train_pipeline = [ dict(type='SampleFrames', clip_len=32, frame_interval=2, num_clips=1), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='RandomResizedCrop'), dict(type='Resize', scale=(224, 224), keep_ratio=False), @@ -38,7 +38,7 @@ frame_interval=2, num_clips=1, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), dict(type='Normalize', **img_norm_cfg), @@ -53,7 +53,7 @@ frame_interval=2, num_clips=10, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py b/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py index d6110a4a83..9e39011374 100644 --- a/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py +++ b/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py @@ -22,7 +22,7 @@ mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) train_pipeline = [ dict(type='SampleFrames', clip_len=32, frame_interval=2, num_clips=1), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='RandomResizedCrop'), dict(type='Resize', scale=(224, 224), keep_ratio=False), @@ -39,7 +39,7 @@ frame_interval=2, num_clips=1, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), dict(type='Normalize', **img_norm_cfg), @@ -54,7 +54,7 @@ frame_interval=2, num_clips=10, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py b/configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py index 67b371233f..eba08ca20b 100644 --- a/configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py +++ 
b/configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py @@ -19,7 +19,7 @@ mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) train_pipeline = [ dict(type='SampleFrames', clip_len=32, frame_interval=2, num_clips=1), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='RandomResizedCrop'), dict(type='Resize', scale=(224, 224), keep_ratio=False), @@ -36,7 +36,7 @@ frame_interval=2, num_clips=1, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), dict(type='Normalize', **img_norm_cfg), @@ -51,7 +51,7 @@ frame_interval=2, num_clips=10, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py b/configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py index 7ae1672dca..f6bcf82807 100644 --- a/configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py +++ b/configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py @@ -14,7 +14,7 @@ mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) train_pipeline = [ dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict( type='MultiScaleCrop', @@ -35,7 +35,7 @@ frame_interval=1, num_clips=8, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), dict(type='Normalize', **img_norm_cfg), @@ -50,7 +50,7 @@ frame_interval=1, num_clips=8, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.py b/configs/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.py index 2240766199..fb4340f19e 100644 --- a/configs/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.py +++ b/configs/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.py @@ -13,7 +13,7 @@ mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) train_pipeline = [ dict(type='SampleFrames', clip_len=8, frame_interval=8, num_clips=1), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='RandomResizedCrop'), dict(type='Resize', scale=(224, 224), keep_ratio=False), dict(type='Flip', flip_ratio=0.5), @@ -30,7 +30,7 @@ frame_interval=8, num_clips=1, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), dict(type='ColorJitter', color_space_aug=True), @@ -46,7 +46,7 @@ frame_interval=8, num_clips=10, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py b/configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py index 12602ae8da..32c108e7e6 100644 --- a/configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py +++ 
b/configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py @@ -13,7 +13,7 @@ mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) train_pipeline = [ dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='RandomResizedCrop'), dict(type='Resize', scale=(224, 224), keep_ratio=False), dict(type='Flip', flip_ratio=0.5), @@ -30,7 +30,7 @@ frame_interval=1, num_clips=8, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), dict(type='Normalize', **img_norm_cfg), @@ -46,7 +46,7 @@ num_clips=8, twice_sample=True, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/recognition/tsn/tsn_fp16_r50_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_fp16_r50_1x1x3_100e_kinetics400_rgb.py index ca96c1b6d2..a7ca319174 100644 --- a/configs/recognition/tsn/tsn_fp16_r50_1x1x3_100e_kinetics400_rgb.py +++ b/configs/recognition/tsn/tsn_fp16_r50_1x1x3_100e_kinetics400_rgb.py @@ -14,7 +14,7 @@ mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) train_pipeline = [ dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=3), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict( type='MultiScaleCrop', @@ -36,7 +36,7 @@ frame_interval=1, num_clips=3, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), dict(type='Normalize', **img_norm_cfg), @@ -51,7 +51,7 @@ frame_interval=1, num_clips=25, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='TenCrop', crop_size=224), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow.py b/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow.py index ffe4bce362..5a019ab8ac 100644 --- a/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow.py +++ b/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow.py @@ -16,7 +16,7 @@ img_norm_cfg = dict(mean=[128, 128], std=[128, 128], to_bgr=False) train_pipeline = [ dict(type='SampleFrames', clip_len=5, frame_interval=1, num_clips=8), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='RandomResizedCrop'), dict(type='Resize', scale=(224, 224), keep_ratio=False), @@ -33,7 +33,7 @@ frame_interval=1, num_clips=8, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), dict(type='Normalize', **img_norm_cfg), @@ -48,7 +48,7 @@ frame_interval=1, num_clips=25, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='TenCrop', crop_size=224), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow.py b/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow.py index 3d4834947d..f5f39ad68c 100644 --- a/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow.py +++ 
b/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow.py @@ -16,7 +16,7 @@ img_norm_cfg = dict(mean=[128, 128], std=[128, 128], to_bgr=False) train_pipeline = [ dict(type='SampleFrames', clip_len=5, frame_interval=1, num_clips=8), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='RandomResizedCrop'), dict(type='Resize', scale=(224, 224), keep_ratio=False), @@ -33,7 +33,7 @@ frame_interval=1, num_clips=8, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), dict(type='Normalize', **img_norm_cfg), @@ -48,7 +48,7 @@ frame_interval=1, num_clips=25, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='TenCrop', crop_size=224), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb.py b/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb.py index c45ea446e0..9321b6e3c4 100644 --- a/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb.py +++ b/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb.py @@ -16,7 +16,7 @@ mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) train_pipeline = [ dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='RandomResizedCrop'), dict(type='Resize', scale=(224, 224), keep_ratio=False), @@ -33,7 +33,7 @@ frame_interval=1, num_clips=8, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), @@ -48,7 +48,7 @@ frame_interval=1, num_clips=25, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb.py b/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb.py index 01e0b6ea89..6c3bcc8f29 100644 --- a/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb.py +++ b/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb.py @@ -16,7 +16,7 @@ mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) train_pipeline = [ dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='RandomResizedCrop'), dict(type='Resize', scale=(224, 224), keep_ratio=False), @@ -33,7 +33,7 @@ frame_interval=1, num_clips=8, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), @@ -48,7 +48,7 @@ frame_interval=1, num_clips=25, test_mode=True), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), diff --git a/demo/long_video_demo.py b/demo/long_video_demo.py index bc82c2ae9c..0fab771005 100644 --- a/demo/long_video_demo.py +++ b/demo/long_video_demo.py @@ -21,7 +21,7 @@ EXCLUED_STEPS = [ 'OpenCVInit', 
'OpenCVDecode', 'DecordInit', 'DecordDecode', 'PyAVInit', - 'PyAVDecode', 'RawFrameDecode', 'FrameSelector' + 'PyAVDecode', 'RawFrameDecode' ] diff --git a/demo/webcam_demo.py b/demo/webcam_demo.py index 89a926b972..4e1bb0a729 100644 --- a/demo/webcam_demo.py +++ b/demo/webcam_demo.py @@ -22,7 +22,7 @@ EXCLUED_STEPS = [ 'OpenCVInit', 'OpenCVDecode', 'DecordInit', 'DecordDecode', 'PyAVInit', - 'PyAVDecode', 'RawFrameDecode', 'FrameSelector' + 'PyAVDecode', 'RawFrameDecode' ] diff --git a/mmaction/datasets/pipelines/__init__.py b/mmaction/datasets/pipelines/__init__.py index 85d6f0ff85..aca8d9de8d 100644 --- a/mmaction/datasets/pipelines/__init__.py +++ b/mmaction/datasets/pipelines/__init__.py @@ -8,22 +8,22 @@ Rename, ToDataContainer, ToTensor, Transpose) from .loading import (AudioDecode, AudioDecodeInit, AudioFeatureSelector, BuildPseudoClip, DecordDecode, DecordInit, - DenseSampleFrames, FrameSelector, - GenerateLocalizationLabels, ImageDecode, - LoadAudioFeature, LoadHVULabel, LoadLocalizationFeature, - LoadProposals, OpenCVDecode, OpenCVInit, PIMSDecode, - PIMSInit, PyAVDecode, PyAVDecodeMotionVector, PyAVInit, - RawFrameDecode, SampleAVAFrames, SampleFrames, - SampleProposalFrames, UntrimmedSampleFrames) + DenseSampleFrames, GenerateLocalizationLabels, + ImageDecode, LoadAudioFeature, LoadHVULabel, + LoadLocalizationFeature, LoadProposals, OpenCVDecode, + OpenCVInit, PIMSDecode, PIMSInit, PyAVDecode, + PyAVDecodeMotionVector, PyAVInit, RawFrameDecode, + SampleAVAFrames, SampleFrames, SampleProposalFrames, + UntrimmedSampleFrames) from .pose_loading import (GeneratePoseTarget, LoadKineticsPose, PoseDecode, UniformSampleFrames) __all__ = [ 'SampleFrames', 'PyAVDecode', 'DecordDecode', 'DenseSampleFrames', - 'OpenCVDecode', 'FrameSelector', 'MultiGroupCrop', 'MultiScaleCrop', - 'RandomResizedCrop', 'RandomCrop', 'Resize', 'Flip', 'Fuse', 'Normalize', - 'ThreeCrop', 'CenterCrop', 'TenCrop', 'ImageToTensor', 'Transpose', - 'Collect', 'FormatShape', 'Compose', 'ToTensor', 'ToDataContainer', + 'OpenCVDecode', 'MultiGroupCrop', 'MultiScaleCrop', 'RandomResizedCrop', + 'RandomCrop', 'Resize', 'Flip', 'Fuse', 'Normalize', 'ThreeCrop', + 'CenterCrop', 'TenCrop', 'ImageToTensor', 'Transpose', 'Collect', + 'FormatShape', 'Compose', 'ToTensor', 'ToDataContainer', 'GenerateLocalizationLabels', 'LoadLocalizationFeature', 'LoadProposals', 'DecordInit', 'OpenCVInit', 'PyAVInit', 'SampleProposalFrames', 'UntrimmedSampleFrames', 'RawFrameDecode', 'DecordInit', 'OpenCVInit', diff --git a/mmaction/datasets/pipelines/loading.py b/mmaction/datasets/pipelines/loading.py index 3ce3fb70b8..79a6656c9c 100644 --- a/mmaction/datasets/pipelines/loading.py +++ b/mmaction/datasets/pipelines/loading.py @@ -1562,16 +1562,6 @@ def __repr__(self): return repr_str -@PIPELINES.register_module() -class FrameSelector(RawFrameDecode): - """Deprecated class for ``RawFrameDecode``.""" - - def __init__(self, *args, **kwargs): - warnings.warn('"FrameSelector" is deprecated, please switch to' - '"RawFrameDecode"') - super().__init__(*args, **kwargs) - - @PIPELINES.register_module() class AudioFeatureSelector: """Sample the audio feature w.r.t. the frames selected. 
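Note that the deprecated `FrameSelector` alias is deleted outright here rather than kept as a warning shim, so out-of-tree configs that still reference it will fail at registry lookup from this commit on. A minimal migration sketch (illustrative only, not part of the patch; the config directory and glob pattern are assumptions about where user configs live):

    # One-off rewrite of user-side configs, swapping the removed alias for
    # its replacement in place.
    from pathlib import Path

    for path in Path('my_configs').rglob('*.py'):  # hypothetical config dir
        text = path.read_text()
        if "type='FrameSelector'" in text:
            path.write_text(
                text.replace("type='FrameSelector'", "type='RawFrameDecode'"))
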
diff --git a/tests/test_data/test_pipelines/test_loadings/test_decode.py b/tests/test_data/test_pipelines/test_loadings/test_decode.py index 8b30fe7e72..ae86e444a3 100644 --- a/tests/test_data/test_pipelines/test_loadings/test_decode.py +++ b/tests/test_data/test_pipelines/test_loadings/test_decode.py @@ -1,15 +1,14 @@ import copy import numpy as np -import pytest from mmcv.utils import assert_dict_has_keys from mmaction.datasets.pipelines import (AudioDecode, AudioDecodeInit, DecordDecode, DecordInit, - FrameSelector, OpenCVDecode, - OpenCVInit, PIMSDecode, PIMSInit, - PyAVDecode, PyAVDecodeMotionVector, - PyAVInit, RawFrameDecode) + OpenCVDecode, OpenCVInit, PIMSDecode, + PIMSInit, PyAVDecode, + PyAVDecodeMotionVector, PyAVInit, + RawFrameDecode) from .base import BaseTestLoading @@ -308,12 +307,6 @@ def test_opencv_decode(self): assert np.shape(opencv_decode_result['imgs']) == (len( video_result['frame_inds']), 256, 340, 3) - @staticmethod - def test_rawframe_selector(): - - with pytest.warns(UserWarning): - FrameSelector(io_backend='disk') - def test_rawframe_decode(self): target_keys = ['frame_inds', 'imgs', 'original_shape', 'modality'] diff --git a/tools/data/activitynet/tsn_feature_extraction.py b/tools/data/activitynet/tsn_feature_extraction.py index 5e213cdc15..0461c013af 100644 --- a/tools/data/activitynet/tsn_feature_extraction.py +++ b/tools/data/activitynet/tsn_feature_extraction.py @@ -60,7 +60,7 @@ def main(): clip_len=args.clip_len, frame_interval=args.frame_interval, start_index=0), - dict(type='FrameSelector'), + dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=256), dict(type='Normalize', **args.img_norm_cfg), From 0a3bcb3f45fb0e1770cfe32c5a837b3a49e115b2 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Sun, 11 Jul 2021 21:02:28 +0800 Subject: [PATCH 196/414] =?UTF-8?q?=E4=BD=BF=E7=94=A8=20Colaboratory=20?= =?UTF-8?q?=E5=88=9B=E5=BB=BA?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- demo/mmaction2_tutorial.ipynb | 2315 +++++++++++++++++---------------- 1 file changed, 1181 insertions(+), 1134 deletions(-) diff --git a/demo/mmaction2_tutorial.ipynb b/demo/mmaction2_tutorial.ipynb index 82ee6cdb97..d5ec6d74da 100644 --- a/demo/mmaction2_tutorial.ipynb +++ b/demo/mmaction2_tutorial.ipynb @@ -1,1183 +1,1230 @@ { - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "colab": { - "name": "MMAction2 Tutorial.ipynb", - "provenance": [], - "collapsed_sections": [], - "toc_visible": true, - "include_colab_link": true - }, - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "accelerator": "GPU" - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "view-in-github", - "colab_type": "text" - }, - "source": [ - "\"Open" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "VcjSRFELVbNk" - }, - "source": [ - "# MMAction2 Tutorial\n", - "\n", - "Welcome to MMAction2! This is the official colab tutorial for using MMAction2. In this tutorial, you will learn\n", - "- Perform inference with a MMAction2 recognizer.\n", - "- Train a new recognizer with a new dataset.\n", - "\n", - "Let's start!" 
- ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "7LqHGkGEVqpm" - }, - "source": [ - "## Install MMAction2" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "Bf8PpPXtVvmg", + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { "colab": { - "base_uri": "https://localhost:8080/" + "name": "MMAction2 Tutorial.ipynb", + "provenance": [], + "collapsed_sections": [], + "toc_visible": true, + "include_colab_link": true }, - "outputId": "2c685a33-474b-4e71-8f98-c2533c66095e" - }, - "source": [ - "# Check nvcc version\n", - "!nvcc -V\n", - "# Check GCC version\n", - "!gcc --version" - ], - "execution_count": null, - "outputs": [ - { - "output_type": "stream", - "text": [ - "nvcc: NVIDIA (R) Cuda compiler driver\n", - "Copyright (c) 2005-2019 NVIDIA Corporation\n", - "Built on Sun_Jul_28_19:07:16_PDT_2019\n", - "Cuda compilation tools, release 10.1, V10.1.243\n", - "gcc (Ubuntu 7.5.0-3ubuntu1~18.04) 7.5.0\n", - "Copyright (C) 2017 Free Software Foundation, Inc.\n", - "This is free software; see the source for copying conditions. There is NO\n", - "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n", - "\n" - ], - "name": "stdout" - } - ] + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "accelerator": "GPU" }, - { - "cell_type": "code", - "metadata": { - "id": "5PAJ4ArzV5Ry", - "colab": { - "base_uri": "https://localhost:8080/" + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "view-in-github", + "colab_type": "text" + }, + "source": [ + "\"Open" + ] }, - "outputId": "e48dbf61-fae0-431c-e964-04c7caaee4bc" - }, - "source": [ - "# install dependencies: (use cu101 because colab has CUDA 10.1)\n", - "!pip install -U torch==1.5.1+cu101 torchvision==0.6.1+cu101 -f https://download.pytorch.org/whl/torch_stable.html\n", - "\n", - "# install mmcv-full thus we could use CUDA operators\n", - "!pip install mmcv-full==latest+torch1.5.0+cu101 -f https://download.openmmlab.com/mmcv/dist/index.html\n", - "\n", - "# Install mmaction2\n", - "!rm -rf mmaction2\n", - "!git clone https://github.com/open-mmlab/mmaction2.git\n", - "%cd mmaction2\n", - "\n", - "!pip install -e .\n", - "\n", - "# Install some optional requirements\n", - "!pip install -r requirements/optional.txt" - ], - "execution_count": null, - "outputs": [ { - "output_type": "stream", - "text": [ - "Looking in links: https://download.pytorch.org/whl/torch_stable.html\n", - "Requirement already up-to-date: torch==1.5.1+cu101 in /usr/local/lib/python3.6/dist-packages (1.5.1+cu101)\n", - "Requirement already up-to-date: torchvision==0.6.1+cu101 in /usr/local/lib/python3.6/dist-packages (0.6.1+cu101)\n", - "Requirement already satisfied, skipping upgrade: future in /usr/local/lib/python3.6/dist-packages (from torch==1.5.1+cu101) (0.16.0)\n", - "Requirement already satisfied, skipping upgrade: numpy in /usr/local/lib/python3.6/dist-packages (from torch==1.5.1+cu101) (1.18.5)\n", - "Requirement already satisfied, skipping upgrade: pillow>=4.1.1 in /usr/local/lib/python3.6/dist-packages (from torchvision==0.6.1+cu101) (7.0.0)\n", - "Looking in links: https://download.openmmlab.com/mmcv/dist/index.html\n", - "Collecting mmcv-full==latest+torch1.5.0+cu101\n", - " Using cached https://download.openmmlab.com/mmcv/dist/latest/torch1.5.0/cu101/mmcv_full-latest%2Btorch1.5.0%2Bcu101-cp36-cp36m-manylinux1_x86_64.whl\n", - "Requirement already satisfied: Pillow in /usr/local/lib/python3.6/dist-packages (from mmcv-full==latest+torch1.5.0+cu101) (7.0.0)\n", - "Requirement 
already satisfied: addict in /usr/local/lib/python3.6/dist-packages (from mmcv-full==latest+torch1.5.0+cu101) (2.3.0)\n", - "Requirement already satisfied: yapf in /usr/local/lib/python3.6/dist-packages (from mmcv-full==latest+torch1.5.0+cu101) (0.30.0)\n", - "Requirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from mmcv-full==latest+torch1.5.0+cu101) (1.18.5)\n", - "Requirement already satisfied: opencv-python>=3 in /usr/local/lib/python3.6/dist-packages (from mmcv-full==latest+torch1.5.0+cu101) (4.1.2.30)\n", - "Requirement already satisfied: pyyaml in /usr/local/lib/python3.6/dist-packages (from mmcv-full==latest+torch1.5.0+cu101) (3.13)\n", - "Installing collected packages: mmcv-full\n", - " Found existing installation: mmcv-full 1.2.0\n", - " Uninstalling mmcv-full-1.2.0:\n", - " Successfully uninstalled mmcv-full-1.2.0\n", - "Successfully installed mmcv-full-1.2.0\n", - "Cloning into 'mmaction2'...\n", - "remote: Enumerating objects: 8, done.\u001b[K\n", - "remote: Counting objects: 100% (8/8), done.\u001b[K\n", - "remote: Compressing objects: 100% (8/8), done.\u001b[K\n", - "remote: Total 7596 (delta 0), reused 1 (delta 0), pack-reused 7588\u001b[K\n", - "Receiving objects: 100% (7596/7596), 33.53 MiB | 37.44 MiB/s, done.\n", - "Resolving deltas: 100% (5329/5329), done.\n", - "/content/mmaction2\n", - "Obtaining file:///content/mmaction2\n", - "Requirement already satisfied: matplotlib in /usr/local/lib/python3.6/dist-packages (from mmaction2==0.8.0) (3.2.2)\n", - "Requirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from mmaction2==0.8.0) (1.18.5)\n", - "Requirement already satisfied: opencv-contrib-python in /usr/local/lib/python3.6/dist-packages (from mmaction2==0.8.0) (4.1.2.30)\n", - "Requirement already satisfied: Pillow in /usr/local/lib/python3.6/dist-packages (from mmaction2==0.8.0) (7.0.0)\n", - "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib->mmaction2==0.8.0) (1.3.1)\n", - "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib->mmaction2==0.8.0) (2.8.1)\n", - "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib->mmaction2==0.8.0) (2.4.7)\n", - "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.6/dist-packages (from matplotlib->mmaction2==0.8.0) (0.10.0)\n", - "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.6/dist-packages (from python-dateutil>=2.1->matplotlib->mmaction2==0.8.0) (1.15.0)\n", - "Installing collected packages: mmaction2\n", - " Running setup.py develop for mmaction2\n", - "Successfully installed mmaction2\n", - "Requirement already satisfied: av in /usr/local/lib/python3.6/dist-packages (from -r requirements/optional.txt (line 1)) (8.0.2)\n", - "Requirement already satisfied: decord>=0.4.1 in /usr/local/lib/python3.6/dist-packages (from -r requirements/optional.txt (line 2)) (0.4.2)\n", - "Requirement already satisfied: moviepy in /usr/local/lib/python3.6/dist-packages (from -r requirements/optional.txt (line 3)) (0.2.3.5)\n", - "Requirement already satisfied: onnx in /usr/local/lib/python3.6/dist-packages (from -r requirements/optional.txt (line 4)) (1.8.0)\n", - "Requirement already satisfied: onnxruntime in /usr/local/lib/python3.6/dist-packages (from -r requirements/optional.txt (line 5)) (1.5.2)\n", - "Requirement already satisfied: PyTurboJPEG in 
/usr/local/lib/python3.6/dist-packages (from -r requirements/optional.txt (line 6)) (1.4.1)\n", - "Requirement already satisfied: numpy>=1.14.0 in /usr/local/lib/python3.6/dist-packages (from decord>=0.4.1->-r requirements/optional.txt (line 2)) (1.18.5)\n", - "Requirement already satisfied: tqdm<5.0,>=4.11.2 in /usr/local/lib/python3.6/dist-packages (from moviepy->-r requirements/optional.txt (line 3)) (4.41.1)\n", - "Requirement already satisfied: decorator<5.0,>=4.0.2 in /usr/local/lib/python3.6/dist-packages (from moviepy->-r requirements/optional.txt (line 3)) (4.4.2)\n", - "Requirement already satisfied: imageio<3.0,>=2.1.2 in /usr/local/lib/python3.6/dist-packages (from moviepy->-r requirements/optional.txt (line 3)) (2.4.1)\n", - "Requirement already satisfied: six in /usr/local/lib/python3.6/dist-packages (from onnx->-r requirements/optional.txt (line 4)) (1.15.0)\n", - "Requirement already satisfied: protobuf in /usr/local/lib/python3.6/dist-packages (from onnx->-r requirements/optional.txt (line 4)) (3.12.4)\n", - "Requirement already satisfied: typing-extensions>=3.6.2.1 in /usr/local/lib/python3.6/dist-packages (from onnx->-r requirements/optional.txt (line 4)) (3.7.4.3)\n", - "Requirement already satisfied: pillow in /usr/local/lib/python3.6/dist-packages (from imageio<3.0,>=2.1.2->moviepy->-r requirements/optional.txt (line 3)) (7.0.0)\n", - "Requirement already satisfied: setuptools in /usr/local/lib/python3.6/dist-packages (from protobuf->onnx->-r requirements/optional.txt (line 4)) (50.3.2)\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "No_zZAFpWC-a", - "colab": { - "base_uri": "https://localhost:8080/" + "cell_type": "markdown", + "metadata": { + "id": "VcjSRFELVbNk" + }, + "source": [ + "# MMAction2 Tutorial\n", + "\n", + "Welcome to MMAction2! This is the official colab tutorial for using MMAction2. In this tutorial, you will learn\n", + "- Perform inference with a MMAction2 recognizer.\n", + "- Train a new recognizer with a new dataset.\n", + "\n", + "Let's start!" + ] }, - "outputId": "1d425eea-d44e-434a-991c-01eb15abaab2" - }, - "source": [ - "# Check Pytorch installation\n", - "import torch, torchvision\n", - "print(torch.__version__, torch.cuda.is_available())\n", - "\n", - "# Check MMAction2 installation\n", - "import mmaction\n", - "print(mmaction.__version__)\n", - "\n", - "# Check MMCV installation\n", - "from mmcv.ops import get_compiling_cuda_version, get_compiler_version\n", - "print(get_compiling_cuda_version())\n", - "print(get_compiler_version())" - ], - "execution_count": null, - "outputs": [ { - "output_type": "stream", - "text": [ - "1.5.1+cu101 True\n", - "0.8.0\n", - "10.1\n", - "GCC 7.3\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "pXf7oV5DWdab" - }, - "source": [ - "## Perform inference with a MMAction2 recognizer\n", - "MMAction2 already provides high level APIs to do inference and training." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "64CW6d_AaT-Q", - "colab": { - "base_uri": "https://localhost:8080/" + "cell_type": "markdown", + "metadata": { + "id": "7LqHGkGEVqpm" + }, + "source": [ + "## Install MMAction2" + ] }, - "outputId": "3b284fd8-4ee7-4a34-90d7-5023cd123a04" - }, - "source": [ - "!mkdir checkpoints\n", - "!wget -c https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \\\n", - " -O checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth" - ], - "execution_count": null, - "outputs": [ { - "output_type": "stream", - "text": [ - "--2020-11-20 09:38:27-- https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth\n", - "Resolving download.openmmlab.com (download.openmmlab.com)... 47.252.96.35\n", - "Connecting to download.openmmlab.com (download.openmmlab.com)|47.252.96.35|:443... connected.\n", - "HTTP request sent, awaiting response... 200 OK\n", - "Length: 97579339 (93M) [application/octet-stream]\n", - "Saving to: ‘checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth’\n", - "\n", - "checkpoints/tsn_r50 100%[===================>] 93.06M 8.48MB/s in 11s \n", - "\n", - "2020-11-20 09:38:40 (8.49 MB/s) - ‘checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth’ saved [97579339/97579339]\n", - "\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "HNZB7NoSabzj" - }, - "source": [ - "from mmaction.apis import inference_recognizer, init_recognizer\n", - "\n", - "# Choose to use a config and initialize the recognizer\n", - "config = 'configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py'\n", - "# Setup a checkpoint file to load\n", - "checkpoint = 'checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n", - "# Initialize the recognizer\n", - "model = init_recognizer(config, checkpoint, device='cuda:0')" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "rEMsBnpHapAn" - }, - "source": [ - "# Use the recognizer to do inference\n", - "video = 'demo/demo.mp4'\n", - "label = 'demo/label_map_k400.txt'\n", - "results = inference_recognizer(model, video, label)" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "NIyJXqfWathq", - "colab": { - "base_uri": "https://localhost:8080/" + "cell_type": "code", + "metadata": { + "id": "Bf8PpPXtVvmg", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "f262f3c6-a9dd-48c7-8f7e-081fd3e12ba8" + }, + "source": [ + "# Check nvcc version\n", + "!nvcc -V\n", + "# Check GCC version\n", + "!gcc --version" + ], + "execution_count": 1, + "outputs": [ + { + "output_type": "stream", + "text": [ + "nvcc: NVIDIA (R) Cuda compiler driver\n", + "Copyright (c) 2005-2020 NVIDIA Corporation\n", + "Built on Wed_Jul_22_19:09:09_PDT_2020\n", + "Cuda compilation tools, release 11.0, V11.0.221\n", + "Build cuda_11.0_bu.TC445_37.28845127_0\n", + "gcc (Ubuntu 7.5.0-3ubuntu1~18.04) 7.5.0\n", + "Copyright (C) 2017 Free Software Foundation, Inc.\n", + "This is free software; see the source for copying conditions. 
There is NO\n", + "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n", + "\n" + ], + "name": "stdout" + } + ] }, - "outputId": "a5ac09b9-d7df-4430-a89e-28526b76f3be" - }, - "source": [ - "# Let's show the results\n", - "for result in results:\n", - " print(f'{result[0]}: ', result[1])" - ], - "execution_count": null, - "outputs": [ { - "output_type": "stream", - "text": [ - "arm wrestling: 29.616438\n", - "rock scissors paper: 10.754841\n", - "shaking hands: 9.908401\n", - "clapping: 9.189913\n", - "massaging feet: 8.305306\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "QuZG8kZ2fJ5d" - }, - "source": [ - "## Train a recognizer on customized dataset\n", - "\n", - "To train a new recognizer, there are usually three things to do:\n", - "1. Support a new dataset\n", - "2. Modify the config\n", - "3. Train a new recognizer" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "neEFyxChfgiJ" - }, - "source": [ - "### Support a new dataset\n", - "\n", - "In this tutorial, we gives an example to convert the data into the format of existing datasets. Other methods and more advanced usages can be found in the [doc](/docs/tutorials/new_dataset.md)\n", - "\n", - "Firstly, let's download a tiny dataset obtained from [Kinetics-400](https://deepmind.com/research/open-source/open-source-datasets/kinetics/). We select 30 videos with their labels as train dataset and 10 videos with their labels as test dataset." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "gjsUj9JzgUlJ", - "colab": { - "base_uri": "https://localhost:8080/" + "cell_type": "code", + "metadata": { + "id": "5PAJ4ArzV5Ry", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "b68c4528-1a83-469f-8920-040ae373fc7c" + }, + "source": [ + "# install dependencies: (use cu101 because colab has CUDA 10.1)\n", + "!pip install -U torch==1.8.0+cu101 torchvision==0.9.0+cu101 torchtext==0.9.0 -f https://download.pytorch.org/whl/torch_stable.html\n", + "\n", + "# install mmcv-full thus we could use CUDA operators\n", + "!pip install mmcv-full==1.3.9 -f https://download.openmmlab.com/mmcv/dist/cu101/torch1.8.0/index.html\n", + "\n", + "# Install mmaction2\n", + "!rm -rf mmaction2\n", + "!git clone https://github.com/open-mmlab/mmaction2.git\n", + "%cd mmaction2\n", + "\n", + "!pip install -e .\n", + "\n", + "# Install some optional requirements\n", + "!pip install -r requirements/optional.txt" + ], + "execution_count": 2, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Looking in links: https://download.pytorch.org/whl/torch_stable.html\n", + "Collecting torch==1.8.0+cu101\n", + "\u001b[?25l Downloading https://download.pytorch.org/whl/cu101/torch-1.8.0%2Bcu101-cp37-cp37m-linux_x86_64.whl (763.5MB)\n", + "\u001b[K |████████████████████████████████| 763.5MB 23kB/s \n", + "\u001b[?25hCollecting torchvision==0.9.0+cu101\n", + "\u001b[?25l Downloading https://download.pytorch.org/whl/cu101/torchvision-0.9.0%2Bcu101-cp37-cp37m-linux_x86_64.whl (17.3MB)\n", + "\u001b[K |████████████████████████████████| 17.3MB 188kB/s \n", + "\u001b[?25hCollecting torchtext==0.9.0\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/36/50/84184d6230686e230c464f0dd4ff32eada2756b4a0b9cefec68b88d1d580/torchtext-0.9.0-cp37-cp37m-manylinux1_x86_64.whl (7.1MB)\n", + "\u001b[K |████████████████████████████████| 7.1MB 8.0MB/s \n", + "\u001b[?25hRequirement already satisfied, skipping upgrade: numpy in /usr/local/lib/python3.7/dist-packages 
(from torch==1.8.0+cu101) (1.19.5)\n", + "Requirement already satisfied, skipping upgrade: typing-extensions in /usr/local/lib/python3.7/dist-packages (from torch==1.8.0+cu101) (3.7.4.3)\n", + "Requirement already satisfied, skipping upgrade: pillow>=4.1.1 in /usr/local/lib/python3.7/dist-packages (from torchvision==0.9.0+cu101) (7.1.2)\n", + "Requirement already satisfied, skipping upgrade: tqdm in /usr/local/lib/python3.7/dist-packages (from torchtext==0.9.0) (4.41.1)\n", + "Requirement already satisfied, skipping upgrade: requests in /usr/local/lib/python3.7/dist-packages (from torchtext==0.9.0) (2.23.0)\n", + "Requirement already satisfied, skipping upgrade: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (2.10)\n", + "Requirement already satisfied, skipping upgrade: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (1.24.3)\n", + "Requirement already satisfied, skipping upgrade: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (3.0.4)\n", + "Requirement already satisfied, skipping upgrade: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (2021.5.30)\n", + "Installing collected packages: torch, torchvision, torchtext\n", + " Found existing installation: torch 1.9.0+cu102\n", + " Uninstalling torch-1.9.0+cu102:\n", + " Successfully uninstalled torch-1.9.0+cu102\n", + " Found existing installation: torchvision 0.10.0+cu102\n", + " Uninstalling torchvision-0.10.0+cu102:\n", + " Successfully uninstalled torchvision-0.10.0+cu102\n", + " Found existing installation: torchtext 0.10.0\n", + " Uninstalling torchtext-0.10.0:\n", + " Successfully uninstalled torchtext-0.10.0\n", + "Successfully installed torch-1.8.0+cu101 torchtext-0.9.0 torchvision-0.9.0+cu101\n", + "Looking in links: https://download.openmmlab.com/mmcv/dist/cu101/torch1.8.0/index.html\n", + "Collecting mmcv-full==1.3.9\n", + "\u001b[?25l Downloading https://download.openmmlab.com/mmcv/dist/cu101/torch1.8.0/mmcv_full-1.3.9-cp37-cp37m-manylinux1_x86_64.whl (31.4MB)\n", + "\u001b[K |████████████████████████████████| 31.4MB 94kB/s \n", + "\u001b[?25hRequirement already satisfied: pyyaml in /usr/local/lib/python3.7/dist-packages (from mmcv-full==1.3.9) (3.13)\n", + "Requirement already satisfied: opencv-python>=3 in /usr/local/lib/python3.7/dist-packages (from mmcv-full==1.3.9) (4.1.2.30)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from mmcv-full==1.3.9) (1.19.5)\n", + "Requirement already satisfied: Pillow in /usr/local/lib/python3.7/dist-packages (from mmcv-full==1.3.9) (7.1.2)\n", + "Collecting addict\n", + " Downloading https://files.pythonhosted.org/packages/6a/00/b08f23b7d7e1e14ce01419a467b583edbb93c6cdb8654e54a9cc579cd61f/addict-2.4.0-py3-none-any.whl\n", + "Collecting yapf\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/5f/0d/8814e79eb865eab42d95023b58b650d01dec6f8ea87fc9260978b1bf2167/yapf-0.31.0-py2.py3-none-any.whl (185kB)\n", + "\u001b[K |████████████████████████████████| 194kB 8.8MB/s \n", + "\u001b[?25hInstalling collected packages: addict, yapf, mmcv-full\n", + "Successfully installed addict-2.4.0 mmcv-full-1.3.9 yapf-0.31.0\n", + "Cloning into 'mmaction2'...\n", + "remote: Enumerating objects: 12544, done.\u001b[K\n", + "remote: Counting objects: 100% (677/677), done.\u001b[K\n", + "remote: Compressing objects: 100% (330/330), done.\u001b[K\n", + "remote: 
Total 12544 (delta 432), reused 510 (delta 344), pack-reused 11867\u001b[K\n", + "Receiving objects: 100% (12544/12544), 42.42 MiB | 30.27 MiB/s, done.\n", + "Resolving deltas: 100% (8980/8980), done.\n", + "/content/mmaction2\n", + "Obtaining file:///content/mmaction2\n", + "Requirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.16.0) (3.2.2)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.16.0) (1.19.5)\n", + "Requirement already satisfied: opencv-contrib-python in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.16.0) (4.1.2.30)\n", + "Requirement already satisfied: Pillow in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.16.0) (7.1.2)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.16.0) (2.8.1)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.16.0) (0.10.0)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.16.0) (1.3.1)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.16.0) (2.4.7)\n", + "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.7/dist-packages (from python-dateutil>=2.1->matplotlib->mmaction2==0.16.0) (1.15.0)\n", + "Installing collected packages: mmaction2\n", + " Running setup.py develop for mmaction2\n", + "Successfully installed mmaction2\n", + "Collecting av\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/66/ff/bacde7314c646a2bd2f240034809a10cc3f8b096751284d0828640fff3dd/av-8.0.3-cp37-cp37m-manylinux2010_x86_64.whl (37.2MB)\n", + "\u001b[K |████████████████████████████████| 37.2MB 76kB/s \n", + "\u001b[?25hCollecting decord>=0.4.1\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/11/79/936af42edf90a7bd4e41a6cac89c913d4b47fa48a26b042d5129a9242ee3/decord-0.6.0-py3-none-manylinux2010_x86_64.whl (13.6MB)\n", + "\u001b[K |████████████████████████████████| 13.6MB 231kB/s \n", + "\u001b[?25hCollecting einops\n", + " Downloading https://files.pythonhosted.org/packages/5d/a0/9935e030634bf60ecd572c775f64ace82ceddf2f504a5fd3902438f07090/einops-0.3.0-py2.py3-none-any.whl\n", + "Requirement already satisfied: imgaug in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 4)) (0.2.9)\n", + "Requirement already satisfied: librosa in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 5)) (0.8.1)\n", + "Requirement already satisfied: lmdb in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 6)) (0.99)\n", + "Requirement already satisfied: moviepy in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 7)) (0.2.3.5)\n", + "Collecting onnx\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/3f/9b/54c950d3256e27f970a83cd0504efb183a24312702deed0179453316dbd0/onnx-1.9.0-cp37-cp37m-manylinux2010_x86_64.whl (12.2MB)\n", + "\u001b[K |████████████████████████████████| 12.2MB 36.2MB/s \n", + "\u001b[?25hCollecting onnxruntime\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/c9/35/80ab6f444a83c708817e011e9cd4708c816591cc85aff830dff525a34992/onnxruntime-1.8.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (4.5MB)\n", + 
"\u001b[K |████████████████████████████████| 4.5MB 29.5MB/s \n", + "\u001b[?25hCollecting pims\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/d5/47/82e0ac31e01a271e5a06362fbf03769e9081956f6772f91d98b32899d743/PIMS-0.5.tar.gz (85kB)\n", + "\u001b[K |████████████████████████████████| 92kB 13.1MB/s \n", + "\u001b[?25hCollecting PyTurboJPEG\n", + " Downloading https://files.pythonhosted.org/packages/f9/7b/7621780391ed7a33acec8e803068d7291d940fbbad1ffc8909e94e844477/PyTurboJPEG-1.5.1.tar.gz\n", + "Collecting timm\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/90/fc/606bc5cf46acac3aa9bd179b3954433c026aaf88ea98d6b19f5d14c336da/timm-0.4.12-py3-none-any.whl (376kB)\n", + "\u001b[K |████████████████████████████████| 378kB 43.1MB/s \n", + "\u001b[?25hRequirement already satisfied: numpy>=1.14.0 in /usr/local/lib/python3.7/dist-packages (from decord>=0.4.1->-r requirements/optional.txt (line 2)) (1.19.5)\n", + "Requirement already satisfied: Pillow in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (7.1.2)\n", + "Requirement already satisfied: scikit-image>=0.11.0 in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (0.16.2)\n", + "Requirement already satisfied: six in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (1.15.0)\n", + "Requirement already satisfied: imageio in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (2.4.1)\n", + "Requirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (3.2.2)\n", + "Requirement already satisfied: opencv-python in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (4.1.2.30)\n", + "Requirement already satisfied: Shapely in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (1.7.1)\n", + "Requirement already satisfied: scipy in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (1.4.1)\n", + "Requirement already satisfied: resampy>=0.2.2 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (0.2.2)\n", + "Requirement already satisfied: pooch>=1.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (1.4.0)\n", + "Requirement already satisfied: numba>=0.43.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (0.51.2)\n", + "Requirement already satisfied: audioread>=2.0.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (2.1.9)\n", + "Requirement already satisfied: soundfile>=0.10.2 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (0.10.3.post1)\n", + "Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (20.9)\n", + "Requirement already satisfied: joblib>=0.14 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (1.0.1)\n", + "Requirement already satisfied: decorator>=3.0.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (4.4.2)\n", + "Requirement already satisfied: scikit-learn!=0.19.0,>=0.14.0 in /usr/local/lib/python3.7/dist-packages (from 
librosa->-r requirements/optional.txt (line 5)) (0.22.2.post1)\n", + "Requirement already satisfied: tqdm<5.0,>=4.11.2 in /usr/local/lib/python3.7/dist-packages (from moviepy->-r requirements/optional.txt (line 7)) (4.41.1)\n", + "Requirement already satisfied: protobuf in /usr/local/lib/python3.7/dist-packages (from onnx->-r requirements/optional.txt (line 8)) (3.17.3)\n", + "Requirement already satisfied: typing-extensions>=3.6.2.1 in /usr/local/lib/python3.7/dist-packages (from onnx->-r requirements/optional.txt (line 8)) (3.7.4.3)\n", + "Requirement already satisfied: flatbuffers in /usr/local/lib/python3.7/dist-packages (from onnxruntime->-r requirements/optional.txt (line 9)) (1.12)\n", + "Collecting slicerator>=0.9.8\n", + " Downloading https://files.pythonhosted.org/packages/75/ae/fe46f5371105508a209fe6162e7e7b11db531a79d2eabcd24566b8b1f534/slicerator-1.0.0-py3-none-any.whl\n", + "Requirement already satisfied: torchvision in /usr/local/lib/python3.7/dist-packages (from timm->-r requirements/optional.txt (line 12)) (0.9.0+cu101)\n", + "Requirement already satisfied: torch>=1.4 in /usr/local/lib/python3.7/dist-packages (from timm->-r requirements/optional.txt (line 12)) (1.8.0+cu101)\n", + "Requirement already satisfied: networkx>=2.0 in /usr/local/lib/python3.7/dist-packages (from scikit-image>=0.11.0->imgaug->-r requirements/optional.txt (line 4)) (2.5.1)\n", + "Requirement already satisfied: PyWavelets>=0.4.0 in /usr/local/lib/python3.7/dist-packages (from scikit-image>=0.11.0->imgaug->-r requirements/optional.txt (line 4)) (1.1.1)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 4)) (0.10.0)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 4)) (2.8.1)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 4)) (2.4.7)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 4)) (1.3.1)\n", + "Requirement already satisfied: requests in /usr/local/lib/python3.7/dist-packages (from pooch>=1.0->librosa->-r requirements/optional.txt (line 5)) (2.23.0)\n", + "Requirement already satisfied: appdirs in /usr/local/lib/python3.7/dist-packages (from pooch>=1.0->librosa->-r requirements/optional.txt (line 5)) (1.4.4)\n", + "Requirement already satisfied: llvmlite<0.35,>=0.34.0.dev0 in /usr/local/lib/python3.7/dist-packages (from numba>=0.43.0->librosa->-r requirements/optional.txt (line 5)) (0.34.0)\n", + "Requirement already satisfied: setuptools in /usr/local/lib/python3.7/dist-packages (from numba>=0.43.0->librosa->-r requirements/optional.txt (line 5)) (57.0.0)\n", + "Requirement already satisfied: cffi>=1.0 in /usr/local/lib/python3.7/dist-packages (from soundfile>=0.10.2->librosa->-r requirements/optional.txt (line 5)) (1.14.5)\n", + "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests->pooch>=1.0->librosa->-r requirements/optional.txt (line 5)) (2.10)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests->pooch>=1.0->librosa->-r requirements/optional.txt (line 5)) (2021.5.30)\n", + "Requirement already satisfied: chardet<4,>=3.0.2 in 
/usr/local/lib/python3.7/dist-packages (from requests->pooch>=1.0->librosa->-r requirements/optional.txt (line 5)) (3.0.4)\n", + "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests->pooch>=1.0->librosa->-r requirements/optional.txt (line 5)) (1.24.3)\n", + "Requirement already satisfied: pycparser in /usr/local/lib/python3.7/dist-packages (from cffi>=1.0->soundfile>=0.10.2->librosa->-r requirements/optional.txt (line 5)) (2.20)\n", + "Building wheels for collected packages: pims, PyTurboJPEG\n", + " Building wheel for pims (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for pims: filename=PIMS-0.5-cp37-none-any.whl size=84328 sha256=436632b7a982144fd933f01d12e38a419eb8a636f2d6dd4bd4a43680734979e2\n", + " Stored in directory: /root/.cache/pip/wheels/0e/0a/14/4c33a4cc1b9158e57329a38e8e3e03901ed24060eb322d5462\n", + " Building wheel for PyTurboJPEG (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for PyTurboJPEG: filename=PyTurboJPEG-1.5.1-cp37-none-any.whl size=7979 sha256=755337aaa622b48be036eca6d743e99bf4528fc6c64e810da11a71236a78bcca\n", + " Stored in directory: /root/.cache/pip/wheels/19/cb/78/5725c881ee618936d956bf0ecd4272cb0f701cb898f44575ca\n", + "Successfully built pims PyTurboJPEG\n", + "Installing collected packages: av, decord, einops, onnx, onnxruntime, slicerator, pims, PyTurboJPEG, timm\n", + "Successfully installed PyTurboJPEG-1.5.1 av-8.0.3 decord-0.6.0 einops-0.3.0 onnx-1.9.0 onnxruntime-1.8.1 pims-0.5 slicerator-1.0.0 timm-0.4.12\n" + ], + "name": "stdout" + } + ] }, - "outputId": "2bc7af2f-0068-430c-d8f2-f85413466b85" - }, - "source": [ - "# download, decompress the data\n", - "!rm kinetics400_tiny.zip*\n", - "!rm -rf kinetics400_tiny\n", - "!wget https://download.openmmlab.com/mmaction/kinetics400_tiny.zip\n", - "!unzip kinetics400_tiny.zip > /dev/null" - ], - "execution_count": null, - "outputs": [ { - "output_type": "stream", - "text": [ - "rm: cannot remove 'kinetics400_tiny.zip*': No such file or directory\n", - "--2020-11-20 09:38:48-- https://download.openmmlab.com/mmaction/kinetics400_tiny.zip\n", - "Resolving download.openmmlab.com (download.openmmlab.com)... 47.252.96.35\n", - "Connecting to download.openmmlab.com (download.openmmlab.com)|47.252.96.35|:443... connected.\n", - "HTTP request sent, awaiting response... 
200 OK\n", - "Length: 18308682 (17M) [application/zip]\n", - "Saving to: ‘kinetics400_tiny.zip’\n", - "\n", - "kinetics400_tiny.zi 100%[===================>] 17.46M 8.59MB/s in 2.0s \n", - "\n", - "2020-11-20 09:38:51 (8.59 MB/s) - ‘kinetics400_tiny.zip’ saved [18308682/18308682]\n", - "\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "AbZ-o7V6hNw4", - "colab": { - "base_uri": "https://localhost:8080/" + "cell_type": "code", + "metadata": { + "id": "No_zZAFpWC-a", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "7e95038a-6f79-410b-adf6-0148bf8cc2fc" + }, + "source": [ + "# Check Pytorch installation\n", + "import torch, torchvision\n", + "print(torch.__version__, torch.cuda.is_available())\n", + "\n", + "# Check MMAction2 installation\n", + "import mmaction\n", + "print(mmaction.__version__)\n", + "\n", + "# Check MMCV installation\n", + "from mmcv.ops import get_compiling_cuda_version, get_compiler_version\n", + "print(get_compiling_cuda_version())\n", + "print(get_compiler_version())" + ], + "execution_count": 3, + "outputs": [ + { + "output_type": "stream", + "text": [ + "1.8.0+cu101 True\n", + "0.16.0\n", + "10.1\n", + "GCC 7.3\n" + ], + "name": "stdout" + } + ] }, - "outputId": "a8ad33e5-7d29-47ca-f14e-bb356ea096eb" - }, - "source": [ - "# Check the directory structure of the tiny data\n", - "\n", - "# Install tree first\n", - "!apt-get -q install tree\n", - "!tree kinetics400_tiny" - ], - "execution_count": null, - "outputs": [ { - "output_type": "stream", - "text": [ - "Reading package lists...\n", - "Building dependency tree...\n", - "Reading state information...\n", - "tree is already the newest version (1.7.0-5).\n", - "0 upgraded, 0 newly installed, 0 to remove and 14 not upgraded.\n", - "kinetics400_tiny\n", - "├── kinetics_tiny_train_video.txt\n", - "├── kinetics_tiny_val_video.txt\n", - "├── train\n", - "│   ├── 27_CSXByd3s.mp4\n", - "│   ├── 34XczvTaRiI.mp4\n", - "│   ├── A-wiliK50Zw.mp4\n", - "│   ├── D32_1gwq35E.mp4\n", - "│   ├── D92m0HsHjcQ.mp4\n", - "│   ├── DbX8mPslRXg.mp4\n", - "│   ├── FMlSTTpN3VY.mp4\n", - "│   ├── h10B9SVE-nk.mp4\n", - "│   ├── h2YqqUhnR34.mp4\n", - "│   ├── iRuyZSKhHRg.mp4\n", - "│   ├── IyfILH9lBRo.mp4\n", - "│   ├── kFC3KY2bOP8.mp4\n", - "│   ├── LvcFDgCAXQs.mp4\n", - "│   ├── O46YA8tI530.mp4\n", - "│   ├── oMrZaozOvdQ.mp4\n", - "│   ├── oXy-e_P_cAI.mp4\n", - "│   ├── P5M-hAts7MQ.mp4\n", - "│   ├── phDqGd0NKoo.mp4\n", - "│   ├── PnOe3GZRVX8.mp4\n", - "│   ├── R8HXQkdgKWA.mp4\n", - "│   ├── RqnKtCEoEcA.mp4\n", - "│   ├── soEcZZsBmDs.mp4\n", - "│   ├── TkkZPZHbAKA.mp4\n", - "│   ├── T_TMNGzVrDk.mp4\n", - "│   ├── WaS0qwP46Us.mp4\n", - "│   ├── Wh_YPQdH1Zg.mp4\n", - "│   ├── WWP5HZJsg-o.mp4\n", - "│   ├── xGY2dP0YUjA.mp4\n", - "│   ├── yLC9CtWU5ws.mp4\n", - "│   └── ZQV4U2KQ370.mp4\n", - "└── val\n", - " ├── 0pVGiAU6XEA.mp4\n", - " ├── AQrbRSnRt8M.mp4\n", - " ├── b6Q_b7vgc7Q.mp4\n", - " ├── ddvJ6-faICE.mp4\n", - " ├── IcLztCtvhb8.mp4\n", - " ├── ik4BW3-SCts.mp4\n", - " ├── jqRrH30V0k4.mp4\n", - " ├── SU_x2LQqSLs.mp4\n", - " ├── u4Rm6srmIS8.mp4\n", - " └── y5Iu7XkTqV0.mp4\n", - "\n", - "2 directories, 42 files\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "fTdi6dI0hY3g", - "colab": { - "base_uri": "https://localhost:8080/" + "cell_type": "markdown", + "metadata": { + "id": "pXf7oV5DWdab" + }, + "source": [ + "## Perform inference with a MMAction2 recognizer\n", + "MMAction2 already provides high level APIs to do inference and training." 
+ ] }, - "outputId": "a78557e3-2a82-4c5b-a292-017660cc5b5e" - }, - "source": [ - "# After downloading the data, we need to check the annotation format\n", - "!cat kinetics400_tiny/kinetics_tiny_train_video.txt" - ], - "execution_count": null, - "outputs": [ { - "output_type": "stream", - "text": [ - "D32_1gwq35E.mp4 0\n", - "iRuyZSKhHRg.mp4 1\n", - "oXy-e_P_cAI.mp4 0\n", - "34XczvTaRiI.mp4 1\n", - "h2YqqUhnR34.mp4 0\n", - "O46YA8tI530.mp4 0\n", - "kFC3KY2bOP8.mp4 1\n", - "WWP5HZJsg-o.mp4 1\n", - "phDqGd0NKoo.mp4 1\n", - "yLC9CtWU5ws.mp4 0\n", - "27_CSXByd3s.mp4 1\n", - "IyfILH9lBRo.mp4 1\n", - "T_TMNGzVrDk.mp4 1\n", - "TkkZPZHbAKA.mp4 0\n", - "PnOe3GZRVX8.mp4 1\n", - "soEcZZsBmDs.mp4 1\n", - "FMlSTTpN3VY.mp4 1\n", - "WaS0qwP46Us.mp4 0\n", - "A-wiliK50Zw.mp4 1\n", - "oMrZaozOvdQ.mp4 1\n", - "ZQV4U2KQ370.mp4 0\n", - "DbX8mPslRXg.mp4 1\n", - "h10B9SVE-nk.mp4 1\n", - "P5M-hAts7MQ.mp4 0\n", - "R8HXQkdgKWA.mp4 0\n", - "D92m0HsHjcQ.mp4 0\n", - "RqnKtCEoEcA.mp4 0\n", - "LvcFDgCAXQs.mp4 0\n", - "xGY2dP0YUjA.mp4 0\n", - "Wh_YPQdH1Zg.mp4 0\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "0bq0mxmEi29H" - }, - "source": [ - "According to the format defined in [`VideoDataset`](./datasets/video_dataset.py), each line indicates a sample video with the filepath and label, which are split with a whitespace." - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "Ht_DGJA9jQar" - }, - "source": [ - "### Modify the config\n", - "\n", - "In the next step, we need to modify the config for the training.\n", - "To accelerate the process, we finetune a recognizer using a pre-trained recognizer." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "LjCcmCKOjktc" - }, - "source": [ - "from mmcv import Config\n", - "cfg = Config.fromfile('./configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py')" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "tc8YhFFGjp3e" - }, - "source": [ - "Given a config that trains a TSN model on kinetics400-full dataset, we need to modify some values to use it for training TSN on Kinetics400-tiny dataset.\n" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "tlhu9byjjt-K", - "colab": { - "base_uri": "https://localhost:8080/" + "cell_type": "code", + "metadata": { + "id": "64CW6d_AaT-Q", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "d08bfb9b-ab1e-451b-d3b2-89023a59766b" + }, + "source": [ + "!mkdir checkpoints\n", + "!wget -c https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \\\n", + " -O checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth" + ], + "execution_count": 4, + "outputs": [ + { + "output_type": "stream", + "text": [ + "--2021-07-11 12:44:00-- https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth\n", + "Resolving download.openmmlab.com (download.openmmlab.com)... 47.88.36.78\n", + "Connecting to download.openmmlab.com (download.openmmlab.com)|47.88.36.78|:443... connected.\n", + "HTTP request sent, awaiting response... 
200 OK\n", + "Length: 97579339 (93M) [application/octet-stream]\n", + "Saving to: ‘checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth’\n", + "\n", + "checkpoints/tsn_r50 100%[===================>] 93.06M 11.4MB/s in 8.1s \n", + "\n", + "2021-07-11 12:44:09 (11.4 MB/s) - ‘checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth’ saved [97579339/97579339]\n", + "\n" + ], + "name": "stdout" + } + ] }, - "outputId": "13d1bb01-f351-48c0-9efb-0bdf2c125d29" - }, - "source": [ - "from mmcv.runner import set_random_seed\n", - "\n", - "# Modify dataset type and path\n", - "cfg.dataset_type = 'VideoDataset'\n", - "cfg.data_root = 'kinetics400_tiny/train/'\n", - "cfg.data_root_val = 'kinetics400_tiny/val/'\n", - "cfg.ann_file_train = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n", - "cfg.ann_file_val = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", - "cfg.ann_file_test = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", - "\n", - "cfg.data.test.type = 'VideoDataset'\n", - "cfg.data.test.ann_file = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", - "cfg.data.test.data_prefix = 'kinetics400_tiny/val/'\n", - "\n", - "cfg.data.train.type = 'VideoDataset'\n", - "cfg.data.train.ann_file = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n", - "cfg.data.train.data_prefix = 'kinetics400_tiny/train/'\n", - "\n", - "cfg.data.val.type = 'VideoDataset'\n", - "cfg.data.val.ann_file = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", - "cfg.data.val.data_prefix = 'kinetics400_tiny/val/'\n", - "\n", - "# The flag is used to determine whether it is omnisource training\n", - "cfg.setdefault('omnisource', False)\n", - "# Modify num classes of the model in cls_head\n", - "cfg.model.cls_head.num_classes = 2\n", - "# We can use the pre-trained TSN model\n", - "cfg.load_from = './checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n", - "\n", - "# Set up working dir to save files and logs.\n", - "cfg.work_dir = './tutorial_exps'\n", - "\n", - "# The original learning rate (LR) is set for 8-GPU training.\n", - "# We divide it by 8 since we only use one GPU.\n", - "cfg.data.videos_per_gpu = cfg.data.videos_per_gpu // 16\n", - "cfg.optimizer.lr = cfg.optimizer.lr / 8 / 16\n", - "cfg.total_epochs = 30\n", - "\n", - "# We can set the checkpoint saving interval to reduce the storage cost\n", - "cfg.checkpoint_config.interval = 10\n", - "# We can set the log print interval to reduce the the times of printing log\n", - "cfg.log_config.interval = 5\n", - "\n", - "# Set seed thus the results are more reproducible\n", - "cfg.seed = 0\n", - "set_random_seed(0, deterministic=False)\n", - "cfg.gpu_ids = range(1)\n", - "\n", - "\n", - "# We can initialize the logger for training and have a look\n", - "# at the final config used for training\n", - "print(f'Config:\\n{cfg.pretty_text}')\n" - ], - "execution_count": null, - "outputs": [ { - "output_type": "stream", - "text": [ - "Config:\n", - "model = dict(\n", - " type='Recognizer2D',\n", - " backbone=dict(\n", - " type='ResNet',\n", - " pretrained='torchvision://resnet50',\n", - " depth=50,\n", - " norm_eval=False),\n", - " cls_head=dict(\n", - " type='TSNHead',\n", - " num_classes=2,\n", - " in_channels=2048,\n", - " spatial_type='avg',\n", - " consensus=dict(type='AvgConsensus', dim=1),\n", - " dropout_ratio=0.4,\n", - " init_std=0.01))\n", - "train_cfg = None\n", - "test_cfg = dict(average_clips=None)\n", - "dataset_type = 'VideoDataset'\n", - "data_root = 'kinetics400_tiny/train/'\n", - "data_root_val = 
'kinetics400_tiny/val/'\n", - "ann_file_train = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n", - "ann_file_val = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", - "ann_file_test = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", - "img_norm_cfg = dict(\n", - " mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False)\n", - "train_pipeline = [\n", - " dict(type='DecordInit'),\n", - " dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8),\n", - " dict(type='DecordDecode'),\n", - " dict(\n", - " type='MultiScaleCrop',\n", - " input_size=224,\n", - " scales=(1, 0.875, 0.75, 0.66),\n", - " random_crop=False,\n", - " max_wh_scale_gap=1),\n", - " dict(type='Resize', scale=(224, 224), keep_ratio=False),\n", - " dict(type='Flip', flip_ratio=0.5),\n", - " dict(\n", - " type='Normalize',\n", - " mean=[123.675, 116.28, 103.53],\n", - " std=[58.395, 57.12, 57.375],\n", - " to_bgr=False),\n", - " dict(type='FormatShape', input_format='NCHW'),\n", - " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", - " dict(type='ToTensor', keys=['imgs', 'label'])\n", - "]\n", - "val_pipeline = [\n", - " dict(type='DecordInit'),\n", - " dict(\n", - " type='SampleFrames',\n", - " clip_len=1,\n", - " frame_interval=1,\n", - " num_clips=8,\n", - " test_mode=True),\n", - " dict(type='DecordDecode'),\n", - " dict(type='Resize', scale=(-1, 256)),\n", - " dict(type='CenterCrop', crop_size=224),\n", - " dict(type='Flip', flip_ratio=0),\n", - " dict(\n", - " type='Normalize',\n", - " mean=[123.675, 116.28, 103.53],\n", - " std=[58.395, 57.12, 57.375],\n", - " to_bgr=False),\n", - " dict(type='FormatShape', input_format='NCHW'),\n", - " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", - " dict(type='ToTensor', keys=['imgs'])\n", - "]\n", - "test_pipeline = [\n", - " dict(type='DecordInit'),\n", - " dict(\n", - " type='SampleFrames',\n", - " clip_len=1,\n", - " frame_interval=1,\n", - " num_clips=25,\n", - " test_mode=True),\n", - " dict(type='DecordDecode'),\n", - " dict(type='Resize', scale=(-1, 256)),\n", - " dict(type='ThreeCrop', crop_size=256),\n", - " dict(type='Flip', flip_ratio=0),\n", - " dict(\n", - " type='Normalize',\n", - " mean=[123.675, 116.28, 103.53],\n", - " std=[58.395, 57.12, 57.375],\n", - " to_bgr=False),\n", - " dict(type='FormatShape', input_format='NCHW'),\n", - " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", - " dict(type='ToTensor', keys=['imgs'])\n", - "]\n", - "data = dict(\n", - " videos_per_gpu=2,\n", - " workers_per_gpu=4,\n", - " train=dict(\n", - " type='VideoDataset',\n", - " ann_file='kinetics400_tiny/kinetics_tiny_train_video.txt',\n", - " data_prefix='kinetics400_tiny/train/',\n", - " pipeline=[\n", - " dict(type='DecordInit'),\n", - " dict(\n", - " type='SampleFrames', clip_len=1, frame_interval=1,\n", - " num_clips=8),\n", - " dict(type='DecordDecode'),\n", - " dict(\n", - " type='MultiScaleCrop',\n", - " input_size=224,\n", - " scales=(1, 0.875, 0.75, 0.66),\n", - " random_crop=False,\n", - " max_wh_scale_gap=1),\n", - " dict(type='Resize', scale=(224, 224), keep_ratio=False),\n", - " dict(type='Flip', flip_ratio=0.5),\n", - " dict(\n", - " type='Normalize',\n", - " mean=[123.675, 116.28, 103.53],\n", - " std=[58.395, 57.12, 57.375],\n", - " to_bgr=False),\n", - " dict(type='FormatShape', input_format='NCHW'),\n", - " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", - " dict(type='ToTensor', keys=['imgs', 'label'])\n", - " ]),\n", - " val=dict(\n", - " type='VideoDataset',\n", - " 
ann_file='kinetics400_tiny/kinetics_tiny_val_video.txt',\n", - " data_prefix='kinetics400_tiny/val/',\n", - " pipeline=[\n", - " dict(type='DecordInit'),\n", - " dict(\n", - " type='SampleFrames',\n", - " clip_len=1,\n", - " frame_interval=1,\n", - " num_clips=8,\n", - " test_mode=True),\n", - " dict(type='DecordDecode'),\n", - " dict(type='Resize', scale=(-1, 256)),\n", - " dict(type='CenterCrop', crop_size=224),\n", - " dict(type='Flip', flip_ratio=0),\n", - " dict(\n", - " type='Normalize',\n", - " mean=[123.675, 116.28, 103.53],\n", - " std=[58.395, 57.12, 57.375],\n", - " to_bgr=False),\n", - " dict(type='FormatShape', input_format='NCHW'),\n", - " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", - " dict(type='ToTensor', keys=['imgs'])\n", - " ]),\n", - " test=dict(\n", - " type='VideoDataset',\n", - " ann_file='kinetics400_tiny/kinetics_tiny_val_video.txt',\n", - " data_prefix='kinetics400_tiny/val/',\n", - " pipeline=[\n", - " dict(type='DecordInit'),\n", - " dict(\n", - " type='SampleFrames',\n", - " clip_len=1,\n", - " frame_interval=1,\n", - " num_clips=25,\n", - " test_mode=True),\n", - " dict(type='DecordDecode'),\n", - " dict(type='Resize', scale=(-1, 256)),\n", - " dict(type='ThreeCrop', crop_size=256),\n", - " dict(type='Flip', flip_ratio=0),\n", - " dict(\n", - " type='Normalize',\n", - " mean=[123.675, 116.28, 103.53],\n", - " std=[58.395, 57.12, 57.375],\n", - " to_bgr=False),\n", - " dict(type='FormatShape', input_format='NCHW'),\n", - " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", - " dict(type='ToTensor', keys=['imgs'])\n", - " ]))\n", - "optimizer = dict(type='SGD', lr=7.8125e-05, momentum=0.9, weight_decay=0.0001)\n", - "optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2))\n", - "lr_config = dict(policy='step', step=[40, 80])\n", - "total_epochs = 30\n", - "checkpoint_config = dict(interval=10)\n", - "evaluation = dict(\n", - " interval=5, metrics=['top_k_accuracy', 'mean_class_accuracy'])\n", - "log_config = dict(interval=5, hooks=[dict(type='TextLoggerHook')])\n", - "dist_params = dict(backend='nccl')\n", - "log_level = 'INFO'\n", - "work_dir = './tutorial_exps'\n", - "load_from = './checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n", - "resume_from = None\n", - "workflow = [('train', 1)]\n", - "omnisource = False\n", - "seed = 0\n", - "gpu_ids = range(0, 1)\n", - "\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "tES-qnZ3k38Z" - }, - "source": [ - "### Train a new recognizer\n", - "\n", - "Finally, lets initialize the dataset and recognizer, then train a new recognizer!" 
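The cell that follows wires this up; stripped of the logging it produces, training is three build-and-run calls, all copied verbatim from that cell (`cfg` is the modified config assembled in the cells above):

```python
import os.path as osp

import mmcv

from mmaction.apis import train_model
from mmaction.datasets import build_dataset
from mmaction.models import build_model

# Build the dataset and the recognizer from the tuned config.
datasets = [build_dataset(cfg.data.train)]
model = build_model(cfg.model, train_cfg=cfg.get('train_cfg'), test_cfg=cfg.get('test_cfg'))

# Create the work dir (./tutorial_exps), then train on one GPU with validation.
mmcv.mkdir_or_exist(osp.abspath(cfg.work_dir))
train_model(model, datasets, cfg, distributed=False, validate=True)
```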
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "dDBWkdDRk6oz", - "colab": { - "base_uri": "https://localhost:8080/" + "cell_type": "code", + "metadata": { + "id": "HNZB7NoSabzj", + "outputId": "b2f9bd71-1490-44d3-81c6-5037d804f0b1", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "source": [ + "from mmaction.apis import inference_recognizer, init_recognizer\n", + "\n", + "# Choose to use a config and initialize the recognizer\n", + "config = 'configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py'\n", + "# Setup a checkpoint file to load\n", + "checkpoint = 'checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n", + "# Initialize the recognizer\n", + "model = init_recognizer(config, checkpoint, device='cuda:0')" + ], + "execution_count": 5, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Use load_from_local loader\n" + ], + "name": "stdout" + } + ] }, - "outputId": "85a52ef3-7b5c-4c52-8fef-00322a8c65e6" - }, - "source": [ - "import os.path as osp\n", - "\n", - "from mmaction.datasets import build_dataset\n", - "from mmaction.models import build_model\n", - "from mmaction.apis import train_model\n", - "\n", - "import mmcv\n", - "\n", - "# Build the dataset\n", - "datasets = [build_dataset(cfg.data.train)]\n", - "\n", - "# Build the recognizer\n", - "model = build_model(cfg.model, train_cfg=cfg.get('train_cfg'), test_cfg=cfg.get('test_cfg'))\n", - "\n", - "# Create work_dir\n", - "mmcv.mkdir_or_exist(osp.abspath(cfg.work_dir))\n", - "train_model(model, datasets, cfg, distributed=False, validate=True)" - ], - "execution_count": null, - "outputs": [ { - "output_type": "stream", - "text": [ - "2020-11-20 09:38:54,909 - mmaction - INFO - These parameters in pretrained checkpoint are not loaded: {'fc.weight', 'fc.bias'}\n", - "2020-11-20 09:38:54,960 - mmaction - INFO - load checkpoint from ./checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth\n", - "2020-11-20 09:38:55,052 - mmaction - WARNING - The model and loaded state dict do not match exactly\n", - "\n", - "size mismatch for cls_head.fc_cls.weight: copying a param with shape torch.Size([400, 2048]) from checkpoint, the shape in current model is torch.Size([2, 2048]).\n", - "size mismatch for cls_head.fc_cls.bias: copying a param with shape torch.Size([400]) from checkpoint, the shape in current model is torch.Size([2]).\n", - "2020-11-20 09:38:55,056 - mmaction - INFO - Start running, host: root@74e02ee9f123, work_dir: /content/mmaction2/tutorial_exps\n", - "2020-11-20 09:38:55,061 - mmaction - INFO - workflow: [('train', 1)], max: 30 epochs\n", - "2020-11-20 09:38:59,709 - mmaction - INFO - Epoch [1][5/15]\tlr: 7.813e-05, eta: 0:06:52, time: 0.927, data_time: 0.708, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6865, loss: 0.6865, grad_norm: 12.7663\n", - "2020-11-20 09:39:01,247 - mmaction - INFO - Epoch [1][10/15]\tlr: 7.813e-05, eta: 0:04:32, time: 0.309, data_time: 0.106, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.7171, loss: 0.7171, grad_norm: 13.7446\n", - "2020-11-20 09:39:02,112 - mmaction - INFO - Epoch [1][15/15]\tlr: 7.813e-05, eta: 0:03:24, time: 0.173, data_time: 0.001, memory: 2918, top1_acc: 0.2000, top5_acc: 1.0000, loss_cls: 0.8884, loss: 0.8884, grad_norm: 14.7140\n", - "2020-11-20 09:39:06,596 - mmaction - INFO - Epoch [2][5/15]\tlr: 7.813e-05, eta: 0:04:05, time: 0.876, data_time: 0.659, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6562, loss: 0.6562, grad_norm: 
10.5716\n", - "2020-11-20 09:39:08,104 - mmaction - INFO - Epoch [2][10/15]\tlr: 7.813e-05, eta: 0:03:39, time: 0.300, data_time: 0.081, memory: 2918, top1_acc: 0.2000, top5_acc: 1.0000, loss_cls: 0.7480, loss: 0.7480, grad_norm: 11.7083\n", - "2020-11-20 09:39:09,075 - mmaction - INFO - Epoch [2][15/15]\tlr: 7.813e-05, eta: 0:03:14, time: 0.195, data_time: 0.008, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6735, loss: 0.6735, grad_norm: 12.8046\n", - "2020-11-20 09:39:13,756 - mmaction - INFO - Epoch [3][5/15]\tlr: 7.813e-05, eta: 0:03:39, time: 0.914, data_time: 0.693, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7218, loss: 0.7218, grad_norm: 12.4893\n", - "2020-11-20 09:39:15,203 - mmaction - INFO - Epoch [3][10/15]\tlr: 7.813e-05, eta: 0:03:24, time: 0.291, data_time: 0.092, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6188, loss: 0.6188, grad_norm: 11.8106\n", - "2020-11-20 09:39:16,108 - mmaction - INFO - Epoch [3][15/15]\tlr: 7.813e-05, eta: 0:03:07, time: 0.181, data_time: 0.003, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7298, loss: 0.7298, grad_norm: 12.5043\n", - "2020-11-20 09:39:21,525 - mmaction - INFO - Epoch [4][5/15]\tlr: 7.813e-05, eta: 0:03:29, time: 1.062, data_time: 0.832, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6833, loss: 0.6833, grad_norm: 10.1046\n", - "2020-11-20 09:39:22,815 - mmaction - INFO - Epoch [4][10/15]\tlr: 7.813e-05, eta: 0:03:17, time: 0.258, data_time: 0.059, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6640, loss: 0.6640, grad_norm: 11.7589\n", - "2020-11-20 09:39:23,686 - mmaction - INFO - Epoch [4][15/15]\tlr: 7.813e-05, eta: 0:03:03, time: 0.174, data_time: 0.001, memory: 2918, top1_acc: 0.3000, top5_acc: 1.0000, loss_cls: 0.7372, loss: 0.7372, grad_norm: 13.6163\n", - "2020-11-20 09:39:28,818 - mmaction - INFO - Epoch [5][5/15]\tlr: 7.813e-05, eta: 0:03:17, time: 1.001, data_time: 0.767, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6309, loss: 0.6309, grad_norm: 11.1864\n", - "2020-11-20 09:39:29,915 - mmaction - INFO - Epoch [5][10/15]\tlr: 7.813e-05, eta: 0:03:06, time: 0.220, data_time: 0.005, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7178, loss: 0.7178, grad_norm: 12.4574\n", - "2020-11-20 09:39:31,063 - mmaction - INFO - Epoch [5][15/15]\tlr: 7.813e-05, eta: 0:02:57, time: 0.229, data_time: 0.052, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7094, loss: 0.7094, grad_norm: 12.4649\n" - ], - "name": "stderr" + "cell_type": "code", + "metadata": { + "id": "rEMsBnpHapAn" + }, + "source": [ + "# Use the recognizer to do inference\n", + "video = 'demo/demo.mp4'\n", + "label = 'demo/label_map_k400.txt'\n", + "results = inference_recognizer(model, video, label)" + ], + "execution_count": 6, + "outputs": [] }, { - "output_type": "stream", - "text": [ - "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 4.0 task/s, elapsed: 3s, ETA: 0s" - ], - "name": "stdout" + "cell_type": "code", + "metadata": { + "id": "NIyJXqfWathq", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "ca24528b-f99d-414a-fa50-456f6068b463" + }, + "source": [ + "# Let's show the results\n", + "for result in results:\n", + " print(f'{result[0]}: ', result[1])" + ], + "execution_count": 7, + "outputs": [ + { + "output_type": "stream", + "text": [ + "arm wrestling: 29.616438\n", + "rock scissors paper: 10.754841\n", + "shaking hands: 9.908401\n", + "clapping: 9.189913\n", + "massaging feet: 8.305307\n" + ], + 
"name": "stdout" + } + ] }, { - "output_type": "stream", - "text": [ - "2020-11-20 09:39:33,791 - mmaction - INFO - Evaluating top_k_accuracy ...\n", - "2020-11-20 09:39:33,793 - mmaction - INFO - \n", - "top1_acc\t0.7000\n", - "top5_acc\t1.0000\n", - "2020-11-20 09:39:33,794 - mmaction - INFO - Evaluating mean_class_accuracy ...\n", - "2020-11-20 09:39:33,797 - mmaction - INFO - \n", - "mean_acc\t0.7000\n", - "2020-11-20 09:39:33,798 - mmaction - INFO - Now best checkpoint is epoch_5.pth\n", - "2020-11-20 09:39:33,801 - mmaction - INFO - Epoch(val) [5][15]\ttop1_acc: 0.7000, top5_acc: 1.0000, mean_class_accuracy: 0.7000\n", - "2020-11-20 09:39:39,131 - mmaction - INFO - Epoch [6][5/15]\tlr: 7.813e-05, eta: 0:03:09, time: 1.065, data_time: 0.832, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6696, loss: 0.6696, grad_norm: 11.0206\n", - "2020-11-20 09:39:40,263 - mmaction - INFO - Epoch [6][10/15]\tlr: 7.813e-05, eta: 0:03:00, time: 0.226, data_time: 0.002, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6824, loss: 0.6824, grad_norm: 11.8897\n", - "2020-11-20 09:39:41,147 - mmaction - INFO - Epoch [6][15/15]\tlr: 7.813e-05, eta: 0:02:51, time: 0.177, data_time: 0.002, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6869, loss: 0.6869, grad_norm: 13.7294\n", - "2020-11-20 09:39:45,927 - mmaction - INFO - Epoch [7][5/15]\tlr: 7.813e-05, eta: 0:02:57, time: 0.934, data_time: 0.712, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6459, loss: 0.6459, grad_norm: 10.8083\n", - "2020-11-20 09:39:47,203 - mmaction - INFO - Epoch [7][10/15]\tlr: 7.813e-05, eta: 0:02:50, time: 0.255, data_time: 0.038, memory: 2918, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.6004, loss: 0.6004, grad_norm: 9.5414\n", - "2020-11-20 09:39:48,386 - mmaction - INFO - Epoch [7][15/15]\tlr: 7.813e-05, eta: 0:02:44, time: 0.237, data_time: 0.056, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6829, loss: 0.6829, grad_norm: 12.0934\n", - "2020-11-20 09:39:53,096 - mmaction - INFO - Epoch [8][5/15]\tlr: 7.813e-05, eta: 0:02:48, time: 0.920, data_time: 0.701, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6921, loss: 0.6921, grad_norm: 13.1963\n", - "2020-11-20 09:39:54,671 - mmaction - INFO - Epoch [8][10/15]\tlr: 7.813e-05, eta: 0:02:43, time: 0.314, data_time: 0.114, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6073, loss: 0.6073, grad_norm: 11.3379\n", - "2020-11-20 09:39:55,578 - mmaction - INFO - Epoch [8][15/15]\tlr: 7.813e-05, eta: 0:02:37, time: 0.183, data_time: 0.002, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7029, loss: 0.7029, grad_norm: 12.4497\n", - "2020-11-20 09:40:00,149 - mmaction - INFO - Epoch [9][5/15]\tlr: 7.813e-05, eta: 0:02:40, time: 0.893, data_time: 0.662, memory: 2918, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.5367, loss: 0.5367, grad_norm: 8.4173\n", - "2020-11-20 09:40:01,830 - mmaction - INFO - Epoch [9][10/15]\tlr: 7.813e-05, eta: 0:02:35, time: 0.336, data_time: 0.127, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6797, loss: 0.6797, grad_norm: 12.8048\n", - "2020-11-20 09:40:02,743 - mmaction - INFO - Epoch [9][15/15]\tlr: 7.813e-05, eta: 0:02:29, time: 0.182, data_time: 0.001, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5711, loss: 0.5711, grad_norm: 9.9660\n", - "2020-11-20 09:40:07,240 - mmaction - INFO - Epoch [10][5/15]\tlr: 7.813e-05, eta: 0:02:31, time: 0.878, data_time: 0.652, memory: 2918, top1_acc: 0.8000, top5_acc: 
1.0000, loss_cls: 0.5379, loss: 0.5379, grad_norm: 8.8850\n", - "2020-11-20 09:40:08,703 - mmaction - INFO - Epoch [10][10/15]\tlr: 7.813e-05, eta: 0:02:27, time: 0.293, data_time: 0.075, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.6644, loss: 0.6644, grad_norm: 12.6150\n", - "2020-11-20 09:40:09,886 - mmaction - INFO - Epoch [10][15/15]\tlr: 7.813e-05, eta: 0:02:22, time: 0.236, data_time: 0.047, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.7041, loss: 0.7041, grad_norm: 12.4941\n", - "2020-11-20 09:40:09,990 - mmaction - INFO - Saving checkpoint at 10 epochs\n" - ], - "name": "stderr" + "cell_type": "markdown", + "metadata": { + "id": "QuZG8kZ2fJ5d" + }, + "source": [ + "## Train a recognizer on customized dataset\n", + "\n", + "To train a new recognizer, there are usually three things to do:\n", + "1. Support a new dataset\n", + "2. Modify the config\n", + "3. Train a new recognizer" + ] }, { - "output_type": "stream", - "text": [ - "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 4.2 task/s, elapsed: 2s, ETA: 0s" - ], - "name": "stdout" + "cell_type": "markdown", + "metadata": { + "id": "neEFyxChfgiJ" + }, + "source": [ + "### Support a new dataset\n", + "\n", + "In this tutorial, we gives an example to convert the data into the format of existing datasets. Other methods and more advanced usages can be found in the [doc](/docs/tutorials/new_dataset.md)\n", + "\n", + "Firstly, let's download a tiny dataset obtained from [Kinetics-400](https://deepmind.com/research/open-source/open-source-datasets/kinetics/). We select 30 videos with their labels as train dataset and 10 videos with their labels as test dataset." + ] }, { - "output_type": "stream", - "text": [ - "2020-11-20 09:40:12,776 - mmaction - INFO - Evaluating top_k_accuracy ...\n", - "2020-11-20 09:40:12,778 - mmaction - INFO - \n", - "top1_acc\t0.9000\n", - "top5_acc\t1.0000\n", - "2020-11-20 09:40:12,778 - mmaction - INFO - Evaluating mean_class_accuracy ...\n", - "2020-11-20 09:40:12,782 - mmaction - INFO - \n", - "mean_acc\t0.9000\n", - "2020-11-20 09:40:12,783 - mmaction - INFO - Now best checkpoint is epoch_10.pth\n", - "2020-11-20 09:40:12,786 - mmaction - INFO - Epoch(val) [10][15]\ttop1_acc: 0.9000, top5_acc: 1.0000, mean_class_accuracy: 0.9000\n", - "2020-11-20 09:40:17,313 - mmaction - INFO - Epoch [11][5/15]\tlr: 7.813e-05, eta: 0:02:24, time: 0.904, data_time: 0.671, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5357, loss: 0.5357, grad_norm: 8.7986\n", - "2020-11-20 09:40:18,877 - mmaction - INFO - Epoch [11][10/15]\tlr: 7.813e-05, eta: 0:02:20, time: 0.313, data_time: 0.097, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5829, loss: 0.5829, grad_norm: 10.5930\n", - "2020-11-20 09:40:19,797 - mmaction - INFO - Epoch [11][15/15]\tlr: 7.813e-05, eta: 0:02:15, time: 0.184, data_time: 0.003, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6232, loss: 0.6232, grad_norm: 11.3676\n", - "2020-11-20 09:40:24,593 - mmaction - INFO - Epoch [12][5/15]\tlr: 7.813e-05, eta: 0:02:16, time: 0.936, data_time: 0.689, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.5701, loss: 0.5701, grad_norm: 10.0657\n", - "2020-11-20 09:40:25,850 - mmaction - INFO - Epoch [12][10/15]\tlr: 7.813e-05, eta: 0:02:12, time: 0.253, data_time: 0.045, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.5980, loss: 0.5980, grad_norm: 11.1706\n", - "2020-11-20 09:40:26,805 - mmaction - INFO - Epoch [12][15/15]\tlr: 7.813e-05, eta: 0:02:07, time: 0.191, 
data_time: 0.002, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6105, loss: 0.6105, grad_norm: 11.8521\n", - "2020-11-20 09:40:31,590 - mmaction - INFO - Epoch [13][5/15]\tlr: 7.813e-05, eta: 0:02:08, time: 0.935, data_time: 0.712, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.5439, loss: 0.5439, grad_norm: 10.4133\n", - "2020-11-20 09:40:32,717 - mmaction - INFO - Epoch [13][10/15]\tlr: 7.813e-05, eta: 0:02:04, time: 0.225, data_time: 0.002, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6338, loss: 0.6338, grad_norm: 11.3842\n", - "2020-11-20 09:40:33,827 - mmaction - INFO - Epoch [13][15/15]\tlr: 7.813e-05, eta: 0:02:00, time: 0.222, data_time: 0.036, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6328, loss: 0.6328, grad_norm: 12.0624\n", - "2020-11-20 09:40:38,599 - mmaction - INFO - Epoch [14][5/15]\tlr: 7.813e-05, eta: 0:02:00, time: 0.932, data_time: 0.695, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6514, loss: 0.6514, grad_norm: 12.4955\n", - "2020-11-20 09:40:40,073 - mmaction - INFO - Epoch [14][10/15]\tlr: 7.813e-05, eta: 0:01:57, time: 0.295, data_time: 0.085, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5536, loss: 0.5536, grad_norm: 10.5949\n", - "2020-11-20 09:40:40,980 - mmaction - INFO - Epoch [14][15/15]\tlr: 7.813e-05, eta: 0:01:53, time: 0.181, data_time: 0.001, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7072, loss: 0.7072, grad_norm: 12.8191\n", - "2020-11-20 09:40:46,296 - mmaction - INFO - Epoch [15][5/15]\tlr: 7.813e-05, eta: 0:01:53, time: 1.042, data_time: 0.819, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6580, loss: 0.6580, grad_norm: 12.7106\n", - "2020-11-20 09:40:47,307 - mmaction - INFO - Epoch [15][10/15]\tlr: 7.813e-05, eta: 0:01:50, time: 0.202, data_time: 0.001, memory: 2918, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.4524, loss: 0.4524, grad_norm: 8.1145\n", - "2020-11-20 09:40:48,205 - mmaction - INFO - Epoch [15][15/15]\tlr: 7.813e-05, eta: 0:01:46, time: 0.179, data_time: 0.002, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6318, loss: 0.6318, grad_norm: 11.7249\n" - ], - "name": "stderr" + "cell_type": "code", + "metadata": { + "id": "gjsUj9JzgUlJ", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "61c4704d-db81-4ca5-ed16-e2454dbdfe8e" + }, + "source": [ + "# download, decompress the data\n", + "!rm kinetics400_tiny.zip*\n", + "!rm -rf kinetics400_tiny\n", + "!wget https://download.openmmlab.com/mmaction/kinetics400_tiny.zip\n", + "!unzip kinetics400_tiny.zip > /dev/null" + ], + "execution_count": 8, + "outputs": [ + { + "output_type": "stream", + "text": [ + "rm: cannot remove 'kinetics400_tiny.zip*': No such file or directory\n", + "--2021-07-11 12:44:29-- https://download.openmmlab.com/mmaction/kinetics400_tiny.zip\n", + "Resolving download.openmmlab.com (download.openmmlab.com)... 47.88.36.78\n", + "Connecting to download.openmmlab.com (download.openmmlab.com)|47.88.36.78|:443... connected.\n", + "HTTP request sent, awaiting response... 
200 OK\n", + "Length: 18308682 (17M) [application/zip]\n", + "Saving to: ‘kinetics400_tiny.zip’\n", + "\n", + "kinetics400_tiny.zi 100%[===================>] 17.46M 10.7MB/s in 1.6s \n", + "\n", + "2021-07-11 12:44:31 (10.7 MB/s) - ‘kinetics400_tiny.zip’ saved [18308682/18308682]\n", + "\n" + ], + "name": "stdout" + } + ] }, { - "output_type": "stream", - "text": [ - "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 4.3 task/s, elapsed: 2s, ETA: 0s" - ], - "name": "stdout" + "cell_type": "code", + "metadata": { + "id": "AbZ-o7V6hNw4", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "b091909c-def2-49b5-88c2-01b00802b162" + }, + "source": [ + "# Check the directory structure of the tiny data\n", + "\n", + "# Install tree first\n", + "!apt-get -q install tree\n", + "!tree kinetics400_tiny" + ], + "execution_count": 9, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Reading package lists...\n", + "Building dependency tree...\n", + "Reading state information...\n", + "The following NEW packages will be installed:\n", + " tree\n", + "0 upgraded, 1 newly installed, 0 to remove and 39 not upgraded.\n", + "Need to get 40.7 kB of archives.\n", + "After this operation, 105 kB of additional disk space will be used.\n", + "Get:1 http://archive.ubuntu.com/ubuntu bionic/universe amd64 tree amd64 1.7.0-5 [40.7 kB]\n", + "Fetched 40.7 kB in 0s (88.7 kB/s)\n", + "Selecting previously unselected package tree.\n", + "(Reading database ... 160815 files and directories currently installed.)\n", + "Preparing to unpack .../tree_1.7.0-5_amd64.deb ...\n", + "Unpacking tree (1.7.0-5) ...\n", + "Setting up tree (1.7.0-5) ...\n", + "Processing triggers for man-db (2.8.3-2ubuntu0.1) ...\n", + "kinetics400_tiny\n", + "├── kinetics_tiny_train_video.txt\n", + "├── kinetics_tiny_val_video.txt\n", + "├── train\n", + "│   ├── 27_CSXByd3s.mp4\n", + "│   ├── 34XczvTaRiI.mp4\n", + "│   ├── A-wiliK50Zw.mp4\n", + "│   ├── D32_1gwq35E.mp4\n", + "│   ├── D92m0HsHjcQ.mp4\n", + "│   ├── DbX8mPslRXg.mp4\n", + "│   ├── FMlSTTpN3VY.mp4\n", + "│   ├── h10B9SVE-nk.mp4\n", + "│   ├── h2YqqUhnR34.mp4\n", + "│   ├── iRuyZSKhHRg.mp4\n", + "│   ├── IyfILH9lBRo.mp4\n", + "│   ├── kFC3KY2bOP8.mp4\n", + "│   ├── LvcFDgCAXQs.mp4\n", + "│   ├── O46YA8tI530.mp4\n", + "│   ├── oMrZaozOvdQ.mp4\n", + "│   ├── oXy-e_P_cAI.mp4\n", + "│   ├── P5M-hAts7MQ.mp4\n", + "│   ├── phDqGd0NKoo.mp4\n", + "│   ├── PnOe3GZRVX8.mp4\n", + "│   ├── R8HXQkdgKWA.mp4\n", + "│   ├── RqnKtCEoEcA.mp4\n", + "│   ├── soEcZZsBmDs.mp4\n", + "│   ├── TkkZPZHbAKA.mp4\n", + "│   ├── T_TMNGzVrDk.mp4\n", + "│   ├── WaS0qwP46Us.mp4\n", + "│   ├── Wh_YPQdH1Zg.mp4\n", + "│   ├── WWP5HZJsg-o.mp4\n", + "│   ├── xGY2dP0YUjA.mp4\n", + "│   ├── yLC9CtWU5ws.mp4\n", + "│   └── ZQV4U2KQ370.mp4\n", + "└── val\n", + " ├── 0pVGiAU6XEA.mp4\n", + " ├── AQrbRSnRt8M.mp4\n", + " ├── b6Q_b7vgc7Q.mp4\n", + " ├── ddvJ6-faICE.mp4\n", + " ├── IcLztCtvhb8.mp4\n", + " ├── ik4BW3-SCts.mp4\n", + " ├── jqRrH30V0k4.mp4\n", + " ├── SU_x2LQqSLs.mp4\n", + " ├── u4Rm6srmIS8.mp4\n", + " └── y5Iu7XkTqV0.mp4\n", + "\n", + "2 directories, 42 files\n" + ], + "name": "stdout" + } + ] }, { - "output_type": "stream", - "text": [ - "2020-11-20 09:40:50,731 - mmaction - INFO - Evaluating top_k_accuracy ...\n", - "2020-11-20 09:40:50,732 - mmaction - INFO - \n", - "top1_acc\t0.8000\n", - "top5_acc\t1.0000\n", - "2020-11-20 09:40:50,733 - mmaction - INFO - Evaluating mean_class_accuracy ...\n", - "2020-11-20 09:40:50,739 - mmaction - INFO - \n", - "mean_acc\t0.8000\n", - "2020-11-20 09:40:50,741 - 
mmaction - INFO - Epoch(val) [15][15]\ttop1_acc: 0.8000, top5_acc: 1.0000, mean_class_accuracy: 0.8000\n", - "2020-11-20 09:40:55,222 - mmaction - INFO - Epoch [16][5/15]\tlr: 7.813e-05, eta: 0:01:45, time: 0.895, data_time: 0.658, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5428, loss: 0.5428, grad_norm: 10.0143\n", - "2020-11-20 09:40:56,773 - mmaction - INFO - Epoch [16][10/15]\tlr: 7.813e-05, eta: 0:01:42, time: 0.310, data_time: 0.102, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6099, loss: 0.6099, grad_norm: 11.1996\n", - "2020-11-20 09:40:57,703 - mmaction - INFO - Epoch [16][15/15]\tlr: 7.813e-05, eta: 0:01:38, time: 0.186, data_time: 0.002, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.5309, loss: 0.5309, grad_norm: 9.9062\n", - "2020-11-20 09:41:02,293 - mmaction - INFO - Epoch [17][5/15]\tlr: 7.813e-05, eta: 0:01:38, time: 0.897, data_time: 0.665, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6303, loss: 0.6303, grad_norm: 12.2689\n", - "2020-11-20 09:41:03,790 - mmaction - INFO - Epoch [17][10/15]\tlr: 7.813e-05, eta: 0:01:35, time: 0.299, data_time: 0.094, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.6635, loss: 0.6635, grad_norm: 13.0193\n", - "2020-11-20 09:41:04,739 - mmaction - INFO - Epoch [17][15/15]\tlr: 7.813e-05, eta: 0:01:31, time: 0.190, data_time: 0.011, memory: 2918, top1_acc: 1.0000, top5_acc: 1.0000, loss_cls: 0.4261, loss: 0.4261, grad_norm: 8.0281\n", - "2020-11-20 09:41:09,499 - mmaction - INFO - Epoch [18][5/15]\tlr: 7.813e-05, eta: 0:01:31, time: 0.929, data_time: 0.694, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5793, loss: 0.5793, grad_norm: 11.0233\n", - "2020-11-20 09:41:11,013 - mmaction - INFO - Epoch [18][10/15]\tlr: 7.813e-05, eta: 0:01:28, time: 0.303, data_time: 0.089, memory: 2918, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.4537, loss: 0.4537, grad_norm: 8.3300\n", - "2020-11-20 09:41:11,913 - mmaction - INFO - Epoch [18][15/15]\tlr: 7.813e-05, eta: 0:01:24, time: 0.180, data_time: 0.002, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.4951, loss: 0.4951, grad_norm: 9.5771\n", - "2020-11-20 09:41:16,921 - mmaction - INFO - Epoch [19][5/15]\tlr: 7.813e-05, eta: 0:01:24, time: 0.979, data_time: 0.743, memory: 2918, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.5532, loss: 0.5532, grad_norm: 10.2661\n", - "2020-11-20 09:41:18,235 - mmaction - INFO - Epoch [19][10/15]\tlr: 7.813e-05, eta: 0:01:20, time: 0.263, data_time: 0.059, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6117, loss: 0.6117, grad_norm: 11.5430\n", - "2020-11-20 09:41:19,137 - mmaction - INFO - Epoch [19][15/15]\tlr: 7.813e-05, eta: 0:01:17, time: 0.180, data_time: 0.001, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.4127, loss: 0.4127, grad_norm: 7.5687\n", - "2020-11-20 09:41:23,780 - mmaction - INFO - Epoch [20][5/15]\tlr: 7.813e-05, eta: 0:01:16, time: 0.904, data_time: 0.675, memory: 2918, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.4327, loss: 0.4327, grad_norm: 8.4868\n", - "2020-11-20 09:41:25,236 - mmaction - INFO - Epoch [20][10/15]\tlr: 7.813e-05, eta: 0:01:13, time: 0.293, data_time: 0.078, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.4623, loss: 0.4623, grad_norm: 8.9795\n", - "2020-11-20 09:41:26,198 - mmaction - INFO - Epoch [20][15/15]\tlr: 7.813e-05, eta: 0:01:10, time: 0.194, data_time: 0.002, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.5917, loss: 0.5917, grad_norm: 
11.3897\n",
-        "2020-11-20 09:41:26,304 - mmaction - INFO - Saving checkpoint at 20 epochs\n"
-      ],
-      "name": "stderr"
+      "cell_type": "code",
+      "metadata": {
+        "id": "fTdi6dI0hY3g",
+        "colab": {
+          "base_uri": "https://localhost:8080/"
+        },
+        "outputId": "ffda0997-8d77-431a-d66e-2f273e80c756"
+      },
+      "source": [
+        "# After downloading the data, we need to check the annotation format\n",
+        "!cat kinetics400_tiny/kinetics_tiny_train_video.txt"
+      ],
+      "execution_count": 10,
+      "outputs": [
+        {
+          "output_type": "stream",
+          "text": [
+            "D32_1gwq35E.mp4 0\n",
+            "iRuyZSKhHRg.mp4 1\n",
+            "oXy-e_P_cAI.mp4 0\n",
+            "34XczvTaRiI.mp4 1\n",
+            "h2YqqUhnR34.mp4 0\n",
+            "O46YA8tI530.mp4 0\n",
+            "kFC3KY2bOP8.mp4 1\n",
+            "WWP5HZJsg-o.mp4 1\n",
+            "phDqGd0NKoo.mp4 1\n",
+            "yLC9CtWU5ws.mp4 0\n",
+            "27_CSXByd3s.mp4 1\n",
+            "IyfILH9lBRo.mp4 1\n",
+            "T_TMNGzVrDk.mp4 1\n",
+            "TkkZPZHbAKA.mp4 0\n",
+            "PnOe3GZRVX8.mp4 1\n",
+            "soEcZZsBmDs.mp4 1\n",
+            "FMlSTTpN3VY.mp4 1\n",
+            "WaS0qwP46Us.mp4 0\n",
+            "A-wiliK50Zw.mp4 1\n",
+            "oMrZaozOvdQ.mp4 1\n",
+            "ZQV4U2KQ370.mp4 0\n",
+            "DbX8mPslRXg.mp4 1\n",
+            "h10B9SVE-nk.mp4 1\n",
+            "P5M-hAts7MQ.mp4 0\n",
+            "R8HXQkdgKWA.mp4 0\n",
+            "D92m0HsHjcQ.mp4 0\n",
+            "RqnKtCEoEcA.mp4 0\n",
+            "LvcFDgCAXQs.mp4 0\n",
+            "xGY2dP0YUjA.mp4 0\n",
+            "Wh_YPQdH1Zg.mp4 0\n"
+          ],
+          "name": "stdout"
+        }
+      ]
    },
    {
-      "output_type": "stream",
-      "text": [
-        "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 4.3 task/s, elapsed: 2s, ETA: 0s"
-      ],
-      "name": "stdout"
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "0bq0mxmEi29H"
+      },
+      "source": [
+        "According to the format defined in [`VideoDataset`](./datasets/video_dataset.py), each line indicates a sample video with the filepath and label, which are separated by a whitespace."
+      ]
    },
    {
-      "output_type": "stream",
-      "text": [
-        "2020-11-20 09:41:29,078 - mmaction - INFO - Evaluating top_k_accuracy ...\n",
-        "2020-11-20 09:41:29,079 - mmaction - INFO - \n",
-        "top1_acc\t0.9000\n",
-        "top5_acc\t1.0000\n",
-        "2020-11-20 09:41:29,080 - mmaction - INFO - Evaluating mean_class_accuracy ...\n",
-        "2020-11-20 09:41:29,084 - mmaction - INFO - \n",
-        "mean_acc\t0.9000\n",
-        "2020-11-20 09:41:29,086 - mmaction - INFO - Epoch(val) [20][15]\ttop1_acc: 0.9000, top5_acc: 1.0000, mean_class_accuracy: 0.9000\n",
-        "2020-11-20 09:41:33,520 - mmaction - INFO - Epoch [21][5/15]\tlr: 7.813e-05, eta: 0:01:09, time: 0.886, data_time: 0.653, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.4515, loss: 0.4515, grad_norm: 8.3883\n",
-        "2020-11-20 09:41:35,097 - mmaction - INFO - Epoch [21][10/15]\tlr: 7.813e-05, eta: 0:01:06, time: 0.316, data_time: 0.095, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.4543, loss: 0.4543, grad_norm: 9.1107\n",
-        "2020-11-20 09:41:36,032 - mmaction - INFO - Epoch [21][15/15]\tlr: 7.813e-05, eta: 0:01:03, time: 0.187, data_time: 0.001, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5324, loss: 0.5324, grad_norm: 10.5992\n",
-        "2020-11-20 09:41:40,665 - mmaction - INFO - Epoch [22][5/15]\tlr: 7.813e-05, eta: 0:01:02, time: 0.905, data_time: 0.671, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.5645, loss: 0.5645, grad_norm: 10.7946\n",
-        "2020-11-20 09:41:42,016 - mmaction - INFO - Epoch [22][10/15]\tlr: 7.813e-05, eta: 0:00:59, time: 0.270, data_time: 0.061, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5059, loss: 0.5059, grad_norm: 9.9750\n",
-        "2020-11-20 09:41:43,160 - mmaction - INFO - Epoch [22][15/15]\tlr: 7.813e-05, eta: 0:00:56, time: 0.229, data_time: 0.049, memory: 2918, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.4390, loss: 0.4390, grad_norm: 8.7574\n",
-        "2020-11-20 09:41:47,850 - mmaction - INFO - Epoch [23][5/15]\tlr: 7.813e-05, eta: 0:00:54, time: 0.916, data_time: 0.686, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6279, loss: 0.6279, grad_norm: 12.5599\n",
-        "2020-11-20 09:41:49,318 - mmaction - INFO - Epoch [23][10/15]\tlr: 7.813e-05, eta: 0:00:52, time: 0.293, data_time: 0.080, memory: 2918, top1_acc: 1.0000, top5_acc: 1.0000, loss_cls: 0.3282, loss: 0.3282, grad_norm: 6.4802\n",
-        "2020-11-20 09:41:50,252 - mmaction - INFO - Epoch [23][15/15]\tlr: 7.813e-05, eta: 0:00:49, time: 0.188, data_time: 0.003, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.4970, loss: 0.4970, grad_norm: 9.2502\n",
-        "2020-11-20 09:41:55,208 - mmaction - INFO - Epoch [24][5/15]\tlr: 7.813e-05, eta: 0:00:47, time: 0.970, data_time: 0.731, memory: 2918, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.3843, loss: 0.3843, grad_norm: 7.6723\n",
-        "2020-11-20 09:41:56,257 - mmaction - INFO - Epoch [24][10/15]\tlr: 7.813e-05, eta: 0:00:44, time: 0.210, data_time: 0.006, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.5481, loss: 0.5481, grad_norm: 11.8595\n",
-        "2020-11-20 09:41:57,251 - mmaction - INFO - Epoch [24][15/15]\tlr: 7.813e-05, eta: 0:00:42, time: 0.199, data_time: 0.012, memory: 2918, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.3737, loss: 0.3737, grad_norm: 7.4704\n",
-        "2020-11-20 09:42:01,669 - mmaction - INFO - Epoch [25][5/15]\tlr: 7.813e-05, eta: 0:00:40, time: 0.863, data_time: 0.635, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.5808, loss: 0.5808, grad_norm: 11.7501\n",
-        "2020-11-20 09:42:03,132 - mmaction - INFO - Epoch [25][10/15]\tlr: 7.813e-05, eta: 0:00:37, time: 0.292, data_time: 0.075, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.5970, loss: 0.5970, grad_norm: 11.7784\n",
-        "2020-11-20 09:42:04,287 - mmaction - INFO - Epoch [25][15/15]\tlr: 7.813e-05, eta: 0:00:35, time: 0.232, data_time: 0.042, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.4509, loss: 0.4509, grad_norm: 9.2979\n"
-      ],
-      "name": "stderr"
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "Ht_DGJA9jQar"
+      },
+      "source": [
+        "### Modify the config\n",
+        "\n",
+        "In the next step, we need to modify the config for training.\n",
+        "To accelerate the process, we finetune a recognizer using a pre-trained recognizer."
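The whitespace-separated list format described a few cells above is simple enough to generate yourself. The sketch below is not part of this patch; it is a minimal illustration, and the directory layout and `label_map` are hypothetical.

```python
import os

def write_video_list(video_dir, label_map, out_file):
    """Write a VideoDataset-style file list: one '<filename> <label>' per line.

    `video_dir` and `label_map` (a hypothetical dict mapping each video
    filename to an integer class label) must match your own data layout.
    """
    with open(out_file, 'w') as f:
        for name in sorted(os.listdir(video_dir)):
            if name.endswith('.mp4'):
                f.write(f'{name} {label_map[name]}\n')

# Example (hypothetical labels):
# write_video_list('kinetics400_tiny/train',
#                  {'D32_1gwq35E.mp4': 0, 'iRuyZSKhHRg.mp4': 1},
#                  'kinetics400_tiny/kinetics_tiny_train_video.txt')
```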
+      ]
    },
    {
-      "output_type": "stream",
-      "text": [
-        "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 4.4 task/s, elapsed: 2s, ETA: 0s"
-      ],
-      "name": "stdout"
+      "cell_type": "code",
+      "metadata": {
+        "id": "LjCcmCKOjktc"
+      },
+      "source": [
+        "from mmcv import Config\n",
+        "cfg = Config.fromfile('./configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py')"
+      ],
+      "execution_count": 27,
+      "outputs": []
    },
    {
-      "output_type": "stream",
-      "text": [
-        "2020-11-20 09:42:06,788 - mmaction - INFO - Evaluating top_k_accuracy ...\n",
-        "2020-11-20 09:42:06,790 - mmaction - INFO - \n",
-        "top1_acc\t1.0000\n",
-        "top5_acc\t1.0000\n",
-        "2020-11-20 09:42:06,791 - mmaction - INFO - Evaluating mean_class_accuracy ...\n",
-        "2020-11-20 09:42:06,796 - mmaction - INFO - \n",
-        "mean_acc\t1.0000\n",
-        "2020-11-20 09:42:06,797 - mmaction - INFO - Now best checkpoint is epoch_25.pth\n",
-        "2020-11-20 09:42:06,799 - mmaction - INFO - Epoch(val) [25][15]\ttop1_acc: 1.0000, top5_acc: 1.0000, mean_class_accuracy: 1.0000\n",
-        "2020-11-20 09:42:11,579 - mmaction - INFO - Epoch [26][5/15]\tlr: 7.813e-05, eta: 0:00:33, time: 0.954, data_time: 0.725, memory: 2918, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.3759, loss: 0.3759, grad_norm: 7.9644\n",
-        "2020-11-20 09:42:13,112 - mmaction - INFO - Epoch [26][10/15]\tlr: 7.813e-05, eta: 0:00:30, time: 0.307, data_time: 0.114, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.5393, loss: 0.5393, grad_norm: 10.7580\n",
-        "2020-11-20 09:42:14,007 - mmaction - INFO - Epoch [26][15/15]\tlr: 7.813e-05, eta: 0:00:28, time: 0.179, data_time: 0.002, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.4193, loss: 0.4193, grad_norm: 8.5498\n",
-        "2020-11-20 09:42:18,684 - mmaction - INFO - Epoch [27][5/15]\tlr: 7.813e-05, eta: 0:00:26, time: 0.914, data_time: 0.690, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.5922, loss: 0.5922, grad_norm: 11.3893\n",
-        "2020-11-20 09:42:19,933 - mmaction - INFO - Epoch [27][10/15]\tlr: 7.813e-05, eta: 0:00:23, time: 0.249, data_time: 0.031, memory: 2918, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.4481, loss: 0.4481, grad_norm: 9.5221\n",
-        "2020-11-20 09:42:21,051 - mmaction - INFO - Epoch [27][15/15]\tlr: 7.813e-05, eta: 0:00:21, time: 0.224, data_time: 0.040, memory: 2918, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.3864, loss: 0.3864, grad_norm: 8.2003\n",
-        "2020-11-20 09:42:25,678 - mmaction - INFO - Epoch [28][5/15]\tlr: 7.813e-05, eta: 0:00:18, time: 0.904, data_time: 0.668, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.5439, loss: 0.5439, grad_norm: 11.1966\n",
-        "2020-11-20 09:42:26,792 - mmaction - INFO - Epoch [28][10/15]\tlr: 7.813e-05, eta: 0:00:16, time: 0.221, data_time: 0.003, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.4840, loss: 0.4840, grad_norm: 9.9172\n",
-        "2020-11-20 09:42:27,960 - mmaction - INFO - Epoch [28][15/15]\tlr: 7.813e-05, eta: 0:00:14, time: 0.235, data_time: 0.044, memory: 2918, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.3131, loss: 0.3131, grad_norm: 6.3335\n",
-        "2020-11-20 09:42:32,811 - mmaction - INFO - Epoch [29][5/15]\tlr: 7.813e-05, eta: 0:00:11, time: 0.948, data_time: 0.722, memory: 2918, top1_acc: 1.0000, top5_acc: 1.0000, loss_cls: 0.3146, loss: 0.3146, grad_norm: 6.6830\n",
-        "2020-11-20 09:42:34,156 - mmaction - INFO - Epoch [29][10/15]\tlr: 7.813e-05, eta: 0:00:09, time: 0.269, data_time: 0.062, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.4965, loss: 0.4965, grad_norm: 10.3011\n",
-        "2020-11-20 09:42:35,103 - mmaction - INFO - Epoch [29][15/15]\tlr: 7.813e-05, eta: 0:00:07, time: 0.189, data_time: 0.013, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.5301, loss: 0.5301, grad_norm: 10.5820\n",
-        "2020-11-20 09:42:39,945 - mmaction - INFO - Epoch [30][5/15]\tlr: 7.813e-05, eta: 0:00:04, time: 0.945, data_time: 0.720, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6774, loss: 0.6774, grad_norm: 13.5632\n",
-        "2020-11-20 09:42:41,213 - mmaction - INFO - Epoch [30][10/15]\tlr: 7.813e-05, eta: 0:00:02, time: 0.255, data_time: 0.037, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6271, loss: 0.6271, grad_norm: 12.5629\n",
-        "2020-11-20 09:42:42,162 - mmaction - INFO - Epoch [30][15/15]\tlr: 7.813e-05, eta: 0:00:00, time: 0.190, data_time: 0.006, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.5681, loss: 0.5681, grad_norm: 11.7026\n",
-        "2020-11-20 09:42:42,265 - mmaction - INFO - Saving checkpoint at 30 epochs\n"
-      ],
-      "name": "stderr"
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "tc8YhFFGjp3e"
+      },
+      "source": [
+        "Given a config that trains a TSN model on the Kinetics400-full dataset, we need to modify some values to use it for training TSN on the Kinetics400-tiny dataset.\n"
+      ]
    },
    {
-      "output_type": "stream",
-      "text": [
-        "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 4.2 task/s, elapsed: 2s, ETA: 0s"
-      ],
-      "name": "stdout"
+      "cell_type": "code",
+      "metadata": {
+        "id": "tlhu9byjjt-K",
+        "colab": {
+          "base_uri": "https://localhost:8080/"
+        },
+        "outputId": "3b9a3c49-ace0-41d3-dd15-d6c8579755f8"
+      },
+      "source": [
+        "from mmcv.runner import set_random_seed\n",
+        "\n",
+        "# Modify dataset type and path\n",
+        "cfg.dataset_type = 'VideoDataset'\n",
+        "cfg.data_root = 'kinetics400_tiny/train/'\n",
+        "cfg.data_root_val = 'kinetics400_tiny/val/'\n",
+        "cfg.ann_file_train = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n",
+        "cfg.ann_file_val = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n",
+        "cfg.ann_file_test = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n",
+        "\n",
+        "cfg.data.test.type = 'VideoDataset'\n",
+        "cfg.data.test.ann_file = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n",
+        "cfg.data.test.data_prefix = 'kinetics400_tiny/val/'\n",
+        "\n",
+        "cfg.data.train.type = 'VideoDataset'\n",
+        "cfg.data.train.ann_file = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n",
+        "cfg.data.train.data_prefix = 'kinetics400_tiny/train/'\n",
+        "\n",
+        "cfg.data.val.type = 'VideoDataset'\n",
+        "cfg.data.val.ann_file = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n",
+        "cfg.data.val.data_prefix = 'kinetics400_tiny/val/'\n",
+        "\n",
+        "# The flag is used to determine whether it is omnisource training\n",
+        "cfg.setdefault('omnisource', False)\n",
+        "# Modify num classes of the model in cls_head\n",
+        "cfg.model.cls_head.num_classes = 2\n",
+        "# We can use the pre-trained TSN model\n",
+        "cfg.load_from = './checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n",
+        "\n",
+        "# Set up working dir to save files and logs.\n",
+        "cfg.work_dir = './tutorial_exps'\n",
+        "\n",
+        "# The original learning rate (LR) is set for 8-GPU training.\n",
+        "# We divide it by 8 since we only use one GPU, and by another 16\n",
+        "# because videos_per_gpu (the per-GPU batch size) is also divided by 16.\n",
+        "cfg.data.videos_per_gpu = cfg.data.videos_per_gpu // 16\n",
+        "cfg.optimizer.lr = cfg.optimizer.lr / 8 / 16\n",
+        "cfg.total_epochs = 10\n",
+        "\n",
+        "# We can set the checkpoint saving interval to reduce the storage cost\n",
+        "cfg.checkpoint_config.interval = 5\n",
+        "# We can set the log print interval to reduce the times of printing logs\n",
+        "cfg.log_config.interval = 5\n",
+        "\n",
+        "# Set a seed so that the results are more reproducible\n",
+        "cfg.seed = 0\n",
+        "set_random_seed(0, deterministic=False)\n",
+        "cfg.gpu_ids = range(1)\n",
+        "\n",
+        "# Save the best\n",
+        "cfg.evaluation.save_best='auto'\n",
+        "\n",
+        "\n",
+        "# We can initialize the logger for training and have a look\n",
+        "# at the final config used for training\n",
+        "print(f'Config:\\n{cfg.pretty_text}')\n"
+      ],
+      "execution_count": 28,
+      "outputs": [
+        {
+          "output_type": "stream",
+          "text": [
+            "Config:\n",
+            "model = dict(\n",
+            "    type='Recognizer2D',\n",
+            "    backbone=dict(\n",
+            "        type='ResNet',\n",
+            "        pretrained='torchvision://resnet50',\n",
+            "        depth=50,\n",
+            "        norm_eval=False),\n",
+            "    cls_head=dict(\n",
+            "        type='TSNHead',\n",
+            "        num_classes=2,\n",
+            "        in_channels=2048,\n",
+            "        spatial_type='avg',\n",
+            "        consensus=dict(type='AvgConsensus', dim=1),\n",
+            "        dropout_ratio=0.4,\n",
+            "        init_std=0.01),\n",
+            "    train_cfg=None,\n",
+            "    test_cfg=dict(average_clips=None))\n",
+            "optimizer = dict(type='SGD', lr=7.8125e-05, momentum=0.9, weight_decay=0.0001)\n",
+            "optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2))\n",
+            "lr_config = dict(policy='step', step=[40, 80])\n",
+            "total_epochs = 10\n",
+            "checkpoint_config = dict(interval=5)\n",
+            "log_config = dict(interval=5, hooks=[dict(type='TextLoggerHook')])\n",
+            "dist_params = dict(backend='nccl')\n",
+            "log_level = 'INFO'\n",
+            "load_from = './checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n",
+            "resume_from = None\n",
+            "workflow = [('train', 1)]\n",
+            "dataset_type = 'VideoDataset'\n",
+            "data_root = 'kinetics400_tiny/train/'\n",
+            "data_root_val = 'kinetics400_tiny/val/'\n",
+            "ann_file_train = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n",
+            "ann_file_val = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n",
+            "ann_file_test = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n",
+            "img_norm_cfg = dict(\n",
+            "    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False)\n",
+            "train_pipeline = [\n",
+            "    dict(type='DecordInit'),\n",
+            "    dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8),\n",
+            "    dict(type='DecordDecode'),\n",
+            "    dict(\n",
+            "        type='MultiScaleCrop',\n",
+            "        input_size=224,\n",
+            "        scales=(1, 0.875, 0.75, 0.66),\n",
+            "        random_crop=False,\n",
+            "        max_wh_scale_gap=1),\n",
+            "    dict(type='Resize', scale=(224, 224), keep_ratio=False),\n",
+            "    dict(type='Flip', flip_ratio=0.5),\n",
+            "    dict(\n",
+            "        type='Normalize',\n",
+            "        mean=[123.675, 116.28, 103.53],\n",
+            "        std=[58.395, 57.12, 57.375],\n",
+            "        to_bgr=False),\n",
+            "    dict(type='FormatShape', input_format='NCHW'),\n",
+            "    dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n",
+            "    dict(type='ToTensor', keys=['imgs', 'label'])\n",
+            "]\n",
+            "val_pipeline = [\n",
+            "    dict(type='DecordInit'),\n",
+            "    dict(\n",
+            "        type='SampleFrames',\n",
+            "        clip_len=1,\n",
+            "        frame_interval=1,\n",
+            "        num_clips=8,\n",
+            "        test_mode=True),\n",
+            "    dict(type='DecordDecode'),\n",
+            "    dict(type='Resize', scale=(-1, 256)),\n",
+            "    dict(type='CenterCrop', crop_size=224),\n",
+            "    dict(\n",
+            "        type='Normalize',\n",
+            "        mean=[123.675, 116.28, 103.53],\n",
+            "        std=[58.395, 57.12, 57.375],\n",
+            "        to_bgr=False),\n",
+            "    dict(type='FormatShape', input_format='NCHW'),\n",
+            "    dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n",
+            "    dict(type='ToTensor', keys=['imgs'])\n",
+            "]\n",
+            "test_pipeline = [\n",
+            "
dict(type='DecordInit'),\n", + " dict(\n", + " type='SampleFrames',\n", + " clip_len=1,\n", + " frame_interval=1,\n", + " num_clips=25,\n", + " test_mode=True),\n", + " dict(type='DecordDecode'),\n", + " dict(type='Resize', scale=(-1, 256)),\n", + " dict(type='ThreeCrop', crop_size=256),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[123.675, 116.28, 103.53],\n", + " std=[58.395, 57.12, 57.375],\n", + " to_bgr=False),\n", + " dict(type='FormatShape', input_format='NCHW'),\n", + " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", + " dict(type='ToTensor', keys=['imgs'])\n", + "]\n", + "data = dict(\n", + " videos_per_gpu=2,\n", + " workers_per_gpu=4,\n", + " train=dict(\n", + " type='VideoDataset',\n", + " ann_file='kinetics400_tiny/kinetics_tiny_train_video.txt',\n", + " data_prefix='kinetics400_tiny/train/',\n", + " pipeline=[\n", + " dict(type='DecordInit'),\n", + " dict(\n", + " type='SampleFrames', clip_len=1, frame_interval=1,\n", + " num_clips=8),\n", + " dict(type='DecordDecode'),\n", + " dict(\n", + " type='MultiScaleCrop',\n", + " input_size=224,\n", + " scales=(1, 0.875, 0.75, 0.66),\n", + " random_crop=False,\n", + " max_wh_scale_gap=1),\n", + " dict(type='Resize', scale=(224, 224), keep_ratio=False),\n", + " dict(type='Flip', flip_ratio=0.5),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[123.675, 116.28, 103.53],\n", + " std=[58.395, 57.12, 57.375],\n", + " to_bgr=False),\n", + " dict(type='FormatShape', input_format='NCHW'),\n", + " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", + " dict(type='ToTensor', keys=['imgs', 'label'])\n", + " ]),\n", + " val=dict(\n", + " type='VideoDataset',\n", + " ann_file='kinetics400_tiny/kinetics_tiny_val_video.txt',\n", + " data_prefix='kinetics400_tiny/val/',\n", + " pipeline=[\n", + " dict(type='DecordInit'),\n", + " dict(\n", + " type='SampleFrames',\n", + " clip_len=1,\n", + " frame_interval=1,\n", + " num_clips=8,\n", + " test_mode=True),\n", + " dict(type='DecordDecode'),\n", + " dict(type='Resize', scale=(-1, 256)),\n", + " dict(type='CenterCrop', crop_size=224),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[123.675, 116.28, 103.53],\n", + " std=[58.395, 57.12, 57.375],\n", + " to_bgr=False),\n", + " dict(type='FormatShape', input_format='NCHW'),\n", + " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", + " dict(type='ToTensor', keys=['imgs'])\n", + " ]),\n", + " test=dict(\n", + " type='VideoDataset',\n", + " ann_file='kinetics400_tiny/kinetics_tiny_val_video.txt',\n", + " data_prefix='kinetics400_tiny/val/',\n", + " pipeline=[\n", + " dict(type='DecordInit'),\n", + " dict(\n", + " type='SampleFrames',\n", + " clip_len=1,\n", + " frame_interval=1,\n", + " num_clips=25,\n", + " test_mode=True),\n", + " dict(type='DecordDecode'),\n", + " dict(type='Resize', scale=(-1, 256)),\n", + " dict(type='ThreeCrop', crop_size=256),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[123.675, 116.28, 103.53],\n", + " std=[58.395, 57.12, 57.375],\n", + " to_bgr=False),\n", + " dict(type='FormatShape', input_format='NCHW'),\n", + " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", + " dict(type='ToTensor', keys=['imgs'])\n", + " ]))\n", + "evaluation = dict(\n", + " interval=5,\n", + " metrics=['top_k_accuracy', 'mean_class_accuracy'],\n", + " save_best='auto')\n", + "work_dir = './tutorial_exps'\n", + "omnisource = False\n", + "seed = 0\n", + "gpu_ids = range(0, 1)\n", + "\n" + ], + "name": "stdout" + } + ] }, { - "output_type": "stream", - "text": [ - "2020-11-20 
09:42:45,097 - mmaction - INFO - Evaluating top_k_accuracy ...\n", - "2020-11-20 09:42:45,098 - mmaction - INFO - \n", - "top1_acc\t1.0000\n", - "top5_acc\t1.0000\n", - "2020-11-20 09:42:45,099 - mmaction - INFO - Evaluating mean_class_accuracy ...\n", - "2020-11-20 09:42:45,103 - mmaction - INFO - \n", - "mean_acc\t1.0000\n", - "2020-11-20 09:42:45,104 - mmaction - INFO - Epoch(val) [30][15]\ttop1_acc: 1.0000, top5_acc: 1.0000, mean_class_accuracy: 1.0000\n" - ], - "name": "stderr" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "zdSd7oTLlxIf" - }, - "source": [ - "### Understand the log\n", - "From the log, we can have a basic understanding the training process and know how well the recognizer is trained.\n", - "\n", - "Firstly, the ResNet-50 backbone pre-trained on ImageNet is loaded, this is a common practice since training from scratch is more cost. The log shows that all the weights of the ResNet-50 backbone are loaded except the `fc.bias` and `fc.weight`.\n", - "\n", - "Second, since the dataset we are using is small, we loaded a TSN model and finetune it for action recognition.\n", - "The original TSN is trained on original Kinetics-400 dataset which contains 400 classes but Kinetics-400 Tiny dataset only have 2 classes. Therefore, the last FC layer of the pre-trained TSN for classification has different weight shape and is not used.\n", - "\n", - "Third, after training, the recognizer is evaluated by the default evaluation. The results show that the recognizer achieves 100% top1 accuracy and 100% top5 accuracy on the val dataset,\n", - " \n", - "Not bad!" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "ryVoSfZVmogw" - }, - "source": [ - "## Test the trained recognizer\n", - "\n", - "After finetuning the recognizer, let's check the prediction results!" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "eyY3hCMwyTct", - "colab": { - "base_uri": "https://localhost:8080/" + "cell_type": "markdown", + "metadata": { + "id": "tES-qnZ3k38Z" + }, + "source": [ + "### Train a new recognizer\n", + "\n", + "Finally, lets initialize the dataset and recognizer, then train a new recognizer!" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "dDBWkdDRk6oz", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "a85d80d7-b3c4-43f1-d49a-057e8036807f" + }, + "source": [ + "import os.path as osp\n", + "\n", + "from mmaction.datasets import build_dataset\n", + "from mmaction.models import build_model\n", + "from mmaction.apis import train_model\n", + "\n", + "import mmcv\n", + "\n", + "# Build the dataset\n", + "datasets = [build_dataset(cfg.data.train)]\n", + "\n", + "# Build the recognizer\n", + "model = build_model(cfg.model, train_cfg=cfg.get('train_cfg'), test_cfg=cfg.get('test_cfg'))\n", + "\n", + "# Create work_dir\n", + "mmcv.mkdir_or_exist(osp.abspath(cfg.work_dir))\n", + "train_model(model, datasets, cfg, distributed=False, validate=True)" + ], + "execution_count": 29, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Use load_from_torchvision loader\n" + ], + "name": "stdout" + }, + { + "output_type": "stream", + "text": [ + "2021-07-11 13:00:46,931 - mmaction - INFO - These parameters in pretrained checkpoint are not loaded: {'fc.bias', 'fc.weight'}\n", + "/usr/local/lib/python3.7/dist-packages/torch/utils/data/dataloader.py:477: UserWarning: This DataLoader will create 4 worker processes in total. 
Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n", + " cpuset_checked))\n", + "2021-07-11 13:00:46,980 - mmaction - INFO - load checkpoint from ./checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth\n", + "2021-07-11 13:00:46,981 - mmaction - INFO - Use load_from_local loader\n", + "2021-07-11 13:00:47,071 - mmaction - WARNING - The model and loaded state dict do not match exactly\n", + "\n", + "size mismatch for cls_head.fc_cls.weight: copying a param with shape torch.Size([400, 2048]) from checkpoint, the shape in current model is torch.Size([2, 2048]).\n", + "size mismatch for cls_head.fc_cls.bias: copying a param with shape torch.Size([400]) from checkpoint, the shape in current model is torch.Size([2]).\n", + "2021-07-11 13:00:47,074 - mmaction - INFO - Start running, host: root@b465112b4add, work_dir: /content/mmaction2/tutorial_exps\n", + "2021-07-11 13:00:47,078 - mmaction - INFO - Hooks will be executed in the following order:\n", + "before_run:\n", + "(VERY_HIGH ) StepLrUpdaterHook \n", + "(NORMAL ) CheckpointHook \n", + "(NORMAL ) EvalHook \n", + "(VERY_LOW ) TextLoggerHook \n", + " -------------------- \n", + "before_train_epoch:\n", + "(VERY_HIGH ) StepLrUpdaterHook \n", + "(NORMAL ) EvalHook \n", + "(LOW ) IterTimerHook \n", + "(VERY_LOW ) TextLoggerHook \n", + " -------------------- \n", + "before_train_iter:\n", + "(VERY_HIGH ) StepLrUpdaterHook \n", + "(NORMAL ) EvalHook \n", + "(LOW ) IterTimerHook \n", + " -------------------- \n", + "after_train_iter:\n", + "(ABOVE_NORMAL) OptimizerHook \n", + "(NORMAL ) CheckpointHook \n", + "(NORMAL ) EvalHook \n", + "(LOW ) IterTimerHook \n", + "(VERY_LOW ) TextLoggerHook \n", + " -------------------- \n", + "after_train_epoch:\n", + "(NORMAL ) CheckpointHook \n", + "(NORMAL ) EvalHook \n", + "(VERY_LOW ) TextLoggerHook \n", + " -------------------- \n", + "before_val_epoch:\n", + "(LOW ) IterTimerHook \n", + "(VERY_LOW ) TextLoggerHook \n", + " -------------------- \n", + "before_val_iter:\n", + "(LOW ) IterTimerHook \n", + " -------------------- \n", + "after_val_iter:\n", + "(LOW ) IterTimerHook \n", + " -------------------- \n", + "after_val_epoch:\n", + "(VERY_LOW ) TextLoggerHook \n", + " -------------------- \n", + "2021-07-11 13:00:47,081 - mmaction - INFO - workflow: [('train', 1)], max: 10 epochs\n", + "/usr/local/lib/python3.7/dist-packages/mmcv/runner/hooks/evaluation.py:190: UserWarning: runner.meta is None. Creating an empty one.\n", + " warnings.warn('runner.meta is None. 
Creating an empty one.')\n", + "2021-07-11 13:00:51,802 - mmaction - INFO - Epoch [1][5/15]\tlr: 7.813e-05, eta: 0:02:16, time: 0.942, data_time: 0.730, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7604, loss: 0.7604, grad_norm: 14.8813\n", + "2021-07-11 13:00:52,884 - mmaction - INFO - Epoch [1][10/15]\tlr: 7.813e-05, eta: 0:01:21, time: 0.217, data_time: 0.028, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6282, loss: 0.6282, grad_norm: 10.1834\n", + "2021-07-11 13:00:53,706 - mmaction - INFO - Epoch [1][15/15]\tlr: 7.813e-05, eta: 0:00:59, time: 0.164, data_time: 0.001, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7165, loss: 0.7165, grad_norm: 10.8534\n", + "2021-07-11 13:00:57,724 - mmaction - INFO - Epoch [2][5/15]\tlr: 7.813e-05, eta: 0:01:09, time: 0.802, data_time: 0.596, memory: 2918, top1_acc: 0.3000, top5_acc: 1.0000, loss_cls: 0.7001, loss: 0.7001, grad_norm: 11.4311\n", + "2021-07-11 13:00:59,219 - mmaction - INFO - Epoch [2][10/15]\tlr: 7.813e-05, eta: 0:01:00, time: 0.296, data_time: 0.108, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6916, loss: 0.6916, grad_norm: 12.7101\n", + "2021-07-11 13:01:00,040 - mmaction - INFO - Epoch [2][15/15]\tlr: 7.813e-05, eta: 0:00:51, time: 0.167, data_time: 0.004, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6567, loss: 0.6567, grad_norm: 8.8837\n", + "2021-07-11 13:01:04,152 - mmaction - INFO - Epoch [3][5/15]\tlr: 7.813e-05, eta: 0:00:56, time: 0.820, data_time: 0.618, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6320, loss: 0.6320, grad_norm: 11.4025\n", + "2021-07-11 13:01:05,526 - mmaction - INFO - Epoch [3][10/15]\tlr: 7.813e-05, eta: 0:00:50, time: 0.276, data_time: 0.075, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6542, loss: 0.6542, grad_norm: 10.6429\n", + "2021-07-11 13:01:06,350 - mmaction - INFO - Epoch [3][15/15]\tlr: 7.813e-05, eta: 0:00:44, time: 0.165, data_time: 0.001, memory: 2918, top1_acc: 0.2000, top5_acc: 1.0000, loss_cls: 0.7661, loss: 0.7661, grad_norm: 12.8421\n", + "2021-07-11 13:01:10,771 - mmaction - INFO - Epoch [4][5/15]\tlr: 7.813e-05, eta: 0:00:47, time: 0.883, data_time: 0.676, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6410, loss: 0.6410, grad_norm: 10.6697\n", + "2021-07-11 13:01:11,776 - mmaction - INFO - Epoch [4][10/15]\tlr: 7.813e-05, eta: 0:00:42, time: 0.201, data_time: 0.011, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6949, loss: 0.6949, grad_norm: 10.5467\n", + "2021-07-11 13:01:12,729 - mmaction - INFO - Epoch [4][15/15]\tlr: 7.813e-05, eta: 0:00:38, time: 0.190, data_time: 0.026, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6290, loss: 0.6290, grad_norm: 11.2779\n", + "2021-07-11 13:01:16,816 - mmaction - INFO - Epoch [5][5/15]\tlr: 7.813e-05, eta: 0:00:38, time: 0.817, data_time: 0.608, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6011, loss: 0.6011, grad_norm: 9.1335\n", + "2021-07-11 13:01:18,176 - mmaction - INFO - Epoch [5][10/15]\tlr: 7.813e-05, eta: 0:00:35, time: 0.272, data_time: 0.080, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6652, loss: 0.6652, grad_norm: 11.0616\n", + "2021-07-11 13:01:19,119 - mmaction - INFO - Epoch [5][15/15]\tlr: 7.813e-05, eta: 0:00:32, time: 0.188, data_time: 0.017, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6440, loss: 0.6440, grad_norm: 11.6473\n", + "2021-07-11 13:01:19,120 - mmaction - INFO - Saving checkpoint at 5 epochs\n" + 
], + "name": "stderr" + }, + { + "output_type": "stream", + "text": [ + "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 4.9 task/s, elapsed: 2s, ETA: 0s" + ], + "name": "stdout" + }, + { + "output_type": "stream", + "text": [ + "2021-07-11 13:01:21,673 - mmaction - INFO - Evaluating top_k_accuracy ...\n", + "2021-07-11 13:01:21,677 - mmaction - INFO - \n", + "top1_acc\t0.7000\n", + "top5_acc\t1.0000\n", + "2021-07-11 13:01:21,679 - mmaction - INFO - Evaluating mean_class_accuracy ...\n", + "2021-07-11 13:01:21,682 - mmaction - INFO - \n", + "mean_acc\t0.7000\n", + "2021-07-11 13:01:22,264 - mmaction - INFO - Now best checkpoint is saved as best_top1_acc_epoch_5.pth.\n", + "2021-07-11 13:01:22,267 - mmaction - INFO - Best top1_acc is 0.7000 at 5 epoch.\n", + "2021-07-11 13:01:22,271 - mmaction - INFO - Epoch(val) [5][5]\ttop1_acc: 0.7000, top5_acc: 1.0000, mean_class_accuracy: 0.7000\n", + "2021-07-11 13:01:26,623 - mmaction - INFO - Epoch [6][5/15]\tlr: 7.813e-05, eta: 0:00:31, time: 0.868, data_time: 0.656, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6753, loss: 0.6753, grad_norm: 11.8640\n", + "2021-07-11 13:01:27,597 - mmaction - INFO - Epoch [6][10/15]\tlr: 7.813e-05, eta: 0:00:28, time: 0.195, data_time: 0.003, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6715, loss: 0.6715, grad_norm: 11.3347\n", + "2021-07-11 13:01:28,736 - mmaction - INFO - Epoch [6][15/15]\tlr: 7.813e-05, eta: 0:00:25, time: 0.228, data_time: 0.063, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5769, loss: 0.5769, grad_norm: 9.2541\n", + "2021-07-11 13:01:32,860 - mmaction - INFO - Epoch [7][5/15]\tlr: 7.813e-05, eta: 0:00:24, time: 0.822, data_time: 0.620, memory: 2918, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.5379, loss: 0.5379, grad_norm: 8.0147\n", + "2021-07-11 13:01:34,340 - mmaction - INFO - Epoch [7][10/15]\tlr: 7.813e-05, eta: 0:00:22, time: 0.298, data_time: 0.109, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6187, loss: 0.6187, grad_norm: 11.5244\n", + "2021-07-11 13:01:35,165 - mmaction - INFO - Epoch [7][15/15]\tlr: 7.813e-05, eta: 0:00:19, time: 0.165, data_time: 0.002, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7063, loss: 0.7063, grad_norm: 12.4979\n", + "2021-07-11 13:01:39,435 - mmaction - INFO - Epoch [8][5/15]\tlr: 7.813e-05, eta: 0:00:17, time: 0.853, data_time: 0.641, memory: 2918, top1_acc: 1.0000, top5_acc: 1.0000, loss_cls: 0.5369, loss: 0.5369, grad_norm: 8.6545\n", + "2021-07-11 13:01:40,808 - mmaction - INFO - Epoch [8][10/15]\tlr: 7.813e-05, eta: 0:00:15, time: 0.275, data_time: 0.086, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6407, loss: 0.6407, grad_norm: 12.5537\n", + "2021-07-11 13:01:41,627 - mmaction - INFO - Epoch [8][15/15]\tlr: 7.813e-05, eta: 0:00:12, time: 0.164, data_time: 0.001, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6073, loss: 0.6073, grad_norm: 11.4028\n", + "2021-07-11 13:01:45,651 - mmaction - INFO - Epoch [9][5/15]\tlr: 7.813e-05, eta: 0:00:11, time: 0.803, data_time: 0.591, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5596, loss: 0.5596, grad_norm: 10.0821\n", + "2021-07-11 13:01:46,891 - mmaction - INFO - Epoch [9][10/15]\tlr: 7.813e-05, eta: 0:00:08, time: 0.248, data_time: 0.044, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6470, loss: 0.6470, grad_norm: 11.8979\n", + "2021-07-11 13:01:47,944 - mmaction - INFO - Epoch [9][15/15]\tlr: 7.813e-05, eta: 0:00:06, time: 0.211, data_time: 0.041, memory: 
2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6657, loss: 0.6657, grad_norm: 12.0643\n",
+            "2021-07-11 13:01:52,200 - mmaction - INFO - Epoch [10][5/15]\tlr: 7.813e-05, eta: 0:00:04, time: 0.849, data_time: 0.648, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6310, loss: 0.6310, grad_norm: 11.5690\n",
+            "2021-07-11 13:01:53,707 - mmaction - INFO - Epoch [10][10/15]\tlr: 7.813e-05, eta: 0:00:02, time: 0.303, data_time: 0.119, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5178, loss: 0.5178, grad_norm: 9.3324\n",
+            "2021-07-11 13:01:54,520 - mmaction - INFO - Epoch [10][15/15]\tlr: 7.813e-05, eta: 0:00:00, time: 0.162, data_time: 0.001, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6919, loss: 0.6919, grad_norm: 12.6688\n",
+            "2021-07-11 13:01:54,522 - mmaction - INFO - Saving checkpoint at 10 epochs\n"
+          ],
+          "name": "stderr"
+        },
+        {
+          "output_type": "stream",
+          "text": [
+            "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 5.9 task/s, elapsed: 2s, ETA: 0s"
+          ],
+          "name": "stdout"
+        },
+        {
+          "output_type": "stream",
+          "text": [
+            "2021-07-11 13:01:56,741 - mmaction - INFO - Evaluating top_k_accuracy ...\n",
+            "2021-07-11 13:01:56,743 - mmaction - INFO - \n",
+            "top1_acc\t1.0000\n",
+            "top5_acc\t1.0000\n",
+            "2021-07-11 13:01:56,749 - mmaction - INFO - Evaluating mean_class_accuracy ...\n",
+            "2021-07-11 13:01:56,750 - mmaction - INFO - \n",
+            "mean_acc\t1.0000\n",
+            "2021-07-11 13:01:57,267 - mmaction - INFO - Now best checkpoint is saved as best_top1_acc_epoch_10.pth.\n",
+            "2021-07-11 13:01:57,269 - mmaction - INFO - Best top1_acc is 1.0000 at 10 epoch.\n",
+            "2021-07-11 13:01:57,270 - mmaction - INFO - Epoch(val) [10][5]\ttop1_acc: 1.0000, top5_acc: 1.0000, mean_class_accuracy: 1.0000\n"
+          ],
+          "name": "stderr"
+        }
+      ]
+    },
+    {
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "zdSd7oTLlxIf"
+      },
+      "source": [
+        "### Understand the log\n",
+        "From the log, we can have a basic understanding of the training process and know how well the recognizer is trained.\n",
+        "\n",
+        "First, the ResNet-50 backbone pre-trained on ImageNet is loaded; this is a common practice, since training from scratch is more costly. The log shows that all the weights of the ResNet-50 backbone are loaded except the `fc.bias` and `fc.weight`.\n",
+        "\n",
+        "Second, since the dataset we are using is small, we load a TSN model and finetune it for action recognition.\n",
+        "The original TSN is trained on the original Kinetics-400 dataset, which contains 400 classes, but the Kinetics-400 Tiny dataset only has 2 classes. Therefore, the last FC layer of the pre-trained TSN used for classification has a different weight shape and is not used.\n",
+        "\n",
+        "Third, after training, the recognizer is evaluated by the default evaluation. The results show that the recognizer achieves 100% top1 accuracy and 100% top5 accuracy on the val dataset.\n",
+        " \n",
+        "Not bad!"
+      ]
+    },
+    {
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "ryVoSfZVmogw"
+      },
+      "source": [
+        "## Test the trained recognizer\n",
+        "\n",
+        "After finetuning the recognizer, let's check the prediction results!"
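The `size mismatch for cls_head.fc_cls.weight` warnings in the training log above come from non-strict checkpoint loading: parameters whose shapes disagree with the current model are skipped rather than loaded. A minimal sketch of that idea in plain PyTorch follows; it is only an illustration of what the warnings amount to, since mmcv's `load_checkpoint` does the equivalent internally.

```python
import torch

def load_matching_weights(model, ckpt_path):
    # Load a checkpoint and keep only the parameters whose names and
    # shapes match the current model, so a 400-way pretrained head is
    # skipped when the model has a 2-way head.
    state = torch.load(ckpt_path, map_location='cpu')
    state = state.get('state_dict', state)
    own = model.state_dict()
    compatible = {k: v for k, v in state.items()
                  if k in own and v.shape == own[k].shape}
    model.load_state_dict(compatible, strict=False)
    return sorted(set(own) - set(compatible))  # params left at init
```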
+ ] }, - "outputId": "200e37c7-0da4-421f-98da-41418c3110ca" - }, - "source": [ - "from mmaction.apis import single_gpu_test\n", - "from mmaction.datasets import build_dataloader\n", - "from mmcv.parallel import MMDataParallel\n", - "\n", - "# Build a test dataloader\n", - "dataset = build_dataset(cfg.data.test, dict(test_mode=True))\n", - "data_loader = build_dataloader(\n", - " dataset,\n", - " videos_per_gpu=1,\n", - " workers_per_gpu=cfg.data.workers_per_gpu,\n", - " dist=False,\n", - " shuffle=False)\n", - "model = MMDataParallel(model, device_ids=[0])\n", - "outputs = single_gpu_test(model, data_loader)\n", - "\n", - "eval_config = cfg.evaluation\n", - "eval_config.pop('interval')\n", - "eval_res = dataset.evaluate(outputs, **eval_config)\n", - "for name, val in eval_res.items():\n", - " print(f'{name}: {val:.04f}')" - ], - "execution_count": null, - "outputs": [ { - "output_type": "stream", - "text": [ - "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 1.9 task/s, elapsed: 5s, ETA: 0s\n", - "Evaluating top_k_accuracy ...\n", - "\n", - "top1_acc\t1.0000\n", - "top5_acc\t1.0000\n", - "\n", - "Evaluating mean_class_accuracy ...\n", - "\n", - "mean_acc\t1.0000\n", - "top1_acc: 1.0000\n", - "top5_acc: 1.0000\n", - "mean_class_accuracy: 1.0000\n" - ], - "name": "stdout" + "cell_type": "code", + "metadata": { + "id": "eyY3hCMwyTct", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "ea54ff0a-4299-4e93-c1ca-4fe597e7516b" + }, + "source": [ + "from mmaction.apis import single_gpu_test\n", + "from mmaction.datasets import build_dataloader\n", + "from mmcv.parallel import MMDataParallel\n", + "\n", + "# Build a test dataloader\n", + "dataset = build_dataset(cfg.data.test, dict(test_mode=True))\n", + "data_loader = build_dataloader(\n", + " dataset,\n", + " videos_per_gpu=1,\n", + " workers_per_gpu=cfg.data.workers_per_gpu,\n", + " dist=False,\n", + " shuffle=False)\n", + "model = MMDataParallel(model, device_ids=[0])\n", + "outputs = single_gpu_test(model, data_loader)\n", + "\n", + "eval_config = cfg.evaluation\n", + "eval_config.pop('interval')\n", + "eval_res = dataset.evaluate(outputs, **eval_config)\n", + "for name, val in eval_res.items():\n", + " print(f'{name}: {val:.04f}')" + ], + "execution_count": 30, + "outputs": [ + { + "output_type": "stream", + "text": [ + "[ ] 0/10, elapsed: 0s, ETA:" + ], + "name": "stdout" + }, + { + "output_type": "stream", + "text": [ + "/usr/local/lib/python3.7/dist-packages/torch/utils/data/dataloader.py:477: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. 
Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n", + " cpuset_checked))\n" + ], + "name": "stderr" + }, + { + "output_type": "stream", + "text": [ + "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 2.2 task/s, elapsed: 5s, ETA: 0s\n", + "Evaluating top_k_accuracy ...\n", + "\n", + "top1_acc\t1.0000\n", + "top5_acc\t1.0000\n", + "\n", + "Evaluating mean_class_accuracy ...\n", + "\n", + "mean_acc\t1.0000\n", + "top1_acc: 1.0000\n", + "top5_acc: 1.0000\n", + "mean_class_accuracy: 1.0000\n" + ], + "name": "stdout" + }, + { + "output_type": "stream", + "text": [ + "/content/mmaction2/mmaction/datasets/base.py:166: UserWarning: Option arguments for metrics has been changed to `metric_options`, See 'https://github.com/open-mmlab/mmaction2/pull/286' for more details\n", + " 'Option arguments for metrics has been changed to '\n" + ], + "name": "stderr" + } + ] } - ] - } - ] -} + ] +} \ No newline at end of file From c56a4731e10dd82c104212a937955f868e15618f Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Sun, 11 Jul 2021 21:59:14 +0800 Subject: [PATCH 197/414] [Fix] Remove tqdm & fix posec3d (#1009) --- demo/demo_posec3d.py | 3 ++- demo/demo_spatiotemporal_det.py | 10 +++++++--- setup.cfg | 2 +- tools/analysis/check_videos.py | 7 ++++--- 4 files changed, 14 insertions(+), 8 deletions(-) diff --git a/demo/demo_posec3d.py b/demo/demo_posec3d.py index a2694638ac..03b79ce2d5 100644 --- a/demo/demo_posec3d.py +++ b/demo/demo_posec3d.py @@ -224,7 +224,8 @@ def main(): modality='Pose', total_frames=num_frame) num_person = max([len(x) for x in pose_results]) - num_keypoint = pose_results[0][0]['keypoints'].shape[0] + # Current PoseC3D models are trained on COCO-keypoints (17 keypoints) + num_keypoint = 17 keypoint = np.zeros((num_person, num_frame, num_keypoint, 2), dtype=np.float16) keypoint_score = np.zeros((num_person, num_frame, num_keypoint), diff --git a/demo/demo_spatiotemporal_det.py b/demo/demo_spatiotemporal_det.py index 4b77305e10..7d188e13e2 100644 --- a/demo/demo_spatiotemporal_det.py +++ b/demo/demo_spatiotemporal_det.py @@ -10,7 +10,6 @@ import torch from mmcv import DictAction from mmcv.runner import load_checkpoint -from tqdm import tqdm from mmaction.models import build_detector from mmaction.utils import import_module_error_func @@ -225,11 +224,13 @@ def detection_inference(args, frame_paths): 'trained on COCO') results = [] print('Performing Human Detection for each frame') - for frame_path in tqdm(frame_paths): + prog_bar = mmcv.ProgressBar(len(frame_paths)) + for frame_path in frame_paths: result = inference_detector(model, frame_path) # We only keep human detections with score larger than det_score_thr result = result[0][result[0][:, 4] >= args.det_score_thr] results.append(result) + prog_bar.update() return results @@ -355,7 +356,9 @@ def main(): predictions = [] print('Performing SpatioTemporal Action Detection for each clip') - for timestamp, proposal in tqdm(zip(timestamps, human_detections)): + assert len(timestamps) == len(human_detections) + prog_bar = mmcv.ProgressBar(len(timestamps)) + for timestamp, proposal in zip(timestamps, human_detections): if proposal.shape[0] == 0: predictions.append(None) continue @@ -389,6 +392,7 @@ def main(): prediction[j].append((label_map[i + 1], result[i][j, 4])) predictions.append(prediction) + prog_bar.update() results = [] for human_detection, prediction in zip(human_detections, predictions): diff --git a/setup.cfg 
b/setup.cfg index 1e62cc18ba..32cd9e1f7c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -19,6 +19,6 @@ line_length = 79 multi_line_output = 0 known_standard_library = pkg_resources,setuptools known_first_party = mmaction -known_third_party = cv2,decord,einops,joblib,matplotlib,mmcv,numpy,pandas,pytest,scipy,seaborn,titlecase,torch,tqdm,webcolors +known_third_party = cv2,decord,einops,joblib,matplotlib,mmcv,numpy,pandas,pytest,scipy,seaborn,titlecase,torch,webcolors no_lines_before = STDLIB,LOCALFOLDER default_section = THIRDPARTY diff --git a/tools/analysis/check_videos.py b/tools/analysis/check_videos.py index f207b7a43a..6a75322f74 100644 --- a/tools/analysis/check_videos.py +++ b/tools/analysis/check_videos.py @@ -4,9 +4,9 @@ from functools import partial from multiprocessing import Manager, Pool, cpu_count +import mmcv import numpy as np from mmcv import Config, DictAction -from tqdm import tqdm from mmaction.datasets import PIPELINES, build_dataset @@ -134,8 +134,9 @@ def _do_check_videos(lock, dataset, output_file, idx): ids = range(len(dataset)) # start checking - for _ in tqdm(pool.imap_unordered(worker_fn, ids), total=len(ids)): - pass + prog_bar = mmcv.ProgressBar(len(dataset)) + for _ in pool.imap_unordered(worker_fn, ids): + prog_bar.update() pool.close() pool.join() From bb8dde890f189e05f47d7e6d2f088985ed3486a8 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Sun, 11 Jul 2021 22:45:20 +0800 Subject: [PATCH 198/414] [Improvement] Make a resource directory (#1011) * make resource directory * fix --- README.md | 6 +++--- README_zh-CN.md | 8 ++++---- configs/detection/ava/README.md | 2 +- configs/detection/ava/README_zh-CN.md | 2 +- demo/mmaction2_tutorial.ipynb | 2 +- docs/tutorials/4_data_pipeline.md | 2 +- docs/useful_tools.md | 2 +- docs_zh_CN/imgs/acc_curve.png | Bin 39921 -> 0 bytes docs_zh_CN/imgs/data_pipeline.png | Bin 117332 -> 0 bytes docs_zh_CN/tutorials/4_data_pipeline.md | 2 +- docs_zh_CN/useful_tools.md | 2 +- {docs/imgs => resources}/acc_curve.png | Bin {docs/imgs => resources}/data_pipeline.png | Bin {docs/imgs => resources}/mmaction2_logo.png | Bin .../imgs => resources}/mmaction2_overview.gif | Bin {docs/imgs => resources}/qq_group_qrcode.jpg | Bin .../imgs => resources}/spatio-temporal-det.gif | Bin {docs/imgs => resources}/zhihu_qrcode.jpg | Bin 18 files changed, 14 insertions(+), 14 deletions(-) delete mode 100644 docs_zh_CN/imgs/acc_curve.png delete mode 100644 docs_zh_CN/imgs/data_pipeline.png rename {docs/imgs => resources}/acc_curve.png (100%) rename {docs/imgs => resources}/data_pipeline.png (100%) rename {docs/imgs => resources}/mmaction2_logo.png (100%) rename {docs/imgs => resources}/mmaction2_overview.gif (100%) rename {docs/imgs => resources}/qq_group_qrcode.jpg (100%) rename {docs/imgs => resources}/spatio-temporal-det.gif (100%) rename {docs/imgs => resources}/zhihu_qrcode.jpg (100%) diff --git a/README.md b/README.md index 16ad57087c..9443603366 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@
- +
## Introduction @@ -20,11 +20,11 @@ It is a part of the [OpenMMLab](http://openmmlab.org/) project. The master branch works with **PyTorch 1.3+**.
-
+
Action Recognition Results on Kinetics-400
-
+
Spatio-Temporal Action Detection Results on AVA-2.1
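The hunks above rewrite the image references in the English README; the hunks that follow repeat the same substitution for the Chinese README and the config pages. A change of this shape is usually scripted rather than hand-edited. A rough sketch is shown below, assuming the repository root as the working directory and that the `git mv docs/imgs resources` itself is done separately.

```python
from pathlib import Path

# Reference forms rewritten by this patch. The bare 'imgs/' form used in
# docs/useful_tools.md is matched together with its Markdown context,
# since a blind 'imgs/' replace would also touch unrelated text.
MAPPINGS = [('/docs/imgs/', '/resources/'),
            ('../imgs/', '/resources/'),
            ('](imgs/', '](/resources/')]

for path in Path('.').rglob('*.md'):
    text = original = path.read_text(encoding='utf-8')
    for old, new in MAPPINGS:
        text = text.replace(old, new)
    if text != original:
        path.write_text(text, encoding='utf-8')
```

Anchoring each replacement to a specific reference form mirrors what the patch does by hand: every changed line is one of these three patterns, and nothing else in the docs is touched.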
diff --git a/README_zh-CN.md b/README_zh-CN.md index ac6597a3a0..ac7a5c1107 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -1,5 +1,5 @@
- +
## 简介 @@ -19,11 +19,11 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLa 主分支代码目前支持 **PyTorch 1.3 以上**的版本
-
+
Kinetics-400 上的动作识别
-
+
AVA-2.1 上的时空动作检测
@@ -243,7 +243,7 @@ MMAction2 是一款由不同学校和公司共同贡献的开源项目。我们 扫描下方的二维码可关注 OpenMMLab 团队的 [知乎官方账号](https://www.zhihu.com/people/openmmlab),加入 OpenMMLab 团队的 [官方交流 QQ 群](https://jq.qq.com/?_wv=1027&k=aCvMxdr3)
- +
我们会在 OpenMMLab 社区为大家 diff --git a/configs/detection/ava/README.md b/configs/detection/ava/README.md index 6b03ff07ad..c1f84f3fa5 100644 --- a/configs/detection/ava/README.md +++ b/configs/detection/ava/README.md @@ -1,7 +1,7 @@ # AVA
- +
## Introduction diff --git a/configs/detection/ava/README_zh-CN.md b/configs/detection/ava/README_zh-CN.md index f6e9547c04..51de6fa290 100644 --- a/configs/detection/ava/README_zh-CN.md +++ b/configs/detection/ava/README_zh-CN.md @@ -1,7 +1,7 @@ # AVA
- +
 ## 简介
diff --git a/demo/mmaction2_tutorial.ipynb b/demo/mmaction2_tutorial.ipynb
index d5ec6d74da..8671ab34d4 100644
--- a/demo/mmaction2_tutorial.ipynb
+++ b/demo/mmaction2_tutorial.ipynb
@@ -1227,4 +1227,4 @@
     ]
   }
 ]
-}
\ No newline at end of file
+}
diff --git a/docs/tutorials/4_data_pipeline.md b/docs/tutorials/4_data_pipeline.md
index af143301e5..f60ab6bbc4 100644
--- a/docs/tutorials/4_data_pipeline.md
+++ b/docs/tutorials/4_data_pipeline.md
@@ -27,7 +27,7 @@ A pipeline consists of a sequence of operations. Each operation takes a dict as
 We present a typical pipeline in the following figure. The blue blocks are pipeline operations. With the pipeline going on, each operator can add new keys (marked as green) to the result dict or update the existing keys (marked as orange).

-![pipeline figure](../imgs/data_pipeline.png)
+![pipeline figure](/resources/data_pipeline.png)

 The operations are categorized into data loading, pre-processing and formatting.

diff --git a/docs/useful_tools.md b/docs/useful_tools.md
index be9ff05663..d2575df328 100644
--- a/docs/useful_tools.md
+++ b/docs/useful_tools.md
@@ -19,7 +19,7 @@ Apart from training/testing scripts, We provide lots of useful tools under the `
 `tools/analysis/analyze_logs.py` plots loss/top-k acc curves given a training log file. Run `pip install seaborn` first to install the dependency.

-![acc_curve_image](imgs/acc_curve.png)
+![acc_curve_image](/resources/acc_curve.png)

 ```shell
 python tools/analysis/analyze_logs.py plot_curve ${JSON_LOGS} [--keys ${KEYS}] [--title ${TITLE}] [--legend ${LEGEND}] [--backend ${BACKEND}] [--style ${STYLE}] [--out ${OUT_FILE}]
diff --git a/docs_zh_CN/imgs/acc_curve.png b/docs_zh_CN/imgs/acc_curve.png
deleted file mode 100644
index 27a2f0851e7d9ee0c912f73af947b11453422988..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001
zmQ2^&Q;;fH>D13(zgxr%*St<+<7H{$IQm0Z0f&o6FA%68o%BSVl1{F#VDp|)#djcF}u7aZ?ee_aS>`UGX}Rw#!?znn*egCSZ0h0$ouRejGDj?z?R<_M~-;}>faPU#Jz9PHKQkG)2Y5&6V>&^--KT^l{+X@sev9GuQ&SIVTN_# z$wl=g5dqJ;h@143%#S^GxLZ3ebhOyVl7%WMC@T+ zxs~R7osjkARFB6scc3YF>8ThvTfx8@9a75FWB1ZSI784@-qzS$YYd+#L#M=;b}u&7tBeljAjAhD#cu*B12hQBvO|1Ji_UX5 z$x?(2-!xyzVS$Lj?>sS&Q{@g1&wl@DOpp4>2qB$GIn#SSJRA?JTGRWJ+f>x~^X#pR zjwlu+o=pP0Ra7l^GkK(Z_E9m@L&W|U?l?`>ElWOQ$J2qUV-LMg5=Yew{lT7X6;F72STtLF>CkpQbn@bC`NJcCIZ31r*AWLmdT^Mu9aO zQ_@r!mK%=&WaxBaHl{_Y_a^;K8oq-i)}M7+zP{C1W*J$hB^=VsN_s?crM@~X5<`l6A16rC+;ockrmS z^-GRwtjIs#?KpU(F$vuF+0+hnmQAmj;x?a6$G^$t+5Wky%V>Y-Zh&2PWbMRci~r&) z-zC~ko~#(AXW}I<9BGh=dKU9~z|uG2Q(^aaRr)K~Z`9=IG~9NSf0P{t?+i*0Jb4yf z2+1W?p)#>rJD61rD(TecDj8|dyuKVntK13^hDx}noXIvh@AM3Kvo$HuJ^Ej7GKoKUsoa8yv*U^)L64 zSE2E0reLIaA!}6kL`he&qBFu9eboKO7O%E6MMsVUZR@h}v4`q^J3s=|N4LGbB(5w3$P{)1-z5YC2eF^kyllsvZU z4_j{ewIY4g#VfL|dW*1@x}n8moIpmz2{{FAM$md9;dRBxL{ z;h<^X;AXw8UAc$*?cKA`vSaf8gK8|T@;&K`sF-Y`-lF}Ve4H4p%h+_MfA9LedXCLx zX5Zg7XI{_H9y~~d8OB(l>V22!_p@Sw43C#neFVL&i4n>O$A~BJ;;;Ig*UDw{9sN)$ zU`SA1g>6OFw%0%te1{N=;&OUI0R&Coy_AGqy}fnZd)Zqe(7nDZAH>uhkic|QXGO4Z ze@am1W)L&b{{ZnfHKY&y^4X80A#oEy4j8gg6VA_6KB!k|UsQ%<4@k8#T-;^e z7@tiCC>u{|(Gm+KS{=x)vx}-az#J9*&`dq~&@P3wh$4k#S{O`NS48s6z7h1F_}1!mCi~T3^51aD*`B&!j-!dz`yz5~lJ(jLtx(2@nny8v3Nqr*i z5T%8KG$RtlZyGqA^BB5LI~{XsHu}RFkF6rbw^(?)DLvJaO~?fgo|Mw*DTjXKEkS0* z#Xc@zrCq7HxGAMoYxwa7m$A_=|Hu<{QT||Ud-*L{gRoX&&%kVB#mv25dh>_$%qUVc zOEoT87>Qw>gY_|K;L7K0+_ofB0um!7X?V2SEGqM;RS{KAc4Y@X1a;DA;b4c$A0~zY zXuUMcBy!%ZWo92fGsWWSIkWRscOV9IQeM3rN+zHL+LsZbKbik$2z8MYr!y>)J4)R{ z#4EzyT3hPwB6l?-uN0@X#5Oylfc1fhPDAX_*=ut;Vq9LE_UN&ai2o4(PF{PI)yDuaa)>fN1IMBAv#&v)>A7rf1vWST)MHvSgB-zo`k-0oiq)!d0} zkzuUGXt9ss)-uoRwS9{?IGrmu&;m}-G8NO^o07fL+UOH@O21f!snZ$RS(?8rv%t>@ zwt%5+B$C^{H)%=9;wzCx0d~&V#+znk#D=HW&0EVV>W3j=72_|rjfX;9a@QQ;R@yfy z4eaLc>b9vpaRoSJS&-#?COO-v0n#WX-AuD7ud16Is()J@k>9b#`kU{^68N>D_4T~&ch9R3FMk6Rp z>awK1K~G^o3h|9H(L65t7r%XW0K=<3B%_%yBPy~{ak%LEBziE6u?wg*awZ{T|lg#W5Dw@PD!UDr^d22+^+;3 zcy0kX=Q`9z2 zSg=@!512L%nPj9ev98i|)K#Yz#CP|rTlchl+w}rTy4n^dQ`T1I__fm}WZ;4VEt-gc zl`FfbAvVX{VwYztenglp027u57}1K(TScF5S9z z&2(!LYemc}JsgPaGn0HBOXC$`f=vB=2+?Gc*8KjdQhvA&ifLWQ*T$dGie}OrcC`HJ znOdR$)gyjz;EpA8k%UpkH0E~y|_4slTYvQzEqdWw?6Y4u-L&P-J(8# z)AQZtGN$830#=Lmj+Dx8k8P_|ctI@2rQ z$t1kwEnT6G37(5p)CrQtQHScq)8^sOWeK|iOy3|4#Okm{Vc%()+0*m?EaX&yq_{o| zNKsQQn`&ZIz9r^@=YqELgz;Tn1{`#?-Co?_p%Jv7L}B7|$zVoH4?Dg*{u6y?_sW0> z!%xhMJPO;1$5iIlmrU^&&O1?O5m6`Ae^FV^f-L9?i@v{BPp|!M2=5)sk;0C^r*6dz zN`yZL@@b@1h+xZ5w#+{qEEZG@TT#$6X%T>uvt0gq6Jc^ZDw^i#BGvx0zEOeN3xUz! 
zgt@i#b<@oXr1|}D+iubI*YLH6a#0qSN&O za@Y<`9JrqT?jxwg4V<*}c`oqkF#Z!O8J($$fKH$0ub688NsD}Q-QBg*IYCn!bD zWpDe8Is+xfvNLh&?nGi;pS-yu0Vzl4HQS4;vDn+yscBwBS5#4MOs8MA+yA^)H+ZA< z*|`uZ13z#79%@3Ps_fAQ^#Dy7LHTIJ+- zUYai7>)NJH3N3PV*XY?7Dy&?XbNbaq4Iv@5pB4hZZuB8Vs5d$nUT+CL|17ovFHiIj z86n%A6DnsD+o+?^3(}w;wbp4t>3%gDIm$;3xBopTyL^`XTV1B?!mLxIN8{4~^o@Ig zq>ZzG2$UB4eJ4-r$1KK;N(7nGUQap=+S#-+-nk#0Cq@NUO{Tx=(VD$#lb`%>OBOUO z$R6P2`&atbSNpv>Odz6XMxu26>-?jNiWby*kGyPi%KACVRNcYoIqFi7GEx>zL?mB` zRPhIt@_m#dO*l*J+6bF`90puSmDa8SxgTon>pS4Zo)^357=FfHIlD;jEIxZN$RZ42|H7Xm%956)MyvbTR=J>C#B2kiw zh-IkuoydCfpU(w+t6qsjd{wRG((u2Kkv&|V}I0B)}9@di^S&1db17U z1%b{~dr)>uql}B_%3(&Fg`Sf$@g+M^?H?{7wTv&pQ(Av}#Zdhf(X@`TmZ{hz^-k z_$=B`p2K%vzap6b-fozPMevtXGz$6ItRp*sf}DLKJC#`UQX1`ItW@Tl&Bj_;i{)YC zK)2e8YZ=q&+Yqhgf2BL7l%hfW9`kpI92Q8@=vcEvUgji*9lyT^9*36gMM*aEbk&&~ zDi6>5&+pfI&%b?30aA$tPft}J=eg#Xr8S;3wp(TVTiEqr7qv}2HvBu@zuVSd8A1rH2Bjm-?vobR zhE#qKN*1X*ND+*qpa_tn3mXYl>WHW0vCa7m_I9VZZ(nwx8V&mOHtjR37( z=!Z}wg?#BF38O1GMAoD?zu0Iysy7Kk=R>2)gse4PZ>?>H6E3$5Nj?bGhR)FCv{(=a zxS~NI8m#o~!_DzG=oWA}T_6RcQ{99w&I}8P)e(5ngC4yN1dDPTj(XokoiDcOBGycA zcmB=YB9ByK6I}Zxt|!J0j5kvA`SSER53SR3z!@u(GC{0hf-io>*=s_T-uC0aD~_CJ zpm7Z?CV?p;(BymoNlEVx1?tfJ6`roD>M$>guaW;&=I72r{NHf>KZudTr=l7uG>Da5TnqYeE!L={KU__{%Vom>u2u$m``svlZ?M(Ug`? zbop@QZxg)z@So)*zP_pgracTR84yTa;S`WUwsja3hTM~Y~ zx82|I9FNj3^-eCgg0?btYlf+|`NewuR=BGH*wCvVa}?>jUe|1>~1r_41TQ;!RcFHYXBb8JAbrdIJR;zMQUz#cfcWMh|BC(N$+CATP|Gwz?x&?fKzSk}s@sSB`;*s-e?h(UA!uiat;nYKTjW zP4WI2Mf2RuQVO3q|3I#*kl;aY^vio;Fr1r-n4wR1WssPPvHnRf@I{fe&QCsWaX^ms zFzbAvwOA3$HE(OGDw20t^+=fud;QKJlB9Q4JCH>&y!mKqh4nY`cA1Wa`zfVAkIFi6 z&p9jGef@pfW$??4CfWT`WA((I(dKvuniTTYT|s2dc;H(r~(P zr-yX)M9$h5H{H&)^p(6k1)}1tsF~|?q2#nrt_6pv3?(>D?Etv*H{2*0YXu){5Xsd( zP8pH4yC(R8&)5h&YZNyaWavTzG81^?DC*^nk3-BagveF;hXt3V`56= za@|4GwFdU0Y+W;FSHfTu>6zB5RH^0{w{Vo>R8m6y*aZFG{i&diZ>#U>!0(5Io`Ta z`HPHN5$z4)Gxam#xQ}jZ{|elnNi%3KC7w9`HCdIyDk1ykJBZtu-E-BHRHoN8cr9Xv zYEE# zqFx4TvcfReeFu|WP=%KH`@hsX8-bh(n9kp0yspmokN6~kF@lLA9-G2iy&rIXhklW%1lH_Cx?DS`X_Uj#$FI&~JC!vg|79XrhH z)7&*ri&^%3lO^uHcMKT^q>o9q+(5Su%e$r8^*+b5e242#B{1ynBL~I$OChmRjU?*s z4RrV8B-P6wBH`pGn{WIYi8d$4((5L>5n$D=BBhefFkV~g*eB2`U)tvLl7^~wyYdIO zG=reAX(ehuqTo4uW>bj&!yiUa+%y`^5V->Lx1W=f!@($S)NlG{&kPUd8Z|2n;XngS zsQaQX@#CygsZJZ#eVL}|xAz^)3m&tso`hO$mt0cO;}^X;!p zuM0zYIe$BoQELUUyvf8a-Z*xjTWCu9R^TqgFrvDfG;J&u8cIHz^mW_QipX0_OZ(}; zhWx1TudiDtHS(3OAtype+?s%6WcT)kS+qxVCSkw;(KWs9Ku0OXsE$wG#`g7e`{0Q! 
z>i_NqxJDi|+0oT`qXmW8o35_teLBOmOBQB*=CKe~i~Xcf7EHQ$fEnq>0W~nu6Qo@# z4Vn}{ce%su#jJf!&6_uG47Cy;o+k}uzh}-?@>sb4k%KmSPxZC_r}eQ^=@t!tjQ$MT zf~e9fcht-BXNVA>a`DougWp%OrLnGAwwfSrqG`8`$~+Zly+5;hj%{-OJW}1Lna)l5 zBH2IuVR1guAfsvlIH&mys-LjWi>rA(GQP~a;5XN(*%SO`#g~oLvzG1DP(HoKf_thF z$#~B2=E2%(AR7&N`?Bzb-q6X_U3uT_E73#vWnI32)HrURbt{Qo3&+;F+XhfseNRKk z{=3oVnwyS}4u^Pi3O7r=R9p7V8wq`Vs;(}I;=k^P3zWqg<I4(4K)UOUc{zvx(*bX3DrlP;KcR2QPXOm*;&Q>%?qx5&()=`BS|)&XY?X<10Oo zVbx*CZcDLLtsAGBHOk*w+uKUTuM#0e1tX>(wV9|!HxGQQm^i<6!t-ZaVu%Z4y|t(c z*Ty+LTewV27(`jprhx0PH2R#!D_Q#WsS<6_0=*K?x6&M<4tSGgi{7TX+iit5q~MEx ziO9Z88*gPjRhG3ObTqUqb#xT5+1Og`(xsLGQ%TUe1^S8P9>;YJe9s9*uh;N(TNp*2 z?)wo{XTg1~zw}nLRBW%n{Zl~H)Jo-IQ`dK6=4F?y14i~UEN8+k!mX(u0{zn#@4Z2x z_O+G5@vqM?E3d-8y93)XT>jUJK0WTAW=7=`%KdZ2+FrD)Z&eAiC7s;4*&!GGF;I3& zBvN&!j~LORuWC*#Bup3Dt3gV-^K3l^oe~#i&R9zW+cyqZ(pVX3ZHh`LL^y0ADqoRe zz1eW;_O6?Thi`w9FW?h?E>KsIWR6E8hLDkvBnt5DKG=$3VG*&XAAv!}?{51ukK@vy zPvzg)b`ZncZ-&3fmg~$2;Y@fxa(OT8>A1vZKQnIwk$sLwcob_5?6ul*Q-C$B>{D7A1!!iXVF( zFcF|JCJcJ3FP_lYOr+kf9(^i57Gmtzh3^147+QM0oUsBe?YEb!v=jN=MKK=_#_Q7< ztiSqGmx+b#PHVR;-qQ-J^PoYnNZ8Kv19#uu5Eq53OFHRVVs~qVIU)<_NH*S-YQ%P$_*z>$I8dOMb;;ZCXIV5F~ zY~a8MIm0(T`$4-$A5ib zrgU2Myy9c$BMi-A;znHeNHw1Dwjx>ojpwKcy4qD z-d98A)|)$JY=%v%=DuP=)$a{T$yMO8oS~#Ob4uY@&wo?G7Su@^lck>9WHY$oaq;h$ z;l-iH>HT4I9xw(RPHfZ4e1zFD2Fin;lU_&04^X|ukEaiq^`0()`K7bLa zFs*9y<dp@mp9?`ctS*eCTY&qYuh6ymWq{nE#8duMVmz`r5vTbazNdBdv4` z5>nD39n#(1DG~zG-3`*+!Ud7$(p{HsxHRA4_r|<`e9kZ<^2G=!^;VirA zO4EZMBTI9e{NB|@5Seuqxog80NWx=eCJnZ+LGy6+t33}%I!j*^TAK-*WkvV?VdUuB z7VWn7wt!Z>na%J&$-b9+lkk6HDc$9-G#T-U`JHHi&>GL`GL7TGOS&#bW-676KxIRC znxBiG+G!KmuMLK!V0VrdTKw`3D^r~nHV$-Kt_FW7pWXT4ZLOv9Jg< zu*wlgx?Dk~W3??>T_3O0sbRgtOzlO`U+5HS({(0qeKjl9Q>l0(2=@etum9A&?K(MI zW!DaFp!(C1$e{B3(RiYXoa~yY=|`~d0ViR1R(YzHUx@WYTS75xPL_Lp7463OLh`Vg zRu}tmcqBPemjTeOIR!=*u$a**L=)(ncUrv%lGA^5sq~a3xckU5a8IQGIf3OrSp)vG z0Vx?kVuDd73z`jaMQCW@yR(<}B$wl)ErE)x{* zoX;?s{!d{0ZDSoOZG;hxB>SN|zeUpr&!zVRqn$)2v4Sn!FkSF!OKTP@Cb9QF(cK2j zeiDD6LJeK?tB?NOsIwq6eh!>-p}opHbbQ0< z)W+_ZE-iN8$f02g57P48$JEz%Qqv_$l7x9KIWh6GwT05sJEKu4uKHpa*gLXxKV-1_ zHnkwttjjYYvWbS-Cja?FzT`^ORkP8&Hs^_Wnksl(HH1P{(3=DL9b|YpzcE9c5JT^$ z23nxE|1p}nk=CPC2hXD1@FTfpdQD)qmgz?q7qJx#yQp-Aa3GC*c_-KQOeANkk(X2F zEAc-Kt%{|SqLn|a<2JQ?ewr7|qQ?#TQgsfV`LAe_V}%V2ENo#whI3ag&{@RSIpnef z!BFT9B#>HtX;S={6IaYfEFCM?{)QBtjWyt|)%OOh5}04K3}y4wk+eKtRKzFK2{vxI zQ1oM#9rm~j>hbXNzP*3l6!I5u3I+%2T-cafIs6V>#dLZ?GJUBBLI0-Q4K7mjWs{QV zy?{J#pt1zP_pdjBg8>L8MUL_?;qMv(s(@Mp<_V&!CA@6#h6ABvxde3j`HMuWfl91z z(j4{(lR<2NIV?s2>i(;4xD7YXOG>NEuLzd|o$2=pb{G+!HC06?badE9v$~`Y!9HOG zReS`z$M6^a*T+I4k>^kFH|Ars>G#x2uf=igaB-1g?~;+#5F{k~;R)K6{Ix187c?p= zD;rA~tt!<^wa*x57H5K;%5!ES$(%^V;;{!i=9i%`u$jBLoB8!$yX!&dqgTW6KOxQw zd9(S)W6rS)-?TE*gAY9VB`Rqf3K!k|KX(@JP)h$In6f%|<`>Q!=TC6l6<1xLYwbEe z@4%sDs&;WlwL+l$6nDFzrR*g(y)L;H2hbB~w+)p9zHe(0y+kXj;}1B)j0vq@(#k3} zCFvg3up?CpC0+Yteu|x-x^3nwnbBl*O+Qor+zF8Okf?X4boVi<`%H$I1yjp{dbnl0 zi#XGW?T%laq1@k~7BC-IO#9cGs7tO@b>6Uwy)Rx%CJi=l43Rv`lMh(?Y{V0N?o_C# z@ZEd~x4pkUYf=FL8JN*aLyyRVOR3%p;&Q7%JuquocZgG^UKl(nc|m1JQkAKg`jqFt zc3f#Xy#uE(4Vp090_LY9Xg0X@EHw@%)VoL-EFm>*Qy{;BEI{}kIp+xh83PD z*K>{l1k%xFZ=lHs@*V;-u&6v=+Z=NeL>JC79Z#rmNH=6*R(3FWPnTZ@xo1GtHYuhS zLXhQFg)z@_Jd)FLU|O>boHDuJ9HLsg<9`bZwf*7r{S}PZy-i@1tZ)#W{y(_q4V@TD;bU_>7wQ>*skOo!y8{B z)F4sO8@?Roc4Cl9hRV}cN&3_8F<~8VMbsM`p5TY~l2`H1AdvRCIp6(i!`33@lz{f_ z5+2K}!V_!7p099>?J*~$VGYb+Jk{NQ-l}`mz==c&s>f&lX=_jNTdAhMuJq8W8d|D- zoa!doT2*?f%@x(6*ZBBRJ%6J{JDBWD4*vkrU7e%?A*Wrlb=KE?MF0mjO0wG^Umf)) z;NVw@dnjVRp33incJq(-*`n=lKx-{@g(9*lw7>`agyPNf*7Ks8MlU)7cYrI_H7Uxk z6Q*hbL5-9;*BaPNxqkblv{0E?9-EZ4!qV4J?)2fbiO#eqO&vc?AWY4$G9JSYC8djB 
zney*n?0$56(Y+vB(8Y``ALkIvTsLy7#!25wyqncgPeq*5DH$VRy&uGZS?Kq`nc`Oc z!-ZS?Z(7BQd$r$#FvS52f3VO@X6r6ZHng<$Isn|7z#Ico%EDEU{Vd`dDuTmm%j({n z5|l8E0%zH+=iQLXe_G>>xe2i^23A`TynB6Cy_g9hOAj@)Za zqa|&RbxC=WS(zZoJ5KGiO=&EzQRHdFJ{rHQYF>)2BMAvMLb(HO0P6}M)Qh<|O; z=!=YMQVYE;xEi;js`nald~8{X!Q8l!QP|3vx_drhS3bGR804DQf!4>2g@I_^g6(;u zx1=-f-{M45ik=ogAr@StAGw5;*%u9%J&&5rDy24AOvb%#m)sU9Pame^MIwvFHw}0( zX`f)XpEYy)APTpc-(())6>LMwaluifWniW*9PVU0cQ!fcy)5k;MJ^1zeb?vKAy|Iy z1AT0E!-$>lbpcDNsKb3)o^d;OD-(n5-1iQg;a*5yp{NC-Y({= zD-1zQ?gx_)0wyUeN1c(W#(5V*W~)gNFt#2E6QTul`4=FWl3s}j{PYi-Wy+v{#Du$d zNpbqImrm?#2jBUFea&ri)&OCKze_Lx&Rg=)nWRxke)Y(L3|RqUI`Q1GOq(_-2FreU zo9|VZuX}&KvRIycZ{d1wD4&gN#4)6U2qNk`#4R-7OTJjq5ORVTw8p?L5{Et?^D8}#3hw3S4|4{BF?&x5ie5Ib z3q~29{hw=v1S85PLZS0KDs;2dA;MY;GUK1Brv|kN@GiB9UH?LXiUGST*IGK-2ALY0 zXW!$}N|F)UOTV`UXsf()KnHCG)Ht=Bh~rGA(L7&yTJ?xNt~6P$A%G5PtU|?}b|FjN z@y-RTT%>9S?I;b-v*i>Amxm0Q)#b@H*KL?8q;i~~&lK?9eti4~uQB$EgaTSiyOM3~ zNxcxh=$$s6qb&Iy^G3Cy6`P9^sOkBZcDe{^-;JE>jd}`8OjEGynklwEEqVr@E+o>N zHOWb@?V-uFAFIwVFV6=NMttlonyn&xr^7b*-jl44UU#TCtD_koHkJ@8)OhQmcH|f1 zN`wyHCO{2dTZp;XLg;f;{%S}Q`!Xw&RP1rc;gud+(7)R$)8V8;IKLBXYq=B~xiOee zMJZ%)R`9RcWn!_tV^Y)e?0fdgM~~}S@q?t*zu@o`8j#Hq&)*}ms@YKiOD^p1so2M? zKf`E;X!K4b#7RTM^*^o(-ubE7$Q{A$Zq3>^piFb6ZEw^1n@}=bea$P&n!;D{xghk| zZj6pPO3jMMB9USIryv#+J5j4GgO(;5wD*0H^HUbr!nbj6pAh8cJItK|+PCw&RwGkZ zT(uWWSMLu~<7n>}w`do2M3(tuR3|yfWl&j*@U?+?r96R`2fd8SPJ-g5c?@%nrk@*5 z_?W~Lj#}P~Px|Y3hYJcrt4oC4gN@4wU{2Q;?LjjX*PZ+rna*juU|cTxKFQAQc{)-`~x>4_+a{v=o59YLZ&^@_lmOI*|B3;9Oz`-F*=clax=jy zcSO%nG&zp^%CX!uL38}|SSV%>u%-A!1t{W3$5tGVrNU5>o$GJ!dd4+#}HxO?LC z*EXO&-*&$cy6{G1T8@<%eEQiK!M{N`mW%Oodwf_<;bZJuvz=x;ACao;HGa9DZB?Zx z&91V6i?`e0xVzgZanN;AmqDgp#qaQC&%QRJ{$`)ptAKWdQ*+KpjzSLUnv6~RO?{+N zqva`w*?$)DE|l4Tj*!x@J$^BnTxmH-S8J*x>t2tad7xZXxGK+u)rwuY48cp4EoSG0 zrES@isy>>|6B4I+D_oKm-0j@0*)w$zj^D!QHmzG5*2;Qk_#zdpdP3P_;(o(ls2D1d z)Ralt?w}joUb2x*^yz;&pFbTdXa4pGZI7eywUo5iXD=e0 zr=)XNko$8O=vhWd4z!{iSDm3Ow(yFtfdia>HLm`p1yhCp9ZHJepDvNfX+J>HKW`)} z(!eINIBf0Td?ke+*_P#W@=)=VXZD<4RC7uM2jFs#go=tePk8EI9Poce3V&VRvfJ*4 zq=!M!l-p1aOO<+PSv617IlD^+FRCYh}SwCcCe$rE&zxr+FojScd2AZ-#>7>jS<$TRpntd+V zjr~AG9>*(acjm9n%^gF+K#w#Y)Lfv;k0H}4{?rlqAb&afE4GOBBs1{#Q?UJrw$day zo$%wm(bYj6q>h>RN_&Z0uCm(cK2x+^Q1-FJ+^-QuGx`i2bdt%54I4c>pO-f3191>G z_@3u-KCY335=IHgc8eM*W^@*|{{|zxH-k_prje-i@Wps1!U2k+iu2uiGwLjTuIc14 z`Z!h{yV)TuirBuoo+kD1w0bW*^K6>kbFfBKYA?xt$ccXH^^~svh&py86qzag#nVRT z3hNmP*SQC&eqfU;C=%xM!}wW0c%@Dp@OCY=mliw!Zg8sOa_EDRgyO7#^s<~+0ZkXV z?^A>TDSUUz^ch@&)Xi*Dh0)f?=qy9EV3ATU!RH0lj+|hrO1Hz+!)5y~8>{zauU1al z_NChE%O_19Phzp8m!qV8kE`KMqg)C7Ok4gP+%}O3g(kbHZ`9>y@nefF7#z9#Hj!PT zzb(B`v9k2O*hif|R|=K*=S4~5jOMHtYYZsj%GaxOGQGZ~^1j{g1rkMV?UXOFcR zU}yJ}jKDe11BoTk*%d`wFGn`NscSE2C0@-V;k}>x2-AR26HW#BJX48=KQ%Y7>6BmJ zB_c-?iZf(44!&1UZ#P_{^bjZV(63O!8yNqGnX|QLE3LF@ti0@3(TGy*d;6(v%sHhX3C{1-Yv9h`2B1`n!whPK0v?|j>Ry=jbOwpv125Yfxsv?U`OgQrOdG;|vxoZ{vAZ)M2+aE|&zAW`!V1J8k z>f4{0#7)PiQ@h$LQOh^c<=6R8o~DCTE$lyJOQ?CS*vL5Dpq~$g6JD6R;}(XTt+6Y3 zqkF4D7OoB8>iqElpZ-0;MktA9_b*o?SHED89dAS?Kpnx)F0>sxP(+ysG<{Yr}ZCyfAj9whN<;Ux$ zm`FxDdtzt7FJpdx58fYpY`$r~>gt9kq4FkwLRRt$wKCUf+Qzo?)${x*&9|c71#KzjUtnc(kCieD$P6Rg*{_~gdP2drs^IjPp zbZ1rEZ{S3FIJb8DWlF4r2GV(vPF+r()sxyM(p7e}6a52!@`yZg_-?I@2aVZ}Q@V>a zPz@Ou7TifAxq9<*qVK7;W5T}SxYSe~%R>pD#FMd;Xa^`T0`xaT3vWNctFIc16&56_ zbW?brR9gk8e^B)xKibS<;UBS`chs$v3a*#%?0{$=OjjwEHY*ph=cwiQ-qn*v@2_St z5=!>)av;d(2U^Z9{3dI4JND|oig~yEZ<$HS_l&-&^1P94kD->eGPb>)4Tqb^KhGg- z*RLK|9xbcBOSu_7he-690?YR@(sA#IHjX3eNykD7P=HBKt0o`TOV zE7%pp+11Uq8I)5$;I7|ZLmItggd04XZEh2pr38??+ugQ5qaS7MoRwY--X5i5WiU1Z z=eJmET8EKfBe$NZovyTXrihqP5vHb6uA@ENw1wgDF0+n?!X?>^k=qLVzl8w}E!$V( 
z$ZEVd*&Hn`kTSmB*$(wy91oe;BcElBMu^oXJ!fXn;J&iwLfx~GYB@o)S$Q2MhHZ5w zt^crBT|de1%nD74DC&@p3i;)BBb6G{+-+6mtvyj`Qk)^!ql9wOkH4PuAsA*~f8OXN z%Ko6bt<($?2H}GccUq(>8TK0(4+!-J3;u@czK@ zxme!Y=fbGM=c-F&`0=6(d}gBe1IQ&D=MvpGC7uFryAty)u z)hi7!d!KJpVnWZQ=A)9}?8{aRgZ0wNYn+kQ3?;v%`!2jIkF`~#jN#rIfQNs z^ieOLH)bM@7zP?yj*Vdg`^A^Aw5uB)p^g#m$h+WAl)m(#-$H0t7oqO0qxLt-Sq5Zd`o;kIE?~&op>_keyeLYWW>ep1l#a zi6-fe2+|TZ2eC08t=BD`K)v;Yfm`j@z5Ex}fBmhJeFRORf1@^Zh1vruBk?oma;c;& zZ7Pod!{M2Qmh8#BEfxSnCOEF4F?1l(c~C4UX1dBNg_Wo4?ujw6ZtgXU2Gs#O11ofH zXT_kb-&j($T>Ywfr%{8W4uV*|mA-hkVBEmZ-qvq?HCLr^(n^TNHJ;PiA7n-uYf{Mr zR|oM8-Ye3eCE0?<`oasIHCeTkYyovDFS)_~Q5Vqdo~?H> zRloCJ_>_h=rt_1hUMCIgn>H;9Hi?-XLUr!>vY&kf+w=neX003$`MB|DuPETpAglAh zh1?zt4ro`aZsF9sx)Qdx4U=8%K~I3AqaXVN$^DgSgWlYbSd#x6fg@=!C`jpHQsX9w zcYDa}0tAxNc%K%gyHOTAvqpA%P}SlfvJYM+2x>*xQF5gjc-#|>TwFPJgRdy9ntwI@ zx5d9S=6=dwY4ICYefEOeADW(;HWcfp;A(%qO-J-@P6wy@Ul|>Cf?4;MC9|7M5bxn0 z)j6Nvh^iFxLDl{x7mRN#7&|shZ;_AF1~f9z4UgCAX6*W-z>OPqG*vw^)jktXTQd^_ z!uEQWv0S=>qYzv1JBs)!ECg^3LV!dt=4|wsw07Rgrf>NKNKYk7i@%ieI7L6gcu381 zWr?$#c)Np>qtk%rI74~2!A3G0{o(x3xFStEpmUxcniFp+c^z<=ha=2U=8kBWGEQXq z-3$FBF3jK`BkHc#C#rGpah&|C?|BID-J|lB_*GOW(KJ=gY*|?sni;L92GrN#(5SWpen=)`CILvkA7;Z zm$)32?~Q`{JG4aZRls0$-7UnzbQD-G5q!~lZ@T!LnRqD=&IeGiN#zd1am%>$_yEsO zZDo{7)W0&tUbP@eWMaT~6Bkx%_~8>ETWeQiPt$V!>!8oIys6EB!C=YvTj9Kd2G8f2 z_L-{1aNU{q$0#?$;11{e^W?7jYInhS>{tHtc8aYJ`$UP&a`BX76MuJklC&oX{1>Cc zMfn+ZU>&`0aSB@zHha7L=mke>lRLWyp0ku#tACo(7d02NE7y61m_OG8YU%Ihiehog zXZhXaQ2t=uz}dX`Q+$E<+Hps`&kpm7J6BDK~~5>7)tL-$`Hmv@ipbBon#K zw|f@z_Ftyg2TSRz?@=M4-E*%qF4ie3Z8gZF)`vAkGkV}erKZF7Gi$rbsB&`n=59mt z`+cD$SeHKqUOs?p{@u3?pU|r(uhS)}X7g(h^t{aSG1GU+&w2DC^v!(7YWjV)uB}=n zZ&&JpH%BP%?N@ph%H-85lZPVPp9;Ubbb>k%_xy&Wg#F&D3cu%37BT|O4SiLpGH6SZ zm+qnrHBSU-d&h*gjQzqaOKlZgxuNV7Ii5dJQ1CXvg`MQzDjqo?3b3D4xl}px=}twA zAX$HdCL&6W`FqKq#a-d)=;oVIF1w7IwDs)-bBi3bAIw?KbJ?sS#vRTZ?dR(}u-*4G zF$9Y!3MrxKyJ%z36Mnm2?k)NAZQRH(3-;vWTPL19oe{Bt2jBgrs`Ss(ochOLRU%(t zmtCy=m{4E!Gy&O2*i+{N%BMZPVuY>ghD0$D_0oE0=e8;8Kbxl*|Kw zHpkfF6u~=BZjbldow1Z7e8|Bin=i(kjDaZS0{mS>49k45A*bhwJ$}hTk;VfcqNWE= zt(%OUH&698nI@eJNKXxv;4wy+qbfC^#<%Swl9j8kHg&KDuV$Q59Dbg%RO3#mxxeSs zmkpndg^Sv?#VSmdGM33{7*mk>S*>)#n`rBFH6(XRqY&4&k}4y1bwERX9tLN~FvWHG zVJO~b0MosgunS4f&LSa21Co9=R=pjJob`N3EFwye1rp^~!T{aeAIoMxUjDj<*NqkK z=x=#K|K;%OXX6)Skb$}v7N{^kEKwWd-NSUzb$m=Kgt6kH|7`7 z@oG#}pqdNYcRMm}aBL@Ej~bPgKIquyn*tdrlsEM=s;!*C+j`Px?aikS7;URu{QBL2 zeKvXD72e$)TZthwPPN1;ecxu&Uy)B5_T&my7HXPuzQ!mGhC~C_XplVS;mQZuz_Vhs zj~d8$S7G#m<}PDD$iKW735#GX`i=0r4kzoe#b$tuD@m_dsd@8e3I>f+Ksu_!cwN#! 
z-Xb&Of7fAiOZhB}3DUaH{^U27GEPzL?<#Z%jd0b`jDczKTIUF7M$SmT_ShTDYA%uY z{96+#^7rC^DRWFLf5Kz=m>p4E0^iOCE(#7%@&Z`Zdcq#kC3o?ta5)nb#NDx@rSMMQ zqS%YvT+?dXt{}152q;AtX)t4|aj#)fw(fK{gyvA6oS-a!)`Eezw`8iFJKSAKz1ez0 z8eKcJLD2%0RzEF1F!>AZyJ}ms%U3c~Dk{CQ0LhPA$B{}b5K&V%f(AfSqv1bmOXLhowgLl*2;t*YLi zTselUF>*LR(iVj7&VAIYLtr@2dtCl9-7v`Ds(E8$&!tmybovfr^0y1?5KS;9qnBtk zd%zun@f&4bzLtf3Z{h4F)vT-NopThaTYX#i1%TrCRl(K==>rR|^d7Nt`BQ%PnAxD+^q$Ij zFDHF!K1+NSNKSr!L=&ppESdEs)v3IxRS}kq+-y_2%WOXDVsL=`FjxyR7wL0{L^t2t zp0N7KJvY8#_toF4d%}9~76q-f7rWoF$Q{9^H`eu=4D{DmT|xnW8aNv7ej~b^8T2{m zxe+F>Y*$-R5iFfSurYIzOmP0x;ne>beLt;CY2i4pHjCo+NdV2wmv)~Im=;i>92Y3d z1Qh*$DewWz*s(W~(#D@j{bX3U1@EOpsTmBp>pvC-8{&LNx2q2zzMxi0SK zj~VmSqKhHGeQmDa$_>h`T_vc=1sUaa^Rdr|aI$tV4?b5|EcACUa?`wcdb zkk1WJ+HEcn;5eXq_YMV!r|gX+V16N9iLLd&UtHy|UEs>_K5hX#Jj5J^Abp=R$zO?* zU$fH;b@uToXt-mVDO?kd!FzStVcZI~h#}*Mvb7?P`k0S%JD*ZI-IxhQF5ZD=FRdnr zS8oi)#BhcUo+i!74`U$@Wo)jX{82wuApXWqY;vwMHS+$_mf8Je9-hPgbCxlX3SI|p z&%J&Mxt*sMx=H8(y*{st|F-?)ag2>GtBts9niAzTRw>!r_Uvp6Fb<+o}_NcJeTW49-wZ-Y}OG8jP0#Q_2ZQhwu zP2{Kw$LN>$Y>8i8g!JAtiZM=0d6GI&qXsu02rrF~2NclWvipLaOiXAdCv^?jiWRd5 z>c&KVgj0AScik_a{I;HD({&!<{;a472BI1T>Q~msId@v+3%{#dhOqy8Cuqg8*Kr&cnxJv$21|j z3M7xbHuDZ)aExdDJ8L-H9K1NF*;d;h-(4n-ZWb}1Tncu5;6wE5I$++aZ`^JLNr=Ez zW}vJTfp2@J@-11ZlnRG~#PMr*jA5iq^teU#NGycYv86&y5Okinrs6uboeH7 zx@*+5UkNMQV__2|xgddroE=MT?zuP59|f{a{x$;FKx{Jk2L&k>CiO4q~|&i=*F_+4G(&ryY3 zHU&7`SKE`{DsEKdkrB6}FXW7G7^Ib${L!QAet7Wtm6*8Rd4K%L7l@Dm=p+E90pp$^ z1PW6kfKA8$@xb5)EcygT^%~%?`OdrI0S}bqjQu#$fpSQ5w&l74ExD30|F~QM&ioN> zO?uXa=L$!@u}FE~n6nW3;Nj;3J-MJEqU@(IIN8ruV2_Ff2X>_jmOR#ZW8+AsHb1E8 z)R|!b1>S*@gkdktO1&W{S~a~-$o2L0_-}aaUV~n!W`Gdi{ueQ;t^Kg%2qwSTQ}mAn zIL9Pw?@;q9Bqx!i;rF|)HF8_UR(idrm<@)Ku(DBoyCfq0NTbb}vsC)LztrMvcL+nb zj-t@5}S-$2}?!bDewMSrZEH!4h{7HlwbHwM+*&djY7boE}zC# zN>fHK_3Ai$_S$r1nu+rE-lRk5?Zdh}T8VRumknk6bs<+x%bj<*wKg?{4&Of$-{*{P zLL962E>`&7qa=rEnByh08+4%(3ljLGj`OcRAGt3MJo6jeFVZ<(^pJXcf)dTQWHJn% zC!OgNikrxLc>RR5tIjwlIQzr&Nn~t&LHTR9Y&gmc066l6izuvdbtC+~?A!3uW&fQ= zmvuHx@fJ;ufjTk>uvm!=)DdTzej2Y<1nM1P{27J)`uWf$XXEPgmO7FZ9rVud{>#kf zs<&d9T@zD*+eEYE&*$4J(n^iMYColH_>>JzSiQtIT9R)EALxc{-q2E`RyQBQe_p@p zSz&6wxjfdJe3r$+rc#PhxKZQQs?DVadQS=8eun{9x(l-3f5Yzkptzcm~L_z}J z3cx?bzb$@RTD)M<2;6jKy}7SCfddI*W{bPkPVnc!Br>7hnCUvwz4ib$D5v+1r@2^t z!jgOZj*}%NB0@X}&QSjJNz!8Ia8>N;HvraBtO=i^8l4m$>F z>bw{LPyYg8dD0xe7wNqH2|Xj#Is22;=dpfq^4{Cf0{+Xs(2U+%>eLC~v@~Dvc+p8^ zc-+()A|eFH5d$3mrX6{&=z~gZZ6w2nxbJE^`(Mc|GOlQpV_>jqbj3I~7Y(^n?O%GC z(t1`5%gCH+&8p^-;oyW~PIdoDEfOO_RP(zt(J>t+5{0!mCxu;QQ}kVe8wHSD`6$kr zuEqqaI8K8H$=j}Ra3qls18TZ=44BD4^_SU1r|SUpGQqP^3t$%M&ZNBep5%eq`B5ra zqSRduE=V}aYJfEekBiw+-Km@}<_(o^eoCx9-Kdg{y;8InMeRNYq&L37>t$4#M}Iz1^~O0>1(ZMer0wuY2!4-kpP>H1!D#mI^6e&yjSqujLDbt zG?53kC?s~)LC?mnmv5K6O2RnZl(guGZ{j{l;A7lal5OL5dP_TYaBPQM3Xy_V$)FGI z`1&%4dIjYlB_x1pU7?E}Uw<8l7e>~gynv$=hJ-Xc^CN@i{A=O-SKPE39|14M{GGIf z#Iy`xJ$aIH$=}eCN>^QKTdfXxJruH5{>RweOvm#6Rl4)p5H&xu=}VLgK}1B!3o>A? za71z{a{LvZoYS}cVkDNT1PAyhDTo;GzQl4|a&)!S{IZmgv|Fjz9WP>5!Rn-dUK$n?$p$^o4n#IsMKGxO=4+^(=wdfYSv>P z&aSx$Kmn3;%ND!Rt$&%nSUl!6-H-^7p3lJxE#ijZj%y>?Hxxs zNZONzzwl5q@c$;Xq^6EU2ZlAYlBHR`in<(R!EHqX$$-~xeS3KL>@t+@Df1T~(D?A(7WD1@COXbY_pT{Z z3YCBFoiC;3;}NwnRZ*RlmgpoK+1?L|_ydm4TXj1pBg}J^qsn04I)(8AU8tNQ*IUuy zkX%2CA3#%tz^5(}Y*2Lcy%-;U5RtMqVII|PgS7lh)iUV=w5SyQ3^bs^1(#>M{S{lz zIR@+>8zLyS`!~&Je{J-m9g^R;ILhCE2qQC|?Nt5>lE(*;5J)9~a_b;s_pa-FiT+Lh zKWb2S6kJQK?Ig#TjstId<&~|P;2pl6{+U(Eu#1teQ}CM8S|#6m5NI8lS$<_!r8Z3EL^ zg`{X=ig`i2rfhX$CGv;}#`ep&Dv)c1Rm?*)s>wNL&DHoj$dNcmAk?hx*XzuE5uPSW z^npf^07=`*kUaA5{asy(qbGZbCG;!+yD*07w*TQDU=x54}7)z^s zJHRXPGa$BbM)czEM8|vk&zegDg`fayfv!~eHp886Hj3@p3{kF}ixuv=xm&M#*;B?! 
zZ8x2jF6>KN#%sZI3r zUqQkH{t^;3p+E?4snApq0M>h49N@)O$xfrWmO~z>^ay6nQ-$Dpqrb77nC5$V+Kee& zEkQ@DrM&CcWZ9_{s4QGO9?O(DJ|ia5lGG1!k(_q>c(V-=E!`b;c#c@;u3e{m-OH#- zd^wSfQGl@B%ZFmHF4}|fF-IzsDqzp-9HrYjb!vFA*pIhfPlQl;Z?4%{Qar-$%+i0R zZuz$Ia{DCX;Pey5yo#aP(Z_paYJ!6AIA&?@ggY&Ws~+!@Yh7g!S<|#_N4E_r;0%O| zu(Ia7v>NSzIaaiFi})@`^G5Y=g*WjvjgbBPq1GhbP9lT2#8paZm-cN8x{EwlO)-T8 zsD>!*DsZGT!WglvWIHuaCvwB;u!#UM3LF47Lo>ven|oGl;trH#dA`|4n&ts=jEdG6 zub`@4e-3``nWhJCRv{)7NwR7h@|iNci94QADfv|!1{_gDz{WJYdx@;~vu|D9YJ z_~lr!50Vul+V$;qYoTWmucxOaKjAMI@Zzl6T510^Vb>Bqji0Yujr3O}Y3A&{#C&h> zUnp)Ie~aFLbG^OtoCmPLtq%9g?P6Ly0}z##y#Yu4U_;h#E3L z?eEi+t0lw44Voo_2?Z(Km)yQ$_+X2Q(naAn%FwaRg;MsneI?O|N=tgskGUp!2w%@;ZOy zOm@J4vYDw4bEb>kk~HDm1yqkO4@tkxiI#`$O=!ECX1SrV-G?2ly?sY%c`5QN9hFFk zGveTk;Al-rfnb*22l2+EB_lnfaZ5EMZx3%SmbW>sZCZu(9|FqzFWfH>hi1Bb6%g)Z z5DS~_RfVwuSzy6@^&L;zT5z_#b4ZF@1rMXdj)O*!zj>aK%rVO3`UkCyToO&8Z0x1P z<-gO60r~uiw>os}tAICytF>XUh++P@l;lbqif@aFnCbK*-&;5l@aP?5MDV|p%DA3{ zangxBjfsGVZ75b6ad4CmjnNDmJVDP89&|=TcmLE{SZ4QT zeo!2=wz6LVK1b0kUiwFpZ~B8hH^&$!$mNfMROVx-3bU@7(BY*DO8TkK z>xzM#sXUG*-yR*uWc;t$(!iHj@Oe%4iBx)(aF59)MX;D?{lcX946w98hq1c$=`)nD z&L*O8t2Bq%3S(jy?EX_kZFA)f_umr3ANO zBOlfc74kP4|kAGgu@sdh?g3p-(NI&f&%OFgzOgg(cna8e^8yWPd*2%^Aq9c4 zCY?(Yw_hXgm$+&upUxnT^8%LTY1QClUPZRkR^j-NWWp7|wIDpBu%8_)^XS~G6#J7h zJ|coTgf-y3&GQgq)I%RbT$R+v0MqTYk}x9b|F=4?)ld+tEA6~BdSTkPyO@Ii6zcqZb@CBhl-4) zv0;jR;%7j*OMqKn8OVGXNYH^u*GVxKf>VLJ;&PbBGqb})3v0|Ta3A`Uod~GAJ7Vh= zRTt^xOTjR_)nQu8$V;+$|4uX>Gx4E<7C8Dv4sh-8P-{(l_<dB$|3ard{0J9+tjSPj+Z~tPo!f1(X7_-v~d0;!Rbu5JxhHauTmrhZT5? zm2_0hq$xj2mm4zPwO)HZ@-h@>H9EhH1$I!_rW`8a5S|5@?gf87m87vE0D@BuA@Z#tp-Mf%!gnb_)V3YY_8@shuI`el5=8$1U(ola zBMCqb$N&(zWGpuG_|zHmAS;ZWd2B7C@iq9dR^jH!g;Se_$2?V<2wf7`XF#mMEF2+V z?2q#!)2gksx4yFc9;c8j4m5%`_llU4IvcZDPUqt+9XA{)2si;G?ON8IRB^|F-vLBz z9{tVn?K2SW9f^{%h==PYqhJAE0S1sMt&11{fk3osj1m404=XDx2Lg-$ea1B&%jPF5 z9mFC&e2w<2D`E{%tmRO2P>$H_^lM$M1$fQqz2Mc+H?+(b|0L;l6I1b1QE_o_;#N04 zfOK1&=;Z0~02;p#j5j=nKph2-om6wRVP0&6Kl}zMczu2aklgmK#pdi^kp#%4AELjCNTH z_}TfDo~*)B!mSoglw^Tm#40vBH8AyMWBkzrS$s$xpQg>u`&W~ZaY$OTFgY2NZr@2F zdOtrl4iKheR+@ObxMVamS0IgATep-CjvdY=-~nfW7J$dB_4bNS&&=eNmHllw2FQ6| zNZD0XRWlDd@AjErN+h_icrv`mfWAH^sQG%q+Hp)6=Hk-e`vI03nO~gt`_aKRoId1? 
zVTCLuIS5Vq64_htW!bnkst`CtMJY{9O`Cy~>(g?BM^u2_v=?B!tpV7pzqXv8JNevh z1KM1FfBy@wJs@Ngaq8D{a_d4FW%V#4wKPp$-3Js9lDk|Vpdogm0g|Mw7STrd5v2A1 zL9>hzKso<~W<@z*1aBe_G*Xm(w-A0AN@Kqs+FKAnOl>%l#vQo`B-0nRt~Rauy{Tdf zRa3PFZ^LcPVFG*o1}~X4;EV~khZs8Za(;*c8rEl8(#<4BFhySkDU=BQqY8JswAX(ucnPC!kpSi6fA!(x3#?p^ zXKYLdfk9;V0rB2u1o?_Z)(%50eXVe7Iaol4qXBS+zpP1qV^hCdbQcp{5^4bjw=Lia zj*(;_B`Al(>8UsX5rd3_GMGdBFC(}EZV&Bsnb*SBhD6ni_9=6`EKp#n@^*VbzwvhC zH%JHH*SZ(Y*8Y(74rvp3*`-E+Y4~gX#(Z@4Kva09%7m37mtuN^bPgfk%e?;J=H(F` zVvy83Ti?QqD}VnIG>>QRF^V-GUm!%fY@3bkYM>BQ<)K%n~IFVBi*jAXsc0$!9mcwPr&eMVu2;PuRTY% z0jkZD4@r$Z17%&m3=<+f$c|g>f z$(!UOUT~WT#(|e5)PDYO>2UdMze#o#HsuC0kwpM6LAx(z^Z)_l> zN9QG-hy(!LO7y%4LI{Ri2MXMhFN7GN45afEHD|>J01H$i4179fDy8MQvi8#d+Zfgb zpa%di90BqAz$A0mj{u$!0DK6+0R~fMX?TMTm;@0Efk5NNq{J_)js>(Vx29$5T)->_ zyy(ct2H(BEZ&jgH^J;s0yQHL~dmmD6RHKDI+wV!Su;bEJLq&I4I@;VeZ0<1h)f6 zaCcugmdT4_w!w-ZK0bcDT$3Fbu6T{<$)E3dT8%a&va+%O@vM@zc4TBE>furg=PT~y zDaI`?qu3XrLk(@VhFfZ~r~L5Y!_VYosII1xQo!H8vKjo&h?bU?qgOxyj2fF&U=e^< z@!XdjrNbebnWq+TTPfK8^SX_N#YYvDcpxmJrKU#NwJU)^(RjYrl=#Cprtn7h^2O(hLWO(tpDh8n;m|;6 zXlTkhIzP(Fn4qn|eVhY0M?NbkTrqYT^&zt8wc;o1xsg1bhKqJvaJhYRg`lKMqpdY} ze#I#R&>@`u1fYeBiwk89jj;RUHms|wD?YDF#+fR;9|&lK9>08nN**uQ=-LZoH-V-u zPT+zUP4Uu9Pw5B0jne@L8Ej3th;CxlAZmMmrKoKg-^WXWC~#KefkRt96s zDJmIDv`KbHmN3ay3PmMZ8$}u09NXCM^A30U+>~Jl#T%GkSe zx7iVt>BDR8zw>Wfhwg^nzWKF$6&S+#L2<}7b08@fJd6fGjp|jJ@BmGQLmFGLH=$;T zzTxn`MwYC@!0Nvo@b6)nkuabNUYtc*W%NtCy0CZVLE-P&NV-zMswhcXzY3+W!ZnN# zMiYB<9omMW?Qw*s$u8tXJE9-I%H(oz&XUcLLTDc@{tsjqbmnC-0vv9by>942RC0XA zEps@?Lo7loN!o4`TDSpP!;kZ<7(1CJ`1jE)>4AySeXh*u4x-I z_9K{Q*YemA<4eJPa{lo3Sv5rPWHtvu_wyyj6&nz(A^|d0C}iJFFrU0QaDbEk9(JV1 z-#B9;^-V+{!PUo?ONlcF_4Yy{QTY3>h$!12B9o|P*75w%Gc?Z@xM$K0h6GwG8&cxo z4lHgChpbEalFaEH4=y9jEdT{@|CEEvKmJoIbKhCU^AB#VX>q)Iiv?lsM-Xue z|D&U$-6RIKv_`i#$N7ryI|ApwKP>wkv30F#Rpx-=Q8fJEe@Vs2ubL4BQydSD%)$JQ zfsx|Yh8NdlpuN>@qdS8zfuWep82huJg&8Rs8IFK}fEEHLCudepP90#mi|g!oUwme! zbZ5o#(t;96P(f)WMr^G*|Q?T)vpZ=#_6sC0s=ZsM*0Ecv1?3W(ajkO zNi0W?9!1xD`Qp+!fA8LQRSk_-AJjB7h>0+x!q#p`@f_=;^7Pot>RRM~N=b zHcVPg&GN*PtSs(@nbCyYE^3J01|FXM_wvwlU*o&T<`5jL+tk$L-4`^QQC%$#1@-FJ ze1-TTXyaH5)#oO_QLJ$)?1$dP&*3Zd2Vdldj6&c}IAq7TG4~HS(B`b>_jBz9XWvSj zB<*2SJduN8i8pULL7_KnB+NiOXvz#F+Q=NbS_iW;CTe-_di7Ob%h3g9GP62I;*)N> zK_&oR_MZl4U0(gH>Vad&;2l`J(k>ee^gj6~vHvO`z|zvgE+Q-|f&jK38m9H*dW3X_ zsT38qf8x+r-Cg&Nsg zwpYN9YbnnyEX09I=7I#8UqHWgoqxIsJ}skMyyA7Px9lRI+g+@Lo;V1Jzg2$mg9q!| zADTAKjCO%X$)xcp*_QEvrb^Nx?6eP_bM4ecJpI5l{p$7W{3+mSek^gn2R@Ct$=bfqNYWQ-lU%ka$U{E)c8LASLl zDtnzb{B|d+$mJzJo}OD84YGj$@&c!FrezxgNMQ_=>j_y|qCuI!S*wtV{(~yM-+o~% zFO0{0_;7+TNb!nxtc{3*hKM{+w`uzQ$$HWc0(HGrL7FWd>j0mhn)hK$V*= zFRGobU#BbW-J|2+dmmP^E;8))Y1Pr^mU``0oTTC$;opCm%kapN7$(FR>Y0pzdK<4Q zp_}fDQ|+ep)@M>>!Lq7*;@L&wL+*RD*ta_!3uD|3CKq?4Vh0kd+r=gO-&6MU$kWRyr@^i zkD!wO+KrR;xD$^pp+KL0iciZvH14=i2Qg>P@9 z%q%RnFEmadLcUCpJ5+F@+Afe$Pn#P`pS4=)595t~wVP`DWDIJc`37?<+gDm?&wO-l z@pLP;1`&_WwPEd;ivNTIqw`GqzEX%(*=*dZZlbEL9^FjQhyfOR+tx%N1e*eDFMsaPck)Ed~@l@~L$g|M)lZUDt;^Wj6 z-4@*ikIs4cozpj{ZInPDXa+Bqc@U2s+uYXPj!L4=uPr_RR7-)C5{l6@TosD})kBMt zOABpwJ6<{X%PJ|R=^ebLI@5x_TB$xgPsgicYQnf0zv|5RRWbZS#LUgjF)U%n(dPh1 z*C>>j$(H@Qca0a8(&L7RCYQlJG7=RPg=inL=FfY0#2VfR%zpH!=FY*eJ>V2o0<`(V z=AxD4oGLlYxWq3YklI1>Btqg9_RMAPzS4b7>Z+<)%el`P7n_=!!|n5Dt@_gFK2x`^ z*{3@{Ev0kxclULbcO4l`rVkJAfx4)+7hcLTnFH~RXB@o!ur0Oirh1zsD`!=^J2qnA zeoehGV1g#W52apgnATaZov+b5z8kBY?Ikh+HtxLt6+aNC(XgcBN9}F#j(-VprMqM( zTY3C=0=O^x;KOhEhk~3O7ri6D@d_soie!%UJavAbZHdN9lDz%2{LY>~?`MAmJf2~{ zMy}YY@z4IL*RN;Jmkz(6CX~)Ed>Ui&`BK+^pC%GnJQ%xGV{@zSf|7BZC@1VT;L_)! 
zSG#I~l{r@*-1hY8c;q&CRWjD6g)9dE3pY;doT%vaucmH}ii$!mBScjTAOozdx-oQU zdh?Haw~vjhpjd*S!k?;lwJU1JOV6~5FD}ngBl(_}w|J-<%+4 zEAD5(Rc)iWxw)FY-}rzr-#`UMX!bqQN9`+Lf&gSoBQ88V9RB>xLp62kqgX~(mi!%h z!g*6u(?j<5X(RK;(zK&KeL5v5Bs90ma)BUBXvmbA{~^g5T&<2n`=092emLZ2Z zjtqfR#x3fi5lmMI*~MqhoRcE?81{Y0wMo6p1x;QRj-_hJLdRGLDf17d_*~Vz@P+I% z)qh>~NXpP3YbZZS9Iujt^yD2+JUXR8LouOUs*#aVxc}00M>yZ-DwcZ9&dn79H+u_& zPM;Q>$}RF__^;_zDNAhrBV~2>Q2Z9^@^n7ZoB+0_x^srmjoUOa zF_FB_XOJAPJO83Qz1dA5WO*{Z9#kmU;!$N^t{-ucSO?HV8`4`ThjW1(C^>nrY-y&; z$iN`XX92dN4!mtCurlkAX09O6p9klTDVSA5Uhq(fDMh_m1`C1}*T25~7c?V;2rhrD zB9{74Iz6`VYEOhF?Qy&!qT)mXi=;d&JNv@<+38g+ISH|PNw?QYQW3loyCBlCV=J5Z z7;$)XNiYW*>mpu7;T5ETI#)BM6pu%zRB)xZQrrsr_tPDMpCh|55@ zr5pGOZLo};EM%SBgiL~5`^U9y@%1RDzY&4W%U6=p zP|FP-KdOR{_ySm^V>MUD{;x#G?l6(2?#TdOj5yr-iPs zwwR>r>Rc(2lisiy8PZX}3EXbQKcb9oet!Npxi%WzzU@cR0Z?%)G$>x2n{-qag<{qs zHJ|u#oRM(CM&4e%vP=ZQLHWlm@IoAzOqmrGqS$W~3Qz7$YI>Bsp9+}X*U?GHrP;AK zuu2=t^S~tmYvx5$6FYWRHTfsY@s+MZ=dE09lH^REb%yW2WaQ*)=fvIDl=9{f)ZZ`3 z&!6`LU8$$~WDO}qxL+ov;6Q+9*P!Or?JpU(%Dxzax0t92bSDSe@O+zYxC^p7(!c1%zPW5c3x zp%_-reEo9ra|Lw9t;T^sf14+TuBN%*sHacdQr|)p( EuNc`VZvX%Q diff --git a/docs_zh_CN/imgs/data_pipeline.png b/docs_zh_CN/imgs/data_pipeline.png deleted file mode 100644 index c5217b17aa745a654bc44898ea342ca9c85dd2b4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 117332 zcmeFZ1yq!M+b>Et0@8wjA|NG$w3LW~fJ1jT(kY;Hi?q@WN)PSO(gM=bDG1V`gmj$$ zfIjg)@B8iVJNvA?)>&)2)-3M9ng4xt{H}{TSYB2VALkkl5)u-=)Z>SWNJzN-NJuFC zSSa8(P}faa@B`UVQSt#&Q7`ohc!Opp1{Fg>DviKBF+d0JvF#pfI3gikX+gY@+ii1= zk&w=_q#lYXyXvl=dWD*{Kc5NSqjffNF)JNIE^9h5fI!Mn7$q13&E}>aA`i@aSGAyt zYcsA9x=4}m{KxiIwxWI`p_3NID?=8>6I~VdDd$qntIQA;pdWg98FIRrazo+t3WJc*( z@AF;(TrUJ?fBu7f*M7b~O=Ek?9<>J%`j7WIay0xrAoxPOC_dvMKyeRmt(V4qRoK9DCt=jsA`&~(LGpkT zig-nvSz`nrpw76Rh3O-4TB#q8jI1Shpm(GUc~d@if}tvgX@6^x_@xOU1jEU>F&fEPki2_ z#`7HaI>uFzQWzMH;2Gdx5p$X3mR&~+Nw|x~+CFX8+i(pJHxU16&iM}$nrFm2b6F|+ z>5%U5)7dOORylNUJj&|`S(*;w5F9>?j~5eTBSq^)=rLxycZt(5~sKwIAjCp!(yx83iJDuL;Fams{x_?uFQH5;9>% zveKj2U>&xyX_ekc1B6-TUTwkCENyn*=y9pv<>xyyl-LLu{Zb?ZhX9d`6EX&BlpCsy zqxgR8)-3g%t~oG;kWiSm*6&PZbwZKz3=e{^ z<)&rb1t+9P4sY3K10&_S>q#lLgCU&rfh?Te>7=*2Y3uwUoNIds<($AN0I4$NK&)Xn7bf4>9mDbgA+kIjC(rq=Qje zUqG-ZOnd6jPSsev*fIQr$NBt&4Mt%1RVuvoIpK59&}df;YlC?eJvD!^oCsi@_u~0M?o?v zAzQ4UO0OoK-iu_e2l3AelHsnt22GG8)k9XKZxULK&~!(2Nz5fWEZEP+_xdL^E0vizvumu!2gxs|0#R1x_{hS=)$4B zFzUH8GRu15e)uRhCF_O2L$GQh3@{2EMlLRRfB)v67vM9h_e<2Ts0vteqD{ABaJyi8 z9k!e&WuHvge!2E8+WgWj{U#gEw)xJF&%b&plm9#nPSdVin3<+E?(N+?T#WGC@@rVV zcc3JCTCZ1sIG-lE1M@tN5Z#(g)UD#REWC&dz9=@vD3kkY=h>U>=W}{TtTs#G3pSTV z!&Y)Ii?cSvV#Xereyn~T_(KWd2nfyC);+lH&{m1je84d2PHoDbN}oXSU5+g;MT zZ@%I4Ny4gZVZvQ9dLPgYqR#0Vy9;Kj=S;J`<|~ymO+2yv(mSDR#DiKOs1Z6fSmG|W zTiPB3iDAHr&lhFxOrj~79Z@U0wKc4v??o%Uqwb~EJ&P0-VW5swz4$h6sfe74J>2#M zh=S^K;bl=lZCmeOJ}^UsR&cjawyH5&80ixIY#_ZV{33?MHudDO&E%j~ne#3CCTV%C zsEu&zM6jk@6^NQNjA;PD&56ct&?)yVcgk*ZwMiwW?JHXq#(2MJ0-}dWdqycXA1?GX zlN5&cgxBy*@CPY-w5cbizqzx-O#M`!6{6ue_PiYTQVwxO+G5>ZAgusR>u5nlM`T0x z-FGCttXJe?=2_--m0vEUXH(u1nV)GvaZ6+#h{2CPCUv+J2;tg}GO5~ec4enV#w>$P zW|$Cnc+Mk|dyXK+1CuZ__R;k{eluOEfjOmb5mVmFgD8jMX2$YFxC!yPW*3!t&5-#v z=_(gWh~O3K3&lv0RHx`u2glm`NPS;qhgW`jhxX#ttc)~xZzZ`*acyapT6n$u0vjw; zJG)2jZn^eT##RS$mQw>C6=l3tE2CCXPZP>$v`X(ia&^_La@ciO#^cyK;e&xGe6=N= zdP}}bx}F6`#R=)h3-ONAnpM6}dsw_uaPfNMZ9Gr%(mc~vZ|1HudcRnasEtOf}~;Ex-9HaPI2GyrnxI zeoRab2buatHK9;Fe>(Lsik?h;0d~l>pYQZSV7K9<_$ZyqN)iXNYpKBE|5vMe&EPv^7GT`XiD< zd^U|e+}4DatMhsyKRh?y+InIXu5vZ(t4Epv&j3}8QM@-5XQkk#0rEE+Evxa5T3me7 z7%q6nd}84|PA$6p>rQJi4ZY|SZ}-YGi{?EvJMVpZ33axMscQUqA%gpE#_}=s3u}$# 
z6O~T(swS7F_Uw);sODQM?$m2|6wxnx3bc{7)`6f>4L)ws=Cm{4cV2j&LqpJJOE%J? z__-_UyzktT$JPs(@06{CDe0yTxqoO;S$8KwUGF8^;>!Hg-^d;dsRU$!GQ=@VmpQs& zgZ$vspvmwbGCw>%|Nge-_RqM^%}IUcr+X6(>TGr{bTHQwH3ga zbOxWPO zYT_ZEnv~&&tOi#l?40pR@)K=349fSOUMq_=grb=Z&Wx#S6Wm#_#D@5yp8kQ9y+S!q z(Db5vHYE1GI(q)1ymbvZ1t%xZim*_-R*q0cS-T|1!SuPy4PM0zVT3?@W;m4cciHtY zWgAAuihMu$(=HtX`<@B2Jb!XkvR8XB+34DZENNV~88(z<#=_QI&3~(;@q1hpQ z!{Y^0obyDVq`-&d_JI>Oi)5Ly^siVXh#9wSiOTX~*W^jF+tZM1`wd@Z8aa46R2lVV zx?@F=kJ}+&rd$QM=)vjT+UV=8ZTD0dM($9S#?qJFKmMq@k<`XwmSTCr8o*9|phE*2 znc|*ydP;36B%j-5WNp*g!D7~vR;`xR01M0dTK`k6x#mmA7U|MRkgVVXg3+^6A)jQO z&JJe&w=&yC8VKccsKdFI^zeh824^35{ZZY`D9tUJGIK?KLWM`}4uS{?Hb z&+XJJj^)l5YASl-BDx0OXI@DpfrWiM30xV}&BEVYpfQ3s6-&Qo6F9Y!oxYRgRt3{S zdQZEv2c!>TFW$L-(uDBa2(A91-up$V6A$z1Dl}8V-Z#6R@|7_dCYqI@8+H!#JmdW` zUCUK|=ez@B)J7Kb#)op`71qzOg?+#hQgD5XfsKQ_@#t<+l*3ZL9f2qy!%Fry|QvdI^(~=U|6|e&LPRA$W z8VkjaKDbp11^BgFTqGu!@%-XO_##F%hYHF4QY|iz`Px58516-Z6wH%ws^z>2%8Lc>}F1{^3@Eh??n^h8PHsJIc$CdQDV7 z`gU^i{nMHD1-WrUb5(&gZDKuU*oQ{iZ%rtUgHCkTW?S#aq7DD9Ne;d$+iCx0#cthW zIijlai7}VBnKOpN(sj4KhOHsTxInn0Xx%NG)!t3FIm3QSp2N;hyayI`1*9SAO7V`{ z`m6RKW9o&zH8s)0-Mv}!Z)dYx)!BOIgrKYzoS(xz7jlOa8>&A|%qpJnqP_iwF{mdy z%4ir5E%@Mw0pq;i7`EOqwNrEIk+4oV83*0!?8`a`-U=VcLVKe$cw}oV(>mp;9k1_` z%giprH(KG!N$$dw)dYQ`cT`gA>mu@FL+@m^SQ=9YKd~W8kX?x**6>W@^K8Kvw*HW+ zw0qTEk#j-4TK>TS^e-)?c)f^w$F(b5hhs9U7|D5RYbo;NVf5wc(q@PEO4K{+A2nFy zwJuP~9NdWrO%l|4|GoGOYV9@Hy>N|9oNrtpJ7YbmuWm6d6k<6O+}qWv>c+@`Rf}EE z-kzu($NG|&o!lT>y`RSn733^6HreG9E|JH|rTkvuW}IH%H&#Q1(AkF~8ruAzYG>l$W(mN4BQyUc90l!zk(&SYz)43G3}nhlevR4}?GvLkBVJF@&> zV&mjXtH~QAC3(-nT#%4b<$fK`bkCq|AakRxSJX*{mK<$bF+&f zA0=}jmC|_b9J<0aFXkxoRZ)D#w65BISfUDw6D67PS=r{kewZKZZcMaB?&4u?8$S~j zWu05dLcy?K8Vef^BQ->uG8O~rfpc?6>&?gG&RIn<`R9?#dtFf02VT;H(5jz$`t~x~ zomIH4tyx-K+~Y&nXXY>}DaT6`=GuU=@j@W&=I=g{Bc#k7uByt;@Rx%Tiza!*8TVkrd_j+ncl>LEQoEPf`|kkKmJ zXFSV{wf9lB?9*5&Xy=AyUy5;QUS+D<)UAC~s`=>HayA|Ymuu_+K$iPzwG2W*Cqn#I z%8t6%+}GYsz?ZehDdKYSqI{~kM140EC#V+hy&edh$oEyS&^W~HUXS;npJl|wz^%#) ztDvm5!r7~?;W|Z>Q#KEX=c{j|`VY+sSCxN4VVZ}w2M5bk##RhFn-gMGMx^Dq+3iyk6t`YBW+9bLwSSGMX0J-A$p*RT#rqQ6z;O@e^k# zyUPkxWP zc_&2~31`21$;oHn19UKNQkXV(Hae_7crzj)cYzLOp z*>IoNKS{KQR?-`jmCc@2I|c$3M8+g0xg=i6(|$poxnm|=Sr%q#Ild!C)e) zm3K&$w0mM@qZN`Q(Fi5mJ(XtG=pXuG$hx=}$_aP-(RVr{w>GgH=V=1nAFTDWZArqGOPS>P|9PezJId^$>kapD>n^U1$`X zZT7(IcK1pl14N+haG^L85xRmm{Cwx$G_t=AD zp3u3+<(T(daB;`H+J6K!5=kd@2waHD(epSAyDm6raz66yTwCnDIRloGxOGvTU?X#{ zzS{ctg7bYPJ>?*$W5=bw&G&A}H_7_EO9>MhLl{5tTPC6J)0ib=>= z$DACA2Rri0%`;(8A|spT#-YB~ZKVX-&)ueqj{EF2^B6+#u zCJOahjoUF9v090vz5_p_tRPH^O4oH`V{<*@dp-5;kxYc1(0+?(Og+6143vK2Tlq0XFj zwAH0NR+xs!pYbHzqm*=Y6m%;B0Zg+&$a zT0lXt^Zarvt+YZt95i>M?x)AaXdkZJS!ETYVT0tr7}3L$Z(Az0Vx%SA1*QCs*`^95 zhvmm9?r*AHHCJtF>aNOZL5Y*qkNwA9FIUWs3M3?Jy%%b~Tb%r?XbZg{gV%P`e>6u2 zgFaToObX+J&#DXGS!0T=z3z0t@(C*6^sem}`-sl|g}ffd>qJePXO3QHakgg%W}e?U zdlqfj?1imu2x=nH~jr!p|Cg&XIwem7hnxQo4Z6(Nm?BjwB>k=$K<~J;YHDFijRB~bSa$fkS z>T8{S;GgyAIQaD&J)rK!J@HIXS(xH4No=r{f*GTHjp$YqbnN*?Jt(hwxTZZz>+lji zgZEWh#_fVRf&R*y z^vO>39oM30U{w#tK-Y-FY~ zaAK=cv;u@8ib+Ovht#wY9YO2R`&nv49?M?L4uv_4oFurZ-21@4q~_SAz@SSNQg4N* zBVFs__j^3nj&7UoygpG!*z#LnYaF=331_K3{SuptGO6PR{&~O2+5;!UrlQ7n4hrKp z#Z#e_vm7>)7dZ?=O(j&+q|3&5M(;FfnB0r30iO(@OSfrIY|0RQuvdQnw8mVS6x*1d zCA?{hI6405y%{w@QfQqwF_lY^g+|SA1ypm+6rC?M7(cvRYAd?z?riP(RN6V{3RU=+ zSk0~dsJf#!hir5j62Dybu28cFq@!CR(CKsrNHylygAk_A0aow#s_uMgRe$B!i33x8 zExh}}cwUmli4ch8PCpH$a+YnipxrT)fca|blLJqhob;A1+JQkSC|BHMY{B!X-#vif zw_JR%cwhZ<(a-bBk%GA)Xu*k&o<#s@TA}aP>#!D5GB04oz zzREW}U_B!jecscEP!1E!(utLUnMdzrB;ZpH4@YjcXfnx>m@8apMGZ#_`081Sera{^ zr??8+QW`BqG(krt34n5*b$?(<(M2hCR(dx#($@4~qP+ut9);qvwuDsK9*I}2{hGE5 
z(J%HUd%+ooB(MWb>}nTgJRiZc9icx86KTAvC)4~7OlD*`8g^MNJ!|3x8+WhsD>#*{ zM7Xb$Rq{n{c&?Tw<ztqEa78EoX4J<3g zUDQ?zPb@8IpWuS=1PK8$-#F)!#<2KFx0;S_LQ)VI}*Rt4k?hHS?u#U^^kex&qVC0MB{QV&v_AN z%!YgS0x!2#X=_QjnD;0nH*F4pi`!=*~!$*;#N z8|0*(t2r|{R~==w1QkYl>BM@?8O5xPOGwjV7qgy38M5!BSQ>O0$MNeNiAs7mG=CpQ z7;Kq(=M_WvU{?H&#gY#_!{ECGMoQ@bliyvuvHhf{f*!U625z?$e`H0utEQDyFs0p} zUHo7OZlNB%tDs@^s%r_El&D$mci_dGy zTBp7+rTUgm#h9++U_1dBiuIy12T_XccX1ZcEt(+SV#wK#Y`*dWAoY-fpQUMU+Ub0> z{>=jr8h*}*?tX|4E_22Rc4Up-(d1h3lsIIZv~ds7&i ze1}-=0KhsIAT*(js7h8aGw#G{?egUPz+Z4mfNxw?Fz5u;+n0h{165;0;Iuzi+;8ZMQk6;WmS4{uqw#s z*9W72lk!_Kh>^x+#P{>2VsrbAitmsY3bp_K>_5p)Vm;~9`(yfu+_Yzy?2$NdIWVCx z{WZvdbZ4ua;d~TtggO)ce>s!9^p8Jfd?`LKqM&WDLJVO4{l%r`N~u9Q{bL$6haHH1 z@f~>~&`)C`^QrIs)(}EI|3PPehy#3vl}?=v^AhhjiWmFqaQ7m5{zX+6T0jg{I1ys` zPZt-OBI|1U|DPxi2j8?kN)pM_QHD8WzE=Ki;InG}$fF+`%g)D$c16j+8sJj3KGFTEotXzO!^d;;R zWBBuA3w05PYCHw z^jO&n0YgZ{g2ClFM*77%hCQo%2}eUIF&;`_LK)Z*G6l;AJBz)fbE=&4jE>U(_0fRp zy~4)F{Qp>-$ku<}fp0ouJd!&_QdIVAt4IGOSa$hjrTCl$37L;_fb3$~Wd;WivFu_m z9s@iZhh%Y(L;DH_atlnuhe_%8dIwjL@By8AcU27Y80+`?uE89LpYsec`OMVUYt~p9 z1@X`~&mWq;0edN+35W!H9zI}kq>R*U0^PvS^il#!@wS`Vhlo9jK$MpCF~ok)MACO< z;3!}h#s-TR+%Mr`k0!RzxT*f6?aD;!lW_!Mb2ePbf{9L$ASHMHwX4a_V60?sR*7Tl zE;AM#i1ZR@Q&|4)hP;J8BIW#pDY=y?@vs6lN?r#XPIcL$zx6S2MnJF%&n*mqL6zNj z0@hnT=9dARNq{$_AMXi%_XKk`{U;DBLP5u~VZP@TP}Z`Bqk(y$LTWAppzo`F6+RS* za@h%Cn(<=v2&InTsborbr{8$e>uSggT@3m6`#|_jEPV|}BVes24EJzM-g)t)0BjLe zJ4ilVfb-2fKfx?Mj(g7uldJt698#9)wsn=D!si8|iTaGphXd|)o&MXP4`N4(<$CVb zaw9Ay*W(nI=SL)_J+eR&0kkaiPY^WtgGDH?^5y#xM(K;#lZ(FrLY<^VW7vJL6b2*O z*1!czVcg0ZxL_%a$DgwN*%p*C7HmDiypDlK<9ZF$gH5eaisoIh_;_^jm|&o0-y7=S zAnyc3WCz=2sE9qZNP3_@f&CU4u!44rnm6=f1?|jlRL^ZErIY?E3)mwYAv9$NKlhB0XH3fq4JxALQ0$K>q#y_fPQ$|HnVR)7E!(>J@BIt-b)4=lv&H zd1!HuIlEyhceEggZ7`F9I*Su!_`((4Zvk3dWhH9fcMiCpJqN{}@)_!@Li;FT9U0iUiU5tL7w}EM)!+05X z8(K@Z=0X{|yr$O;$sdXVf4z-gkKeyL;_}dovmj_qG}sphm=K3zUs9U>{_H{cY!=Qu zl7{zNUq8SU1(BVS2U3Z&dH5?xlSuB>MmiH5;+C?KPD@<6W&S{o3UKl3TUP>6)|C?D zGNnrhbn_usBGLI-_<{LeL9wB;m3CD2$KDf*LBbG=N5WuMivPu8)=zQ=G>&Hw68*G+ zIuaFMkAGnU^|?~w+nY~H2=dV&Sj~UDN-%U+^=~(dWIbojM?3x-!9WY!+1!$k;`M2j zvGoc4|9s)s4K>Modr0pnDr4_8$&j`ommx`_gCbxYWUg<}KRLuAZlwG5y5^eamw|K~ zy_l7Ib=@zHB^1>dIUEPDR0gW2j-&x#?#qQ%6o z`f8#G!NMY=zI2aBzAOQjX0+pUX(T_oCx5&wXS2eiGpgJVx10Ph$;rs76tVo|6ZMIY zLm&qCrIk#GSLW|G;^SfY{zD68*L%ww>HavXTT{KM**iMBlT0zt(ApaMuIBxFr!MwN zWho?K#s9qnC9VYM(oexHM+0!l^KW^3GDAz4PanlY*s}7JAA%CJW+-0Yp+NyguYBJKl8(r@!e7K?|cOH+{fQ?}rc1!i%pR6rbVS4m*`DJAMF z9RY^bFBzu8d^f7?h@TiBcE9Ap<4wuzTJYr{SXn*ygHIntKn0RxjBj|HQh@( z$w%=hgvLN!-UK(<%qQc1JRD-wTte4Pp^WY42QdQDEkQ_E;tIM&K-1qu@DNTT=xnvW zInBz-ibaO;%i`iq8u4)@m(vDJ)1%KaB`L^2#@Y5qoJ%K#kbxhIfgPbKUbFvyBAIzU zo#TzBx;l_=Aj)5?_&w$AtcGQxKy#Z2J&w3Xv4EzCt)#t(81ZDx^Q~@rP^%v`d()}gT{e36zX1Jb?E?Lm{GoqK zgEoHyv3Snhs~DJnD(XF6z?1a$@avNw-1uypyn!T2Hh^X6`+%E0JmM#`Lq+w~zsL&R zjI)7KSb>>;GC$J%oD7htfy>2+hua(~76akO8Vn+3Q%?tm)qj`dp3faNwsciMINIbF zAON!4&=}~u2vk0w#^0otV^@6*WTJTWr;0Rimo)U%fk?Z7d1G;5s5H;gJ)u=CxuEF5 zvHbe_I;^meGYr@Q<5(2J3Hbl%1iZ6>RA6ZhSk%OE2u%}8ZW%VPR&Tm)B-1A)NT6zn z(MR*AY{VhM$hq_ZS9xkYS#w|L3df~205bhxp4e5u3NIZX8$=U8Xe=Of57<%((r-O+ zc~cUzc|$;ar9l2utpC>j{}M(x{nVuGTZr)Zzg;=n3cVdg2NsWxFsPgKzRcMah4)Q3^}yU6IJF ztSl{?>MUuLWpp&$KafUjoRS$U!A{$I1t5P#P^l~Fax}N|0ZCxSj4}hl|1X;74@$<{4YV|zdT2| zu-18_#_sEb7_@$lENtGahRb{KHvL)WU{(3lFUEA|r!Mh15i;~u%eMVeux!0bSwCPieob~aR-s;}5bIe$85 zMGH_m+TWF4P6pIN2iyg)@$af2L@>sh`hn&rKYdS(R=ll26fvx|aYZT6M zZz>yCU$NUjXzM^|$d|wXuvM%@151@no! 
zM}bo!XRiB-@vFkmm1vVB3bH7 zn<+Po_##=PhI1O?OGQOh>+)S!U0q#UTU$@BSaIp)gr)!FD8Oc zd{g20Z+8fmK z4~FDqaI5fsp5)JU%-2U99mcFY$R)7k-I6+7nr-rXedp0orAf8x`WWY->JA4xdmfdb z_IP1o;akZFiwck9y%a$g+4`7=h4She8o_F*Nl8g}{GNVm4JHmQP@;_*v#y-Z4vUr+ z*(;3@-fAWGY)7}wIZsc_*6l*N2k%sKKqKOKv84X-pXSyn`&B4pFKUNv{+DY37pwIq zWn&#P2ML;e7eaC-Lp>zpA-B_zqLw(cn)9BpPWa)_YP1$s7IsD2+07^Z0{9?^VB^l) zEp{i+ONLYP+ug5oF*lbwYdBm?`WQ6aV#2`YwZQr*eUs&o=Mfj}J*UTK1a+evta?(s z__|ToWK)Hh&)27GN3FRdLpO!rMU%t{)f7I|I9wi3t}F^uKFNQdV&4dzT3cN+buwUk zbTYAsahWe#H~_?}gvIDt2$UTcEBymK&ED8-6W+ifvD_dfOvl}8t^rI0>)}OyANMec*b;!x?Berks zS=nGBv-#jWdOe7vZtHdSdwDlQr#m^x-k?VDJQ92mp++t~XEo(L_OSGYvb>4QLF%XH zvGxRQ(T08Hblu_lZN_cM82;nvxmQ(Wv73_x>gCFOP0$6AfocPaf*vg4K2!e5J#>%k zUQi8~nY|+f9z_5=3Z*$Y5IBQ>iv3F-E!GW9?wp1zFcnGaIQoM%sSM{)qEXSSg-rbV zp<2U3+bB$(&c9kWR%E`jP2JI`Gm)dAH~Z{UFE(^$+)9pwBDm3~(E94kZ}QtR>?8|H z-J1cm>(9X%q8BISA?4M#bV-idD-#DETry#o`sn3Cg6yXy4MI~%h|ygmS=8or6y;!+ z95jPoYSj{yUseosmaqDOf{edVHeF;jC=r^ic_=w!aO2UCXk}$(#C1W&8H+QJs`Yv8 zO)4uY!fAx_$M-fSb1JuGGF5NGV6fJS@u?{%O5^-{_H(VqM>QK2lkcMxKeYpYV@h>x z{$7>75n4>t_8}J8=}opf+^jFvwJ+OE$jkn&87NF?Kr=UP6U$0d#3zJJ)K zsqS1|&5sTf7QJ0?9M+iiEFvSgrY<_-=q_{6$al-a>xJ!7-5G__pRJUNL(ngK4hWIm z{CBT4Ti2u0NgTW?O5F1)dj&@ob1r#;)9Zzjy+Prb>cT9!WrF;D&Cg?l#o~3+Q?tKx zgbMgHqMHI|)soNh{w*Jveh#9nfXvD#3pg%zMA2hjp(d0gp8j;sH{)J@P=2(ryL$|{3wc~AhAa9!ErG+*y~#h;_qbJUtj8x19> z%45)}(3C5oB6_dFoS33E4aNIq{x@eOn2@H=S3P;v35e_l!)2{(P~Hd#+KUNbBFv+I z=JShh4<4~HtSN}a_%%L{J%|t6VH3eB$BpMi-#9>9C5z`ITI?2!LCH_P>Oal|)7NN1 zP@zTPetXVb`IOhrR?PHUObjW$lajJtX`f|g?6Y3BsxJR)Vr#5AxJO#izQj+qYOH<) zRKVd3ldoGz;27gqKRfqYx+Vj+XWu0^V8JR6zVw5q1F-LXuNFz-oXf1YPtswlx9wK= zXWQa_7;_K5MW}LWt@&4?!$Y51uICveU+a43KKZQuE3R$byRXdcwAD&y0tW#+QZrur zK@2*ttQyXmOKsgH>W5Ec_F=*&Z@<@7CeY4z+I-kM`CROwS-x>R71XQau6dCktQLHT z?GL&Z%Ok4sLMruK=1OZBv7@TDd##r?hb5#LXa#p_gfBE4?I~wsosn`T* z8pep)!G7%cgy7VAaDLb%A%f=oDeu;u!}bPJxmR|mH~Hv#g|m+)ow~EnRtm(aV|Li! z5S?Dk)7?1AHJumT9NqmulGH7xGPlN<&@$-6pFk>14wFntJJ=mre z-W<9{D^+MWr^*8t${IxnV91346G0J;NY2h}8B*mwicaWj9I63~eFUAW4&41w79$KJ z7S1S|@6$5J(-*uio6vZyD)X=15vfAk!*bUTP9({S0((2o`O?nR;=a_wGMKvdy%dd9 zZ~0-&J-#sC`U8cqr&~rgCuQ3)#)Gq@d0)R(p81`W@0SZ%RHeJTbJ^3|en85H?8t#~*^0zPXqGmIZID-@B*dD?F@EQ`Hd#fTz>( zUpF)tH*&`)nF2G6YMz*ICMmS&I{RB8A#`tlXHA2MEKpjyX; zl(mMFwbwVDPoe~QCKpmFi!{!?X<={CPuZoLuytJ+!lG|_!n#K+`?@M_aRM21#6I4T zN)t$bPucOnD~T+iiA6f{CmA2z$lYR)PZqqnKb5a62kfvdctbFKD3#1tpW$@J@AwCt zWlAG}5y)|lRDAv5QHd{m81R$kS@EbM)z7|edr5tG! zU0TMs>vnIp-Q%)LV(3g6!n=sLKU-JwRxlQM-O`K*Sy#BQy{$an0MObPXEZ=V9@yMR zLsz--_=uBgNkbad;0g*?b|!N-Xf?~dydp$m;T6;FVf;`$Lhs?p@h zIjFEp9h8ON08FaD?vkT%y(OJv2l+DoMY%LWmEybh z);Ok)cSj@Ke<~S*)bIB=aiZ*KD1i=30@a55voN}(9^=B3H94){;hawr(6=Gc}s&6LcwFAILGL3dk4efI*mkOt!8#us4Gr z8IP4M3kk1!c)rsNsG@wMpcPSp1DAbb?hEhN^UoWqp!Nr9MEetUkFWGjN@4v@eL9iD z;Hb}qSCz+uVuQdPfVmJ8d_xn6oyTZJ5sN_uFi}>;LV(|enTWlH(siGS#o&OsnZX_b zt2+2EIs~kTLHjCrn1xx>nbBfcXe+e3x_YGn*Ws4E@Wi``R*gKBG-N_2yVTo%a}wW+ zX#y_dAp;zBdleIE^{1J*RfkP7wl%w8ZYm%=jtCQu9tj72(#5exJKsF6f-70p+9uz+b?4WK-( zL-=F>bBQkx0((OUE=@v3@8;OT!ou*dN{lyz7>+Y>6(HcBgc)RiO!8ixE_lpro}=ur z+<)WHYo^`-<{y1rH9+~p8j}^Tb2h0f0geLL%PnXhRg>qWeYZ7 zrAM{#|D~H(-bwZVoG9y`X+;$f*52w1!U+x%XU24}=VE+YL1V4rvLwDlr7S>~Y^J~% zyF{I4CKm747>ZCN+66~>)&jRLI0Bp$nuM!?`N<_WY6T_r^_ve_5?}0omP9nWQ6g!% zss=_z^Nl{JY6V_$Hf6!(W__wN4D{QdU3o4oSnL^J*eV5vM*@>HF)RQy3FrpAAPfTB zqit?`p{vYMo|3oJa7?wJv5#LUg3IHuY>8X2eYLoUij8@SuYDB4mm{lN@05STD7ybuCB_K9v z>W3@UJi>xY&W29!^qQ`u!X-J}_Y;2-bhFG#jJYmGp{AX`9BFVpeg;(Hb#e-#igQt& z<2qHEeazH$VJ#0%PA${aGV}Co_jl^c(6>U)f@X6cW}^Eu_hJ%qA+UJPr2Fxwek`N_ zhWw8Rjr?&W6^*S?nh=T&QsV9Z47f0B*SjAkS{8klA7XUA9O2|6bw?v+XN>lj{(+$84LaHsK)e6@h*3J)g`zd? 
zH}-T_)$NS%tkEKxNU~qmm@=uihCG<&bw31_b^mf`#@!Su0A_6#Vle>rL;epyl-p(_AcQFI2=J9~S3504MhJ+D$yQr3zabSH{MJ0~U*oy>mFGGH|M_9Y3b zrFzA*Pa2~l^ZnS@!*j~_2W=kR=cgqoi5BkRVn zruyzmeZ*5`5Raazrqw+HkDeg{I|ut;;`|K;5f56)l?6|U(e*kC=mI1DJoh5KzoIyQ zAb>DG<3vzbNg8^dN&+&?ejRd?h7pe@)dKsG_G zsovFm3H*r!zV9svOi=JNh(mPWVTJ(n=6`p98}p@2Y+iHk`;6DhKo(txcn9Zf+xlc& zTQdPwqU-G!`e$gzU}NTibxFqh+Kd-s$0EF{FOlu-kqSY z=elA2g(?8wQt!pd9D(kO^6ifB8-HPRoG00=RDuj-#@};fQ^^uvIEEUI~$Mk0&M5JctbAIdBg)}fokgnNPaFGt~ zi7eC7(vpmz8MZp!+ZarhzHH!^s}4H|tMP6&5;Mu7J@>`q`{Mf6IFj~bp2{^kNrtV= z(U-55q)N2NXYd}-Lac7PtybHa4g4*d2!i4k>J{zs`)V!SE%`CE$RWJ z#&LS^Nq7KbI@eg9{T-ipSEt%*s2dAUve0wIv+-J&;>p|)6-B3)@1mXcDi@YM3%~24 z!^g6C`oJYn(b?By!YYBPsUG|VyjUx0AigDHpJ%(b7eZo3U#scJC-kB0!|@P4-^rKu zg1-5Zt!{cn=dUA{0mMc+-`_hTMT`lzEEx<8>NwM(8v^g$e4$2czN{nf)?KK0#Ol`NIdH zZ(cU^C73??OaLIoAW$tN8Q!tAKN4on#us_*A6`S)7yB_rvZ8>6cpp z8s(tI*g`y`$o)MP3=EsU^lv;j`MxN3OPmw9JehXgY0bj1Xm!o=?E`ujRS3wgkPJGHJG|K$g7mf9B4_p?GF*DrV$^ral6L-3pfc&5AHnM-my zo)pGYr5I1ILlB_5p#;xYOCM?aZ0|Tq9>!*+3F=p203KYs#bFQ&FvHML@i)tfPDJl= zEO}Lz@p!1dha^6=2z(Q9wS58W%uprw`pt$j=KUavc!DO8?cD+~7%Es4LzIOJZ>RMc zX4_`mR__g@cuxY2{?t4B31C9E1Id}s(WUX(2d&6_IjMEFK$+i{M_8OK-+879{y>OTc;D_ zRqFl1dp<|GZ!A^Hcq6@buagZ(K09kQef(Fr+gy&}^M^Ad6O%{nJ(7VPCXpXTf{B@Y zg~S4>Z0i*6c0|lM+YfkyIZUe{?RDMCSxUrMSsYBjo0DH(T28J`Z3Wv?6`tOxiMaaA zI0W3*p`xm$mX%)IkIh|>%XKXKMRypjr~kT6I(ftQ3H!s`@2?!5p4t@W{>)WQ^E_H9 z7&dOB#@Ykzgjh6c;CQL59-9e3qFl;qv|!a=nOnS*;4=-@U>JHbSWDd3uET}N)vvzW z53ouro#8NJ5C#Md>TY*OvF+xjcW%xKfmt>qfRue|sJXIv;yHT;#Jv=+ zSk+Zaln|ps3cK5_QQ_1AnW?bR8k?FG#vDl|{L{|rd!}$9g#g6u*`A z{Sw!eG5 z85<>1k13n3!63qDoo_K`nmgialC!5noRV#^6|lnLG5zWu{o?%+#IrKQULvH0nd+Xm zJ#+=Ym2Z!grrB4ojHc;eRZ*g)v`lF;3|eeNi}E`{P{SSp)H0(zP(|k^Fukvt5J88} zZ+%%X!P*kXXnx>Wbl3w6AM(9)*P@ePc z#6_`ZTwChri6bEWF@`4xWF2&NzS2{@KDqyaQueK8%q)#Yv;Dm5N^ zE~hxs<&WQwt6mI6x-g5B`Ul-~X{xVwWO!-Y2(jD)4`%DsGYiY`rkef)m@j(u$%Wb4r=VZ_<08T#Rc^Y6ebI`r1#P_r&8sFD}5ukH%Qim05;VtA} zT%`)`opq#a_cd|Aq>j#NMcGyqZMOFYe0~+LyP) zzujf~;yE4MzKOGqMrq^z;~gU1^+;^Lwd<-Uda-PqL{oe=hg@%D<$%EUGCRLMy|*W? 
z;F$U`?``)E8^BJLwO9Rp$l-F&QMeenvJp0ln1%R3+yY5qtgOMBv7}b_5k2Bq#?$Rw zbt37=)Xm6JUb7-qFTR0?N?a%j?U`f<+{O9j$x)7S0kbsm`YbRpfb(e~un`K%pi=IF zgwx%LIe0MCFd@U#FF)Hnu;|3kBnN@^T~QG9SuLk?X|mCA=fbiLzd3Nz>IBuT(XZE9*kZblgc-TMooh(a1^=izZ13VT?u7thd zuR9eBhn#13Xzh7s+>i8hUL2SGav1zpDciu2fo$`DWxGQhkwAX6g2n6%9#K-UsuhPZ6-Vxm*^8&3%A~ z$q&(tVL>r(iM>$Tif{GHB1sG%2i=S#p=lq)2lQ)4(oX>Tfyb&2%~R3t z=Y={=vKycmAYhQd>#sB;EbO^{s=K7Xrf+~@1qfrh2_pW(!^4f|Wia^gOkW4(iScL?D~7e{-22Z7;WQ3hRxjAFw-2G^>W%2`f-n$F0L;X zSOZT1kLP3cO5qs+AVjvy1M_zsMS@q3Bi@g@E)qe9g4(gzKdRC2e~lX+BN44L^}Pfn zRT+ZC_+?&jaVvCL~k6FEbc$&p(66QRm#|(jLJ8kXE`M zFYL#nU;<`&W)>+qAK#v+B=>K+!SvxYl(a=8y_hXm9u-bGb^geak%Zv$ATg`U;c2_{~l2ES0_GZn^?imVzOYh}{h^x>$XcDX39>{hm{YxmebX zuw0|oU$Iiu=TU%4LCwSWbq||3 zw9o33&SkK)13A1LGZmI8X)4=>%x5nja`jzKdvxnp9xj5aEqWhN0Z44Twe+np&z!W` zAUymjM`>r|b76RO4NytLt!5eQfuvXMt(L_J95|K{La2#}M6h9}Q}Xvv;%R5gppIfu z54%u5mY~d$0K`m)4BcQ)?oD<)rwOsxWIQLX!4H7ZLd9Y>0EnIUj7GHtrtg73I4>lW z$bI{OhLw9kSuA+)-csnJ&wiRrHWHbdmO;6(8wDIMqvuv0JyA&1$NbLG{4ky#Z2Lju zuVFr85{s>^m$bIBS{{@@x38ED#F4b#`uX$OXWwiv+ZH}rep{BjsZri+-18W(ipclI zYiP2tTgB1ls@Ci239b5J7izVcum`o2Rz+S8AUmO?Z4NEcwn*4b`(Y(| zjG28fKy@D}gDm(UsR&1gZ)>is(Vk>We}TXKc#dA0XUK3DRTG=Vt!_-QtH}KXb-*G} zW%HO;?dp$4x)KbODPt+~KmfI`ulC|0%Rq%%XoD(lBF!B)Y(d^`rl8e;q|l^DS{EJr z-4!>%6yMJi6#e|ZkoTu+$6FC6%}MdQ z4uMEU_qW@LgRRkX7;+7ya#ncXm$k{+4Tj|I@&p~z`-%8vuZeo-Cn)WV)zl<(r7+y8 zyvjkM)aCFxSKuWjE1EHYy&NM&u>hwCMqD_BoSgbGm+^TVmE|70`HM(a9jM0 z?r&Y;nn~H1hxyb*)GQvKpU19b`9vA5~4eMV61ca9wBV#cj-kdJKVN zKdH-gxZBBsF`#chOjy;M48$5OY)Y~~cs@iie5<^IK zk*Ms6z&%|ngP$4iG(qf-i%KpE6kIRMuR2CdCe1fLB~R&(L({181bS77_-}r~6tepq{q8bDnI`=IQe^sA}Yqj<*dy!V77tdsA@C`2GpmZBm2T#L*K1 zIQFj)Yte=L7P<-fYyz6DkZ4mcn(t;eo-^h&L?)4-)v*#pO|On82uWvb%dXbe3SYl1+F(E!vc?!^cBp*Zg7g#wh)9o zw=AfAXqzCN+L7!u_qF+}t!|H4-;VluI{EMfFGI__L0RjE+ zSAXH$(|IWn)Kj6ff}-}za5WQ-g3?M1={jysDwqm~$Onf2K&-SekL=2%y;sN6(2CU2f^LO#Bdk`+TJkoqxP`_i{caCfpDtI>d?%lQ0(CJfQCS;Mg)DIdZLejL3o`8U+>ml>t=De)Qqjl!3`?&Kd+&er5d&RbHiWVNmVA*APH` z>Nx#S!;sXj-`ZHpb_+BGPePk#tO8OgFVnDmS7~8g%TRKQFE$kg!_G(ShExtAx=l2k zoQ4ofptnPK5Hijpu;YPX|0f{uuQ?)G?G=GE=Ra}t32)1~ygP4iK2T^%!DW(-CtZu^ z9JFQPifh|B{|h|UNb#Z4r~hoXkaCLvXM>R&9)+5ZJimRSm? zx+QQC47T9R{MWcJBBS|J#`@PtboYwzLn(j1cgYe*rM91L7kweEStzXL_XQfh(5TDn zK9d|(z1+CUTr`C2=jy6uTwPvX{`6^TLC`MTh5m?Eg8vCauAdAv`h_8HNIeKh0^E3^ zqLc4AzTh93=3l?cSjjgZdUJG^PMyipNcldce*}AU_Rv)m#ZG{`@oR4UU%#jzAyrq2 z|K6_311_v$jwNFq3?aX(okxT)qa&*?-h7IU@Z+e!UBaIs?pVmJQJ*g;P zh^@cORqx>)_7rg4VIyG7V~!ZZYF{1AE)#Pe5)^D5Bf|WOIH`op?8>!lZZp!UzVUh1o)A@d!Ck zZ55u&<;i2s8@nXH65i)a7ssl-pn=f(q4@dp zYhNd1+8LC^T_lA`2AZU_59Gl!1&kI*6&cQj{ozOK)15Op&4sddUrCf*CqPiRkzGA8Ap=XBMb?V`#q(E(<6^bMZsv*Ia~ zB+Q2pZsD!VC|s006R4r(ookxeG^-^hbeQV}tYw>9agxFRx_T>3*un*L!q!%=(;vQ!oZ_RuDL|9>sJ6F!K)F zJrgHmGO|ZUb9FmkE3Iqc+NoSVa71w_@Yi_?DC`eg{wpWuI{2uLAhHTE`N#Mrzdkep9lbD5eWAPeul}%tcs%f;pzmUfU|f6szD%SJ6(}%{%w1ovZ|5y?#Gz@LG}d#^a}sbpc-dPh?nD!KxXeeT}G zu_tuL;&MZ*7Nhr3Mb)3En4KQc(0jlV6$&30-CbVck6rGYo^)Qd)+Q`yUldBrJ;;Dw zbn3T~t_x$2``6EJf7mNv#YzeVlNWncK%6gJ@Y)m*5+B0<^{gleER@FYHg9O2Y(&3w z>=@kY;*SFxv*t*`8S{fEZWWIN3U3W!MfT+pG16kAcMnCLh>}Ipq9Zz&*X*g0@-(ae z$GndptzRym#-#5S(LzV_*g5>AZcA_C_~iX6CnGBt7o0{BT(lxz1z+?}m5ozs^8-oic!rG{^#R zN*IC*^Z+q*Z3!U20`}OuhGUjk_$b<`c`!ZsUDawJ&wlE7lqb1o;Oa-qnGve)C2ID@ z{aKM(S0Z-B^VAC0KDJ8)bxs(f8R#+{7gdDl zj#VX8_~D(QASd}sBK7?0+uCZW=243s%+c)yZhxIaH+MI=x5wV4Toe3JPJ|qv$MGt= zUcOlRw%0ZnQPr8SIAA9HJ?x$0W_`iH9nP`R?_VQobbP(Y^zA;iiYe57zuaC9jIsrd zF|&_f5uEC#nL&^EJ{6DarSJ1iF1vtd{q4rxMow3$;*d&yF0Gbf)u4iT3R~{0Ozc_W zw>p?mr)&P_+SHR?ej13C0`Xq|^lT)3oUjJRS(%vZ8}{m{d*wWn6{}oHfZ2GQYl-GFqT zBPjeMD7+cDhrAw5!oNVg+Y;)3;<{KN&`pz&ty^-WQsm`ML+hc_z_hH|+@4CMl!}P! 
zmZK15z+$rg$-2HI2v`?Hfqkuil2!GB&r!S9O1SmciU+A9*W(9uThv7@l^DbaA^!Pu zZmw|90UafjtnMBz1^`u>Oz8nUKAB1s`4{U@q(M&>i{g;h(bDnj+P%3XPO1Gyn)Vm= zAw6?Oq`G35B2tm}_~f*B-&xnX*k*ihcm1td3oE|SOMN*U`nMiuIA%GSs+P!@C7C^{ z4_duHihB3)KJFN2{C6mJoxbWi)Lwfdfta8P)PDVGNAK6H;{B@qwA0Hi|J{U?z9PTL z(-eq9wcpG-?Ft{}H7jGG!W}-P7v>*NUcU40$dhYs#YmkI2WFqH$-+_cgXjj7hg3Xs zNP$-1Fu>V2-lI;Nx~-%V=DRDG?)YQLy1W&rn7}VoVv%v2;=)VwM{>eEv`HW*?4`5N zcRT?CUJxt@wyrD!<5`%3o-_a*`owq7eUjADdF#8PcC4qQ#%O12%UcfHTD&XwAqn3bVUu7BxXhs zv;N&-ZL&>4UYm~;Qa|z%`Cj?AslsXM@J#U1OznpAUbX+0zwf|DSAvy|)2DX@Fof(N z8z$a61KB%CVk-bB%!Rv4%bEs!)FZgW5MChOeLLPKWz-=CJ7EakFMw0tP~99-!G%_Z z1{QGA5QocsNG&Yl$@Ep&;B|*#gQpU9lm;Ayr!gRIizoW^LJyi|CmPfRYM`440G26f zbv4@c$vq+n3JW9f_aj|34fDHG^8ESGF(08Q=pQPTi5i}j`cwRaA|73O&5Qt*Z*;{5 z303C}ul4FH6h9_u)%kcQnsjxy#HrOBh9B}BbV3ZC1q|Uh`UqpY2j-`%@E5y!)8i)D(+^L-Z4FG(q&gJPQOTA`61N48dusaqj8cw{HPD%2ht4 zw*cGAZ-ZR~1u2hw;ofg zwq|g>yL-mSzh~wlu?qtVNa3Ey6jWvgGKoa@OyL)aY*s4&Dan4>S7FxscoOy-tulRz zNy~!aC){2k9|^lxS-6Cs;U8a@Cfr0`Q9kmDAkH*WA^(P;5F=FnE#@+)lily~WRtz! z+^izW$^P7>aPs9$qP;~Kt(4z&d;0~@FD1GIld{rxiPoIz0002;lv@V}LmbG~K*<>GI8&7H`M;rw1pX z(`9UFq(-Id+#H4wXE86=QH{#W=*(9XBf2*2i(he}8Xy(mox6Fgq+|DAa!`w%dXw>f zFJ-rQJNBVM&1TM7v+Zmf{AAlFb)pu)=2sHT;b|6hg1dPYng5A}7y?!KCBDweBk#w5 zTwxTVlRbcPk@$QP)qfM?xva{RiI##UukZv(khi##XcbxKMXlG-4) z^2lpAmBh$x#){H~Az#X)_HL-J?{$@RVSg4_}b6!IlO>58i0fH*#L+c>IKH1C)wYU<50lV zS=<>~w-va1{jjgSmdE^lrTwJuWPkPg?q@Un_hf5G*4aQ!N@#^EsLZx#E?}eu{yb>&4#a_> zX|2nT2Yy>i!6w+gGwsNb2;+|i20z{&e!qvf-@m0}rd(yGz#z#AQPMrRG03^7tCWm* zoX_VyxrH*)TW&w;rBM>;e3aW?u{J8RO8Vjd^_%_w2NEH!F~1{|{~qi4D@^$BUw}l! zYcy`M^7Zx6w|k(Uu7TXORJFjhqdW+I0XL$)z8>VWklfN8oSn_8t|--#rB_-(eMX)8 zu9Mi->IQ%;#mJw?TJ$r8kVXbiJ6f~vb1^ujM* zyr`=3-v05eCrg8ygQEd<>s$(?{f)lWk%Z`fjS~Hy!{*#+)E|j{qz!yr$M1Lj_Flo` zYd}5xLhA;mNJKN;)ZdyH4&5;k?j0UB$|`96zd>+-02QZo@yX=hj#^fyKj`!ss(B1; ziEXZb4Zn50)7%FRy?s@RNH;04g#N6f=3)gc?^5{n8}_%(5=UHLH0BxR{-&-0oVtQq z96lXrY@v0vKO*w94%1E89K44)dWmoJb?|frzF+%4ubnA_ihoDxw4(U3Z!)mq(LO&i z@h9r=1a)qk*0G`CC0T#7^}B~D;(AX`513uJaKY3REK4dc=-pdM?cY^8pSNce%>@k) zAVrza+{HrCxV)(klcny?&L7}1IhztY`9w5qCtay@A8;s{Ja=0CVF*Rrr220>MIGCf z$k^|o;bAPDF*Gv5TjtmGSQ-`C&^uCd4Ey%LCNE13wFRhCNpEhw?- zg2}*Lo2ip`ohg{_+hebCwvK;ziwJ>4Y_Vl{Mm$V-T7##{*l^0*G%Ttvrka(7h)-j7nFyZf$7&|~TINcn1=rCc3?1Q+t z>AfW98pA78TS^5sx}PU`N^IgvqSEx{mJ7CH9ilz~=w{Tge&+J+xIV$&nb5p$X>!-G z=+hdlQb0J4m0P?!j%{uwrU2KyIXpWip|LB`w|iAbpQfg6GrTzwrSNq@=& zFgA|o*a`Zx$UlC$65*723Wd$=_Y?IFI$f!l^|UI9-sG!(o_Fd5pbQn)_fG2zY0&yl zcl3;CAgckq4700KOj6Kp-HAT#!k5Q-nhwDzH|M_KNL(=WZ2ppWLOPtU^#Y#=IZUZYAs+M*UA%oZMu9!Pi_nd0*E&HA?|+c*wR?EH{pfp*u~OX^+a^k z#SAwDWMX4xOjgp=nNi}gE~j(Xpjp9`rJI@u5ZaZD@UW=&YPQWt zg|pe`uaMKgXeYWY(G~#8=AQGh?_0S;3n>BY@}rLV?inuUbqS-&KdI8ZFcqhhET(+! 
[GIT binary patch: base85-encoded binary image data omitted]
diff --git a/docs_zh_CN/tutorials/4_data_pipeline.md b/docs_zh_CN/tutorials/4_data_pipeline.md index 611d1143c7..cfcd03013f 100644 --- a/docs_zh_CN/tutorials/4_data_pipeline.md +++ b/docs_zh_CN/tutorials/4_data_pipeline.md @@ -24,7 +24,7 @@ 我们在下图中展示了一个典型的流水线。 蓝色块是流水线操作。 随着流水线的深入,每个操作都可以向结果字典添加新键(标记为绿色)或更新现有键(标记为橙色)。 -![流水线](../imgs/data_pipeline.png) +![流水线](/resources/data_pipeline.png) 这些操作分为数据加载,数据预处理和数据格式化。 diff --git a/docs_zh_CN/useful_tools.md b/docs_zh_CN/useful_tools.md index f036be91bf..1cb301ba28 100644 --- a/docs_zh_CN/useful_tools.md +++ b/docs_zh_CN/useful_tools.md @@ -19,7 +19,7 @@ 输入变量指定一个训练日志文件,可通过 `tools/analysis/analyze_logs.py` 脚本绘制 loss/top-k 曲线。本功能依赖于 `seaborn`,使用前请先通过 `pip install seaborn` 安装依赖包。 -![准确度曲线图](imgs/acc_curve.png) +![准确度曲线图](/resources/acc_curve.png) ```shell python tools/analysis/analyze_logs.py plot_curve ${JSON_LOGS} [--keys ${KEYS}] [--title ${TITLE}] [--legend ${LEGEND}] [--backend ${BACKEND}] [--style ${STYLE}] [--out ${OUT_FILE}] diff --git a/docs/imgs/acc_curve.png b/resources/acc_curve.png similarity index 100% rename from docs/imgs/acc_curve.png rename to resources/acc_curve.png diff --git a/docs/imgs/data_pipeline.png b/resources/data_pipeline.png similarity index 100% rename from docs/imgs/data_pipeline.png rename to resources/data_pipeline.png diff --git a/docs/imgs/mmaction2_logo.png b/resources/mmaction2_logo.png similarity index 100% rename from docs/imgs/mmaction2_logo.png rename to resources/mmaction2_logo.png diff --git a/docs/imgs/mmaction2_overview.gif b/resources/mmaction2_overview.gif similarity index 100% rename from docs/imgs/mmaction2_overview.gif rename to resources/mmaction2_overview.gif diff --git a/docs/imgs/qq_group_qrcode.jpg b/resources/qq_group_qrcode.jpg similarity index 100% rename from docs/imgs/qq_group_qrcode.jpg rename to resources/qq_group_qrcode.jpg diff --git a/docs/imgs/spatio-temporal-det.gif b/resources/spatio-temporal-det.gif similarity index 100% rename from docs/imgs/spatio-temporal-det.gif rename to resources/spatio-temporal-det.gif diff --git a/docs/imgs/zhihu_qrcode.jpg b/resources/zhihu_qrcode.jpg similarity index 100% rename from docs/imgs/zhihu_qrcode.jpg rename to resources/zhihu_qrcode.jpg From f4f6e6ba36d972af3bcbeb9ae9563a919a508906 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Mon, 12 Jul 2021 16:30:43 +0800 Subject: [PATCH 199/414] update resize_videos.py (#1012) --- docs/benchmark.md | 2 +- docs_zh_CN/benchmark.md | 2 +- tools/data/{resize_video.py => resize_videos.py} | 2 +- tools/data/ucf101/README.md | 2 +- tools/data/ucf101/README_zh-CN.md | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) rename tools/data/{resize_video.py => resize_videos.py} (98%) diff --git a/docs/benchmark.md b/docs/benchmark.md index 46a9ab30a8..ff40f1852e 100644 --- a/docs/benchmark.md +++ b/docs/benchmark.md @@ -33,7 +33,7 @@ by the training time per
iteration. Here, we use - commit id [f13707f](https://github.com/wzmsltw/BSN-boundary-sensitive-network/tree/f13707fbc362486e93178c39f9c4d398afe2cb2f)(12/12/2018) of BSN(boundary sensitive network) - commit id [45d0514](https://github.com/JJBOY/BMN-Boundary-Matching-Network/tree/45d05146822b85ca672b65f3d030509583d0135a)(17/10/2019) of BMN(boundary matching network) -To ensure the fairness of the comparison, the comparison experiments were conducted under the same hardware environment and using the same dataset. The rawframe dataset we used is generated by the [data preparation tools](/tools/data/kinetics/README.md), the video dataset we used is a special version of resized video cache called '256p dense-encoded video', featuring a faster decoding speed which is generated by the scripts [here](/tools/data/resize_video.py). Significant improvement can be observed when comparing with normal 256p videos as shown in the table below, especially when the sampling is sparse(like [TSN](/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py)). +To ensure the fairness of the comparison, the comparison experiments were conducted under the same hardware environment and using the same dataset. The rawframe dataset we used is generated by the [data preparation tools](/tools/data/kinetics/README.md), the video dataset we used is a special version of resized video cache called '256p dense-encoded video', featuring a faster decoding speed which is generated by the scripts [here](/tools/data/resize_videos.py). Significant improvement can be observed when comparing with normal 256p videos as shown in the table below, especially when the sampling is sparse(like [TSN](/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py)). For each model setting, we kept the same data preprocessing methods to make sure the same feature input. In addition, we also used Memcached, a distributed cached system, to load the data for the same IO time except for fair comparisons with Pyslowfast which uses raw videos directly from disk by default. 
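For readers who want to reproduce the '256p dense-encoded video' cache referenced in the hunk above, a minimal sketch of the cache-generation command follows. The Kinetics-400 input/output paths are placeholder assumptions; the `--dense` and `--level` flags match the UCF-101 commands updated later in this patch.

```shell
# A minimal sketch, assuming a two-level <class>/<video> layout under the
# input directory (hence --level 2); the paths are hypothetical placeholders.
# --dense requests short keyframe intervals so sparse samplers decode faster.
python tools/data/resize_videos.py \
    data/kinetics400/videos_train \
    data/kinetics400/videos_train_256p_dense_cache \
    --dense --level 2
```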
diff --git a/docs_zh_CN/benchmark.md b/docs_zh_CN/benchmark.md index 06276fa45b..7d033d1eb9 100644 --- a/docs_zh_CN/benchmark.md +++ b/docs_zh_CN/benchmark.md @@ -34,7 +34,7 @@ 为了公平比较,这里基于相同的硬件环境和数据进行对比实验。 使用的视频帧数据集是通过 [数据准备工具](/tools/data/kinetics/README.md) 生成的, -使用的视频数据集是通过 [该脚本](/tools/data/resize_video.py) 生成的,以快速解码为特点的,"短边 256,密集关键帧编码“的视频数据集。 +使用的视频数据集是通过 [该脚本](/tools/data/resize_videos.py) 生成的,以快速解码为特点的,"短边 256,密集关键帧编码“的视频数据集。 正如以下表格所示,在对比正常的短边 256 视频时,可以观察到速度上的显著提升,尤其是在采样特别稀疏的情况下,如 [TSN](/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py)。 ## 主要结果 diff --git a/tools/data/resize_video.py b/tools/data/resize_videos.py similarity index 98% rename from tools/data/resize_video.py rename to tools/data/resize_videos.py index 537cd45e9b..3ab33d469b 100644 --- a/tools/data/resize_video.py +++ b/tools/data/resize_videos.py @@ -69,7 +69,7 @@ def parse_args(): '--ext', type=str, default='mp4', - choices=['avi', 'mp4', 'webm'], + choices=['avi', 'mp4', 'webm', 'mkv'], help='video file extensions') parser.add_argument( '--scale', diff --git a/tools/data/ucf101/README.md b/tools/data/ucf101/README.md index 3e3af8c570..abac25f0c7 100644 --- a/tools/data/ucf101/README.md +++ b/tools/data/ucf101/README.md @@ -36,7 +36,7 @@ bash download_videos.sh For better decoding speed, you can resize the original videos into smaller sized, densely encoded version by: ``` -python ../resize_video.py ../../../data/ucf101/videos/ ../../../data/ucf101/videos_256p_dense_cache --dense --level 2 --ext avi +python ../resize_videos.py ../../../data/ucf101/videos/ ../../../data/ucf101/videos_256p_dense_cache --dense --level 2 --ext avi ``` ## Step 3. Extract RGB and Flow diff --git a/tools/data/ucf101/README_zh-CN.md b/tools/data/ucf101/README_zh-CN.md index 3b6b6e2ba3..96e9453ff4 100644 --- a/tools/data/ucf101/README_zh-CN.md +++ b/tools/data/ucf101/README_zh-CN.md @@ -34,7 +34,7 @@ bash download_videos.sh 用户可使用以下脚本,对原视频进行裁剪,得到密集编码且更小尺寸的视频。 ``` -python ../resize_video.py ../../../data/ucf101/videos/ ../../../data/ucf101/videos_256p_dense_cache --dense --level 2 --ext avi +python ../resize_videos.py ../../../data/ucf101/videos/ ../../../data/ucf101/videos_256p_dense_cache --dense --level 2 --ext avi ``` ## 步骤 3. 
抽取视频帧和光流 From cf15d56ca36ef5ad96463503f3935d8bb0b4c726 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Mon, 12 Jul 2021 17:28:57 +0800 Subject: [PATCH 200/414] [Fix] Update CI, run on pt1.5, pt1.7, pt1.9 now (#1015) * upgrade CI * fix * fix CI --- .github/workflows/build.yml | 90 +++++++++++++++---- .../test_augmentations/test_crop.py | 20 ----- .../test_augmentations/test_lazy.py | 8 -- 3 files changed, 73 insertions(+), 45 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 3cb0283ab7..85d0bc057f 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -26,16 +26,14 @@ jobs: strategy: matrix: python-version: [3.7] - torch: [1.5.0, 1.6.0, 1.7.0, 1.8.0] + torch: [1.5.0, 1.7.0, 1.9.0] include: - torch: 1.5.0 torchvision: 0.6.0 - - torch: 1.6.0 - torchvision: 0.7.0 - torch: 1.7.0 torchvision: 0.8.1 - - torch: 1.8.0 - torchvision: 0.9.0 + - torch: 1.9.0 + torchvision: 0.10.0 steps: - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} @@ -74,7 +72,7 @@ jobs: coverage run --branch --source mmaction -m pytest tests/ coverage xml coverage report -m - build_cuda: + build_cu101: runs-on: ubuntu-latest env: @@ -84,24 +82,14 @@ jobs: strategy: matrix: python-version: [3.7] - torch: [1.5.0+cu101, 1.6.0+cu101, 1.7.0+cu101, 1.8.0+cu101] + torch: [1.5.0+cu101, 1.7.0+cu101] include: - torch: 1.5.0+cu101 torch_version: torch1.5.0 torchvision: 0.6.0+cu101 - mmcv: "latest+1.5.0+cu101" - - torch: 1.6.0+cu101 - torch_version: torch1.6.0 - torchvision: 0.7.0+cu101 - mmcv: "latest+1.6.0+cu101" - torch: 1.7.0+cu101 torch_version: torch1.7.0 torchvision: 0.8.1+cu101 - mmcv: "latest+1.7.0+cu101" - - torch: 1.8.0+cu101 - torch_version: torch1.8.0 - torchvision: 0.9.0+cu101 - mmcv: "latest+1.8.0+cu101" steps: - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} @@ -164,3 +152,71 @@ jobs: env_vars: OS,PYTHON name: codecov-umbrella fail_ci_if_error: false + + build_cu102: + runs-on: ubuntu-latest + + env: + CUDA: 10.2.89-1 + CUDA_SHORT: 10.2 + UBUNTU_VERSION: ubuntu1804 + strategy: + matrix: + python-version: [3.7] + torch: [1.9.0+cu102] + include: + - torch: 1.9.0+cu102 + torch_version: torch1.9.0 + torchvision: 0.10.0+cu102 + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Upgrade pip + run: pip install pip --upgrade + - name: Install CUDA + run: | + export INSTALLER=cuda-repo-${UBUNTU_VERSION}_${CUDA}_amd64.deb + wget http://developer.download.nvidia.com/compute/cuda/repos/${UBUNTU_VERSION}/x86_64/${INSTALLER} + sudo dpkg -i ${INSTALLER} + wget https://developer.download.nvidia.com/compute/cuda/repos/${UBUNTU_VERSION}/x86_64/7fa2af80.pub + sudo apt-key add 7fa2af80.pub + sudo apt update -qq + sudo apt install -y cuda-${CUDA_SHORT/./-} cuda-cufft-dev-${CUDA_SHORT/./-} + sudo apt clean + export CUDA_HOME=/usr/local/cuda-${CUDA_SHORT} + export LD_LIBRARY_PATH=${CUDA_HOME}/lib64:${CUDA_HOME}/include:${LD_LIBRARY_PATH} + export PATH=${CUDA_HOME}/bin:${PATH} + sudo apt-get install -y ninja-build + - name: Install Pillow + run: pip install Pillow==6.2.2 + if: ${{matrix.torchvision < 0.5}} + - name: Install TurboJpeg lib + run: sudo apt-get install -y libturbojpeg + - name: Install soundfile lib + run: sudo apt-get install -y libsndfile1 + - name: Install librosa and soundfile + run: pip install librosa soundfile + - name: Install lmdb + run: pip install lmdb + - name: Install 
PyTorch + run: pip install torch==${{matrix.torch}} torchvision==${{matrix.torchvision}} -f https://download.pytorch.org/whl/torch_stable.html + - name: Install mmaction dependencies + run: | + pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cu102/${{matrix.torch_version}}/index.html + pip install -q git+https://github.com/open-mmlab/mmdetection/ + pip install -q git+https://github.com/open-mmlab/mmclassification/ + pip install -r requirements.txt + python -c 'import mmcv; print(mmcv.__version__)' + - name: Build and install + run: | + rm -rf .eggs + python setup.py check -m -s + TORCH_CUDA_ARCH_LIST=7.0 pip install . + - name: Run unittests and generate coverage report + run: | + coverage run --branch --source mmaction -m pytest tests/ + coverage xml + coverage report -m diff --git a/tests/test_data/test_pipelines/test_augmentations/test_crop.py b/tests/test_data/test_pipelines/test_augmentations/test_crop.py index 4c7c6c9be8..036d10029c 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_crop.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_crop.py @@ -128,10 +128,6 @@ def test_multi_scale_crop(): # input_size must be int or tuple of int MultiScaleCrop('224') - with pytest.raises(TypeError): - # input_size must be int or tuple of int - MultiScaleCrop([224, 224]) - with pytest.raises(TypeError): # scales must be tuple. MultiScaleCrop( @@ -214,10 +210,6 @@ def test_center_crop(): # crop_size must be int or tuple of int CenterCrop('224') - with pytest.raises(TypeError): - # crop_size must be int or tuple of int - CenterCrop([224, 224]) - # center crop with crop_size 224 # add kps in test_center_crop imgs = list(np.random.rand(2, 240, 320, 3)) @@ -251,10 +243,6 @@ def test_three_crop(): # crop_size must be int or tuple of int ThreeCrop('224') - with pytest.raises(TypeError): - # crop_size must be int or tuple of int - ThreeCrop([224, 224]) - # three crop with crop_size 120 imgs = list(np.random.rand(2, 240, 120, 3)) results = dict(imgs=imgs) @@ -290,10 +278,6 @@ def test_ten_crop(): # crop_size must be int or tuple of int TenCrop('224') - with pytest.raises(TypeError): - # crop_size must be int or tuple of int - TenCrop([224, 224]) - # ten crop with crop_size 256 imgs = list(np.random.rand(2, 256, 256, 3)) results = dict(imgs=imgs) @@ -318,10 +302,6 @@ def test_multi_group_crop(): # crop_size must be int or tuple of int MultiGroupCrop('224', 1) - with pytest.raises(TypeError): - # crop_size must be int or tuple of int - MultiGroupCrop([224, 224], 1) - with pytest.raises(TypeError): # groups must be int MultiGroupCrop(224, '1') diff --git a/tests/test_data/test_pipelines/test_augmentations/test_lazy.py b/tests/test_data/test_pipelines/test_augmentations/test_lazy.py index 8031501b3b..74d3d1e296 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_lazy.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_lazy.py @@ -160,10 +160,6 @@ def test_multi_scale_crop_lazy(): # input_size must be int or tuple of int MultiScaleCrop('224', lazy=True) - with pytest.raises(TypeError): - # input_size must be int or tuple of int - MultiScaleCrop([224, 224], lazy=True) - with pytest.raises(TypeError): # scales must be tuple. 
MultiScaleCrop( @@ -357,10 +353,6 @@ def test_center_crop_lazy(): # crop_size must be int or tuple of int CenterCrop('224') - with pytest.raises(TypeError): - # crop_size must be int or tuple of int - CenterCrop([224, 224]) - # center crop with crop_size 224 imgs = list(np.random.rand(2, 240, 320, 3)) results = dict(imgs=imgs) From b25cca3c5f297ca69afb6c7df507434027e7b8e5 Mon Sep 17 00:00:00 2001 From: MakeCent <42603768+MakeCent@users.noreply.github.com> Date: Tue, 13 Jul 2021 20:34:26 +0800 Subject: [PATCH 201/414] add --to-mp4 argument to resize_videos.py (#1021) * Update resize_videos.py To support resizing .webm videos * correct error * change back, but add one line * add --to-mp4 argument * Add comment --- tools/data/resize_videos.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tools/data/resize_videos.py index 3ab33d469b..82ba912263 100644 --- a/tools/data/resize_videos.py +++ b/tools/data/resize_videos.py @@ -17,6 +17,11 @@ def resize_videos(vid_item): bool: Whether the video cache is generated successfully. """ full_path, vid_path = vid_item + # Change the output video extension to .mp4 if '--to-mp4' flag is set + if args.to_mp4: + vid_path = vid_path.split('.') + assert len(vid_path) == 2, f"Video path '{vid_path}' contains more than one dot" + vid_path = vid_path[0] + '.mp4' out_full_path = osp.join(args.out_dir, vid_path) dir_name = osp.dirname(vid_path) out_dir = osp.join(args.out_dir, dir_name) @@ -71,6 +76,10 @@ def parse_args(): default='mp4', choices=['avi', 'mp4', 'webm', 'mkv'], help='video file extensions') + parser.add_argument( + '--to-mp4', + action='store_true', + help='whether to output videos in mp4 format') parser.add_argument( '--scale', type=int, From 94317d62ee2f31c04220b599be984505cfa6e023 Mon Sep 17 00:00:00 2001 From: ZHAO Date: Wed, 14 Jul 2021 12:32:08 +0800 Subject: [PATCH 202/414] [Docs] Add Chinese translation of feature_extraction.md (#1020) * Add Chinese translation of feature_extraction.md. * Fix typo * Modify feature_extraction.md * Modify feature_extraction.md --- docs/feature_extraction.md | 2 +- docs_zh_CN/feature_extraction.md | 71 ++++++++++++++++++++++++++++++++ 2 files changed, 72 insertions(+), 1 deletion(-) create mode 100644 docs_zh_CN/feature_extraction.md diff --git a/docs/feature_extraction.md b/docs/feature_extraction.md index 9b28889d8e..919fcc35af 100644 --- a/docs/feature_extraction.md +++ b/docs/feature_extraction.md @@ -2,7 +2,7 @@ We provide easy-to-use scripts for feature extraction. -## Clip-leval Feature Extraction +## Clip-level Feature Extraction Clip-level feature extraction extracts deep features from a video clip, which usually lasts several to tens of seconds. The extracted feature is an n-dim vector for each clip. When performing multi-view feature extraction, e.g. n clips x m crops, the extracted feature will be the average of the n * m views.
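For illustration, the multi-view averaging rule described above can be written in a few lines of NumPy. This is only a sketch: the function name, the view counts and the 2048-dim feature size below are assumptions chosen for the example, not part of the MMAction2 API.

```python
import numpy as np


def average_multi_view_features(view_features):
    """Average n_clips x m_crops view-level features into one clip feature.

    Args:
        view_features (np.ndarray): Array of shape (n_clips * m_crops, c),
            holding one c-dim feature vector per extracted view.

    Returns:
        np.ndarray: The clip-level feature of shape (c,), i.e. the mean
            over all n * m views.
    """
    return view_features.mean(axis=0)


# e.g. 10 clips x 3 crops, each view a hypothetical 2048-dim backbone feature
views = np.random.rand(10 * 3, 2048).astype(np.float32)
clip_feature = average_multi_view_features(views)
assert clip_feature.shape == (2048,)
```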
diff --git a/docs_zh_CN/feature_extraction.md b/docs_zh_CN/feature_extraction.md new file mode 100644 index 0000000000..d993c9cc9d --- /dev/null +++ b/docs_zh_CN/feature_extraction.md @@ -0,0 +1,71 @@ +# 特征提取 + +MMAction2 为特征提取提供了便捷使用的脚本。 + +## 片段级特征提取 + +片段级特征提取是从长度一般为几秒到几十秒不等的剪辑片段中提取深度特征。从每个片段中提取的特征是一个 n 维向量。当进行多视图特征提取时,例如 n 个片段 × m 种裁剪,提取的特征将会是 n*m 个视图的平均值。 + +在应用片段级特征提取之前,用户需要准备一个视频列表包含所有想要进行特征提取的视频。例如,由 UCF101 中视频组成的视频列表如下: + +``` +ApplyEyeMakeup/v_ApplyEyeMakeup_g01_c01.avi +ApplyEyeMakeup/v_ApplyEyeMakeup_g01_c02.avi +ApplyEyeMakeup/v_ApplyEyeMakeup_g01_c03.avi +ApplyEyeMakeup/v_ApplyEyeMakeup_g01_c04.avi +ApplyEyeMakeup/v_ApplyEyeMakeup_g01_c05.avi +... +YoYo/v_YoYo_g25_c01.avi +YoYo/v_YoYo_g25_c02.avi +YoYo/v_YoYo_g25_c03.avi +YoYo/v_YoYo_g25_c04.avi +YoYo/v_YoYo_g25_c05.avi +``` + +假设 UCF101 中的视频所在目录为 `data/ucf101/videos`,视频列表的文件名为 `ucf101.txt`,使用 TSN(Kinetics-400 预训练)从 UCF101 中提取片段级特征,用户可以使用脚本如下: + +```shell +python tools/misc/clip_feature_extraction.py \ +configs/recognition/tsn/tsn_r50_clip_feature_extraction_1x1x3_rgb.py \ +https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_320p_1x1x3_100e_kinetics400_rgb_20200702-cc665e2a.pth \ +--video-list ucf101.txt \ +--video-root data/ucf101/videos \ +--out ucf101_feature.pkl +``` + +被提取的特征存储于 `ucf101_feature.pkl`。 + +用户也可以使用分布式片段级特征提取。以下是使用拥有 8 gpus 的计算节点的示例。 + +```shell +bash tools/misc/dist_clip_feature_extraction.sh \ +configs/recognition/tsn/tsn_r50_clip_feature_extraction_1x1x3_rgb.py \ +https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_320p_1x1x3_100e_kinetics400_rgb_20200702-cc665e2a.pth \ +8 \ +--video-list ucf101.txt \ +--video-root data/ucf101/videos \ +--out ucf101_feature.pkl +``` + +使用 SlowOnly(Kinetics-400 预训练)从 UCF101 中提取片段级特征,用户可以使用脚本如下: + +```shell +python tools/misc/clip_feature_extraction.py \ +configs/recognition/slowonly/slowonly_r50_clip_feature_extraction_4x16x1_rgb.py \ +https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014-c9cdc656.pth \ +--video-list ucf101.txt \ +--video-root data/ucf101/videos \ +--out ucf101_feature.pkl +``` + +这两个配置文件展示了用于特征提取的最小配置。用户也可以使用其他存在的配置文件进行特征提取,只要注意使用视频数据进行训练和测试,而不是原始帧数据。 + +```shell +python tools/misc/clip_feature_extraction.py \ +configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py \ +https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014-c9cdc656.pth \ +--video-list ucf101.txt \ +--video-root data/ucf101/videos \ +--out ucf101_feature.pkl +``` + From ca581d627cab620bedfcc15915bc3c629a9f2a22 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Wed, 14 Jul 2021 15:30:34 +0800 Subject: [PATCH 203/414] [Fix] Fix doc code (#1023) --- docs/getting_started.md | 6 +++--- docs_zh_CN/getting_started.md | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/getting_started.md b/docs/getting_started.md index 6e86e497da..43845b64a7 100644 --- a/docs/getting_started.md +++ b/docs/getting_started.md @@ -192,7 +192,7 @@ import torch from mmaction.apis import init_recognizer, inference_recognizer -config_file = 'configs/recognition/tsn/tsn_r50_inference_1x1x3_100e_kinetics400_rgb.py' +config_file = 'configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py' # download the checkpoint from 
model zoo and put it in `checkpoints/` checkpoint_file = 'checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth' @@ -201,12 +201,12 @@ device = 'cuda:0' # or 'cpu' device = torch.device(device) # build the model from a config file and a checkpoint file -model = init_recognizer(config_file, checkpoint_file, device=device, use_frames=True) +model = init_recognizer(config_file, checkpoint_file, device=device) # test url of a single video and show the result: video = 'https://www.learningcontainer.com/wp-content/uploads/2020/05/sample-mp4-file.mp4' labels = 'demo/label_map_k400.txt' -results = inference_recognizer(model, video, labels, use_frames=True) +results = inference_recognizer(model, video, labels) # show the results print(f'The top-5 labels with corresponding scores are:') diff --git a/docs_zh_CN/getting_started.md index 2da9320358..70714d5d67 100644 --- a/docs_zh_CN/getting_started.md +++ b/docs_zh_CN/getting_started.md @@ -191,7 +191,7 @@ import torch from mmaction.apis import init_recognizer, inference_recognizer -config_file = 'configs/recognition/tsn/tsn_r50_inference_1x1x3_100e_kinetics400_rgb.py' +config_file = 'configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py' # 从模型库中下载检查点,并把它放到 `checkpoints/` 文件夹下 checkpoint_file = 'checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth' @@ -200,12 +200,12 @@ device = 'cuda:0' # or 'cpu' device = torch.device(device) # 根据配置文件和检查点来建立模型 -model = init_recognizer(config_file, checkpoint_file, device=device, use_frames=True) +model = init_recognizer(config_file, checkpoint_file, device=device) # 测试单个视频的 url 并显示其结果 video = 'https://www.learningcontainer.com/wp-content/uploads/2020/05/sample-mp4-file.mp4' labels = 'demo/label_map_k400.txt' -results = inference_recognizer(model, video, labels, use_frames=True) +results = inference_recognizer(model, video, labels) # 显示结果 print(f'The top-5 labels with corresponding scores are:') From ae3504ff1a3dc8eab70ecf8fe6b499bd22a69936 Mon Sep 17 00:00:00 2001 From: irvingzhang0512 Date: Wed, 14 Jul 2021 18:57:32 +0800 Subject: [PATCH 204/414] [Feature] Support Pytorchvideo Transforms (#1008) * First commit for pytorchvideo transforms * typo * fix pipeline import mode * add pytorch version limit * remove ptv in optional.txt and add it in ci * add ptv resize & crop transforms * fix ci * Update build.yml * fix a bug * fix ci * fix ci with == * add resize & crop unittest * add tsm-r50 sthv1 configs/json/log/ckpts * update changelog * update README * remove redundant codes * fix lint * fix lint Co-authored-by: Haodong Duan --- .github/workflows/build.yml | 12 +-- configs/recognition/tsm/README.md | 2 + configs/recognition/tsm/README_zh-CN.md | 25 +++++ .../tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.py | 94 +++++++++++++++++++ ...r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.py | 94 +++++++++++++++++++ docs/changelog.md | 14 +++ docs_zh_CN/feature_extraction.md | 1 - mmaction/datasets/pipelines/__init__.py | 10 +- mmaction/datasets/pipelines/augmentations.py | 72 ++++++++++++++ mmaction/datasets/pipelines/compose.py | 5 +- .../test_augmentations/test_pytorchvideo.py | 70 ++++++++++++++ tools/data/resize_videos.py | 3 +- 12 files changed, 389 insertions(+), 13 deletions(-) create mode 100644 configs/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.py create mode 100644 configs/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.py create mode 100644
tests/test_data/test_pipelines/test_augmentations/test_pytorchvideo.py diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 85d0bc057f..358384d21b 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -42,9 +42,6 @@ jobs: python-version: ${{ matrix.python-version }} - name: Upgrade pip run: pip install pip --upgrade - - name: Install Pillow - run: pip install Pillow==6.2.2 - if: ${{matrix.torchvision < 0.5}} - name: Install soundfile lib run: sudo apt-get install -y libsndfile1 - name: Install onnx @@ -65,6 +62,9 @@ jobs: run: pip install git+https://github.com/open-mmlab/mmclassification/ - name: Install unittest dependencies run: pip install -r requirements/tests.txt -r requirements/optional.txt + - name: Install PytorchVideo + run: pip install pytorchvideo + if: ${{matrix.torchvision == '0.10.0'}} - name: Build and install run: rm -rf .eggs && pip install -e . - name: Run unittests and generate coverage report @@ -112,9 +112,6 @@ jobs: export LD_LIBRARY_PATH=${CUDA_HOME}/lib64:${CUDA_HOME}/include:${LD_LIBRARY_PATH} export PATH=${CUDA_HOME}/bin:${PATH} sudo apt-get install -y ninja-build - - name: Install Pillow - run: pip install Pillow==6.2.2 - if: ${{matrix.torchvision < 0.5}} - name: Install TurboJpeg lib run: sudo apt-get install -y libturbojpeg - name: Install soundfile lib @@ -210,6 +207,9 @@ jobs: pip install -q git+https://github.com/open-mmlab/mmclassification/ pip install -r requirements.txt python -c 'import mmcv; print(mmcv.__version__)' + - name: Install PytorchVideo + run: pip install pytorchvideo + if: ${{matrix.torchvision == '0.10.0+cu102'}} - name: Build and install run: | rm -rf .eggs diff --git a/configs/recognition/tsm/README.md b/configs/recognition/tsm/README.md index 6a53454e3c..06e57e4119 100644 --- a/configs/recognition/tsm/README.md +++ b/configs/recognition/tsm/README.md @@ -60,6 +60,8 @@ |[tsm_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb.py) |height 100|8| ResNet50 | ImageNet| 45.58 / 47.70|75.02 / 76.12|[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7077| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/tsm_r50_1x1x8_50e_sthv1_rgb_20210203-01dce462.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/20210203_150227.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/20210203_150227.log.json)| |[tsm_r50_flip_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb.py) |height 100|8| ResNet50 | ImageNet| 47.10 / 48.51|76.02 / 77.56|[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7077| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/tsm_r50_flip_1x1x8_50e_sthv1_rgb_20210203-12596f16.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/20210203_145829.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/20210203_145829.log.json)| 
|[tsm_r50_randaugment_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.py) |height 100|8| ResNet50 | ImageNet| 47.16 / 48.90|76.07 / 77.92|[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7077| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb_20210324-481268d9.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.json)| +|[tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.py) |height 100|8| ResNet50 | ImageNet| 47.65 / 48.66 | 76.67 / 77.41 |[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7077| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb-ee93e5e3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.json) | +|[tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.py) |height 100|8| ResNet50 | ImageNet| 46.26 / 47.68 | 75.92 / 76.49 |[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7077| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb-4f4f4740.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.json) | |[tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.py) |height 100|8| ResNet50 | ImageNet| 47.85 / 50.31|76.78 / 78.18|[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7077| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb_20210324-76937692.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.log)| 
[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.json)| |[tsm_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb.py)|height 100|8| ResNet50 | ImageNet|47.62 / 49.28|76.63 / 77.82|[47.05 / 48.61](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[76.40 / 77.96](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|10390|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb_20201010-17fa49f6.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/20201010_221240.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/20201010_221240.log.json)| |[tsm_r101_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb.py)|height 100|8| ResNet50 | ImageNet|45.72 / 48.43|74.67 / 76.72|[46.64 / 48.13](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[75.40 / 77.31](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|9800|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb_20201010-43fedf2e.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/20201010_224055.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/20201010_224055.log.json)| diff --git a/configs/recognition/tsm/README_zh-CN.md b/configs/recognition/tsm/README_zh-CN.md index 92d59486de..814a8a63c2 100644 --- a/configs/recognition/tsm/README_zh-CN.md +++ b/configs/recognition/tsm/README_zh-CN.md @@ -59,6 +59,10 @@ |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| |[tsm_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb.py) |高 100|8| ResNet50 | ImageNet| 45.58 / 47.70|75.02 / 76.12|[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7077| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/tsm_r50_1x1x8_50e_sthv1_rgb_20210203-01dce462.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/20210203_150227.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/20210203_150227.log.json)| |[tsm_r50_flip_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb.py) |高 100|8| ResNet50 | ImageNet| 47.10 / 48.51|76.02 / 77.56|[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7077| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/tsm_r50_flip_1x1x8_50e_sthv1_rgb_20210203-12596f16.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/20210203_145829.log)| 
[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/20210203_145829.log.json)| +|[tsm_r50_randaugment_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.py)| 高 100 | 8 | ResNet50 | ImageNet | 47.16 / 48.90 | 76.07 / 77.92 | [45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 7077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb_20210324-481268d9.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.json) | +| [tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 47.65 / 48.66 | 76.67 / 77.41 | [45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 7077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb-ee93e5e3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.json) | +| [tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 46.26 / 47.68 | 75.92 / 76.49 | [45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 7077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb-4f4f4740.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.json) | +| [tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 47.85 / 50.31 | 76.78 / 78.18 | [45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) 
|7077|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb_20210324-76937692.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.json)| |[tsm_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb.py)|高 100|8| ResNet50 | ImageNet|47.62 / 49.28|76.63 / 77.82|[47.05 / 48.61](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[76.40 / 77.96](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|10390|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb_20201010-17fa49f6.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/20201010_221240.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/20201010_221240.log.json)| |[tsm_r101_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb.py)|高 100|8| ResNet50 | ImageNet|45.72 / 48.43|74.67 / 76.72|[46.64 / 48.13](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[75.40 / 77.31](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|9800|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb_20201010-43fedf2e.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/20201010_224055.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/20201010_224055.log.json)| @@ -72,6 +76,13 @@ |[tsm_r50_1x1x16_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py) |高 256|8| ResNet50| ImageNet |61.06 / 63.19|86.66 / 87.93|[xx / 63.1](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 10400 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/tsm_r50_256h_1x1x16_50e_sthv2_rgb_20210331-0a45549c.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20210331_134458.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20210331_134458.log.json)| |[tsm_r101_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb.py) |高 240|8| ResNet101 | ImageNet|58.59 / 61.51|85.07 / 86.90|[58.89 / 61.36](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[85.14 / 87.00](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 9784 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/tsm_r101_1x1x8_50e_sthv2_rgb_20201010-98cdedb8.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20201010_224100.log)| 
[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20201010_224100.log.json)| +### Diving48 + +| 配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | GPU 显存占用 (M) | ckpt | log | json | +| :----------------------------------------------------------- | :------: | :------: | :------: | :---------: | :---------: | :--------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [tsm_r50_video_1x1x8_50e_diving48_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb.py) | 8 | ResNet50 | ImageNet | 75.99 | 97.16 | 7070 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/tsm_r50_video_1x1x8_50e_diving48_rgb_20210426-aba5aa3d.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log.json) | +| [tsm_r50_video_1x1x16_50e_diving48_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb.py) | 8 | ResNet50 | ImageNet | 81.62 | 97.66 | 7070 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/tsm_r50_video_1x1x16_50e_diving48_rgb_20210426-aa9631c0.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log.json) | + ### MixUp & CutMix on Something-Something V1 | 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 (efficient/accurate) | top5 准确率 (efficient/accurate) | top1 准确率变化 (efficient/accurate) | top5 准确率变化 (efficient/accurate) | ckpt | log | json | @@ -85,6 +96,20 @@ | ------------------------------------------------------------ | :----: | :------: | :------: | :------: | :------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | | [tsm_r50_1x1x8_50e_jester_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 96.5 / 97.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb-c799267e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb.json) | +### HMDB51 + +| 配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | GPU 显存占用 (M) | ckpt | log | json | +| :----------------------------------------------------------- | :------: | :------: | :---------: | :---------: | :---------: | :--------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb](/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb.py) | 8 | ResNet50 | Kinetics400 | 72.68 | 92.03 | 10388 | 
[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb_20210630-10c74ee5.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/20210605_182554.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/20210605_182554.log.json) | +| [tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb](/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb.py) | 8 | ResNet50 | Kinetics400 | 74.77 | 93.86 | 10388 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb_20210630-4785548e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/20210605_182505.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/20210605_182505.log.json) | + +### UCF101 + +| 配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | GPU 显存占用 (M) | ckpt | log | json | +| :----------------------------------------------------------- | :------: | :------: | :---------: | :---------: | :---------: | :--------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb](/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb.py) | 8 | ResNet50 | Kinetics400 | 94.50 | 99.58 | 10389 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb_20210630-1fae312b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/20210605_182720.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/20210605_182720.log.json) | +| [tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb](/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb.py) | 8 | ResNet50 | Kinetics400 | 94.58 | 99.37 | 10389 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb_20210630-8df9c358.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/20210605_182720.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/20210605_182720.log.json) | + 注: 1. 
这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 diff --git a/configs/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.py b/configs/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.py new file mode 100644 index 0000000000..c4540ee855 --- /dev/null +++ b/configs/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.py @@ -0,0 +1,94 @@ +_base_ = [ + '../../_base_/models/tsm_r50.py', '../../_base_/schedules/sgd_tsm_50e.py', + '../../_base_/default_runtime.py' +] + +# model settings +model = dict(cls_head=dict(num_classes=174)) + +# dataset settings +dataset_type = 'RawframeDataset' +data_root = 'data/sthv1/rawframes' +data_root_val = 'data/sthv1/rawframes' +ann_file_train = 'data/sthv1/sthv1_train_list_rawframes.txt' +ann_file_val = 'data/sthv1/sthv1_val_list_rawframes.txt' +ann_file_test = 'data/sthv1/sthv1_val_list_rawframes.txt' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) +train_pipeline = [ + dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict( + type='MultiScaleCrop', + input_size=224, + scales=(1, 0.875, 0.75, 0.66), + random_crop=False, + max_wh_scale_gap=1, + num_fixed_crops=13), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='pytorchvideo.AugMix'), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=8, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=8, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='ThreeCrop', crop_size=256), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=8, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + filename_tmpl='{:05}.jpg', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + filename_tmpl='{:05}.jpg', + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + filename_tmpl='{:05}.jpg', + pipeline=test_pipeline)) +evaluation = dict( + interval=2, metrics=['top_k_accuracy', 'mean_class_accuracy']) + +# optimizer +optimizer = dict(weight_decay=0.0005) + +# runtime settings +work_dir = './work_dirs/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb/' diff --git a/configs/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.py b/configs/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.py new file mode 100644 index 0000000000..75eac07094 --- /dev/null +++ b/configs/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.py @@ -0,0 +1,94 @@ +_base_ = [ + '../../_base_/models/tsm_r50.py', 
'../../_base_/schedules/sgd_tsm_50e.py', + '../../_base_/default_runtime.py' +] + +# model settings +model = dict(cls_head=dict(num_classes=174)) + +# dataset settings +dataset_type = 'RawframeDataset' +data_root = 'data/sthv1/rawframes' +data_root_val = 'data/sthv1/rawframes' +ann_file_train = 'data/sthv1/sthv1_train_list_rawframes.txt' +ann_file_val = 'data/sthv1/sthv1_val_list_rawframes.txt' +ann_file_test = 'data/sthv1/sthv1_val_list_rawframes.txt' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) +train_pipeline = [ + dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict( + type='MultiScaleCrop', + input_size=224, + scales=(1, 0.875, 0.75, 0.66), + random_crop=False, + max_wh_scale_gap=1, + num_fixed_crops=13), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='pytorchvideo.RandAugment'), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=8, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=8, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='ThreeCrop', crop_size=256), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=8, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + filename_tmpl='{:05}.jpg', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + filename_tmpl='{:05}.jpg', + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + filename_tmpl='{:05}.jpg', + pipeline=test_pipeline)) +evaluation = dict( + interval=2, metrics=['top_k_accuracy', 'mean_class_accuracy']) + +# optimizer +optimizer = dict(weight_decay=0.0005) + +# runtime settings +work_dir = './work_dirs/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb/' diff --git a/docs/changelog.md b/docs/changelog.md index 53e28d295a..0631f1b6d5 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -2,6 +2,20 @@ ### Master +**Highlights** + +**New Features** + +- Support Pytorchvideo Transforms ([#1008](https://github.com/open-mmlab/mmaction2/pull/1008)) + +**Improvements** + +**Bug and Typo Fixes** + +**ModelZoo** + +- Add TSM-R50 sthv1 models trained by PytorchVideo RandAugment and AugMix ([#1008](https://github.com/open-mmlab/mmaction2/pull/1008)) + ### 0.16.0 (01/07/2021) **Highlights** diff --git a/docs_zh_CN/feature_extraction.md b/docs_zh_CN/feature_extraction.md index d993c9cc9d..8eea87df24 100644 --- a/docs_zh_CN/feature_extraction.md +++ b/docs_zh_CN/feature_extraction.md @@ -68,4 +68,3 @@ 
https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_ --video-root data/ucf101/videos \ --out ucf101_feature.pkl ``` - diff --git a/mmaction/datasets/pipelines/__init__.py b/mmaction/datasets/pipelines/__init__.py index aca8d9de8d..72fe6afeca 100644 --- a/mmaction/datasets/pipelines/__init__.py +++ b/mmaction/datasets/pipelines/__init__.py @@ -1,8 +1,9 @@ from .augmentations import (AudioAmplify, CenterCrop, ColorJitter, Flip, Fuse, Imgaug, MelSpectrogram, MultiGroupCrop, - MultiScaleCrop, Normalize, RandomCrop, - RandomRescale, RandomResizedCrop, RandomScale, - Resize, TenCrop, ThreeCrop, TorchvisionTrans) + MultiScaleCrop, Normalize, PytorchVideoTrans, + RandomCrop, RandomRescale, RandomResizedCrop, + RandomScale, Resize, TenCrop, ThreeCrop, + TorchvisionTrans) from .compose import Compose from .formating import (Collect, FormatAudioShape, FormatShape, ImageToTensor, Rename, ToDataContainer, ToTensor, Transpose) @@ -33,5 +34,6 @@ 'AudioDecodeInit', 'RandomScale', 'ImageDecode', 'BuildPseudoClip', 'RandomRescale', 'PyAVDecodeMotionVector', 'Rename', 'Imgaug', 'UniformSampleFrames', 'PoseDecode', 'LoadKineticsPose', - 'GeneratePoseTarget', 'PIMSInit', 'PIMSDecode', 'TorchvisionTrans' + 'GeneratePoseTarget', 'PIMSInit', 'PIMSDecode', 'TorchvisionTrans', + 'PytorchVideoTrans' ] diff --git a/mmaction/datasets/pipelines/augmentations.py b/mmaction/datasets/pipelines/augmentations.py index 64cef00f62..6fe020fd2b 100644 --- a/mmaction/datasets/pipelines/augmentations.py +++ b/mmaction/datasets/pipelines/augmentations.py @@ -90,6 +90,78 @@ def __call__(self, results): return results +@PIPELINES.register_module() +class PytorchVideoTrans: + """PytorchVideoTrans Augmentations, under pytorchvideo.transforms. + + Args: + type (str): The name of the pytorchvideo transformation. + """ + + def __init__(self, type, **kwargs): + try: + import torch + import pytorchvideo.transforms as ptv_trans + except ImportError: + raise RuntimeError('Install pytorchvideo to use PytorchVideoTrans') + if LooseVersion(torch.__version__) < LooseVersion('1.8.0'): + raise RuntimeError( + 'The version of PyTorch should be at least 1.8.0') + + trans = getattr(ptv_trans, type, None) + assert trans, f'Transform {type} not in pytorchvideo' + + supported_pytorchvideo_trans = ('AugMix', 'RandAugment', + 'RandomResizedCrop', 'ShortSideScale', + 'RandomShortSideScale') + assert type in supported_pytorchvideo_trans,\ + f'PytorchVideo Transform {type} is not supported in MMAction2' + + self.trans = trans(**kwargs) + self.type = type + + def __call__(self, results): + assert 'imgs' in results + + assert 'gt_bboxes' not in results,\ + f'PytorchVideo {self.type} doesn\'t support bboxes yet.' + assert 'proposals' not in results,\ + f'PytorchVideo {self.type} doesn\'t support bboxes yet.' 
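+        # AugMix and RandAugment operate on a uint8 image stack shaped
+        # (t, c, h, w), while the scale/crop transforms expect a float32
+        # video tensor shaped (c, t, h, w) in [0, 1], hence the two
+        # conversion branches below (mirrored after `self.trans` is applied).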
+ + if self.type in ('AugMix', 'RandAugment'): + # list[ndarray(h, w, 3)] -> torch.tensor(t, c, h, w) + imgs = [x.transpose(2, 0, 1) for x in results['imgs']] + imgs = to_tensor(np.stack(imgs)) + else: + # list[ndarray(h, w, 3)] -> torch.tensor(c, t, h, w) + # uint8 -> float32 + imgs = to_tensor((np.stack(results['imgs']).transpose(3, 0, 1, 2) / + 255.).astype(np.float32)) + + imgs = self.trans(imgs).data.numpy() + + if self.type in ('AugMix', 'RandAugment'): + imgs[imgs > 255] = 255 + imgs[imgs < 0] = 0 + imgs = imgs.astype(np.uint8) + + # torch.tensor(t, c, h, w) -> list[ndarray(h, w, 3)] + imgs = [x.transpose(1, 2, 0) for x in imgs] + else: + # float32 -> uint8 + imgs = imgs * 255 + imgs[imgs > 255] = 255 + imgs[imgs < 0] = 0 + imgs = imgs.astype(np.uint8) + + # torch.tensor(c, t, h, w) -> list[ndarray(h, w, 3)] + imgs = [x for x in imgs.transpose(1, 2, 3, 0)] + + results['imgs'] = imgs + + return results + + @PIPELINES.register_module() class PoseCompact: """Convert the coordinates of keypoints to make it more compact. diff --git a/mmaction/datasets/pipelines/compose.py b/mmaction/datasets/pipelines/compose.py index 2f0dd7b817..eb5a79bd11 100644 --- a/mmaction/datasets/pipelines/compose.py +++ b/mmaction/datasets/pipelines/compose.py @@ -3,7 +3,7 @@ from mmcv.utils import build_from_cfg from ..builder import PIPELINES -from .augmentations import TorchvisionTrans +from .augmentations import PytorchVideoTrans, TorchvisionTrans @PIPELINES.register_module() @@ -23,6 +23,9 @@ def __init__(self, transforms): if transform['type'].startswith('torchvision.'): trans_type = transform.pop('type')[12:] transform = TorchvisionTrans(trans_type, **transform) + elif transform['type'].startswith('pytorchvideo.'): + trans_type = transform.pop('type')[13:] + transform = PytorchVideoTrans(trans_type, **transform) else: transform = build_from_cfg(transform, PIPELINES) self.transforms.append(transform) diff --git a/tests/test_data/test_pipelines/test_augmentations/test_pytorchvideo.py b/tests/test_data/test_pipelines/test_augmentations/test_pytorchvideo.py new file mode 100644 index 0000000000..df1f6a9fb2 --- /dev/null +++ b/tests/test_data/test_pipelines/test_augmentations/test_pytorchvideo.py @@ -0,0 +1,70 @@ +import numpy as np +import pytest +from mmcv.utils import assert_dict_has_keys + +try: + import torch + from distutils.version import LooseVersion + from mmaction.datasets.pipelines import PytorchVideoTrans + pytorchvideo_ok = False + if LooseVersion(torch.__version__) >= LooseVersion('1.8.0'): + pytorchvideo_ok = True +except (ImportError, ModuleNotFoundError): + pytorchvideo_ok = False + + +@pytest.mark.skipif(not pytorchvideo_ok, reason='torch >= 1.8.0 is required') +class TestPytorchVideoTrans: + + @staticmethod + def test_pytorchvideo_trans(): + with pytest.raises(AssertionError): + # transforms not supported in pytorchvideo + PytorchVideoTrans(type='BlaBla') + + with pytest.raises(AssertionError): + # This trans exists in pytorchvideo but not supported in MMAction2 + PytorchVideoTrans(type='MixUp') + + target_keys = ['imgs'] + + imgs = list(np.random.randint(0, 256, (4, 32, 32, 3)).astype(np.uint8)) + results = dict(imgs=imgs) + + # test AugMix + augmix = PytorchVideoTrans(type='AugMix') + results = augmix(results) + assert assert_dict_has_keys(results, target_keys) + assert (all(img.shape == (32, 32, 3) for img in results['imgs'])) + + # test RandAugment + rand_augment = PytorchVideoTrans(type='RandAugment') + results = rand_augment(results) + assert assert_dict_has_keys(results, target_keys) 
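+        # RandAugment only perturbs pixel values, so the spatial shape
+        # of every frame should be preserved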
+ assert (all(img.shape == (32, 32, 3) for img in results['imgs'])) + + # test RandomResizedCrop + random_resized_crop = PytorchVideoTrans( + type='RandomResizedCrop', + target_height=16, + target_width=16, + scale=(0.1, 1.), + aspect_ratio=(0.8, 1.2)) + results = random_resized_crop(results) + assert assert_dict_has_keys(results, target_keys) + assert (all(img.shape == (16, 16, 3) for img in results['imgs'])) + + # test ShortSideScale + short_side_scale = PytorchVideoTrans(type='ShortSideScale', size=24) + results = short_side_scale(results) + assert assert_dict_has_keys(results, target_keys) + assert (all(img.shape == (24, 24, 3) for img in results['imgs'])) + + # test RandomShortSideScale + random_short_side_scale = PytorchVideoTrans( + type='RandomShortSideScale', min_size=24, max_size=36) + results = random_short_side_scale(results) + target_shape = results['imgs'][0].shape + assert 36 >= target_shape[0] >= 24 + assert assert_dict_has_keys(results, target_keys) + assert (all(img.shape == target_shape for img in results['imgs'])) diff --git a/tools/data/resize_videos.py index 82ba912263..1986cda7b5 100644 --- a/tools/data/resize_videos.py +++ b/tools/data/resize_videos.py @@ -20,7 +20,8 @@ def resize_videos(vid_item): # Change the output video extension to .mp4 if '--to-mp4' flag is set if args.to_mp4: vid_path = vid_path.split('.') - assert len(vid_path) == 2, f"Video path '{vid_path}' contains more than one dot" + assert len(vid_path) == 2, \ + f"Video path '{vid_path}' contains more than one dot" vid_path = vid_path[0] + '.mp4' out_full_path = osp.join(args.out_dir, vid_path) dir_name = osp.dirname(vid_path) From 93a4c7cc19b3e4fe7e04a89bebfbad799de80e2e Mon Sep 17 00:00:00 2001 From: congee <35596075+congee524@users.noreply.github.com> Date: Thu, 15 Jul 2021 13:48:30 +0800 Subject: [PATCH 205/414] [docs] fix tanet config table (#1028) * fix typo * update zhcn --- configs/recognition/tanet/README.md | 2 +- configs/recognition/tanet/README_zh-CN.md | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/configs/recognition/tanet/README.md index 52a2d4e38c..52be55ed6a 100644 --- a/configs/recognition/tanet/README.md +++ b/configs/recognition/tanet/README.md @@ -24,7 +24,7 @@ ### Something-Something V1 |config | resolution | gpus | backbone| pretrain | top1 acc (efficient/accurate)| top5 acc (efficient/accurate)| gpu_mem(M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| |[tanet_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb.py)|height 100|8|TANet|ImageNet|47.45/49.69|76.00/77.62|7127|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/tanet_r50_1x1x8_50e_sthv1_rgb_20210630-f4a48609.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log)|[json](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log.json)| |[tanet_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb.py)|height 
100|8|TANet|ImageNet|47.73/50.41|77.31/78.47|7127|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb_20210630-7c19303c.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/20210607_155335.log)|[json](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/20210607_155335.log.json)| diff --git a/configs/recognition/tanet/README_zh-CN.md index 953fae69dc..a92230c0fc 100644 --- a/configs/recognition/tanet/README_zh-CN.md +++ b/configs/recognition/tanet/README_zh-CN.md @@ -21,6 +21,13 @@ |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| |[tanet_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb.py) |短边 320|8| TANet | ImageNet |76.28 | 92.60 |[76.22](https://github.com/liu-zhy/temporal-adaptive-module/blob/master/scripts/test_tam_kinetics_rgb_8f.sh)|[92.53](https://github.com/liu-zhy/temporal-adaptive-module/blob/master/scripts/test_tam_kinetics_rgb_8f.sh) | x | 7124 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219-032c8e94.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.log)| [json](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.json)| +### Something-Something V1 + +|配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 (efficient/accurate) | top5 准确率 (efficient/accurate) | GPU 显存占用 (M)| ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[tanet_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb.py)|高 100|8|TANet|ImageNet|47.45/49.69|76.00/77.62|7127|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/tanet_r50_1x1x8_50e_sthv1_rgb_20210630-f4a48609.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log)|[json](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log.json)| +|[tanet_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb.py)|高 100|8|TANet|ImageNet|47.73/50.41|77.31/78.47|7127|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb_20210630-7c19303c.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/20210607_155335.log)|[json](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/20210607_155335.log.json)| + 注: 1. 
这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 From 93fe92ac28c703209f347d162060b07b256b1f68 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Fri, 16 Jul 2021 18:14:00 +0800 Subject: [PATCH 206/414] update slowonly sthv1 ckpt (#1034) --- configs/recognition/slowonly/README.md | 2 +- configs/recognition/slowonly/README_zh-CN.md | 22 +++++++++++++++++++- 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/configs/recognition/slowonly/README.md b/configs/recognition/slowonly/README.md index 3852a04a46..292bc42581 100644 --- a/configs/recognition/slowonly/README.md +++ b/configs/recognition/slowonly/README.md @@ -95,7 +95,7 @@ In data benchmark, we compare two different data preprocessing methods: (1) Resi |config | gpus | backbone | pretrain | top1 acc| top5 acc | gpu_mem(M) | ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.py)|8|ResNet50|ImageNet|46.63|77.19|7759|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb_20210630-ee8c850f.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/20210606_010231.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/20210606_010231.log.json)| +|[slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.py)|8|ResNet50|ImageNet|46.63|77.19|7759|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb_20210630-807a9a9a.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/20210605_235410.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/20210605_235410.log.json)| Notes: diff --git a/configs/recognition/slowonly/README_zh-CN.md b/configs/recognition/slowonly/README_zh-CN.md index 33a645bc94..726a2cabe4 100644 --- a/configs/recognition/slowonly/README_zh-CN.md +++ b/configs/recognition/slowonly/README_zh-CN.md @@ -68,7 +68,7 @@ | 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | 类别平均准确率 | ckpt | log | json | | :----------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | | [slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb.py) | 短边 256 | 8x2 | ResNet50 | ImageNet | 79.3 | 70.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111-a9c34b54.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111.log) | 
[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111.json) | -| [slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow](/configs/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow.py) | 短边 256 | 8x2 | ResNet50 | Kinetics | 80.3 | 71.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111-66ecdb3c.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.json) | +| [slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow](/configs/recognition/slowonly/slowonly_k400_pretrained_r50_4x16x1_120e_gym99_flow.py) | 短边 256 | 8x2 | ResNet50 | Kinetics | 80.3 | 71.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111-66ecdb3c.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.json) | | 1: 1 融合 | | | | | 83.7 | 74.8 | | | | ### Jester @@ -77,6 +77,26 @@ | :----------------------------------------------------------- | :----: | :------: | :------: | :------: | :---------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | | [slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 97.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb-b56a5389.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.json) | +### HMDB51 + +|配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率| top5 准确率 | GPU 显存占用 (M) | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| 
+|[slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb.py)|8|ResNet50|ImageNet|37.52|71.50|5812|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb_20210630-16faeb6a.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb/20210605_185256.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb/20210605_185256.log.json)| +|[slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb](/configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb.py)|8|ResNet50|Kinetics400|65.95|91.05|5812|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb_20210630-cee5f725.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb/20210606_010153.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb/20210606_010153.log.json)| + +### UCF101 + +|配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率| top5 准确率 | GPU 显存占用 (M) | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb.py)|8|ResNet50|ImageNet|71.35|89.35|5812|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb_20210630-181e1661.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb/20210605_213503.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb/20210605_213503.log.json)| +|[slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb](/configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb.py)|8|ResNet50|Kinetics400|92.78|99.42|5812|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb_20210630-ee8c850f.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/20210606_010231.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/20210606_010231.log.json)| + +### Something-Something V1 + +|配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率| top5 准确率 | GPU 显存占用 (M) | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| 
+|[slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.py)|8|ResNet50|ImageNet|46.63|77.19|7759|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb_20210630-807a9a9a.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/20210605_235410.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/20210605_235410.log.json)| + 注: 1. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 From a4c8868a37773273ec8346e430a2d2913727fa00 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Mon, 19 Jul 2021 03:00:15 +0800 Subject: [PATCH 207/414] Update data_preparation.md --- docs/data_preparation.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/data_preparation.md b/docs/data_preparation.md index 1a7f736720..e864cd3357 100644 --- a/docs/data_preparation.md +++ b/docs/data_preparation.md @@ -32,9 +32,9 @@ Similar to the datasets stated above, it is recommended organizing in `$MMACTION Please refer to the official website and/or the official script to prepare the videos. Note that the videos should be arranged in either -(1). A two-level directory organized by `${CLASS_NAME}/${VIDEO_ID}`, which is recommended to be used for for action recognition datasets (such as UCF101 and Kinetics) +(1). A two-level directory organized by `${CLASS_NAME}/${VIDEO_ID}`, which is recommended to be used for action recognition datasets (such as UCF101 and Kinetics) -(2). A single-level directory, which is recommended to be used for for action detection datasets or those with multiple annotations per video (such as THUMOS14). +(2). A single-level directory, which is recommended to be used for action detection datasets or those with multiple annotations per video (such as THUMOS14). ### Extract frames From b8115d918330ccfa451d4aa0ca61877b4dcd806a Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Thu, 22 Jul 2021 08:05:54 +0800 Subject: [PATCH 208/414] [Improvement] Update CI (#1046) * update ci * Update build.yml * Update build.yml --- .github/workflows/build.yml | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 358384d21b..ebd18a81fc 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -130,10 +130,7 @@ jobs: pip install -r requirements.txt python -c 'import mmcv; print(mmcv.__version__)' - name: Build and install - run: | - rm -rf .eggs - python setup.py check -m -s - TORCH_CUDA_ARCH_LIST=7.0 pip install . + run: rm -rf .eggs && pip install -e . 
- name: Run unittests and generate coverage report run: | coverage run --branch --source mmaction -m pytest tests/ @@ -187,9 +184,6 @@ jobs: export LD_LIBRARY_PATH=${CUDA_HOME}/lib64:${CUDA_HOME}/include:${LD_LIBRARY_PATH} export PATH=${CUDA_HOME}/bin:${PATH} sudo apt-get install -y ninja-build - - name: Install Pillow - run: pip install Pillow==6.2.2 - if: ${{matrix.torchvision < 0.5}} - name: Install TurboJpeg lib run: sudo apt-get install -y libturbojpeg - name: Install soundfile lib @@ -211,10 +205,7 @@ jobs: run: pip install pytorchvideo if: ${{matrix.torchvision == '0.10.0+cu102'}} - name: Build and install - run: | - rm -rf .eggs - python setup.py check -m -s - TORCH_CUDA_ARCH_LIST=7.0 pip install . + run: rm -rf .eggs && pip install -e . - name: Run unittests and generate coverage report run: | coverage run --branch --source mmaction -m pytest tests/ From 76db1813f01d157aaf12f8b034358b305052d522 Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Thu, 22 Jul 2021 18:05:35 +0800 Subject: [PATCH 209/414] [Fix] fix bugs in persistent_workers (#1044) * master * master 0721 * num_works 0721 * torch version 1.8.0 --- mmaction/apis/train.py | 2 +- mmaction/datasets/builder.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mmaction/apis/train.py b/mmaction/apis/train.py index 3254aa5452..4c1b62d988 100644 --- a/mmaction/apis/train.py +++ b/mmaction/apis/train.py @@ -127,7 +127,7 @@ def train_model(model, precise_bn_dataset = build_dataset(cfg.data.train) dataloader_setting = dict( videos_per_gpu=cfg.data.get('videos_per_gpu', 1), - workers_per_gpu=0, # save memory and time + workers_per_gpu=1, # save memory and time num_gpus=len(cfg.gpu_ids), dist=distributed, seed=cfg.seed) diff --git a/mmaction/datasets/builder.py b/mmaction/datasets/builder.py index 20d7bf728f..6cd2a053e4 100644 --- a/mmaction/datasets/builder.py +++ b/mmaction/datasets/builder.py @@ -111,7 +111,7 @@ def build_dataloader(dataset, worker_init_fn, num_workers=num_workers, rank=rank, seed=seed) if seed is not None else None - if LooseVersion(torch.__version__) >= LooseVersion('1.7.0'): + if LooseVersion(torch.__version__) >= LooseVersion('1.8.0'): kwargs['persistent_workers'] = persistent_workers data_loader = DataLoader( From a00f46921521473e189136941a96459051d6460f Mon Sep 17 00:00:00 2001 From: Wang Xiao <31362395+SCZwangxiao@users.noreply.github.com> Date: Fri, 23 Jul 2021 14:00:20 +0800 Subject: [PATCH 210/414] [Fix] Support Timesformer feature extraction (#1035) --- mmaction/models/recognizers/recognizer3d.py | 32 +++++++++++++-------- 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/mmaction/models/recognizers/recognizer3d.py b/mmaction/models/recognizers/recognizer3d.py index 35eafaaf47..27f25fd715 100644 --- a/mmaction/models/recognizers/recognizer3d.py +++ b/mmaction/models/recognizers/recognizer3d.py @@ -64,18 +64,26 @@ def _do_test(self, imgs): feat, _ = self.neck(feat) if self.feature_extraction: - # perform spatio-temporal pooling - avg_pool = nn.AdaptiveAvgPool3d(1) - if isinstance(feat, tuple): - feat = [avg_pool(x) for x in feat] - # concat them - feat = torch.cat(feat, axis=1) - else: - feat = avg_pool(feat) - # squeeze dimensions - feat = feat.reshape((batches, num_segs, -1)) - # temporal average pooling - feat = feat.mean(axis=1) + feat_dim = len(feat[0].size()) if isinstance(feat, tuple) else len( + feat.size()) + assert feat_dim in [ + 5, 2 + ], ('Got feature of unknown architecture, ' + 'only 3D-CNN-like ([N, in_channels, T, 
H, W]), and ' + 'transformer-like ([N, in_channels]) features are supported.') + if feat_dim == 5: # 3D-CNN architecture + # perform spatio-temporal pooling + avg_pool = nn.AdaptiveAvgPool3d(1) + if isinstance(feat, tuple): + feat = [avg_pool(x) for x in feat] + # concat them + feat = torch.cat(feat, axis=1) + else: + feat = avg_pool(feat) + # squeeze dimensions + feat = feat.reshape((batches, num_segs, -1)) + # temporal average pooling + feat = feat.mean(axis=1) return feat # should have cls_head if not extracting features From 7fc0a75b8e045a657061e333953b079c2af50333 Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Fri, 23 Jul 2021 14:02:19 +0800 Subject: [PATCH 211/414] [Feature] Validate that Precise-BN works (#1038) * precise bn 0719 * test * test * add docs precisebn 0722 * fix typos precisebn 0722 * Update slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb.py * Update slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb.py * precisebn 0722 Co-authored-by: Haodong Duan --- configs/recognition/slowfast/README.md | 4 +- configs/recognition/slowfast/README_zh-CN.md | 4 +- ...t_prebn_r50_4x16x1_256e_kinetics400_rgb.py | 96 +++++++++++++++++++ 3 files changed, 102 insertions(+), 2 deletions(-) create mode 100644 configs/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb.py diff --git a/configs/recognition/slowfast/README.md b/configs/recognition/slowfast/README.md index 126d68ed44..28f274275f 100644 --- a/configs/recognition/slowfast/README.md +++ b/configs/recognition/slowfast/README.md @@ -22,7 +22,9 @@ |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| |[slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 256|8x4| ResNet50|None |74.75|91.73|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb_20200728-145f1097.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log.json)| |[slowfast_r50_video_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py) |short-side 256|8| ResNet50|None |74.34|91.58|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/slowfast_r50_video_4x16x1_256e_kinetics400_rgb_20200826-f85b90c5.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log.json)| -|[slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 320|8x3| ResNet50|None |75.64|92.3|1.6 ((32+4)x10x3 frames)|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_256e_kinetics400_rgb_20200704-bcde7ed7.pth)| [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/20200704_232901.log)| 
[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/20200704_232901.log.json)| +|[slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 320|8x2| ResNet50|None |76.0|92.54|1.6 ((32+4)x10x3 frames)|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_256e_kinetics400_rgb_20210722-04e43ed4.pth)| [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log.json)| +|[slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 320|8x2| ResNet50|None |76.34|92.67|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb_20210722-bb725050.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/ +slowfast_prebn_r50_4x16x1_20210722.log.json)| |[slowfast_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |short-side 256|8x4| ResNet50 |None |75.61|92.34|x|9062|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb_20200810-863812c2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log.json)| |[slowfast_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |short-side 320|8x3| ResNet50 |None|76.94|92.8|1.3 ((32+8)x10x3 frames)|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/slowfast_r50_8x8x1_256e_kinetics400_rgb_20200716-73547d2b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log.json)| |[slowfast_r101_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 256|8x1| ResNet101 + ResNet50 |None|76.69|93.07||16628| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/slowfast_r101_4x16x1_256e_kinetics400_rgb_20210218-d8b58813.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log.json)| diff --git a/configs/recognition/slowfast/README_zh-CN.md b/configs/recognition/slowfast/README_zh-CN.md index 452ecc8ec2..e2c8d4e014 100644 --- 
a/configs/recognition/slowfast/README_zh-CN.md +++ b/configs/recognition/slowfast/README_zh-CN.md @@ -22,7 +22,9 @@ |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| |[slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) |短边256|8x4| ResNet50|None |74.75|91.73|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb_20200728-145f1097.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log.json)| |[slowfast_r50_video_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py) |短边256|8| ResNet50|None |74.34|91.58|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/slowfast_r50_video_4x16x1_256e_kinetics400_rgb_20200826-f85b90c5.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log.json)| -|[slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) |短边320|8x3| ResNet50|None |75.64|92.3|1.6 ((32+4)x10x3 frames)|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_256e_kinetics400_rgb_20200704-bcde7ed7.pth)| [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/20200704_232901.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/20200704_232901.log.json)| +|[slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) |短边320|8x2| ResNet50|None |76.0|92.54|1.6 ((32+4)x10x3 frames)|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_256e_kinetics400_rgb_20210722-04e43ed4.pth)| [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log.json)| +|[slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb.py) |短边320|8x2| ResNet50|None |76.34|92.67|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb_20210722-bb725050.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/ +slowfast_prebn_r50_4x16x1_20210722.log.json)| |[slowfast_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |短边256|8x4| ResNet50 |None 
|75.61|92.34|x|9062|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb_20200810-863812c2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log.json)| |[slowfast_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |短边320|8x3| ResNet50 |None|76.94|92.8|1.3 ((32+8)x10x3 frames)|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/slowfast_r50_8x8x1_256e_kinetics400_rgb_20200716-73547d2b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log.json)| |[slowfast_r101_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py) |短边256|8x1| ResNet101 + ResNet50 |None|76.69|93.07||16628| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/slowfast_r101_4x16x1_256e_kinetics400_rgb_20210218-d8b58813.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log.json)| diff --git a/configs/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb.py b/configs/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb.py new file mode 100644 index 0000000000..b407bc15dd --- /dev/null +++ b/configs/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb.py @@ -0,0 +1,96 @@ +_base_ = [ + '../../_base_/models/slowfast_r50.py', '../../_base_/default_runtime.py' +] + +dataset_type = 'RawframeDataset' +data_root = 'data/kinetics400/rawframes_train' +data_root_val = 'data/kinetics400/rawframes_val' +ann_file_train = 'data/kinetics400/kinetics400_train_list_rawframes.txt' +ann_file_val = 'data/kinetics400/kinetics400_val_list_rawframes.txt' +ann_file_test = 'data/kinetics400/kinetics400_val_list_rawframes.txt' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) +train_pipeline = [ + dict(type='SampleFrames', clip_len=32, frame_interval=2, num_clips=1), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='RandomResizedCrop'), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=32, + frame_interval=2, + num_clips=1, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + 
dict( + type='SampleFrames', + clip_len=32, + frame_interval=2, + num_clips=10, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='ThreeCrop', crop_size=256), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=8, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + pipeline=test_pipeline)) +evaluation = dict( + interval=5, metrics=['top_k_accuracy', 'mean_class_accuracy']) + +# optimizer +optimizer = dict( + type='SGD', lr=0.1, momentum=0.9, + weight_decay=0.0001) # this lr is used for 8 gpus +optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2)) +# learning policy +lr_config = dict( + policy='CosineAnnealing', + min_lr=0, + warmup='linear', + warmup_by_epoch=True, + warmup_iters=34) +total_epochs = 256 + +# precise bn +precise_bn = dict(num_iters=200, interval=1) + +# runtime settings +checkpoint_config = dict(interval=4) +work_dir = './work_dirs/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb' +find_unused_parameters = False From 3ed839a4e593d0831adf926c7480308640ce20f6 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Fri, 23 Jul 2021 21:13:20 +0800 Subject: [PATCH 212/414] [Improvement] Add options to keep tail frames (#1050) --- mmaction/datasets/pipelines/loading.py | 59 ++++++++++--------- .../test_loadings/test_sampling.py | 9 +++ 2 files changed, 41 insertions(+), 27 deletions(-) diff --git a/mmaction/datasets/pipelines/loading.py b/mmaction/datasets/pipelines/loading.py index 79a6656c9c..7365eb7054 100644 --- a/mmaction/datasets/pipelines/loading.py +++ b/mmaction/datasets/pipelines/loading.py @@ -99,6 +99,8 @@ class SampleFrames: start_index (None): This argument is deprecated and moved to dataset class (``BaseDataset``, ``VideoDatset``, ``RawframeDataset``, etc), see this: https://github.com/open-mmlab/mmaction2/pull/89. + keep_tail_frames (bool): Whether to keep tail frames when sampling. + Default: False. """ def __init__(self, @@ -109,7 +111,8 @@ def __init__(self, twice_sample=False, out_of_bound_opt='loop', test_mode=False, - start_index=None): + start_index=None, + keep_tail_frames=False): self.clip_len = clip_len self.frame_interval = frame_interval @@ -118,6 +121,7 @@ def __init__(self, self.twice_sample = twice_sample self.out_of_bound_opt = out_of_bound_opt self.test_mode = test_mode + self.keep_tail_frames = keep_tail_frames assert self.out_of_bound_opt in ['loop', 'repeat_last'] if start_index is not None: @@ -140,21 +144,32 @@ def _get_train_clips(self, num_frames): np.ndarray: Sampled frame indices in train mode. 
""" ori_clip_len = self.clip_len * self.frame_interval - avg_interval = (num_frames - ori_clip_len + 1) // self.num_clips - if avg_interval > 0: - base_offsets = np.arange(self.num_clips) * avg_interval - clip_offsets = base_offsets + np.random.randint( - avg_interval, size=self.num_clips) - elif num_frames > max(self.num_clips, ori_clip_len): - clip_offsets = np.sort( - np.random.randint( - num_frames - ori_clip_len + 1, size=self.num_clips)) - elif avg_interval == 0: - ratio = (num_frames - ori_clip_len + 1.0) / self.num_clips - clip_offsets = np.around(np.arange(self.num_clips) * ratio) + if self.keep_tail_frames: + avg_interval = (num_frames - ori_clip_len + 1) / float( + self.num_clips) + if num_frames > ori_clip_len - 1: + base_offsets = np.arange(self.num_clips) * avg_interval + clip_offsets = (base_offsets + np.random.uniform( + 0, avg_interval, self.num_clips)).astype(np.int) + else: + clip_offsets = np.zeros((self.num_clips, ), dtype=np.int) else: - clip_offsets = np.zeros((self.num_clips, ), dtype=np.int) + avg_interval = (num_frames - ori_clip_len + 1) // self.num_clips + + if avg_interval > 0: + base_offsets = np.arange(self.num_clips) * avg_interval + clip_offsets = base_offsets + np.random.randint( + avg_interval, size=self.num_clips) + elif num_frames > max(self.num_clips, ori_clip_len): + clip_offsets = np.sort( + np.random.randint( + num_frames - ori_clip_len + 1, size=self.num_clips)) + elif avg_interval == 0: + ratio = (num_frames - ori_clip_len + 1.0) / self.num_clips + clip_offsets = np.around(np.arange(self.num_clips) * ratio) + else: + clip_offsets = np.zeros((self.num_clips, ), dtype=np.int) return clip_offsets @@ -333,21 +348,11 @@ class DenseSampleFrames(SampleFrames): """ def __init__(self, - clip_len, - frame_interval=1, - num_clips=1, + *args, sample_range=64, num_sample_positions=10, - temporal_jitter=False, - out_of_bound_opt='loop', - test_mode=False): - super().__init__( - clip_len, - frame_interval, - num_clips, - temporal_jitter, - out_of_bound_opt=out_of_bound_opt, - test_mode=test_mode) + **kwargs): + super().__init__(*args, **kwargs) self.sample_range = sample_range self.num_sample_positions = num_sample_positions diff --git a/tests/test_data/test_pipelines/test_loadings/test_sampling.py b/tests/test_data/test_pipelines/test_loadings/test_sampling.py index 4e47424f74..2cd7a60116 100644 --- a/tests/test_data/test_pipelines/test_loadings/test_sampling.py +++ b/tests/test_data/test_pipelines/test_loadings/test_sampling.py @@ -26,6 +26,15 @@ def test_sample_frames(self): clip_len=3, frame_interval=1, num_clips=5, start_index=1) SampleFrames(**config) + # Sample Frame with tail Frames + video_result = copy.deepcopy(self.video_results) + frame_result = copy.deepcopy(self.frame_results) + config = dict( + clip_len=3, frame_interval=1, num_clips=5, keep_tail_frames=True) + sample_frames = SampleFrames(**config) + sample_frames(video_result) + sample_frames(frame_result) + # Sample Frame with no temporal_jitter # clip_len=3, frame_interval=1, num_clips=5 video_result = copy.deepcopy(self.video_results) From 7eedb093bc28cc8a41e0aa6d2b3875315711c284 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Sat, 24 Jul 2021 17:26:57 +0800 Subject: [PATCH 213/414] Update README.md --- configs/recognition/slowfast/README.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/configs/recognition/slowfast/README.md b/configs/recognition/slowfast/README.md index 28f274275f..5f07c2cc31 100644 --- a/configs/recognition/slowfast/README.md +++ 
b/configs/recognition/slowfast/README.md @@ -23,8 +23,7 @@ |[slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 256|8x4| ResNet50|None |74.75|91.73|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb_20200728-145f1097.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log.json)| |[slowfast_r50_video_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py) |short-side 256|8| ResNet50|None |74.34|91.58|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/slowfast_r50_video_4x16x1_256e_kinetics400_rgb_20200826-f85b90c5.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log.json)| |[slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 320|8x2| ResNet50|None |76.0|92.54|1.6 ((32+4)x10x3 frames)|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_256e_kinetics400_rgb_20210722-04e43ed4.pth)| [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log.json)| -|[slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 320|8x2| ResNet50|None |76.34|92.67|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb_20210722-bb725050.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/ -slowfast_prebn_r50_4x16x1_20210722.log.json)| +|[slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 320|8x2| ResNet50|None |76.34|92.67|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb_20210722-bb725050.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log.json)| |[slowfast_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |short-side 256|8x4| ResNet50 |None 
|75.61|92.34|x|9062|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb_20200810-863812c2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log.json)| |[slowfast_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |short-side 320|8x3| ResNet50 |None|76.94|92.8|1.3 ((32+8)x10x3 frames)|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/slowfast_r50_8x8x1_256e_kinetics400_rgb_20200716-73547d2b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log.json)| |[slowfast_r101_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 256|8x1| ResNet101 + ResNet50 |None|76.69|93.07||16628| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/slowfast_r101_4x16x1_256e_kinetics400_rgb_20210218-d8b58813.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log.json)| From eb25d73db361e040619dd989f87ecf30282c1651 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Sat, 24 Jul 2021 17:27:27 +0800 Subject: [PATCH 214/414] Update README_zh-CN.md --- configs/recognition/slowfast/README_zh-CN.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/configs/recognition/slowfast/README_zh-CN.md b/configs/recognition/slowfast/README_zh-CN.md index e2c8d4e014..64be756818 100644 --- a/configs/recognition/slowfast/README_zh-CN.md +++ b/configs/recognition/slowfast/README_zh-CN.md @@ -23,8 +23,7 @@ |[slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) |短边256|8x4| ResNet50|None |74.75|91.73|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb_20200728-145f1097.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log.json)| |[slowfast_r50_video_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py) |短边256|8| ResNet50|None 
|74.34|91.58|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/slowfast_r50_video_4x16x1_256e_kinetics400_rgb_20200826-f85b90c5.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log.json)| |[slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) |短边320|8x2| ResNet50|None |76.0|92.54|1.6 ((32+4)x10x3 frames)|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_256e_kinetics400_rgb_20210722-04e43ed4.pth)| [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log.json)| -|[slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb.py) |短边320|8x2| ResNet50|None |76.34|92.67|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb_20210722-bb725050.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/ -slowfast_prebn_r50_4x16x1_20210722.log.json)| +|[slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb.py) |短边320|8x2| ResNet50|None |76.34|92.67|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb_20210722-bb725050.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log.json)| |[slowfast_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |短边256|8x4| ResNet50 |None |75.61|92.34|x|9062|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb_20200810-863812c2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log.json)| |[slowfast_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |短边320|8x3| ResNet50 |None|76.94|92.8|1.3 ((32+8)x10x3 frames)|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/slowfast_r50_8x8x1_256e_kinetics400_rgb_20200716-73547d2b.pth) | 
[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log.json)| |[slowfast_r101_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py) |短边256|8x1| ResNet101 + ResNet50 |None|76.69|93.07||16628| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/slowfast_r101_4x16x1_256e_kinetics400_rgb_20210218-d8b58813.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log.json)| From f895e06d04016c90462ffaa61091fda6c86d2bcf Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Sun, 25 Jul 2021 21:46:22 +0800 Subject: [PATCH 215/414] [Document] Add description of PoseC3D dataset. (#1053) --- docs/supported_datasets.md | 3 +++ docs_zh_CN/supported_datasets.md | 3 +++ tools/data/skeleton/README.md | 12 ++++++++++++ 3 files changed, 18 insertions(+) diff --git a/docs/supported_datasets.md b/docs/supported_datasets.md index f66fb3d8e2..1bacadaa16 100644 --- a/docs/supported_datasets.md +++ b/docs/supported_datasets.md @@ -22,6 +22,9 @@ - [UCF101-24](/tools/data/ucf101_24/README.md) \[ [Homepage](http://www.thumos.info/download.html) \] - [JHMDB](/tools/data/jhmdb/README.md) \[ [Homepage](http://jhmdb.is.tue.mpg.de/) \] +- Skeleton-based Action Recognition + - [PoseC3D Skeleton Dataset](/tools/data/skeleton/README.md) \[ [Homepage](https://kennymckormick.github.io/posec3d/) \] + The supported datasets are listed above. We provide shell scripts for data preparation under the path `$MMACTION2/tools/data/`. Below is the detailed tutorials of data deployment for each dataset. diff --git a/docs_zh_CN/supported_datasets.md b/docs_zh_CN/supported_datasets.md index feb639d3d2..b0c9b554f6 100644 --- a/docs_zh_CN/supported_datasets.md +++ b/docs_zh_CN/supported_datasets.md @@ -22,6 +22,9 @@ - [UCF101-24](/tools/data/ucf101_24/README_zh-CN.md) \[ [主页](http://www.thumos.info/download.html) \] - [JHMDB](/tools/data/jhmdb/README_zh-CN.md) \[ [主页](http://jhmdb.is.tue.mpg.de/) \] +- 基于人体骨架的动作识别数据集: + - [PoseC3D Skeleton Dataset](/tools/data/skeleton/README.md) \[ [主页](https://kennymckormick.github.io/posec3d/) \] + MMAction2 目前支持的数据集如上所列。 MMAction2 在 `$MMACTION2/tools/data/` 路径下提供数据集准备脚本。 每个数据集的详细准备教程也在 [Readthedocs](https://mmaction2.readthedocs.io/zh_CN/latest/supported_datasets.html) 中给出。 diff --git a/tools/data/skeleton/README.md b/tools/data/skeleton/README.md index 1ecf2796b1..12cf15cd2d 100644 --- a/tools/data/skeleton/README.md +++ b/tools/data/skeleton/README.md @@ -27,6 +27,18 @@ bash download_annotations.sh ${DATASET} PS: Due to [Conditions of Use](http://rose1.ntu.edu.sg/Datasets/actionRecognition.asp) of the NTURGB-D dataset, we can not directly release the annotations used in our experiments. We will prepare a script for pose annotation generation ASAP. Once accomplished, you can use this script to generate all pose annotations used in our experiments. 
+## The Format of PoseC3D Annotations + +Here we briefly introduce the format of the PoseC3D annotations, taking `gym_train.pkl` as an example: it is a list of length 20484, where each item is a dictionary holding the skeleton annotation of one video. Each dictionary has the following fields: + +- kp: The keypoint coordinates, which is a numpy array of shape N (#person) x T (temporal length) x K (#keypoints, 17 in our case) x 2 (x, y coordinates). +- kpscore: The keypoint confidence scores, which is a numpy array of shape N (#person) x T (temporal length) x K (#keypoints, 17 in our case). +- frame_dir: The corresponding video name. +- label: The action category. +- img_shape: The image shape of each frame. +- original_shape: Same as above. +- total_frames: The temporal length of the video. + ## Visualization For skeleton data visualization, you need also to prepare the RGB videos. Please refer to [visualize_heatmap_volume](/demo/visualize_heatmap_volume.ipynb) for detailed process. Here we provide some visualization examples from NTU-60 and FineGYM. From b39eb46262d564be5c83945b2da3c13025e9d493 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Mon, 26 Jul 2021 13:46:09 +0800 Subject: [PATCH 216/414] [ModelZoo] Add csn r50 from scratch ckpt (#1045) * add csn r50 from scratch ckpt * Update README.md * add cn * update metafile --- configs/recognition/csn/README.md | 1 + configs/recognition/csn/README_zh-CN.md | 12 ++++++++++-- configs/recognition/csn/metafile.yml | 20 ++++++++++++++++++++ 3 files changed, 31 insertions(+), 2 deletions(-) diff --git a/configs/recognition/csn/README.md b/configs/recognition/csn/README.md index a347de5f4b..acae7e42fb 100644 --- a/configs/recognition/csn/README.md +++ b/configs/recognition/csn/README.md @@ -33,6 +33,7 @@ doi = {10.1109/ICCV.2019.00565} |config | resolution | gpus | backbone |pretrain| top1 acc| top5 acc | inference_time(video/s) | gpu_mem(M)| ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb](/configs/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb.py)|short-side 320|x| ResNet50 | None | 73.6 | 91.3 | x | x | [ckpt](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb_20210618-4e29e2e8.pth) | [log](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb/20210618_182414.log) | [json](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb/20210618_182414.log.json) | |[ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py)|short-side 320|x| ResNet50 | IG65M | 79.0 | 94.2 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_ig65m_pretrained_r50_32x2x1_58e_kinetics400_rgb_20210617-86d33018.pth) | x | x | |[ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb](/configs/recognition/csn/ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py)|short-side 320|x| ResNet152 | None | 76.5 | 92.1 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_from_scratch_r152_32x2x1_180e_kinetics400_rgb_20210617-5c933ae1.pth) | x | x | 
|[ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py)|short-side 320|x| ResNet152 | Sports1M | 78.2 | 93.0 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_sports1m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-b9b10241.pth) | x | x | diff --git a/configs/recognition/csn/README_zh-CN.md b/configs/recognition/csn/README_zh-CN.md index 9a4fda15ed..06a28cd5c5 100644 --- a/configs/recognition/csn/README_zh-CN.md +++ b/configs/recognition/csn/README_zh-CN.md @@ -33,8 +33,15 @@ doi = {10.1109/ICCV.2019.00565} |配置文件 | 分辨率 | GPU 数量 | 主干网络 |预训练| top1 准确率| top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M)| ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb](/configs/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb.py)|短边 320|x| ResNet50 | None | 73.6 | 91.3 | x | x | [ckpt](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb_20210618-4e29e2e8.pth) | [log](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb/20210618_182414.log) | [json](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb/20210618_182414.log.json) | +|[ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py)|短边 320|x| ResNet50 | IG65M | 79.0 | 94.2 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_ig65m_pretrained_r50_32x2x1_58e_kinetics400_rgb_20210617-86d33018.pth) | x | x | +|[ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb](/configs/recognition/csn/ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py)|短边 320|x| ResNet152 | None | 76.5 | 92.1 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_from_scratch_r152_32x2x1_180e_kinetics400_rgb_20210617-5c933ae1.pth) | x | x | +|[ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py)|短边 320|x| ResNet152 | Sports1M | 78.2 | 93.0 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_sports1m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-b9b10241.pth) | x | x | +|[ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py](/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py)|短边 320|8x4| ResNet152 | IG65M|82.76/82.6|95.68/95.3|x|8516|[ckpt](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb_20200812-9037a758.pth)/[infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-e63ee1bd.pth)|[log](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log)|[json](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log.json)| 
+|[ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb](/configs/recognition/csn/ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py)|短边 320|x| ResNet152 | None | 77.8 | 92.8 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_from_scratch_r152_32x2x1_180e_kinetics400_rgb_20210617-d565828d.pth) | x | x | +|[ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py)|短边 320|x| ResNet152 | Sports1M | 78.8 | 93.5 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_sports1m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-3367437a.pth) | x | x | +|[ipcsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ipcsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py)|短边 320|x| ResNet152 | IG65M | 82.5 | 95.3 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-c3be9793.pth) | x | x | |[ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py](/configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py)|短边 320|8x4| ResNet152 | IG65M|80.14|94.93|x|8517|[ckpt](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20200803-fc66ce8d.pth)|[log](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/20200728_031952.log)|[json](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/20200728_031952.log.json)| -|[ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py](/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py)|短边 320|8x4| ResNet152 | IG65M|82.76|95.68|x|8516|[ckpt](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb_20200812-9037a758.pth)|[log](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log)|[json](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log.json)| 注: @@ -43,7 +50,8 @@ doi = {10.1109/ICCV.2019.00565} 如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。 2. 这里的 **推理时间** 是根据 [基准测试脚本](/tools/analysis/benchmark.py) 获得的,采用测试时的采帧策略,且只考虑模型的推理时间, 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 -3. 我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 +3. 
这里使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 +4. 这里的 **infer_ckpt** 表示该模型权重文件是从 [VMZ](https://github.com/facebookresearch/VMZ) 导入的。 对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。 diff --git a/configs/recognition/csn/metafile.yml b/configs/recognition/csn/metafile.yml index 9cecd88694..10edad13c2 100644 --- a/configs/recognition/csn/metafile.yml +++ b/configs/recognition/csn/metafile.yml @@ -167,3 +167,23 @@ Models: Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_sports1m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-b9b10241.pth inference_time(video/s): x +- Config: configs/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb.py + In Collection: CSN + Metadata: + Architecture: ResNet50 + Epochs: 58 + FLOPs: 56209211392 + Parameters: 13131152 + Pretrained: None + Resolution: short-side 320 + Training Data: Kinetics-400 + Modality: RGB + Name: ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb.py + Results: + - Dataset: Kinetics-400 + Metrics: + Top 1 Accuracy: 73.6 + top5 accuracy: 91.3 + Task: Action Recognition + Weights: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb_20210618-4e29e2e8.pth + inference_time(video/s): x From 9ab8c2af52c561e5c789ccaf7b62f4b7679c103c Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Wed, 28 Jul 2021 01:15:36 +0800 Subject: [PATCH 217/414] Update builder.py --- mmaction/datasets/builder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmaction/datasets/builder.py b/mmaction/datasets/builder.py index 6cd2a053e4..bbdf1bc7ef 100644 --- a/mmaction/datasets/builder.py +++ b/mmaction/datasets/builder.py @@ -75,7 +75,7 @@ def build_dataloader(dataset, persistent_workers (bool): If True, the data loader will not shutdown the worker processes after a dataset has been consumed once. This allows to maintain the workers Dataset instances alive. - The argument also has effect in PyTorch>=1.7.0. + The argument also has effect in PyTorch>=1.8.0. Default: True kwargs (dict, optional): Any keyword argument to be used to initialize DataLoader. 
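The version gate documented in the hunk above is small enough to restate on its own. The sketch below mirrors the check introduced in PATCH 209 and documented in PATCH 217 (`mmaction/datasets/builder.py`); the `build_simple_dataloader` helper name and its defaults are hypothetical, added only to make the pattern self-contained:

```python
from distutils.version import LooseVersion

import torch
from torch.utils.data import DataLoader


def build_simple_dataloader(dataset, num_workers=2, persistent_workers=True,
                            **kwargs):
    # Keep worker processes (and their Dataset instances) alive between
    # epochs, but only forward the flag on PyTorch >= 1.8.0, where it
    # behaves reliably; on older versions it is silently dropped.
    if LooseVersion(torch.__version__) >= LooseVersion('1.8.0'):
        # DataLoader rejects persistent_workers=True combined with
        # num_workers=0, so keep num_workers positive when enabling it.
        kwargs['persistent_workers'] = persistent_workers
    return DataLoader(dataset, num_workers=num_workers, **kwargs)
```

Calling `build_simple_dataloader(dataset)` then behaves the same on old and new PyTorch, which is exactly what the updated docstring above promises.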
From 15b1588f6aa603d9bd4d3104094d704cfe518ef8 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Wed, 28 Jul 2021 19:45:20 +0800 Subject: [PATCH 218/414] [Fix] Support mim (#1061) --- .gitignore | 1 + MANIFEST.in | 6 +++--- README.md | 1 + README_zh-CN.md | 1 + setup.py | 54 +++++++++++++++++++++++++++++++++++++++++++++++++ 5 files changed, 60 insertions(+), 3 deletions(-) diff --git a/.gitignore b/.gitignore index 722a50e523..4eba3d0d8e 100644 --- a/.gitignore +++ b/.gitignore @@ -132,3 +132,4 @@ work_dirs/ # avoid soft links created by MIM mmaction/configs/* mmaction/tools/* +mmaction/.mim diff --git a/MANIFEST.in b/MANIFEST.in index 3dbdf42ba6..258c4e016b 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,3 +1,3 @@ -include mmaction/model-index.yml -recursive-include mmaction/configs *.py *.yml -recursive-include mmaction/tools *.sh *.py +include mmaction/.mim/model-index.yml +recursive-include mmaction/.mim/configs *.py *.yml +recursive-include mmaction/.mim/tools *.sh *.py diff --git a/README.md b/README.md index 9443603366..31fa544e4e 100644 --- a/README.md +++ b/README.md @@ -257,6 +257,7 @@ We wish that the toolbox and benchmark could serve the growing research communit ## Projects in OpenMMLab - [MMCV](https://github.com/open-mmlab/mmcv): OpenMMLab foundational library for computer vision. +- [MIM](https://github.com/open-mmlab/mim): MIM Installs OpenMMLab Packages. - [MMClassification](https://github.com/open-mmlab/mmclassification): OpenMMLab image classification toolbox and benchmark. - [MMDetection](https://github.com/open-mmlab/mmdetection): OpenMMLab detection toolbox and benchmark. - [MMDetection3D](https://github.com/open-mmlab/mmdetection3d): OpenMMLab's next-generation platform for general 3D object detection. diff --git a/README_zh-CN.md b/README_zh-CN.md index ac7a5c1107..d39bbac68a 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -227,6 +227,7 @@ MMAction2 是一款由不同学校和公司共同贡献的开源项目。我们 ## OpenMMLab 的其他项目 - [MMCV](https://github.com/open-mmlab/mmcv): OpenMMLab 计算机视觉基础库 +- [MIM](https://github.com/open-mmlab/mim): MIM 是 OpenMMlab 项目、算法、模型的统一入口 - [MMClassification](https://github.com/open-mmlab/mmclassification): OpenMMLab 图像分类工具箱与测试基准 - [MMDetection](https://github.com/open-mmlab/mmdetection): OpenMMLab 检测工具箱与测试基准 - [MMDetection3D](https://github.com/open-mmlab/mmdetection3d): OpenMMLab 新一代通用3D目标检测平台 diff --git a/setup.py b/setup.py index 6477cb0454..f23072f05f 100644 --- a/setup.py +++ b/setup.py @@ -1,3 +1,8 @@ +import os +import os.path as osp +import shutil +import sys +import warnings from setuptools import find_packages, setup @@ -94,7 +99,56 @@ def gen_packages_items(): return packages +def add_mim_extention(): + """Add extra files that are required to support MIM into the package. + + These files will be added by creating a symlink to the originals if the + package is installed in `editable` mode (e.g. pip install -e .), or by + copying from the originals otherwise. 
+ """ + + # parse installment mode + if 'develop' in sys.argv: + # installed by `pip install -e .` + mode = 'symlink' + elif 'sdist' in sys.argv or 'bdist_wheel' in sys.argv: + # installed by `pip install .` + # or create source distribution by `python setup.py sdist` + mode = 'copy' + else: + return + + filenames = ['tools', 'configs', 'model-index.yml'] + repo_path = osp.dirname(__file__) + mim_path = osp.join(repo_path, 'mmaction', '.mim') + os.makedirs(mim_path, exist_ok=True) + + for filename in filenames: + if osp.exists(filename): + src_path = osp.join(repo_path, filename) + tar_path = osp.join(mim_path, filename) + + if osp.isfile(tar_path) or osp.islink(tar_path): + os.remove(tar_path) + elif osp.isdir(tar_path): + shutil.rmtree(tar_path) + + if mode == 'symlink': + src_relpath = osp.relpath(src_path, osp.dirname(tar_path)) + os.symlink(src_relpath, tar_path) + elif mode == 'copy': + if osp.isfile(src_path): + shutil.copyfile(src_path, tar_path) + elif osp.isdir(src_path): + shutil.copytree(src_path, tar_path) + else: + warnings.warn(f'Cannot copy file {src_path}.') + else: + raise ValueError(f'Invalid mode {mode}') + + if __name__ == '__main__': + add_mim_extention() setup( name='mmaction2', version=get_version(), From 6c8e59a34a1779ca2053ad1748ee91165267fd38 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Thu, 29 Jul 2021 19:10:59 +0800 Subject: [PATCH 219/414] [Enhancement] Calculate Top K accurate and inaccurate classes (#1047) * top_k_x * add comments and unittest * update api --- mmaction/core/evaluation/__init__.py | 5 ++-- mmaction/core/evaluation/accuracy.py | 39 ++++++++++++++++++++++++++++ tests/test_metrics/test_accuracy.py | 35 ++++++++++++++++++++++++- 3 files changed, 76 insertions(+), 3 deletions(-) diff --git a/mmaction/core/evaluation/__init__.py b/mmaction/core/evaluation/__init__.py index c8000ed162..a4694017cc 100644 --- a/mmaction/core/evaluation/__init__.py +++ b/mmaction/core/evaluation/__init__.py @@ -3,7 +3,7 @@ get_weighted_score, interpolated_precision_recall, mean_average_precision, mean_class_accuracy, mmit_mean_average_precision, pairwise_temporal_iou, - softmax, top_k_accuracy) + softmax, top_k_accuracy, top_k_classes) from .eval_detection import ActivityNetLocalization from .eval_hooks import DistEvalHook, EvalHook @@ -12,5 +12,6 @@ 'confusion_matrix', 'mean_average_precision', 'get_weighted_score', 'average_recall_at_avg_proposals', 'pairwise_temporal_iou', 'average_precision_at_temporal_iou', 'ActivityNetLocalization', 'softmax', - 'interpolated_precision_recall', 'mmit_mean_average_precision' + 'interpolated_precision_recall', 'mmit_mean_average_precision', + 'top_k_classes' ] diff --git a/mmaction/core/evaluation/accuracy.py b/mmaction/core/evaluation/accuracy.py index 49c5c4950e..06a38de241 100644 --- a/mmaction/core/evaluation/accuracy.py +++ b/mmaction/core/evaluation/accuracy.py @@ -87,6 +87,45 @@ def mean_class_accuracy(scores, labels): return mean_class_acc +def top_k_classes(scores, labels, k=10, mode='accurate'): + """Calculate the most K accurate (inaccurate) classes. + + Given the prediction scores, ground truth label and top-k value, + compute the top K accurate (inaccurate) classes. + + Args: + scores (list[np.ndarray]): Prediction scores for each class. + labels (list[int] | np.ndarray): Ground truth labels. + k (int): Top-k values. Default: 10. + mode (str): Comparison mode for Top-k. Options are 'accurate' + and 'inaccurate'. Default: 'accurate'. 
+ + Return: + list: List of sorted (from high accuracy to low accuracy for + 'accurate' mode, and from low accuracy to high accuracy for + inaccurate mode) top K classes in format of (label_id, + acc_ratio). + """ + assert mode in ['accurate', 'inaccurate'] + pred = np.argmax(scores, axis=1) + cf_mat = confusion_matrix(pred, labels).astype(float) + + cls_cnt = cf_mat.sum(axis=1) + cls_hit = np.diag(cf_mat) + hit_ratio = np.array( + [hit / cnt if cnt else 0.0 for cnt, hit in zip(cls_cnt, cls_hit)]) + + if mode == 'accurate': + max_index = np.argsort(hit_ratio)[-k:][::-1] + max_value = hit_ratio[max_index] + results = list(zip(max_index, max_value)) + else: + min_index = np.argsort(hit_ratio)[:k] + min_value = hit_ratio[min_index] + results = list(zip(min_index, min_value)) + return results + + def top_k_accuracy(scores, labels, topk=(1, )): """Calculate top k accuracy score. diff --git a/tests/test_metrics/test_accuracy.py b/tests/test_metrics/test_accuracy.py index d2e5eae63b..6d2a647b5e 100644 --- a/tests/test_metrics/test_accuracy.py +++ b/tests/test_metrics/test_accuracy.py @@ -9,7 +9,8 @@ average_recall_at_avg_proposals, confusion_matrix, get_weighted_score, mean_average_precision, mean_class_accuracy, mmit_mean_average_precision, - pairwise_temporal_iou, top_k_accuracy) + pairwise_temporal_iou, top_k_accuracy, + top_k_classes) from mmaction.core.evaluation.ava_utils import ava_eval @@ -307,3 +308,35 @@ def content_for_unittest(scores, labels, result): content_for_unittest(scores, label1, result1) content_for_unittest(scores, label2, result2) + + +def test_top_k_accurate_classes(): + scores = [ + np.array([0.1, 0.2, 0.3, 0.4]), # 3 + np.array([0.2, 0.3, 0.4, 0.1]), # 2 + np.array([0.3, 0.4, 0.1, 0.2]), # 1 + np.array([0.4, 0.1, 0.2, 0.3]), # 0 + np.array([0.25, 0.1, 0.3, 0.35]), # 3 + np.array([0.2, 0.15, 0.3, 0.35]), # 3 + ] + label = np.array([3, 2, 2, 1, 3, 3]) + + with pytest.raises(AssertionError): + top_k_classes(scores, label, 1, mode='wrong') + + results_top1 = top_k_classes(scores, label, 1) + results_top3 = top_k_classes(scores, label, 3) + assert len(results_top1) == 1 + assert len(results_top3) == 3 + assert results_top3[0] == results_top1[0] + assert results_top1 == [(3, 1.)] + assert results_top3 == [(3, 1.), (2, 0.5), (1, 0.0)] + + label = np.array([3, 2, 1, 1, 3, 0]) + results_top1 = top_k_classes(scores, label, 1, mode='inaccurate') + results_top3 = top_k_classes(scores, label, 3, mode='inaccurate') + assert len(results_top1) == 1 + assert len(results_top3) == 3 + assert results_top3[0] == results_top1[0] + assert results_top1 == [(0, 0.)] + assert results_top3 == [(0, 0.0), (1, 0.5), (2, 1.0)] From 49fdd5909599a6dcf7c3f3b8aeddfef52595b1b0 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Fri, 30 Jul 2021 20:46:25 +0800 Subject: [PATCH 220/414] [Improvement] Update colorjitter (#1025) * update colorjitter * resolve comments * update unittest * update ckpts --- configs/recognition/tpn/README.md | 4 +- configs/recognition/tpn/README_zh-CN.md | 4 +- configs/recognition/tpn/metafile.yml | 20 +- ...ed_slowonly_r50_8x8x1_150e_kinetics_rgb.py | 4 +- .../tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py | 2 +- mmaction/datasets/pipelines/augmentations.py | 230 ++++++------------ .../test_augmentations/test_color.py | 58 ++--- 7 files changed, 104 insertions(+), 218 deletions(-) diff --git a/configs/recognition/tpn/README.md b/configs/recognition/tpn/README.md index c7973f381a..0d65841127 100644 --- a/configs/recognition/tpn/README.md +++ b/configs/recognition/tpn/README.md @@ -19,8 
+19,8 @@ |config | resolution | gpus | backbone | pretrain | top1 acc| top5 acc | reference top1 acc | reference top5 acc | inference_time(video/s) | gpu_mem(M)| ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tpn_slowonly_r50_8x8x1_150e_kinetics_rgb](/configs/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb.py)|short-side 320|8x4| ResNet50 | ImageNet | 73.10 | 91.03 | x | x | x | 6916 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb_20200910-b796d7a0.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/20200910_134330.log) | [json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/20200910_134330.log.json) | -|[tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb](/configs/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.py)|short-side 320|8x4| ResNet50 | ImageNet | 76.20 | 92.44 | [75.49](https://github.com/decisionforce/TPN/blob/master/MODELZOO.md) | [92.05](https://github.com/decisionforce/TPN/blob/master/MODELZOO.md) | x | 6916 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb_20200923-52629684.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/20200923_151919.log) | [json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/20200923_151919.log.json) | +|[tpn_slowonly_r50_8x8x1_150e_kinetics_rgb](/configs/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb.py)|short-side 320|8x2| ResNet50 | None | 73.58 | 91.35 | x | x | x | 6916 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb-c568e7ad.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb.json) | +|[tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb](/configs/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.py)|short-side 320|8| ResNet50 | ImageNet | 76.59 | 92.72 | [75.49](https://github.com/decisionforce/TPN/blob/master/MODELZOO.md) | [92.05](https://github.com/decisionforce/TPN/blob/master/MODELZOO.md) | x | 6916 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb-44362b55.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.json) | ### Something-Something V1 diff --git a/configs/recognition/tpn/README_zh-CN.md b/configs/recognition/tpn/README_zh-CN.md index 2e32d36948..e6e04cdf44 100644 --- a/configs/recognition/tpn/README_zh-CN.md 
+++ b/configs/recognition/tpn/README_zh-CN.md @@ -19,8 +19,8 @@ |配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M)| ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tpn_slowonly_r50_8x8x1_150e_kinetics_rgb](/configs/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb.py)|短边 320|8x4| ResNet50 | ImageNet | 73.10 | 91.03 | x | x | x | 6916 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb_20200910-b796d7a0.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/20200910_134330.log) | [json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/20200910_134330.log.json) | -|[tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb](/configs/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.py)|短边 320|8x4| ResNet50 | ImageNet | 76.20 | 92.44 | [75.49](https://github.com/decisionforce/TPN/blob/master/MODELZOO.md) | [92.05](https://github.com/decisionforce/TPN/blob/master/MODELZOO.md) | x | 6916 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb_20200923-52629684.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/20200923_151919.log) | [json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/20200923_151919.log.json) | +|[tpn_slowonly_r50_8x8x1_150e_kinetics_rgb](/configs/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb.py)|短边 320|8x2| ResNet50 | None | 73.58 | 91.35 | x | x | x | 6916 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb-c568e7ad.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb.json) | +|[tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb](/configs/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.py)|短边 320|8| ResNet50 | ImageNet | 76.59 | 92.72 | [75.49](https://github.com/decisionforce/TPN/blob/master/MODELZOO.md) | [92.05](https://github.com/decisionforce/TPN/blob/master/MODELZOO.md) | x | 6916 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb-44362b55.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.json) | ### Something-Something V1 diff --git a/configs/recognition/tpn/metafile.yml b/configs/recognition/tpn/metafile.yml index 1387c4c5e2..bb70db3cd3 100644 --- a/configs/recognition/tpn/metafile.yml +++ 
b/configs/recognition/tpn/metafile.yml @@ -19,12 +19,12 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - Top 1 Accuracy: 73.1 - top5 accuracy: 91.03 + Top 1 Accuracy: 73.58 + top5 accuracy: 91.35 Task: Action Recognition - Training Json Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/20200910_134330.log.json - Training Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/20200910_134330.log - Weights: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb_20200910-b796d7a0.pth + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb.log + Weights: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb-c568e7ad.pth - Config: configs/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.py In Collection: TPN Metadata: @@ -42,12 +42,12 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - Top 1 Accuracy: 76.2 - top5 accuracy: 92.44 + Top 1 Accuracy: 76.59 + top5 accuracy: 92.72 Task: Action Recognition - Training Json Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/20200923_151919.log.json - Training Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/20200923_151919.log - Weights: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb_20200923-52629684.pth + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.log + Weights: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb-44362b55.pth - Config: configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py In Collection: TPN Metadata: diff --git a/configs/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.py b/configs/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.py index fb4340f19e..a84a0b1895 100644 --- a/configs/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.py +++ b/configs/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.py @@ -17,7 +17,7 @@ dict(type='RandomResizedCrop'), dict(type='Resize', scale=(224, 224), keep_ratio=False), dict(type='Flip', flip_ratio=0.5), - dict(type='ColorJitter', color_space_aug=True), + dict(type='ColorJitter'), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), @@ -33,7 +33,7 @@ dict(type='RawFrameDecode'), dict(type='Resize', 
scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), - dict(type='ColorJitter', color_space_aug=True), + dict(type='ColorJitter'), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py b/configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py index 32c108e7e6..8783f550fb 100644 --- a/configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py +++ b/configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py @@ -17,7 +17,7 @@ dict(type='RandomResizedCrop'), dict(type='Resize', scale=(224, 224), keep_ratio=False), dict(type='Flip', flip_ratio=0.5), - dict(type='ColorJitter', color_space_aug=True), + dict(type='ColorJitter'), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/mmaction/datasets/pipelines/augmentations.py b/mmaction/datasets/pipelines/augmentations.py index 6fe020fd2b..f79f312b1d 100644 --- a/mmaction/datasets/pipelines/augmentations.py +++ b/mmaction/datasets/pipelines/augmentations.py @@ -3,6 +3,7 @@ from collections.abc import Sequence from distutils.version import LooseVersion +import cv2 import mmcv import numpy as np from torch.nn.modules.utils import _pair @@ -1520,183 +1521,98 @@ def __repr__(self): @PIPELINES.register_module() class ColorJitter: - """Randomly distort the brightness, contrast, saturation and hue of images, - and add PCA based noise into images. + """Perform ColorJitter to each img. - Note: The input images should be in RGB channel order. - - Code Reference: - https://gluon-cv.mxnet.io/_modules/gluoncv/data/transforms/experimental/image.html - https://mxnet.apache.org/api/python/docs/_modules/mxnet/image/image.html#LightingAug - - If specified to apply color space augmentation, it will distort the image - color space by changing brightness, contrast and saturation. Then, it will - add some random distort to the images in different color channels. - Note that the input images should be in original range [0, 255] and in RGB - channel sequence. - - Required keys are "imgs", added or modified keys are "imgs", "eig_val", - "eig_vec", "alpha_std" and "color_space_aug". + Required keys are "imgs", added or modified keys are "imgs". Args: - color_space_aug (bool): Whether to apply color space augmentations. If - specified, it will change the brightness, contrast, saturation and - hue of images, then add PCA based noise to images. Otherwise, it - will directly add PCA based noise to images. Default: False. - alpha_std (float): Std in the normal Gaussian distribution of alpha. - eig_val (np.ndarray | None): Eigenvalues of [1 x 3] size for RGB - channel jitter. If set to None, it will use the default - eigenvalues. Default: None. - eig_vec (np.ndarray | None): Eigenvectors of [3 x 3] size for RGB - channel jitter. If set to None, it will use the default - eigenvectors. Default: None. + brightness (float | tuple[float]): The jitter range for brightness, if + set as a float, the range will be (1 - brightness, 1 + brightness). + Default: 0.5. + contrast (float | tuple[float]): The jitter range for contrast, if set + as a float, the range will be (1 - contrast, 1 + contrast). + Default: 0.5. + saturation (float | tuple[float]): The jitter range for saturation, if + set as a float, the range will be (1 - saturation, 1 + saturation). + Default: 0.5. 
+ hue (float | tuple[float]): The jitter range for hue, if set as a + float, the range will be (-hue, hue). Default: 0.1. """ - def __init__(self, - color_space_aug=False, - alpha_std=0.1, - eig_val=None, - eig_vec=None): - if eig_val is None: - # note that the data range should be [0, 255] - self.eig_val = np.array([55.46, 4.794, 1.148], dtype=np.float32) - else: - self.eig_val = eig_val - - if eig_vec is None: - self.eig_vec = np.array([[-0.5675, 0.7192, 0.4009], - [-0.5808, -0.0045, -0.8140], - [-0.5836, -0.6948, 0.4203]], - dtype=np.float32) - else: - self.eig_vec = eig_vec - - self.alpha_std = alpha_std - self.color_space_aug = color_space_aug - @staticmethod - def brightness(img, delta): - """Brightness distortion. - - Args: - img (np.ndarray): An input image. - delta (float): Delta value to distort brightness. - It ranges from [-32, 32). - - Returns: - np.ndarray: A brightness distorted image. - """ - if np.random.rand() > 0.5: - img = img + np.float32(delta) - return img + def check_input(val, max, base): + if isinstance(val, tuple): + assert base - max <= val[0] <= val[1] <= base + max + return val + assert val <= max + return (base - val, base + val) @staticmethod - def contrast(img, alpha): - """Contrast distortion. - - Args: - img (np.ndarray): An input image. - alpha (float): Alpha value to distort contrast. - It ranges from [0.6, 1.4). - - Returns: - np.ndarray: A contrast distorted image. - """ - if np.random.rand() > 0.5: - img = img * np.float32(alpha) - return img + def rgb_to_grayscale(img): + return 0.2989 * img[..., 0] + 0.587 * img[..., 1] + 0.114 * img[..., 2] @staticmethod - def saturation(img, alpha): - """Saturation distortion. - - Args: - img (np.ndarray): An input image. - alpha (float): Alpha value to distort the saturation. - It ranges from [0.6, 1.4). - - Returns: - np.ndarray: A saturation distorted image. - """ - if np.random.rand() > 0.5: - gray = img * np.array([0.299, 0.587, 0.114], dtype=np.float32) - gray = np.sum(gray, 2, keepdims=True) - gray *= (1.0 - alpha) - img = img * alpha - img = img + gray - return img + def adjust_contrast(img, factor): + val = np.mean(ColorJitter.rgb_to_grayscale(img)) + return factor * img + (1 - factor) * val @staticmethod - def hue(img, alpha): - """Hue distortion. - - Args: - img (np.ndarray): An input image. - alpha (float): Alpha value to control the degree of rotation - for hue. It ranges from [-18, 18). + def adjust_saturation(img, factor): + gray = np.stack([ColorJitter.rgb_to_grayscale(img)] * 3, axis=-1) + return factor * img + (1 - factor) * gray - Returns: - np.ndarray: A hue distorted image. 
- """ - if np.random.rand() > 0.5: - u = np.cos(alpha * np.pi) - w = np.sin(alpha * np.pi) - bt = np.array([[1.0, 0.0, 0.0], [0.0, u, -w], [0.0, w, u]], - dtype=np.float32) - tyiq = np.array([[0.299, 0.587, 0.114], [0.596, -0.274, -0.321], - [0.211, -0.523, 0.311]], - dtype=np.float32) - ityiq = np.array([[1.0, 0.956, 0.621], [1.0, -0.272, -0.647], - [1.0, -1.107, 1.705]], - dtype=np.float32) - t = np.dot(np.dot(ityiq, bt), tyiq).T - t = np.array(t, dtype=np.float32) - img = np.dot(img, t) - return img + @staticmethod + def adjust_hue(img, factor): + img = np.clip(img, 0, 255).astype(np.uint8) + hsv = cv2.cvtColor(img, cv2.COLOR_RGB2HSV) + offset = int(factor * 255) + hsv[..., 0] = (hsv[..., 0] + offset) % 180 + img = cv2.cvtColor(hsv, cv2.COLOR_HSV2RGB) + return img.astype(np.float32) + + def __init__(self, brightness=0.5, contrast=0.5, saturation=0.5, hue=0.1): + self.brightness = self.check_input(brightness, 1, 1) + self.contrast = self.check_input(contrast, 1, 1) + self.saturation = self.check_input(saturation, 1, 1) + self.hue = self.check_input(hue, 0.5, 0) + self.fn_idx = np.random.permutation(4) def __call__(self, results): imgs = results['imgs'] - out = [] - if self.color_space_aug: - bright_delta = np.random.uniform(-32, 32) - contrast_alpha = np.random.uniform(0.6, 1.4) - saturation_alpha = np.random.uniform(0.6, 1.4) - hue_alpha = np.random.uniform(-18, 18) - jitter_coin = np.random.rand() - for img in imgs: - img = self.brightness(img, delta=bright_delta) - if jitter_coin > 0.5: - img = self.contrast(img, alpha=contrast_alpha) - img = self.saturation(img, alpha=saturation_alpha) - img = self.hue(img, alpha=hue_alpha) - else: - img = self.saturation(img, alpha=saturation_alpha) - img = self.hue(img, alpha=hue_alpha) - img = self.contrast(img, alpha=contrast_alpha) - out.append(img) - else: - out = imgs - - # Add PCA based noise - alpha = np.random.normal(0, self.alpha_std, size=(3, )) - rgb = np.array( - np.dot(self.eig_vec * alpha, self.eig_val), dtype=np.float32) - rgb = rgb[None, None, ...] 
- - results['imgs'] = [img + rgb for img in out] - results['eig_val'] = self.eig_val - results['eig_vec'] = self.eig_vec - results['alpha_std'] = self.alpha_std - results['color_space_aug'] = self.color_space_aug - + num_clips, clip_len = 1, len(imgs) + + new_imgs = [] + for i in range(num_clips): + b = np.random.uniform( + low=self.brightness[0], high=self.brightness[1]) + c = np.random.uniform(low=self.contrast[0], high=self.contrast[1]) + s = np.random.uniform( + low=self.saturation[0], high=self.saturation[1]) + h = np.random.uniform(low=self.hue[0], high=self.hue[1]) + start, end = i * clip_len, (i + 1) * clip_len + + for img in imgs[start:end]: + img = img.astype(np.float32) + for fn_id in self.fn_idx: + if fn_id == 0 and b != 1: + img *= b + if fn_id == 1 and c != 1: + img = self.adjust_contrast(img, c) + if fn_id == 2 and s != 1: + img = self.adjust_saturation(img, s) + if fn_id == 3 and h != 0: + img = self.adjust_hue(img, h) + img = np.clip(img, 0, 255).astype(np.uint8) + new_imgs.append(img) + results['imgs'] = new_imgs return results def __repr__(self): repr_str = (f'{self.__class__.__name__}(' - f'color_space_aug={self.color_space_aug}, ' - f'alpha_std={self.alpha_std}, ' - f'eig_val={self.eig_val}, ' - f'eig_vec={self.eig_vec})') + f'brightness={self.brightness}, ' + f'contrast={self.contrast}, ' + f'saturation={self.saturation}, ' + f'hue={self.hue})') return repr_str diff --git a/tests/test_data/test_pipelines/test_augmentations/test_color.py b/tests/test_data/test_pipelines/test_augmentations/test_color.py index 72b3ccd4e6..0aa65c65af 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_color.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_color.py @@ -1,6 +1,5 @@ import numpy as np from mmcv.utils import assert_dict_has_keys -from numpy.testing import assert_array_equal from mmaction.datasets.pipelines import ColorJitter @@ -13,52 +12,23 @@ def test_color_jitter(): np.random.randint(0, 255, size=(3, 112, 112, 3), dtype=np.uint8)) results = dict(imgs=imgs) - eig_val = np.array([55.46, 4.794, 1.148], dtype=np.float32) - eig_vec = np.array([[-0.5675, 0.7192, 0.4009], - [-0.5808, -0.0045, -0.8140], - [-0.5836, -0.6948, 0.4203]], - dtype=np.float32) - color_jitter = ColorJitter() - assert_array_equal(color_jitter.eig_val, eig_val) - assert_array_equal(color_jitter.eig_vec, eig_vec) - assert color_jitter.alpha_std == 0.1 - assert color_jitter.color_space_aug is False + assert color_jitter.brightness == (0.5, 1.5) + assert color_jitter.contrast == (0.5, 1.5) + assert color_jitter.saturation == (0.5, 1.5) + assert color_jitter.hue == (-0.1, 0.1) + color_jitter_results = color_jitter(results) - target_keys = [ - 'imgs', 'eig_val', 'eig_vec', 'alpha_std', 'color_space_aug' - ] + target_keys = ['imgs'] + assert assert_dict_has_keys(color_jitter_results, target_keys) assert np.shape(color_jitter_results['imgs']) == (3, 112, 112, 3) - assert_array_equal(color_jitter_results['eig_val'], eig_val) - assert_array_equal(color_jitter_results['eig_vec'], eig_vec) - assert color_jitter_results['alpha_std'] == 0.1 - assert color_jitter_results['color_space_aug'] is False - - custom_eig_val = np.ones(3, ) - custom_eig_vec = np.ones((3, 3)) + for img in color_jitter_results['imgs']: + assert np.all(img >= 0) + assert np.all(img <= 255) - imgs = list( - np.random.randint(0, 255, size=(3, 64, 80, 3), dtype=np.uint8)) - results = dict(imgs=imgs) - custom_color_jitter = ColorJitter(True, 0.5, custom_eig_val, - custom_eig_vec) - assert_array_equal(color_jitter.eig_val, 
eig_val) - assert_array_equal(color_jitter.eig_vec, eig_vec) - assert custom_color_jitter.alpha_std == 0.5 - assert custom_color_jitter.color_space_aug is True - custom_color_jitter_results = custom_color_jitter(results) - assert np.shape(custom_color_jitter_results['imgs']) == (3, 64, 80, 3) - assert_array_equal(custom_color_jitter_results['eig_val'], - custom_eig_val) - assert_array_equal(custom_color_jitter_results['eig_vec'], - custom_eig_vec) - assert custom_color_jitter_results['alpha_std'] == 0.5 - assert custom_color_jitter_results['color_space_aug'] is True - - color_jitter = ColorJitter() assert repr(color_jitter) == (f'{color_jitter.__class__.__name__}(' - f'color_space_aug={False}, ' - f'alpha_std={0.1}, ' - f'eig_val={eig_val}, ' - f'eig_vec={eig_vec})') + f'brightness={(0.5, 1.5)}, ' + f'contrast={(0.5, 1.5)}, ' + f'saturation={(0.5, 1.5)}, ' + f'hue={-0.1, 0.1})') From 931b7c69d4cb41cce031aea44807ce29efdc8840 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Tue, 3 Aug 2021 12:45:33 +0800 Subject: [PATCH 221/414] [Doc] update changelog (#1065) --- docs/changelog.md | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/docs/changelog.md b/docs/changelog.md index 0631f1b6d5..c382868898 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -2,19 +2,54 @@ ### Master +### 0.17.0 (03/08/2021) + **Highlights** +- Support PyTorch 1.9 +- Support Pytorchvideo Transforms +- Support PreciseBN + **New Features** - Support Pytorchvideo Transforms ([#1008](https://github.com/open-mmlab/mmaction2/pull/1008)) +- Support PreciseBN ([#1038](https://github.com/open-mmlab/mmaction2/pull/1038)) **Improvements** +- Remove redundant augmentations in config files ([#996](https://github.com/open-mmlab/mmaction2/pull/996)) +- Make resource directory to hold common resource pictures ([#1011](https://github.com/open-mmlab/mmaction2/pull/1011)) +- Remove deperecated FrameSelector ([#1010](https://github.com/open-mmlab/mmaction2/pull/1010)) +- Support Concat Dataset ([#1000](https://github.com/open-mmlab/mmaction2/pull/1000)) +- Add `to-mp4` option to resize_videos.py ([#1021](https://github.com/open-mmlab/mmaction2/pull/1021)) +- Add option to keep tail frames ([#1050](https://github.com/open-mmlab/mmaction2/pull/1050)) +- Update MIM support ([#1061](https://github.com/open-mmlab/mmaction2/pull/1061)) +- Calculate Top-K accurate and inaccurate classes ([#1047](https://github.com/open-mmlab/mmaction2/pull/1047)) + **Bug and Typo Fixes** +- Fix bug in PoseC3D demo ([#1009](https://github.com/open-mmlab/mmaction2/pull/1009)) +- Fix some problems in resize_videos.py ([#1012](https://github.com/open-mmlab/mmaction2/pull/1012)) +- Support torch1.9 ([#1015](https://github.com/open-mmlab/mmaction2/pull/1015)) +- Remove redundant code in CI ([#1046](https://github.com/open-mmlab/mmaction2/pull/1046)) +- Fix bug about persistent_workers ([#1044](https://github.com/open-mmlab/mmaction2/pull/1044)) +- Support TimeSformer feature extraction ([#1035](https://github.com/open-mmlab/mmaction2/pull/1035)) +- Fix ColorJitter ([#1025](https://github.com/open-mmlab/mmaction2/pull/1025)) + **ModelZoo** - Add TSM-R50 sthv1 models trained by PytorchVideo RandAugment and AugMix ([#1008](https://github.com/open-mmlab/mmaction2/pull/1008)) +- Update SlowOnly SthV1 checkpoints ([#1034](https://github.com/open-mmlab/mmaction2/pull/1034)) +- Add SlowOnly Kinetics400 checkpoints trained with Precise-BN ([#1038](https://github.com/open-mmlab/mmaction2/pull/1038)) +- Add CSN-R50 from scratch 
checkpoints ([#1045](https://github.com/open-mmlab/mmaction2/pull/1045)) +- TPN Kinetics-400 Checkpoints trained with the new ColorJitter ([#1025](https://github.com/open-mmlab/mmaction2/pull/1025)) + +**Documentation** + +- Add Chinese translation of feature_extraction.md ([#1020](https://github.com/open-mmlab/mmaction2/pull/1020)) +- Fix the code snippet in getting_started.md ([#1023](https://github.com/open-mmlab/mmaction2/pull/1023)) +- Fix TANet config table ([#1028](https://github.com/open-mmlab/mmaction2/pull/1028)) +- Add description to PoseC3D dataset ([#1053](https://github.com/open-mmlab/mmaction2/pull/1053)) ### 0.16.0 (01/07/2021) From 3cc2e807d91c6f829ec7b6debfaf76ce97858b40 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Tue, 3 Aug 2021 12:49:56 +0800 Subject: [PATCH 222/414] Bump Version to 0.17.0 (#1066) * update changelog * Bump Version --- README.md | 2 +- README_zh-CN.md | 2 +- mmaction/version.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 31fa544e4e..2f0ae802a9 100644 --- a/README.md +++ b/README.md @@ -59,7 +59,7 @@ The master branch works with **PyTorch 1.3+**. ## Changelog -v0.16.0 was released in 01/07/2021. Please refer to [changelog.md](docs/changelog.md) for details and release history. +v0.17.0 was released in 03/08/2021. Please refer to [changelog.md](docs/changelog.md) for details and release history. ## Benchmark diff --git a/README_zh-CN.md b/README_zh-CN.md index d39bbac68a..cac77e17c2 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -53,7 +53,7 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLa ## 更新记录 -v0.16.0 版本已于 2021 年 7 月 1 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史 +v0.17.0 版本已于 2021 年 8 月 3 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史 ## 基准测试 diff --git a/mmaction/version.py b/mmaction/version.py index 34877bdff8..bf9fdb7351 100644 --- a/mmaction/version.py +++ b/mmaction/version.py @@ -1,6 +1,6 @@ # Copyright (c) Open-MMLab. All rights reserved. -__version__ = '0.16.0' +__version__ = '0.17.0' def parse_version_info(version_str): From 5c24a8b64b013e354ae490a687d49dcf8ac3e578 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Thu, 5 Aug 2021 13:49:40 +0800 Subject: [PATCH 223/414] [Doc] Update .readthedocs.yml (#1072) --- .readthedocs.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.readthedocs.yml b/.readthedocs.yml index 2c98050982..e35725f209 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -1,5 +1,7 @@ version: 2 +formats: all + python: version: 3.7 install: From 456a5f23bfffbb4ffa4c944f995dcdc8186e59bd Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Sat, 7 Aug 2021 12:28:23 +0800 Subject: [PATCH 224/414] [Feature] Support NTU Pose Extraction (#1076) * ntupose_extraction * fix bug * resolve comments --- tools/data/skeleton/README.md | 24 +- .../skeleton/S001C001P001R001A001_rgb.avi | Bin 0 -> 987146 bytes tools/data/skeleton/ntu_pose_extraction.py | 337 ++++++++++++++++++ 3 files changed, 353 insertions(+), 8 deletions(-) create mode 100644 tools/data/skeleton/S001C001P001R001A001_rgb.avi create mode 100644 tools/data/skeleton/ntu_pose_extraction.py diff --git a/tools/data/skeleton/README.md b/tools/data/skeleton/README.md index 12cf15cd2d..c6bfa92af5 100644 --- a/tools/data/skeleton/README.md +++ b/tools/data/skeleton/README.md @@ -19,20 +19,28 @@ We release the skeleton annotations used in [Revisiting Skeleton-based Action Re ## Prepare Annotations -Currently, we support one dataset: FineGYM. You can execute following scripts to prepare the annotations. 
+Currently, we support FineGYM and NTURGB+D. For FineGYM, you can execute the following script to prepare the annotations.

```shell
bash download_annotations.sh ${DATASET}
```

-PS: Due to [Conditions of Use](http://rose1.ntu.edu.sg/Datasets/actionRecognition.asp) of the NTURGB-D dataset, we can not directly release the annotations used in our experiments. We will prepare a script for pose annotation generation ASAP. Once accomplished, you can use this script to generate all pose annotations used in our experiments.
+Due to the [Conditions of Use](http://rose1.ntu.edu.sg/Datasets/actionRecognition.asp) of the NTURGB+D dataset, we can not directly release the annotations used in our experiments. Instead, we provide a script that generates the pose annotation of an NTURGB+D video as a dictionary and saves it as a single pickle file. You can collect the annotation dictionaries of all videos in a split into one list and save it as a pickle file to obtain the `ntu60_xsub_train.pkl`, `ntu60_xsub_val.pkl`, `ntu120_xsub_train.pkl` and `ntu120_xsub_val.pkl` that we used in training.
+
+To generate 2D pose annotations for a single video, you first need to install mmdetection and mmpose from source. After that, replace the placeholders `mmdet_root` and `mmpose_root` in `ntu_pose_extraction.py` with your installation paths. Then you can use the following command for NTURGB+D video pose extraction:
+
+```shell
+python ntu_pose_extraction.py S001C001P001R001A001_rgb.avi S001C001P001R001A001.pkl
+```
+
+After you get pose annotations for all videos in a dataset split, like `ntu60_xsub_val`, you can gather them into a single list and save the list as `ntu60_xsub_val.pkl` (a minimal sketch of this step is given below, after the file listing). You can then use those larger pickle files for training and testing.

 ## The Format of PoseC3D Annotations

 Here we briefly introduce the format of PoseC3D annotations, taking `gym_train.pkl` as an example: the content of `gym_train.pkl` is a list of length 20484; each item is a dictionary holding the skeleton annotation of one video. Each dictionary has the following fields:

-- kp: The keypoint coordinates, which is a numpy array of the shape N (#person) x T (temporal length) x K (#keypoints, 17 in our case) x 2 (x, y coordinate).
-- kpscore: The keypoint confidence scores, which is a numpy array of the shape N (#person) x T (temporal length) x K (#keypoints, 17 in our case).
+- keypoint: The keypoint coordinates, which is a numpy array of the shape N (#person) x T (temporal length) x K (#keypoints, 17 in our case) x 2 (x, y coordinate).
+- keypoint_score: The keypoint confidence scores, which is a numpy array of the shape N (#person) x T (temporal length) x K (#keypoints, 17 in our case).
 - frame_dir: The corresponding video name.
 - label: The action category.
 - img_shape: The image shape of each frame.
@@ -80,8 +88,8 @@
 For skeleton data visualization, you also need to prepare the RGB videos.
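The fields listed above can be checked directly from Python. A minimal sketch, assuming `gym_train.pkl` has been downloaded into the working directory:

```python
import mmcv

# Inspect one entry of a PoseC3D annotation file; 'gym_train.pkl' is
# assumed to be the downloaded FineGYM annotation in the working dir.
annos = mmcv.load('gym_train.pkl')  # a list of per-video dicts
sample = annos[0]
print(sample['frame_dir'], sample['label'], sample['total_frames'])
# keypoint: N (#person) x T (temporal length) x 17 (#keypoints) x 2 (x, y)
print(sample['keypoint'].shape)
# keypoint_score: N x T x 17 confidence scores
print(sample['keypoint_score'].shape)
```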
 Please

 **TODO**:

 - [x] FineGYM
-- [ ] NTU60_XSub
-- [ ] NTU120_XSub
-- [ ] NTU60_XView
-- [ ] NTU120_XSet
+- [x] NTU60_XSub
+- [x] NTU120_XSub
+- [x] NTU60_XView
+- [x] NTU120_XSet
 - [ ] Kinetics
diff --git a/tools/data/skeleton/S001C001P001R001A001_rgb.avi b/tools/data/skeleton/S001C001P001R001A001_rgb.avi
new file mode 100644
index 0000000000000000000000000000000000000000..0ea54177e04e0654267aba7b77c936f8fe477658
GIT binary patch
literal 987146
(987146 bytes of base85-encoded binary payload omitted)
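The gathering step referenced in the README change above takes only a few lines. A minimal sketch, where the id list file `ntu60_xsub_val_videos.txt` (one video id per line) and the output name are assumptions for illustration; any iterable of per-video pickle paths works:

```python
import mmcv

# Merge per-video pose pickles (produced by ntu_pose_extraction.py)
# into one split-level annotation file usable for training/testing.
video_ids = [line.strip() for line in open('ntu60_xsub_val_videos.txt')]
annos = [mmcv.load(f'{vid}.pkl') for vid in video_ids]
mmcv.dump(annos, 'ntu60_xsub_val.pkl')
```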
zb!CwYt7jh&mpGAY{i_AtFeXTW7oOp=Nq5$-8X6Al^ck)CS@1!C6HnZZl`|48X zs%babxkiS!um(dC-s0SML zqjICu*`k))&sqn~jny*GwN1Z zyfLj$*6&BrNi@2X>(Ra0HA%n3faNVyGf_*j`r)?3H;%}8$p4}1lc+KH;*zj4vaRfQ zwzIaK_Bg_TdDZ$zu|qUL+U*fE zF;l^slI6r2)`5<~%UZbi>Pseoqm{y`zJsd z5BveX$K0iOn)^`D`$>e?`-UE*j;Y%76KibOog}T|V5M>1c=bZ4-ZBJ!F>f$5VL!Hh zyF0Os)bj;pk19}UJn<*ACO?RW%zEym5Bka5N=l@z(Lrm1+yaI5D3AS&$FBs{g_;uS zlMfAa$8ujriTUGw($sT9Bm;bx)eU6vO9xYTO8T<6QC$bi8gqGb^FCl2A#yd&sAbc6 zC?b?F5^wTt8>ywqN?+`Je2!`rN$xmG3fZLWI+x7wjQvPu7><1>8LD-az`UCX<&3O4&US*u-Af70fd_;{YY-@<7 z#A-@+fhPye^x)8EFkHlQRvI1DiLbF#ig?Co5kfy0628p2=7+poHRTvH(N&A?!och@ zulWYjqvzBOkYHZPa_vg`+dB~bo#5aM;?MvqQaQ%1+k&-$J?wpuh{%j5UhVD^iM4c& zG5L#t5d=QGvY1Z19QjOYEDdfmA;s$CKxo&ZY8Bc7kE3J!h(ms~UU ztyJk>QP0Hu7=du{Hxyb zDpcQbOmB+S3}4#f@OKAafrAf-N<2ALIsQ9&sQ@gmbIhEdQ1G9yNPbk6OEY>Q8}k@B zwzcgDQL*ap%?HgceKQxt(VpH)&+f}Z%_APNhOH7N!|w}dCxkwavybbJZ^2AkED0J!Qhi(F z$7!*C9E3)eSUY!j0AQWX0nqY3nV&g@QG?vU*19hjhuZd@(|AMw@7rcTJogxm{24XD9iF) z4c#k1gdB^U9YvBn3rA!L0uIE5GChOb9v!UU8eZwXm?xjgc!VF-!}NNzdwC_Rm7+*+ z<&$J4yo}0*5oge16=P@U70^?+62Olku){B`TCl~P93`fr*R5za?L}IjCSuD zeqr8I{baR)HQZ0Zcn0xgHO;z$EeWb#Q0ax5(T3fIjUp4g$Nj`X5d;;`)AaXi^436e z+i{GEir6cfzliM>f%kDRwDR%~M%LRb{CW(-`8rYzCn5Ha z?FP-dK6MoH)outr%js*EMubN)U;1PVP3bN}8p(tSXuf1KOM z==6re&wiS>qRjR6`ObLD*mzR6avvy&UAD}T^wC(BGgQsMS6Oltnv(d83k7WYfSJt8 z%Ge-m#@|Ycdu1_r4jyunJnBbPJ*F+}$CkCdg+~VEyo`WYPv+g}WBlt7#+_N9WL30Q z*O2|LUMY`oaG7z4Mx4nxeekMuwP4aGv7AYQetd_itTmE<2fVO~hTz|xb@VF!ttq&D z<%-1ll5N}U-b50y2qHYiI-O@n0Yp7nIY5?|kUY3w$4JJbKmUOFD5K?)-Dr`$dQ7hu zZMYrj0I2mw2T?ozUWMN58&)~S;Gb6HX3ajJK$QoMURCJDl$u}$n#DP?!(!y%^hsN3_g%XejBH5(`A2q;%Ss2R(sg_8Xw?k+&p%8n0#6=51ktwzCkZsK~^z2cIEt z|IIM6ALW{51xN0|sx~QHtwV&QxC`oedE~Tbowwn;=$%u1lFqvQY{vnKS$Hzg_z70l zj$F@DIm+!Qr(dW!Re)?{hX5->46TP(( zQi`X#@aSV>{TE`({4V-8XiuDNy=qSHKGhy#Rb=NL%MzT~%QfehJ&-(_uV&^{J0KSB zG9W`>f-|DE1tUM=WyN0=W3oydKgk;bc7C4tYQ~3J)Vpn3G)SD5PXFBvcyeYoAZYPN z$`UYH`>TCfuY4Hr1qk>>-sqAW-ac*&^N(B%AbjE=44m=w1iE}obvWay(aBB+TSuh~Q}r=pjBsj9&sNrF@wy5_yr*AL zy6NO;Xj<`L>p4A44dfGML;QyS$fevou2WBfFMxr>7qc@UF&~Gd% z-a0aL0{{LwR@2?8OUr#%{~pBu(Bpqfs)V7|<8tGMm^d(&D_yc7f2d)P+@%akj z##vjxkbb(p;ON@SkrA0RrW#WK`HIK_Og&9CY~v@5Eq>Qhq$?FcsajJ=(vg(O|F;OL z;W-TGpGpYC-Y>=HopP%o3^^u?s5rx4^gEoT^a2y|gbP*?zlar&!*CNhoQ*|C-+ho^ zms3B(j4N3%y`eUa@jdbSu9;Ye3XypXS>wD9bD^K^Gi3pVXnuI&RTZnCIel`p$)?#X zd@gk=S410Czol^;Xb}EXIGhOh&SoUBg-L#OCF`K&xYirO^jE_f9QRFEnf91dpgGn014VUMoPHc6`n3MEe>&#|aw#Li~q1S7(B z@Md!u2#H+X-#IXb7xf=WdbHCyxXHsisHm-HoSVL-zW7`vKkM?yLw4|;dgko2lSexB zom!RRHLC=>smQAoO9xIc@AO&S-(k4Y(opX~T;JpB!$BQd?*Tp9q*di)A0Ua#L*xfp zG^fZ=Q|26{L5@KnoSFg3efDXZayoZN@?4hmt4Qt2>HDz#sUJUNjpRo7R2 zz9h{gu9li#FIFa*!Atpiw}*u|rW{#2S1s>uAfGEbrt^qbv>R2i`Rvj#6p7ueiS#4* zK@@7iA$n)7-cg7+Wco~e&4|@jQBP1bW(5Cm$O6_ZQH|kVqLMs-EX#ZDmo#6nf}KKJ z%p~aaT-2#LZI==Ms**b%Dq#rksgqyMafrkE{j|jPMM*x4EC9an+=ybvFcL+w#hb{?GiOCL^73iAaERC^b2&SsG zdFjYy{l;v$ZHYk=z2vPC)IG0Nry!SY2((&r184|-m>rjJG}mk|H8`;9t?(coN&y=t zycWCB-joWy^OV}IOEh2$xLh$jasZohAwMgHYIXoGK+wOuj{WFG4QUcdmBJVW$_*i; z(LxHo*x`1uQN#lNmyFAE_ylY8Ma^;V>erAS(RvTeLn;V z-iXU+2%$KvGn;=S@J&q$g7NEBy>f8N6-1YN$O_Ok=-dmXS{L>1hqzTdRWBe(7HZ-n zQsf4V_ccF-ISd~GM#_P#j8$3{l6Yze_;p?7*{eJKpwzchYMiBOV{M&uIaO}KWk1?* z7ZL6`4pn*=3M>>D@Hz_sH$^lWdK!tAW{n_>nL@A#;9GvKDX@s1Jh5rV&8%Z5c^sI#k&_JmBM^NAqSxrQXiej5JD7-5!)4N09?2}!$aw+pONInVKltpo zMbzk$tRzu4pyz`B4&cX?gSZfciW1P91CYwY40ELq^s#S{41%pk-2D)0$FCs7l##V3 z+6_hnT)CA>{+Jk;#W3Ynl>JIf{vV^@t4oE4Brvni+*IV za2aw$ueC`NJqeSRkhLzyzR+IiVNFah1gr#^J<-VB%$Mae*-3hiI6dgteiVt^E#idS z8~9kPVP!ez4U9na%0M$Bs=7!6QA(iCXlZuB!XgT_I2nMH?~UOZuQ`+rWZVT*_Cd`# zY@C?H!kM^9t4v7nih6^yF$heO3nd`y>G$76lVrxsaSQGaW8M*M(Fey%d_}KTCOWHS 
zH9ZJP%yCezOMn8YUXpI)%78+x2WN1NXusx9BJzb&yQ&x{AE$>XHfJ`Kh_yF4rgyBv zKs5YgUTtqP;@0iWqqqY8wj*^pl+WU#XWK|v}KrR|DPelXaPb38#; z5iR;FY#7{A5*{eHa9cB`EZ7nv@Zkb(x^w_^nsx=kU%hVu;D&Mep)LZ=rhNQ~wc(Cr z=5UIrOMqb+r0;)AP97w9l;2&brx2tmVCbp%lp*|WF%$;lp#}uj97@`#4* zGV@T{Z1f+BmuJm`5=1QFt%tT7sXqk_ON-C2v!voC8=gpSLI-vD8Le9Q0#9hI z=p4%w*!isT6@@ylu@1f#5_jeRs8}yfBJQ$D(~#W7A|AALRyaHf+Af$6l?OWuDAOIQ zxIH)TtzyA{3LacVSogMnVP)mN)7ASVX~MpP^c^!s|ALv)6(^8Wg1nAmM!f*OI|`X6 zFpd~CuwlAq;`==NjR>Peboh~dA86N-rcup;iW!P5L*!Op5H2EpW-chJRjX&2L#@!1ZYU?< z#0WtH8nYAE?E*@g`4Fk4u?8t+4DI)q&;F{KmD(rlNQrF3E#7lb!dD{;Vp&@Q`B{eF zrCa9zJ}4-KkANiR*X3@8R0Q|My@yU-r8KOiVaJbR!(E% z585R_fvb~J;BX=FuYT$COs0J;ue|7WGdgiSI}THfxtgu7-3 z4d5v_@kTcz)bgrrFE!2SO8{-tB*n86=>luQtyG==kWvO-jy;l z2}B^3Q*2x{8dL^9zhSIruwDIY?@~hKT^QRW%`+J30 zEQ1x42ePKLKnXb+uj>!J)`$(LCL(Vs7=)6?ae~NXW?txT5Y~yMBQunRD0(NVLyMm* zhg(6RRR$}Rdh!p>W9&hAElv<3oaue3gGIB#CZTa{MY0x?#N zRfO7Qqb7!|2H4Llb5P39VuLV+xS4D}*SR&))p*v3wh$kI;HCDmHon#1HMzGoIMI>q z4Yv*Z{JaK)u4G?10;ACW;QW2|pb6`E8P$KhsIP7TptYC9X4pG=YHzkF_fp@L zi9nlI;p{;0yy)Z>c-hXChvD)iEUf*X-9I&GkfC}3MwF6UvSe4hif!n}bbP)20Jcgk zOeL%3B7e|H9iL%+kIkv^Jd^-EeRfa91a>*knIlOy+y$Xq7iZ+9v0RO@L1|iqKkGCM zc~qF>SL+@V@|cX&rwIjPY$f*mXbC^DEyNuuyni$PM20&bW;#Cwxl&N`E>gDMI2-T| z%zKXC=Cr-1Y1%tt>mTj>Qew7lCD$pWLq;Z%h?5VjrtljhT0=nNhMydEX;=WX=tUW`bFC*yN4@RA8y^le1xvfa~+MRAGy?v5itbs1l*i3$`Jd zrNjdtnW_I6S?3ynFtE_;nbl68Wsg;KXfOa;=*38M$Ve5mg^#Ba_r#ssDUPe~cE1=Y zUtKnlp1OQXO1YoQksqmPyS$+3WOe&1BJJOF-r5c~$x@RaJlPlT2*NrKe|vtA# zWVki}Q_5woCddnV76v)i{HujN#!?dQK;9#A#VFzN?XdoHaPc;GVnoUHI;HTW|BsEA z>eKsEH9|^z^VOc;8c{|X(BQ7 z+>5h~@_dz-AG2+Rnv%hqi#)J+K^BfkJ`NyigUOv{-smUVIz>j4@+|CJ|T0LM$1pw#o5#ZOdp5aA)c9zTRBAMpMe#9@X0@}%>mtUOe0qQlPLlxlR`-KU8co_+~XphT(TbSU;mxGa+vP;17^xI zLNNs!k6Axi@x8aWq}RF7@!9&0Mc?$}hrpS^4tK#%PxLzR=iyIrl*n4wX$?K4yvvHy zF|R^wBIpowpM@fH7Cn3}5)FQ31T+4ks9ok6MN~O{9&BE5iaDs_-)*sbS@~U!3Gi$Hgd%twlUGLM1=Z*F{{z_3)2U&=uYjSUF_fF8oQ~D)dD6w+gP>X zql2(!*zfunrk>_r}4Bw#H!l#wiI-6`_h#|Wti%@bq6i3V+zC?(css^@VD zg|3-X;oAJ*OL!hKD%h{fp*Q%0chuHNx}~d=9J?mo+Cfo1%%9#T>au zNbS!tAn9a(XN15JL`SfRZQng%l(3ldpO9ogn2;lim-cgJ>2DYHN!H!iS>7vwQ7(rr z17YQOm^s)w4@fJwC>Qikc$oYn%vN(b!vFFwWgwegUQCPsLJ*BLJsvWA6l30VUEW0O z$e_V6|EMw_Hey4_bwKaO(X*5(!WC?pjQ>Af+Va?AV39)>jbuuk+0j`6zT3EbK4OII z*7Fwv};TIHpK~atGrrTejo`V0R~b zyD{DcJBo<=jfM)^(;r{9VX?^Ro3&HdR=czJU_BtII*o{xi3>w$7XC}FLY;yE##vlY zYm&Zq4TPCT2|vCf6!C%~+2BBvX#lTZ?c3`8sj3=zuzQAMf%U}{0gJpy^)7zE7{lgS zOBRmh>Ivy;=zdxXV}Sf-Sn9^=DIN%7t?u?npF8kvl8rx?@;+I#MhV|-s&+)CaG|k< z+kDEypz;^W!D3EtUFFKNg=4~JW0_%U=*Rn^Ny=$sitVw(tv*Y$S8cDb+_})ckGJ`E zG{q@Xp_5i7R<=#KotAIVob)1)+YwC3UnSpj+L9MY&PH?j$aaeu?x(X@X_`chD-<~? 
z9qJqhW+lp(S_Wep73~6rWM2a`G$cDUXLtd+xrWHNlmGYJS!r3}%g$^fBmb(zi^CWO za5iHl`%VcCa`dTZxb@)toA-JSdJb}|qSX;b?Ick(lw0#6jyIC>1P&`bE3s&A&R8k$&&DT`6l;pYc^ zN8YMKG-@8Kt=!~=MfinJ+G8I@V7Od13EZo_t4l{OzGEM#OxFaX3FcDSxpmh z%P@rmI3)zrg{jh3Fdxo_#-Y`6g{a|=p8D&I0HpoMB+B@{DaJ9W01}fGZoguU0)Im1 z3jv%SE^BaAW!f3BKGep&_hs~s&I}(3YCUeR%uAa+3;;3d-{bjpma2kBpNoWz90X8t zU}^vU1a35yRpAt33ARo*EF8H>D~Gx_8A{+mUS?`>XJUAEia+V`H<$6%re(1_mfevL zCEgti!pl8G=Wv4wnA_sF)r>6h3ed;eC>^JHOEns#Thm5<>Ic8A#B?hhNlnGUO(c zP1gNK2|%G7veGdcHwTdqe6K2qnL<&M^qEk`CS;vYtAg(&>OSuuI(+&7^)`EY9PuBNIQ#f&BP|{=V)>;l|KR4<*Y+oE{J#yPh-xN6f_u+R)>d zJJu#rT3*qfaB_UFkcL?lhEp48n@9$(+(RgYN)EH7Mw0zei2^rtv`nFvS=TN^T!!s3 z_%WXXX7vk_I#Jc5oQqaSVPXnprRed}H%NhUYMj*!(Q?|;2J$nj=veSZ^xyT>zrU^W z1BA?}I5AaKU}A1GipgtB)%HaPT1u;PKY?FAm7vSLfCn0cTn9a_Y;l&M$WeE6tvm|r z4};akEy2rJ32BeC!m8aTTDj(~pOCek_cN*NhSegOv`jP653VTRwz*mXZRHt-ov(2V zHGo3SMyKp1`c&UyJND)Yuqfav`cRe0G~ygR;TVv6aOYb44|khgqBUj^ zGC|XG1R;(YtI{1*a#GpV=|u~GqgJ&Z72K#q@iQi$(-yKpQnESz!XkwrSGL6Ssj;P7 zC16jwQEa`5JvR#kjtZBl#R)XO_PKxFb*tJ1LlEXhylZyK$;|$1{ZksgEZIwWa^WK$ zceWdks1fJ8ldAaH?#-g>xM-wmQ}qQ_AD*%Nx|4`165YysWHX2T6J^E7{{;=6r&Fq()lxqfl+l7; z_KW~?cOS`~ks!{F6$akPb{zxg5N%ef$rnQ&+uni&_OMdpq)9P2TFD7Ua~UMDzx}`< zp^aF-<*Zo>?x_kSMz4Bnba7Hpou%JL)Eb|%iSZe{lG1}l4nMUqsHwaT9HjbMb7dEh zxuTDi!4wC73g9r%rWody1x6G0-bPy?Q!an4MhKjjT5aJ$V|R zdW9-eW#7r8N=>1Z76<0_-#}A_c_N^;OI#0PB!V(ev7gj4|13p_j+#Ws4Kt;{`fTtl zu43E<*Q$*$)XD;>(M$Q80FUNzS~^Qa!n|m|$f@NC(c-MB%O)wIyyx(ia?wA}mep%t zWV8G=h0%lfpT^u*Xt*z{{#X}sKdN(;DV*m(+!LdW81oz{Oc&Ex&y#~Ev^CH`B>v}YzNS}bm{v&BBzdLct2rxoG zGkK>2#_KfQ&=u44*HJ~y4*n4$bHHMJ_nwj$RGrd=rWarl9Y+Va{`5CX{<3^d#@uLG zms}Y!I-)d^KahouC8j!xe_%awPd>$j%R}nI&l}5-i1y5t>#l=)W6Ki*4r$&LgB`N7 zM7RYO7V}z@VSTpnzM#}#`e53sXD=&GD1XuQm%Sf-%slgJrPR|o5aju_?Qu&^MhyD+ zS}h4c1uRdnRnF~QOWj}Esm(-&g}W%d*_|^_u3^@ndBfkdvx&;|hnRK(KfB%uP0xr7 zE!-&Si$*vTMN-I@AYuA6$e-6?VWJ7KJIH4Csko1!$Ck7={K>~=8^H(`^X~To2t-u` zmdmgU0F9ZHr4sPT&{(hnC0lSz0ch`DJ_tHYfCIQkt~|iUR6iJNwK)4ZKJOK)0gO65 zwKm8=SzI_s5);&WZ)==(9W6m1o3Vh!b9P55fLKUs=QnkPq4$7&dT#cn1)mvYaL{-~ zK(y~f_H0;H7(MRjcX+xgtD7KJDk%BoJU4Zmm>XP2=Fj^0jj3jFjsJ+hiS!}bO;#ko z5N93%C+i&wdO@vzhYiP@P3yJQ1gi`*dTHzurw70z;Xn-zg{FdG`d@#PMlTs8ZE`=3 zWNkNGc)-QX$RxtJ3>lZjnD)TwOp&x+fYA z3WY1liuP{D`~Gv{cBJW>%wuW^BL{HnNfCTuPa$l_yS!?Nql7o7HTruBE@;RxZ%$^V z^EkVrK_Uh*=8Y~yL$&sUb z=3nW4){Pqsl8LQ?oO<~{Q5BI)t4;(@h8w6)_v1EHXNT9-k!e*TR?gJ8pcuTR&Ie_q?ePpbt$Q(av6#X_7-_e*>VC%}OJ z7>RMmpI^_TbVt>$*l)x*E?MIOQI!Iiif$TYsmJiV+-yH5HMHTym?)Z7KR}h&ZKGO^ zEdM$yf!@UtD$Bm0-5OEAUSkRNMjA-i^&MuXZi?rJl7MhVkmY;!?^O+^5;^2yaKz4g z3J@`(s8bI6ZEva1E#uc6v+5hP_wTjjLb>7sv)b@%0=s z@%`&Vb%bT*r?pGWQMl|)s9_a6&+GQLCzB*_aQWc)=|*|NO%$0Ch1a|zumyDJ*qL4_ zIcijoX(@zSHi+RU#@-~v$X*Nzj$;km;odhN^PaC4V8z24#bgSE&b6c*NDQaP6W9zJ z*ulI!Wux%XmO^!O+Am%05+mDgZ5tGTvlzkcP$llpb>rQ6^xx_5ffEKWd{S}fR=E#h z6c5SpQHrs*8Vsa=?bzm1m><&LG`)sr`mT1?c5E#GTS6JKtXCw_W)Op}DycWu_1_$9 z*gH@eg)VKLa;pSTu5txnkjrDTTwW0e1*yR2J^G%NN(K&K(~Q`5rPbW;5v{eakVy$; zzV{#6toQbXvb4bQQM5~x>^O;Zqy10=bY=P9?J0QNcAP4mp1^vSyLUT!nE5NwhxuX_ z`rU$n=CU=Yvef|!H;@BFj=s(`y-4hN`xlxZobJ` z;q?XA4ie9g);WjM77L$`QBi(MRl63(*^u{`f-E4k+!=plnu4gvf)Loil7L2F4uz5hCTw@*OsslpoevNDvYVinMjt_wCV zk1nz8hu*Pd1i|Pu8prHsXf7vPmg@*tjQ+1pAPo*m*dI6%PqsnJW6W#EWoG~=>r-Z% zT^h~!^;`N)BSgYBC0=8UJ!V}~sir`buKPSZ(%e6qr4W`Q&e&Msrnsqt5{@r-TAQt*MLkBQ<|t==I0V8!Sv)kQOxiijXw z8l~o9OP1#B*Vb!G`a%n3Uk=nFmA?uFz;(FZ%C=pevpAs)l(YKQiP$ay=@mj`h#Etv z;~-E`oS~TcVSZJup|nCO30|{X)G%?RP&CXNi~xw5&ZBv(^ZbC*YgYl zT=N*G@9KqLD|x{r#dN?=4fLDD0UHit&NaK%t}Qo_`$nMXP}KF>scRfX9C#_^T4ETK zuL{dJ&hNIU!j|`gR92|I*?HYrQ&zOm+a6s|)Cz74Z$miSUpBrs2Ao&~C zpK0=Fa6vf!J5w6F9#qksS;EDE1m`SqfY1PX^hL_;?l?VvCG2230O+R|_lpr;_a!N5ippOGn^1Ok 
z?8VKLW^lhiunI1BX5KOmRwj_`Jo|Nc_t)ZTu2ZW{U=1Gr8==Hy;u~-JqCaG?nUi&2 zmOMBI7u{jI@ZKs?InB*~AyFqgb$)7)9=J0d6_96W#cu5J97h{w&Op4+gURM4BF%vQgYFygI zR=6@jr`wC{4h%ImNP$|_sAMkzQ-y3$#qOb~>qEOV@APgvYS)cwt zFY~$+xtJ=eWkTa4ulJC{At7ofx8R#<=@QjnWO)#F6q*8$bh9ObH7}BPZo;h~b^ zWB!=Z+?v=1Ba#m7N%sj*$(l z0#se30CP9Pv}5u)z%^j|1}A9 z3Pi~flSSy(Z&9sM#O#gs3XI)4BCmwW9eKzytd~7({=t2}itBZQ za3wxugt*&jtE&!#{0+1ZS7v4t*6(LH@4_c^^l*%@fulg&1)^kXz3^y@F@;~lQ4lC+ z6-Q!&&<}Jq4NNlt=cjcEQ4EFioreIdUcIDfpejCHu}i^Ke&gxSZ5q?$L(lGUnpE|o zyH}WjkL5bDV5=%>5oUO&g>2v~eU1~|lf`n-%&_dmco;5S&GIb7-0v*fZOt!x=Pqhbk9{PMRRIOn$o9m{Hs z6!bt46=TbUfAv$!pMniaFzD(E&pk))t3eUlj&3bW_P3fUwTJFB*>7iQ9qt2or;-{% z=rk;<$St0YUrX~*Rt&C@r_#Yt+KH3lU-<9)kY_1~8Q$?x6H+|B_x-8gEv}Sm%VFq& z6u)lFNdmiPAN>{yoj%pLq?5?Ge6yE1>PCfJ^K!!}^@BItW+<=j(>3x;P~80l{v+iW z{q|UAkx!{Mvwfsm_Q}R(9GuAqC_FC;MANyblWa>T4 zlO4H|HITA|)DNZ-65jih z7-(m8zW)JxeG>(ElJsd$EkDx+63LUrq+Fm1@b&e*BPWKsQXW~3N-QhN0_sHhIH|r; z!(0BcV2w;CE;6t(<(=f=6r&$cfDBKr;r|->ps9pC2JFSihP`i?7df+yg1>9j;6Sd@ zgSa(j7WH4ij0qOB9f;m-u8SE-AMBH&)Un49eXD2^>NCkWg?Ld1oVw#c1R37Ng1`-91{lbJ+Qd_ z7Dqa`VCM*8GUo^D3Ep@M!Rc^RJ~PF!NM-1{p&RZ2Ceo8p^W{#oLteU_Z1C48;+(@y z3B<7N2=^T-VoKPhzx!xz@=e@TM@JGl9|u;AR-~9`u$=PTvcczET_}9(<3HjlEyt!R zvP{x&(J2%6c%SN%IpJVO5+eM$Ry5{rc1@+_0<;-IM{@ZIQqRvm4J^_->qxwqmRA#- z!}x7pn59-ia%L6gI`vLO!Kq0)qF*((jB2uenV_N(L=#HIOcczZ-aJ7zb7h7w5hqcb zXVWs}2}J$2K1T4m?dunKO0#M3r3cH~e~YofzLxIM1u_7$8xE0txysGFHTeGcs5mmj z|7J)N8O5ykW=^P3jxFIo8*!vpcl)~}E{Nc(18sD08!jCK|H$;H69xk9JhOnc0R{tX z3o@lzkq#au^6bFMNJ9VveynF4T6VA;c~3~X{&XCq1?VYx`%&osZal3 zNgUYgC-q65@vV;cIzv^}h z#s3uJwHfPz@nGs!H=G)rYtahRtwzQgNeX8MvT1!#7v_iRP;2WGE43Q!++ofQ!VO3* zQ%Y)INkuvm(9~V_p80SKxGofuaPZsQPB)b4xtDD?zJ}@&9iDu06AG5_nkwBH+SytZ zMu4qLgy`wqzb>qJey80{T~MoZY>X(}L;%GqsW$OGL&GR_b8P#zLN0BSlz7rcbuF0| z*z-S59uXvu&gxQCWfa7x%o#m?lG!4q<#Ow>Whdx@t*ce}m9G|#h}+?CL(e2g_Vd}6 z=+9+2jd4Y5<1qlOee8bnFW@IRcUHlTT%U=}56vJaeMev>qTM;Y=I#xQuU-@3(q8=Sz|{eUa@n06_aK)G8s6xIq$LJ$L&XpejpJx{jE-Fy!IGOaFCd`LI1m7=#KD;pelk@3S7(6xT&=@(1Hg&++g$#E7HM5^~G+P}IoX(0wKR_x_Sc zr3lbtw&%fP(~8PyYLZ%*!P%v5>sO@eGG7?$=t!;u795Rb&yMvS>{ zjJSSBrS5y7?2VIFXgRP7KePqJsc_Yz(!AM`#f#2JkvH1oSESfi;AXGvW?Rp^t zC6lLO9nfsl4BAH}<(6M6h9k{x?nwdi->5X?ShZOg>m5D9vgN7kpZUv{nH}E67Vn9h zMwH=T?0g}aMI#9f*LiU%=z| zep&^Qd@$!z)51_U;FRtOWS)8tWX^sQ?vpo|{JvAEg2+}q{jmm=q%w?j9eGI|nF?FM z07e;5C!RmGCQv~Bu@~`3`SV6DiVjjS&FH}T&yL6uBrb~bM_D*!P?Q}NvnpmA%PwlbQC@+&gCA`>Gd zF34fQlw9i{MotL2v1GM%ukpxmy!v)@44*fUX-x^2E$H0tym$pW(&$P`%UVP;bfq?gbB!=unM_icCGC5n0 zD3X(K&&I7Q$`w{BFJ=>OO>pu;wV|4H6GM~NFEL}eP#DW8U1w$&2NJN$6~}pZzEuuQ z{zMmVF6?SN57{KB9~Jp?k{>HuX;g~fQVXPevItRTh4BZO-}sC4z7QPX#_r4vo%aou zP0fYF4SXOQguCVE`XH^CarcTLLi9B`CO_m=ldXN8HIraWBI;E_|GSVFT49U%$pBOP zS#*?u1HEfTPL0~C%tgL`0vCYrjCu#-s?l?w7(IT%OEY1R25KW1q+?iwNp0$Qj2MtW zsYU8qBDb^tVqtjTKs=p0(Jpf27no-NNjTXb8LPkF#hWI^F3P12a(bpmi10%igEZi- zOR>?nt^FhmV=UQ-L2k4coPl-K`A6*@tvsao*gh#$z%COWolcV7x z7Oac0RN=k7D>Ltm*^O>5AdXy4t*k;jus+x%^j(>9>q2UM3td9)!_?T1bY%R4xF_*u8JWlvbS5EYz`zLo_%(DF zGlRe$>;5X_j2I=_mdp0Um@*ToeD+SzqIuvI=z)Yy%KT+uT}`3chB z10ujOi_x*O1p{tAK|~ki7bANiKxJ!nZ@TSglS$o9l&!Hi)5^VMJ$cl0hQ`@u!4R3D z`1we7S4VBVV0wnetW3wF^609jF<`7bGvx+>d3dggCs6uQ(G|@MtF!sua7;WEm2-p$ zvGyW`qU1uCn*E>(KG|#;Ydo*{C6%B52TmpHAej1yzW#j<(>Rc%xNg#lqpb_h20im# z*^pSU3JPdHOkTLR#h{*91?U0e6p!DtpHCu8xA1y-pZ`m=f2p1*n}$3VP0>7+cf|uQ zs&QBV$}+Q>{UBe@RfX)Pu$DlWkIg6YWYH@-lN0dGI*Sc2fKNikiZy?VzsooppJQcZsHk}Gisq7t9$m@C_3 z&NoFQN+f?M$dv|Y#DW4azlni~t>QK~X>8U9c0egI+^#YS8ohQQH@|9@%}<%Ii5aji z?w#GA>L?QVH_AfhJES!!{|j=u<`c#ZDk|}ik0)VI-#R(}gMlsJXXugU(GMB4gO^tY zZ%z4^d!b(E?nc=G#vnC0N;*Qcg~~HThI%*^MJSOI zU(C$qM|G3K`h9TGMXygvT&$FMJXD`0L1MNj;VhTZZY95K5=rN?2FLFckVHRy4vLep 
z0M*PAoJTziJ%46-Nw7m^Svp()xhoz%<1Rz2^GI1YyB7vQF2L7qUEPG~vya^p`AjD0H zU()e#q!*SG4re`n$D@Jg7*!L!U!M$8rIA~5^A(81cBhEyS0`PYU|QNC`tSr#C#-B3aKCemdv>;=JT4cDQrx`}iQFxo>{ z_}duyH@Q_yE-L{k{XL_dOv>%??^fb9wkuVi{PkRJfpz>UigMp{Hv)^4=RpQ#wnnpc zgYz3;@;bc*dWH}M@Iy5v%x~R1Y98yJ22iQn7_n}9FoWsDycyn+-`}v+JkdT_oDNvYVteI3J3^V16~@2fC?de z=)7wPPvM*}hpxm=joOsK$+m!7jjysybcMPV9lrET80qVkfiofv}@fI5{oXH z1`%V_yM6IsTK&_2mgSn;%B{p1Id#P!ry5%jX@q^2p8Bq}I2#C_RNb_8yH3cWZDt|r z-;~R96ST5gK+i}~z=Y)g4S>MP@Y8g;S zc`-@6Ia-~gx_^eK6FUjV_pS6KsR9s^T0wBcJ+`9s(TpMbfb-dsEG=6x6=LUwGz{@s zVy+Mw75{jCxd{_*Xer$FL=YK+$%vnk=94~c#=TPo(K)*65WR^~y8{k%mS69}mB04? zF97}MAb(~hjqWv^!LqGp3_kn%=TncZi#o?+f#wdepyN=8?V8#r_@4-EZoJ|X`$8u- z$-h^QNIsiDl%i4~(_uY_VLk*aMxf1C0C-u>%zMXdX-%yl#1>qwSbv2hOkgg1$qZmM zby9T1mBNWS7@qQLjaIj_yGU8bv>y82+EQS>e$}CTS?^BP>C5O$Lb*)n8S=Hh+(3W> zsyW^YRnbUol1q916htKJqy$(AtfA0d@?l`QgvEi^ZlmsU-nS9^Cwgv{pX5rt$#a9@ z@XnXi<5Xsk?5uzyY_+f*H@OaMsTv8rSf0l-^kT)H#OS%0Jv5kCyG{D_Op$2xdyXC27z=?|@@($Nw6j=(x;)?* z;EiT-RZVKI=j<#-nN72i`Xn?5=N!3Vr_N0-p1letHgT+g%HMtq*TL_D0W&-9yKW>ssk;yhHZup ze>sZGGjObm5P|z<4}IZ4slu!X(;Yc9Z(*px5I7ZiOv`Q21aZVbI4J zwbw&7`Vsn@;Q(yvd{&MIv-xNtOwNKO`Q8^7wLqjuR z0{_?v(G+z$v(A?oRF=A2a{h#c4%;R(+nU!iST7UlZ5Q44j()Z0uLJZc-qhFRx7M;0 zBh2?YZ16nRapK}C%1q=={7B)ByVtN|69myu3g&W`8i;<(?1n z5Zg$mB1e$dEz)4_nsrC1_a>!hh(y-MB1?z=^{Ycbso1m8KZorR%+7}a>7!jk2lEx0 zS*L*dTi1qUQ?ww`3;p>Jd0lNdrnRXq>LqH;-wl}_XWBE-MD$EK1AYkdD*Lf4;**_M z_OIfFQ01Z^L8v@@>6r-XhZZB0?k4RHX6BH)`R&6k{>lo~I{!fcP%E#!O`8*FBwTA3 z1NaOqF+R6UXn;1;B)TVFK`ZpMlhGoV>WyLJ|Ncl z@v8WNos{AU%4H){GsYGhI20y$FNvKUAD^T)2qU*>TS=WP@WLvJj4w=>uxffO)NaoUlct@2!4}v_6|c3 zqDo)HO)JLz%Re#=z=7gEz?5WuzZj_2C|T5<8JzvNnb>QY@gkv;>Nz`A6W_OkHj|PB zA4g72Y5hZtU`Fy}u{rfpk|V6D2qW44yNB@0;_M!Q&z9UGHWzV=nzJXTfnaR;!Tctb zxH{2d1I1S6dP;TxS+ga#$5`;CtB^=mD;D$t_ZhRsENUXTqN_evwT_%9u z+GVpQ&n4~A(yU^l&iM~eAn8SSg|EkIJ^-$cK4@ivG3jor>%QreqnZeh@|d$xc2m*r z`xBxIi-i-pm%o73+*G?y8&Hbz-A{K@YP+1w2igS3Z;-O2L@^+&p6qJHo(IG{rk!#c zMM7i5B2t6uZ-Z-EYBF&Wo6^HTDQM$)*;U7H<8Fva%VCfm@eWj6d7Uj*68zQCrg$jo z-B~tkt&7$ko);Mb@YHuuA_I|J_tCh)giccfI;1{$cUQ8(7lcr~t^D9uXUhglEk>v?=0EgA$#mz}BD7 z`f2H0eq9NhbPurO7|xImU%?teXMhO(cpsuj!WBOBtzW%xoI4ZU7_YNC25 zLY21TtWy!yX9+mAUI!dtZl0*i{J<&M2klV5JPVJru$RT7uD4G^<|Ncd%j-DY(3zXT z%;>2H;2Ile?%cTq6(N5kHw`@h*bZN}HrToxgsKt#{)83oe+{a<2fzgyI%0# ztETmOi=^0PUqJ^IEt5yY)#D%89fQh(R)m^>ZNpbnRIx(s`QMkCLNHpxZRWT>Ey_{= zMPJq@$)S^PVamwZw4|9RGG}u6=}6J#ADAG}-8` z*(nI+^R!8EW-D`Q)*-QmMV!P2^Vf#1$Sqo%$s(y2H2}*dJ>EjjwsnM()q!#d`;pmX zJVdEwr{_6^0pfqVO0)ot?1{?#$bH5zV__1koDK&smOJt1#@&+RI+UM#sX@U*F zFdt2ln{$tzkzGECB0YMuq#fsww5qkmHrb>r$P?cpNDHKdv@tCxFx0Iw`64ATjGy?3 znl2Gdv9Rt@T}WimYD>XNEGO%g6~7V5c<4lWNVr`VL1#*_Tl;WU}2lH zI*Fc+e)1J0($25E_8CzZOz(^9YdN2UuD%&zrr&y!C07&n{_CwU?ET2$4T!Ow7p6;J zZXDMLIM{VfuMi{eR7*YztMWmzS!ySbW>%SX#nP=88=_8WX;UoOEKOtcerWlsb9Gzx z0%j^WOW+3Ic^{i5O5$s;)*^pq^03%s6=-UU%cW-rO`B8cA{u^3fb^2u2|Ysx^=W~` zJfpRAh44R(gb+o>*+7r!zXcotF(}!shv5cYDs$oP z$&yeEc=Ydf$afnZ8DVQ(p#&QcdIGD+o3ItT9>{Pggd`f@sTh#zZ82QVLr zuF{zb@i4ha+v73B05!VYF`WkTU(3VnjX#Acb4ar0u{l5DKF;3xbhvaLMX7VOEfed~ zh>`)KxFq%jldE8}AENrs7~LVY;Ojbx6<@Y}Ln~5HHN}XPu!#d9OIt^CE_ChWv8YzY zg~Z!CXS>Ofb`j!b5LSdQFIeYgbY6&$t!8SvFKZ3|LX7|`{fr@&0=9usj$HRXTe#(T5q&CY2(@JfQk>)`N56T9%8 z^Kbjgs3~&d=dL`?z-gW%kTKK2bm}hlr4%7`RLn`T@&4$G2`L=FAG_a1$QvWF1A+Ke zfSj+;R}v+m=ywl{tqBs?ePe9T@V=|}-W_fx{uj~VAHV)6hB9$8&c@r{I z{an4Xp@)7u|MYfQYEIXH?9c&oUz~>mxUwwBi@m=TI-9Q?XniNWnFH%idg-7i9hMLh zk!tT5YX`%-b3uR;!b~{K z(P7tF{ALVl==h3mlbe#-Kew&0caRV(SU==|vLMI$5$Xb-_qSbcXJKwId~E$w57M>{ zZFnY}!T?EFMQ|=1XoaWm+3LCTz9#u8RnT!ZI3ad=eqk@e{%Lb!z^>?2bH7N28tMag5rkJV4A~UE zJ>}AS&V@N)K#pd0MJIS&gkAylNKt=tTI`5{cL2L*@bJ(+V8P0!-cg1q6IxrVX7C+U 
zPJxBjdeD$3Nmf}xNb<|0a%H0#V3=(NO@PEcd*aK3|W%P@v$kmu6BA&d0LUYeQ zg%#24bhi8B({!bjx2L~6!vTe8Clq&q(qiI>W0F0Kc*!;{#I?xpj8P*1PY1H6n6Lv2 zBkqA9J&dKpdIYGk`hNJD8n`W~_o~(FbXIIkoUx*i`E%5Q3o*^Szlhz~Jf;9JOYP4h z=^meznl}f6I`|?S%iSN!cIYA%ll<<40}xPZHHwg9rmX~p%Xc$3nnsC)D90*0X{IY6 zwRFE8wCvn_M6N*tGlLBpcg_q&+WR5IGUr8H6xLusksjZqapM6NoDc(U6jENy?4+e6 z3q^EYqxvy-4yMFteUeQYt8GMt+xpmMLTb+NW!lF%Ph}%F9U+@;RG-$^O>PY+|DmdO zY@BI}aso?mNN%qG>V%Y`{r7yp0t^9ibrgM|4E)k5&mMGsjS>P?ew7UU69AvZZQG52 zn6E5p@zwI^Z~}^{ZrUVRMi(g;Yr}bkBT^46`Ki@;p%Hg2vCY>1LK*(WVXab03YB$~ zEqmQ`y0Mie$^%IAZ{n9$R*Is~^J;mOL0B(h;nIC=>YmFE`=)~2Zyn1DI!L-Jdz-L- zVo38=zK%r{WF+QM`)|TCchqta`MZ~rgg*Rz)o=*@BGjeP;ukX(l~GMdONs%Hdqj%R zT-QFU5P{f$GKKo>hvR6Lr zsV~_CftK3hJ;dEF+E`JJTnEAr>d1JYH6`X*7QsC-l%BSlX{N~)a~IY9;g^`*@`o$W zjl-p{i&9ukbnNTC+b-0C%@3rJW1=p!g~@`O;IDdMjJb9HOR5fZwB(^@s4y4yk>v^+ zCd4URfvFU#Vk9_(*g{AT%iw12kt5IrU2|uy<=4~vtK2{9!X0}Lhy@vM!UP7?bjVLR z@B#+?fKVFaFjsM{L=_PchKYIMzpI{6$6t0841?4mPA1vcjX(Cl$o-T$Lts2;!2<7{ zkWN#)*S3hL=q z|KCa!9b>ylGc>?%vXoyVo@VZ}F&*J(t_GhZ{aqVH%K|n^dTfvZ2u?uMh}t#?i1f0n zjbuhG1^WLmr+A8?99n;6e~9!ti@6U&3*{u$?0U;C25ZA@4$v~{3A2h8#5UdAi#yV1 z+@CFIacjxWcl;+yxezW7UG_~q0Z^jbG2Z=HW0<}UNCxi%Pc zc3VsEa%FsE=C7N*2erHr?Xg#PHFLUBn8A2M5Niki=x?*XJ(NB<%i&1ZUeFp+5^Q)z z$hnfYB!-BQhe1MCCIE9@O2n)Iy-aqcWC?ck`9geWe3$JObnBNt#I2V~NKgtsFCmZ52zf;$U!>L)a z2#pv|uI-csNC`Kzo|13q`432}06cmmgsu7XT{0t{z_+^{xt$4N{HwNL&x~^VUc&Ga znrq!BoX?CbF&3+hL7Z7g!4==!F zZ4VN7(y9#v$5e9fV;5qp47+ya3P(tHD2gO}0H(}0VMtn?c`6EJxO><= z#YB>^WuB(OsRJk?vh-TrZ46S=HR=`z_pz7i_E*3b6x+4I z>o{trSA3EN%#A742SkZm7ta~7@SWS4B5TR1RETckg$&S8no~X zxNkEmO#iHjW!7KP-R50f{_B>~4iJ#g?nK+KkT5+yU`03tiawWFRufz6;#Fv6Nk2d( zimQ5Cotd$rJ7t*Vbl{qHFzMk2RE~pg`afuWq?v|7EcvmF#knVG(lv!Oa$D~CkxbfD z_%ZjI&W)iMuZms71jfm1&nbki0jM9p1e?YaBTRMw53W@}62ZgxNXGvZ%(6>dJGU)Vsj3UgPOWBskJsVF#^3p$x_>_phU}r zl}=FX8f#w@F32Ik=W}W6W~i<%tEr1EgG_xc9(R4 zH^xnznkv8hTLb4It6kS^3t3wRi(RfT2oqMyU$UXF+Z z|8q`B1{imdaI`+D`*@ZPW<{=~W~*rS2piykCn{Q3qc+sblRPUVBvl>j)?q=;G6`Eg zrY6>=9B02#e#G+rKs6gfALlG-+1PQ}et6|yT@oYt&kZN%7Y7W3lq9qckEz39X@l`pvYkp>27Vj96D%`pkP+t03 zq7MJ&y|i+I@c>MJ%02*MK*kpMrF~}s%*b9`lQ{bRZ?Gn2i%PH3T&X9+cR?P+OsejM zH6^y&0O!J<@Ta&2|71*>jPrb)dxK<=6$TS0hcUV`?pnGT-X)=tv|%Xe*?~i2ORgj% z+i;0DQS!o>`ASq8TZd`$Enn1|EwR$wEa&=FuHqDpOH|54K0!$Gi1y#JzE=3VCy}(s zbX4@;8ZKcro+k6K+!Wh0KihU8n7ONmEO7sccHLoMNwn6Ts-JoFmL)??Bum$SzLZZ` z*UHi77c_n_fLgmCory3_0y}}4Q~+p1;`NKb%YGtIcYIZV1fVzEn~mY!u{7Y*C}Jdpx~ z*`Ukjz|tv!fCQ5$XEKIoCuk#$fqU6{=MuT=nyy?+g43H7$Pp76-_Cuk(PyS+sDhDIp!59fsX(Y3ju+ZXFEN;o7lBdX^UVooh#A5;9iRZ^XUw5 z({NZVCRfZP3=Ew>#9ra<2};z>SyHm>=RQ5C=1Z{#WJ2e(<9j6qUoC6oHDkL>s%BeR zeYCP9{eUU);F<*qq`&eJ3vQq`^AhvDL1}J8U`&4lgjvYKNYVJ8W4f(UATfIDt(f}(x#3T=(iJGDq6jx=S`?(z3-HMkFbEOI4 zwty&S@uIRAix!RsuPL>A{lQ3+N4eZ34jtKUrX4R(V8KODVgB(fF|I}bOH!3eUDXcY zfjFej(BdYtbC#;bVeaoFZn}(dx5nC;X}h~ho<>8(;lObn*w_-5Tc&(vkLvis^r+xf zYv6mXpTZM^=#=f7WNfQ z>M>^tfhnBo9wfIXaXp0u5L$zCJCW~2OX(E$`{)v@K9UaN4#Pc=;l(aFK1fCI36Mwa3OzC}0g%Q%hvB)0qDk5=5DOy>=k0~#7aF)t z_!pM0@}ovip*&rFbVmoaY(Ao27uoC_@mdFMTJv^#;8sH5STU{#P~^y6z#{RF7nZSa zaI&XU0=KFMcmZwUEr=w~o3#hMT|0UR2CEwkPZ-@xBqZSdI})rzF@vK6fr*j;dhQn< zvaAk31zhz^pv$O&N`5p>;3xD^@JxFy_V|yBj52l5C9oX3Ib~m4D#S~HSo{+r3yxGI z=PfD;Yv>Y|l89Qp4Q;fnIN&Vn`E;yyyl>*o2rdMOv68-c+C=)2&kL|g9%W(w4Yinc9e7Q+PN=sziCJ^Mj@ek1e~HVe zSmg-tWWn~Z;Yl}}sO!rRsKA;-K82Y*4xP9|9KrSJ~7+4NZ`Z=4FoL`A-q^)}hQn z;V|;{gX|VYK3g|wkXHQ0w@`k76mbwa6vkyoeQVdHl0p zDogW4R^@<>U{#-io19EX-#^nFvWhFJf>9o=;Nb^lJNnJlbw>ab&~MeO;EsYV@OyaS z#ka8i_%$-XHsaSCN=2sjeq*|e>Tg#G@OjnK2Q9`xc3dakmMdxOz^foL-`;^t$zaah zMI|5HvmDgQ#}AUTD?KLNYm;LFnh|eV85p?eLiJ^H*W=D{Orv_Z)xk8UJ7M!$23(Sc1K_aceEOsdN78mx0ipQp))0{vpQTYSozR|M+zK}2n~Lu; 
z!`(VVQ2vDc-K|Yv{@*|;Ka3J>gOHwOyootEyKOt=KG0u7jGs)wk>&VH|N6T0S*tDa z)dd>7191ny_LI?c1wEs{_X!+PF2c^_%0csU%?oXsk%y|K1vZMtXeXJt&IogJUOR$o zvc)sq%Lb^=&ZjKmV@rtnhgIWq-LrTHR>Lza?K1**5+d(mG+qFjcw2K3LYQeLDWIMC ztti$t1jg;mx80Va(=ohY-$AtlvoR2!m0AAS8ZEu>l$7vjE!c5r#w6 zB8S&(Q`)!gtWQcpOYnDDTbey5v0(Om4@#7j=(;Hi@O!Z351!Q$8%94|_NGA}!aGd> zf|l@AEVA%ONaA%gs^rQ-Uwg%EaM-_By9;KpDPOfwj{grJ7Hk?J%_^Irv5S)I+}vtQ*sSejPRdAfzeF>&28jeo2JIYT{!mDq zS3JV^baLYKz-ShZ9LUqzA-Gf-@tS7_9=|MezMNzPB65ein)LxKY92;?jj=jd&RbyK z+|FSded4W3e1CfIhBcGr%|1!o)0OpOuJigGh0(xdYzmn{^HVmECJ$6Zaf6 zE&5q5l@cM%UI36)Tqe?(ajyEYloRN}xc)C!I&7o1|4JwTC40jT;yA#KDmIOE-m;{z zH@!oOw6gZ63Mvb9!G4QCV$`^4c}?g;7KH-M{_zZ5l4>=hw79Jo=Kii3WhUvaRCr+G z=HI_i{*&%--P64@ypo7MbY`Em&su@oU#by)dx!gz+aPB2%$TntPTpdyz)ZYMxxzH7 zv>2L*+Vp==3eG-3T*Zuts6mb!QO6)&86*$+*~5bdK?XG2Slv#p9<7qZ1CPD&S{ucZ zg63jCVxUSr4YF39ZFE?_ks=%fmAm`=M|mXAw3plq1MvypO4Y@l2WFq~z97(uy#F9~P-RNeGWgqM&%Uss@w7O@NI6BiRF z`R;9QZo^q_35L}?bMkfo?SpihlP6?0E{6;ILsaF*vy&UQGIsfb;vUe07 z0yb*?c~#uFZLzpbKMr*N!w<5MxMJC3OiI*$qNX|xC7chzP>Bt?S@O9Oa#UKizLx!5 zPJSJxI~6Eg7-F00(di1Tp*|v|h%s5-J-UV9O1#B7f2u?MhF}FDc5WW~u2T4UFVE20 zs1(&T%#RPwwB3jhZg-kQ}?s7n%PW?GYaz%gLQHfBXIpiU6<5qn{Rw%~EmYhA-QvKOmvKfz<~>?#!@7 zN2GgHiZZI;k_#8$Lk$zohWS<_lFEn=+_Xg7?xt9K8dLT}H*zcV{!Wg2T0Eu|COWUV zc*`>GF_vbE)KIr}ir|5^yFzJb+E+S@tFYRsX>EtoB<1>RJ^S~Zk7D!{f+ezb87+*D zb6$EhsN zC2~cMYy4WenJ6YY{VY7)5Bv8Z+^YgpwwyH7BK;*Xddpibe|V%+2g7b)l!*$0IZ7>+ zM^hTwEeoxg3(2n|*8;1*T;ui)r!qV+w%~)wIc7PIFCXuvg%?$s_TOR|$2>3e-nY?Q zLUr8rw?-zU8nIRTgL$CZhV3UIR5SX&U*12{#rD@UlQR)B<5tbox$iJN64%4eZKZN{ z>Tp!hSFL9b&ib{CV0cfJb-)cYLlX=2IVL!$i*lVRQJ?Mh=;%cmp(c6ln$f@MR>=xi zy$}2^oQ*_pW-psaW)r3Vz61bPywx1tmP;eP<=s!~1Cz4q_VgGb;%)ybxZ8BC_ezGJ za5liyNCw?sE9Mc)z>PG1iHjdQ^J#qmcs9ulzGT7)EaUY7tjd~gms3m*nN104%?YTh zx2x@OHa3hJ9}I%zy~Xq}8hKH{bu|WFs3O?b20fmuKo5!pRRP^Ziqx`WRGqSS`Jf$L zzQ1E_0P4o$zw04-(gz&5!|)*f%giE#o*BkwPW)Z(695^^R(SMS<5)5Br@J?0W#r$* zLD;bD&yf&NxfC>NNrYd~lO3f<>pry=?r)!sQ~&0r=-6kSPi*5`L+kx<+fRX(N82`k zIpJnjxn`n5whqA!7+c-jFS!)pnl36u<@ZP}t{&|U%$U*-=~{!vNWu37U^%fGQf3p# zqf8zonF#e7U=MT3A>{Hhr=gVQ~kvj_DPnn?*^Bx8|}{WVox2* z7vHWL>9UuRxrex|u*vSlZF412htH=2Cw8QY3`~JuSRa<+bYW;`R)GxSP=<0 zT|^NV7*#h7I{2*E5Nl+`73e|f`jLQEwc14W<53)dk^Iik@S6^PFPX{_zGU@e;xbaa zfBrEoJPEdu=$~v%#k6ZxQd_v`&qqB#w*f=j#O;_{>;F@4V%V~xV(LO+YCh#QP94uj zAwl#~sgC@?d<3aE%A(b!>UHC^HjH;-oM;HP)HMgGHp7t0*T==qXNJ4iOzHHN$toY_0 zZ}S@nH${MLLFl#p*C7NbuP|aAc*~Cw-DbxFS%dy!_Lu;RJ0NP1j!sD6b>ore*-7_K zrQ$P;iX-Yy4E2zs>RK_2>lq_sKJEJrw@HRDKIzG&c%zo%N!6_+e==d-l;|PGJFHqp zm;2i8`xHfk((SO&RqNq@aSHB6 z83Q>d25vfcWQx4d4vzE)5s1n?deQ_F?6`S=fqhsV`%Kue+?Y}?d8R~(r#o_!B`sL2 z#i+^l6?yl+kfH{(u&;9}Rv4B`%? zAV#Ioktfyq9ekKL_qA&0!1HJgyzym{FWznK`^qHB1Ge)N0;wi)BpVZ zYE{Lj;3cHqhxiDq%591`I$5h-_&iEPm&Q$(m62b>)ZU=t!&fHY$d zUyk#9?KAA|0x!UAOwhYKDK%+aTmDiJ!>9kMZ@gu;i-P||%k~#(j7*iO zk=$T55tc10*MEU{@kcjzKQZFPqUK|){FAmKs0UF zPUv!3wL*d`d7fbW{(xcWhB3uYdoQFxT(6RtH zHmd`^ztRf=rT|}ZJbzdsb;9*E-Bx&Sl>*e;9Lkz+l$4VJV1>ZD*SGKL(I9gK+?yPG zTeTSbbZu2-$H!BTUg-X7mIFQ4v%B1OU8}=CrX)v}qu?oFvocVgiuB_KxQzGD%;&cs z@qh?2T$4BSAQ7zw z!r6KurTBsVt;2O#b*WD-7_*jrCO4|SLvhF__ni@b0{uuE^9>A|-!`d=jMF4*!+zj? 
zcn9G!=<|KptU%PRKAMtC%+0ma`%x9{=+3T95W~0~Wn`_Ze%2BJz@;*%Ib$YBud+T$ ziDnVY0E_#Cor{NsU995%*QSrVT@JI7Qdyl?K&=;&&~jP=&<)=vevye}=aJACc6I%_R-gg^iOL$q@y#F_qmp<46rFCczYt|5ef4PZTMN6&6q2MCresTB-Ct?jk!-Mlp!*82A?e@zWt<(r@LyV4vb0$r$ z?@|wqHbjNNPidZv=Q{}7uXks*bS^vHPr73lF>VS-slO+pv%hJd{T=07ValDD(8LrZDtoPRRi`9YpLz%jp1831HTM6AJzpT2xDZ)- z45IS`Gel|W08HUWPgh*}I3;h9;^z5ZejRK5En6>*%h`qrHGCMQ5I&uYna4ML*|+wK zBEAn~BTF<*qXkh(8L=khBTmp0Pbl1Z`OSWolB-$twcS*%VGLthiLl0Kd5sJEgA+&% zNuV3To=(V$;5#&$%(F((;U+VV8=$a7?N6%S+Sgl9A=iZc;EH;4RulWKrE@__sPmj5 z-ykt@s7lHU{0;0+=lJRru?-)yxqlWr##kl-7$*j3W-z6M2tMyF95&d8y^Why7m~uu9HvV<4(_Zf-Te$%>vjyY zy^gD8H5vxEhaU4)7&mu9?2VUvHe8FH;bTrhej(pBmPHP-C(oMDo!D!7XXg5;N_9Fg z47VJ^XW+gdA=rGmjkgO3=Jdq!;ek|H0iNZvAgh}owg<`MMp0iNQ2D4%>jintR=c9| zU<3AC|j|2M$P8@RwNA0cOJY})}yw7=E z!m&Zs2o%8=4J5?&Q7b8240{Yy`8~&fRrdsch>a}%nZu`t#1Ypm&qF)lyXE)>mHMWo zQoA6uid*uI;(0f9qe~X3J+(Bhc1emq#s&NcoC-!+uS1jU3j2wBT$*zzTVQF{%fOTu z^#zP+SwS>J4hVuyxAP8Z0ytKCz-`I9rG>#D^o1=CtXU_>`Sl2z>iJ|I!iXq{f~ zIMK|c|5E2xuV0|~&JU1giK&%i2WD-T|Gf1(4iSTAP9L`hceFk|;Ah6MtOW@1s<#_} zIct!ofTnt9f6(pD9`B*$_Z-#XL$XJWVjRzw*wB`bBeu>}Kgm}y8Gj91Y-eAg$4=|; zpJ~dfK;&=v=aEHUI;d@Qk3YW}6Q~XUhk|1j)Q`%;XOPTF0Ox~+`V#Z4g5&{Dp^&$I z#;Tto_bhiNk|!l4y(&s^1xC`sxz^nB+zp-A`@!`5H`r+Amq-y$tj@SJOq$aWz}o9k zmbC2=0(N%|3(?2~Iyu7AMF&j}>7DO&qD^~4hpG7JuivI;$sb;CO9G*q4IGkq_0Dcm zcKWKmQ`9T*THcq}9z@AGeJAy#8#8q{vbw+Ih3&CXZ%QqHj5FPq=>tKk_J(R{d^h_W zj6+vFP0_J>>%AP*WG7RnIrM_Y@1W!-9l!DN*Y^&#q;^TMuXwt9P3#17!Q*nQ>0$xnjm%9pFe zxk`U<>4pA;pwwvJf(f1qz*mpx`^ADM$4e|s5ft@U)7mhK&VHK6C1cqs)Y@cXcyn`( ztM6JwaJVWn8ubWCv4g+zB&IB)&y&o!Y&0KRz?jV-xM4(-?XV5SU$SEP-8u6jFY?oNuKl26b zX8ickFv5z(`76FkaHk$FLJy~NHK;hjSmzHD;zE?a`(@bqUqAt@v)qe}p@bX;D$fpf ztq7V4(wK$IC3mqh<@A_`(wVW6x}&&0LW?S84%Fvq>b&>mP^I7BKr2};!cN?PPYc1F zx=tS;$k%1WL)f>-81@{cu|DENG1N}F2ajQQ|Z!1jRWY!(J-=v zsdpP_GZNd&8aK6;#X}Td7@e}Ji{%mI<`)tFT>4>4Yb|Fl6*oY}=K|Bs<#g@#>Qh#+ zCM&DZqedtpkJi>yb~p4am;&yYJ*EwqDR0tA^ZIC^qjY9=h(|etyNKO`f8y%?^bDsn z=I<$FKB|Lxl1I)6!Tie1avHWXC(QS0vU}iud0e@ltRZD}N;vq3>!XsJ`Vg(^?QQ`{ z45O>?+F?ZTquN!HQ%il}=?<;)FCrWju~7pK1<}8&PsZiM{geo>P{V&eAlgd}gnIvc zHs7R!rUo%<1An#G_}Tp@AW?Zjc>%ZWb*2kL{$${veCi*>AGPqr*^;fMUdA>x{%c^c;fQjmK7*a_R;X`{D(OKbqxLoPMzXcyA_X4_b3 zFZs3b@N|~1-qp}2ft~(DtdxL@GXJ}6U{hY5ky%chAlwnd2q3NdIPW_fzk#6bRjd?Z zb7Af~qPt)s(x#O%$Dl4M%u)1$$KE!Kht+qRZok^_A`VBCIqnqocHR1&=w?G9I12#E zIa$Fg_!J^!P~X>5nWQ2CV^uH(aLsmpJASUvoWG*fS=A-jJrKooNV*)`%b&gfNt7Ej z3to0d!I}&h?u!W?7JyM2`o-ale1R5+js8JZOS>NJ$fD1VECx?Ey?|l8yUoskk3)D0 zKV{eC{-B((cpLVp`>oa#FP`%=RjlaNGLV06VF7%9q-Mt%YU8aHe{5d50zAqhbd_2< zxgmeTek={=zm5+Au#|Lbzr^eUP>PwH7e&b4<@X-d-}^d4lxDQSQh;|E%^UBqICu}m zW-Ggk-v|Z7IXu+fol5ngTn8)7j`M`Xdn|ZVP&d5JLwb$nsy%P>pPbmh!;+6?l^jh~ifib^q_SnhB=^+5hH1AwDe#0hw zbxFIEfd5T1u?|$l*YZgyPpy7Fz`U0_xkXVF&ig+XanLNGvvIxhIKpwaN;Yq7UKR4o z{PXxa70cFIwA|FB5|Yc=?NG|&i#b{_d3;UbyfVp1a`)&Fej=@UmaeP3qIeYHm6>(X zrQFkNVDsKYe-l%7HV~fbJF2brTSnZCt!T{ z{501*&K*v>+*1E)agG`w#-jJo>49~=*NTuhbFDX|T6^okvh8?8-~O&whzs!l+{(-) zaGy|swL+WU<+Ms$%>y>Y#So0}wI0Yp&bOsh?h(>eWPa?F?&=||r$GFy4;&K2gzqp? 
z3^SbLB5ypx(3!(BEb70(B$Wx39nsfpMtU#Fp=vX3av#j^F#a)13e?qs4-^%6+i$R{ zTXQMiFa5t}WB#-J~tpHeElsQ0Hs+FKRL_2&vnHTJwRIZGud^qd7#^#zh z$F2fi3al9(d9!j*fI)2b`P(UW9`FqwC+Utg*bvDd@HQgbypviV40uI+_w?&vu;ZnI zC7(g2g4WI@Kwk2>6Z&3Z1A?4G8nNC&EBs$}*4e+bB*@2XE%>-jIET;c=%m8x5?|y1 zgN+(c>;{>(`^cM7eGo6989f5&-u1I~ck$P`GJ@f#mVaX&l@Z|5S&^ z?eX|tNdOmOmqQ~>`|ePrdVP)j#MT#io6xXk6DuoExnq*`p(Ak+RSUVFnYYwYj~Jr# z1k8QLeX01W@y(}2RbN)B0eGO(wSR7kuQj6;6}R&m`cNkimWi&%hQUNok`I;EbiUo~T3`r)Gr`<1=@b0f_c*~aJR!U&2!f>P zFB@1`S7!6^>_7X_S0^i?)Z*@BTl+~yry)3;ltgcmgy7yNrmJG=&`et8(!Am2(R6A} z;Po-H>B3w%Sq48GM@C6n6p-uLMY4%@XaPN*02uYT`_&TAT82HZ%jQa$1N5#C7TGil zTi7%Aqqvxg&_Fkq_%7NwQYs` znBVImx};wgsfh9C#D0Z{mi&pY7ccJA&k7B|EYFxZy3xR9`Y3lyxR~maxLYkhAdS-&!82)TmZ8j z1$A1R^nh}nY#raw~OI)QPJwaA$Al%dw z7Qd-@s`78f7E^|Ym$ZD<>YY#HGqdz#1rSL~_8?n2^lzH0rA{`W(CNkH%H#>$ra= zngId=DrHOHy;=#|qXJ}R{Z-*OqAadB1@d`B9t6|S9Bp&FbRn9Pnbk^-Sq?@y`iy+D zs3MY)!Gxmq$Z4rrk`ce^dB-7)gKPv){4Q2m1P%Ha)GT<#O?+vo_4qGIbh%)hkY-&3 zuerpH1!c^MFWNvu$8Heb+xI{eUJ&0LX0IPKu>_}-b4}*OxF#L!d z)knP#QG6CtK5+5W6`%1z>39ZldlyoV=d5Bsw{!e^n5z z9#aEjC_?81Pr9TwX5Utn?KM#`MR+EKlX$0ZLTvhe(t}iEw9r$vYTS;%%);*<*u25Q z>2qd8hPHk9CEd?q0qJ!5#JLG^)?~nOK}VNZC`HeY>f_NF=sG9GVAXe_@1fBtdm*Qy zNF8ME_60uHe24M2k=XzF@=8-hx3t=lAid0jim4JaRt^J;^2FcAzn_4~sa10;6d|rF zq|mQ4Ems}sx}PHR_huEE#6%^J6}1?p(A3wJawxEkQ(AK)Wb?*;^_&n-#3AWi_ha?A zSw#cvlfbYXWYl>;ixTnY;O`%M>N#T4_6yui{+iakC^yn7>qO;oktM1EndB2UBOBhV zI{e&fax%R>N&f#x-wx1vLF7w4v_!x>^_FayMCg0O3HN02joIW*KmQexrgt0YG`L@0RlLGx+HSz0ca-=?hNuU!Xk&C%tIk)7H4s{ z-P{LccAxj7wl_)Y#zxdq@A5&M5ZK4y+a6S6xL0F$7AF_qHsh9@lbJ3)FLNO-vo8%+ zh%YbuaZ?Ua$dOK4($_EOfJj)A2HdMG{#C=vG4Fh4cXXL>hWqf1vz0l5GjE|=1N$#w zk#r@>KIM_@xi7UV6kb>OtnD_YF?VzyI$>h{vqGl`QKy&~`WqjOH}%|tTSJV^K(2b| zUY--K1>*A+%7qs~h4)U682Lv8Xti+i8VfB|91E~a*~dtS@?7n!6p&?lUiAdffhjW2DrK>LrL*I`qu z&B-5Y%czbLxmV*k_sHP)zjhY`)SZP1$fhZt2AtTioQN6J@sW6nKIm#*T2S%Dl+|Rg z9j%C0ZwYpZtHBwf^OU(%8B{$t-x?@;cYxVL#yb3It|?@Z-=9ctW(|~&bksdmoGeAS zn*eFx=W8cfuO1$tM$+@>a);pcTVsjcl}X}Yb9x@$yL_%enntfC9-B)ADZrU>3|`M7 zibmx^4)|8RynTsGngc1Ekpt}8YM&}$ZtdOJa<$h|CJcw(=-3%2u0G-`L>gK(%&Sljxqq$-|MPUb0CI>1&KxpU#YJ$PeRyCLsVX&?b=?4c7`|#iC zdF0~UYG6E=4wB>gL&NV8OKS7PK?rW8BZG>zWQI5i5k1RLupd$=UMv4URHCQJGv;*O|KFlwB_%cEl|Q{#v=%O`twPI;~k_5 z!Xbq^8crr=j2aS-c}*y{&FQf6P(@8i3PM-RrGW{KC@YFoKpJ?%$H zjatD_vRV3{IYtO8XDV;%TOKbqYvgiEV|j$`;@2O7Nj-X03Z}5(W97O#rQmBK?Zc%f znFp9bIsAFS!iY+$kE^B8yFQX3oR@4LL4!ij2Ms&Qh*kr*w}Mv=h|~FEsCE%-LYN?S zl5zj1r% z(e;spri2W6)>|L!Q;+g!L&;CaVbFuEyOI3&ZT@mOq z?R2BzNuL9@v!!VL#cW`X;$rRPCMpMKL|%4}#M-~uM@cbwP%q0SI0siL`JjNEXT^rhJ9t66l9!9G}ft}W@2b!j3USlX%B$*D1zKS%!y8r!a zL;X>KLf2|AeNwN07IVk7%??U1ZmB*=MTa>IA5l)TW0`c8m}5!K`x`Id6XoQ!5P8|+ zIB^s*XkqEcfkts>dTv4o*96ANTee@Snr10ckM~I}a{^t3MF8zQq;U7W_9?x)#J?;} zF$i;hZrroFr;?0{U$=jy)Z#l zEmCUJCT#n(1rky>!CYmT!l4-835N8f;bl7ZG#|@`TA}`G9X?B_6XajKwGUj3e2XQE z#N)(ROjJ2XG>Gd8Y87Z?ExBPPf4F-q4k5p!+qfoFVEV^cNqv^(dt6Y1RHc%);CG$o z%x7|pSoxDdb8z=;#SkSpRPk8LnVM^s3hc+1Z{PRg0K>h;81m>`|M7_C1JO7MpAl1S zbeEA>)DtmD{P-rp8r!kj0_`tw6;`f(6I)f#8NJkV0adfO8}@(wkIW38&Q>)X~|iD`E_o{go1cg?T*z?)Ml4i@81F?Q!H=gP@m{l z^Ga`>&VJb=xE!A(y6`N2_Dl?~lO!7g09l~KSec+UqXZySzpOM`(__}CY1HNZflz&e zO>BGdUSrWgY0(PiuCT5|yc?m4Z~#Zi`np$pdg6#|PNjZ_6pMxSkpuruQ3BbaL>9eo zoRCSu&zA#PbQ_~brcyO5&-ZQmD_v)Gxv7X9mK~Y|6EksSS;Oc|k}K&QjaX<`f6YE(rDqujy;pboZ8h^R)(il20RkVR zI*^@5<@7U^#Zu8_>JaakB^#pRL6t$e0M3(qO;3rsy8Zbr$CrAU^+~|bT)Uj_I6fmh zCvSf(nth5e2X)mc4!+T|ysv@@(9pCcYN(?VqonjOl$o{!7w-(VRGgVZhMm^uut0&n zco$LUvS|^wa7gRAf1+Q@N~9tBl|Pfb8!2|^nPJt=dmowpc54P5{{7rf4(wmxp9%BI zzGkSMy`2(48*Soqx@{~ap9~yn?No`_y%!Zo8AFF1os-GYD?Lu`f$eb!fpm3=-(9{> zLE+!=sTMBa>mV-c<%=-tD2jRpMSsmEAYJm_mF!TPrhRtDLCXZ(=zq*qUY1ORxzagd 
z(bq8Sci$KpmZlclltBPuK%KwD?ZGYhva!wQ1ImfT+JN<|)R;|H@DrIn#uc9Le@c%X zj3EuT*`o2e0O|oK@pL<*yU7h0?Xqg2+KpfnpDj#~w!fS>eD%M+GtHxgT33dwqkYd##y$<_==o{0>|8%|eIUUWJs)*gy@( zEkNjy&`;4?Drmu(&v{^$o;F;7a(fAf3I8fb7j^@j*T(*xcC|?hG)26y%vq}j!Yx{2 zJC6P80vd4jfMIm1VYWiR;8~p-3nIkB)l?b_ph^yq&{GA;86R>Bf|SuXOhQvB%dM~V zz=A1dZ$jIA;N>k1c;Hsyx2VN-r2U;iqVu)e?j(VYRH&Y0XuWV}zll#VZMqibS-HK&6*57Qj`RfzZtc?4kypJ9@pkL=-8BMsi#U0tHY1)X;3Q zL(laG9dY1xh*-(gZl2n@lic$zy?)AS!qC9RqZ>GQdvEw&b5n-}!wFi>-<^Tugw-q4SM{!(I70a70Qz9p4kfD?(mrkfpG-vi)`o<>3-aFUW<^^DT=o_!MpXr&GM67 z;8atuIAG*+b(rL2FQ{|4ivYHV9ua{Ow=+{o?UuZAQxblbg4+@pX`G)vW^cM zgo;?Vbo4XDdOMFo$|aBwoWSKkQzr<$0YL-C{|mvVRbsV#$>&w)Lh06w|0O~G$pACY zn`D4Xq*h%QWVe6cZ$M`c(%Z!9bEQ9Aq;Jb5){ZMc_G~F{7UotZUD_)4^&Ib(dXNQ% zIBd=%jn?~%7zG~WIqplE(@@rXQ_~Z_zAJEnzv)f)OIJ+_nj( zqlxN2Cd{m%Cw0gXX8esh3Bb3tjW5P6T1KXKqI@8B4?RSB!Y!?uWg*SqhAl1-3S(%r zL{u6cZPh2%eT;hH>$?~Z@?d0cTL}X~Ln>MS!bs^DtGTQ;Ch5uDd4oywQj$Knw1D!qKY@A3#!EYk0l1If>-ks|3V3E%eS zEMQ@~RVoA1*yV`%jSsT+La2S1^fLE6WJ@iF_n^80@8CmP(-*P=;<7KW?@;A+^cjrXyn|NG8ql}zH z9caiB7{P6$%g$FcObdTZO$F`@_u~qI0gdcet2LdY8z1}|a(GEcyKcFx>HjJ8L*_xy zg|m3&DxrE6oqqIh+`}j{El`G!BsMh6yM)xCd(_QKl@cY|@JVD4x1O0mbOs8=xVCBGDi0z$gNpp@|aQrg${W<7D}SE;`6Q5?Ia4(+C8*U?Dc zi_i4n_1((Cgb0Ze7ErY~n0l}r6kU#_S6KcQ<^K3yzP)1`_olmvy{GTdlCLSZ>-333PhF{?Nrp>obCYeoGk`g znS~jhGlr?~=b8Ilt_c&phMW>I2V1E36EgeN*-?#?R>#Lkyk(7mF?oGdi-SMh#S$>+ zvBbj;xuGgwIR`}I7h34W>*0p1!OXZEMsZkjdc@TqJY+Alcioe$Vxq*ELxZtiG*qTO zgM7IrDlaK{R(k8H$J`+xh1>6a*-y8Y%4T$Wn>Ji=p6$6HF#Z>_UrngbBjU#329*El zO5V}XKE({diJk&}g;fHkq%p7;Ht)(F98cVOn9ce**!*VD<^b_kZ(yuwiMdd=ucs|!D6^G8jJP1|*(#L~*Wc+xl<-qxE+ zrC{Y2pg{(k{e%Sy2i_=kLg#BbYL@Q&VS!G;SY;T=hvJi5Fw(&XXmQvl2h~}VeYwUA$YC&vnz*{Rpr1c{ zre#%g#62{7xa$3*ETq>=mrjQdCM%sd-Hw&D6p*kx0N*j*H5U}V-fQRpoG6N;B}?r~ zDF72*kvYQzdJIj;4;iM$Zdc?Bt5cDu!Vt-#pr}#$ygt)l&^9CC;)PZlQHtr&@n7xF zEt`<@VT0{SQ8?{2F(v#Rf^;qN{CUe2DB0cUJ=qA-fyU;^2-*xX)5I4<_GcIvA z9coEH=VUSi-Azf7i>A7_JCd^0VP5wA9#RQE~Q*6sEJq4UQ3$1P&+x> z&^RfW16A^67@&0vcoC(40J9LLLSzv6pi7gv85<*fw0}#nbJ+BPmCFX&ea(W1Xz9#6 zxpDWcbjG!^1^>>HHYcT5HF()WSaIA`&|$D$Ff?Yn1>Z{F}3^cAa;$8n4QbSp+(%X z?H%+4<=Plk#NMBXy4{;NXt>4jOn*d#Ac$zQ+20W;~JDYf@-5>tI(O zj^3+L-92Ac(M%yz^bK3%Bl5;lm+|V}FE?vOpF?Yy-OR33i&@$u z<--D;T~}*Hc%^+j-D~<$=DjtrXq3>7UN^Ul(s36YJWq~d49NCAUo95n3HfFzedob4 z$;`>K2k<7RP7yd45G&Qw?_nt}9FL4xzE%IIRzLEi^kS>`rBHQow#p06@_IdGb8@M8 z$83ho>#8v|mWr*<-4-X))m2`fbxa5K-+(wtH=h73>V#V;p(}~2vS>Ghu-+ZtT(i&u z{mc9Al3l`CLI!>eI#<(NORc||xahc^@`TaTva=NxW#6BH**T7+XaWv7#M!EOrM6D@ zPwnsI>bk58-fC2})bJv>Uy3-vU63_Suvtb<-wKh8R$UH3-~mH1DA#_>fBA1@<&8by zt}}orx#zgGU)!nzt5?ng?NXKf&Z_c4UiQoiSFB<~F(f%Orm+g~&wJ55fEt0`$l1;x zeZ9u8x549sNOOJh)X?oX=JsOVNV_d3>Z2;Iu-u+SkQ&W4G(UPG47dimROpku$6EKp zTK~JkEKB@$d{h9O6%_0%YMA^&5!4Z8i@>j{6P(74FCs6Tc{pnZS`M zPA8W1CF%qB<|uL$w6$aJv+tLcHfk1|rKCMnQ-e2!gH$rVs99>+gpF?>IC;1wrR*#I zCjwT62@;99!Wa^nA7?Wl9%XY4<{3X$j@iCAIg zMbD*mkMenp1>W+gYZCRV? 
zcr-oD`Ytv5zFG{#YX%q`h;|9?omae$C=*vgJ~J5`CBJn}8DT1~bCA5Zwb8^r04b)s zO?lo5l1v3}O(>c&L|Aqw=H1bM6QFELNg!;7E(51`#|UAoab*j;Phtz4m!$2EQ^-TPqnXI@NSWllRJ-ztA%rS2(XQ`0bL&*HIE z2d#3dk^axts_pf?+SGTPX3ToYrm6u3icfBr;3loapyETYOSkL3&O|T1{{Y#V`_ofv zi}#B|Z?U5ueN{Ty{qZzSSQ}Zg;9`JS3~r(jFnxwxgO#PEz*q@%0fgrL6X(t{4Doy> zN?>dJ9xvRZlJU*fo$|?BP+C99F)^C2mF0%Ac;o};e=Xt`nq3V)WGK!dn#azsFSl4l ztK2K`9$geNGAPPAVCnMp=o|CAR5wMlTQEt8bhgst@)Lb(a9j8Om-IwCF~du`5uaIF zoy#~1%oGE*^h`2@gvF-894iaOChynbF?+frR~7BDDy`+kLM1AIgp)eJW`}(WNR#52 zpFG_f$Z@}pa z!>UJ6c^?Qsu$2#f;!~4SG4yg6YP*mykc8iY$RA0txTE%DpgD}mJ9$YT(=;pSG+Y+r zSTio{QXvpp*bZ7xC~R8d;j$SdvHkq#ws5X2tJ0@qg88!AXx7id|qf$H*%dY+c5@{ z)zuKKxRASHyCo&-8w#<>nO2}yNwLjlhwP0h=d#v~%T`1}#)>X!@*p2-af{Y6)s&cs zjR)^U$AzihPpEd;zm=9$dHna;FQ zEK7$rr{-*Hu zA0DtTsP2{@r)>PBNFRX%X49yN`WQs}+wuH9E>G6NKtxMZ&2QZd!)`QYbmD}2!>*_p z@I+quzly#;*8)k%3UE^i^{DH4HeSx$)?yI#<$;?CV0 zIT&G$;HVfd%1f?EQ_ld*>?MRjJSZG+V@H)d>Xr?GQ(kc#8$AD_ot?7X|jP0m=7ILdY2)bx#`l_}G+UGsAw-imtxE>DK z?jV%ruVQ}_cAVjghzRXu%gQgrC4#fQ!@5{89XjJ+eg}SvNQ_3bIB(BaA3Z399YJK_ z6h^ZJN)ske)dmm5c&_wr9TnvvbO#n}_vO%v7;0;z>3y{guJm~B+-E86OI2OOwnX)u zxK-H~{8=WRBvK0I7-O;Js*{rC3Zm5aIcJJ~0$WdUQVY~L+4L5a+Gh^en^3~gdN#f! zfSx-O{|Ct0kcW{Ic55&WneS$CQD*NM5;BW<`Ca-6==cnLHcKk^x9hiNodVz4Fa@mp z_;Qn~3YvR`QMt$Kq1Xr5QYlu>q9L_~qam8q^>p@bj-Uistc#D8^TeI&=EN>@rVlc- zfebh&B=@zORq=Xc_>PxJDSMh*!TUn25Zx)(yrl!+^P9mDj`9v|sB2bhuBS@0w+ZC? zoRR&;wX900Aoj>}$keSpd)pW+XrppG&8S=;UyxhX-JyMC!h;c9-zDJ(j5VTwv#hH{ z-P;BpwiPv3wiJk!@FbDS5-ldlUB?o)9!DiG4|cg*dlTeFYB_7qFp2iEyGY=X#JE|? z;y1>(2CET@31+4%%dMdcPSTQe4mFD@xYecKTB%K+VX;E~v)A-=>$2^GkkFzb?Fyb{ zDVAJdt5)cq!(LNH-TYHTf4ysCiPHw6ayV3@51GhC{fCQyHFAT|kQ|QP5AikV#BA<8 z@!1AjSU$)k!N`KtOK}~5a9rFJp8~fWkxvEF-iH?k$QCWCZ#tBCK&MCMh|;KBFM&?> zu@-n1^w(Y9UBCl~Ti40FH*eG97bj3PEq=?Gy!|di($q@D0iCpcT1W)|m@xk{Pw@Cj zZK*N?I+_lYZ9v_(X1Kyn)Orhc(%R{+G|(GX0w)S=yk&$F6Bd6^41VS_#@H7%>ZaU) z-7!8^%$rriOfR!*SBcQ&E-&y*$7x&Yt?wnpv;1dQD1emhtqtb{PN$lIaJvSsgpto2 z_9*MEtd4R4(DK+o?@#iPmqc%BNBWtmmQEvBd{_rqC~_3Ff;ydQyH{AVsVo9{3I!Ut zneV35Vd%f>EXHH1)69_1VXnuv&S;1F)CU7ru%jh1U`Ya5%%bqeR<6*l!G#%tx^GSj z1uuolC)d{Y&4!5fBGU( zoH{9eAeyMp&!#m&*9SIQXkoP0SX6m`HReRH8*n2zh_(^AO9~XKM%Ue%$m%4XhI41G zwz%B(cv`lgQz(wa36&1$UqY?lCU!@%5q|x}2C~wLmIVImkTXxQkWl z&6gl~f(9cV>6EG3!Hk&A{MWi_|)8uLHk7 z4=s*Es$)MN#1h@uJ_8LHN!pA7({tB?a)6#RGB3Xf3-yHP>o{wRzCE9(Oq(^w8<1TT- zk=d*XB$P4zBIPZFc#JdNbRBs*sJn9wd98kXNbyf^*SIZ8f2-8#$0rvX{5-X!G3Wpl z+xAq|tal#AGmtB62L+*P%lTA|5a0-{7&?;PFS~?Jkjy^rb)t^2aR}EyC#oA8L=E@1 zCl2w$h`oOndK%zHw z2{nX2Ozp7e%|PrOT=P0&T-ry`bw1e&Aq=};GxMad1rtC2iZ91{G^KBXef_F(uap`W zHpGVNh=Xy7Im3=k&xoHg^#199R9`{2hA+j_Tn(nOAe;BMKD|`og{&J<|NiNy=WLi} zW6=9vJzRBYe2%py9Yk8;0Bh8o5y9G6=?b<%>s7^aVhNo_;vCIYN($7v3SP{!Qzgc2 zKJ>^o|A$T0$e;Tg?<+nT&cLeme3D`2D~>eaA~0pn$K#rXCmRX}kiVUc>U69n)gchi zX?BL=2LQfGV-diqKG&Z<;n6@?>RtO}0AcwmWg1=U!4L__1k+fH+#N&@#WTz=%;B@- zwF8H!+Oo}5np~P6BKfK}AoBc;MJ4R!lARF4#%n1SjPH)xp{tyEXP6;Mli~_!|C^vY zGkQVYO9`EqSs#`7iFAOKW)zqe6jbBSqR{{UWQU^!4#|u3)g4Uu*o}H*k z5zBpLhYK$Z8=-1Y5tM$Q0y7_LC4_CIb-gO$e^Wha(~?{0UF+}iDTz|z;iutS;(i&3 z=UT`izVRmg#X-$muUlc3QB+0qSu;$dOL_+y;3>^o^fhQWeM6BcT0Y3zE9ma3C?~g4 zfsea~j}x6%HQfc1nsl53~y1fOKa6@aJXPB!#mX zRDQAvbCl|Vz-Fq)f5tK5vJtDN`4q9RM;G#Grz-f5RxT=_!LoR~zV>;hUlvAt+paiW z?JJd%FCXx$!+&R!C;q?+-#KE1Jljc(6p3gp;oguH3Od$a;JFua%cZR3qYK8s;-WkV6%mza{yMy8e;(VV(y9uulm6CZ{A9h4cL7=_q9tQr z@NeFz@iu#BmCB_}Z?R+BYYd`UO1%NcfdWBBV3Q9mu)fETZ*UG%0aa^zJz#qiY+Y+%#m| zsXbIRa$ZT-H?$K9N%{1*8(a4>Zm5he@$k8w@kP~fOU_s6mQ zP$ec=X~ei};rI%T6_E``o6J5Z)#V!Gd(`!ugBZ z3Cf3-Xq7jgxVoH;z_qos92*eE8o6Ha+4Y)4(tZSfmtT6-r2#|@vV-)MIFTWIX>FAE zxahTiM_#z2{RT*Nv)}%sNfi$8s_GbRnc`!C>g>fG5$^IDSMY3in46Pu(`n@GljH|l 
zPY=j_9}OPeHo|Wc;JixZ&>604klYxt3ub;2yqpsspaAlRpm2SMKIyu^G3xNtVOqs+ z`h7ziw8?~#QL?-b3}SQw6tWA}h0$EPtp_zTh{Fj`p8F1LJT5HKpm*?tLgJ|M3X3g7 zh#{bsAe2MCXrA&uwhc_8N{w`ewxjc&dHI4IqZt7+q80)rw-%>Rw&kjBm&q0YI-`B&&fD*uA&U=59;phfooM3ax|3RT=4 zlz@nIpHrB2yzuuWoGl4}ZVbpXti+FgGdp2RzxndWIw2?k(6tv~x7He$1{Hx`p9~eh zgJr8J`aOX=g_>_)6`jV4pNw{pP0pKKuci_|_HPB|^hmMUXNto;9~iib(R-D#sZN(&EN{!We21A%Iixt%^X9(@jt@Wmg3wQg*N~D zb}-A+j)UilHCV@U{T7Y66UU?w zTH$59HO~N(!sC>qpkwa}tp`V4A2M@BXj##BRBTv$$9OBei^U`^XfLnT2>PUaO0*Is zLq@@?Pf?RYJxP&JCK02JBF{PZP3X)lcUPA(0cq3*6UJS#eRxseKnT@=f2L4>)Vt!h z%Bp2a$Dv(*K2XG1I&n3ibVLkcBS0e2?|`b@<fS*L1n>eo|6kO?u7fr>#wn(8KKHyYbxp>vQnBe)ak%b(cB zmyha4P}uQ}oqUT8>%I)5p2(}T7pj_N_h$Y2LLezPevkC+VKtl%(Ih<7okSn_F^<}7 zBJ3IUj){kIAm=C9%`(Uw#me*B%>)0PXf2-Fdium+CnLw}0<>d~6(J6Lo{N?-$ep_gQQa{>pID3KNF zNEc)>qT@j*Iu;|zm0$IGPqBJcsm(~2rDEI(L*B>Jos6@n7wb(qa%9^ezuB%MH+Bia zfKWq`wlUcI^empa0+KQ>?(cBdCyEP&KC%pw-zMBJtLjK!)>9-;%SVYB%f_Qy|hK< z`qUd{HF6^a?Js2qdxeWTo7@p7Q%0rE{gzg&MVh1S`#gA(gYMn{__4`4B?z>(`@|C7 zph>edjA*z1lpHRew4 z7pvv=gK%KpK>(EsTDFMlg_Df72@vyW_aWP3kO!&JQ7ufVqd~fP0(qSHN6m&1SJlc9 zzkl(+G{+HX4+g9=_||p2lYA?!ocx;^3LC}t4q_X~=C3XI^UrUijwJr;VLzJ>nCdim z=$%fVU-*JV4$w8#lV@Y?)RT}cmM~lV(iG!0kvAOiN^xp@Z7e5TT%hw&nzJtqKwMWl z1EQv)9i_0#ziF`wB?+a9oK$FwwY~9__=IF4qafu++~Poatk;P=HWy_~hig2TkpZy{ zy)B9XVJ(T*cpIT29-imul3J#U&*A0Q>>`R*<@a>$t{aieF8A?w5G3q= z9w?>ns(CNz0WQ=2Hpx;#%?ddT!)4O=Owq2pHI&zOY4vSxi36=#}_`gGis>c2z6MiEorgR zbJseVhzP&Em2gbcoXzUmsufITgQ%KU!Qk1&;5+xGCr&tVI2ET21EW#D1e3$1WG~dV zRoea%h7pP|gAT>l!N#V}U#$+ZOUERU;sr2E&~Dvds=Djq>aP zX#fIJG(Fwl2YIgKtt$&y56TW?6GPEWXDB|gvGP|JMY^xJb)xnRl7O!gj9veXo$54g zIUD?Wn$^n0HmoANi{tST!BBc$#0?POhuu!@3C?v~drSv72^S(fdGD$i8eu?OrHC~% z*YkD*+u8RnCFB^K%?D`)D=04&$tnoogus$QW&VYRmNP+0ox(jDs6Z(^WP(3N=1)YEv(yw|$nqIb}D~Y}!a92YLWFRwlE%Z6c8agCHbU z`cPwRsjq8f)LxC)5>80JFReUM$bFNiN;sIv82=$G=}KdAWZOswRmx1{rJ94!Dl2M6t0^k1HQ?U#C8Fr2^Tx4Wt=!~o_^W>9gt42L1qwHq^~3v0 zQKbJ844mF-#E}oVs2>2tI2ASjHPlk?xW1Q+Swk;mg&JL(!X%>p#U0qT6mz&w?)R^f1*xJ(!I-jo!1AiR+d(&Qr$FKzDjRtVJw)^%<( zTtnqFpszNb{K;i>D8pRwA~I!7o>5P;+Oy-f*h>hXu{RxkMs`AS2KK)9mOb@B{6BE{ zkk<$mgjI=U$*9~IZCg+d@`vI66V9&K(iI4wAVn3~sGc@;IBCjVo;DJ&cYfhPiX2j} zK!KfQxL;7&{|D}$4w>m5C@q(d@1XzWpm88eT$M*v;h??4`^t9@Uok3b|J~kX(;^1@ zc?ef8Zo`y!u=c+lc8F8~*U?cEA31qSc#3kIGG$=@t^KKt>7YrGe+Pw8E>ToLnLXsS zf$KLWsa5srF$1}8tK!LRfga?(RI zj5>=sWF#K?Wd^YWZji&@ zIic6E;Be}IOf=$)(Ck*ch(Cj^Od+!TDP|{eqp#jG5{=|C#@`Xt_P`H$NdlOYjb|xJ z4*H361ktkuH(*)glPtlmrs{!ZIOUGEIs1e8P+8*;SMXCj7*^0sKZcGw+1d!e)-|5( zdRsIA+%$0W5~%_9E`*di?wc+3xf5PXFF(Pu(64O=eHlUUQNQX zFfo1t_2LhQLs`DZ=T!~+ZXNa9yfk)+*Acj2bQe0b$N7j`{pWnqMM{qT!CaY=p-zaTyl>vn(tKjVQtPhG^d^HExyDnPb`0n_3L#=UJD)GSJJ)i- zPn>0|>aivm6;=Y%<*>-oUP*KTvCB5Gm4gZUodcjVCGs>%DP5LFj`VSY&|61`ikKeO zr~jG-ZoX)Qq84VFUPLGEAa6(FS|auhD)?uGBicRN=XmA4>E(uN@?8l$@da*Ue5JLM zt>KIUTtIQjq$O||(xY-U)MTZ4DXGnuVXT74u6(qH0kdzBb1^RcirB)d|E&PNW8?qz z`4skD{Lvo+wE?3%_P{PHHYWP3qHxvy)4jopn32MMDZh|}giZmlm3^Ys+TMFTf`xir z>yBP!mBo_E@v)hnH#wleiSa4qk+()KbC{C^E>N*aAkx@OoW(zjQwC{08uZfZ!au4X z$)bxithlE)mC%qQa7e3H(G9Q$Z;;Yog+0&sv$Yn8OkQm5v(DzRc`dB#4?c!U`h~@V z9}>|sS}g+pPU7jdFtQfpVa3e-SGV(noale}qBo&kNSHW!>`1t*Gn zJrcdI)wb2pMm+p-u<*S&G(Vs54#^sevl-DehHAGsZ(wMH2k$42(#EnNN$j1!7vkCy z^ue}Roe_3EQdx?8w1ocE|No*h@BmtkGTelC@~_dN;O%^Cpz#1mFh_#SH>0Pz>zN_N z*_&i`!m(uy-p{;akCXqDbCwOMCTa#w7BtPN;6_$z;d{M#cS^zVf++a{3YV68E$ zI6adpk$VvVI;*_>^i@F3d{+NTNlN-aZ_%t9C^F|WWpb|*}O~U$f_3oLpVq(x(8(Cx@j(r>d@8ecY zg;yO}c6ZA~UNoTJQW22OiIp$rHL?px&K;cQ(|07f7)VJlA~FNmc^*KmI${*uy}#eA zXfF4p?W#03lA|jAdpp1Cu&(3qOp`L|#3=;}g+lCJ2LWI-3knsGjs3YvWM${#&LbCi z6r{&j#kLG3#Ac$PEl7=vYobX3=O;ph9l7K2J&YjERDCcAZ}X?Jox(z($@oliGr~gH 
zm~+#ZUg=(#t5)dP-|~~VuZ;%^iH3W@$dWqBe+j-diL13sq<+h3F!Cgq3OBKlZVDt` z^b}2RX3K&@$qSb1oszN#$%6TbCZR{aeUa>vZn~sdz5_!sMp0sOKK1`YY&z23X7&5& zq&ZXvP}60iadRB<>gDQh+y4sY(#=Ae4+$n6Yi#*Fy2&@c96b~}5wkpIYoH0w819)t zrL;zO&sL0{?h#Z#YHHsw2A+rTY!Qb?8S`C`e>w4R9OgBAx%wE>7xdO^OYhF)DiEA6u^NLdY`}E0<|1ZBJ*5S_&Me*HwNjaNjoMCM z7G^!x1>e-dPVsf}9JuIFocUBbjDSCV3L*B5YF|8J@R(H@usismF9Xi6T$X+*mLBWJ zL%V(DsMRC{2^f=sYX`AyRPAxjK+Euho;jCeABBh9xfG`q@d0glV z|7w8dx6Q^T%Y5Bu5km5Wg~G|H8KU;UYOGmjS@MFn@x;Oi30L?>{dKk@Y&83*+3V5)$6Md58 zZ+tA9O%`l=dRO{H19ghe9G*_X8Ns12fuKjo2Dl@t{C~kGTVGWq8>-{TYHW$vK(bQ= zjs@~zaI5I0;4vPW`W@WJjSLv0~D z(h8l9OyN|&@k{W0?;w3ASnM}LoZ#F_xaOkD!x$RlQq~wX_KdD9w?!PUOJ-cAgR+2e zP`K=Rt-18inXL;frsY)j$qbe`t)Y?ZKp1mIQcwB~3Ko47v9M_ePLDHfM_ls58)tsa zk$@jh$pLl`xu>UPos@*wbf_Tn^O~ZXVjP4V7SC+puxIA@F5?i+W)?a(+>Rl+BkL5xyiCnj775f)qUKG+ZY z@?#q{e$crVLu#RfnK3#Yj<_Awj$^8UCx2iz%O-M7f3qlSBnT~v#YPzZI}7?WQo&_# zuQvtHHuN968ZBFS{5Yj+fq%1gPl{gK@VZb}`$PRaO!EfR*vO^)k!(o=P#6=4H8|ve zDVY7fAQ-*U1V1{CrcAt;g{GO;XRx$rY3&X5nl+f__|(Y>v{NrQgj)#L3lBe%<$?Oi zR%?AlfDO_?q|gP4J?dYqXE5f>D!P!BKj3(6mV4dj#{x7dF^(2x!8@;y8isuT5?&jr zWMbcmoz2#Mwk=x4MAyh_jLLN9>R$ENCPHQ?2siF*aUR^t*G7L}63`cKCkN`6MK1rl6=^$>EmQv3K^0^01gX3h^i$u1B>NO)-s3FgxU^lz_dHdMzKsm7)hcc%xqP+!pPq zR*pc=tE8?#$RF4u;TGZSZ>?NEGuI9L*^$)aTtRLotDM?aht0KNr0c=PYX>7N`R5zS zJ5CosimzZuRs*=ZF70bw0W$MAyRY+(DcCH?w$aqo{ca_B!Nd2%A5pFF^@4K|X^VVpny#I^UQQkDVjNrpED4)WJ#{eMs2BI(lA1<9d>PnEhMPi*Ik zBsFbhTD(i>mPS=Cbz zXyNKrPN)?LoMj~^(YRm1iq)8a#7<;wE$t;qcqiSPe5ZB+^;`Entd@i%yiZA|P`xcucdmf!n=y&{I#pbG$dil`a`HQiI{S!|D;8c3?G2Fcr z6e_W;2@yu{6UPpYucY_&zSY0+*E|OJ>NW|P4@I>mS5u5P!D4pTA?y&Dw<4+>fxY+y z80&|?Ta7Cy59aN~Ae76p13pjCoE1<_{AeJ44?t!HgUCJ#igL_<|AC>~Q(nBR!3K*U0Zx<3=r8^uLf_Lx`|VF7#HTt%aG-hUOSW88|IK-kleUbo z2Sh-9`Dl>&Uj>$3J`589IptEpr;m=dxrA0|lp2p+^Xrm@cP5(x1hoz-Dl+|}@d~n0 z<Qz01<}SfJ7o^o@s_J#j4RCVbgR7oJZBgS5_{D(2BR;n`1UpM%bS6$9 z(l=UTE)RxQ53%B;=8I0#_K4B-K&Nw+<^mB;kOyG8WLrW_^>l1gwb1xcvL4!FDe>GC z7E7u%`e*kCMY#_RVm;?UEvg(JiS)2|$VjGZs2j#?l?Hh*pnCSvrQ*dY4m!P2&xAdxq<)51BBH*dmq-T5$xO0v#ZIYBk{Lms`Zu3(v1AxzkF<|> zj>!tmlE8-RkjvXHlOciW$SAoD zTFh|WzuHd+o|xMycOg5#A^|wpHHeZnSgA4_%QDT7L^@AIl}G_;7@29Rtb;$FuNJ2- zv$Aj6O@Lz}P5&&3hCi)Gwz)_@UjSOu>7Ys|&cp&c*YG23^Vf+KhF2=*Fg30ozn120^=ow&P(u z8lLSn{_8yKx~NakC~h6eQvi9$+~6=YRP;akPbgWITiIjQ%&Lw7I=d?G!!}81E-{aG zjBs6-Wd%_vEPH|+hl_bFF%Db$_%{9sV)j+k)X^5xf|{k|S+N>FUXo**MS(SCb+~1) z;0gChY4%lCyq51((}{v&)?Z%A%hLN@-6Z8=*Ag?GM_3g1a-{>3o`Fo&`jjaqrn5#p+Fa6!n~aG zEdkYH-XypopkE*yWS4X?hDv;!!wX)85A7brFcEPhJ^&+>*VbU`D~DtT;W{(O#n_x_sIOn zewgZ?c*OVgPtDLy2;=4GcN7T}*-1tdyY}ZTBM*5`?!ZUKH?*FWJz?K_)U-6P^6Abk zb04(r0tzWyac;^4S&)AZAV@6%66er4En>!wvScchVu1^zfEYVWN+8z8ehJJ;CSamk zS5t0imNP;Zx@@-Pz3|{Guai$-bg~KLx@j@$6pfKQOYuUzno>p!v@rwvcRv-9{S56^ zpLtRa52wDmFCyP)-<{TPP)@+r>+{KcS>EQ9kg}p*sOWa-!P00|(HjwOPh>+L=6Woc zMIxD_{MC}dqCJX;kigk;joIh<&8+sg)g(#VIF-@`FOJwq_c3I3N39gdP<&96uT2u| zGm7^?7&%^iB_XD6<3+2e*6Xgb-VVU;8@cjc!GYX$;x!vwk62}X>*rmtf*7hFdpf+8 zF&gJ!AhO@u7-u)xfyj=`SY2=z_&Y#RAMZFMGjJ*Yz3n;a^RnVwh+iPL0&~qon#h!l zc#AG)Cv}KL!U5cdXyKt43i_=pP@c)uU^}eBO)2dOpp(TqwXs4X7zam5u&;E+4GYjL z(FGK!oC0$4y|Bn)LJdtS=|_O9YzI{2KK~h4sRz6#yq%Nk4fB(KVJm zpvYkf^q|0;zQK`D?ve{3<(Wcj1eH+Voqe$G2qn1U;xVyCX|Am{oje7!+SAAxhU~t> zwBIexoRVpyVHGUdO7@KKGSAx|KI^5cNo7>35$q**bZZ3H*oN>Q%@$Qf5{aO;-bq5C zhKl%uxMRcKmji0X^~4vy3x1WjyWe`jw@tZP&RYKf$r=Gna`5 zZRe&1rQ4Rj@Rwg#L7_DGF91bAy1%rKbX~kigiGKfcoY$yy7dMAsMO~Sm5V5snJQMc zU$vST9|z#}S6{bkube<(2QDhAPx7QY_(G}Q^isJ8eKb=&l&h|AU;Ow^Z-qO<(mQ#) zKmusJ5omx4mpoueP0JxKG2NN&=t5Wu`mFBoeOF`LO17a6G}aio-I3`~GWm@*T857c zHDWH9L2bM-aF@F=utYz9sYAucXJC%b&69z+^xeGT|N4V>gzQ^do3z6#R>HdWU&EmS 
ztay~cQCP$bR+2zsptajK?C41>>;Fq_DhjR~X|a5{z(tX8qAK^nQ?ev`dZcak#qpn6 z@@Ddd!B+b48RN^c6Y>%<`1$*AhUPemJ=dD*s?~C|oS02|_qQ^a$?RCJkJxa%v0c?h z8Ev0G1VZID9F(rppu}>#R5Np}V9ymfN|46=Ew>`V@*u=B;q6&?<92U zPVTd0Zp%YHy@#q9)H0pdJ9v#-Lx+C@NX{^7!o~FqNllpMoRfQaVuWnqw`2l!xiy&C zcmyV(ZYjAzR3GY&mXC13V17CQv4Na^D;-($zr(9lQz7ghA?6KOZ|=bg3Seko-)rgj zuSuD&K;)V6hP9j4hg*19IF~hZ|_|m?PBw;l5_C`y*Zqq;5zP`1zuTI#8Ms0EyXdZ^zCD4NiSnp z1ZW=g$jq!Rul+@_z^s!|$Ch~C;67G06})T0yOW<+6bGA55?`q}trAL2Z{0I><|V54 zW6y0d%7Mm$b!OrgcW51!3?c#ApyYN3Fu=5PD=M#e-BJdSR`Gz)Rh;hPu5>J*n!*WK zARmksrA_j2hV7k|2ry&{`uN`I(|+ZqL7-@AiPQZFpzG{m@H~??I>h%`&6F3oOeR=6qVHaf5+3#dS<*QV#j4YmISsogB5{vPygd96_a!90c`|4c7La|4NR#A`Kn^2BF`Wi(<^9Mu!H+)x86U8|l4& z{6N;znXop{a}8h)pk#C?@cj>zblk1WSBx77|R3K&PYrY{D3-UZ&t3|$* zp#hK?;-FgAu|^=amw0jluU9gY)YCjU0p_ES@iP}JAuf+}rWmSm`Rj#hLm`oX>qCLw z9PHr{eNv0M7<_QAJ3R*+2&^sq4e?$!_d*4wK(19H|p8R+AZM@j8~oC9}>5Shqy2OwX1{J@Tu)3I=140JogCb>z?gQ+{T z43vXN!AuPJd~Q*u`;St-Li*`B>AUw9zGFnPUFnQf^6C?Crpc^kEkU^hd+TDRN~%63P^LGX$8)H3vre@ZWc#qpn| z)N0XzLQZfvf*w3r1zC^f^aGDTbYV)&l8GT_YNcNjdLhavy3a1fkHJvgGN2kj^uonD zr{Yh&TT|QaWoX>1^My@m8g};Ud3efk{puD61U@a*VWTcl^Dh>@lEG>A_haE6btlUU z4R#_|98zhEAqA-zPNU1{V?~526#c^lM{{S@2e1hm)EFmb)V}GzCiz$aV!sE^CT496 zjF}UL&DuL13i)nIxb_D6iHQAr%7Zoxya;{+RPs&8hSOI0w)u-+R<=38PL;-^%T8v> z=w+!tb_u@tB+swPIMoOkf*S&%7o?(S0)E1$IPx5;L{rXQ=TBJ-2J2}UR46#@ypzS#Kl_?`ya=d-r z`L08HA-}&E?=W0TK+$~P={^y25Se+ez6(o*s^L)Rt+gH&hrJQ2BJjjahVUP)$=5eE z)ftNSYB$06tS1^ycTr}rRBO!6a%TEfRCivEH2-pCC1+vDQFe}cBI6JWrZ^5>4Hn;W zI~m>a+;yT&#@Hfp+Mm4rVB=$Az$?`FTRh1uP4O)1fkWZEg=jP&QNB*1ZBaI>hP(2G zsi}4C0Ndo!zV@xKx)EbQTw1dG3eWry71bz$3 z)5bMZ>S$i)oVeo7L`zLg%m{Rf-{6n^Pc|#iYG7hI*fAbRFf9FtAHpis;fs5H5T%D^ zeBi*3gl^?7tZUP-9AWGuFt3awoj&8wT6dfw_TpmpB3s}%7tBVQ4ux3EiHT@4{>SL$ zvrTMXBg`xsAT3_HHiB*7ij4)4A@$YV%Im@ZeNO4zvGeRSfs+?(7?8B&$DnPHa8r|k`RH39l} z^N{lP)5y*C*LCC0Wed1~qA8q#{BVVnEh>W^JIuDGm#fs2{0|D3EmGvfuB7~efG0=S zk>JDYghJ^&PGkxs+`?RQLFJ-*hoj>+U>dg(NNqK#o1Lq@&^TmY2)I{_vmoqZd+0Kd zg4h}6h&lI;LD*!hK#3w=sNf3}v=@PiHYn$~<|-va>ND`!;c7U{F4mh zlQe-o(7Z#?KX=+}qVz>u@46zn{I%a>{P|;K91Q4kXha=|n=tckLOFnS!Yjk=^No`C zWQjX;p(tksesUTl$~d*+Ewx52b*T!blssZQqyRGu;J?eYX)@wf&Ep=#CDy^_e4J20 zXkPrz6aNOLSR17|#5p)+6@_fw8E5Egm&sml8h>*ExpWH!1-F7h_x{(6=y|h?^sn9dY2Ds(2}T zCHIl6G{jPnomyH=+r>hYR9h`_WW-=L5~!9gc7uyyKLXvMU;5kMKF4@aQ%dCUK8nX^ zJ4Ibs)kPfjC{+SW3o zC4|Dl`M=Z*4kj;l#!#q+npk0V$z6eZ5N4KsQ)SAm!UfJSM-vU1SgfdgT5w^(@CUAN zLOR}~br*G*o3($CiLW`zDuJ3I6N}4>+#(e7%W7t-ZJv94XUpTL^w<>eK08n>G-F_b z@g+Psdw^X4u9K^S6XN)XnK0pwb|BZJ2t!dRx86)aBiw-p-O9BmH1yY<|REa;bW zB*W5Z()9mkk<;eX0QXI@Uxv95v+&^8a#X9*FTJUGo zrNM3h))fwg58jB2ybBz=#W}Tg1?&fGr~`mVIroBl%be{sg!7W`JDDN?&cr2(9;(RK zd@=@*RBchVObU~Y^beGv9fs4>9S+x-fi5We8JTPzBcYp>YMbY&ioiXZOK5SoqG9e0 zWrx9y+z&>$1ONNc-8Z0VjZGDyaIHUNszrOJ$|U83SrNAs?Cl0nx`v#5gv`s|zmy`q zTbrlJb#a3HM$S{B2#Qku*a~-Kzf)YW8$K+Fm{= z_|tof-DGHuADJNkimU)L=sF+cGnwe^bYyZYS24JOq7-#!|+f1eX{iPsL z?m9q}ApXCVPL%0;08<|hICO}sWrj; zD2=UtD;c%{*1U0={x+A{PVn(pb@!+z>9;Y=m2Q{suuOE^Z2ZE0OAlAQmMA1{2bz5d zHCrwZIlSnZP{XltRm*AbtgV7V4)~2ZG3ofO6puI9P_b9)d;%A^8HJVt90`G0LJ>HRa^mL=W|WP`-5>O2pNkBsbr2L{@t&lTckA3~4#9)FERm*P6O0age+9X%*NfLgU@#I71EAi-`BYgr zjgKS@6*OQ56{Nk@=mGdK3ZkS(X1V1`Q#3#mCIJ^Wc+2`}WX%>>Px#h1vHCko`jS8r zNP5@0LoFKa>P1{Gm?=%gxcL!GCXpg(cW=q z{^BD6ID6Q`$dpWcMbv%3;<@YVZoT{gD-N-$(x14tmTy~;u^n`!4E5czajkL}Xctm& z|5JSTB28vnXw!tAMd{xC@u&;?Xmcf7S0v#x1$Hr?bnqdt`z^}G5x?E!!cOiEHR%^% zsA9)X6Z-a_@_=mlAc#ols?_A6WG}{5-0mZY^xg=uAF%6xGR~t}ZE*0e1X- zlxl9_a{?(z&yg{FSo~`|jWECuC28JcufVdk%4#pC2Yi`#acVX=Akrt-H%h)FITVWT z5bBlxR-UJa>W9LHYW0vZ=hXyO|Oz2b;K5sOtd{jYtQpQ1+cn)MBHV)m=I z0prGo{BVKgZ|D?vbQbitPhc>eEee@I(70o?`_F)aG!g1C=t5hiQ_q5melLm^=z!R% 
zj>_#%Iy&2<BPuw4a^JAl3I%gU$f^?u@mv}cW1 zm(20gExEV+!A^2Bn8L9>BdpWXs!iRZ#=+s)XSp2IN3psB&RRV4dnz1x zO_nVW=r;qYQf`zXgQqe7vcweTWU_{7^bASzF&clS5QUzD`mv`pMZkytc8Gv!MMLRT z>&AV$bc97%IHBPzm-^qw*F{A^?uH=8$65A%P4+^J@{T?QF8`vOIE3{EKm_W(DQXS>Y_sH3=O(b8|P_gL3w6 z0hz+YN37|!i;Zfjy;bW@d`%Vx=WQ4Gs7$JC#c!q!_%qvX60{xNKoDy*%%g=%eVc#%)2*2eKIRkzC~1 zV1O6uHY&Y5)hJS^2L-D$EMFuA7|Ih}DYbYMr@J%9-1wek;92vdP6g|a{L*Jv*7ayu zhsXdocD|5Xk-Z`tl}N%d^Vfmg?_k)2NJ9wTX=bi#MlQvOL}YZyh%`RU)6fad&rzI5 zhLYbVclahm33M8pfk)D1Th1|2BLg zpwDhH;H5DO7;iIFmxzS^J3ode9X(JOeA@c4eh=GUj8tk@Twy|LK?B>5aiLOm!)S-K zA0)KPD^_*z?vL3fW?I|?!G11fH)IR!o%TTk9KXU&rbKC2I+kI1xzl;jnNR?CuwJ^k9IUMy)l~oVau= zz^`=&K%hF^XrT;gJ_b(o%br>i>4V=6{YIL!3XJ`A)l<1LuNEabF?Z+UT6>+*%N%4B z9ucV*JonWDm(DF`q7st>wq&lcIdpC7; zvcu5eQooR9*pCqV0;Eef4AV-iCwI=rKk?ZIj>zlRKc1g3V3JEltxugLw{&s(k(vER zA_zJb$+Rd$k`zF=2dOr7N+0;0hJ)anUHm$Gz(zmBKXkZJp32Gy`iY5vJP)6myF5P; zqb=$Mnmc`&z7W+LMr%dMMzd%Zs{Up=5URyt81vu$axVWXgv1tqY-etS!G`-2HkWWh zK#lOd5{wX9bT$SvfY_WlB?Fg%2~gg^he5KqP!^?C)WVkf(6yI(Kbw?ILraq;E}7T2 zwXKNLljk?ok4Ax8c;hq>t8@P`s1PLk8$56`j|m@&WEouPq=l%g0od#qg(esW%ZugJ z-$Sb=8je!UFrTlNy;<+1*`RnTx{1+kaIF0DRTQLb6;#u6lb|&oV=&R3f2V+5?9xQ= zJOK-w!Vr`*U7fDP%#>RW)ca>9YWytDplNCt4yc?(XL#QN%uJvP)!;wMoD|Ob_Fb)!H z`{bFv$B9@`m-lhnXj^B=kdZeva9o}UA9|4ilLu6el-~SIL zv6UvG9#0m%dEwg)DQYB##i`b}rLB?V4RL}5T4m`O5M0~KK@OV>9OsgKX}eSHHuX6ipdzk&Z2?^K@> zH})-24xH2F=?mXVVH146585jC8G;5zx?hIvXnH4!6;Z&I(|+bF?R}o>fmjfkHtNCQ zyAsw>P|hNHYlANlaWnQhfN z+%~qU9@RiY$i{SQA@Qo@g5#GUZm%hcHzTP_#xoW)7FBG5U~MtLtZ^ZtxFQsOYqPQ@0Cq%mg&5e6cD$t1KMC z1<2`74DdbsLi=3Qc0oEq`7_1pc}j6IG=vG*u?y}TZdMT8T;+8t?SnK>I{;%RhMtVl zfBv-W8c(l{gE6YER2^Xrsiby=+eU^dWo5$21W{3Yr?lKA!@k*X#v=Q}6~EcN=XY6B zoi8LEigGQlL6nlo>7h*~%xuOJd7-Sz7zv_`PTz;xT3uQD&*yGXX^$6e$LWWSSV4ABP{cUy(7$i^P^U>}L z%oIP#{OpmAWXuTV5>!DbE)?CqCtftJEz;vRr>HqFFl1wkM*si-0003&nyVNj|5<`n6qU6`ljJUzV9NGO@k;1S(Mj;?_eF2Nk==@6$$eF zs?J^+TB1Z-YnL&C?O@1`oE%oOEt5VLe1&R9v&IjAkuORhL$*$6sd1ww^0y1CP!nEh zH-!P&U2?FByP_1j&~zhPb7qDtq*jhb1*u2m4`6%4TK#(J-9NiXl4O_Rx~p&b{T zYZv4ctp;XDpf+>|fYaWR5Q;FcZSO!2w7GH&S4Xpz^kVh!ZNJ0jDF5QKJu&WnkzZ(%h_p0S}wKSD_r|G$pB?+`&)- z^iavc?i(tB*RALR?WB}YB@ce%*8kh#0$3aYIho?_R(%^&e4_g1s)UJJZ5X;jY2`-` zoU5FU$iWC!wnEN)CL{wkxD6*!f~wZK1mp9*?)Pbof+QTJ$dTE23kI1+K4;`pC?$7o zQLAR^FcjCgUNx>R1@~F2I%hIonVU}u_;LAcGGo`k)ONY`dVH_9|G1Jf!Iqht03@|N z3C;B0Z{%yU2}#7jG6rXj5@x`F!62Ln8q?`sJr`l~-llzuWT6~8(%KkOb;P_t4XjRK zw56N(Yp9YO%T$aHJ%E`dd_i$i>(y6QiQmw~X-!^r*W{|!JYUh_5i?!-C#6}i@k2I7 zN0>^aI9DGHgz&J&pbs9SJf!N8$^!D&rD7L0mnjf|?07-EBw41_3^5;ko8t7UNe#MbBU3NKFJ4H|yZDSwBl}I&sRK^Eo{uoyt3K=`robL?$JtRP_O!sCXQLim+nFAwh2Sq^odIEVo-iaC+=~3Qj#q`Ua6jcDxzE>;^ zf29M4kkUVq^i&9#C2ak1yxU{K!}2ot9EtamG&FO%SqI~eA`m25G&DxyaaXH1L}`p! 
zv;XK>{S)`fd$IR>QmB9N=BbCgb!2th5UMny`|@ynvIRNoo;sk|Ycb%is~PuaPrId^ z8E*!!F7dU4eNL}M_(ST{@{qDtr4`}7Y{+2(%B ztejLc!8hTj$d-H|Ql?kt^B^=vRJ_YWrXF*?C-POCl*ILtsq8A*Jg&Qib-TfFbLMJ4 zpRLlE_;y>NL`beN6hTj)yaRHA&=DZ8gh}m-CeAgbOgYe;ck_0F5)+F0FOW20T9yre zU&C=mOEb3%lz=OrU*`qo2aUFR#+a4PfW*a$Cc)>(W-`Hj6V@}2CKkHGFLa-Ms(JEZ zwD+GPcOWypTH;p_mtHhqMTzYmJN=m94>2ziKr_ReJ{(QP)o<>5T&+Bi$WQDocKMiE zeQw$(%Bu}Z3xWZ_rYpA$2Cgl|6?w5BhLbaCi(duA?k7*iIb4t1mxW5G?F9Fq~hW@+wx|ou2mOb$79vofz_77$)+|?>+i zGE^7%U;FMP57I@r45W41E6`X_>{zqNDMS4RIQCKJ|El|~y#AjQ3XZq3O zywH{dVG`h!*MZ7KBKh(ZOV@mI`>o` zCvzORomxfP5-W3>9@oF;FXdB=?vxX@Z|Mex!p^dFOn9f-$2KA%bv3ihxw`;rtXf;= z{odP=X;({e6~22WyD3dDlr>5WIyKep>X)eUz*pa{{34s=^0)RzO%?qrhW0|fM6y?S z$*+QR#nh>)6+a~e#O1-X{*2Is693F6KvZTQlnM4*RYnGaGMJeP@?y(9afT;1WGVg4 zirptI%v{AxY^BF~XWZ{Mx-Dk>g8VrwTmS#I5ngZ;OuGgHsAt!UJNrpw0ZjDwDy8^1 za{y0HwU~1O9A$5`%oqgw}5l2?ebVQ7*2V*9f zgS(EZg{|pAb^oENa$l{9yRQ&bzwxTmVid7OvX*lpn1b$#Q$4$hm`ity|C z?xl>K-`4^a$i?OC^bz1q126Z?g1uAYs;^lqJv~jN?rO1PY;c%%!98W4)em^7=Vzob z#AQ~Lf!sV5{l@f*aPTH&H6sIf%v2P9;S$kuI4gAf^9H-swtSx0z4VIzn4_K%rX5tZs6_-dNxsol5Y*^%jYj@C+?CkyfG6l)LPjSPVLa933! zRWy4!&yOaeVWZ$?KFh#9oS>J+T1f)}ec7$1a)xj)`mh)q&5T?1ZrYmFr3e;920-g^ zMbm*Piu;OI(L@1Xk2jXR zR~p1Vt86{PPG0kSRoQo{AzL>#kh#QPLa?GW%1BVl@@B>$cBj-gU3qzenlwI?cZM)R zvS%4TN(4th`8$Zcb8r~D?a7BUdpNo>ipB@d697VNI+67nfnyTCx^|awlJtD!lH#%_ zKFS2pdddr9qyj$1Wjb?nOo2+RQBw?BFt?l|zpwyAK)b&Uo>3Iw7qYBXNkIu7PjUzH zpg}aH20rz6nJ&$k-s&)J3@}s?m7L7~An?=dEDgmJ%GDzJ`-C$)+fNxP#Wxwg=rLq2 zX%U8|cr1>orW`dnN+-w3HG!~%c;ZBQTB$WAvKuR3Br%SQ%1LW`H zLIS8Vf?jY?ySso?(=g;hnK*&g7=F;=e9Ynrke@Tk(vra97)_>H!=fjL-foq0p$R zj5$G>F3d=qN#X};b48j+fp|@2vB|pZPyv;IO8yKzI|9cR0`=RT0!o7>>5*5zPXhB0 zJ=@3%Fb4biJ~VLW5IeGBxB-@4KNQ(`jMsHS{S&Py!+^bRASmkP(j;+oDH*K$&eH!u zE2PifbvRnLOF}p>pp+BHIz|*7(Nz5!UJS-Qpb5m@*InDYpE-AQk}K2-_GpS-$ObRi zr005X#v%1;UiNI~zGnQlH%Hc|=^;LsT1q@>$r}ceKx9@&^%CxZY!nTDj-Y-nS{JX( zWRND)IVL>IE)y$fBj%=QysZ@hsKD+N^e=k8@cRdLeh z$7BG0*5{T49*^Ogyxc$T8A7L%_K!jG$tFAzZdQMrVj-F8&gZLJq3MeTVTHYnzvnX^ zSU}yJwUO=QHZS%{Qe#fmQ4QTO8XE}D&FxC%2WJ|d*4;9WX9qBteA@6ml<&PvPoLH3 zmts;UNBdKC7(Kia^a@lu?hZ(w=hQtXEAPZ_9^&Z5(&X7 z&WdSu70O;sOJ~Xi#H%;nGgip(1o@LZ{e zM-Z2j0hJXG>DXL$zBA@igm17z&`}aucOwb64mDA80-!=lwTIrTOj(I)`uKz{$cKw3 zj3p95Nehcz#g;J=Z%XyG9hR340zy)ZCC!mY#xC{i-2D9ou%$j!fFu#?FY!nCcOlV? 
z8>eEXiWjrbg>PIiYkUsa;!)eoA#@T?7SH^$8<+O_>T=N6GiMOYpPU3Bq-Gp)bE>u1 zN2cX;nQ1KVwdhwEn%xb?$1-=c{z*2h{&JQG1k-(%LNkVwjYJ%${6*j{U*&qF{`6)Y zN=xXwPW@2dAC6+man_`^2*zncPNN$Qga0YI{B=c;9uC1uwXCPG$c?J4sWAYfLxAbTGB4VrxCXTzfR7E<1io%G8p6e-U7`z z^ROROlW~A%;$FA;%+jGcVoeiW>}cNcRtOvi)Uzgn?W>&?yMAP=7zkkIXYue!-~V>0 znPXBUj-g-QVSRKHcnhW2>|Xa@H|~2nBCQ4er*OAGbi5mglu4c9A5NV8ILP3< z%qUxq@q79odrKVjODnw4r9rZ>WB6m(aF5DGaSoI=LX|pYY$lH)9f@5`NQz!nGQA_O zgg-oWs_c+m+UI#p$1A(P5m}FZh2DtBPN3KtG<`#+t{fb086|D=vLu@)V0((t$N$bm z&Sg4~wHpRqNO4}ofbcMlj=0N~E{u@pYttZ$vz;yIp$GLWx4X0*O*8yZn8dvDL+ky@ znaaJjI?=hgchAn6VZOQK7I5;I)4S0r-C*N*IlN<|w^fNJve>LWDG_~!i`P$CCVSzcxdz`GUz^V{@Q6)iH9|$m3|T>LAprz@BbOY> z0$jhH7a^9R6qbqruF)pTECdf>H6Mc75c40ag?b^W@!}Jr*slOs$KTM16pw-+)21l8 z5rz`tXotgge1YqoKaS;o1hPrzolH%5LRD>tL=2TCXN$#*|2k$WE_@3p#(cUHnV9x- z%&^&M<+9fFT#i4FGUWZvIBvv`d9FLGu{|c;2?`KAz*k%lz#yiD2@c$M0?=e^zxJUY zxg_Hhbdy$okJX;g81D+Oo`-B5MVY=T86zhA26dA9!uQYkj|B^v`;Xx*u62Qkg=~{W zBLZq~{#J&{NFPqn>GLA`umDyo`nUG!b+aQ2%k@q(9(0GA-os_S^vC~FdT)g8tRy*k zmKI*!bCcx2@dGZkXHgV{RHjgpOkTt#uqfU~y*f^U9cp{0uXzHb-?~B5<+2#J6mJik{J z#&q`6>S|;9-E)34>azwHzSKI5KD%(e_`7kql56q1MO0+^lRPxTLIY0DT86m3XS=7= z8^k~WOBL!YnLs-eX%)uE$T;Ub;S!#OGLn}s85MRalpE_nOA@LLJ@g6g1%kGMSjak{KstVWcOltGNOuNT zc{VjJ5b|>05kH9KLaMnANO45l#S*Zs^dMAWK^%rIdIl7fpxWQ_;ZQZADW$5>*66i}_`oN4PX$xUxg zMtK)-cLdGqOd4p`7h&%do?O{B3(t3NUulU(1PUOo>tMy**Q7x~dw3{u5CtE*Gt-28 z!?!m?eWg5oe2|vZ{$nuew4j)NSs+M?JI!Oxx*-sU; zKe}-+dt%~(4D5j!`%k3%I~&^zbE_v+BiqhIm3N7e*P}Fm2o{{)l0<#90;~mPQWaj1 z%|7#t%qp64aa$8+NCxM(Q(*qGxbV$q!PZ&w{Ng0~vEut)Q=nF&v_lsqBDp^#14rK1 z_R3Ad2Vk>K+wC-`vM{hlzLtd8|JmyRLEIpMPMaK;4dKmSM;P$*DD$4OC66@thzTAr z6Vv%V1rtXgOFT!dRS&#ffaGhYa}E@Zmjw+FQ(deggsF%&OhgQZ4qY9T6L69oPF(`$ zN#>={?)B+;2i-$cf>k{azb!cerAZA3uL>+^X`;tMe6AC9i?xW}XWJCJ=v zz{;2dw^ZQ;firI3c*krdg^VF%CFU77oWx?qxL35f$YQ%YPF0=fhXg&ciatZ>#yV** zO7vSm-V)qE`$-HN`4X4Tme*EkQzT2nL0x;DhD3H(hM2jxZCDOF|61YIP!D*30Zy%6 zvBN*{Ho;sJc=;jC3PqM-=T>al(@WKwl4^!c7YaUvpksMYfk*sPHTEgSBMS-m1y4?k zjX?1Hs3hqRrDXYlJn3cpXGMXxj8Q~xpNk>N7&-{FAjS2mn>wTZa*Q`V2aMSkBlH<^ zbd8gdPegEj?hdje!Bq7MbaE3A@!C&n)#7V=WTWL9{vRI7Jfk^7AylGn=?wpzItK6o zKUe)3CFOT{om~1jN~|Q=#=PyM4O^FDf+$QPFW^H`pbZ_#`I1%1UQc!b+oT?s!OJ>RV82RA#dwOh} zbaokCGmt)56aGxLEe^9RH?77C<@Tl}+T*~Id#8*qw?$(y?I91aj;+@ih)oS1f`f=! 
z5F8~--!ONMkg&DYwPHnytTE9;Dw!_B6`e?&wAyP->BQ{@2PddMD56=!rGPy6cE8=X z!VtbL@Ur`;%;uEqyI^qvs^#k{fDuxZpmr=D?KmM@;MOa(m;QR-zeK@nVuq(fRRz$h z@ge)gyD4Vuf*l%j-NbX>eiLpYmE^m2%9TXA-IN7E`4;l~{{D@daHU zDkQb?$x|Av2}SAwRDww)cnyM{4cCdz&iXPLB$A{}b&hRxBua)2{_!?gL;?EJ{(r{& zTuHsVdK}i7QN!@^PgN9fR6iY@AC^s*+AIgIqzCHr_Hc2*pWs_xL|+NocqY;|En|S) zdBMD9GTUWG@K^T&$`r2MvIzCQj|ZI^J?o|@X9B8jz)u`ZOr2fsdae`aT)vPdR4 zsiYG2+?wx;CogT976)5S&v@PVudQWJ|2&nQqyv$9enK>YF60y4nabSlXQsyScEwwS z7dgEQ`>ipF$|UL5h-1ezSANSbb`+U_3Sf?O@;*0zEmgl1;?t z7NJRrB6D#dyvne8h8(0I!iUD}OBt1BiB#x$p!;dzO%II+AKU+#q1J>#{DF*`I}-tv zOY_xmAUdJ1Nn^t1&-i(n#NN@Q>hH@+2Pn{D%}p~kso6DcE6AV^*$*6H3-0yTTjrmR zPSPX#5wv1&H-1%FYRN~@#X{$awVm-U@Q%(q=iVD+%9e4zza+x=GCq6=&mB*9a>k5c#e~jmRcy)(M(w);ZfmBBmJ*Xw z<%Bt@!PIoMMbDTzDBBIZ7>Lrz!ZJDxhdpuwg7*d}~ zqq4(xCf`S#78c-)kZcoE$0Lu?HH5DY(tXF$S}LITmq1BTz=Yu)4%+pZU*3j^Dfcz_ z@J&fgO=BlmZ@9x&buwyM)F7l1MmbQ$M6GGWgNi=O-8@P zt;YhEyJcV1icUvW;`<5EqW`xCPQiCB-rrus9!v50cde+{XX;J1)NPH{Xv{F3xrZ5I)rx zO>-m@KxSmj1l%e2P7TV|jp>*`gFk*&$3qaWR{Al?s@uYHw^dMY*M71Og(1vYlqym2|zxs|b1HHmOuNy?OY>!Z43Cujm4?$TE zd9aR5XK84HHfNzrKbD3JIz#RCG?{VJ)k3cm5cgVJ^gpGmmF0!-LYyE*Lx3&UH!m{K z7^v6h)y+-%w8Nyl)UUikHZ!`Ae(wRnKKxjDZ>Q+I2MPGvxTC~eVU_|rTE6_;Kc@zC zv=7apb~SUCQ-7^)y+lK(cAcsetOZl$@#f**k3_`I+Y zxK2S^_b7ImsZD)V#7^HD9>onF=rTwm^dYP>n;&`Ul150!^n^iX)HzMxi(-@%C{f~9 z0+<8myEojm@YwJe7rEfICd#(+lL2_wWDxq23NYysd0>5y!p&p+N`OQ*yowpZXPJ=V zwVJLKHE{La0qT4X&y8Wwn=R1JEvF{tHRT3S=|Rpn$yG#LVOT~>L@5wG5g-rwKB%NQ ztpit?P>O?ag{ZJGKX{_BAaS#n&{yViix6C59GgXpXe3Yn;s!5a^g#WZ^xa!*ethex zc4`_$*vvm^Tmn2NN;BkNK) zwq%nnO35^aPM&M_rLnj^?$Azf5rri{awDz50fu6n-7ZN}D~>SUZ?9<3FRG&s=Ab+_ zD@{>&0X@C3?rkwotPtDI@DHBkeyN`Yxtv!tB#9$Zr|2#`3$g4Nx5qbAk_cE#W*G-c zq8THB+>PH>%-SQ1mzQzjl|!HkaIdrCxVRq@`w?5`iugh@2(HN~GPured96eyph_DO zQd-ssx=e5^r#&pCh%1r5*}5x2E=3$n(VarksyI4Ig(aHr#aFmT%><3H31V-6CSFcU z-?=i$<|t-hQyux>RSIz49ZXYoFd6i>aL&ZFeo2EL670tSt+FlJhHX*FRxAfVsMrG; zM9;OIXPhqvBn2#C4ceeEO#$PK@e)=eied>`Oq}U{gp_?CH`a|GjKkp<)8R*Qf>xU3 zH#`vk^+4}eK(DK&Z0Ks~yh;FMmOXB9NNbN&r@$5MidE|w~#+HMO6aU-9*a=Nekqfd)*#<{md#D04YkWb> zj6s|&P8%x&sRNU$(xS$x@y&b5>>=3L-nO9O`;m&OeUuQ?K+ljKcHuBoWkje`u@#M8 zpLeS=j3Aj2=Z~*lBRA02=oMy#Y)gg@(qjc|eNM`;X5rL*;ImpQ4v?X-SoqU2MYw^d z`5$_h;r;R*3zy5C6f>ylQ|{ktl%VS2oEMACEnU#o;H%ru1`Urt?FcYh+U}dc9om3T zI7C=Zf9QH5{!gW7{#_X;k+&V&5ArJOR_8++Xrn=1G3+w?pBjIo3fv2FE4->@#XdUoatvKO-ngG>Z5^e;H2v z8I{`l`)KBj+vGSiL+3r2ClB^ZkDa31J-Zs9{S?K525N){Yh9yry;)+1Y^=;~thGZ{ znfbbNwYJw>%a10vof@2yHVRGFPZ~ML>wx0+KdLZT%C9cZeV-#JX5|?OvjmhdK|H9m zV$pt8s}Ny!o7_jb$A%@uTbWg>%jNmkin|E^p5QtXhPpH|6hA@9N>jLC@keBvs4?NX zI7v&DDKhs9B2q3582@AmnuWFq-#q`y$Ddk6Yaep^J#)OoQHr z>Hfr-J_?x#6iVl(G2|;G(Snr>mJ5Kg$4sh8)cf51ctKSN9+$4-Na5zlzwm=b{tfPF z=+kZa`s+^6N!>Uzq{u|-e_T}wAOh@t)%Jp6=>LReQhciWV{o@`i)A-I3KmAWORS?7jHW3cD0t5VM>l42Q3jfml`>ioZ z5*r$ik0%x5jr_m5se99 zN6~LSO#Au`nyr6#EUeLxab}pt04k|&h25Cc#4)8d5o&uAZd%#|kcJDg2 z2^hcghs$+@>`CaKqVr~tuhQD#@_GII1OH-ufm6 zr@y8}Dct~xUFu4r1$c0g** zm>DkBuG{Y@AH0z8;}YiPjz=B7&q zZ7qg(1ar#7uRM30D0V9nD(g}s_>>8h=G8j(R?#z}H__?20>fpF#%w$o$L+&>DpSt; z<+nA6Zf6u{-Ulo~ztxq&+h5iM%%kwwJnlZG1|2=4vHxHWyt*o<;2@7gvBYFos1;Ua zXhCFd?C!)w66PEwY2b+KFD;jyZWY&JW&Q}v;AZvhJiS>AkK=FY#rbNFns00-evjb~ z)3LwJ?%4jyjaoYVO?EK50#6y5I5W@RArU1fm) z)q3TZPyZr&)o2+-UGk|X+%b8 z*K63P3SySUx#0GWX4_ctXGf%6i?oH=BLwlJ=DgzvlTu&vS10W61PjH+gi?~ghAzhP zD+40wz%Ht1lc6W%{Q|Me5KS(x3ED$7s;bJ@&rd|gt6oz3v+388ic738S$h;CR_Cz0 zYUo|ckh-7ub{x-s+EHXn}1L z|AAF4Cr~y>3|QJb=H23tzJyjB*&${0=B@21o(2r;ut&L0TrOu!nmGd3xxAR3 zWIs66UjNt76^zGCv=S@Uyo<(`bovH@xJ=SKwCjf$%S3vy-%L01fTzZdvV6Z03CLy~ zOC~qU-i&{y3GUi#yWI3(p8d}hO6%XsJM+R^9K4_9&JDS#=Hmg$M~`nlhC9fMtRSr7 
zMR7J%;4Fx=1>4!7_y?w(0s-JUWA++NBwPhn3idM67IBh9znQu?Jg8EbN)vi~g|OUU z*oZAd(BJn8O@4uWIbMa(V6P?SaLu81#6N07_P5kd(Y}vJxl#1Q>?tHJmeI!vGt<9A zqy^#;UH`aY5#)Rr%*Ej(A?4)8t_N5t`S^_X>n1+r)8%m)H0)oRo@?23dA~%(#_QgsU{U7v$;Ri;8b2A84 zhZKwNcWFPBDny@yi`|D*9ora>H%A|E-&t{57HiXzZ};Khy2Uz%`7MM+0prBJ;vlSy zAbOAt>&6rj)gJMYpiudazPWU2IWVRHlkv+5-13Q?rR}Pz;%wfcPbEdkhUUKQddMx@enc(Xu_OXAar%??obwg>n}qWXBn@S z-{!{5T*lMaJK4$J58KoT+?iIK*sj_G!K4uc@tIgsvMt;QExClz+TKHEZKXbTRA1BYHZy>>%1@rvjm>S88RBc7 z`~%t7^57j;&>xF2-QZ0hmxa@WvC7OlY#T_qwF+!{XkGl#V+&1NnzykA@brZ8T+@q_er=*W?OF5CF2j@F&4E3MCvPGOj?n9J?F#krk4F zf&vliN6kk-+0#}*Hw>|FW_`qVSlk$@*@!o^K8Lqesll%0_?~dfQd@>`(UlAejs+m= zBy-|lfzU&q-s({89)*0!NC62#XbQ6iR>gP8aH%y|N#Byy8(?`5&wiMt38Z$+*3-)) zDmh!2)hm8S&zK=@_7wtX0@^48S8Y|?#0^3QgzV%MC!zxn#c$x)NR&zJh>m7pd|D&C zxzjYu&gS!zv^mzRq#bDfqiF@lkw(YI7bp(MHMu#F#P#>%KM)coR#X*ViNQrhg^H)p zMuZK-W2XbIIQzEQ-)1KHv#Jsu+}V1L`RD>^x`{Y3wrtFKpOrHxKZtyxHcn=aF7xBw zr-?q67}*DUEnQMOgjCCLq0K6OO6k$GY2u6y_}i%IbX?#|@T;StAlRGH60#l4dT zF$9Ut)b;`GBO3|OjRBaoH~naR`tov67qVj6oL?`bO#8tTZZ5wk`>*oNHfSWVckCTG zK4a-#9bchC(W&c*Az;4?#mhsFQDas=$O9|&tV#g%&qOTd@M(PaH_)nURLkSdun|_t zTi+>{BF?7*Ov7#q4JY$h;9b7pv;({c;MQTKIrBG}4LTF%UzTfUxX%Cuh%0PjjOTv%!hCY9n7 z|B{pO_h2kL;&5%HJ02Yub*fWPl~X~AcwLkF)T z^;_;$T_;Kf| zLt--Y>wykfha2#Y2N@HbufXXo#K?)JVZq5#TvaMGqZt}#JlV%-s2-{ut5Gre4U(mT4)yhqOWZ~{Q?*zkR zp(%;v!_*)l3#qXdUT4JhvmdgeHk>fE415Y4)rz8yh$`V-#TYI9Az?Ho$JTM9s(+n& ztl75IREv_TMupl7vt@W7seqgGdb;FgV6p-WPDm@F<^xIdkGPso6i@Gs12V~);Fc+_ zMqsnE1$L_)c)lUNQH|a+RKS-W+KgJduEm2ds${2*yG;-9JXYtkq?HJ$c#F%RLhRE6 ze}%o*gCV2C{LRVmPpS0r7{22GFrQee74bD$)MkAcHBBWzBMY{07c`{1>zgJk1`c(7 z*~+WFiO{)H6(VH%cCkVV8dyScvz&}$->A}{HXF;_Vh2+iU!DQbv}L6XEF%NyWugEo zyw$HU@nE<{5(yj5Ud2d260>8N`FEqH#(H5WE}m28-(hdBeacX1!m`p-#i|L#y(_pY z7TjNS+~?jqq;`ESHO|G3t&Bqo1lO*OCq~fRb_o-{#ohKl8ItNgnh>c1zT^QvJuKyO0o!kXSs`nb_xa5nPUf1M(pc0@L72(> zS{y$xU${_hW!4%t?p8z$q+FP3oJ>q2>sB)NzC##kq9=xwM096^VIpa5S(YbhH~=h_ z=Kg8;JQ}kgZTZ@r&HWtudH|MgM8cpLtLRYsJwDdbp;u~rnsM*xkvmgNK7zwb_qOo2 zkj`v8!J1Oi#sO-U%24Z$HD}U~m;?MOVW+YXGOdGYA%1+L<_s*8Wr@fDTz=MOrf=hh zxHEcRrb5F%HAY?d(qkA2q@FAE|DlXp^B=9q9x3Aj@|XN`bie4X3zd{ppVxxU@P&Ig z=9zdV#*5r07{Km30xH({t?eC#ZkePmi-J9S4IFuo=7xS!&m_FdzRtl$C5fVN#~sFs z2-}WSKEBQ3ywk8(#IP=XPz6E?@(0=e@rk{wkNDb7cCH!`#2bFSG%5+&MlbG{0ULW+ z$kU#ulGfZAofDmaZTB3q78yUHM)=5!WIri;bY?CP%C-eP*6N{Ttos{C_}ph!e|80X zx}Q+lRs2RvK`rZ7d~4F4Mx@Q}<~+#RSW6@NjsB8aHo*}da0%HziYlS3^A`>AOY5wg zXh4T2-}$rC!9rzLlX&O`e$}>7FB0jfxZTNEt_iYnD@xtc2gw#2(7Pb#wghE#Q{o_q z@=~E83#tg8mph>@O0j9QCVgeA?z2pM0CKVP88Fi(6k!ir~WQ$nm@8qNBJc%;a&`|R+ z?{_?ejkPON_ozkyDpd|*{Xe(KU%EEG^I-6+TR+T~(Hne6%z2iOq6fAcaQmfBUBL>j zTXn_;Bl|t#h8j>8aGM&DBBkXH>RMui(*fjr;b3Gz6^!y$@+!zIDY4-Zrzb3}4HzZx zcV)YI3a&$*A~5_65~$Kncyj4j3*)FXCZ{)msR+-WRA9qxg2rliAh6X~n_pr^(3(Xs zhBDzd8pMq#Q>=Dg(U}d#`niJP)qdsOvOyYm2+fwb5VTVmcBr|9EQqPgl^nwF3TTYx z?Qhfg?+BiUWa^?I9m}}EDqGX$X+3^)1Mc?q{_jPI_l(ABKn9C-t_2;cOW)-5uE3bz zWdDj`E8A`Lao>Z{H%&Y#r?`DPxAjZv8l=H0>!IVl0rNR=?pOnkWMa6V*N{!yNsprh zF+OGViz^CcVBEV@UC0%{0N)$!PN{hAESmNha(p*`WhTLO9q9@dq%2bWx`aH-KJvvc*hb&k`FBP{nM=ayXg+`I~yfc4Iz$#+r=ZP>Xf zqC@)xEDA=GfA!>0u9yc)=9R~jhIz&yfN6)XMt=)X2tSJI;~!%zz`yn9n2u{LTq^vO;gy$FC7)Zj<~(NKd1{TLL z9(P987OxR^yMj(gaNXzPxaB994sI?!0IAq?eY>QTn@zooayjRy1wR*-)I7CJw(-Y- zbW{`l+Uj$QHYQUM;#6YKYhzEpWsD zs9Q9;cACoY%LrY{%DGHqRgjz`UZwSKLa#hVmHKgfhq)JNgutQqVS_ea$n`r%PMyae zE`ml9oop!3{QwO)+-6hzxg!LJut@O4wPuGTNSbF6pFa#?tW1Gz?l=#z7zp95JkrHl z6Cp8_uvZT5e~Pr2E<~U2R40$?ZsXmE^LVI~(*p7WtIx8QK^skeERQdrDte@)ZYNRV zCJa_zVL=0M>YQ?u-Km2IU*$ka3jq-si-8RVs-psX&pq8$IZy#g-uF5Zny|&5 zkzsoGEN-Yz@RKV1wB$*i6aeV@C`+5We!-1gsR_rcTiZRYNlAd>r|;U>zUCK$Bcb5$ z)$Qsbh`)as+|TSICb-wBcyVo|m(wKa 
zo^de#e^o5zch2Y2)pthWqFu+<3>B_E-)Y_WBsz;~?>{!=w96V+GKiz<5{b zJCx%RnY9k6z{=%CWHwDr^UWT9cTmfZ{k4v;CUS5m8*#?UOOO;oiDXxtf|^f{H{7Df z2>&V+@Y)Yb#`ZgB0y{rsDSFU`RO9T<(3PnL+$)%2lhvQmiX0@JrDgcN_`GkpbTYAP z2=cTMO#9xQF=volGcc(lwTY1`hWR8~3BJ?Pr`rHm=7+o>i;2aLeufgE5}0Luq1%bA zML4p`&DGe}pM+2m|7G(zMrjPn({81Wb$AvR^oktr|@kLHiQmm zPutlTTC^WTtVbDNsK$Px?92xiA*an~R0A7AwM`J4*}8~mWH#4&kWtKoyiwRM*Qs3; z+)u`6tNw={S?-LHqRWCgPF}@%l^0OWRulT|mD_2R&#I1(cxJQAZ{)l;MXH{?>{ohWfOU*?2uyVa^Sns;PLi z2UV1F4kD>5Pgug7uVEUT1!s#OD9pOxCMaaI3751NGhs>!xp9zfW-Ez|x=byyLP^U$ zcMx(-xx1^zzguNb1559)+ljnnK{ecKhJl|D;eJ0|a)5*ZBP2RE9|M82%R>A%gyl$5 zLx2}q$)xPBUVHt!wDYR%a`t)%k9IpU>0`lUcvnGUUuvZSG{c`&0!U8&FRI};Hr}Vm z4fw{kI`=4ZWTL3c~7voQoq(`{x$)xhlYU40au0eU85kRK}I7+ zL+Q8(hpH0-W06PBFy& zS=?RBR6~>v>8dN}pAw*!{SgNuARst8T8M8bg<<9hLdP2)RPDmxFuroR#Fm~^hYM+o zbMDXZ#h0Yfg<-K_)qM*w{s|khB|fmIRlm%I_`TrTZk?Df+x)`oecE*xJ17|Ln)g)? zb@B?xKMMo%L>|C?drW~B0==L~@Zqi0W72hC0h_#OUWjFC;Cz=fmP6-;(m);AvK5nG zncr49-P;BcAr0aP;tOp1c2$1fK0{t&^iAlcu-&*fOQjlG&QaHy0>Cl_e#ZNQx4f!{ zT(hw7$AOgV9dhFwHVk?q;eTh!<%2SXW_at$p^`MD?T6jIg)|9k z)gLd@1=4V-xha%XRR#Q*+=Wm-^@lFkeNMg7cgwi$TNFN;o}i{YhS9OZ*?Ey%^)*OS7wyXJ5~;NoCE2p#_M_O=pKRGs#q>6S7OU1-rZ}9g{aV@p7ZU42W-?H=Um#90vWQH znCL_qQHCNmn-pyAAP$9=`68J>&30eDy$Z!c)CB(@*~!cqyDQ#-eZD%_jZXrPY(uo^ zVyD+YbcKpYIz@5eUj0S`$mf9NkR)U200ya`oQGmR7$jvuzd1o7(8fNswAf*#zj9;f zh2XmbVGuS&j)i$-#b9Vqgz-1!5R~Ch6lG*skHZ?vYhsAI#R7wPZIVO&8eg`Dm~l-L z7(m6X;8V_OT7m0M*+>~)fTMqBpi3L3DBD)2grg2nJ{+18l{0O+5`_UzmFEGcVg3rs zj0W|XMceU)POdTOm|9Rt)rdMjI}bo!_17bJ*oqSdm=ja1XA5smP*t}`#L4AgG6fBd ze89lk_nDDXhx1ah^dvFK@(oK;@~(?3uRa92A8dQ0p*qHLtpA>?z99@q)mUz@Jj>|~ zwD_dPJeH-mB=^H8?#}P`ETDC&BO1j*z@-r}FsKOuC<9-P6R*oGftABkN}cz>?FgGg*ZL@vb777XnW1g>#ix6n!B z0L-4FpZZpMMVp+J(Tdz_l->tg7+>cr;F3yxl~}#tIh7SVU9q39fA1RF#pnE4?e)CO z==}&8isf0K7j0yRH8s1wgMNxnOH$gr$u_EXX;^&;Q4%+WL(QRX!ZH3t69*qF2Zgd1 zj(E$F_)_C)$=>#+EQR#&n(1!94^kJr_seZr6B$Ms9lx`t1(&bH7qKoZejqmF2ZK6@ zuI7%0G4igbKz7ef5#!aRH_(ac&%`Z&+azn7H)?IkLWjLlSi$5~SkS)8yXXW*kjPZ& zD3>5uO39wFQ?j@Z)T)cIPW&ygY(u12yd)3_CfoM^_4!MLutZT_uSL{U730aU>DWP{Koj& zZy7P_gCfV{&1b4r*r#ZZ3WPsp!d$JMVKQG?vMKeS+=T1yjec4uToP7qKHP4x7tzJS zDwDdH$0_D*84Uq69HWR;k9jcA7V2!?Q~KHP@y~K=y+Z9hNlUc8va*os7crH zGV9-t&*wll2c8JPwY>vd0vYg$`h=z(NEGdxp^e6j1(D*sapC#P`vj0%gI6dAE4XQT zmRmnEb-ZlMcPI7Vb`gIdfj7x^Ku^lEX>0(j(2MZg!_@dSy|MgL@aEn=7pi;W5;|0) z{5ZQ;%?=tuXpA?YnRU|m^nQ7&F~9S$2|%M*6fvVZn}7F)!vEMSF8(e`!G+WCfL`km z5wA#&Cz1!0%CywRZi&~stIWlZObKTfgxK^p6Ieb)Ejo&zpQKT4Z>ZGxz zIG65&l%1LyG{wvi-fyJ>&Z8Bm-^-QI$MtX&kpY`L*5;p`3s8-}1W!dEJT8UAGTCE^ z2L=Bgyyp;wyWV6#RP|h6IXoW_E7~X>-u(vf*$~08HqF*DXOMbSP80nNQ=Mt1rf}RE zF8gvwiFUSji2fyXW&6McN2BN#|Cs&%;9cINyJ+ZfI{>uTA!zj6cZxPR2-MT%AG3Z4 z>PZgA6;PotywvD%$ShGHPUfPe@k^U8o-XC9*);)4b(o(Xl*?n)^4xdNN(Ujbvw7Yl ziB+A1T=o}@6f^#hok8}du!z6wsQ$LII_FEMuY*5XuZGsw+1fXQivG5WaqebXTFQKE zc$*^}z!^O_o>=Ll!RK3D9#T;=<--3n6<)(_N(NyUoy}xAu{T0gk*XdW6c#dJI>yaB z5jk*g%LxU+<_*)um)7xLd|xu&gA&38L>%4(!}#;2hQEcpPU59lh$kGm8d=O2rLxr@ z2dO0(^_iLF`GM8yAksidU+GfM<6|wj9MD08zHAc~$FuA^?F09=D{2Oa(-f`ME&P0> zf%k->Q7SpqtFI46ZVSm%wPKY&s^wQ*$0tsAS+U`s+7)parsc|s7f8N;^e`oyv$nrg z?}TI$wuAqR^k}R;U3MCr6SIlw^IOkizMjS4ex)U0XR#SAiJ{L;*SsRx-v3h0#dKbQu{d&0MEvST4P&9Z)nR;Z21f3PAN z0H)`cRQF;RL(aXNT(Xamw@y!UU!!^yJ4zMUsIc^P^XA>CTq!^y{5}L2W96DYPx1dh zK2&+$pMIYQJ;5G`tO;#pEiZuX-;PN-9Cbx%$|G}9O0AE*t-qv`?`37GVzZbrYI8kg z)8d^HM*IdtmZu(`g6$y_0KjUeE}0aOK9TD(QiVd^b~H;Ua*#&U&W!x~@MYP#lGI*h#GDrj*q#`?6Ms+IdIlrU|H1_R1Xd&wnqRrp1w%P(qg=TOO}L} z74#wiFfe3eN;Log00001L7vG)5j>y%(Wp8{?LF5Ti<-+YjLs=$yWX$stiAqS4RV0> zKJJBNP#g_d@k$7fb*ieR9yqHHp%sjio`vhl5X9%{oz5E{iJ2~6{}3SPpG!!I|M0%4 zf~L@?1@G2f6*Nz!Vy&$J%<~HFGsTu4G}$;!-&|q^##~<;PN*yMglp_hPHU&@c~a=@ 
ze80(1N}u?)^Xg0|#>wL0^%o9Z`lLA(ZM_br!ZZWR1`@w5k2@&bNVa*6(SBTC-~K6; zFAM0sp-X+wcIFXigoBekVe_?-U9zG0Pe~ouzOa@{#H) z)Qp)M%eWvUL$whoXke%Z>+Oeg^Dfj~m^{X$<-a*u5`|xGY{{DyBN82Uf8%LTMjqo0B9Oybt=Ial3)Zd$Q7|v8*qoL^2L`qJJ%&rluDYexqK`0;T$fG zsuVM16m}+jguV>;xOO$w=BU}uDR;2WGlGYgk9KZGQ8jw;BIy;z4XK7FA8;iweqN0| zz`;UC@P-b-g|vdbsVwbaJl~)#?d?tP#|gR$9-vagat8|h9s+F1j3G#gxte)u?}jA9 zfgL`Pb-;0{%nmA^h-J=~$;|8t!GVeQ-zUFlP2?^m8f?_=kE9mf@V;%G4}`uP>_LOm zyh9uZ7S)<&T^EO5h6F+a!H!gFB?AcfH!t<1*|n3rH#W)j@-Z5M>vL>Qhu5L-_{(By z%|a(+Eta0>n;W%N`8P6a0IE17x`W(|1hzCx^xJhy>*nLdLCwMPtDKshM+8j=tbdcW zwV1^FEOERku!=V{S&owLHq6@8ANOsOrs*5Lh5(DbBydm=`TLezSa{+lj2KhJ>U7(N zO#@MqixfvU}4tdtk4AnZQa&A`{9m&YT@_T0f3ys+GGNr zQ}pFbO548Yn2Q6fp?at-ta%OfnuG|Z+_Z!g*<(7^(jcQ}-@ri=T5lyGw8?Q8jKHOgpCl0xq?Ad*>o zmVp|6J&SfZpZJVEW8tO|l;yxw_KDqf9c^uaT!N1 z-$~B^F+k40Ym3WVkQl;E8(6Skhr&?w&b@u)=H!2W*`BY0=r01(5Ne?gW}f6%bW=RS zNHc>kjO6}AJ70Q2g}^5&4cfmoGc=$c&jbxN&h?!|k+%rzDi8ciV}DKlkFW>71M4RPL#Hyo&G4!_9VEGLnTk3TU~GsNTAT*5p| z;(D*%GX;Z*wDVJjwx{g6!D)*f$2Z1Vlb}3AZuB6zsB1GM!VJ^?(Wl=WP}G?dj9w=h z!il3vVEjk*rWnR!>~Ac(gDZQcF!#|iaJx$x{79I)O+!)U@OeDaC`+hxEx-pBhC^^4 z1^&C*Sj(>>QIBs`4OP4dQe4pXZ^?D=PTag%H=pzd{HB;})B~-9ML+ViBvaarPK`NW zxR4t013GrIlDwV?559%O=uZQDi$0jgBhy6Ig?nz5)M)j`@ougA4*8=c<7hyI5SAce@k*wq5Zu z$x9&q{GwDfxcJ|BcvaBh7zL*K5X@ojiNqif-P#Y9yec`U$sZm$VEeXtV0!dvlgL|H z@aqg+v*y{FzrwX6aXRzw$Rk;*M z2VoAoUKt_6`UdyH43o}P5)l{u*8rUF@?zD!1sx~A!`6Xhdvza{g2C>D=4|M0EN zEc*AgJusT8MWhG(V8rMqlj|nEyR+>R;2$jl7)tMAf8MC>4!;YUK=Y1z?8rver_$rt zU4mEXXv523&z&rCMfUJ=58^t^|Ap?yzKV<2Epg7CAbua@atdDGK3iS) zV7j7F?OnsvZXk09jWte{#w5QM;y=y|SG#RS+7UzZINH|uSY`pkr2I(3O7+em@iCQm z^Zm0xWLLJymVGc#lt;-DB{awx6I=pz&l`QAscs4+NCpgw%B2CAHVj96X@DkHyBq4o zobOH@BU<}t^GsjHp`Fn!X5Xye2_y}J@~|6y?889HhA~{>mDMze_&+lt=_8fw&PkNt zy^t8%-PtT~*#8^jO-E_V-pZ0jKd`5PxgHl4+fT$vhFd!pLI>AdD=! zsU|O=8<`C6{~7|1y)nu{1x9eoKAKg2S)&%a@je9wSAl-aBGCcr^(cjvChq5FxYTqN z4!KUBr~?=AbqWn~6>Rmia#(Y}5jd96=!~S7PT2Li{WWkNcYR?Z_Xm{NDV4DkEhUv- z`!-0VF5rhN6YXp)vtaTE`|FJp2;Ska_9&m^`KYJB3|js8u#}20WHx@t@!2alsRU+A zzpomTTlMj@$S`;bQ7R{^J``ypMOU^nOi->VXNfZhugPLL{IBwh;UTXKpahNhH#AAG z*Ui}WDDmYW3%rW+Fc=I78X&w!WoOa#t&{Ydh-aqYKS2g$;8T)$YFOyMF?-V0Tjej0 zEB(=X;a5ns<}87&vGLt&sn>PeEIT<`jmf*3BBDx%hi8@bILOtMy^dgh!({1We>bY& z#J<4c{jo5Q!{A^cH2_LW0&`8*}v|q zX2s}?298NHI<$Ep4lQ|@@afIZ5^=v``NepdDRA>G<0_9rTAaa(+zPf8BcvIg7&AyO(XgjZ*-H^y+IE2=Z?>Xs^#>+{=ZAt}+ z%_lTU&c2L_&vNO0h)AfR^03ogx|I+Ff6eaW81>0{*0f3cgR^iNKZ0$c)7(*LP@|Kr z=}~yes})-7QsXoP&H$QrRNf>?%U{y6x!?KyB>!3Rr z_(G*~6GLs@kV_7&kBN0!;izZoyhAB-hECN)u8305Bx37ic4jL#EeOE0$%h|ns;Ju3 z?=_Jq3U_o=tS_`htlHA)gtBX<;m{6G1#3wTu!!5Lu3VljGZ1f2laOjRdtW9%epz3D za)2Ll8Jxim&%g!&a@KtHM-f3`ObOen&%H9f#VYL>;758o*6DQRJ%F&lWa&tRlh^v2 zO2~l^)vf!GyKYEgsoe=o|+K?6B{sEBQJHD%xTs_%4r>Xla5o7h$X+J z=t>}Kd_x-$4-w)5k5#cF;&v6Z(|@JAohPQq%gy{ibkHh+{tENdYPoobt9Y2-=dJ04 zWnu*ujH6qxp6m!ntTN)Jd2C+A-l%1EiNaLWSq-AdwrJ>yRwzt0Br`1DrhmXcWNn3K zWBAIq)k=Z>uag-UQ5EDZb@}Eq)Jp~TLt(ibZ!4wQ&FJFQn?DMes&_G+v3jYi_*_<; z(7QLhjEA#p9J8M|#MiCJs%Oy@4FBlSS{frro%8;WhGgYJI%nUsTTvTqG!I5|9&+b| z3&TLn!VetN(C{A_VD7)RYGw2qLgg!-ww&o7IMg7A;h&~8y%by$;o4?)m*EM_jMan- zY?j8?kR>(AMgk5Qpd%TKl~Pbbcz|Q8dICUHNY+zx1qN0(42ZBMo6(ToIZM~~$6NSa z;1$8*sm@bl#>x~)aXN%{45{6+e#Y)(;^g7YAlm5b@AG^n^P#s>Uyc*Ur*a{%ycoFm zjpOfm`_~?GpEjJ@edKX*$NRpfLeVwPa3o`i#u(y#kox(GBK3`TyZYAbXv~mGcE;2h zd_we#09r_%X=|k*?j@!m5-d^oOIHxL2P5bByyNIW9NtyZ=Djoy9vxldsOWSqBA<%k z@;x_DCq~iR>{c|q@kG~_oV=&}=lN~Qe?a?aY{YlIR6S9Df$EsfRJgSo!u5uyv!P+G zoej3thkB0IUK-oY)a_{#e6WL=n*AV4_o&m3%x86A544$=y6FsTQ;x1P0qPV(cm3%S zM(vWoR5YY1g7Y<(?VaMK5CmfF;dH9-qVM#S_sd~cu?mH_qcbYV^nnveW0dwak?2?Z zRp?PIF2>lldfIl4HhYEY*`fdUtdfH=JxQYP-zB^)UD*H1c()KolCI*Iw=e5ZXok;! 
zPU*RUN4CNnP%!44$jkurqi7>Xt{ZZ%0v8~O|~kt;6(`C+`MNr4=`9TB0W71MNmKFrbwn>;nyF-xuw7;vdKeKu-~c& z(=|*~Pj$6rRMr^Qeep|TgpQ^l~10}kkyiXU#Uu3A?I?OS+0f7%t z>whuTD|t!tF4&MBoLe%m$4Mdj9p>n5jDKko6#KJGZ#Kf^Q=lx7FLRZ(fhhs(C=b_! z2MrS*?yUdZPNiQ|VZ9#5(_DbsX9PV+cORyB_IePwV=0F+k7V=!8hCNP%=iYT24x-x z%dI~Je?fQO@-$ghQ~SG)g=8>o9UB}4e^6@74<41z9atSm@wc0Xt$@gpZ*a0Ai0}8> zZBWfP)^=#&R6K#`U8_{by;Y71w!0cih;R)`TUnIt(B9jOI5eZ?`UHN@)KRRXSV!N9 zh8~M*<71>nY$83@sdSJW6au#%mQjWeLp{3N?SSjn_ID?{g&DOP-6P%V&5W_ZhNr}^cRGqZ zqzO!^Cf$B9*lusKd0dhbf4geG@_aYH$plCvlqr;W1t1lLO@hATd%ou9l2MDSd9n*8 zviADr;4z*Ot{sgk`31qT>a;WuKFI=Gs!dcxm?Fh6*u-OLhv7#knm6SB zDn|rtOYm--yF|7_4yaf7T~;L&R@W>n+-0U>z^lP^daDV&+700fO;g0q)7oOyp?j@y zkb-_@FDa#luvf74A+By|SP&ErBogeC&QEjfm`*w``&@v~$V2Xwe`?Sl{!l7)z|dN+ zV0ULT0tGv6a4K|zD$dSR!IBO55$1#*=$Iv+?MxGrt|fOrFUH!9(_w9 z*w0`g&z~opK9x{RA$dg6N%zP@)lxo_nI^^ktYXPIVhm5^uV9H!lf~l{0)0%(d2Nl7 zMu%4-*|qbm)E1A39S0vhjcmNKF$$rA$7-88w;~Gw@kJ0iQaZY71i zyD|(S_;36`E7qMY3A5oIJMWcVVn~CSSsib+d!XndoR^+ybOFV)}Iye2HjoN8yUy~&wD0x&I^U?N~Z8vDuZt1;U*2O6GA$WL(i=6nlf2j38f z{0?Hx$aD1EIisfIi90n>s4^$VP>~b>k(diDRBtF6pci(9_5xfvNiBnn%D_8XJpu>} z^|}bl=<=J9r5<)J0d?+>ueQB*2jlK6ebiI01N{`uN$^(wn^=+|ajyXC02b5GrDGW5 z@q4{X8I|2{l2HiCo-hM+b%M`s!62sJ5q!%xB;&qJgKYvNzD=m1 z7aCHd%$nv24RM!&lv+gx&6D@;q4CLJmVO0je6a}enR|Y7{q)HIzv7kK1UFPzOluLuueR99 zL{Xy$(X2jlcaMj|3k8tgcgMc@S*WPWL<2x|f>wXR>u?N547ag@0KMZ*I`S!EMYkYc zm7<^y5GqCSC7wdMiHJjLWVV6;ZVWSHLsCHc6mYwJ3Z5UxsG&VmnX+2RBqPG+wDPF? zF|7FOwMA5ndD5%w$RO5@Swets!~8n1o!_gpI;v@`hR~Zz*r^LE+HFRDsV&-RIEMS; zU7JY^9_O6206s*kQVI2QIi2Q+TbeNTel*a)86ZEH{>$$LzZ=DCRBRzuEW9KZLdD&P zsmFF;O#PU~TTh(e9lfE4wHnn=Ks*cHeK#_*Ea;9>t@9S__S=y)*)n$0gtW!e>Z?Jx z>^dU`X}WKsa@&go{mF78UQA7)zp!om&-yoaJ>NsIk_hTdg*~Q*w;8r1$ptg`?Yj>D zpQdoLwGfVKH|TG@T;?LeFYrTMF65U7o&x4Lmv2jV%)Bd`5D{~X^BfR5+@BA&aa{uG z)3M&ftYUJQaouvMFP0LIahtd>Ld9tW@xR3oO;Z`|jO;FNl?(Us$unWw_+wIm$%vOg zSECS*{0PUR|F-+L^TgpHx_*5zQ6X`VXuUuw4$5<~rpucQ@PQVmm{#T*AP&;(*4Uie zpnYOju^Qn^J_K;MPmF01NEe+;2`9c&))T5G9ug_}g&UI!#wkRKY;-f&OL@0~P%oOB zH5seI(9F}PXp+jFFX$W)5M|nQ+e!^DrdfLpp3klLh{#@kV_R@)`y91|oVzob_9vZV z9)a@C=0zy<<6<%l*jECJ%nec0%%3W1m#|i}Tm~P!B$W4-fu+UkmBYjjAtp&?vlS4a ze-hhWz0okDmLXJPC=D&-3In_Tbk!&Oj#;rO@k54W&Q|TIVEwD1g~Ge@$aVhkWFF|5 zR>(#TGW9n$Rec&nw9~9=V!1v2;90daXF*T~m7&f#?el|Y{0i5@P)73K(KCaV?{UDQ z4LXg8!2*o?OgcvtdeMZhOB5x})yG8Jv}i7>veT|zoDrq z1(c@K$GkJ&3k{KE(?si1kgQ~4UcSW2(rwDm;Z3~Y^X*`vUVk%?l`r_L*+OK}yR3+- z4bThfx1tZR5Wi@HiC%rL#}vsat_vb8JjOMs0#ZQJx~n?k{y>-t{O!Xp;qKWyecCd1 z42<&e6WB<|pu<_N#A5ta_L8?vHa1DRTS7kbCqo%CZ~myWk~f0uPM>Vy9tM4aPoYFj za+{ok)$98#sa|stIKMl-qL5k*T-{5v*c0g0lw5C+0>`n)C-MgEeHF$UxT=tR+4B4C zpQ`L-c5^<5{$O2y1ZtCN-lNg)*e%4#ovPCFy%>9NAlK{H@Pt}mRI49$n@mKXc_En| z@GauR1&SlgNL6jK>Xktx31xH--OF&%eXQ=dJ{*-| z$j=sHO-!Ejs3f92$LD=ezbKj?Ol|T*Os@V}#c&58z#&G3FR7%DTEsRD`6-O|7Mwq> z>nN&2Br127HbWn7a)jAg9ymrEo0|km5LLTJR+)eYhnnL6y zw!^Xf_*0|^O+?M{t5^gOtAI2&i85xP$#ejU2OS+fr+3~kcS~*yrg5rRL2xbC#@hrZ zFuXfLJ6M^p0MepK&f^3}*U9nNND0!8c0{iGOmw1yIFP3d$x`I9>a?yFjT+#=o_ZTI zfJ}6Lr-9lK8eOf=%Nt6?UuFb1M508;gH5wW`(05-iGWjv_EK;D?o z6O#PO!Uk@8n5qPw{>mpNc(slb5sX7;lX zFwMTeD6{4IR^vuIz=?|>!!YSXCo?N8LYSJlr zysa;fpm7?(5I}JRH0*mH+7uw>80ho{TWMAJb85!lR!F?5TXvfF*^ zF03J5*P+4QDzO=LJellKuYh_fesZDuqK|u&9q+48^gKh%w>VL z1{l5HArrPkD6_%(#obwakZu1vTGmG`qnVW`TnrBU*fTsa= z;$!7YhTKc`iuaH_+>3E-U&HlpA+OHVYc(IGyvy8v&=T(ra81KwMZ>XWQy{~Aymp5r zI(%*x%UV-^%NHMjRg0rHMD+Oryw&P6em|8Cv{-OMWWSS(v1p*=t2>|(*RyGE>7Rke zj7s(L0jA&YF5%8v=3xtEIo`w)3KrTO)tC;+uA8sI8R9WM+0%__NX#^!XHO1?HYv+6UNxL!ro%gn2x(*Qox>~YN=jjeYwhcv^B{X$z0 z5KsSTZf{X{DUy|d&`I@$3>!Y+_M zQM!k*xJRd25l2O_&U#Vr`pnE0K_ceH-W8}ymH^ur6A{DlyN#if$| 
zM@TbdwHSJ-=s(v41oHRH2|y9>@7(>?Nh~FTy*ivUI-(+E(W~O5l|+M|un}!Sn`h+L zAD0hL7@F$e)dK!o&fi9c_(_xf%EjtI>6500@K(97>TzvW+l125W_W^~#x3wFRl9fB zUcj8|9z}=Gxas^F(#eO4U_G$Jl<;z1Nl%US{MlYehHm1>_D#oQQj!!s)081-Nk=*O zm3y=Kosn`Tcrn)~{m7v+|LXJK3~Q(C=pPXIw-lg3A`h(gR+;)VhWE`>z=2p*LC08D zU?GZtvQL@TG$5hFR{CpWH==~*+=^j!$19`ZljhZ8oc(x+^~dp%&#CtF`0IV(O&5TA z2b=V2H61=+WyUZdXPQteEFH=V5$-)v5rTg^TF{cmKkPrn)nkXKk5L<aT9fXwjdNW90$K? zoI%q?G&DuvBCcaMQ+TJhF^^ulrsn+yC)1ilgKm#aXtaK;w9Le(-+$PLEw{;`1HJ-k zPz#Urr~DO(yurf@K)I1(w>eomNskwBo9Qt^RKcz5jU>2Y8{s!_gfDZa{fpt5uZ)ah zkJV-G(nEF3>DLZNQDn!G>u>|($M3P{QruqIv=NB8;J8Yk&P0BazVcOGE{`fHWNQGg zCw#aj^^p2;;|$__Y{YAtFcO)c4JfmTfC0_meX}+x&J|Pgo%q z)9RxRCyVeU`18OlLxn5(I4Zi#kV(^z_lT<~Arg#>i}UQQJ}H=@DSInpIKK z@50LunJtm!Lj7-%0b51>{gZm^rKP=u_5o;vOT2WlOYijyTck}DA3DEgKFW8TUYQt% zXQYvh*u%HH-;SU}G!s%z@b?Q-Tlx8&Pk_W1*A_oYZ4P+$`yC|<+ZYD*U7qyet;+a8 z=Uq0-%q3JVo*M-Ik0SmCMTRHl`?L?I$($$s7JMt9jFuXCS`jJn)|{xH1W zU}5+vid8uwWG?^v+Gznu3G4a6&}pEPDYGm7v~L_AbJrj3BG?%5s|fDr+=!%L@A}$y zVx!tJYbb924p{fPz!&)QM{`k_!;YkfI#J3ye8D6bbCxHdMqfd_s&_z^)prO0iDF0Rv&)Y^ZAJglQpKbNO)nJvc>~3gzzl3c? ztP@j_Vr4OmNrQ}I%OhBqw)2osE^1v97*8=(Twot$Z8V+9SoT^!FTl_No(6H?nw)UmR70D8ljH$Pya4dK-au(}z+B|r4?dTJITvQ0 z*?>+DbDb_Qe83?O#b7I8t#7rZ@BypfgHnQR!a;y``;$#sGRTwuGhf0+aI+T@vT3!~ z^u*<}e?8qlmSn`BZSSpz>nzG<%^Bg2RTy`*lOCXiVP18itVHQr2u#4Mr^tf{G1QUL z9Bdg$WreOfWh8ps#&I14?!2?V4U7wVBx+WVvJ|XPa=;I00}bi!PT=aVmZK0=JGF5d zxsM#)qmffgE_y1q1E>6pWglxPGbdM)IH@J0jk2#sz9TL0wTGY6s>4k73;kqGsn@ch zJQ!uAZsJ%U5heAe&eNWs+ko{=mEP@_azj|9U}g#)@mTIyAB@;|;HuYPYKsaDlkub3 z3iYcCF54wasO;YcfE7W6jmL(wj!dknsOizLcw-#Pbeq~R3GfzHNcqNaGFcsII#33x zv1HX~=Ij!1`tx4-?~7>6xXXm-tZ%aN@=CU6;ivo;c&|yNIE1eJ4@lF}AGeNCH`t8o z*vTAwg*w$lTN5Oc@aUKIK_*PSS}mQADDol_43{1UjvU8T;DL8kypZGZ36Ot->hGY3 zgl;bkWv4<5ceb-vb?A<3O;65xq;Uo8H{uvTO=sYzO1^kYx%yR?S#wRgyzL(H6 zOMwucqzd1R#H{M=&VD;{v7|iA6LGJw=8a?&Yr^Y#4;QQyT9Q%JYdm>WGF^Ko*P=o~ zG4eEC_F~9>MqDYye`)e7{_j0dW!8mWZq@!$^YH3TY8p!(q1vU%tMd#Cowtz`i;s2c z*I)x@vL9u>;$03z1!f&L@WZn5Gi>$Yd}+m~0$M!RjcE{W*0hR2T{BrbYFZxcI^(DW zLClRqbHLCtU?Z){`HhzVoCi~Hd%R%B^TX7x$~?3(nf>`y%)x^_Q5-2myzxtH+FMc# z#=Mf3Sb?-5l`nt(3QEtH8#0mhFk-e&GYV6lV&vhsBGpMT)D5CW4N9p4UHku_WQS`7 zjXwQI1EGVgoGf<(CEIDD8a2_ab%?twGTmV_b2Jx$*b329iQWTevveEyGki`pM_m4? 
z+a}SI;M01?nGTL`N!lRaNq%2}!5#_FA;i-xNQnf?5d9|!Qtn-CL5k<0Nhu2g^9E6w z;xa&=y{!=*N3J!Mp;as+P1jT^qtMrm#VYrNyzaBUpNDV{>VJ@O3UPk!uSXTEg%JBC zfi{Y}^iC>(FhUwUhzQYri4F#4NN{i6LH z`w25+CiKAHkR7>Q-d*nibk^PI-Gm0%8^Z+hkXl`3V{J_ii$3k7uh1komVs@Pn0ITk zvK;LTbGS9Ag>v@%`!u@ZL(c6i%N&%jN257|awQvSg@(~aECRqNauFGtOkYSrTelGfD+0EBLAwqT z5PbGeDC*kVc`oOQgsukH1(&xcd_HiV_d(hVlU{R+dMvv(Yd9l?-J<%$k|-2dh5PaW zC*-P~?Xt`+vjX*&+k!?pfND3Q>#luHrRzxH`w9bL08&|x!ARyO5iS@lXSE^vM@OhC z0X02cgyz72Ia=wod^{hc@4u>UdG4t97~j1qgYo-S#oU$D^co*217R%(d+9QX3fq~6 zlLoN$3g`ZTk8@{CCxrV8ea0ti#aye?1VbgqLe4@AKTd+Rb(1d+L8%ilv42{L8WjV$ zFg|Bs8?LLB!|3op)_JwJZ)vSRvQ{CpS=SxCjPv9bi247n+Gw$FBLvU?%>?Cx@T7G~ zPkDiwh*#xiT#j#|c9q3{%^Iumkz#Z^WiBv~u?o~h*HM^T!Amu!dUrr{;j&d(WJ8~H zk<$167o$|Frj|PO?sl-@K{qL7)K`LUk{I}gVy;b9T@0&|l+usV z=a~Qjkl$tO?N(QSQYa$?)!WO{yBPkd7s%`NB!92+Wf>8~bw64rE$p?}2)PxQGx~pC zEvM#wgNKq_y_|7MYfSLccCif?LOl{0ki(*r&cQ70fHnUttWsohgkP{G8I#m~vF8p5vd}j{7oJwFD^t!bMc}@yN*yjb(D6zhp#5a?J`;d zj#eLAp3V&#bE4FaTQ=Bo(yBWUAgu|91`tY(68e=*twPhx{BH$m%_u+wUS0T>Ge>y6 zPoh0(@zJ{S6w{yiQJ>OvjgZJI{GnYIEbzuTTwz`17}AVhm%p^j^i9@Eeyxhv#~R;5 zN<4ho-z5@03mKJ}|7k)+EeCafsME0V&B>7)-#PKso6xO<|y61Wr8{nBmj6!eENp<7n4Xl&w$m2|mK}a_3uVhA%6wF_qGG0-hoSmnBNRI5+DNvw(|LAp zMW>GcI$OQwcUR$rfzT3PyLER|e~z^q#vSRaV=;!d{dVc+>;Pt-Md@#=a;4BHa1Ho@;OB#}7;P_)q(m1jmc%k2#r6HFG%Hn?zGb`dS% zc}=qn;qPQu5CG9)S4LtLGuVc!IBQKDy?|=`H}zJY_$;6)fZNDw&}Auv%TDr}AIzgT z*&QYzoPF`&xDD{`5{`1HPHg@hierc zqUIkX` ziJKQ{uC$!vE-n#$rt6p9KQx*QfISxzh&A^B{e=mZH;z~KeM>S_#!pAr!VGY7CX#KZ z!<+96Hl$$>dfYQ@UEipqR}%BCluY=6VzoGLEio}K=1DD^JUfz-%JJxN zfS-y6`Y8N>uy|%eFfL91W{xrEpJ*vQRjROqK=X$vjkNTnW#9tG?k!9@C*Gg7L_zIp z#PfenxuQ;c$CeDPAL??Uyx=s@@?sn zl`=|j@AgkAJ`BU(q_cO5+iojgCMdPn`13U%w@oYdi*Y?VGEcp}Z+5C8n~f)|Npx#b zJ=*kI!u`R{7ciTG=2_h#h>3Fk(+>)k%yuqskL$4Y=xse1CyoWxyD28yb9FIT77%=D zCYy`PGu(-Gg#6l!u+@;0Jtp%R%}Y^nnRn(XjAF5jWDqyIQ8p`!VSI2ze3W+!5kie& z1}I)4a6Oy&ovBL$0cKY{Njhm%I5*HdG6W1+;s4I*J5Lm&{u6MIz_l~6#^Na)clLmz z{zQat?hHh+oqg{+AB3EhW5kABZ?eQ&>DAj^+Lnn+rPjc7>`ZrJGo!bA6r#Jk>8q6< z6q8qZAb`VCCijx`4GzwhN;Xog_E1-_jA|k<)#TRgo}`p1vNe#&tBjB~G&U~1EdE*$ z0$+_Qo<&y0(=w<+B z-VvyI2EQi{;XD{c9qZB^{9OUO2NABjr(FMxW9%Cel70|P*hJBt;6hwBt!JL$oRNn+ zTJ9_Aq7Gz3fBG@+lR+Kj3rBp5ASWHjQYrO`+%azJ|3#U^Vf%y%7d!48Q98;6g46En z@s{*Dz*nl$^*J>!2-b?7iR7ayi;%B8<#bOFxju2TYpZ` zS=c@55g+}%HB>cPyp4U(jMVuVi^=_Z*KgHzX;v-T$^y-&7;DW>OSVw-6x!R;v!{g= z2^`G|bHhVE%1OjleZQQUzc4#6?zp_)Yh6+orVx#}g>Bcd@g9@(#6Digk1jw>u@(|71;tBQje!ly8*PP(}pa zzgzzneB1wkZ21p&gV^-G$asiXtVMn2l0AYPt;O8E4R1HyQ+wyLIX(SV)=`_L^MpXp z-z{j1L-4K=gSHO?Lv^OmhE9E@B|Mfxum4p7JcZ(+%WQQZ$M(G~$%rU0ge!W(Nw_x1 z`V3<}{dd9+zMsK?B7&|FKF9Sr_4|5KFrkXAx=%c0ot-v*Nxuu}W}-@-e-Xh{^v<%8 z-Ou>rO_~Pxouh;tWeatYPh63(`54yBH609K;z7a)KJl#Kc`bav0W&0EF{|BVy_Z;} ztZa|ftk%6@8$qB)D&rHJ(nzEno9XIE1j)@*ZawA^1ZYKqy|adeKzq}1|Ji6B)ZVZ@ z)z)})M2N1TKGhkR(^~5H(p)RF@-MpSPHfZ?I=6T3abq&7^pG0I6ur_d{4KX_MBWGW z*$3NZG0~?xR&678RyDx?{-Mp-ft|hlx1x75%^<7b^-V8D*|eO&EVoTs18X>-XW2L zz@s!)lMup6?J0z4lSov(ExckULmg|NafcwWmQKn`Y2s8-*w88`RY;*>Hz^C13L7}X zD$wy>{CS7A=S({gjpBaC2mBP!(^x9TTGi7-`!mx=9k`d{#DXX`DFBXbz9l00<0>u2 zS6!>Uc>WF)X9^`!YD2yyijLNLbV>vy%^%msvE;x`b42UqnR>RqiGT}6wO{d0Q$!~L zzlS??F94q3XCt&$WpQtIvs~UqvwKKW+rKtV&GcO%)g!gGE-$5Q)i)s$Ho!08t{)u~YWT`ux5WWz92I8;BKuzyKurZd{g)Vv8F|b%c<3b zGxLIu4&MH7)SP!xnhWX8>e>?EF7~_yU&759omBM!k9RUedO8sa5LM(QGI2BwhJB`Q zM(u0%(UafSv|^?T)`0Pg^7V!pz$yLXj+2V~2gh(tNh`3;Fiz7uB!?EaEU*I6&!ORZ zze0CjQOexvA5MpiOAB!rb~mAf=_tblj_|}q?GtJ^wP)8ZuLlfW*pH+Z2Bt7v;#UGQ z_{+k4t3b{bI^7SNul8$CZ@MRH%Dv#7VbM_1m7?shk+5#TQqu%%SfP45pKX9jC#FUg zI_h|Z@OHnOa@Erv9r05V5A8R|lc`^6UGl&Pyv_3psQmWc*@0NtrxV&q)NSs}z&A!*LEMVDUBOWmB`_^SnX*2{*7J!4 
zv_ab!B2|}(?Z1mMx}C65J;i5L;>X-(X1GW~cq6NdPFm;ozRS>l`@9t)*0vT zG90~c_zSjuxM$W7d(GIwh_pTK837;@#Zm-;WlL} zwvbDDy$KQTq%_mPNz}gOApTw9^l?b-=2p@0pUpGx;b3;(s{E%yBP?p^daA6}qewAC zQ~rG_W0@WkdXt(+`zlp1ocu2o^bKzktQ45>gCTP9K- zSwo~`*T;7ymQ&$EO-hR;b|05LgN!{6{!v>kD}aX)^csTMawcsyI_<{BspNRv8SYNV zj8(P92+aCT4-5{5gzs9IIL{8{7&ILTW^wiA#EoxjMu*9;gPjn*rjKK;4}Ab{z$JJ8 z;JeI2gm7#}VjUC3h*v$|(JtZ?F>p+X#YzrOkpKBOe6n~f?S}3IYtmjA@P&PF5Yd!Q zAC%$r5r+o^F!6JNJ zRNK0z%n_oXj*5R#NPI?c4W7r`2=)dW&-OoEFl{HAK7g=ai>0hXtx-ECN*gHQCCoVfGsFEDZ(@x&A*LXxaD2n!L>bCqp-aa zNAGaX$MI^RjG&3Ro}1YfyIurQyV?~BN)C{TV{SY>H&ofc1?hd#b8uz2FR&YIHlCtR zY>@Si)BK>kZs^A)cmC^CG0hH{r8ls7HFM+YP-)`%oDq;+r7u>3V3Zw+M6{K5Zy&Ae zE(OmmfoaJ*)qS_OLFgkM1X1@KWb@Rsl)k|u7! z*)_ItH1Zy!05r*AYNPui^^RnZjS--uRwtX4$nhaed>B1kv2}1e$C4YktZh1%Ei8gK zAkf_e3Mg>{$@s4w*E+?;$f?-WLv*2l6yv(-n$O4sFJCxYBTbN|k)i0C9kPY@_16Z7 zeMra9%{4KrGsc8bT;bJ_6^!~2k;XH(f~(Vdx4oV zS>FW5L7jnm!{5j>6kBvy`bM$4o_2i{IU`)%Ue|nARco23Zj+dDdDox!a;eyiJetN_`A`D%?GEc?|2jECGccOD!JJxq-E^ zV{u`ddehb``_Silc$Yi*O`&_Azzs{WY#2ib+j*Ii2AefpcH2GUUwthKgErsIb(x(| zxxITpGpqI!dn{4V>LKxGuX>)Q&!o%*jqZ;yv~{&12=+jKQ!^JbI^MWnqsqSPA|D+i z4p_sS4n^@dDWTF@X67K@CTXR{i2)?3dc?n9dWFYAk^96N$ICo5BuIF(GGMk=t*r}J zBgb9;$UNe%x9m7vR=@h>hXLT|ZEcJ7iS4_?L8|!LG`(^U1E2zvf2#!p>}|=NSx%so z@jq>i-+7Yl=B4u@qIQ!{itDx9!NE$Hv5qw3;krso>sk%*nPN$<=7$m0S}veUvncna z-H!Z8_t3-%my`O{bY+cEpSOG40cgx~_h#&ZhC7gRs#g!t{zj%p-jC$E}WDk)t79O_`qY$;+v0^IfX zllh3n`dyNmV`(r%x+%q?cDN{K198DSUdhQqyg-ezk))j*>?bLcN@C7~L~D=S*da5a z$~-F5PT7ZFD7RO{b;XcBA`J;V{g-?tfj2is<*j!~lK7>iAo}R+|5fD6GSZTW(%o}- znOnJUcc$vo_m9?JVQ`A$yeezha8&*XkiRN@ahxLCz>HyeOCO)CMpsyVA0ZHp^$(?q zS!eR=0BJf+!-oK?d$Qkg6U2G+VjML9vaR<1(#)Qgv3qX=O3?H@*MDJ3;__#qu{7%O ziK*@t5g%CMto-3|u$_;&BmsGZty%J0H_g=eTfNFZ&kW?!<4gYOHsh}sP-Nu(z&r_ESv0fpF{(44VBU+cmuW+>M&6 z#+!=x`URc>|Qbx~oZZFM>$#=(Jt14UtXA$w(?od$RgaqS!s(^FB z{mlV_uO)mJR2?bC&mn*a5gRu9T}2zb^=E6I$FY3VS_Ln@M&ch)!d^4QF8H7uzS&^wCTI~tZ7ZR)l zdX<1m1H`J0&Ow)xIEGR7R{U?fwU6*AZrKrZVN~j7cehcVg`Z+C0%f0vQQusf514J> z8O{83ZxS97CH_0rZ~g^eTqC;!R6ei@&LU*(D4=l^3c!k}!7$8&sAH zo}9Zc(o)fveOU(Im{JpI;`m3_&ZCQE&8LZ0j3qs-m<{abl^$i~|S z*bX;x`jhYWngsw#yXpzlL zq)=?C7P5v0n|#{7;tRG>CT=r8b31J3C;$Wh9{2VwR{+q}zgirLEL9^seB;q!8}!?N zM0P>5({1}|2$+WAb26a)q`k`icW9Ou?x*T~gUEyTQ_KT3yjR34EKH)Ljvse&EUmZi z{;{LVpp^c=<;bQqm{xlItb1SU*%e(N4KkjTb~jk;01H574yu8I*sPZM%N}9P0M|U6 z8c16(wBdB2&{xu+ny@V4sa}wUh+!QS(=$*ueD%Ljs(oaMMnX*u^qoS5J&PE5^I<_( zmzq=CrCa&C@b(dBN-L^6kT5B_U}vGQt$Y6|**dUj-CKsGqVOF$O?|nFNgHIdQTIXZ z84{-*?Tk@hSPy?tWe(7dOg?nng9988!>Xo(c?f5#9Vi9>jpXk(;&#Y^Yh1uRb_R7t ziug@7w(@|vjraqm)l~4<`&0yl@i>B?(%vBopfU_-6XrGE8C))@;Q;*05s-{Z3okL{ z`auirc8_B6ywm7yD4NOksE+Plz2j0iHSXW}-VYZvxMY_9_e1u7*N)0=sOBz<+}se; zpiyRp4Ye)8qO+cpZd=n;omrkdGc#5AaxWj9X$9tpl^K{`23#1g+Y_aqx{vzvL~|xg zb1va&i63;H;M=GYYnSahzt`?=mys$%0QMf|SijA5rZP_X6s<*`-~EFT$L#Go>-VTn zk|}5!5|GV1piadnu2#J>8^-7)yM|3 z7E?(+EMDeP5QZf7p0~aEv~dkbr!(*_swXKcVJ^{ZYvOr<@1&`cOdvml^4DuTzFEP~ zwu#Xd20&c(W&NX1|CVHgLM$2%`2*-LUz8$i7uRwt5+vh6Vc=LC9Gw}Oi0rmJ|E>I) zoXG!h-EYBc5-2Hz{N@mnjQKd}1ie3qXt6EdOV>4ySp}AHHR;a+l*lHrSdUh^<4cIYQ4Qo`4ZBy=js>>WdinHyd~!Z*Xc50e`$?V-m^w zg={6_h)+3DLFWVAGt8b}8q%5mS&3dY=0;a$Zf{8}RKaf6sDVex zm*l3WXeOm`F!2{nQp0PEyUE5q(DO-4C6^zQ$`eCKlDTi^^I$gI*}Z5Vg)M{&({@4x zo#zQLBdD`f)<8=v6eV9}Muh2xZg}}1>gtP=J#-#*{z>HP%2xi0wW;}Z>21fcFhAo~ z>Z`Wl?Jg=FwbE;kAbY=f4bLiEH&nWK#B&ukD_eJX)XB{nu~r^HDwrejqM=cLGLsOk zD;^z>jyY{B6SE)YfDs0@YTT;O;M7mwif>8gkuPh!;Czih0(N|qeKN)<+M8d61BEEng25uTEHWN55;En>a=?!&bE3S`n1;1wn~h)(hwj8KvJ8#D{L@8 z=#xhMcqe3t=QK47B&nv7*ulM6^U7)mMkK$#Piit7aZ4dAW7DY-Si-!UDG`srl#JZK z+%VGY6$9-2y)x&8ly0pfqprM!OBzzOF0cBO0_hgne7o^_lO20A7`-6VeZ(y7qKTst 
z9_TP_0C&qYaX7-15oHc>bY0}4WAjtHJ&nL;vW8h))6v*$y>udXsnT}S0DS&>g@#pD zmJ<+K=ltqQC@M?-s2AimJ*1Oy)`DBS%-zj3D+&t$uIdUZu$;+6@du5~oFDBDto65# zE!PYiVhl&%`&+la(Q>Xgi<>@EhN z(jBfT(bNQ@|6dUnjzAshd~u?JFyx_L9`{tX?}+(ac$|@vdcX4%A|LsDMfr)CwTs;z zdxZ@SBrNVcMvkxN=T{8=pp8i@CI@eHs8c#SQi5R1UEd7Nb7Ev{HBT{hpuSeybi}c4 z=YB7ZP9zaEE8XR^y%WY?JS*{v4onn0?+w7N`TRe}iqP0$C!8kM4tWhXH`ZAq8T?W2zY|^A_SI%+ ztMk6i%bE znMU6~Wnt#(*BQ39+ zP?D2!pFmh+RF5wudd=0dsu^I2f?JRq3%@-jx%iO`9v~&IsBO&OD=qpLOlH0`IR@Fg zcs61u?uMnT7!34_M!VUV&;U23inD3Xya_g3naBYglyT;+a%cRV<-(&~$s1%|W($X_ zSEJ&#|1x7PsyfV58~fm zrJ1dEJUF-iesbl0s~$>hBULJZrnEX`*3#`;nU7F(fgxvzAL8|9vD=fmuKiaq`9;-G zr8Em`@k?Cl>Ar6}V_~b+Sv#78Rkj-SG3O;LtwT>gvvZ6FtiXnj=a1nwidnXJ=wX@K zXuGwOTJK{;i9=VbuX#$3(2N)C#=Eg|adFJ5FPRi$B+i*mbAX{#UEwyonPTKS@9S4K34K0<^e2u}AdG z#K64ls@d00vLKQO6}-zThm&H~&IyMea9KQJL1yGlgRUKY(9kV!MH_OKJ&BwX!8iZ6 z=`c%=Z!GwKJg$%hQTZ821X>3*V7fs7Ffe3eavcBw000010iNnaM}f%o^H3g<#`-nuWom;z5F~2*gN}Y$BZwv`X@J?u{W@FrSzbW% zw8|vI0)wd68WM<8d02N*aO@I~`2ER)KOgkM?q^Q#dx^-sgW-e!0__U`O#|_QhPo20 zg1@LH+7QTTYEnE3BP2MCpSq3ag}hNH<_G{jEn{Mu-`{WW{2a>gXByS-H2B3({hYNhR&SKv=}cI+CXn57_)CSpo}r;tu=g2?hb*b>}8vz=Cm z|L2>I#mtOeoA2Bs+_~ap6mCKt#{@YUr8=hp3C@)TNHNf6PwFjRm(?o2+9Wf&*M@gb z;a7Y+jxg=nj(6gKLuW^qq}K>UI-L@Al->WtizAq3IS_z27TI_Hi=Kx=?;7PS^~+%LXq{JYeTdQcc3@iv z-i9K-ecZUnvTF-&O5saKCX#1$z8Y3?O+F1@tdw6zJqE z_!I5_#J>=7}eml-3y=R~-=#W1&YzhI zg47mb!OUYHl5(j6>8zn*$KB6(<}aGM%e2PWxdtm>&Uaf0~Hv4GS0QZ9r_po zZGFBo&Y%oxr+nMp2m=0(oPX?sSq&|fgwA5>l1VH;31??zKyXANq$FM_P`)izD~okh$s$M_PWdVNcJEzYDte3v6*ks z-oCP|<-%>yOnkP(soYKV4YxF5lSvMrOnYjKxU1ysy#vuB_Tf%8o}oWb+Xki(SaR#* z9KbK6w;vVpbn0Z#GTbYclvu4S!ldo8$T7Hfx?BLmx8$P=5=-pD0Qu>IE53jAKA5m| zF4s_57o`O7_ApqLMvE{grr9a49HF{GMnDZnwWB@@pY1jEz?ICXQBBZDrr`mN;gks)G(k>}a-FiS4)?h*o# z(AN@RVb`_0(U6s(m(xsgE1UwsEGu^;zTG>SYart7Ja${k7iop`Iu3P!Lp=)l(Ag38 z6Jm|UBfsk~5~Fi^K^NvFV7Y>u*xA5ZZ)ms&h4`1cos~#6^Tn{^#W5WTEelGQmh-_N z86G&VLSw?nT$l;l9$^#|OVW&ry_WkNNg&C8zuU>V99N))ePPlv!lbW6v8czNR_MXzUcL>fMy7=QGiwNAvdScGv%Yu~7ii4ng>aeGb#)toeZ8}`+)GgBf@r)s zwS@9*U*Tqawlyi5EixCsuxs!r+!c(s9uI|jG|z0Xx)QQG#7u7G-!lBAJJp^=o`m9} zIxG}(!EwUtO1|yWWsa_(Yvr9p1v^p{jFAi!F;slmTS_#NHcK6+4Uk9Q$>k9E#mfqW z3Li-LZ;cJfL-I3uQe+mxPedf!P-$(_y4Rp+vsj|F8{+l+%gnlb&DuChSGy5K2?c#z zq^w!W4nW!kKA(NuRL(F?#<&iC@kH&fr^@?c0A*okwzL@UcJ{`?C`jsRGKa9r)>N|qZdyX+Cxff&^6z(GiHs?b}#v-B`slmxm1`JY9SE_`!#!6*}+Ns;9 zuXb`ohRisv@;J_$5IIA{T};_{aw8gB&9hy8A+dL!t(l}?B_XxXbmjyn`_ujxPf=ZrbPXN-p2z)JDiI1j>3i;5WH7CvCwcCKg<2&2(JMgb^pxs@aLE?j2zhgU&OK^x4PJ*?L;HVyXaKHc$dRy3Kh5f1AD4j!DQTX6UveB z{(g!nk-HYn3kE$1@9T_~`#IwxPJXdCzD9W!?hU^~1jLTWcG(@9k&#$ta%iosz!?p5?{jxF1&_oE$Y@pqU-h6Rjt3%A){OY_esBmGLVzxk!hF(UpY zH^h%X#3%_2!!W#7WV8W_263R~z3PNNyltFBorm8$ut3l1Y*uIFM2JmACA0_@Bk^i} z*jyspY$t<>dM&pcdF$erfSu1uG()e9j3yD?{-3dZ=eUcO-X-)!sfasW9lg zRCWHaL|_2b6x^5m;RF3v%QZ52N8-6Y-3Sw}rykOJ-VrjYaVE!1*Kd+k7HA5Rrr*6l zAbTleo0%Mp0a*tMHN|YXcK>{*ylE2J@Pef_@<)tur4rrNvzqHAbUmaIqk3-FC(w*& zKQjZwj?47^WkcgO)2UVLliL%(w=N&5fhZqsA32u1X*?SDMj2U zotxtrWgU)Y>`BKM8zy%Zr`DS_!fZr@o*f~%3jsb_2_!BT$aP+we`%0rL@=PmJjYj` zY7<)I)0PP19cV){#eb4Hm$L$Ja-JOE=PV_VHVSE=$n>X!c4y|}0x%G}fdr3S)AWvb z>oCn!`^L|&%=v?qD0HRJA--Bhn?ZR9i+;OA+}(w7fou$Rp-@Dj86UF7%|B3B`K)xZ zS~ZCAu0eu_)`W2DqbQcDDUjGaR$E&ih-xoVA@)YRvr~;;B0GmN4@t~9G?_+q;yTX& zM))!8img!H%sN8E4e)CE3Nb9P9`TZ3LnKi%2}MXz(%k$pzJejRFq`o0;2X0@P>m=CaoY;1<;v zR)DEL3{l?gRNp86pD2q_Hq|sT1l=7MYKC{|JPCp@ua+D^TJXVmx%t38+&x>Alt=M_ z|JLe;$;+BTM*-XQ6hDrC3J>cyHhW7+5jSq9b~@v5Lt-IT4BpXKye=$2f#HCAqTw== zLY0F|DO3Bj*T9de-zhv}>qR=$)xJAM!_qTF*>t43+tR3>;bZ+nl z421*->;YWpBIxVhK|aOw4Kh|pm+Y-zOv9%6i6c`z1g`$?@V>+hXVmw6 zgzY8g^xydQsmT~rh5_t!jw7D;yHNxm$e4 
zPR2XbrlqDz{6dyCK!{o+8#*1d#L@XQ4KOD{pEqlRoWe7SKiPf9Vx)voDjp*;xi+6t zX(Wa_RrHh!GeqOq+l;z+zun|enw#b)v{kMXjK6#zMR{w&1(Cb&JXgdK^gdXKd6a@? zA)PL>&YmxFPv;5?pOWvSU-{Fy{8j8kEwF2&dqhAx&s&lzrfsoMroU81^2&)YE{xV} zIamrE&8!`eS-6|Sgh|RQyP`|wU<88k$VF8}S1XtcJ21$>JZ~Ptd0;ZP!Or1l6F09K z+pb|ef?N5=BC_Kn<4eN?VsLs}mrKq`;nwjZiD$f{_Qrnnv|@Go&>YsFsdCrfVp5$; zFvo(F`09DI&uR4u*SP7e8IzL5{h#WXWN~eOB%pT!s8=&or|>|pbL18sq^jFWfC_YR z|C+IAPhEJ{Dw$%+_)9uC;}jUDXEFXgfl}(}h*a-l;NG_P`dk?_51~3I`a}9{z4f@W zHsO?oQQpfOsUj0;G*v6h`GHQ_1CErv-c-fw>oH(P#xy9g zJkLVZ^mX-Pcd>m8|H)0H%(Jbxw`Nj2VD#Ym6mZk>S4Muc!Hf|(V&N`4?rV6N?A)c~ z)%h%C?y#Y-JlcsJqjuvLRSWDTQBgyD20yln87E4vNHTcjNg6l&d@AQ&oQ_h?Kvk73 zguo%ATc@*>S!KP)yyEu_4%%ltLoES^L*7d2V89dh2k1S;fk%IwJJ4dH=ogF^3ca#x z2lN6SHyCvXbmCBQy3X(R5 z%1vs~WYTJ6rKRxXI;)K)(EfKj$ zO5kX)=MOWwpJ}=Of6$pFNz6gtN>(LTG-4ZtWQ1NkE{caUjsQl$! z9%r^+d?hiz(a_jA?5CYU?ALyQqB-Ja-}w*ca4Du(iBw9&BNC$U?=FuG}lX*BAcl5Kj!?}2Fv zn{CXIg8W(bOqjD%Fbln$Q<-{S|IgrId+MH+mH`j#BtvrW;|*a`CU-*XVuIEP0Oov5 zN+r;tSwCB0@5*G{2;GzbrVAhnl6uINLSAmv-CVefil+qZr2m}|#oPx_ww~VF8Hu4u z6h#}2e7=L@fGJ$B zK_ zd-ohcB~~Z2cchIpk5>=#$E7)EpT9~s47$zFalto0J8%!!K&ANNJ#7%Ke0bALERC!~ z*Ivvkt)J*RRe0$j8YKCU!DD!SBM<^tGuFz}MQBp$UK+=WT@&xFgv)%CA1>vCE)^Tg zinr$F^+q&*V_)q{_EXYzCO!cwc>OzpTM_C7bf7?zKo|W5#*w2k2$FkD5Rh4JfBy72 z|KP03S5L#4u1ou5Rce*!B1iKdw!`+-Gz1;>XWIm#38E`1Qu`(2AaEyGgEMf5Z zfmaaEcK;OisBVLNJe(vAvu_6}rh^$K;$HJEKd<$VCWb6>>+17fsD+X0>N0KsY%R>n zTPXq27uFh8YACR_3AEYT0mvGRKZVxD6rs;Rdxn6(2T4~L-nSOov<;eV|9X?UXesQV z*Z{n=c8nE8uY>l<7f~thc3s-&p2N~|K1xq57_IEl`xg%HjHQ~2oRS!dj+XOu8ig3h z=AX8|1$8W3U}wmr59>1o$S{!PUD>X7!PTBUb| z!*-m1&DO!{ibK@)nr4?SN4TB1jHfMO2`-0}eo_T+fD!Gc{jvdd^)NxAEQaQ8H@zr{ znRPPF<q-qw4lV71kaJ z&OJVdu^rXfo=;isi#VumV2a%@z^ZO=N<{|GeNxuzm#@9+7SYhOGpl*UOx9)5UbB!X z6CZM2=2{%8e#1Rpo-jaYYd$wBOIlOobn`)mWTqJ_$`V}wI%SLBlC~0ZZ6re!X5P$m zGj&ZfFReb9ugP~z!~XB;!Ul1RE;UB7bwEh3(4flee57S}SJihV$n_YQBZV=lWx-A= zw+4+}(F)6v)l-!Ow-$hB3q$70?q+;fb36DSe1irk_6_iG5%G zC|ogzHp+5XnzOj`kO}x^Y~QCpdW2pF`({Iz07Lvm23Rods^Ov=kKr42e(t7Xi@r09 zRI$edt$9)iVWM0b;q#wmO2NVEvOaCNU5Vi&j!+&_k?ADCiTd{zjmw9d6@t8di0;#5 zB`d3vu2}digZ!Z6FUW5Ed_XD%d1wL&5_{WhkD%vyxx=Jkz>bC&!NDGGm@3NZ>(05l4BaQZwpD2C)8VY!X_WS9xlW-f}!!(wa)%6v}Ncz^d$j z`U%P0pdGI)>V@_%ilp7$k9V@WmvMT$1#dv|3bb!ZWC+^``LU-T3Qi1*VRQI@G%x_C zhxLop1ffw59jaz42W0~buPEmR2G+hx8WAe#KvMe$bC^i_5apww5in}ah_+|rwEsge zF?mvzrq{)c;@9XL0GcGO+{qs}aDwA}^BAA;edwRa5SkK}i z_33I#4mfB)k;_?q*_sXS{NKFoUC)h=W$<#%?oh%v^y`#XElGIK#N`M4Hlvf8VxM}R zBA+F2c8X7U&3k=^KLD`JKVr9J7(Qmr!50-Fc2s4z+(YdXaDB||7|;#;q<)jy^K;|Z zyK~eQXREQ3u)T00vt+>$`|e`LX=804vE1k1vsc*0$74^au^Z&_^^iP;jEdo2em;x(MX$?23p~WZLBIN475ipUA~{)dlLhR@hfK%& zx%mP4>cm|1;Z_{$rEc^fj=L3`eKe`>*go|?SV()9(WPsHoqiHGqj*^D!|B4oTr$RC zXoKY!ttQ4!0VDoJv@I`|#;Ie8?}`;9W`&vY8r#?B=2M1-k#L_b9Nv&GD4}X@THn&b z+sdvQ2*%FOB39&$s5+bhuR_7=uy1f0%{qQCiTJVoyf z^Mvdh7*W_Spq<8F4KR9$LwF`~FE9jt?*3jR63qRS9*U$oY8t{eu>kF8ys7D19VhOl zS89j$&WCCAp-bY?Q>-H6(HjA>$!2D-uft8sVQ}n*(!$-x)B3nw`ml}W<-Q196=k2= zr0s<{Gqcrr{iKMf1xqQJw`xJIP$U;R49mZ4;|M>gXEOtI!O5aw2BUcgc>`z5UHZd6 zDx6qNS7&RQDq6S}kfj7K243ha|s(*P7TRiB&^Er#%>9BX{WLPVVD zi8jjtKrWh+M)nyOYNVPS*qrLh)OP|^{N}~B%QrPN)(tLOxFkCJ@j3!qDH2K(m8yNS z;)=ENn{j0sZUjwXbjN+bsa(PH4TN#KM^VZ1M~X+bgDT|Q*a|JCOjj4-Q}G@zL5s_% zwN7|a5D>{K9ow8$%HyrCUFgY5qIlFNNXEW;B zA|_o96&~o~x>4@%HRw?vjh&^*`=aQ{9}XKC@&$4g&6Mr}&IC}T@rU#`?k_dJKVUZ$ zs8rLk0!DLK%p_W$*D#sItPr0?42N@%VkWtnQ1EfH&8T*c>z0(tcXb$5r8l-WMhP#0000000BXo>@=joUt*4- zZj`YY39)fcXC{(ZZ%Ps$w}Ou%L#yZ=eK)B_V-AJu6G*3OBNbSR62m|insqUe!Prn% zs`vV@1bT`>1BFzV!TlmE(}lxr2N;rk9_iyRG9g=e3eKhn04t~F9oea>v1+lpWuXoD z#?pQy3?@l+`D zI-QC16-!-4NLcV+GVLL^q+V@^zPdig#&(=b#U!r~BTc$tqU4kcmI_@if0yHUHP;WI 
zVgJTA8P7k7yQVI^(3C&Mka&^%B7s>Ck=5UN`VF67h1g=7OnUwgGN}u@%jLW5F!La!wo-$J9_q9PX>Eg&Uw z?Zn|;?DW#80$3pI!@kW3z6d>%zb$6=Sn=6*rU||c_5%9a{Su7&(f70D}saQjKCF3lis4@i-cf$w9rRFOpSTw47TP#=?!9C#Amjv@uj@6IH+V=?A;7g z>}s(u7=%s=@p9i(re1u>5}*)Qhk&7>_L9LUgtX6vPq`qJXCF zudy8Br<$Hutt0&kkKP;l(=c)kskFaAdPsOwFLEI4e41gS3E^x*YGZwY5jWJqOFPyM zC!$|aEEJkMwYddc9z}&7C1(`$So|{XP;7Z~gO=c4trfL+dTFFS!12k9Z#>WS+LL5O zD2bX98~TssF>XP*!Y$QGCh+1E{ePFw6>y>gKUMqRW5n;gp$C7?d%+r3I48~hElrZW zGZDqbOJ7Pb7s1|fU^LGAwo~O}(0`6lIUawEE8iodp-hePy|6;`V+3MfN5CV;yw9Qc zYw;^lQ9eukm5AZDB8B8}?(kPw@Zj1!`)sGxlMb~g(NvL^q0ccSU8Ma7YkfCTS)c-@ zmtRV?#GA>bmuE#+ooP~*t7jX85hZyWP*fnm;~>DOA|2bVa60z`IDXDbtDLI~Oc=z1 zaJnAlIN=TT%k?)*4NJ-)+J4Ae=z*#TsoOTb1D*0#&}Vl%7{!1C*;$oHA=<+0G{+{g zQG?M4GI`RnbO|Y%v#XIGn;Ds8bXcNSe-tOlfY=je*vs))JV-$;8gG;#o)g6 zdfP(+P>ycNoE7LMn`nj&8+D#!wpQjA?wW+}sN=hbR(C${f&lGu#~tmWH*eAutXw_q z^cNzZjPb{^lxUoz_dS8^*P@<+w9uk#=n{No?@uXv*Rp*2m8K;OUOeCp#~^?`TdxIu z(9FUxtv3h=ymEOU+OycaJDV8nQ>D6jq}QkVTzY$An_^ip5-Uj6Z0B<?)D3In?UClbV+Jx(^Mn97BLRRD(2a4 zW?HKIUCGV?d)^hcxUK5NbkVH4>8YqXc$JA+8P=Dy$}?$*Tu9(TPJWgIy=Bc%_-a~f zcsr7KRIdo(Xb-nC^S`OHQcux^+0E|6bENIWu}`dcy{PuTsexkGfp{*rZGS;q%<o&-v}n}*z2hx{FwmgD=T=__;f2iYGt`D?7*7`R|w^Q@;OJWQqMG;Q~b{a3KNls3>&p=Ds z2j5_SZ=$E2FTS#kXQ$CnehCN~Rz*g_KE=kFG<({(k}EmgjoB^FtSl20O+}+C5tz4p z+xaXL&gB!w!Mv<0lq~_@^P7!qEnaOu3u1Z_n^O#6>{Gbm`0*1E9{&gEKl}(Gn-;Fx zCf&QA(hH5HkbWAG6?dcez~b}u8z)0-a!&K`ExGf2x7S~N!TW8_fhEtG4(K>@H|Z4^ zZ2SD4zr*zwS;w-3G7F11J+GUw7_bQfV@Jh?+*Gr$WldU7fv#&Fq|C!rDE zHP(ieAe32va;13*&+Mk%qcY*&=Y68}H2W3Q#d2e{&;yJ8n&~b$?lk=$EkzPBs=96Upp33Y{79c8;BIr20M%U`B~?awJi^ynuV?uxS4K-@qcg#u*E{k zo^R2+MY-+1$~wH=6p*EUo(G|dU|aZwh+rp1HI_Q9iW9l3RA0(=)ufa^0=Tpd%TT3D z4O(g6%_{cQn$T#-S)+!hP2U3L@Gn=uSx*R}ljxziWR*|gQs%e0CQW;{CWK$h2R0~Q zkE}7O(>u%(FF1QL3xmhEZO^8gZJVgU)VMuO3fK7j*1Vsj6|lRS>Y zX?E3lJFSulBbDHf48RBf)^I~VqP*SDxdFHUN%6ZyV|@j6@x-ZeF}FlDOCW|ju?LiH z)5{Lv;aFpQm3c_d3=9M6A-=@ucFgyHxo9tL1C;xlK!t(kv_$0#i=Omz5qx!brq}9Y zU7M61h$^J>U4>+vxd3T`Lzh0t#k1GivKUw`A{{4H()~0pr=g6=JnZF8USPk)9c|n| z|JWUG+h{5^u{ZJ)KHMq@xWc9ApY{L?CsB$k%9Sm7{xCD84)gNg8QrhGyYBISMDwfL z=nwlL)l#ubVz5$7Woe0E1)y7fi`uS}rtC!mOg|x>~oxb*Ea63HRCdX|7fZQX9sA>dZg!FEm=#BnvW366osT}gTw1sE z7|1T%avF>D)M1K>?#cTrpq#oli--c@++#V_n^3pfAAiO3u9t*2yzFzdSS0L6$>_I? 
z^1$`0IX|4vct1$bF6NH8>E}avpLyd>lf60D!!|SvPKyh8RA}alGXfaif!9@Lt?9ki)41 zPW^3gkmo`?Ze&Sf-|u0EeT+59#Csyg@H8m_N;7JKx+dS};+@y3ph(zMai8P9 z2YdiDgg#SwxL1P*Mt}Os3_57BaxLeSnkyxb9AddB1xK+NJLU|IVnmoaZeQo>JZf3d zrN`NL|AMVpJhYbs0@`jk#Wpp#)KWsQhx|l=VI_g$g3U_W>HMn_t+eC+1$ZTv_9n1zvK8J9M&lR~$*Z-F-BSwDG#c4SW z3_^lYMb2doG{D2Cu$51|chjW?NWi+l+Db-0>F$M|W%}tZ?tp7++y;p@+%n6;nmI#J zgxTn0RoR6!;u3@M9Hw(!gEOo^-_J=WC}FP&s{9&05X@^76)_2lQPMD~SMxI8EIvgl zZZ5Acf1DQK_DTL16#1nDr55k3p*8z{>2KzZsoC1XN5bG&i@?HyZQ2L9O?NI)O959- zo2%eZOk`|qLjjY#tp689QgPzkrM$KbgFuKQrZ$ifJPm$rT0`G1l7!QvhpffU{+!i- zrA>;$;(14a{!I3pi#O^qjjx1$_%o@VaH=Uii&Y_9mVs9`>UX)-QFHJ{01LEX_jd;) zOa%k^(BXW9iS4a7s1T9BZP8A;=H%SUeui>X^xE>Pc^G-Lv!D3malCoWEZM}yA7pmo z=oE}XZvL~N71XGs8i{m;P<}$=e0&Y2F?Y1BaZ~U7)Qpj~?C0S{dDfij*TSahQe9)P z5+n6&#uw4eQCPQBXjKxQ{4k5f;`fJcQFnbC?AasOL`s7KjtpLD6<%pwn#f>j+RNOY zsdoJLrW~FuHt+~qR2`HR-5sEF-s-jbWv{7j7a`@Mg8pl8*z&k8QS&d%276ISRx=FK zPz!?Eo?R;?t~ZaQL9!mdGScR zZ3jxVh10B{>^`jj?wY_JN{E} z7Yw7X>gn&j+4|O;eVbAM05Ns?8$e{0v#yF+u6#cp(;sn0%@aq|LsKAk1Dwf3y;U2P7rp#_{PtR^l%_`Qs)s1;x&;Y>D7Dj?izY1tB>kY44A=r#Z3p zaO#o^n~&2jAex_)x|m`$rN670U(q(KlIe?X?A<>>;I?RUGr>?p99*_$#^axB&?d{( z?cXPOxF0D21qKuGeqF@4(i!F9;JoR2kmq%}g>kj=ouK|=18F-k$W>)KKod?dHYeGQ zU#$8wiYkQTrai=sxP~NtBj+&A6j)JTlsszBQ;|*&L2S%xomW=wUvOyZCCWOu0F+=Y zBfKq>VlYtW)FMhkRSyak0<9_lf0fM5*{&uBy~+s&$#tH( z=rp0r81={%T8};-9$A}tCJ6mHp^CcXHqPn(uTa1RG*Ihk^rbO1MNd<=rnZQy-8@oR zZ|EGiMBTVRfPNYTkY{|t zYK7&@bJ9)yElT>f01ObIA=JxOii-~;^+CZ6?7GBviWPxMqLx+HITOqRd2ck?bO1~o z*bB=GuMamF{uPz_7a(5ce#IifOqC4`i@pymMhC$`q_AE)Ezt9%h{}{bKR{huY;wuf zue;>4svhA9G}oSG@^ud~OrU&;2M)Ims2dI2%lI!*-=8wHDZEDGX$}9(f=}iv%fP~4 zJ8xmdi-|{9Z331mEAIVP8gwU63%90esK_E2Hs&*;V;a1ueX(x5zSRb4`PtDc4wNXq zWNAGA^kC2pSyE8OZ%lkkPmW0m>4$Hzn0jDpgusGGwAX4|U75 z;s^;-dBPcS%t3{l=oW&niG9LxV0F*B4Wq2~9K3-eU>%320e zKx$ZNb#M2i_E#-CL-IH|{_tSuHSF8xqh>z%(`iw+p8_V5!j6UYK*u|S+Mg_Z$yAr$7j-Q zDw~~+u}!QTM!D^-_r-~4QYWUj216{AcYv4-?nOAAdgGB%@I*d`N+d^>w3*ALHlKy`cOuZ3Jp z8=@Q(De$6YTs&9rjwiVlE3LMv0F{BO#`U>r=)flfhlIE^$=JiV!e#(j+%z zSzwr>UPA+RxlLcm`U#}zupIH<5=3+yn-k>7`5Yd1q#i6n>vE5a%Oapsje#{}`*pJV ztfV&JW1oMJgE+DCIF%ztdGguI^8AME<0%&yD#jtV|#&1 zS}V5Ex>ZOjAp@$h8h^ULL)Q=&=tm|r4-?@N5)c(o>-gH_@<`KV*BJ^rs<b&T3bOB7r+Otx|>YvA5HwvZq$U9wP)IL>xzs zd{2ngIUgy*Nocb>xPfYF!${+XcUT)yfuj(tB-cxcyg1})O&n(8ec+Za*PT4eV22tq z$9BrYGwtbR7m9_UQ+j?STD3cF6qdUU+w(@Z8tc152dH6RlXE}N@wvh4TfOp+A&VBU z0e$SDU}vUA07fTWSyf~2La^FhUoY4L-V9J0mPgE7&R-K?`(!{cB7YUhE9yY^R$i&zb}ube7}0+(n-wP);NRyp*m-EoYZxb zz6$_X+iuD)6`;f%&HI~g^&AEmBXrbb|d!0ZhKS|&$zIwl|0s#Va;EroAWz(^O@Md-A0NEaxESz=+)GMDK4PaZir#m5bvP{F5Ma>E zQSz~vtt}rN!)yfJ$LWXSHtA_EYFS{?#a32`nRJK#NTI!mrn;iZ*vWoE$d3T&UrnX; zhdB6ccjvEL=l@J*?$IWbCv<7^WtZmgp1Te_8GaFx5sOv`+w%a=A^eaAJ3R~P)~v}syA;6+}$4(uBONvd#wy}XEx ze*e%=BOQAM@kO(E*pW}UR=T*w&*ENuS`01ei}w-pxKgV%ec&ZCuuqAz9{hqsM}?bz z%!>QAAIL!7;3bC-V(ZzmSH411=-gv&(}iJSgb@;&*x|H&8+Y!m?P{A5DP-9NE!%6Cz{q#D@2Kq zsaUT9%c2_(Qkq}HR16eIpYHai-g4Ij<(?idbBR%xi-uYxR$!&4e37Ej8jN8YzF*_< zq(JZMw^>q8GS{TGu6n6KulOc4{d9b@WP#LdB26*==Q0Hz1UjkluHxS=oze>Q;$45d zW)tm}mPTSsi^a{ZVAsql3VE_HKssAtoc=C<6r}x>8xN+nDT>KQeYAa0H<7-H34Nzo zeW3LBA+k5}3?|xMMS<~oN80&ta;#aZ?2CavJUE&2Nu45nz1V4WP8u1j<0B*$jj&om z$~@4n0&V|msL|8Uc?*Qh^bS^1A5tk;=|=2!qzHBCx&Q&d!1=m1|G%clA1wm%oEyBr zZgi(`0H=M}bx+T8S_S&uUW-&1=9wp&Gae zFBd4CY$B4pA5Mq%E!a z!o?NHqR)ex9rD!Pta0@zum+=GA`+FUWz$P-@vkpcNU#o>bL)?J6`jmaXkHN5)VSid z6TemD4m?ZMdDpAKAU}W}k&~(qW4*W>y6J2YKoTDLrcKwY7}oyxISYkHCRa@FAXLx1fspl>Q6RG6&Yl z{}|_n-Mm^|NXaL7aFkq~7qpTcmD`7oz8<|W`ki+@(*4Osf+ws|W7Wc13wL3DTfR3n zzZ=>W-d`Z3wN`A&hN%g-Pc(J)ByKyO&2dgyOHjr9aU?ezVEuTaAa07OUDvqSo2Q+w zPgUF(q>sED)Q8`IQoBt;#mK(CE+-Thk;Ls`Y%bL-QhC5Ye+#{@oxy+5X*GGWx)xiv 
z)s=CH01GaEH(kM-qyKk>vvPiz0MFZ@AFcH}--rm-G(^eFg$=!IDag2nVO+6b@^#Y@ z^0E*N96Nw74UahtVF4}TKg&Sv7{Tu zm_*mDS^-)4Af`V8$IJi42f3Dr>(1^T{3SwA+Zm@yJa_g6EXzZ8@y5xg4VPqEqm#8m zE3M6mbtgxSd}>q^gYE}mTOT~iK;QY#XF+dTQYggy(TOfs(_8gC$YT9pVdOuq0D4I@ z9{gKu+0NOSt2=a>6o93b3clN$SSEoG?WZfF+e3=RueZD6k7l%e)R|X{FL{){89(b6 z7eH}mrkTtmGEINE9$^KR!((m7=8S_wJx*-c&i1i}KXNid@#9K{;&%lp5UpA@`vGmR z)^(`qaw%`3Y-OQ$u-8PK328J|QrvJ0rv89G-&dr%&I+=ACENW$1ELUgjLx=ah|(P# zu8|_p7JkuWDzyzRE6awMIl%Lg%t)DR?e5my5hwk#VXKzz&VHg;KvOv9&cJcMZQF+} zfAw0>i6F`v?>^&jtW&{vd%V=Wz^ZBzuG0wzeMJW|`9<1A4s~n%_OGwTtzw1nv2Od8 zn-jBO$to}!8K;3lazxZ$;{`2FHhV{wX@?DU#EPv6uU-JLsy5zTn)wj47afR;fdBv; zA6Hy`+{?GOU_Tm4P%p@a)|?E3^5ORAM9C`Y0q7~DA!hH4J5(|oE|oVKD=Tn9erH!@ z!==rnyzDCz`Iu*5=4Lt>MHW{uE2cAULpc_rX!T4ydAIVBHsZQ*+$0|>gyeZdwhIws z>ROie1YfMS-T40E_4s46{zP&YL5JR(!w;F)n$g3vaU0PdXB&Lg3cM{aU6eVRfWAZA zUjzeC{Zpt;;9adaohPz}Q?VA1ogpu!#-@w$vW|LidclzQx#)v<1<|S}7R6HPBB#)r z^)h3fWvhvDo0)Pwy9ZMM>Kn28fR9r{s`C;nFS*1rc_`MW=Ow>D-7VL0kLHa@q(r6? zj#TLzc1ls;RJN13vv^s}g(o7b^%ovoSPHAr+h$V@4R8yveiK;VpuQT#ULDpb+US{~ zDN=UQVglf`<4DnV2Y7<-w)VT_ykA?xT6vqRPMU!4NldK_ z=hs^Snv9-On4yOJ!;Vvd=Qx;zOxv_Y_P)B(BSS&05r+jC9@_ja^j|sVBWgy0uD)JB z_365WV!rE^32<;)$g54nYdeNfYt3qV>&N;jt_hCP8y}Bd>;;gHM>7=$5<}`sWXU(8 zA@Z^ISl617RvtEyxvoA%Kiip zzqbW+!~ppRv^JP~Z-Z;GU1#|zx2O0zpHPC%!yGd`##^N#fm&>W`|pKDmu>3=Q41!t zZ(wItEo^YGmgN0m;xAlERAr;4tymKIjCuS)C)2JLMvFOEk3h393@3g~2jc^MY2bd{ zvb{L%3^F;J2X#!)yPA@&h)wOuwZYX5>|ZX#G=~2!^@=%iikMS-hs{>CX`<6kK1(ta zP%}oy@POb1aQpnYAGxC6*UrjWY|0F29x%rE%W#HDGWPh9<7anJePTAl0pUMDgZ z(683e#%<~Qb$SMgYn zM-V^0=+-P-SU%its+~4jCGiNm=&UE&@Sd?!^XNTEZlH(bGH;Z&Ocite+-7VuG-0b9 zR}{-ZZpN=xBB*eL-?a&G@R}sXd*9Ev9o)8;%dJcQ1O#-*S?Xh`cbqL<%xhDil7opL z4MW;ui6ArrUq0KEHbf8DLU>_3(&q^{=JDXReop$xp+Q%}NrSdD@UAB;-AnbimcwM$ zRiM+(?!&`QBc5cUg`l7Pg?oQ+%bo}E@ne({S8WgX+Wp)>3g`Oh=JXY-U;1hi z%(7;&ZRr)}bWh&(3{qBO8u0jp=F0F>ZmGI6B(9bzK}DDyJi;ag0A8kYEpV>jGObRl zL|L|YnehyqE#i9g&1~Y^YsbDocgMH2Qh$_?W2|Zw-?lR0L7WaJvhOR5e zMqDLY&M=Xp{|godP^Ie_^Sv+ZbV-1E+oNVZzJs5ORXXBY1O+itgOH4UBl?mMAb7Elkv|%V=HRX$@=Ych-uSwJJXbo`HN*W8(((|9Ju~_(IpH ziq6w91LO575m1vn4t#qRzW@7Ua8)1HMjurCQ93`k#)!J_ohF9@bF@`DxxYsMZFw_x zWfqP^qC!7|K|?y^z)A@7dJ+H8T&mlwXvqrbH{$NLlS0bM6mW1Dm6bCUV4!zx1tt8W zt4n=c&B0W|b1C->Vtk3LC+nu+uMYBQ%<_UTMGQGPJ|_QZ^RK7N-vA56Ud1}cxGq#l z^Ah?1SU{)0388eM@Hx%8OO0q$zQNs9*3v$!^mJL_V8LutA6R7SKKO50c(xCkX@29# zOA^!^(%Y;A*Xi*IPNX>zLPh^&;h>9zp*Ry|TWw)yL3Uo_HOt}SVMBnlyNv>Eqw%Lc z_c!OBr=%%S2SDU=HauZwdS^^Z(GeL48}F#O7?o@;Or4AAdvBtFHG1Xo>GHa50CvFE z4N(XCi}|mbX9k9*t5rw|x;!)Kt~Pa(rkP>CO@5^^^57*SV*F+cgwxgRZ239iT4+fk z1d7%1k+J}6m2nE~=8M2jU@D2!JG^)cJ_#=Vx#7W4=#`_dHbLN-G0lyPhSB*GlFA-z4LWD{ZJ(?JI5mogzlxNzuh{W&fWvI zYbQ`$1r09tlLmaJo$L30ECjJIMym6mQuLAZ1!XP>n-7lgy+$CeG^u)~pz2K+&VV|_ zG-M&%amF;{Tx)m!bOxeR+~o`2C!80RtrK5}&-L~Lju)=E8Mw&_`fR~KlxYAfVA2{$ z$!?`mw*q%2TN!$_#AU6SntU*xc$G<3zDqy&v>a6Me7^I4znjNPT!?|L^@m8qn~hy0 zw&p2t6>=<3%_0tr>oTW@_n3*fWZnC8_NJwlR%*Jv)1|~hYCqp;FCUg z_a^roitI-))4Uk;13l{la*_?0npmo3=};(vux0qgcJv$G&pt-TU|)O5EAx8RDPZq8ZuOt|+Luxt z@TO_|)};x0{GJ33u8<`a1JQ=X(AKDRbH`YgP{gENMEj|34YCEXgqZ(-TsOITPDBT3 z(qqWf7l8NBP5;U}vnk8)T6*9)gi1|{b$x%l6nPt?%#BUaLdbmKIvdJRp(uY3dVHj5 zn%%N{?l55dfknvFhRo@r5}yedeouHKXtrfQwo1~{Y3t{Bzy12bzHoU)P9x}{xBww$ zZuFs<15|J@I_Q|kI8`*~-G zq5cRvCyhu=ov3=B@#CK?&clFDWp9&wXyD((2g8e+Ct-_}aLiFg(eCuP_--X>sVcn1 z`en%AKoA2_k70b3{b?U$In#_UracGbsZvm~nt*mLRev*|%-MYx9 zTbI)_wRpJ#rD#)Lv^R)&q!8MJW=H^4^q{^%mDE@(NGHYO6z5dJ&MO)(7A=$Vo`jM}5@z2aNzKQ4tZR)p*`Bj?7Xq48lftYi)Dz zPzz|JlCS^PO&7UlHZ*MIK_Y6JEnr-#q|;iBTW+sh@70fhaduu0950I3ie4pr&h@Y@ z9B?H6(F;!eOXdf$!FK(a+`q({7UJ?Q5MreM98x`rz9&8&k=wyKO<{-BDQmSl&SkO4(*V})f}E$&?Kc(X-ZZ-uIY2FfGEi$6&%^E4lk|jaBzA|NX(n5 
z=)Ri>yM*MoBm*CLJPp}_e`2t!`>qB&O!zG_Hf!inK{_ia;ZUFjel_Se3^bzXK?xrP zi)n9Nv^#e;R4MrumPSsLIle7oP5>}j%GVm&W{x>9tfCj}Yr@rK{ORJOHva08&oj^z zBYaL)sMgVo-lBSqMAPF}hkOguQE|z0Bt}wk_@6tO>BAY7DHD@m=>2!0>YK6=`YeGh%ew`8U>|4VBA(3G~ouESoK#bCYoP}Fk>62YAwxFP6uE{hu!h8P>93-?n`sND!}7@ zpW)HZzHuH2L0`sHR36a|mO#G#Hh}c@>Hs2ZK0WciRiH#wTGly3WYhX6LlIq1-!*&q zd=)m5l5An|BUVwEe@z>-t6P*APIMtQd}vzE+LkPGU!-2F!pI$(TrI1+F+5$W2qe@ysw6mZ9 znj6(IZ8xuv7gZL41IG+M*^)IiS+@G)4^yR!KNHHc4a=5#Me8^EUC*%r&fwi~A(X5c zBm(|7I%M+3etg!%faU3|S-+b6%K+g!6s9tzWPB(mD)&oxg@&YgL_4&WO0(oed9tVB zKYj7{;rhO2EOuw5+K=&i)W(Ju@e{2Jy4Q@md-Op=*k_W4f$^Bg2^zN)AQ%R`zlbCy z^kJ?h8IkoQzl%E2SW)^7GQh^O^_@gWsA%{rP~tAw(18z4h%c7bjx1a}mqRbnv|=?9 zHox$(|FJ&B6V1YFzk47`r>I`FYRV+U({;sqrH*}EInqP@<1T)j=J;Ca@AC6vwC3qu z90Y~?0y%z0DRf(?uh+=eTYS0AD-u)<3n%sZHK(#1I83#G*so}Y2|x!*`FYlasEU!A zchs!qQ!E-ofYc`5i~oo|>fd{h93@jDDP?Z|(nm=ruArdvQ!A)%gJjxmRcV2}KMUko z+13NFUe#aO%S=VJ1GYf$!v3b*%F=XY6{HG*K?Y(D1I0M0&QwUQj4!_nSKo?;-z&H+ z_cwr5Pj(GZT4`QQbFD8)ufG`_3IwpCcnUE`eT^;pgnhwIK{2c|t0&H2A*0O-`?;%y z3jsSc$Pn%6`Y~|xrLE~Kf$!GjSZ7xx&$Y09;)KUEF#!7`3qcsVhxy(!1f-i(5Ob`t zDs#NiN@t{>oj(vellzhSmRVz4(>26((31pN=A=v0-d8(ek8rb%^cKCM9}9G51ne?D z_ITtMho>pZVD>q0D>^HetMO&O8MvJjprFRxt88esbFjo)OM;GLhSOV8Xer&=9RVb* zuOoo8>|L7EHUuS@mY3nBoN-{UJ|FhVh*3&ei@7Oc zNzwX2qg$io{JjbXzy%Q|0Rz8QOO=yS*GT@?{uYE?jM%dIb`Sp2nr;yf%j z1qf^DhzcE_^CJTE;3)+#J5U$WSc{j-Ww_1q(AM~#rcNU!B?$FadY|YBII1Q|&d4F+ z1g@AS?{~{Fk>Et*@Hjftb)*kjiq>%+na$@NAU}6fBsDVrkZ?`*vgr;0nx|B?BMC&& zVLQ#!2t$}9yW|W_l@qW~J4H3`>?R($uuBM_CPRJ|bYBwOYBqKA#~NqIb@I;pP0shrdz#R$NeKPfsm4l&w)SgBTl3!)B1=!% z$R4Z@OJZd<%<>B$b@~Wvy};e2t2~%Hb23-3XevtpJgOxQZ_H}C{6|Y^lZz(g2^U4W z+MP_#-wU-tFX2-qwW@dPavrn>mF048F#9-$v`dy++L2cWCX*$xued`4NBwU=K0pa! zQ7nj%gc!*E#ZIMD_oRnSpO*rNp^Mmsv)t#Y1lV6`jsQDnmlM17&c^N5ZwfuT8HCf~ z_uH*o5aVE-EU%$lvDSYRV|(h4W3K?xp-?>c`}4XjN11%o?UwY+H00f~J5bZJ^f~@J z+`k|Pw2)?`#CDQ@cXGZOT47V=)xxXM!>=#6J7O53IN!LVa@x2B?HcSl9@`HPSxp@b zXQ@uLNoORqne~qOoM!Gmw_!ONP~`U51&(Wl;0`I_nE7Qoik1M|a<1@9o*FtT(LwWM z!;TCzVB*3XXkI8VdmCMlli4g`!VOhpdonN#FlR7OM_5qQaRMMiJFs09a_uWC^trX~ zBPs1~ir<>=1LgKkAQ6M4Qz$Ehj_m7k2Yaej6-NHZQODLUgs)8ZA{4W}|LDR>e+ej7 zYJ3hCaZ>8j2(SpNXn@bY&#FM299?^$L4RORVc0z5i8dPR^J#3=40WvXHC1a&`vv&} zwLkiFs$ulD0Pf9!GyYUp@)f+kc*|`CbvxD{1wFLP;oRbpR;WvktZT*Fc9DoN=Og%i z&AhD2TVZz-(>T1489RxlXxM)3lg#{EL+(%U5cIXvmu>$lzITr)BZ~;&AzyR<%VkW- zKl0HcxQPA%ODe;)+eQht70$_G7nfuWu2PBK0p9H<5(A?55KuHG1*J*-fXm`9GY5*+ zRfXrcf~GMS$+`ejp^VdZ6bIWCS&3ypj^tn^#;a8yBmcrehF)^jW%qFckQfA~VnLa4 zeL>{`w7E1u-VZYleG|lige{`W8P;<%x@>Um8qt`wO+`~|70H$XsuuH2U(m5yhU=@( zNto6v%Qo&dAzgLE=t5%{S3i(ai4fs((| z&Vi(w+ucfh(_6WkWwccBVd(TvxxVt%}~U(RJYIuO?Y<+}5%E$M77D)L=q zC7dRnPE5LDyv`oce?Sz=|I^y1%d%n-e`D`6iGY{eQSIft_(imQ^n_+#?K`U_mLqkE z(+f@Qr9T=}Cl?L`=3uX(=y0XBM^_#vw+RgM6@qPTcOR^+e9L;JFjEG9Ji-SkYz!s~ zO(Mx`iXNd1NsJ+So68oa*9KB_=2eCi{?SXrhIsmDr;=3J9uOcZ3`4=1f&h*Nya(ZPXyVP*%WKK;j69*5-A%$n zixU4RZ2+Z?+Bc_7V?Jf7?P0jSLrKySw+1&lcdpWx0O}JNH!d?-mY=8}$?BhjG>zlA+ zP%+Fp0xRhaj#SMjGhSQ%JFI@1$6Z*=vrU{EkRzYeX`xIS2Fa9SH9z5M zNEpFc;q9P*Ft*aju+#iy=7*eziQ|RA^8F&waDG&{H>NKzTeHTILO}Sc)ax2LIZKNh zf0|xT2*(%z&GaNT5}aCY@v0(ubXP3>U7MbQjn_=9P)a_hY^9Bc9QO{P00b{kuaz5x zIhr&;ejB{d+!RR($&w{(m~l^ulcoGbi>BXz>$Wvgxy-kYoBg(zMa}W5m_8_!<$#1f zpNaqqrUX!A^YU-%d-Q@CJ=_zz_okPO2=|UbbB)ZkTzKsy7Wdw}l)t|hyGSb33}?nQ zTRic!M7XyiP<WWK_Nkra4`uJ;r?EB#t6fB@0a|E(*yr zO%~?Va7MzgKYdM%`fjB{N?k?t(1u43%Qgi`8WQr z7gX>~E@qahRE0NX)v>ri-)c6NvO`Y=q##`Q7b7n)Eof>{h|yj5g|7>R4i;DR{{)H; zQG7nh436r3{VbGpVyVcZ4ZPbK`4_|Kj~WEBwy$X1-2da&PU6YN77w+E^yq4Ie@UW5 zYe>s>J^2kbw0p}5?&2vXPM{xjqT
    ErihYV69aHH)uGLUn#Ajqy*ut+fAL$*P%Zjw&=bTz~@fh5~tjb z7;4D=*3D0^I95Vzlx|q|OR}JBp>ly|s zRz-8AC!m`;*QToKnp&8G<>S zicX(MjN@C#e)!^#6YKa*?s(3c zlm%9O6R!FteKCeQCHXqy?33#9ExE!cdC6B*xA1s_!29salCFllEpmK;)IyJ|Ucf|!kHFhL6M6#kY@wtwxfUABZnxg+Sm1DwaY8*m zTRb;pNDFoJ3WA-HR4KEr*@sYiWk;>CLYFHZM;Ua|9q&Na&E#+i(ukZC6R2PW5|X-f z`E1G!2;Yt5bIMBTV_sMR_3QkWTR&bJ?w;-5d`9T;Pti={RH|$9WiG+CV)BTk+V*ve z&@_;);^+K(dDT>ok3aG}T_QAX@pKlGhB&Em{AqLwV3qTES-VB=Hhx9#r2il$`P=oC zzN!fwoe*OxCu*)U}#Yj%J`E34kgdT_Y?7Ua{RPby1(~5``_+-wOBj zgWsu!5)>;U-w-3bHha?On$xSnUI9I;w$L=%Y>>>}l+W-qJP%?I+TGtyLvDBHAxa-) z=V`Vly3ijF&J!M470Cyjd;TRpU>qyr<_hWXIH9OMmwfYg=c+#jxGtwwgBtJJeH4Lx z7s9ekuSOIKfpUZ?rWK(^9)@5C#a4%<#l|`(P}BgSs&#+OYi;z>ykbtu&+2WqdiKh5 zDrUAkPi{lWgWYk|23<%STjOD*=B)7rt2#J*jfB{U%#RK|>^I$U!sJh~jnx%H5IzBX zz_NBs&0^`UKi?ivR8qd|IG@cA0>8Fefwmu~G}R(URoVV^RTI?+R?|Z=d{Xxq?eZvc z4%b15AgSiH9XmTwN)=dVJU&qPXuOy=B0^wDE5pHpPqn0iYk4+|OTr{2TB;PmD@*F9N80jshMh zQ@T*4QEz94xH3_cCj(25O0A#G4@iRxt(|m?1naPASkWRmlx^!E6AA6-gkm?~v#aFy zFH9MsvRvWIN&E*=QsA0gb=nl=WY9h_1CvXOuBU6BcC5cevCz<@*PV)5BWe7wPmglW z{|C~MVW^`-3zN@RxB3x+pBp%j>Qu?mVmRKPG9QB}$ zD%&0ZF*wtsb>>g+{{Hd*|Kkm?%0zZLm7H-xtR7n@9jDD?=K16ZJU?7ZI@S^!Ti-eu z%e)WiRAcZV*+h8<|WkFrkF;tCQO89vo zHbqwX*us99Rse|erpP$QU4&HBghmx&HCj!PXIU2_4Z@qzf<*?MU?HBMW zkrQ*atE#KM#SQxe!qf(=^;K1T2%Ou9Y1)+m#@95+UJ7-5{KJ%zl++)I+t2%xcsboW zr}I)0FmXKn5G&aPE0L}jg--kR>O?DA&;Z*is@y~IBW(gle*bMndYUixt~L>WPk*4e zrSsnS=QTv0;w66oDGBLZ@+ZGwP?6NV$JQIq7ig;_=P5j@JPz$Oe2qC`oSzR}c^E7~ zu3!J(_T6$Gnvf$>KOWt3h$vC%UORnkYW))vlCuSINvDMno*T3FVv|IzT?R=k!7U_6 zOl0V5!Z)I}(`;wVcjAL&{(3Z-n&!qlYUpYM-9Zyh0$gD76(~pSPfjy$5D+0H+-nC!>&5H9uKoL2fIct z`l=SHod#oarjKcLsSQ@kTjSxDmK2xG)`sbz34UFCAs(AdAf$>Q`#kLeLqUMJj!Kgj8p5HB|EBbCmcBm~JS1!*Vu6Fl=PR622q}mY zs0#_B(J1=?a24eDHC?z{yr+G4t@0AQpV!>oFwP3N1$A1T<<_md6?k7Ruon;|BeXHj zVhNv(m?0R{h%fFacB$&G_nmRt%+Cr{hH34gR0t}J(9v1Jgq$+6Kw7B|+Y5Fv5 z0Kh~03YQiud>fDmiBz;+=x|15Pe140VVAzUO^My}`>dYaICo@x6LT(JvabW&QP+20 zhTq8?t+U6gWu;~zR%gY|CPZuN*|nn1&?YsmC591!C3i0tMheaq{pmMkH5((Pw9 zxE|@*{jJTuStYktm}%E|6?oL-oW%m4QmI!Ag8y=XJhM0!4h2?&kOb9Ie2CBFh^rX6 ze8;&6lTjl`-!2ONb*Y2Oudum5H=9e{6=+ik<55=ZM6JCo7)-ZfCamOrrn zQ2YgttT___8jv_t*oy2>wUNZAyB3x}akXd%&(f=$^i1&QheDwP`({)MsU?w0GT;S8 z9n16&VgHNci872KvRw@%zc%tbOzf+zpg`5cN=6TKctLFmzOVeiX%EJ{eJm(P9tX7H($E1tRqcxNvo2!ZnT_@$n+Y!k>(+McP-*Meu@(Ce*X`7-2GUWGt;bcZ zadif1m$V6xON)XDA#BFSB-Ai=>L_obWI1Iv;*Iu>(9G{zQN4Vq^>NCKkQmZOOeM>VGC=} zsvQ8OX>n&jJ+h6o0&qNPeR%}chOD7`x@z?- zvKTr#xPqczN%*yq;GhtC5XF99F5+sYPz2bCKo_W2NTqrG${_?o6 zs_n1sY^7t@%8}r0;Pa?(cGQU{9Ku8ZDaMntYqp5|AV$}wj%yj*T$@MWDZxuO-sF3t4E~CsAleoG`sz?T8xha5MZKP zm|5VVVRGR;*5hAFeqHH9U%maJ|I4#oRk@B7f9g+rL#Pl3ICbDsnZx@f%E(%rx+>4; z|3Z@X5iT>zpS-kr$sYPMcAE?}Ac1j8m4I79R^t*Acv1j_|40_ER9^Td!^*Uaq|V(^ z4R6a8DpqJz)8}ZtMv!h`%)j=&RqB^V_oYJ^M%XI^?Z*$YrzCrex4_uAe#dS_i>$1p zW?dNDj%$1u3{9z)=vjXt;N^<{A*htrrNX6h+XoGx&&l|2be+<3V7qMhuMn;MQUzo; z-q;#Gu#(MY3M|dK*k>6_N74O=OB7UgK^|rw|9qs0L1D!n8|+UixW78f+6wBp92DZ^ zZL%X4h`VDBE?|UBQGe~sFUH^JD~U z(Cx;AU;Y+`%)^>c+tMJm8c3Q-dd3!UQ-~&)+cBzLN0xb4?=FPnq+k>`B%9GVPXGJX zhzBa=IYAi{yGHN$U?@E%7EuJ_{a_3%Dpi$gAj;!Y)7^f3mCyk(?%4-!&F8{D>PA&7 z>)P%#8%~^D;@)PkNhz0NCt#$&B z7@{DlTp1<>%!EVJ0Zk=z=VA9^9=tcP5A%}G@0m+ei_lz<(Fh@T5UE1M+41pb;^uUB zbDrjIO$pm!awN2K%5t=0M0=sIuPYF=k%`(HQ++g6U>@og56j#!`v4xLO5^k{c&z@}wFs>4`nO{SqMI^BVnwWL#u~xX?`dG0^&kC<$L$ zk5-!xiXA9sR`J#PVjq+d)-AP07{i^Y$KoYj{Gz=q97?o+i`Nrx_!Z11@py){aW4>b z;8s0ecBYMA$79uN3qah0MZ>x)daSKoWDIP6W(z-4Dd7kMU4%zRqj4WpvZ;b7KqLYj zsBqfZ=g7xZ3OS#qHK}PaC141)$hHq~f!4=3iQvEz0vG%)EZ`C{h}hv&$qW{&f06_ z6S}D^i-iyY2Ny>4um!*a!ZypvT-MdZZp=OV>+BHd`~S1Iv6VF}aQhg78XK7YpBbRJ 
zYsGPru0o^U{j8T>cEU9P&nJn2>vOXNLzgX}Zyp2oU;y92m&qt`r*7uAfn|sYl}|~;_-G|)79WsGub^L zj|tPDKcQ%)SF#&-{xD15$?Lpn07wPCR60FGK&Uy~jzX=GpKs-m)Fj_1f$@SR|1XZ9 zz*ju@G)ecPlW@c7Wm4PtYC&At>I`5U!=cC$!wDzg9@5vmrc zVP6@E0*6}1zf4ypk4X0rGIr%j*6+AR-2(U&!JlfDe0 z=T4*I|9F&F83S-EHDH0E1v=)0z;bZvn%-nE!lG@uc@@jZR5ENT2_jE>;+Y}_HBOYI z^S=A)gPheXmsz#*tc*>n!4_mpz0n*39>uOR9w04&;zq+ir>gEMhoDt(@5LLB;zU9? zG5F@Ob<;?&ne(r06*r0`uV2s^wNyyyNp0w$o}mq>T#gRr*0|}2;aWT17Cw!0`EvkD zTr*i=@6=LE(pBb?lW7IjFzrhav4Xi^$V$=7&Y=Q7?xTcFNzqKVKB4K-e6)55%v;av zIdQ{<=5_KlSudh~?ST?K+0#Hfk;R6_5ewb}Y2{+io&@$eq>7e}-8qMJp&FJDg1&3( zoMD=D{_}dyCi5q(fT0?-homi|PL^mu(20`awGS6WFELa_gIW9mY32pnkm^+lzPLRb zG-VZ^#OW1FU6p#hqlZEG37Nu8a8}Bs(d_3E%T}14&r7T{ZO4^S*Z%Se0!$hXpdT+J zb}JO*K@IK(O6JCAW({Ag&bNOo3ny)hb-;gVVDI%N)%cQicd?RkVN=U0Xro@=8fc45 zaI+55Ftf`B1qlb5HSkC6-Pv|Ac>vuN-GiLN|49oipIug8&fSG)UgSC>3^~EFl9=s+ z=b+1!C2`sHT^C4a!7Y&123X}*pO#xdKP=FX)b&pPFp0k`(8Ow&0nQXQ=V&8O%l??c z2ewk6^cs@%`uy7NJ5ET~j9$NbgtY>1NHDf)kP+)G*YRW zq%&+z@k@A796=u#`QTV3P>!Whj{y)`y~OC+xp)^&l@vLnefP2gGt(?mbI~lwCimUy zws0Uk*yU0Gf_#y)&+pKC9uA8n52!Q^$pQq>Jg68rQV|BXuO2s~8OE<3y~y>3yFZYp zf{4@O1!k!X_VqhZbDgN+`2@xxkgX1#1I@CbOln^yC+W_(jjc<1ED7n78^~?DPUL@{ zE0`t1oH>yOTd9s6lzCg^B;Ev^V4~OymBVu;ONuz+%__Fj-6IzEazE*}11jR~1#j-U zq7EyT&%O#DAKpoj>RD`gQuIk>$?-yOk-DL|o{KC&uEb3WOnM>Fi}EivC;V2#OZ}K= zLYk9lK{>ssV=rv(bY0=9ph$vZe zcNz5LTELCQ(_(GJy@Y!M%I#`SwdYxG4?2C)H5YvDj=Bj2N@#Wiyf0|Hs)6x;&p3i2 zjlQu5{LcgvkPTNo)fGBDIs~m}=-S5lfZ zYbU};%{Mu^2>GU_myB?e*t!_kY}v{BH`6r|S0V3x0BXxzvOXq$jogmbq%}4HU<`5D z8n=;}qUHJC-RT>1@ctf7&IYZUx|4KZvu1(s)ObH!$k_CxI5FDmwCIQNVaaCh(I7~l zZeDvXcfsLl6j+3)hLtYI|FgLhu_!Ih)g?ME6qaS2IUM)Z$TnJ{s2Ae{%)-a6ubj%C zit9VPjTai0kMnWRy6E0$`uRnVlB+wqVBB7RXh?$&p+tQ17JxN9{?*W$>C_(!sVlhMR`Wc1}5=V@E0jo5aVMjkA7R zi}u7{j>rORj_tjBSsDpWs^Nuedtt@ujL27LI+X?h)NmD2#T_im9dkfC3(?&EpordD z^@3ox+2D3dSptl(9sq9 za8Cn>_|0soDK&8r()lWR*~>h;Rr*&L7$zp^&umE1-9ie6_)yyiw=4#*a}@J0T}{-m z-6whKJRUt0ilJ*tSLoOfhJJDZi|0k~7}IauhBq52S<#SnW?YV`y8o5d-uCMbIJD2_ zwWGHf$}(5^FF5a@uD`l#-8Vd|+ma!qUkp{jU#cA65Yy%T900I%RDi-;Z+Q(=?JQ~J zp!pKl+6SfkAtj$GPBV>bl_$sR3whVClnOC01Gr!$)!RYDsAK>^Fd-rWZAgtJ>-<@r zmlareQ4a_}DBUeRy{czc_b(^xs&jKyat& zL_oLdj7qM$aQ%uW_5znq%kjqaU@{ynl#86RMKG$!Pb@Odmo35xJKzG~3P+lzizzju z)_p>#n0AInrLX}esRSif%;>HXWifjPCGZ}VnJx0eNApWRsE59VXQPD>IfptqJgc+p z7m|CX-^s?XNOnuNi?CI=FxS{LUOGp1zD8{OJ`na3Nu9m5=;f)mK>T%JbuHjA2S>g0 zm)><-C7e=wV6{Rq*L!}93lZsG{r}3XV|eMhi3OFBoch-&ecQc1rOwLkoo<)0#q`R`XfZ2Zy<8OUXUGs1 zph_&=@roaA{J;o~PINhjaKr)kCgLsSQ3Ir>r&{qHb@GodC$fp--dKL%88!z7iY4mc zKeJ(`K2O7?!D}YqB*>xqz$LpwA8h?iF`^O;@-d3mOg#xdUe^3_-n}+sgkBMZo_oWhB z_9=Bcx{mInqp{dlh9vv4r3b*jGqvt=JmCQHdw6XW$WhRaZ9%g8M|3jG7vi1MyE6;m z5xwiYdZRMa?5e$gd(9@0o1WXJt^zzS^9Hk7e}{6}WA`RI1tlne(vA4U7`x2Z-$77o z0%^z8+>|P$oI;H)GhFqQ3Cdpon`s*ZVNb9cQbZ8V#n|ZoLE(ZvIQtt`-#KxtHIZ}s z
    &NieeWbgrvrDz>{riLXf-`D!=YRQDKn)Wo_l!{Kp4lr5m^FpVkh4*Q2Kr6*<& zNLMl)@_Kjvs$n5Xz1y{qm4zKs%NQ#)$!c1WmZ(JR{_J2xE?Nig_e9^uLraF)4ryel z{{Pm-{3NvW^B{`Ys?c-EEdrrZc4R0>@nRM)tG}o_KDO(Yt*GA1o@DZ)*pkmU*$82* zha+M`Fx{>I)VxEYm&c|i+nOz@x+qgF6wl#^H;7|zV(bxJjg=W8m+g{0JlPkK+!@A# z=TYCQ7nz4hUtUo$cX+Y1UhO&yW7VCME~gcvNB?1|I9SO5>a(ArL&j6~4}95=2x&;U zIGd_t6NhItrz?pi?{bs9DS?|P=#5S0=PS|zy+@%iI=%1kptlZnM+?eG6}vgq2c?_F z4%v{LY8PacNgea%iwKgamE~mYkB%6+f(vLTt?-joxrqF5Eo+Q|EOuvJ;6@xM{Dl>M z%KzCtZl1H`0O?HY7uitlt}N|33<7tE`T&6;D{E@edR4O$D{|AGYcKnu_8rmgJiEG> zhj>Y%;uS^|O*Cblue_KWb3mOZhkByb5rK&&^B$l>bi#NE^#|lT;?R$Kdl@tosEy8k zSW|{crcxpnjUy&>5#U#%bp_*S^$ra13)nvk-$mW)MX|D6XQY$<9nEkpKUK}|F->%E ztEh%tV%84g4g`O&EMJT@77}D$Z5wMJn*?<%DRPF$ica|Vb9tX)vP;qW$( z(m}5whg|Q<*0qbau0zfiajat+!q;oCzGu?dLxY7`)yOTh#}!d6k|P)Fks}XPLqFX{ zK8o`(Bsu(yG86t+hm;JP>~F6P&isc1tXjro#A6ph66RRS85w~Z??p<2Jh7bjOMc`R z-xTS+Dj5Xk4krG05|PAdW0*^~6HQbs1QN}*Sc!ATrLjBx7Pixk5i|r3Hy}s1X$q+x zk8DxMT6PUU^)(QpVs9`J8QO&#;(F*Z0*EM-r)CFR@x4vE5ILxg5R+N0BqwMz7^R8` zzSZ0#Tcz;$t9xz|!}nDpdEURJlGEQntUofaT{69Ze#{;%e7Sr&6az+_3N zGdM|Hsc{n>W`_7HxO*r%yd1?Vc4Jl{r|5qkESk^<=P% zB6PjFJc^+go9PmJr1mgYG zA^T*&KU$T_TVycg9l|@>Sf?Wv(stRqW}HlPH#)=dhsK!@LF8oD5(X^`+=9@9EFKzW z$Vw_de&oMTTM}0TI~Ok7ZQc8G3tIH3Ps??-A74-*pLm|?AAI$SKKu(p$sV7~18M5w z&DD+=i@|Q8Rs7IX`@D=^r-G-mK6&uYRCOnNkst369V8|vr3}GbAGTitt&-Em(;sim z682QLM&I6^W@cqbK_7fg#Q_z7F;oSOtsKs_9ulg1Jo!w9D@GGv)OEE;XmsXw1V|js z6M88J7&hx%4N2*kx~xfULc?i;#+$5&+!OFZ*<)Hql*)qFRNMmzIRU?=TUsmf1}zVZ zwd=<$*GlJTjEv3YQ7$g&j$LY$)M}aOe$MR0Y(_mBcscv6V1AbZiZ)3SN9o&aMrpNh zTRyo~cq%lP8uk7}un&Q4%i5ksQwJDGxh#TORF9(_G?GYldOn9_7n*wKzT#)!tyWtb ziOq=Sy$yT!8pP8wBE7-5Oi)sBc>o$oi&e}jw-6WAd z&u;RxT+I)F(E&TJ7qcW}4(K}i)2S$8N=O7~%-i1Ku#j&`eJUI%Kb1P*_HQZShqhBM zkN`9V575A;RWKKhP=70mz?yJLNJ+l08cqK$>!HVt*%4+B3A;vT|J@M>!Tt<$W#w|o zsXu+0_D40KuKr|2iW3ha2Y7?5fwJ#4{!iie9o#5ra2F@&2`*g*PF?r06g*w))9n;2 z9z5l9$=OCiN0(f#2iogeo=A;%(NhO}9;ct8J@)31>Yq!>ja0awWng-KlG~QWXbE(n zO#8W|JBxH!4WJfU@E6LlR&NXONW2AX)!BTK7RAXY7myX~9BkY~+4KP3Y4?@y{2-=J z&^@{KS`XYrm%R?j0>*C&Y{Q`J_~EE4%qteX6jsQ^~MAD8d^ z?NRHj05C9QV}LgR000000YRT}MG-tl$=WT(3~wZt(PjW~6L(brC~Uq0z2w~#=HtYr zT}DzjeRhmu)k2bwaRgFVa;w=M_6X=*Z}0h%u~fbPEkDDQ@?+JciJ6nz?U3e}&_4Z@ z&HO{8F}vL@Gi=d#Lw&q*V$y%wrX}in!5x;=6JSR?TW%YdTv)1 zPn(j^b@P+5L^ERTvVIru%i4^w5&k00AF6R3>T|fr2_;62%lMgU<5%1>sbfzTz%vt= z^GEJI@8N}+exS2i0Q&>MQ)X*|c@?oc{e{hmhCDut9u%{vgM0!~a}6#7T})u!5DTYv zCb@*us4YERxkghj>YA!0Uf>As-Kf>cM~&E5+_?JXV}TY%A=bU)y8OX*@T?gcKv4AT z8Yzs8t)L<2uDL<_rfHjlS-mwoyMU>uwBwJt2DHN)MoH7pxX*i%W-ZJ+M8dZNa2{5G zRY@1Hk#X{Fk_1tuS}bIf(!NXw!#icr3T9;#`yo%3hBk`6K2DexV3i98LNh~iu@kmq zkcONAid5%`Y2qn<5y(q^TG821QqN8HE@8`n#U81gsl2-+pjEP^oF{H|za@>7=oBB% z!4aw|3*pnM89C|ft5@49fQN(C?Zp<$vC7p9@TUM-^oMbZJ)2hqE_k*Y{s#pLvznr; zSVL%ZX@)+V&=5xIZ7j#BRU+tAr{IrQ`2NImhVZn&iih(swPsbPSp@XbPwPlFkyV7G zwzpjXCB&cbG}w8Un4r!(Y|CW@Q-*GZ&`` zYuFyO%dZA>VWg2x_Lv_bjbnvScFw<;z!7vt%?X3HDkOT;dF-%Hr00YqDvMYZvkit? 
z$)3$kDwAp1li{P=EA$3!)a}wUR)SXofnC2ZO{-cd3$vk&wpTH9UopRd0s4&V`*7Nf zKeSP(a19}py9r|jzz|^t?4`bHF=0}QM6U@OoiLYolE1@dIFs-eB$lfvc~7BsAS`-NRj?-CDd`Y1Y{xxazqJUDJ#8-eL6h z=C_rE%t6XqIIw=d%NZ*W#6AhH3X@{{7q;Ee44qkX(U#)MUD_$EMG7Rq#N9BxD0UU4c-0U>74`iRuholY(n!0Ev7iTH3RPlCZAi?zHj~nrw~$(W<{Dh zt0mqY2@!Z=Larerqp-g`R~O-ZvlZn$A!GKKz?PEd``zSz&=cw>lFek|1tf*;HZx~s zDkduT>ad2_GEbCv_FTN?GY>Tx8k-iK9dt9>52RWsymrZ*D&v9r1mV3??Y>e(W9&OZ zwm$<|07F2$ zzi+89W6r-EsJqK4dFUHd%*4BwBR!P;K&sxj5lv15VeBU)jIhg2z+*a}g6~5RJyyyz zs-Owz{E)!TQZr)=#z52`lN<{#w34kew})p2ol~q!QTH{6 z>#=w_&z+ptR|bs2$>OoNJJJE1+}5Q77D&Lti%9Ixq@y$iPmZWJ9acq7R8m>o=Qq|K z_06XEp9=;i8BlD5|C?gZL<3dPzOXU7?<7OWG>4m1U3q(A2{!f}IRNLhOwWqN?0Voq z=Hnp`;^xD+gEsNOJZi18YvyNdR2o(3TCaWT)1 zGivCz#(MZIrPWuE)Fq~*|LOI^?5k%xH8wh4$u5)r&>^{pDMulhlw4>Z?l7*8k7atP zniR>B7?RaTNs2H5Y~FOW8=6lvjozi6rl;4%3m8J+PXc5J?@m7Q@dYPXWBTd_S~W~N z56_bUIsJNr*|$l@zVS7oV=Q+-TES5BQpI7mM}I2ct{yeT(vkX#Esf*q>InDSX-X|5 z?H3J!3`xYb52Yp!nvu@^uRu%^BT*B4`yLF5!OW36HZ`cosv(c``Oi{`abF>L@k}Mz zC~U>gD_~#BPZ0|rpEK^i^2a@h6N=&wDJ?Iv+MF+^>mX|2ZK2cdzlpwsyf8{pf{%h= zIjG31p}^M$X-ygqT{v4WAC&tLu5==8#%aKSL9C2gfr5*x(0ac>z7$^VXm8;;)R~1C zs3*>lDQYMn{c3_S;gW-`xhZj^8_~Im*H3t6fxwWkR$WaFWi00T6YGcA#Rl@~Cv9ts zWmqBKLuzg^j$rN+yt9DE`erTyxH5Q9Zn%-kRJggr*YH zR09mF%v=194(+@5UqzXFo;0-RtN)6s96EkrB$&VX)0Zm~WtkLRMdlSKeaO%J3EUI(v%<{uD+g38z#1CupCJWRT zLSs`phcycBG~%OoFNGZ+A6R~ptgOb+^x8jXElkWw`0*)>Z4cYSm%Q+#`NZ9}qRN9E zWf-IUH{zhKMuqS(Wf{((=?#IdGlDXa|E4lh0J%UDE(#zuyM!2jm8Lyur^tU^f_2xs z!_FhB(X)ZZ&L&M&qgY7?9}NQ(H(lJk@my{;P`MDkQD`~=0HYs;k6a;>A(nX@zJw3Oxgcqsl!bT9o4LfzU{A6>82Jkwzh@n^MsI&U$_mlUum(DJva zkBzH+gEfx8Be1lI>y!2Sq(Ew{fp-^sUn#o zPr&2;fq=pH=$WmqIV)N?vmb6u;-%hh|5yv-M9~Hd14ul8I#GCpCc4omk9odMPrZE~ zgN+Bj*IM4#_=y`ybMFO~?ZHBno_d~H7) zcGTfT`}QlAZWgPsdj_#$Yc${us0b#GSz0(~Cv~D0F^gwB;NPV9a|ExOOfL8M*H6DL zMywH9Qt!uN#Z%%x;rZG5jr2T5*ngef;H+G~M@ht42KJxC1Jt@j{w>ZA1+a zJM2VDc==d?sedvC6!`-HJndQkJ0lScBP*Mzl)*SA(zL*Vy)<;8(es5R2E5+khNeVq zkkOopL9cO03oYKy+!!b5xJ=y<`$}>@!8vh zo?lCZg6!m+B#|!4g&Y05M2%?a6C1x-!I5mvdTiTMDBWrGYxTG%?;rZBF7fGlnp%$3 zaAK@eqS#&SfDsR7&~lYYz~uvsd-~m*0@aG5cf^GTo;$>@XCIOT#@cmnBNqSd-LuoM zIv4XEk*xZ3-zu!C>Z9X?zA!m6VR~-agBuheJE#6}TZ9-{0nGPc(GUWj9KxnMosYLq zeIO>sLvruh)7vG{aYu^}vPzGCBVYNuh&7Pj0v1^=KTIDEDL?Q9o5kRV2)htrV^Kus z5|%peII8cbh$Ma9EWYB$wQRHWgpu5JoPZ2c?}El(C6NnU6$2vP2>-9z{XH$l#XEJeha zu$8No!nuo2G?tcgEQyyld~)4{VD+ckzE^F{#gW|;k%u7mmtO@QVE=Dl-dVp-zMkL< z$!~?F<1yHMdSF^P->?r1Ig6zULR5LbWnU0{2lPa!Kil(8t~tzZx4wOSke5#@8xiJW1((| zKRF7+2IUMF+{?g#XuPfmH3f?~Rwdn^~6)~5)x&GWcP6vbdHEhyt zdeYm7*1B+Qq|wgiZInORnYGL8Pl#TKB(Sa*)rwJcRGPa*z7EIXlJXW4lzHR76 z4hEmBH#TWFiwwpR&SjFS8h*(V0<=m6;yQuxZgLvYqno+bafT|7O>igkHX-x@?KrJ< zV(VPX2{eW4RG%02=+P>!B5xA8cdb&>xmg%DB{G9{zGnRq8o-3^R>s`QTFT}tNc{l+ zY{m-?KRD}Z2A`0VNT5)z^w`9gp30E(jl}7jl*2oIbQ-N;uxh_s4N`+{m4vND(4W({ zuz!SAE0>oy>$pCKgK?=jJ;i>eI~3l|8MNC@S*TfM3O9QDEcI-L*?+C6;yO7mR?)?I z)*P_pU3q#frPvPaoLTBL904|cZq5(SEgLTE=f?q_gvYGneKvat>o55h031sKpH0Vn z&@b+Jm5;)#x|YO5GRZ)eg9t{2xXZZRgETxb9mvC_-GG@ zJ|y~;Ix_xd-5qJGk@8Q25D2S1gWEeFcTHx_he6F0%_7V7@vsr^D&(S*K`>Y?1)Z%F zl(Sq+^6yo99-j~>5aC|L}a;bdh$Ks!c#!TAGy-eo*`g~c)yAx_&d-7+pQ zL`Zv~|9ON2u-4|WNM?52(gst#6Hek1Fz4q(7&8JKNqGz6o=iCbh$#lO?(pw6o})`5 z?{EMNZTD#QK49M8H)zwTv=X8K&{lg>-uBC(*CR6U2#a*8mIV5PB@sru$bg%(-iGRg6}QWkZ==z|dhr>UeeoF7ltzT$O9T1#ni}OK``@ zTv}AeWl5+nj4TE|l#wpswC86-?*4cxxt|Sh2F>}7%3XewvGNpZD`AZKQI@1LB?8R@ zrC4bRU=4g&!ZKdO|H(B?m{wfX!|_%Toq!)6G)&IpSR-)|mD_k+XYRIh@I&UN3^d9V zuKJ*^fQx%Fjpq20V}p>x?C;Pu_{1R5695lPPbgwUsdGO2ZUC~1DTX(Kh3ES4Nh8*bHv8uE9PVvYTB zj3V95;(Yds4y89P6U6@KjU zxd0Jk{0JOO-Mk1L)9W;EG6F18g6a{M*FcJ8ShVO$r8n$J@EVIqOy0Il#p{b0j7r*! 
z1l_Jo)gxzpWV-JrY!ioJBQD?31{h581l;rC?e+wwtBa{qfID@`p62QMpdM@0Coek% z&6luT0Fe;TW?T)>cmg!qGDAW2k#Kga(&CqAI2qQ!jV$RVYW+qR%0YI6gHiVFOb=wR0G z0=vMj!omtLMIvKMCX!j&Qzq{zxs_jmBAJOSzhbT-9m(tA0V;c2!#o!SBC4O>HghtP zzhV}b?YiPwedODK*c;e@1$-O)r~Y0m49tcAIe}>QHCk1R))V#O6Df?vQ77^u%zS6JvWC6wZ7UqTPx;ADugp7SkJobGl@r z2@do|#n-}~sGjlpgdxM@0tU6H9&zu!VLsk|o(JEe|6o(UZd76SAijxI=*EwzQV-lV zwK_rcv)SN$99_lS)IZ6Sp?Szso^PzTRaB~_e_naFIMVJ;FcG7QMs4`YkYW_~nO0U9 zr_?u`ctr~_BjFJt7RZ40g^K7NFn3NcKL zDrS!A6n{~x`v+P0=&uSRGrhf*T<0`2kpJe{-@kkE~<`zXYr<^RXAC`|5cI|C~~xvv<%OrMc77 z?iQgP^S$Zq`pLZyWFJnt6MVW@>w-q&CwmCx?Q$wWwX6tSI(<>(_NPw)G&lVCsG$rk z!3X>o{;=<~h)RukLqcRJkE*PQf zhN`GTPXpN!*tLFkWue3HF0H?uUR)@Yq-o+=?dOAsC0#GRTdAUH*emcz^T+XWuXm#5 zqF*8z{?RFQ_RY@9n7)WAp-Q?>GpaOal5AQqf2{hXsL2!-goEYqOLACtkQ@soMF8v$ zC;>{k{{AMh94UHsQ0ag^+ZL80#|Qbb<^UQ`OvBA3xq>Fr?-Y4hP<6?<{@ zUeX-`dr#~$5w}18{b%50e1>`CrVwc7;h6VM86$u=DW<6ZagW1`_WDyid#`8A9eJPT zhYFdbl)pn0e6#@rEF}g;*5%Hrq)@e;x7=lZOF)|4s1li8LoM)*HEvstD>W)Wu|Kqb zhd?ye4PmcJC!gW~zeaVqVI><{dvmduzoh#QL%-X?AxKEKHU0#3@pt{u^m$^SHi=ID zJ>8GHAY?7l?ui?L0i3dy6@+!7c4fdgh`{M$Rh>C$feoZ7M^1wv(#U^I_BfNbEG$5~ zl4)i+#aQ{6c1TP$I8W{_SXv4Ec(eo|zpY+T{e`+x9Dij?^7n3(8kM=`Wur0sy182z>=V8fQ%b{5LU$lS^u-wSMg;s|Ms^-@$7 z6t{iTV|Q zIlq6UR2Y&00driY+Zs1yfeDfdLfByoykg}Q4L zmAKgoKIB}DC-V6a22MFc(ctpDN$%-}$cf8FT*eU1F22>f7*oqPREOjR^L`?ojJX`G za+D(P6DE3C-njU?4xSe>!=0t622LD0T!7eiq=A)1L(|5F>v?OH<7+09F~4IG50RGn zG!t1*2!$~hF+i$FKfYuiviP)rH!9scoXRnoqDzH{#Vck5f-eBW-I5!Lj9&fKxtywv z^b^GygQOqj9Wq0m`wL>J`5&b}QkL{3R8;VcvkO=)6(ac;Wf}%RbHD*2jw?9{ZD4Ua zS_`w-dPjc;#Nk+zh3XkGlw3IR7M{`oyG98n*hXSh3ZLU+u7VKayq&5ZR(9PGY7V0h zVDql38O!W-WqhfFE6E+z!_o0mMH@-^!j@xgw#l*6?{p)V4*BFwp`D!DlB0?gr7s@G zB3H+;i5`lIXDkOp^P)i2OjpVOa43F{>o0WD#HGxw5GL2B1Up!>>Dt=zD{1GDJA@s< zaeMmnMRXb}oM$WkuBj+x)~Z&siy|bG4hC9PCO!=xV67EAd4kdOJJ^{|W9+|M#6(?) z@C9YrvGus)@EYs~=qEWq^3o3Wc`;+Vz@a{*4pFCb+mW?xZBDeNow5Z_eNgN&1^vdUBpR>RV>%?}xBo4P+0B3;qQy~`4^bKKPU z2BFhx^4$q(v^s=qkpPy*3b{cq2HXO#Us|j#5Bo$`i}qnVwDaP(1bI}fdA@fV4n+{+ zT5)uD#J5x+C5h+ri&?3RAg&L-18Mj&j$$EF%>W&FJC`DW>byhX0U6Yn#-i7J- zq$;^Ag6V+4J3`?jtlTpwE1d5@B3#Llgp%llldQd}=r>4({V|``Q|QDZC;2n+9%Jb- zqJO$CZmihK^Eru%b2@-oOq#wUO}82eJ58cw==)$s;hIHCd^QaVS5&2O3cf6z%W(82 zhJ6cw60|0U>#G<1IIy~+wZ?Pa0B>hu(g!O~{VG#tdR-c3=DW6rw7Dv%{Hpvy{5vSl zYweFJxP`=vHpqYumOSLpXA4~S+GOUEnC{q)v{A*LiUe8sYZrxF|?Rs>s19 ze(gdsrQvlwRFbt@4a6qK<7Q(Y<+wquDMyZx(AlV1hLR{)@?gd$XDS+*%4Vqc@T#I# zr>ujnJ)x|M$Pl$q61RxW{^GL_MfQgZnYpX3Hb3g43c0mEwzWYOV${OZq6#CZ{v1DF z4iU}mSEM3p+C6*{()W{&x0xPYl9d2-L%Gl_@v`TQ7$|1{iE?I77lcL^1EPyeU8lfo zhUR8hxz1wg5JKk-FcCykBxo@ED1rPOHRz)^bSm_QTUxj! zC+_QhfOM+1fl`k7PWI!v;#8c^2>8)8pjVVuK9JpHT#$Ic-%A<@THGxpL2qWpQ!5L& zQlixHDpJm-LWj+RVqB=wDxARS5bYzsT?UR$Vn+6vb#l*0^SOppHbq6g`zq&^PKX}l zmek6b1*RN{UW0B;*NnJ)xg@jr%Em|ktM(!xUj|4Z0Ye#K-_kIuj^wBcG^oCoh8ah`dY;0Tu-~jpquW^Qf z1J@{ylq<2NwpNvp%s>MX&c4M2c1&-hR7n#yLJ3;Mu`$0vQ?^t9y!y5w!u7hOi*enE zC>em=tPvuTa=^vlk(H3TA=GldEEZfrhR5ZFKqhyq&_3Lvn*4!g zaNJHdr4OaKnP3@cBJSzQy&ASpgt1qy9aGez^lkSq(;so#of8pV;pn#rBL6|3B{=$J z#~g#hgGIlDWR@CQiORLNMmg+pR}a_>LHP0VRcSb*aAdqowq1&)x;=PeUBLG^<3l7} zFyVA1)o;W@`F%&|xk?;juBU;m86U#cx@TuIKnhN{R?p#Rnai+Fio{O!_a}X~EbM(t zLvmy|XcLHRq&b{=d|anU92OzvC%&X^K5^I9c)hW|we-h&_q<&qZyx!V%iyKB%Hj)f z3qTyU!m=66Kq$y>l-9*z=mHOhdEOt6p0%d17e_~i=$$*u_3Hiccz}$oAQKygJ?}NY z9lIKbq-S6$s2P?_fmprWQ;^rV^~n3s&O&epQ;myzPtex zo5Qn%ydw+mE2x~o1^%P0B}4mU96>dfxYVKTe2V8rJi&^0mJ!`XU*6V1m^m9?JDs^E z1H-7EfQF)B2Ov8Kql8q4HPiy!wbY1mC}PWXf|l1K)0iw76yY9`5_)~pq>T|{@EysM ztAQ{lJ(XUmk-OA5&>)y;{ zj3TOWM~Y5+KqPM=r-To z>0=-V$O{&Z7^vnnc5jC+q!pJphrKEg%T38nCWoE>=f32h9~_=>6vBG7f*ta~eXEc% z_Pu20ism3klD#(zO|R)8&>$xw1u>m$FI=a`jyDNk%4Ct$(gQ*F`;5_97-$LjTf;Z? 
zRP}^K+}j{#oxcVZhQe4HHpa}RUZWw60%J>E?4yhzi=FI}*jDbsL_X!*!(+1ciHFji zg7y>(&f$Y6Nv)Qt*+)4~9invO4^gNgRQE7eZcMu+)yp%TOTweJn*M|u1i zCbhfF)JaIk(|kCGDjt#}RdF@1pu;Ix{itM_0tRB#pL@+Y=fK)qKELgh`|%+ezq8d& z{BN(vh`dR;glJ67_i?h}JF2%S45cvey)a%u&V{krlGWmM`F|gRjIWxPl3&KXT`LL) z3xxBQWoiWe>#r*KqxX4wMx}=p#z>DgX!?ThL@Wgr@$RWj&_qs4Ry7!a(w?F-sRAC3 z3j?>NM~8ZF*HT*093%Q^wzQMxlMJz}FvlF71uE1oWH>D1$oXW?ZzR^VP9~Nrp`~kw zvLTcrKFu_4O5aT`8MhRytt(>1i`ZYNl`*oj>ZCjBjE7N)#&)^c<2sM$*lZFCi{Uc$ z);HwUDZ>+N@>aVL$COYuiwre1LBA65DRf0!1(?>bOlsAmpCua{lC;TgK?*Q&8;Qz# zn@81zO+-R3U?~Yogi>$3iv#|L=$)!4dHm)zH?bYcSuTHBt6QM&L z>^c1OM$A(p>sNjRNc`u7f1@t(a;8MG3f$J8a*23U)4{v3BOjRM;EF zZO$a%kEOn&Cq~7>N#>(YJm;`}eflUUuOg>Mq)sw0>)}e8y3f|vJfYdt3ehMS8>K-{ ze)Gqo^Y4BUaAgubXbD5xGAUBCja1o%OwfcjZ6!TsB8il2%##GZ$2E|A?sm>H^AH<> zyJxDl69PE+W|&G29n7(se11qoX~2Mm@PcumEb(+a&qm&tM2(2q)Dr&;x9N#g1JRp* z?8`4bPsB>thmUY6eY!tf{AK0?f1h_d>+=4_KBF0L>iDhUp3XVuXvEcik4a|xr}xxI zJLTIvKs5Z%qa-WqUCDj5GlXU}kmW$4KUO=L5Bkf(lPSODj6UOak-Z@x{ z?0N+lyGIB*evzF_v8e&R56IJgi@>3&zF0b)mA=dUJzVK2Sz5qLuAp3SLO3b;w*~#$=r9gYCCV7;h4+CL=?kQLaDS9aX^S6P_IY93rr;%hO?gwjp` zT6(J~QYWaq)y_|W8N$rx*wTLvjuCO)9DtzEbvz$)oPdPeZvwJkg8>{ZOQC)&JXzOo zVh5CS9LWw47ObujCe!f5kh!%c0R2{(^=ZHQC+p<$p;jj(l&BP8SROOKgKP<(!8WmIw zm?foQko16|@+#|}_IbQHjqJm6Aq#@6n+*tCdtA>J+DTt>n9L=-1tlBPeEQ&8uJ$99 zUB{;Y-poNyCDT_k!5F*Ok#e|(e8UZ8HDa|vojGTD&@)ws!D&y>DT`C7*nS4$qy(!> zz~v_rk00K$BS{%esO}(aX>`>DP|l(+i$PK_o~|L#9KHEd7?m z9~qz+=vb12IcoW7op+Ya&E^9zLpDTAD`R>Gav~~)#4|-}paXeai8c7NFice`LW$uv z`OI2xd_d`htkzTb?Cf-T*q(F@cR&hxdDUj_3EchMi10+hHYrDX>7M?qTT_olH?G#w zt58f#T=j>cLA~~(a>2JbhF6eE>%8HETjci^HE3a4t!{{C6v+#OJ6I|01NrIF)X(Tz zKH3+xavP5oRg4fE4y|bxi?3R%(q6imLXA<(ixj!dLFYNrixq&H@v_G4U}BuTk5-2j zSSC3kJcJ3VrgovGD}F_3R}%61kG8V`j$Hkk zs1$H(2s}qu8f^1GEOtdv#_WYH14T141DS~l^(I>dz)n~sUbAv8*p1T*x2El`2`c%! z0kBt%E~jryHjH*-OGsEpi#YJw*pw&~5 zu&ZTtW~a?I+JgO1j`Q{)cdUJ0g7h1bqjPG0UIR@359wY6MnX@44Su94j{3d{1SS-W zWQ8@RP+MRiR{`p8RJt#mxJQz(q|BgGMoy+p%2r)l+MvX=YV%~cN`{CvUGb?H5JhXc z_t)>p9VpEHyDatjSDJ1{ea_}XdiWv=QeymjcQUj;uB4gKKA3E$rggz5zLyb)OS#iX zg_J*e@qjYn8q&}3hgrm=1O>V8PLh(?`x-bOcu`vciiN?w7i3Gd5U6u#K@KOj)|PfG zm~^vr?U5TSYmE%94zReN+>^dDfRbQ%LvI{rsYD73a2prZddD8n6Sr6MRh)cSW*P;9 zBfJ%{nFV0V3Xd2dgf>v&7_9R7jHOG3k>3|QdX@}NB`kot9cr!3j||C zhhWKRa@gJLn^^<;?&%0VzoCr-wK#_htKv*IeDA(A`d2%X`|!LIC=OYJKOUV%cJ0)hvBejick@iP+|2YN z!DJSSmN7>f1#5u+IuC2paMcnNjoI%a;d$M*AsxF z6{VaE70z$HKZAKmNtI9@7}d$;g^#Im_uxq*Cp5;GIzo*w609Y_X2QYXl@qRUs5hWr zZ@LH?RhPujG{Uc~J>76!5~FG-^{kb_4Q%MAWFxPnDM;Nm%{A= zlT5s1)meWDa5?bMq+x(_)6H7q`FJFgSxS;T91-}yba{>&g%S;CSL4zbic}6$3vHW> zF*0ivRPr$=*DSWu?nf-Y0xJkp0XkNDAkvqhzsEZa7QL!NIKQ8($rr`9Aa5F&*n>Dn z@CT@YADd4rPTn;3j&I>5$)_3K6dPu3z&9%6i#acBbof?LxJIBd*>B=&^0{oB`k z08v!n?QW&%j37Y}1_z{pQ3h(veZW|~Wn&7AXXD>BuUvBHfnWe}1uu3-5M)D|wfbGj zBEa9S=y|=t=xX9%Ew0GiJpD*Mu1yu#jNiYgpnBBzODOly!9u50YkurK;heKwk2^42 zEEa0YT!r69&pZdL=Z6&h{$F_=BLTd04!)qwM&4Yu&FZT6cWWyj)K*GG2RJ8E$BOe| zDt@{f+ityA>p!QklOFL*7VOGE1!?t(CGorcP#rda>eLBgQH6&E_6?R=Lh66)yG25IVxc*PM1O5deUlKqCZ|Sw@MjyMu zQjLqGGdUXrjzyHX^C|SD zN_FYkOgnS0Ha8A#k41)H(s$=@mTb$+O9WS?s$brb&fu%6!mgN({xoHp`t|uA>*IM% z#&`o3Urx#HRpljzx+8%eC<~STLC`^Cm1Ld^FC(K@1-UnlVqS1#RiG;IWg3GreKF`( z{Ra}2s=3g+B9J9i$Yp?jXax_TX%5JWjrskMe!`%4Nw*tv-Ue*UeCDpx8@)&7ZH!L? 
zOx^#oXgu$)Kpx-!7wsaoz37B+G|N--6Loz(ABoFf8QCgLh_a5)3*q z-dJ@T@y+k+9jbi!pl95d_XkA~YFLWP<+3|!CjLvzdqy>y397$)Ie@DKL_0?OX4{v( z`+G#H^sMBA!7lgrgXFZJhC;%&u$*fyCj5zoLPw>ceL5~TYYgZ*(0|8=Qo{{kZa5~K zlG_OmQl@j~GixWs`~emK@P4n$MP!{(bMu>SxanheaiL+E<%MGGcm&z}Rhuqon8Z*e z#1ipm(igq6dW{Yvivc?uL>q*-(ML8r_K%U--b6L_t747?Ilz2tLqYD5>}`f9Wq6SB zC7~@_F}*(&_^I{NiH+~-+GUaC1%#fH)cfUzY`5$mT#-LD`#cyk;a14Gvb2?r`iuv& zaT;`A2s&tB$qLD&NoT$q>FW&n2bC~fZZ`9BV4xDg#)63pRr@*-=R&zJ)C`ii$VNNO zGukzh8A9n-c)BGM^McX6fIy>GrzL)*OT9#Ic9O6&a7GGQs3gI9_xi=cF+0bTZ{BsW zk54i6mqRsA&h8+0R+~oylAYVJJ@Ay)VU`pWd6Esb(6L#PYr`Qi@mZ>C;%biy{I0oH z$Lm6Yz7Mm8!`=z?I3#f=q%KB+4`Yq6 zSy63uHxeH5SE#+<(J-THG>_qhDCE@!=`{kzVFu@@K41!x*!6Hjc<&=4IE?9UqGK*? z;)D&6`${lexn7L}*M28qfJlmdzM9NA*~JDPdB89*WMeBH00000009A?kaR?U%s}A1 zwVMBlT0bRbmA+aP)v0!wM20Ugz*wq5uA!e+Z&+>=uYB4Y7)7WTP6RABX2%$PCi|jE9j@b$LcFa?g9b1|riLUL|GV z+OI`AX(BV7zNt&93``)gU-0IP6-clbHp6-v6CS-ezgC9qWwwyv9J%Lm+F$#6sT+#4 z7N=Ee;7{TsI(u&;0ccaflPwqAi8YB9cFe83@2uMz^7T;Ei=QjNJNQajLdY>-LAj;4 zs)W!zk7I3HH+a@5EsvNrOqn^kJV-{k6VKUXIDli)A_k#j-7I{tD_rpq_1q>=e?cFX zrazQ520qO%JVK{;ZYRMiRt6EOmnez?-*a1<>2YAXptY2hPOHyLC@zi_gRF?k>&s9{DkfCiOw11 zoa1#QN_}1nJZ93ou%RkZWotmJ7G&$|l~aCr`4kFPF_bc<1g+flVZ=A3F*oi4@C!z_ zU;Fl2biP#gH5LVG7GY1{Yz)zG>g+f?hFFa+V*1x_+Cr{wK(&c$E@K$Mepv};i$bLX z@HJAg{B@?ukBKOJrhYP_=gWEZjf@UFGEwOjfsyQWQRYoUS_?S!Ek%@p!+@DV+467s z`bnPpE@j|#7^#G#bd41U`xUWSLK0L!XCz8?2i|^3qMEBC5mOx7s`N2npy`Pwe-(+O z7w_&ODCwds+f?`VTS;YLS_L`YLTr5#?WG}5-53L5FVxrPM{X`g%b zu>)V?Zjotepz!$k04D&M6%~R;n_TKvXlmo-72jlP-CNSmE9sp|wYfWVCgA;b&FAo{ z7YiL{GsaTskS7h4u(YOd{RK0;)UcC+Hk0Jf_q0waLLul?^^25oKz77rE!+`{HZ}Sp zzU@&dR|UoRT1-~`!pz^~NcdPzpd&(FQ~J?e&{Nyw&gDVB_Z2or#G_{K>umSD*m<^a zT<;zo7V`f(=4+%Dbwr3-$EY>}6_!T^`DFs&1_jj=_CuefjGQhNS<4q4+8+O{wqlFd z;)Wmw{Whz#>GzGm(O$bL_bQo{lQi_`*=F|U5(xC6CX~(T1wFQ2!WeB}B>~rmJq9f0 z2xy-DMRG8lGz4}I%8EyI<-@Ax(SAQ!uy)x%FO;a#wHfxF#K{1Dg+KY4Xwf8z6DRM!3-4Fbp@ELCc0%UXj3w1NqBP^M+E>uN}cV)_=Da(!O z(A4bBH*F{vVM($Ks86aciO+KkegpLSWzX62^@Hdgb*MWnktv`P$Sinq&)-X+6@G+* z1a2XRZ*I~tx+0m@Au3t3RDllX=v}J9Y#5yU9d8warNgU3>+FLkGVG^sph027sai9( zTn;F(PN2IOL964W8jqPio!X^L5K2IEFsB)K>!ucbMV3j4N9}e*N6LS|ey-TrfW&9J zmx|cAW>e0L;Om9MXeHZld&F&63O)CJtE}kYr@U*O8~JMvgtB>C_CYp57D;5>Dgr?y zlN@OOgk(teugK<|{Hh-FerIm8Rpqyg5xEGel~_r~)lD>e=cXg;A<>~L#_&08J~N7I7>bRDVbqEK0n5BttX3f|_GD8! zDpeJ%C2c2Dq8zPDJ=}K-O;vm1ZC_>QZd&>Lm2T<#rm9ruqYPdt7q7m&D3hwqo2(_2 zcZ0Iu&aZqji<`}P3Ah#*(=h@QdCEOuoCe*#+%s091nx&6wXF{lV@Ht9^IzNIcaKB~ z0ImTuujaSU{V|E#Vj!<3n14A4+F*ZF$B9(cUY)3^pG$6)$V`n{O;YK}&!z3Nx1q>j zxE77j8W^9UJ`h={7`_!?d(c0%Auft1*QrFmh^uqjBIdmP_RzlX5N{IDrpV9^=N&(c zL;a~XRQ)$y%fDAVC(u8O&LB6k?$wMq8ZYfeXHJXPbG%Vj(iM%67>#vw83fqSor>4~ zUhr(~OLFD=)JZ^%(nzO!L9fT~L^ciPl71YvwrbKg$b4ulBICtcRT`+C^hT~RmqOA} z#ZSiW3nZrwZK6`KEH3U!iXqaC`W~pALzM(fV*()R*HK*04ZkN3EK7@~SK)718byB? 
z0_G*)UGpL!tGxT8IN|qTnoA$n0$*f^HP91%F0tr$06{#c0?j|y)WX{{q$!{rERzs3 zrkgOtldVP#qt{dIY14k61}kN(I{GDTM9yixfkF72K^G)cnO4Y)vk^_dHU4|TS(q+E|1HeE{K+6jD}l@l3W-us7ZA1jitOFegHL^5UD2>+a4a1 z@BYW|1FPV|VNLO{R$j+kN4hiTn`AeJ<)*G#N3G~!EU=SnES%I>$IoKsxFhB*Ty~rD zYYhnwWBdU;8Y5+aADaZpdR)*HC@dIaLWA3_X@{sh)^Ob&wza-1*#!ZA<QCnib0dlrpl$}Pxf+2(#& zT9U~|osxJYkt@jF%m_f|Xp=(Be_U7_Q51aiI{ypun^awVk;?m-O2Kb^*#eaQ@DhIn zgVyVNXoK^@^FQ<;%mlv^YGFU==Fzbz`5aFmp~!aZ8bf!0zFC#b#mU|JZmb?di|9$C z4y5}zKQDYiiN!?9C=Hai4F|#T&(d3ZK+AzQH7)V)mCG1cRsueo=wMYx7&xR$}fLq0{r zu7oFy`za)I2kIMfgMX?5?k=*N zMKi>{WI~{_7;Vo)j+@K!Taf&h=uJP`5HGqNeMYw$n<}M50HNk1;2p9IihNjOaZT8$X`qKL);=c`BmVdWAuq*3VZ7m zwsWOCS~}_OF(>#{`?kpr@Ad&5-{vWttfP90f`x=;<-2sFBaob~| zx4Yg)&@1(uh-&#_u$;KenR;I2h?xbYLgKI--6=rOyX(b2L|AY3J@q{Z*bP`Nq}Qs3 z5ljV53OPT^{aW*;FZJyW$gH6&5fW|SiOx8&W$H^blLQ@GH|YYMc-xA`$BH|^#8!56 zBDi@=t-he!?chms-|>+O#McrGA*w&e3(GpU(AIkC{`fCtYn?kea>sxH@wTvN8d;TG z+@n&e2zTY9!NoI9_%Zvc_mp>zKffbqz{3O4*yxkMFdQzD5seXE8VV^~*C0jvnu5%# z8@6I#^A$?gh-)PM3y>?H23^+tr)rU^U}z+hli1Jn3l=M{4N_~n>g6UW?+2rboY!Oq zhHpFUTsdL7-}ft8YrYY_G#MZ(HL;-!n2i{k11kAu`6NMUymuy+%hNCha4YUWeEthY zuVH@aubLLau*M-dRQ0a86z;W1)`AIyCeH!F@B+LX*iNR|6rC!6tWVAu=4}jxdEi7B zbns5}bp1xmX3S?DgDsm+{7t4INa4>W=i#3a!xIk8A3=;Rn=Q~D7lCbV{1XR%CbrOL z6|A0Iuv4W@#r{9z8XgCSXn&3E{p`Uwd~p4j!~m<5s4 z8B(1s&^N}xa9J`aNlTQ^p8Ex!mX1%%UkDo|X+h@6@D_ZgIFASFh=f5?YwKDPUEMQI za{RduG+X7r;bTXlX$B(}!N1wz08v1$zkwQVvv(uyQtm&J;qVog#2-{wwcs@r_JPY9 z5k8w9f~YX?ZLxq0gV$KAkhau9bqr~P4?dFa-Q>d}N<=rqq#F5#OaMT$1gVAL(&bbJ zM0KBUf1B}^d&fBjU(jbd4Phg<$X2qu$7vPSA(e7e62dScDM9yMUk4KM1Nptt8z$fE z5nBo-bz=nloJ7o%6_@VhrIBujL>jrsqbUG-9#{9%c=Y<>kfzgeaKZYZ5Tli+oH<}( zYV~2Z0?~O)-I>Voje%*iU-j5Rb0@hAQrHPp0v<75G0s0lT)`(t3tz)G%^2>{*|}AkC?ZIdI>k|K(6haCavFaK(N@ zP=4W?z+e!J5@9Cw#G43K-*A>3le*gtW-)54nVv=C885S=VcC7Tj^x?#MGa!_p`QYL zhkVAqiG5RZdhM?)?TNcVaM>a>=VslNyY~B9Yl)$bY$Z7#KqZr6E>7EBA03KV@wOp`GX~iReoqvYLvqX(Dq6eyS3%lArzFM&@X=Z z#?;chGRu5oK2ux7fmgM1zeluTnHF@fBg#~5gZF+pc{a&p8+9Bc-nA?mxsVSua+)|R zH4_71FIY-b-a1r5Xb_9(J!?`|I-fUicQw@LQdFLwKt zi}cP1t3TxgCzowCyqr_qA}`cpmRD!H4h`lz)fk%F#x}gEzrpoXJT~%M!fN_mv7^P@ zmTt+gj@v@xCYW%9Z+QOGF^@p5TM*k#_TP4Wht+FVQ?NO;c2h01E+~IXb(|egz|8Dw zS8PamoHVI`SMhR2NBeA)2p>l@b~vxkl-8}L2lZoF@9Av37dc!A zp4@=#&^X)kcHphrPBEyNsZ_5g_IL(#7qeX6gIJB+SVb?@Kl8IL%Bws6Dca*;>)L7{ zo!n2K*W0V?4QSZs3`e@b&FktB28Tz?71t+z=Z zTvOWpOijULuEnm9(E`83Sn9V}I#EgfqSQ`t18+u(%P%||@{A2`!46ZoIwu;8--UFR zzdA!=+!M`BV)YZM@bSH&1@-VPtHENUMJSp-MH|R^JFg7YMmcw}e(1~ie>@)fV$FoN zQcFaIMeLe4DlM_JrhkOca-kHdv<&-RAbbPB(`~(~V)xAFKRAmg2Pu*jpRC^sKo6Yg zDx3~p9YFh@vG&Gk#t6Tw_5YP-I)7EbcZO#tOyJY=)~QqJxEWXJpGvvIB{{8HJ5{TC z%PyV5|67$usADGOEN)aUyU;wZD4b6w!{~6BJ*b?Be=MP9+tSyOi%0o#b;DS-WDXk? zM|$qqGsy(q8rEqTuF?uej*&rOZ<$LalL0w|JX0p zwR@Bq7mZg;a#i#AKX}|R>fcmVy{k`l>HyzW`2nj)NF1xrwFfLcd3czMBamygQyi5! 
zIp8(JCTZ%0($fS&yzOx}26&Qa1;_HU!VV6KZJ&Fp%A2Na%#sWw7RZXxk4){i6L79a zUVoGhq7OCmbhPh3v{wAf@&`Gbd;Kk~|Ho4Da10puP75MzkWytKK;q)6UzOta7DsY< z8=lgVPya(Ek$=*z?7X2QR@O7iCi zX}5$3!t(qPUKU7Dqkn+AST4VofpJ{+DGrks6!k$Ii+LdH-?Fs9`jp{stpjEEY-=NG zO*Glpvi*sEFSYMr9mh~rs$D3~gQ^}VfbZ}HMF#RvpD?7z+>!X1Uv*w>|`ekh`_>LVYRMYaLA99#D zzf-dg0r1oQNcTQ3!rE`4Tx8DD^rPe9`hGC^NvOyAI*F)q9_cy;;xK*g@rQ1Y@E&VN z1#}W&EV0qnFAsf77rAaNp7N~<=M@BmDqDxjBvfBUC$)Lrj=2hfpY<*8*(Xf7>^5J% ze9@$6_vm5;v|IBp4?4wm((5<0`btly;VKeqv)^CB*#f^lf!ly*3KGCtBc1U?%hD zYo_Rp*M#tTrbo2+s4w9rvTV$!{Pu&vblR@7UWWi;tL&BKZfN zb2_RB+$_K@e9$J9Eb_hvC4p!FTGzqu=Syi5pq|lt6*GercP^c&vn;|IZ8VBf!yq`y z7>ZPlF5`_+UXKXWGNZMRu)2*7|Kg-HFOjE-SSzFXqJe2cC)yf@$teWox!f#5x-(}TNu4y_Yr5{pHMv^ zQpHJV>?A9^LhpK$pMvG2FnK5PurDkpm@xr@;CtZ$`yl`S{i2JUHJN(m0Y#!7;W6`Y z`@7;>0uR22>XYvVg-bi_g+qXZi9=Ao+!RJ<`(dBEHXq*Uk%uCD zs11@irm!1;6P&l;T|zTeufP#?0%&!L$0Lut)g`sH9EI#}p!hoT?wbi_!4+{vDLmr< zzC1H(OFRNq`{(Z4`<%Lx5b{pCe2!fbJow;s(#Mfwbh**3Amz9kk~f;Ws{h7~a7M%r z>h>{du>>AOKrK=;n|VG!4Hi$+Wh-84 ze7*7@`*ac6{r+sRhUYpIhWP7vq<^8-GS^+iqcl+Zn#QS3Ek_W_RzG{Kx47e=`13>p zph33q@sx0(bntpaMC)0qKC%BWZw~0t4lbmsQf_$Ea|42Cdt2^k5i^2APg)Cx$8Kj% zm^k7x!ldW9et|AL~ z5e4W!@v=tnH-8+gYs|3n8iP~Z_bD+z9nvFbc}{)u_SZRueOhCJt!zi|Zj8h@+EJ9b zpYsc<0$#~HE~o)VBxk_n#URQ5)8YHhD_UWvosnBAYt)yNqb@V*W^aab;np;30mta) z&0mky-0HaB3>HOi)&qy@L%}SF@!(*H?8r`|{nB`@ieo~9e{E#NqM0}18q-HGu%U}F z^2T!8G;Wz;2YddB%wziH686tKqyQph2~+(@mxqG49O@WXN$o%~EESA6gK#08ZnfKO z#vXcxE{zDQi3>3|f9S*J04^cmP^-8-_-sq5`U>;oF+K+j4lJ3=O?=`)QdD0Q$r84T zn_NcV(2&NP$&@_n08$gwg9!jIFl1vq9RL6T00032pOQpZ&PtwVdDu5Y_E9PoOZqG` zNh;t_kM8C_Lw)Q{3MG~Ro_j?n;VKf);4ukhq3KnC5$pk(_S`7>9Qs_shLaF}%t<44 zwhJ@364p9GMC)wVB{Ur=q~+q_Jg?tYT-k_m;%BjmRsik+N>S1Xcq=JC^reY-NM4u$ z7h`h3`6SE!=2bT`ZZhz>Ona_%*j%(z+*`GPqv=fcvy_Z7a#Bv)-&!AHd{&i$FNFrCey%Ys* zv{r#9Pj#xf)v3#3vEJ;#C>x^bp=s4XJ%__uzcb*83S@8NXo-Ctei(}f8->RxWb`^j zf}BF{@#VOu@N1Y6J!2Uc$buke3A+=v-RM1GH0}>`98zS4ZeI*o1#b z-Idem7KSXt$WsBI&r=tBQi7F(F^K?`QX4nnGBlbLhf;NiwKHqNW zGZzjC5%fC&S((oec5HH(1X1zsM!W_=nQG>FS&Ya^O)x{yD=fUZq5>j!!So z?w>7RIV($ff$wjd9ZPAWPtnTmNP+KW{D=zRU7x3?$ycCdm=zyIa}3`N87E(0$>i8D zkV|VaUN{?EiNST||BLcI6UNBQ@`y|l2Ysa*7G1?lLzX`?cTtERlOO{-_Oggd2oK)c zk#Ch)4fw2S^Ck)v68gM`B&!>Vvej~X^YG(9!F$=GDm_=1cW6py+eMCKtZ-;~iS0+v zTw)PW#*DcQP|SRo1!Dm~)qFE`r|pHd6G#>_DXnP5q0y)HGASGmpl~?Lms@*y)Ueh0 ztDDwPKUFXn7jgU1>qio>wHk^zF=xsb8WWL#pUaiE3}D+LWW|SG%dsn(XALf!S}dnl*k-H!0$7CyI?&S!-WOsFKgvRpk>VeCCPCpLbB;wXy*zGL0}XOx0Uzs`x>QQD}4O4e;zrrZW{evYMBpu_jd17i9(w`S?dV z@M{`QrhdEgdA{oY4BjCwN1gGHfdu|aDCSYF^uaXouvMyK1{Hz>M!u$qa}DV- z+3iAs*#-mrar+)*dXy-xe4))X-p2oyrJR=L`YNT2RUTzCQ@o92@=LlC7{L=Ze>ss+ zO;VE9R}8?}{{hZXHiatF$S-d4h9PASgtCMv1XLYZ)kB~$TBG(H5}}XA)S&*63&y?? 
zxGRP5CQY%q9wZ|0nU&bDB}N5w$O7Vwk3=rA}3xIDQv-Ct@J$=Tj-TsU7i}Xg`EglFcx#pKlH1M zi^UB#nQFH(B5mNkFQNS@ze{Q3qGkPGtR_8$0z%g=-glnmy`um}x2KKS)z_~6W|?=VZX(ni6MQ<9|XG}8zvC+0T^$t1hVdDW>e0VXqu0*0g3aJB0l91|n1CzVaN=}(L*X9NE5kxt%moAC7Gm#U~{lvJo_p9+2V^({r_P!Oj<>bA63>|DQ-E zO6_~o^&7X!@kbdl7>4{DnlR6POCK2!B}9?Ra!zksOv3{+q^ojS?2Ao_osGLY@%Lir zvqmCeuXYGl4WO}{W^3^ZjPm9oQXrmm25p8Y(fTrCh6y}$9jl{$){4B!l~41*#)0RL z;i9-&Q`KKqv%$jpe563T1ZA_*+Q#mko&6MDIfVItBhWnKMKM0og|ln#Db}?#=ON(6 ztYwCo9=J@ojQ~f)HWXVa#pHR?9o7Kum(uMDgAgAC2x8;Tsw$5+dOEk3hM!2{dWt=b6|1d$J zG5H`5TWs9?>TTt}XZLPdF*Ce@IQH9*VP;45U2t||?0>Yj&tGqB)aol4NEZ=A5z+Sv%r97uyR)yV$syeZAw*(GU$2LP6rUsHhY@d275-HuK2!fx-h7F(Ao2{ym&9u|pAF4u3-#??-vQ#o$9Mw6PyL@nEiYC8{LVSKEByXsj&U@st2x~NNd-l~q335tXiu|DMo+IV$r6$2sSKBh)T%w3hezIO5(*t&V`J>NtYob;+Wv5Adfc6z|pO4SaThhCQNr_WWF<+!p z6h~iBkUK$6O%DsI!SjqHh?hxi(dOYd<=^PjIRO+UsdUwmlg@DwehMgnajwdEX0Bu) z?T_9g+`!mw(gob>=e_1QmQ8&h%jLVUQZfx|(NG5=o>dyvdfRQ} zmj@1ZesHQQp`j)VnYG9(-mWjpXBI#KfPOVG)X?20wu`tOeVd~L7LwuYBbO(KO|p)e zyRaugNa-2^7kT;_F1D+#G_xp|he=?hM%3MGeSpF`wKknCUk?NHet3tKJUGIYYMGhC z%M32U@(qcNhk47~Yt5J_=)WTy;+9QT-OtYU!KKQ1wuyxHZP|Gh+SWPK)0=_XE5)t% zmR+*0$V~z1v9HFm!MCU;yS_Ez_twV;C+r5@MS$eu{4Xj;_bJdE#`sO9dpd&%`@zxhuFO9Jx}_lRs>}4O&-TC1h+2!g5CKew;ZRa zKlpB^@<+5<(qS6O}V06?*jz{gmywvDxtX%_xmTR}(n_iV9;B|64t{03fr z>3N57v8QBXfbPCStJD####$iHtd-Vw6y6+!+#6{5sp1XxGlYYRKBBVv))% z@}IN>3_rTu$JJ}QTN>J(^T_566cd7RMNe1)CO8vA4JhJn?{NIK%^m@_;E7R2WCG56 z=;tHBncj%Nybze)-7p+WRfu23u|W(ZC-(HBRU0^>2Bgp{4f6sp!aMfPa?xfOa~Nn~ z`@CDRYs}$6H`_SX!Uq(kCv-vTR+^%aStNrw(5*zo_fMiJ*S?guyv&a@5AZ~3}?-8=*w#EGz}g-v)Bl!*zB2W zAoP!{AksD&5EE@qJ6MeP7?s1xG91;1W;9d-(}Y|5j5$M)%J9)>GE)cdOuy|z=ahe?)D_Dq+Bm0zf;sXaKlDsK zl?Eia4&6~t(rR_^pDm<`?VP%d1nN%`Z>lr2obZXtJkyi+r_30wCawcxUE-Y`3M9HI zH(`+C-L;Ljd=Gcapb5+`_co2o0@F(Z4KyTdD9zyqQaPyu>2m**>xARXhMYX(dFQ(h zv|{RfDU)$F`yMcJQ}qax(8m#c{i)c|B!mJj{3TPE!Ju2h&j^8n)XokZ6MRWbC>%|A zVd?tv&^sFM)98_BE1R^&YxQ!R&IrP!G;WlCcR#CN@riuupHiW}GJ(1LDD={bjlZv} zDq*ZF?&M}Mvi;~b_Zc4~sWj3a2TwpLgc*tFG!?oQws^_IZ0N=0E8A7TD2+#D_Fr@UK_>PuU zK_?o>E1hiM_HfHYTb0ks1@GJm4jby+=6RhMFOUu)O%X|l90Q-uY{&C#{0r}q1pw)y zJvwExAC_7&LlZULz$Y=cnCQb;`XWH(LxR0MBSHP;v^S#XH8;_xUk6PQEpw05{eBav zcT&euP?3l%Z9Dq68{m&B0G$WEjkr*>o50^wVmCSd&2QZFLbOBavpS}~U0#mk+*F?- z!EU0!?B(dR*anQnMy>#F)Se$`e8RuhNXuzVdGV-(m&5eWS*FG5;)V{@qX_%IBLmt! z)5}PIbMjTPNvASmr}GhqS$yPjrLy71+^UDoo2F$XTJ={W!{C8w7p7kWS(9a{Sy%ms zws^@z&yj0LouR#W(GXMO#L>z8k~$+$DS#nD@+5=i^iwZ-{}&F18T5v@8CPL_#sA$R z9!qRDh!X^A?0`xQdez6=T(>4Ak)6)H+D$hE0^M|uh?Gz?03)o9?Gl$(MkX6*K+nUM zF;s;5KoF)DK1);5Mgp#>pWO!|t7+Ayu>Mv5pcVI@*`H}foPx*D{UEr?186fNx4V#^ z(I;7rK`4xZ;`+yuJzp6@e1J&BCzz(6)0(NSSj31Vlys0u3td~zbQ!~2&uPgt{>&Qa z)$7%?ZD^XwFKo0&9L*3^nOrUc2P`^A$pc_WIAB%7_ic0Ss*%66icB>Umtm`fm{~h>!3P;*-^a^>unmdt0_nwD8vH-PoPEF~58z=ozh%a*lnE{yu=1vd}dl z<@X|Y^8s!AAjfu)mw2QWqb9}HqeuN9K>*42$*@`^M%?Kav#1=GuM5At-&^wcrH7iZ zn$bO@iSy}>Q(|F!WE@ZHqRx>M`mYk_AYyt&=byb6VQsYTVce40j1?OCi{)V^FTd6V7wmG2BX`!{h^IHE`SZZj__TscG&0(IRfgUseu0!ob!-12dA+5<44d8uwSC8%B(Ksn4 zL(>jAF|GZ0t*hVpAOKgu7{GQn}`)6!R{A{ zB>++V7CyJ3V;(oy)WFoe#i*gX-?(Zrhb?lrKCd3|Z96HDbuJO)P_zE>=u*fMQ?vW? 
zui;)6{l#?IxCV-iMOs5A-fdE33Og@*3uCLxIo+L3@a0$=ac7+xs|^wDEQD?pDguU= zOY4_T&y#Z#zl$5m^|2wv?A9*IS&Xgnxdhh^DQHkRoUH-a^HoJA2cfyL6H^G^$XEjf z{YO;QqGbZu?XQ~UMi@VW)hI$rE^12q402bxJjv1_`9MrH6KvQ?Z`;a(B2SZF$7*%< zR31t_=0Q$nu;Qg5Op*AM%x>CtUw z@s+(~5>>#zoML_*jjYDSTx^JrB)dD)KSU@RyrY`#E}!|e(qIer<>DK&==JY^#&Ql} zFMV>%6zKEs`5MwXNr0iCVFgaPP=qU45t}|)hlCNt;(<^`xtCNFXXc~;kCeOD@P4a~ zs<+o~cu*BB=|}X@sx%QZ5kP1VP4-J&vVVp+O~w}qk$f^CmT#Ytn;xBo+uN~S6rs|8v z6D5-ho*{7Y7xTo1JnK)gTtQ)YjM*pYVlhw2$hP=|?vGIXZ!r7#?qYyT`#M=iX~Iu0 z&IZ{=J-u&d;IQ%G=43t#lZNKW2$`L-ym|uh+sU*tXZj*G^}B)nzp0IYf?DV~m&ub8 z){F&~2KJnQ_Su00pZziuc)41E4OB@;PMQ|QqM68qT|~osLXZ$^4j!+lwv_0Vp#)yF zgwKtRR? zQZiiCt6f*c!(9+gR*9>i8xceSd(F;1<24{FsgnV@$r6Dq$v;{M&ENJCK*REU{3>!Q z`8$-ce*oIz9KEGJiqoG^y)=Q0aE+}QzmKsg{o<^`2PPp4GLbVw!>h@+y=zA<(d=v+ zf3Tqml=yfk`15SWEYRThnK0&qi;@c9`)*=uR5#_)*O}b`ZktQ(w#8(^ib284a!zW! z5$Pq86>4ivYx-yutZYFcv|jyBT7u--srSn5XY;Zlix(C?NqQi}Xu7#nAfZ2P&n&hz zU1;3DZ&72fGn9QW;KjnZ;L^OKqHURz#MeteR*o>&?Oxi8F;@O+WK-0tdu2cWs3>$r zbS%S0A2mFmY7eGPT!B4`@T-SuTXMqV-t7K>v!F<{pAuR^y7r}5@ue7axg@&llw4S6 z#e+Dj`~MASD-*?Tb!W{Br#CttS~SBh$+P-%U+A-Z5QT(3G)m4>na2ar()2oC0GU)& z4S+NLKsA8akXRhcQu-<}!<0;d)V6a4mc8XP>3)p}x&eRaA`WGAajDn3 zdZ*qhfRWZhg$R2YSioHPmRq#H8-9pk3|p0@3?)F@>1V+WCjXD^>W2;4=&v-cO&LWl z8btWmio?vWu{0#vQjAmZ0kaqZy{s%G&OmlfA@I~FqUf6dJOXH>@f$v?^52BrkkcY6@H2WAZU8D;SyuMq2K6#V*%%QMA%^O$c4)^MD`W z8p<<2WOGbd6VcFBDS!ql*3gIfV~%NEt9eM|A~C&8rIKKU(i zI=i8x4`YM+0I+-eblv_Z!?8gbk^-nM++mw+1vVEtFL_Gkj0xSI8mCL2f~IplWdQ=( z**)2f+2~xV$fxkGW6HsmnMXg5rA!8#g?>w4Kx>FGfO=higb@O(-1m%!Th9PIDDNIDr(E1E+mK?6eX8+se zpmut@mew%D1B;spZM;V(D4+Htve%IsR`t{`d1Cnnz2O|#$HxN}^Wu3VQb;<#e4+cV z8tLuB`wCQvDvoN$Dm@Dxjy7KM362AQ!IlhXT!R>PzF{w|V#;Rqa`nWOR;Trz=={9f zFGhr)$fz{uQU2hZMG^U4FuxJ8d-k?DugL1dt0JHUzy$7I5d508KbEj<&KLd|sHFyN z)>`njI=DJX-s7U!D(SGGS(__3)KvDEs=t*PRs)E^RGn)G0gF_zBu~JW@%4-Qg6Ked zw5e{!?>F5*n4wDLQ|Q9ma>?(dYUdtqy|X4sITI9Id=e0rN03;Pngh2#m|QLRxwi<{ z&twe7R~VOyb@+bbQyhoF+K+Hmob#eHkh4~E)+{Qx#>d&hFTG)^p6HINv)tKrEg~`V zd)21%Rv(573d!3Z-9ZFBY97-|v)gu&IhxRaRXX)Eq|0!KHG$@Am!8aTBV-Uk=(}z0 zuMZiX=+zN|e~p*$MPRpS3vCL(X_VlB^4UndkFhYzV;pj>`^s<~-=+cTN$}lTaUgKn zhI|zY4Si$JRS!E5V@KesTla?==#3Ci(J`-2ygsEtu*N264?TQp)@i#^X!QZJIr3#> zN%9G;gfKK`p)gfS0u~uA?>BV;e&aweFl1w_MF0Q*0003&o0l}C!8b_94Bpi9E|9Y{ zqz4|U>w!V}93i)H2EnS^sq0&UGcF2DjC8{+%9Jm?xA+^pZGTel!t%h$wRki`2DT3w zJ%oKKcm`R8z4o8LyHiLfTPLvyV>qs+VJ&P@Le(@?M>S{0tg38@+3I-8-=2p3N+rC0 z>h9XAk>~~u7$nQpoq6|N1X=0_DQ0)ohevsm0L`Z?kKLuwRP@wScu}8GsdIsD@amr0 z<$`pAC+ZIurPwoiJ|<xLTO>|_VXuGmbyvZ>8IRzb2uF=HOL+4H^mXN zsh+s(8!6IOPa&o=@433@np;}C7h_8`{mkmImiG$0wEr8w8$-Lf$BKMay6et1y?9N| z>^m60lAk4^&r8_)Ry>)GSq{+P-9a9Nz6!rl^Jquu7B@OGU~Ex;7a*UJ8K5wJsS2GIVMq7W5GEX$VGXmtG8G`jN(D1}EEoK?(1TyI~_zj_=YZ`5Y zfTl}XEKs9c^+E)@68tTOGeme(B`^B5H;rGNJ?DPe0C&Gtc81NZ0$5~Od5dIEk@ub{ z&|>X&X57CDTQB7!hLtP{HN0iVzT46nfetM%_&Z((tzUJLAWIC2FNK?G`Yiwovmno= z1pkn+*gPb>*~9R*;>e)bo)SaH zewlg!r9gJ;D(Ts=%H_Wmph$r933wD`J6fi|6FL!!MNv;K;(66IGZKT66I}nF<`!_w zW2EI?aC|4{pzboW?}|<$m_{EVN?$&h$cbSJf7Cql?AtYtF6V;9!r@n$AAvV4A{-58 zQ+3%eLZA>-EvQt$BSTYlFej&2_$|l{_2gvHFephPDUGw8NYc1PKMcJ<(A_bl?RLjr zBl#>p38yt`JpTdua2CtZWq}4VH;hC~U5Fz^$<9sULi$$wAUW81)+z+G%Ch;(qd&yzm<-I0C{Nfce1tpRys3G0Vz@VHZ!x$k@P+jMtwh{3cL}n%G@f z)!N_wrAuJ`GqQSBfri|H10?_Dn2-cV`Jt|o^4-Zu(Htg9trVo36b#o(vhhZ|9ZFwk zL9V%f=AY(eo>l)O)E$#+0Zj81JaigZr*-w~A0*fti|2+L0z3hJQ$t?%J;l6dcuz<6 z$>r^cF7^uW;3OXVpZ>OQ2Epu*RoQ^>5>80u>3`bN<&%87Rax>Y@^zBk33W3=Wv7h$ z5evg^CnB%ay?7AP!<`}1-HD2@51VvS$HCZw+*-GnbkH_7VXk9)ypV%!5oIu(-g9+x z_vJpEZlN;rB!=jke%L~D$iGqff!iC0RESPNfCCV>@PxrZ0GBaPMAk3l3FPvJiDX_T z0JfEz*;Z(sNE24f3A^fl;A;wly-&jc1=hz$zOO`tVh1f7=h>gb9IqI~A*V}h12Oj$ 
zrVX?kGlNbctpd)^&SFF=pKs!tdJG7(*}W~XS~5Vxtw(RpgX)+Jf1HO1jLe|kJ!v29 zHe3vJ3PeQGK5ZOJ>nZv88T0AF(-ydMyLqMQy-H4Cw0orDTrR_tPWTgC>h%V#+Y{u^ zffN`-v?w<&y#g7hY&l~e>EHMcTpV~G`co2P|E)7BEIw|f-|1v=a`&XC*) zT&G=-hiw@ByCt2fci>DG-WfNkiU^Fy_XilW{kp-^^@|YdLTLO(|M9ced-EZdqLyNj z?5B=g1RZ-?S1=iTz-9e)6dnnT4&ueK6o@(|&^F*_e3L>zLp@NwqoAojoOGs8V(>(! zc`%OSx7Oy7$m*$3zY%GlQWVC>rEIDa%g=o`f@;X%Di@P9spx@LkF~Z(QDd-OCz&rdfZVSa6mBQk&T&P&%Lpv4IA0a ztsBzGjV~4N>m}@=#3McbB4HIgVb6$h|2rb@7d_BSBVxx-Ldv*~4_t3D*PeEPWT*1` z%u6FHCCnkeJc{-Ey{`4ccRD%QR?udL+r8DOe|wj^MTG);lm*fc^i_d(JM=gin8%Q1 zDad}$GyMeKpKw0i=ATaHwA17pa1z?z>&>Btf!USm z1{;!x109;(92a?I4vQ9bHPfG`zbs4AXin#b(9SVL4+*`unu^s0u*>bPxxlJD?RXT+uC{dC zeFVqhLZ}s{IEW1 z*Wo`>a68a4$FLhsqoT4mH+@OTjSeljme88BgF;ntK;?Nhu}jV5GB-E~haeT)B!&UM z5kUV-QiN0zoeXTmQMWS6_kZ;`4G<@MT?<6X*iw3I>kHYu;WJ!Xyesq{oQUR7d>Ldk zDKI(I(}s8}PbrP}qv(kL(EdK^e1q}%J;A%o@THsD(wag+jFh^O)sUW+5o{}m`(Rh( zM4uPbX|M{ZK!2iW^^k?`KT-eGgM77^K!$|+^op~dAsOw~qJyE_V=+lK2x=K-qja!V zh*>Xy1R;Os&C4Q3subNjh-zRlVz}%b%FILUIvSWk$YV96)PiJ5X22!L7=B}!#PqOu7)G$-piCzi6pA2Wt->9m}e)Wt>s%M873Gqq6D0tV%ilSR;d z>D#pq2X5(tCVuAS!^!_pU)gaSkzmFkJZ|>>=y$rCZ8J)DYBFbD7zg!3dmE^SwTdvJ z?ITPkPs~v~k_MzZi~M~v0~#+BzOO#YnoFzR$1<1{dk|M_xFQ}xl}@~1ILS2(QI;f zu&*yIuN#?NYuCKxCr`N%$>LuxJ952yVkjH=g zt6?7)MGk}uf(PYN-{nPpF6zP_?K>psk@Lb85HX5FZzyPh@T?PV+iJywI#-{Q)09#1 zxU>njh}!@1a$Jq1m7{KR=)FCN$E9LG#M=_R7SIwRDRv=-V~mX(dYnT!Q`5c({MW&X zQ4cs4N&0sR4A|UKB`Yo={bT>Xk$Z4EYeuqWltz+r&ezmDVRF77k#bbN;o8$6eb*ti z2Wef{+IOv7rx@I+D64j(`98Hn$5Ca6hZ74QGwD-XAnnSjRNmcGG-U_K z4q$ZR)&7xMLa1{7xf-*6qvBP>(jO1QKuNm{*L6Bxk#$75KUm~oNK&HQAa?`qMU!N8 zBg89rpa4I8KEN>;3fp=oWpvFL2MGOYQ_smG3#i{((%KmIaZY5J4@HKZwwp;*p3uD1 zA4>VY+twBbBBt(1m?%|qy%k0-G&bHeu?0Gyp+CZ+r<+b7X&@2 z2Mm|hy&-`A8msntraiVe6g!~yw_DJdi7Txo_YS2w)cb#2^@2x|1t0|3YUC$k;PFl- z<8Z5%COBn*NMMk<`2v#bUtj_#DGJ>Lg!=^4=+(*J^t4`RGEjOXgx^Bb)xT26Z$*Phww|9W2CS#! 
zseeo1O4P2buvxC~x@tR1G?G(#r0RwPcOBScQS>JT7)|`-1%O-v&{@XA+5;W7A*=Sp z4Pw6#Fl2H8{(uhg`8)wu*ZkB>&vBTJt(|o9xi%Q99-pSkk&s<{4TPDdZszXDg{`u~ zu?^Cbirngnb(54gRcR$USuzJR7d{eO52ivwO0eaJ?uC^H%{yA1%lFGkCD)t?2qI@( zewYn#VMs#hJVQSnWEE~yzQAgi>z+k6AxBbwfN=UMk{^EV5e7aCOALxRBvEfyt9|ag z!}xT8VomhE{Z|}uNQfah;r%gzZsgrAHIMK#Qu9bd3_FH{+I3AvEQxybY6lwmOpfsG zft~JjeqiGC7yM|_FtO5PX_ap|!O3)#vg+GZ2Mqb=h$&#|WE^l%#<#9vHW-&?(kA~# z?1=#&Lp>8gT{*O$tUSaCmMFDk2p?XYO=%aYC?y2VfF^{VlP^tbT2R8y{pfQl2;Ce~jYBg?3ajS_4G#a&5&B96c#e^u8fR1vmOM?%IU}jaSr~v?xvAxvF7k`po|fPvBOKUb6-0XS?uT$hT;XHP5L z=Sts}#g$9-{3uw)5BAhI?%$>_kuiUeFbDINEQ<8Dm`WO(s5nH)TiB1`kb%JdD>vQbh0{+2*3`nZs-07aZ1DKWFH$v?Pt&NNl8}FOi})bHpLo zT?$onn;ZA1v+eenBg>5`#}Zc_UgydKJTp5q|H|&xGJ)%s1V;uPedo(QsqvrX0=N+` z>{c91E%E`&?!^e5AqU`d@27cC$^;{m(Q$hT47K?;B!8yPY*rl)^_v98cVSm(#C#si zu`wR^k0xQk4BJ%$>QRl?sWq>X0<`zeOLq9U1KG*RS5sYZNaN)^VGK(Tj%bQ+R<7MT zB^Pc;=7oXQQC+qW5pE@CZbK~``vI8yD19qRcdtn*u$9>(f#nQf(2*_aOB2|pYh*Jo z$4~_lol4B;XzOHJS5h5pb{h$q1+_BhPfPB}Y+*r1{O#GBD(|lub>0d%c4`_e6IW*w zd;E(eWDC^fl3>}(dnFti=v@BFPc&7NpV~oRn>5~8K}2=r!fW`!10CcR_YOTL5OXnQK%?tQVd>DX}s5*OB(c%fIK>R0FiYgUw z8oX{5CQvw5SfK{ONSVFI{B@D;gye#*0IY3cC2QtA67Yh_9Zk(Ttojh%>cT!_y2`4n zM^$CgR@N=C4^nSaJs{Yb1V~+PZdJ!&&+A~8>J0PQal#j>Njyd%rHcj2OD~-2VzF@A zLV)-dhk&H275Bfyvwns8ewuu!dlK<~7yH!V*ZT~_Xny|FH45(ap^|o z+<)11j`26ryy0mbC#!{Kbm!t){|P%A?*rE=a%t=+*e~5GMS|ZLz!dk0S#*2YBh^f& zZZCzYYNbXgNAF9Kf(r(W9%p(!sJy9uqH+wbUwgMLJ#iq0rizNyPrW+8cqo3jZ}9dH z8fqqx?NUZRuxfx@~<6*@na}I=iDxdQTL}7jrcNXT|o*1wGM|;}f#Zbp$hlsYLUVWCi z2nP$JjJ1AKP#aH59V$RD7zObj4Yw`;>tp6{S6z*sG=)2v$uz#vxfmg$EF=Ks^h5mX z;d>$n-xBy##30?}WaP>3^CtV;h`;q?25v`C zdGcx_n%68U4Otda8TP5fL6PaMDt`lr6jdp*OZ7MLXZFn+a@uBJD(!6j;}&})a(olD z9Vf_?mkKLTmQBOvH-GWP6~w88HZE{}&K2ou0~o}|#tl|tdv8GBW{M6Ez}3dwBHF|u zD~gk))k&c9iC**v!~M~WK4ZxQq9nlcXs7R49NiA$FIMeItT`_2TVErYP2f314N0q2 z{t^AfF6$q75SO5br4NDTk&2B~cfbuibUF7)z|R~51*Q*;-fy23=_TAv3(T`p+vTRN zV>Hl$i$1h}Ur3yV&s7a>8m4Z{k3bJ>iEme5KhSa3tNP8leN7nx##JLrEUN<8d@UM} zdDMO?ri&3Lf889l`(y5!)WY0+nGMM0s&uQQPiz}7hal1r-#(>--Pcv;8gds=ch)RV zM4@8E`9U>l@_BDWpI0#A6;`J0(|1N-=|2U5cwmQ^ViyC9<_f-G-Bx5>HVlIzcTCi*-TOW5kEiANjn|2&Jx$Gz<`nHjAVUXWy0 z*~;FUyb|I0kk;f_?wWS*xGYb0uThh8y?pr=zGDK&{R7SL=Fb(<|7>Vw%}OZENUF+1 zsRDO6;IE(e3ie7hcINzsTSrOX{?b>`LO=#p+BwQk7lDPZ-Z^C!JOEAwzRYC8+dFh` z=L-%=Hm}?5^?XC&1t(e-)PB~au6@|{;K;%w_}=UddOmRlqwItdOlkvB*ezH}(jwd?P1s$H(HLfAr0y>Vxl!m36gHlk1YRZrb*uQk8lr zeMmkm&%)YSAk}k#s6$c*)^zM`yl9LJ2jk&OzR?_mQ?BbpqWBXSJ_rA%W8Z=w|2 z=@x;7ByZ~|s_uXJ&@(D-`)?Th<}ZD1a`TyN9+bA8AY7ggMy_En6B#|xv>=E{O&;d3u; zt*icb-a_Z#(b~r2%I(6F)@;qVIlf-Ezu#-&8;X!tR&AQZC25p|_S+=HFR zHRJIRbF%a@&CIm|1wIjWs+b$H6fCy@5J#pCZayj)#+`gJ#N3T zp_M}+!^Y&Q4^cT?zWOjgUsd~jgDVt$(;|Uu^2A$t5`q9xn^IyO0f-9&T$9gemydzrk1N83(tGq3+tYrtRUmz0U zsxa{l3=MX|5CL7ch;;lvSB(inw^T*%xUZ}3_axKJAy5}jbZsfkZaQ$NCvXHPWQfHH zs_SkvTc?XtwAI!^>&Q1SA3b8}f5=JcWzxe&X0BS+}E6P++-z^Ct+ooHaK zu2!!!c_$!ozdd}M$XYN|;=Xsr4W8^0$cF%XpcSe$Iz2Dq+MUU4^H)jYD;`0N*hqF8 zK4?uJiWVsWVY9}bI3yJ7+A#UrPBWdBWwD z#?WK0M$92LP}X}b!&w@MIdhRMMZb#xdClD+EKTndHAB_ds;NrI?R}nU24{c zO$B*!gdJ0)4OdF^1BIYr>Z(n0llmAY4D8k|dPpkd>`gS3Ze**DG!6pBcsEm$2;x>} zHI!eaG_S=w=hw=OzlM_hVutCBUsWtoBml`?B0N7EoB^(%l##oNVInb(lXx$7Z!kvS`%Y2xGqx4nz-am)Ky(1i2DLy|62b_*UIV5V&wn+@Pm=R4R9Zt< zygjDl1721CgIG8?Xn$g_6aM*y%@wjH*Fau*lU}{OlK&Q~V!8$=Tch`h=|E6fdBWNSI!g7g41RiyeNg<4%Nx&mueGnYD-cqiUHzxt$fx~1)4vw1pMbl}ud0S0bh-Mru zrsAwa{npx-rM|W)qJe;*zxUhJB({jHJ2P{)o8@_JB{$a#v}&da@}j}k*n#u@D9%K$ zl_`c`rj2b|{nL*CS^IsXZtQ|*^`F)2MK_tfM#h>T;fyhOMV2o0(aXXSoym7|3+`^q zyxZgW$DbiC&5*;NBS=?`;`Sfq*c1JA-NHi6)433RNJ^ix9K0{r%1rfpBp=)USnfnk z{i8Mf4IWl}_TrqI?;xOv@B{^r#_DjrWdU-Uv?1^mhhN?~ot40RW3+KOy3z_=2wnR& 
z49fYY3^CeJ9Qg}4Q`V4T??FSODe+A&a;5-8*K6zYy!UOBOZzUojM{lj z26#1h_;HaSeM3Yy!vA4o-#|ik4Sr`WWahm1hI!Eir`92wV)ymQ@{X12#-96T&dVNV z(jgUU=;PYlr0Eo>_D-UoLsU{0^j(lg+b=Awy#hVbYC#7rQY#rt477Skwbg`cGT`s^ z7O4Es^1_Q&a#KQ|i};DwJjK2R^NI$&BXLRwa9@ELt0#GWSez6RRDP;62w5;aW=iSR z6-PeC-6-*dmmJq8_Hr&y!XpfiI(V1>v|vT~fvW-NNzqQkM}*fUv)x#gZpEz*TG z7|ckTA!ap9N`%9)qFdRGA20>iaz}!7p_ONn<^pMDp_lSpLPzHC0FnzfEp`#D<2%!H z@rErVfj5qEbK+O%3S!iTcsUMtRI}jk;oTBkPB9>#O4_UqPz{U!&DyNoozzschB!t$ zR(y3rg_Y6$%VBq$h$X-!-W-6XWvZlDHA;qO%I`|Ms#wJPs%gDOr&!1F4W3gnCgokHb#Lw2Ao81)Z$ zJYg8X>jjj~YqaaNZfgb?HtTo9Lu*kmGs4}v4+e4FPDZWwQjwjw*E1t z7>u_7+Qyd(h?L3dk{8vP1RtnBVm%u>+`2s0X$nNIH)P563Gbg!i=Yx$PTmuFxKrou51+4^-EZ>ok^$n7lQ+m(EYg8C2bC7diLJ^=jZR6J zgbtVll%J5MoN%w-`OBhmnGyV~gUS3LsQ=iyvm>t{p?%mZeGjJh_>VKrfj7POl8Z3z zS-*n?2K@93L*z0hstawnMX0_K=4YS<)4p+9!aQ!ZC?V(q8ZRzm)iUDJpY}gQZfFNY zU;L=NQw;{nmlh<}@TXqNK_s2~p6XG7l^+F3FTpJ%h#-tkw~?_^cls^CX0GpR7`lNN zJi6?hFMVI<2On08;2t@@l$A(I1cwfB6^hzW^ufu&1|6#wtTw71(DU<;DO5)xR=>kq zHhyFgZWoF*bor2qWhI*6t0YPuud!N>?FQwAquf7H`!y_!$qLeg_WMqDPN>{jAAM3a zMjk^>wj{N9hs9s6Of2-BsLhnOpMJ)=UU7+gqj!zXFt+UM$ znt!DZ9X+DMTJj4WxD=CC#H=KxRG=g4QZ!`NG1_rSTfLgr8H zOs#}$67CpghK5ENL{_D6--es}o~3b0ROG?7_anw*NjJyl(JJsoXZ zi1e)MgwizEA;<=#Lmd7va4{!ku1)Nwyp~#iu20Chq%Lh|N+|gUxJ8|q_Ej}7$Qul+ zO(vc1ggdusP(m?2pX>1o#ywvTPH*%7<@^3{QK6IU?c(Rir9Nhwh?Lj+$Q;8bmm(+k zSI8r1FxL$iPMhVEC%!U5e8K^RV^tn6M7EJTs-}(5L!+Hmm+~EP*%gDt;~7t}rJhcK zXr=KINO`=^9mHk$fs%w`3*%y)M>1?s#m4KRLF&wc0p6i(DqST2X!K^q6VSpxDf8C3 zk0C^J#cIlp6i<`j$_1dgDS_(cJ`D+uRY{4uxKL;??=us4qfaw_LPG8^MJ5;i{Yas^ zp2nyQ$@VO3pDvMgN z3h-JXg21H?q?`VQKBM0T4-x~X559r$%e~jXj!sq6d!Z$s+7qvl(pax zUbq1=O~*VIZk+&p0%X1gIgs^mMf*7!yyoVyn~l%{G?HC#JSJpZn*z?rynA3WaR_ZFS%~qK!q@0&2DyDp&ir zT%D%7p+>oFiKf(vGKLa9;uzz9Db-N(Kv5KaG140s1Q05mqJ06xo|eJEPX^3ZlHpYP zlYbc2syKyDM5UT_-=nrEp4cC0M7>az$!h%)51_qJG;aqqg9CXoDtL@tJjg%3pTGvZu#(xT6GAQ9+v-YbV7O)t!^N3v# z3Z6n-OJ9s-l^{_`Vh83r62nN%U2&QN&8Rd`Vq5?T$;EUsW5!3l42c>$^D1ht{pevP zY?2RPkJ{0}WTD0~KMx=H?JiySdSJi9Ih#IjR4j*1EMH)nTUravIh4g~CBPrAA^K=f zRD0b9t#3#uFF;T}=xmE2@& zY4Z+{)>#7b>BJ48d1k%dIq@e$*3RO$RR9=Bp5|pj((YLXL7M{Mz-X*>NcE@x9X8u1OVad|?W`ZQ$;@fa6;|upd=L+t0iU=GQZ2+0Mx{3T+}o=L*q-G~_A&*n5Jo1-P;P+6pxGBX)Kk zo0STCmq5ESbH5^8YYem(J1jkuX~bPww`eXXZYwdF^uCTzGO_~yyj~J=$hNKsra)F0jQv8Q3r2L0S2t=ucUGaAKO0Y74t&E7 zc038LVO;6+IS%(d3ms%2j&t=1SD}G+r0Cg?UR7AoPfi70q)KSilz{aldFEz zV}pVdd2_iFunU~=m3!A7*ekN)tkV!jrN%0{9s%k|pdOUB*q=aE)&^tSnK8m^m}wGz z?Pfzwycsj>wskiy5d(v88;9Elr;LpiZa_shoFzHtiRvYntPMWd7l}{w2`IamMS!Na z8&(GI)N|XnqZbbN-Z$`gTBX$TC%OlAkS{n2#wH?sfaIqKj4v9z@)#kzP`-KFU!Hk+ zCu2Nn4^lxQyaoHMZCBx20&9xRj>N(pu1CRZx{L!~@oOX)T?G~NZ! 
zs*ps7H|&O=4WpU?isOV+=Vd}zXG~syM~x-!;=%zogDM3&hiHxgJ2QiH@kU0xQRK0NnNxoA6?G_ z1!Hk5cXd6r&KA6nBGYf4&TWqxdhHR~>Y$ZCa(ob5*fm3=fH+lMa*X*ZJygE*xsZ!< z6Z_6LzrZvS1?QfSk93b~wTUjW?9w#po+)nu?8pMNE|INu+$*_J*6t;7f>2{0quXHa zJHGfaBh6J_I|Sh`B1z88^}#S|uTLD#$i9>d2KL1%?IqB8ig*=I4ItnZ>C@0}V=)2a zZUX06Tze5e^H%LkQz{>%5>r~L0gG#}yXL>st_cb?9L4}%K%u`2wn{h-#2z)(*a-yz z{X<{8NU9HC~-VhrZ?$7^y2lV%qOd(v6Yzc-q zOhC7Mmx-9-%9X=x^9AN}sh4RblPh)or7{2rkA5yX|KlBs$+KBf#6z-SRW-5t41_3a z&)q#B4SL&s&`|tIK8}lt^d@q%CK&9c0D)1?2bed;zd%~TqOx}bAhsTuN6>%AmwbRFV7~&0a=A!q`Z1#>YYgs=t2;%G zYxC&2veEoOHR65eIof?1fdP51md7k%Pb381me<7;>Bvx1Umw#@Yd5Q$=T?{rjQ_fH zv+rhKtU%o4{1Y`)nd`1#&AnrxY)7|Qu%y37IpU+&k(4bV%q^Cr*wotdnz!|-AZ5={ z??Dn8W-DWETKg=g<#E)?rkn~D{=I~6tV#!&w~Cy211gJnU?M0RcKeF_Hi$<`BZd0dHbT+) z_wLflq3PQ$Wv(ZiI!KqcDe{yo)Def~re>|4muEY%9rAI&fXC=3f(5GyoT^uL#|9-h zF%gV=B}KI-30#j9XLZhYi0G3!*BO@W>gti7sBign%8}j5fEVeJdUQ42N z6zFzFCrD+9gHHywQh`VtH4pog_o+nkf@z63i;;k<@I8~}noH3>pdjD2Bgtul%~1V1 zYkCjB?Ux}W@3KrMK@+vt0y2gF8G8j?;x$E3c9#Bt^0IQ810!Q}oL$ocyc-zg*1k1p zdMKBwnIdfdKP(P$mFKxbrwn>SS@rS`0~~*XB=tJ$N%rM(YU>DIigF&%9ftKrkiGj) zt^QiJOeC@8_*(AsUD0+2R1|*x7X0u;p50Wtu6;GMUeY-jc8sgB6IzLyY zYIfcSF&(h26u_#c+5*+j?k;l97&_@#)9P}2I88afe0i8 zi%D2XUKG)l#nT6}H=T+Mfj25fS%zyO6qe_S8F)Q_avMfoHQH?9!V-q#?Q2AZLi*sn7en!J4drg&pL5;`lJ+(BWoPd0yZCiQzXcYVu4wC0UJ5aP1wDpw z&17J+-CK%HT&Q$`ML2xA`K=`mY};V!(i*DF!Bt;iCr+nvG4!ZM4OcN*U74HPgaMt4 zb(2OOz?1~T)dF_#X3m*9?iif)5hWH9w!FhA(|?I?lCYSRACsu4r+6l5oB^fIy^ak( z3VVte!hSUXjh|62`)L=Ht}K1?O-_cXtQC=&pXCMO_$xD5VdQckZO9MR&BC8X{~)U* zu0us+`>(d~-C=OOgMH&KV@>`rfR|e21T({hN*yo1XmUVw0`aePQ5m&@c-Ex2l^PW6 zTAL6jnnEkz6?Ca+WXvAS(mxty+T4iaMu*O0CD|Bn=Q!)HzexQh z`X}QRt~^3p}&L1JDxA$4ff<)4h}!O#F$mC}Gyn#Y5+>2t;u0 zOCM)@oK_m#PPGw|&9b=Qjg<{mGD4eZS;;xq-?1(+;Q?{R;3;cTI*5+ zSn*GSj=CFnG@E!Dd=sWh%KurVHYDjYYhr0ZX#F{-g9pD#TV9PPYU|@wl+gNO!rS#4 zsn6ajYX?$%nY9b=!QYwVabW6Jb{4go$}8#-m3gX#Fw#^TWjVYM~e@OwE?ZPUn`;Ca-C?Ar0>ttbcy)B{gJr1&6jNn zc*Eg~YdnzGrQ6g`S?X6y9jEmO+WidGr!NSQ6|ixtKU~P!mDQxOuL0x6?|#R%mbqIR z-@I(xLAKuToWLqe0%+s;|8~j@z_E8guJ+fmh5;k2Ul9U>0$^u$w=3@ISGvM{cahWEptPx8I!zB@l7}i7%f(j8 z1nJHkzKMgyIM}|fLEzKDST$=qdI&!MQ-R-_zGTQQ2>ejhJID|4yx8|Pz-ZuB_UrF3 z(Hm25q*x_R1Npt5h^OA8(jh8Zo&Ytc1%~>0q3kGaInvx6lG^jYFJT4u59yrPR}?8$ zX6(P$b9v@qlN%Uc;g~-TO9lFYWv(d|FQZYGL=PzDk;US#H%h}l4<6Fwf!czdVPSar zi7&|ISXBxM8TA`>jC8K3gr)J{-4EESP&bTSxjz#n>k%@dVU987G_K(w^CqP=1hlOMA{^p!88! zU_z8@^J-s^Z`%^$FChDGmU6=9VFG`{lATlwbo23)3|Y7kj)ouAxZ0W6P7hCh%Z)}t zT@0hw&Vl9-gO;qTNLx_Nk`yr)$Te;r`Ph#9@6aNhuvp@X>FT?b|JvYA`*GZHFEdjE z0mlB2nO8fvGKXOpG|FnoU!M2v-ea3&?++e{TvprV0iVaq(%{ zkUCU+gIi!ZQ4mvYOxWxo=}8WB$h?Y%KJ(Kf=6Abr*Iax!!e=iu_KYm2M+VV?6W#)T zde|4e=DXBFV#mTTK(b6nJIX;Wie+mZ6z2>7G~aERmga*QH%Fim=ko3U_~A!*uLm~P zycc(eJL?6=Uu1K}`W_o=~1&4u?t^{uDn?g{J?Bh$kkB^d!+PM(eu3T6Xvg*EBzBb-60b4 zk-wV85;~?Xq`^VP2G5#FpvTFdp)DbX>wX~iPXm=-^z&nCwu&Waqrw_MH!P6_ZxEO; zqFI5?(0d3=bmMQ-d!|rJSU&$2VIFf}gGUA^$C&XMh1a4LS}mWx*TqJP@!lOy=ExHZ zYKFGrtG)v+y^+jE&kSa>;{|+}LZ*9PrI#uastybfDj5nB3Q@#^zTDU`NnX}}JxU7a zhPLA4-S6y^+kBe;haT_7&?ejjQ|%XbCUBKJ(CH)-JGRf=Cq+^-jZQ|7|JYeXy(W? 
zaR0LdQ&)xQNxJy4?0TPMCrGrg104xF0dg7`+_4=-{24p&;(PdoK3C9m{ad6Qa7Il+ z1Abb5sM6@U;>MZvd>D=WL)m6ieCk-n!Y3FBZAcD1YkhnOzY~9}->SLA7*21lIK*G@ zlpJiecZIIp!xX9_bhrWr&__(QI4(WU*}c97^q9!+`v9kEFl*B`PsxExLC%b{hfKI% z0?j=4e1icV=ZgaTN#?OljVceTZh_n}qVFz@lcipZ)M~n=}3!Jq0xoTIkJrEMUV5_1aRM?1u~s!813UCG|K^T&X!uT0{xv z=1x*~kVmZEkJM#f$;1hWyAi>}7QUM?n1}+XW+y7d8CbfuUd$DjTEkjhxZ_ z1WjgOURcu70|L8RT<{MZb#4(O5v8NHCmkQ;lfcm`W1)>HELm`=r{~6Oyi(SAE!s88 zssqs`Z^+R{Y$!5TlcWIO%>$NqN}2bu=)Yf+lG0}&DCW5sIyE9?IcTw_z?~kx&Bw%zy7L{!V_wg7 zp6De#;kyY>>S0cn4<;&$Yq}aldquA$P1(g57JtKQ#3^OP^u@4>Ih1ee;QTh z#>%J@5of?6f3xme_^wh|NR`ev9S-y9^AmW7dNsW}4uiLHu?cJeujrRPez~fp<|Jo% zj~ig$aT@C4#cUfZa5ndpc3Xg1aPYEFzxxGwCg)KN{p}I?rbuQ=*wz`h(GYMje^M&HKr|k@Xms^SMQvc+*e@qu`xTHFOz+S<-H~RQ^NWV07?ugT#{3PCwbX#;fh^ zD+2sku=sX*@+Tw4H3){~cw+QKl7GKT%$v&zUi=QZQZ%(Xw8)*xdW?_k;kmngbR(P*P1tXT;-H;; z`o7K8I#Dh76;!j=Y*1Ni`LP`E4VxZ4+f@vJKWPN}6QjI9Q@9Wih@#9Jd@tYoNQQ|| zfQsN7YSSTmNMybzscqdK7IyiyWE(d^gke6$f`#y;dx=w)jH!aEippCJ6J7Fl)!xOR z=1+=)3q|6ybRh>BrF-rbE%>383KUsIp&GDc%(mc~`5+tdy`8P#&bHgM6hw`A7br)b z?3zx2Rh7ese<1FO#-wny131K9Jy~++4T8FfMESq)XrHa|6`kN_3QCL|o>^=W(q4Ml zdY36u(q4l4`Q1#kLXfB+lULYOt<{e^3-boH&=b-b0fcLeS?nm6PWJRYp{IGUHz_>& z`I79>2dP-2nX1$W06#N*trkGa4BCV>n{J)dV+{^lk>5IBE|ckdPu4o{|zD5^HwE$_S5#GJt9nA=)%l3n-kd_UC80xYgtV$t3|@aee&r^vmy&p zj*T;1X-JdCX=XN}@H6*^8~*-iEm<4|poS8U{DZ|H2Ue(=+pB2%*tauckm-_nT72@U z__h%2LZErywKn**qH&jUvVL%NnjLDXloGk3k)zpgH`;TJyS+xrESglNt=b|>#4iYS z%rkHx>Hag)LI)5)Z6>T5=B)Aj>N7}*f=j8g1Tm*ot!zgdUxuj)1C4F)!t0Cm)6Z zzj3~@sGP1c7N8Y56MegV$8F=)B476T0K!3YvN{{)C&9Uoa38N6M_W$10=h>)Q_Db7 z?*|7i-ulYw`k5r=Xnu`H?8O5n&X_Lg((wMgHUZ}|%!a>0+TWLZm5#MAmdc+?+9T!l za^cs{^I#9R$7kdnNaQ$69IbviY@$pjg=4mD$eA`|t{3a9(nZip7cYpdAH=u=V~O>) zU%an$fOIh%he3wH=f}fUKj1t;@V~G#;CR)x(a|OUeD%L0J8LQM&prgUIk6oVYhQmZ zb<7rEpk5`XiYQ#Gc_xC3{;d9#q=N%FO`gvXhn%cqt+Fc>N9rZ7sgTB<+#>;JKt{`?RbJCO@zt$r3X;FA#gv`ailU(Wrn0|9=58AmMh z{NG^-jT^cfG>s4#3+?paGL`oTc`fq`GmPPT^azHp9F!q(K*llwGq@)$^dl}4T! z$*QUJ0i)(v6)1VxNr?G~fqYdUL!O)zG0bZH15HZ={$RuMUR>y~m!q$>^1U<6|ErPW|mtcx@;HF3)d$!+(NP+5GM8_Ile;sk-%U|4-J<&B5Q&p@_q z6Fgy1I-cuP6%?qC_2mdcqTmbl9u-Oa@h3M^Nj0CBqiywyo1v!UV5X7m@Q=Kn^pH`8kcdMneUM-NKtEqLfU z?WBZ$Sr(U7*b{@%S6%5r0Tf-(J$KlHm*WJsB-U@LCzQBlo;-bMlQaMT>{m}~iPcu4 zm(|m0vdD(VLj7AI)beJCfhu}L*ErGL*S)SPP4PG|Rz~mc`7`sKC3}5+BHl6E9)i-& zM-`AL&DAW@>sqp|VGMvhEpqv?dzoL3zb@S!wLdq!&n26Q z+Q?x_o}NJer{0If;0l+x6E<)5!E3YKSl`5D{QQ(kE>^&WpiB=31yuRlrSGLpO+^cp z4|A9MS#Yq`RSR?z~(?;dG=vUw=I$kPR^^ZCI9`>b&!uwa*y$ioi>bXvjW^sfeh`eGVZ@4 zeXc{*?ZL61s0t4A+G*w}lsAU5VNG{J3^9pZHr zXP^X?y}$6AD=`B>JRB=()Py5yU^{vJ=#~4AlV(+0^h)c2C@AoJ^Ib-T)|+i6B+AXl za-0ECX~DN>mAyyRg0wiOC!sE**p3D=rs&|Ls974?{Y1a>C{wbrn9am*4h~MAqg#8X z=qPvn)I*BOyYpQ}BKu5~LY)pU(mH(`xS|QYa~bH9 z*3Q-ioPAIF4%Q({jssZ!Ng(G2aYFL$MS-WK|1r)!(j#NBtRB}#-w@HC>KC7<1T>qV z&;DLjKp@B%dA*-C6qy+w5Im`XwaUygOTHO=BLQ>YAIK7_pmD`@gzC&4+5;~g{I?;~ zV{J~unh>zeQN6i()3C+GE9Tl@t#Y@Hg6O6fCt6$F<+ML~Z!$eZp>gb6lS*H=t|lk! 
zSFD*4S%yN-EqH5+q9A`DRgWL>aQl=ub_o-XVfh>cS`u7PAz8xWt>A*IPH7~7e58|)*m?F zB|l8WHmqz}@bQQPW+^noqUqLTCd?QIGVg|KesWY7=sbnDS$$dgdaf>B<6&~(--!i* z!N!5zsPfCoi3h4ya*7|SCs1KZ9c{FWI1st`L-b-0?NMO-P}8|-H_tPKz{4Zv&J3{tXuRoiCQ|Rv^PI&KgOiz z0PbjZo+YafV$F&N2ffDLS*Dg{M9L>Hd%nx?ZdJzLsJJ9#ta#XW@#n#_Jd$t3dmdzu zM?B@Rn&PuL__$32@&bok4wF1ZliHQ<*rM)n8xCSZZu%vyc_KOwH*^i^5Sm|ZYCKc42@as^v>*# zK4jbO>40VW<8>=Mz_(Fy9DP50i7LHXf5pwsxkhn>`0M8|%!+k~<>@eNaktzBp#9tP zu!+1xF#g;sD&)BWM#rK@WTGqh-Hd0bMIJcb=j_`WtKE$yO#3-?17V>b!(ZSqFLZBH$`Y9sg5sk83wQ^rt;}+(d&UlheBBH_%(=vF_r{}G$%%m3 z)9@yDk*Oz=4ctl%g`K>#(J~Ca%JeoQi>BA}p_;_6t73WXcKCK(B8D4mR0 zqWD4*2|wV&+suXXAVXZakNOX~mE`Hl;xvv16K1XVLl;0i{z8W_k@{`khJAjuk)iDP z%aeR)*}BMa2yhqJl!0qaUtV4(@^UF5A#eYC?;@TL*CAcHpTjpkcFT{sbnxzBPB+3W!DirRmLEYpn{x+ zd8RWN_rsmt0T&(veEUcwtSpB8$^p8{c71z3390Wn?4EtxBAv{+L3l4jFh(o8Ol5ISav@&IA;Yb39?VSinve!pvtK zp>!~=to-pzRUKY*Fz2vXw^kVYLDE+^Yhes2y+wf~%%kN%YWtK2Q!#+p?@_eBEm9Hc zXfO`H2P-8#lxS`|tc^PF2iwvk)Z9pnd8AYQQDbR(Y$YlLM7)y+2?Q`y5J)pSDvZwB;-ysHgY=qmSF{9 z4Kylb`^Gbh@Gh$1+S7D+BvPoO{#aU?uho-lzYQFRLNN8`4E8^uO4U>T z+nTyKAS%Fk6FfuXQp{KQS*x3^nsPlgqRy#!2!7oM3INW zKNqc2`pzHSPNbf1_JKtQgIEk->*G#SB)Gv>%c9>{l{dqTIaCZ+HueW@{l@I8Q&1+K z)10T{xyatu?fb}B!A6|Sr5rmEOUFInGy|UaGd9Ru{U~8J7RE`<>91YUF2;UMWlvNs zyi9T2x~}bexyH-JSLH;ntnw~3rROEYf}nqF3|NJH=D__Wf2w#0fZp6kv|tWZpz8nF_ET>X zSYg5hF~OxnXl~?jIZSvlNZ2Yy{UVEIrIR1|q=~uLUe$$x-;R(vvwr>JLi=-Z$@inI zL_Wj?NB?;SK4%rctS4HC$06U6c{Sr5C+_%Eu1_oHHj6)uZXW^1FuNq@i=fwWtNuT4 zZcr@5C11ybAwOJ`LA+EgjAju)Rw-7qSbS>LCt!z8z8Zf~ z1b4s~R*JCC#b-`zVSgTPhEc0ld}v5QGTxNn* zD|m$GB4t;SzY`Dnun$GSCJ%SO8N{4>NiCN)G%@a}v2l16e)sT;VaW^q*%1h~n-QuR zd~2iKI_8Ajhwl#P4=22)d7b}j(!B-Kd+W(AGpA1zg6y>2GP3Q0414}jriRc)jKA%h zZWT`h&<14tk*a)B{&8eadti(Y}{tknBbP|INrH?34t8ghC-F}MRICAc9V%vj%g%Pmmk9d{-u9Zj^#tU zY6>5^vZHrde-D?xvxMMUlSV-qF@e;I!DXBl2&PeRz1^%TQ)?+1n&4mFPlF^XQ4paj zNGR7PuQ{CwR&6ViV1&F;Q<+M$R2e$Jvo0`Ws+%9u&h01cO(~w%wB5D1<6>`l;jp0HyaDMTj@9jX8wcIA+HAIjBcMeQCJ9(iI*@sJJFbpu;|%qX>Re zy3t_OHwSj50iY9&uN)>ZddtMZ2q^qy;rf-#V&>P#b|2Bbs+@aKmi93%QOS1U;l0Fn z|HVN1_t6NT#k8ZCoJ_sr6R<9g>1vG1=Pd6Ya1!3NK3)=(hS^;Ly92`P3TC(hp#Jhx z5pcPZ-kmYacO4Br5@Wub!1{^Vp>knp#>DIh*LAg(vrP{IQdSwgf1RGv5=o1xbgy&O zlwcHWs(eiw)$p4uXl}#=3lwCTY)p$umCdiBKo5A_IqFIQNnzd$P>RggtrC`(nj;>y zh!P}Q6&HFj>TCf=40$8!4q!AVwc1@m`l8t`{0(da} z#mVB8FvRN!6YyAet%{4TDP=w`9CkI4G^N#S8J{ zR;@ny)3}O(M4_O&9-ym#$4Fv=2sLL~v<$PCx%Hb%%-@IxXs0|3Bk*dGj5O@(xn@&| zDlZNUv@IEmJW5d`IK!CXZ`_h)!a*z{-jv!cPVX$hDu4gc_xp`Ls7n=%B7%3SG;JN% zkf4{Z^O^9p?K}WEqJfjIWdNEEk-ri*b&qXk>S;4;DjJEnL?4QnkNJ#Yq?9D(8+xU2#FBq|RHF>+Q4_APQJTY`wpSivKGWX>6oimoHe}O)A`v(K^9P z$LqinOm++riVG~CGGGt7OAi2&t-XJVjy{#}*tX(TC}U3YVz9=h!*o&3MZdlwk)oIH`~7G^r*9FDW8cH* z?MLO2SdaQAw{$blP;NfEdWaLAE1Lv|RI^>uXm)xgtIZF3y&kzuPjV-aEr#}YVK?s( z0fKyE@Rk^-KzpXImT0vS+_b7gERNde6$(;xy2P`y)kvy_9C4Ad;&kC2I~SL(@D38i zThpxrztz%u6h7X828u$g>6)HocEXW2O$v^B;jK09^rAfQOvABN-hmozhto_%12K3O z_%8<5+Eh!vi}q46ogo=?%rr!zxJ3&aFmsNZ3DWgw=la+e=IgEf_B2C#_ej;`$vx3~ugt zkqiC4toJjmiVE<|eDmnUin8_7KYPN!HRp63DRmqP2qbX0GN~&aq>Mwb@noRas$VBg z?3{`qpTr<6>2YA)DbZ|2?3$*XmK`s78-CY<^`gmkwXOCrt9pqsypXYz@TA+V3*RbP zGY@)fUP3OzFk2O~?`z$gv;hYjj9$NZI%Yzlxpd|D|F`0MeR9X8FmAWEQVuG{r^6GIGa4YjZ(;1FMG7s5I}??~x>Ot_ky21zW5-BgX`G4o}Nnu6hD)EvvF6 zLxiA`k-F`hMmi;c`rc-X6q>+j{bbLi895uWw`>lXQJBkE6+ga8a60^MbrQuDKjE4o zmeWew1p1nT)5Hh(R5$(Hi2i=()dh{C{zQB;0aa0#Mq3bu>|Kb>-$rWh&&#=M4`D+Q z`+uQ;;4H>Em$|Kq4MnF&&)#26?Yl0JH*M6TBbZalA-imyMphv$c!g^=j7rPpnBepp zm@j}Ld)i-%Mo&v6k7|H-8B2McJb?1rHIDMT z8L6F8$;|YBYUcG~Ha8RdcQPA@UmzCG+PrZ#@bFl@N<}>277g2m53oCL^jDIVK9L01 zA!6VT^O2(W4?9f}ekI_PfEN~4x=S}6Nd>m(Axu3iiZ36uX}_^xIRrUQ6p@Jn!KtjR 
zM|?OB6^QybyIqZ2*xw8VHdT^K6o^e~UlYE_mlm4WO2?N-g^7|4`u0bG@aU|crJs-o zvhwXX;_WMx3Rw7GsF^5)oxZg6$ok%qgqGu+w^q@=u8_d#geNa@=_9ijz|Jd)uBmQ; z(Z%dO!zp#xZQpSV=?K^TC!d7pLi}{)$ynCsemRjXR;ILnz8ptExO&BS;J11*7P~Bsbs}P&99HV>3+>xKpL9!aRbAz*kWBx`{Td zzO6tEKhuKcF!nFQ{+G2iJEY~_NK%cm6Z`cT)|71$*+Tl%ob??EddgDJ2hs01Jcu-t z66=Ur4dMoPd;9QgoP7UJzD=V5?3p+y@<6nq;TUDC_B|KvS!+EwHdQVLQ4~;DihATI zYPszZlxbKEjznh@M~WndQ%(09$=bVY?d3lQA|hhh9PMA3(kBuvAA<@RUN?}(FxnVB zjnCNwN^!;_Wn){SP0bI}o2$CaI_0>gn|V8Q|+>f^;os^wTO2D$9aVqSSd!X1|Wz#M!>-e1lQv7!EUE`}&# zlwGJMZQBA$lmN)}HDjtk5TB~IAZLK#)NdBMscfKQwe+g1FtzbFugxX3BvgwekYE1? zd;}m~CYI@n)98ja%cg$%(~qwPtg{Upp=5ylu_88hOK6(q-FsYH_u;^(R+^O)vbaQk zsTU-Lv(;$(dVi}4rRgr{5RGLThinuua;-aFNTd4O<*r@74OW2*^>Nu@HsYiBKV!&W186tA-MIn;4J0u4D&E2#Gj}F`uzDb?6zoYJqmHixv(+ zv*z?gON?<=8^9_R+tEDEQo|7e=Ztzn?>AipQ420xt3&LxK!00daJ!!lx#BpL!}N(W z^GGr>E*gDy4fMAxJGd@sOb-aZpCYc*HOrJJ`09%vM)BR#Nrc&gJX&hLzuUyjv#Aq-Jf1IyC%J4#%>3d%skQfkjkm$8Wh`u%qrh`42Ifk#?O zOf~erUuJ&0((L=b@oUAgstx?$L+pJ^G5kV1i;rIDr8%`Zm2MH1N#msE4^hJrQg40( ziX9G5?~Fne9(}(}rXG4Z9tW7&oeEn+1`x`TEr$Nh{{L=;-YP=bA~l$}D8xWCXfTQv z&(nmT;_I|wUehiJ_10;}Dzb)N09xZpNtgg|9NL`6^>Q#j3ow-X0JLWrbgg}UT@hpg zfD(A`M<{|4N$mggClageq8wjW7X|+JmA#_bomu?7#QLDW%mm1Uglf1PrOmS*4(@bo z+46B}ZLC9twW9DU)E+h(qxhi;?_z!pGiqxP&`kYL*7s`dE{3~tV9g$Y6aVHF2Bh~l zG8?plZ3*~6^ZXp+xK~Yf($WwZ9tcJc&QXWs0>Q%ynZx&TI>$af^kVjmV6Po&Ot|GmVlguol% zh9nQ=dlgwzqW=3>viNf{vy*}k0I&}0TgMub0(UWEWz!b3WBFGamTrY6B-mb&m*mWZ zjcOr*3Q)Lw7f1(M%fv>Y&CfNkINzs{`{jh3;q_KSqOfxoFqwdhr_;G?3DPxh1^FzD z6O|Wl*`k_Xc(13P(%m+^J?SwZ4yA9~Po{+$*m)pNzWWM(^h=G40sO=EXn}^1a8EI! zqV6L;*LRmhX^}}Wb1%&7(ucn;GC!_{4bx4c_7S^=XtlviWc%r!X2ec#WbXfVT431~ z5>8pE%(WYICQlN*RzISk6oTJf*mL$3VpiwaT z+)A?ekd)VeD(+4>5iQ7kE$8UO{{F~(N87PE_*)+tOEq5tW4N=MpKd{6KPOo}iC*pZ zo4XZ)MG-HEt3pORNt`f}axwoJb3BI;y;L0;_8`jKLAI19iaKg+$AV(cUcR0#pH##? zkPFN(&h4ocXD`zqCOp{yxr{ZGH~9mYtF=>_I;+2ITOB%q#GXY_^J3o0YBhRHqM+da zh1^Z1;a2wL4sOmZO9W?PlQNa*dh00HaXTt7YF$)P4-_ItyG4~z90^y8PB?;;J8$n+ z5KlN>y^pUiAE2IsMSA+$9J5TYD*#(p0_2$zv%p!ij^u@yPnU`hw4aMF%Y}v{gxnd- z{`*G`k&A^)tJWqPq24=7M~+#w`j{cAZe{CA45%}oE%_BsTkCJ;;Z8jAl_Ty@g?!DyFoF_@-$Je@XW-SKdJPfd|Fft*0D|-jti~YY47D@Pup2Z= z4r})!h}BzHu-0L8XY^c14sm&A(AgJvt@8%t20cJ;T1j(Edz{u^rV4d_a2{Bt53@6c zB9CZ%5}hgL;Wj^g7A#eU03>E{ox?Sb(@%D0 z0_wX3px$b-JJp3~rN~D6;p|-XoOw5!%Q0Ob*a|YbNJ^R(1nNayD)Ex>LsD#!Yhdhk zj!(~20$KO<1n&`d*W-E)W2hf1{PvmwrlS$W{||=EQ_zY!_3i0FQg9t21LPDqNHuVf zD1ALX(De*bC749tdfJ_4+WwR`aY`l309mi`z{6z3|>> z-g@mRG0+w*aJ&-s5gDeiENIaf1m z7Sfg6v+UgU7#jcwdR0C7lU;f>w0^=6$|nU^l!Nc=+*}vXfiL(3nvsiulVg*Tzn5N@ zJ3p}(M^~YLO^)+ny!6ruaA$J(P^*`NP&*kQF=ohVtk(5wnSEr|&s1Y#IFq z+$Un*vZ8Vg@$R)Y-H=wXl0^?)*mzrdo%bt%MFd%+n{sQ9aT}SvhdSx=k4pcAz-+wm z)bj3iaiH>mXvPfMiI3+HC24W^J^*{o7n>z$t5B+cRQ#XpdGv%>TlrfXM;%HJ!>8}po-W(LG=8meT)~(^_~LAe%#~S9ciDflD;y76Md&d z=O*q$?vhDy&54RSN2G6$Q#Zn|no(A#E4rr>Ty<+Oh?=F#Eb`Yo;bp06+Q;RzgZni; z+L6P@zgB?Yd|`Lj*9Qr~{CQ;gf@Yi^C-c7eUTZ>vv{{jbOVk zj)gqSl)%Q$-?RV0O}{<*Zle3KtcblhP!*Qw2ME~&H&einkiQK-`lwRphm`%k+3P7OdVyVu4IDX&Rd5;QQXKduH zXSNDNC?9c&?y(R%ccJGS4GE9DPm)omwej$Oa#$L7j0oO~%?v3X2DTldJ}3|+49k(j zDq9muo#%VSE=LJr4|wQcv_^*h#bU0hD#H*~w*_P;{r&Jmtp3|IPFEyrUmQKS>nOt> zDRoeWx#p0NLAFbzJIL>!@;i@}5Jx05z9mb%HQfQ2UpF@=An)zA7&!jWi6rQTS_mw` zyW(FOz7wjeQ}GYrG)boNnf2FKYjlBLdx3oCY94L}^_|8{!+L9B(nIj_d_m2j0_>w( zGWG9FWx^nwau>% zF%k-fW7tlW6(B4K+Fua)cYvsplg)EuU#4i-K2GD;RC5h_vnLF zJ_I27kJ9ougP?ET#Le{cXgky$RDWYzdj*XlYE(*br!V7l8tlE#lr@ zlCfAKOaKFk{TPEtDip&Mly{r7HDZE1^KtI50e;Y@jf5LPit*|D&;BaxEKLGDj%bl0 z5@B0;*2tbMzbdo;V2o^D%O&>bQp!B;i_(6E>qdS@Maaf59|9!hN|~@hj7lrPUOEWO z-a%#qPOE? 
zgFKUR1!CY!`hlkz7w=eVCX)>~^kV@iLZpHTtg19muS!=flNj(yEB!xO} zn=IpN$h1)+08@3nBC{+iekUZrmm2#d2yyHSDPUTH4lzdmX zkwUBr)n_1`?M#9Nc)?d2MHYwC=r!T>4kPskCeN~5H=?7Vn0v4MFlKaSGWg*>*}Y;d z?PMGY(H0yxCAie`iWzcE>7=@{Tlrd(RXpRny@fydEL+FVW+|ucmOETMPNsHA%*o0- zNYCfxHUZH=qnP2cy2H6KA$0m~Pq59ySN9@UJ#`1II25nvpS1u&39Kh37H%!TMNOcs zJTUPSY;)DFDvn5;L0pPlzR_b<0r$qd7`bwaEOp}@8^8P=HX8~KWagRHh`iM-#JvXZ zrf=k|$nD89NIZjE^h#IQy^z2}FfaB37o}rKN<&9KzG+;h;%{3W>UiG;Zl7PsL@KK7Q2cBv!DVj5}o>+wv*Fk_gQQ$*?l3K#j7i5#? zds2)<^Cn%uO9kFDRD`aZz(=Vnv`U9TxZ6@pQj z1O!6ydpPqopJtZXjIz&RChCX&swqfxuFd%Xq$ zV0yVHJgt?w`{fR<_$b8sMhhO#T_8^l>itKHsi{zGC}dB`WSP7qSbY)c<7kbw!1bx`C$pHSieFVUUZd&G zhqGhSCJ(cvL#>7yhW%s=C$78zdJZ_*uGPu?*s`JW4;3mRB&PgYsi8r=tA}W=@HA*Q z4=v09{ZHZJ>BM|1U*)H}?Qe7I!!i@b`p#mp27-QKzzgbVP}3qGH4w37;0=6VCIUk% zB$R^iqfvFEsR9xlWbR%vZoWTnNQh*7_Pw$;D)yTLZo2~Qbj%yc8i&bP11jc%K4iPb zJvvf;7A%xMHsDIACS@k0ok0hGGD_)T>^oA{#$SLq;c_zJ{CBEDv_jxpks5ISoL22* zM*W`%>C{x%wD+WNB-#@kd~IB(#loj!p9rl%2iw74w-3y%IYG#t+Z2Uk68{fm&hL61 zBqGD7f_H-61c6R-WqIxdkDX7vOJwSMm`gYhO;7>pz{$d$qUer7UE~=UhvqB!X22qJ z@9{1EOS#B%$vE^^>m&0gLthFWFoH+s?T7PJRJuzAP|itUnGJJ(ajD3f0&Qe)W~0Sc z8w2l*zEDV_T`5)C9=Snc|M8i9R*3=$X&$Qba08y+fUB)6=?z`O;*hkVOnNW zi-!o5qH;~2H83H;pb&3vJEF?&omi7q`8?-pB$@6eMc5?Mu28R#uZJUt9GJEtYmRC6-_ z)JG?HOFY!CQQ(|GabI73w&$)~3KS71VEtH0s?n2;WREj&B)Nz7Sm2O!%7~p4O4PEXsbz$cj2Hd!rgzW zdkF5)E5}O>^r)S$ULeGXfA#9!$!|~4fT3&OQg8p2zwYBa;zo`)7^B0Y8HkRZG8)Fv zTtujjQ)&%PuU56<^gbS8RzW0Ca2Zm~V1-(u7(#6z*tT8glPuPf7IuXV_Ud4wt`06_NH9slEeZV#u|%Dx7DnAF7s z*{W^#@;og=A<-Tk#N$lbG&M4rTnKW79uN{;;G=H9$aSQM2^WEFm0yimgz0s38C3+km$t<}A5jdj*6B;0nsKU0&< zd=7iX$65A~I&!BpWL>aRy|5LinZd;SX3eRB1vSa3)3?}=9!%cuEqc~qHRhsCo5HM6 z3Z9_@UG##+n71P}2;KAmcK+{PKprNp()j&>WUboSF`EcuS51>NJex8vP1J`k9q$nC zt(r)GE=CVN3F3Z)R2P0eyh;uQZ4N4JjFoViP?IOj*a5jJ0Vjk5b(l2W)IVR756n^7 z&BNoxu2=O+X}_1!ccc3CYWGMoF-W{rz?G3(m9GH2E-rV;^! 
zy#X=9SJ5@osT+Kp!>|vj@!rn98e;qz3UC~obAqwya4|R!G)0y5g=*rdWRY-{8=$)5 z$x<2^@D1CPst0kuUNQ@RWuKFuXfU)jz6jk&NQv|T37Mk9*J|e@(OUfKv9PeaQleEI zI%cIzPT5ex#M@$%OJgUiD)j>j5IHrQ5Xb$eyLZ`5K1{Z&9~vQu(>^^``lIe?e1$ByjgoW9VBZZXj8{tLaR z8E@NZyF;u5pXS~BsoCSTLpqS0T>~+B=r% zSQ_)iAN-sFAm0*=XnqgbxhS|QWbQGtgOD-{YaJv%N72|5iOMf_cOCGW9+Zye7z9d% zC?x5n-nf9jGbU{IM0L^5rzd4a#R2y#-Wa1Bq+DMN*MDujkp(@3?!*5|9#YzNhD{dQ zxnkGAvOoNivVflE==rf4nkn$K^1wyK3%cRv(~~ZX!R?` zlO;hy9d`=&u|0LByvmm62=Et@M7)ubQ?JV}3`#;-aa))GIpx@vTH=%iW3Iw>igeJ# zQ}iMU$_q(qF)A^+6iAekRV9)hsAQxjEL?DUK-hnvLXZlmiMFWCm+Lm5~=XEEgBiKl7RKM&8q_s^a(XkD;wU@xN{Pw z@Bt)c7)7Xch)p8Os}|f*Ru=ir{DWGy+;vp(sj2a{si`*BmZ+Bw@ad^M(0%5#W+=lv z1*0BMfId?47`kIuUI5z+?VDa$-bhxzhHh(EN4PAQmhC4+>{aC4!TOlIlPP(A^=Y%9 zKJe~LF#kBnv5FO>3aX+*{YcWt33;Ch)+4yh^OKrYzY3m%kL$W8&cj5Vz4GRHI2h$* zo;@+1e14{xLUWf>AwMw=*1b&ti0-wWU`j{-_f6u|n2RM%M^nr;AD5-vZD1%fe*IYW zY+pjc+l8=Jb)!5G4y3BAz+DnBYiP!Ls zjo}`6D6mi{dEm4CcTpD06RQd#0YaQG*-gbTLi=PO;K1Eli0>&xH687Rt_VoNnWwm|dQy=GG}M~WpcTv-yHC~!L#tP- zX^@on+4p#fNZ9`;U%8ldPJFSihOnE4v9HzjL){XL5_W(!`yO(j((bHZQ5>LX&m5ms z!p$GvE)_m4nfpH~T<({%t?ruz2KJ4$!RiLC8ryqs6DEISnH6b8rg>gCDF=84(uymO z7&ToQIq{X+(6iy^k0ID&V%>j^7xiYAA|mNjoT=a;mWzgu_yzs+*7V~$YsMPKr_6tB z)h=833xQ%VQ;EH6ozgp=OLS)&PM)-Upc%b839QE4~T*7)&4WB zn&Kz@tAtaHtsUb@{tbN`8x8bJ%ItfBKW;DX=7?B5rh4>jY0HFR#d1>$a?nA%Qn2l1 zyBs%gk5iL8nW0Ktckh4VwFK%U(vw42Ll2{}aQUn1^Y_fc&uULKR*TNm_Y5bV+rr{q zOtYU!9Fv}(0ah;gPT&5@0+qkm0NM%~Y;nyUL{p@WOq+5@F}E)()KbOLg+BI@Tj*Xd z(;fK@d9)i+#BPE+Dj#Z_q9X@?RC%%Kz)2pLC*|?6Ru$QS9avO1t07tTr#0*S(ol8I z*SB)OB~IacO8bk6RG>mcN9V;>i7;L5qe>Y+tDSb}7!$7yYxz#J%crI%7#Gb=8{GXM zzbl*y)mhSi!u4Hywc+vKKIBB4D*1s9ORl_kSSD8yA5xN`Ycw(*I-v=#*Uega4Fc$_ zszo{@XV54J>Ql_UT6Xfu65#A49}?oR>PM2AqbPw)+?)|JtW%Z=(lS%%~Sq11j~Gv#(o z|K13o5|>}X+ycwJc5e^a#Xy7gS9`?C-WCs*34Pihe?5pBhG`Jt)=^k{Ufvds6vmBz z<`JnXGYYqq2XLOA6EVy1C!Q>CT2FiCqnY_G&_Olc3P4Qe-Asqu)g_u>{8ncIA+qG_ z3Rnlz5@L?RemzKQ5bbK@a^^f(GBbn`B+5Q>d5yMgg0cgpXIj?95b|YUwW^|mbX?n^ zcdc&$Sj>Bc%@SC~gjM1DZ-x^ORg7WA=sk|AU`!5V64b?1a)OEv1vexN5l1!iqz0fx zrokdd^|dIMVl+H4={F8qWLh1C3_O3dVMn%pfp3e--Q|n?FF_MYYV)gJ&n>24)TccI z0u9M?xp2jHB_oGiSQn6s?VmwS1;s3JRC_aqn*k>G21ej`QMPwHjD)ah@gztPW5XLy zWoMKL-mC=6vLGCgw&(x}$)$&f8uEAiQRZlYzd&eD^?wqFuAn{_GQcmM!Z11GbeWK^ zOd_~^yaFfhGG}FYrC=f^4djS-V_6I2CZ*yRN{Ts|ERfk66E6^>F7k;?uXCbMqwRzY z5_CBMm2FA?4MSehOv-)NN_+_<*bjLqhNX)azIWIIL=q#FyKcR*)JgeBN@xP6 z(?jiLL{lFjim>1yz!HVY->eR$I#)W%e`8o-IO{si5mg5;Dp3Y$uxbg|;R^yU{fy9A zM*zftAhBLU%+dlCMe#)BPZ=v;2+45=fceu1bL8Xn#WTTMt>Hw{^%N#x_6-Twb8gp| zIm!+l{2u6d0P#h%bwoW5F$dkqh>sZ(M<0DMO*c-276zdGVLpW*<>n=o&i=oQcLf6g zRKob7)vf?d&RWVBC@_zHpd|MD^~0M(w%D_m17UY9*)A3q_Q;y?&c@vx}#e6El9(tGfchjLqVg=#J$E=ynDiNcmA= z4=0qqc`D;m)sqI{|}hqV7;g?@>>y* zlQglOhQkM&sH2=VvwW5+M^3VZ6Ryv3cU5QmM)AT0#GLlWlQ>}d=p@?*S?ShCcAkIX zHl^(eOzm;c9Oa8zvlC5;p|pY0+Xr*ZK9rcKZvsD!<;Jb=M$6O=#HkyIuxanv94Lh5 z%g9V4LLmYod@p3Fpy8h8xJx+kM3(vYS?+X8T}@|AvTdRe^oVqNZ%9LOs~66d%~^2= z>7gk3Ekro!v9YvecMnr&6injDxNPp%42X^XrO}t@)Xij53fQhboSUO3Kbo;${f#=z z4s_;(xq>|Om}k0dMlq5cF8S#g;CGpy6SFEc%g%4ph={EVXj{3}Gj-ih`g};B z9u!@Tz9RfhiL6KM=UBZaz|FPi`1a_BJR@pl>?PDbVjzh1(Zd|xBj<+`Iw;aNw++Pv z0ZdQGSun?E{Eo|at-9FQritUwd!8-)^f z@HWVVlqnobmh3xfMOdKgv@xPW>2VjX89teL3E0c;r5eT^dx2l@Jgefn)_zNduE?d2 zL84SDBVh4&^q5D275;4Rxo7Jqlp>tS8ZrVcBbrGv$YvHZ+7{bx1c_JB5adK7g!>>D zL<>+#+w3JnbR0>nR?hrqcSo+4d5%OzSPQ`k_2f#J@2RY5~_t`1$S zsE%Och}=x9xV5DRS<{?0a`|Y=I$D95czl6_1}Q?ndNV*_NbuHfVl3YE^eqrWwf9q`h?J|}Ip7%|C z`4Y{nd49mB=}4sv*8)uHuGfGItJj1xe-9gCvgZdiU`H38#*8&uo6#=xwY9Inw%Zi) zotMr>+-RcQ61u(X$fdjUA@Tt~*1NwCkh(XRHleGP~@7Ya^eiAx1mi62BL{ zzqp=pY>vTa!=|r-y|zU;0ZWtt6M8a>N%diVOUw0uT3}K(N)=x6x7l%SBtCG9(zdWL 
zo69wW%eD)o!s^%EE~&cIwktl=ZS6|U8FVS-$Kbq_WZQCIsAH-)LaBRjY!o~-f0gnR z@6lm*6qUo7suhSS$aVPsN3j3y%iMoZnE|kr=b+jkWT|F`hcqwWQCipUe_4ZNjn%AI z@@IfIr(RpPhVb+E;h&su(0#is_du+0@(At=+3BTI4UZt|I0A7jY>5{u80B&kx*o3+ z?Y|=uZQ%GX;|!J@FMyW(73EC#msa@)z~Ph0mhy;N5gY|8?g$pqr2PwD!FXNKZ&0ZgTh-&_=Xni(C+FsS1d#28JDS@{01PBLHS%;d$z8Kb`wenK0Mn$6a6# z?Uhc=kK97&u#8c*>nV1NhxJg~5a#j1n~GL`|CNGNpV!12xDcrzC#VFpIv-Wp4TXSA z+$kk}RWIJIAe*si4r2KO(PNPmKk5>T^}XhpkpMxXx;-dfb})L|rfRUf@?a-^lYPm( z0*s%78Rf4NSX0o^6$N;=U&l`89+B`7DtlWR7!opF=@`*yj7o4@j*zj4-9&BVacyKg z7_43+(rtM~V`yOH1+5lwmt~%|`;V395}vZ0V{Y7S>*a!1U~*xoV}VN6OxSbTn9y$i{89ROQ_V#)+RpDLr4f zk;_(HfaE#Q$Ut~z4WH(t`GPeYlJx${B)RO#UeCSaqNLgDoK>GOKclk|W!%e97upc~ zDeQ>sQGAjFy*PE?zMw4I?}6Kl!$uX6?ubJZH%MbS382!@jivM>AYYc|(e*IN0p`_S zoY^!J%Lf~c38dFKNeX8?zHfT(&_)q3bDILwLJ#o&pn%AYi3ze9K5Zp($#;=ne0+C{ z#{Z?G;c1HwWg=!uXlR`$8>T2vkHNCT9$fAP7JJnlMYM0U5!mxXZV~4mPq_q;_ZPMR3gKGs;O0j`YGr*3cRVCM~VP0 z+$h?30 zuemV{mlsx2PQCu=mBZqfvXSA4Rp zS#BGajkD2UE5DhTAf>)|IB!nTn9~l49l3^5m!lMsC4r&?4eeU%Tcr>tmbCy}hM^9V zco9RcNf=CYE8EUi8wyx`aA_Hx%2|ah)u2n{2gbZnv-gvpm0XeB8+ZAv|6OeaY8493 z6T-~HsyuuyqkN0Vo5W_H39OvsIN7Vv;KaZy(m3j`3eDFUQ|ZEH=K|dR$H0LO`8GFl^0RJkpmWAS#;QA0f|t*GslkW=|;=dalhBC;CWqhox># z#v6Y(i(OEFr+25v-8y&{=XZPqUMxq~>(@RyI zk&0vKKY_kbjSnsFh2vx=~OpVIg?{#8T%cTgSt$Bj)J*a=1}P(1n4hi(_EgY z<{?RL4A=snk`3D&J>X7n0*~4L~ z!0a7N?QlfZFVFF-q(7XHJdN(F9|L_nYDvpDX+p)XZb$isGKVN=$~H|~Jk>TMPE6&% zB+bKmCiWg#y;Hxz9nT{ek&pGf#0U#@k5*Di4m=>!*G^?p79N-Kvw5ol7=#+gOSgUP znD?wqD(#%oAnlO}K}34+drn{Ro)A0jX;6Gi@)paa_RepOw3t+NTLX>7QY0!yaOH;cRA08~w7%E08#E zB^0gEu80O42q$=3)lak&h#|5cqj99w;K>~UqQ&ooY99;v&lVp7!Y0T^)hK`T<`e;y zvQclJic4*?n|wTZWuQBeSNh0};ud zgPaEyWHGnWwY~cF3`+MTZm6RsR$khgXGnPmir`bpu^$lrwFFW&mOPZCq1a;+0m2FC z!0(FlG-!Fl#d@eg;ioQ>0c!|jB-Q`Wk{0E?zv9EshE?!F7k(Jo$nw7SL=0D2cWGd9 z2?WC%jNC>Rj!PaJ$IxXRFtQ--L-d|nSA$IsW{Zs}hfHX*$vJ+`=#^;1JRofp81QCA z)or$@CuCS;et5`}UvIqK^WmSq!UH=&gE|OUx3F9gGR3Ufl01-}xIWY;Ijg`ow!^H_ z^2qoSXn@SAp$PTp<0nM`k6Sj^6sg=EGdLDBs^eT8EAaXosZlrHTk6@y&>m1x1Sf6H z51`$-7a>~#;yz|wt_Lr~sdd_P&EOJLH-rH#pOy0jb06!25Ep?Wn}f!yqmWEN;LZv& zPth3U$U7w_e3AzmUM_4P#_9x>)*2~wyWSM+nK8Lo#>Y9xD*2@)b=yAAL`xa+<045B ztF_`0FuBWk1BaOR1=<7EoY4ERoQL6$ny17j%KNnYn46kseX}Lzk#3B_A9`Cuv2eI} z=k@C*YbCmykxIJ68m21Ze>FvyLFp|EY} z$PB`C{*nlVn`VXCiN3+k?}~B*IoD?Fy_HjTm#SB>PZ1h&qK{nlfi@yeEb<5nB4Fg> zWZYkH-5;3RVu~6g7{deDBv39nm@DEaFd$?pG8ENNXvcVUH1cWNP-BP7l^Hf$&$ zUYy1(gbPLRkm)Aqf40Xwsh=y(vhlz>2v;kBXo&{X^QoW`I?f)!%8Y9#+SVzkb~-3w zb$DzGi9F#qj5%05Vun)XXT_@tgn!{L*ND&i6C@T6(7}fMgEB1L=b6Hs&IwGk9iI~q zJ?o$4BhiGZufDss>@jp2Q+N+)e9LgC`AD+@tH-I7f|Q7)GGL9`plO_I`*~dbbB+yi z@-Yu42{Zxl=PEHelKM@peQ7sHu$*r2Ph5OW6$mo?I)W^v;Eu^0OD6k3zwseNKCO4` z7xu~AZ?X@eSS1OfLb?ebJ6oX&_SvG(*=X0;(~hJ}`L8*R|8}M-KGUV8k$dpXICIq< zlKcxjLLeysZ3hM8Ke<(#d$+@G#p1!DbHG#>5X7d{Ln<}S+;*B2{9XkkvI}j}b8#>! zP)@7hD8GNoH+Pz5{C&R1w4|o0d?CU@L}5qJA`RQyHhB%5 zl3d1->VBVO_lQ*|ld!i(ni9!Q$in4OeSDe^gxgyU+*n*8j(#ARPUoXB%+kQj7IjRsn@?FuD}A`b%n$;3Z##>HtJ1>DoWn#$n| zCsWQVFwI{j50-!B-1IeD{Oc?Db6&}{wn`^2s&~>U(?g$=7;?F@n9qTITJPGEQ)w04 zBH{KL;bh`9u_uq_3Z}o_Z6$&Ixg`yDA~Z1+mlA&J$;)NYf~I^u!8_EAhgmkDIid=M z9@qd6k|3GcGOSW-YsGdvT9I(UFpD@n7Pp`hit$f`W;-_kU*Sye;{SZe*UA)l{pWLA zJu#{)i3i|QC58})yhO91wr`xcq$}##%#}WqIuvl%$X1WFh@O}DmiahazDHiCBiT|B zE!X^;&tuwqDf!qzcgq;FMhPk~F~hE=g*%Z|6SZYs4S(BBIT^6y?m1IuB#<-72oTJw z9Q$GoCGYHE6tVHr=uKS1*_tnBYh4NapU_?n*qRIb&j?m-p(h&zqiGHlf&o9zO=S4- z8j1s=V>hs??2UNG4DZo9rz;Pm5C-^q{OK5)2b1Bct|{Q(^{?N8yZpP#fGANJ^DYkj zalxq|KP9u}|Amk(0}lHL^SnNJGwv1Go4W^wqpEca!@C_?iGbkbb4ECVTKZ>t0lmp+ zt*sI6zg@xA!Us zGQmi5FV8m%NKrg)6U4KsISk`5=METUs@CI_I? 
zul%Mk3y|O@LKxdJ>sokvVgcwrdhB~#L@Yu+ z3q(^W3a=7xzsjF(W)7wnh3ik^|7@)otF*npdhdv0T1U4NY&bEiBnw8n%oJ$i>}^9F z+^CaG=RB+-^Xz~0`+}~5>(mu)4T+veJC5$KW0tL8J!FPNl0OLDf~SNC`0oTkl8tF@ z*tIQ5DU8C)aT9#>>j#YUE=SXuX{Y84{k&t6XE)>0RF*ekdRZ?o%J_oB4gSGuEA%aI zM%lRBobUF)xeSi3O@hL=VEHfny8`-RhmG~hcq}?fu4T2~_;Bu>C8OW2C1mglWV(U6 zDrz?ojS7VaPM7khC*a`XcD;qbY?uu=OFkMS3E~58fYHn#`t>z$;VOCcR{JuEbZW5j zTX_7t{d7ydUTGF~OM$_zB}q#?JtQ=me`uG7rpDO6vN41b5e@!cV0=5#y_;VeDWOvV zana~mzgPO@mJE^0@e%gqcCOVbz5nr`W+Q4e+?$Dl*(OMoJ6Cw|CP<2X@1BB7x zy2P)_P0r0w^Og~xmZUw?6)3gc;=c@ZtFOk9ocFS84-COo~&ub(=F=Hop=#`w z*{pxs?)xRoShew>W96XP5V5oED=B5E|LW3fIsaV1!Ap+quwUJ*+i)KIA6UqnO;S!q zOn$!o08y2?+7%eYZIZe`XQfW&If(PBd!qcwB`aDHp@bURR!JqUgTc|0h}&VP^RgnX zy=Qy~OjF4newV~!cmLD0oMfa<@qe1(_c!BSkpuPOjEOmM_C5J6A8+mQyog;UL`33= zfs6jtqhZ7x8xq>BMGPjBFJCA`p6RTE4Z(rc>tlYdbzUKYD}HJu^*W}|UE0F&+Hr4; zGDRRbAYflJr54 zJLRe@v~hJQPP!6l7dseUlIb`4K!9R1>1K0X8O5QKeViX)Ja@Q#3lgnOU%lKi!QIF- zLfgEU0hVkj5b5<-_c{XxelD>>W>GlmNz62grwMGpBDp*%myo&cm@|=PEoY6m6Fn1c z-hOi7(8Cpe7VNF%m{&g643(}#pyv!gE=*x-LDRaLs3_0U|C|9>N8v-@+0!6+RtYjI zJAQo<_#aJyKyo6!xH**{J+OEvfP+Gaa>vcE8+tt& zm8fVXCcAPBb~w*Z7#Ay$HqLeqQ&Kl1-djXOJiE8kMivS$Z6i#?uE?I{$`?|X1hnH* zN;3bQahn^i&!^6@DGyFoH35_cyB$w6VQB4qTl(3@6w?OAenf;ub=XdkwH~t(_~f6{ zz$mP^VkGMpr8m1%58B?}ynIpACp5H3z*42L&<TwS1s|r&;DXW~D0QUJLdqsR$j*=6 z836SSLC|~B^sH{R8or+>xJ*e^C>>4{`E=_*a2TU}|57|E>UA-o-q#ZV1J7gU@H7?H&h(yW^1zsotk@ zPm!;Q_A%WmbN|oA5u=(4z zXf|yuMoL}4M?v6=$6_+2fzTf;A?D3WBnCMP>?b8bhN+J`@oG)G>%mO6y{PPA=`oHw zK(FVY+Z>;9G5Q0OrEzNhG-0gQQ9YPN%s6tEckLPZuWNl8fe*`g86Dj5aT!B>P+EZg z!ny#skQwu%!#}(qG31;keZii-Fu#HOp;Z77Fui9CAkSgMm{ckVc`dZWS)j$AdbV){ zRZk2<7rgg>xTeE^h#^8<7Llt)^Z#CXU_R*#NIpif{g860Y&?cJ=Q1BMw8XaCB1Nhi z8}2rR=)1ls%ND`_CH*`|5$RM!y%w^9{IWrEdeY zS>{mC){{Sds|LlVBD^XhUvq?#ge~*z7)<}gB5DZEP^~e87o?IDVVzp4$i6z2YZt<1 ztHL7>E>6T=Z<4c2*np(5~ zR6>X({(a-7k)hcEyH!(w`Cm@%uzaAQNB~>a_oD{t`NzSxb8<$pa#~&d>%QPJN_2ER zF&y`c%D;N$a^}Y7f3TRk&tXaQLd}Qc!6c>^x3DkX&VF08`R*>WEAb8X@;#)PHirYX zUwzR!25`j_aq0AQ&mCE7IiM@=sD7Ad$Kj=d!&JlPrdTdqa%M4Ap7`y_mFffG^3 z9C0bv`h!b3k9paxQ`0|<+7@|zEGt#l`~KWcx;f<5`EWV1(;N2_gB%XyQNSEgJiNa| za^n14vD=UhV)Jfm^2Y{7bB>Lc{(%q)$pdJi4m@Pt@%q14AH$8?B^?3zM-U$+o#`j& ziWT6BwTC7HP+l&XdJ>h9onEEVk+03;qeylxfAd2+aUMcsrZq-u z?1bHg<_U5jzo7G&quS3>UCf|nFn^~Gd2Z5`w zX?KwqsTmnJWeRu;xF;^nsAe=5nbriOJZo?tCp86*|Fnm|Q7u#Iv62 zo>UrrRT?`87e~0T8HJb=?{JegtBZFb5D1~zj~e%s}57d%4c{9Ijs-^)Fr zzCJGp9&EqgvrdTY1frd;y8Z2&&OKZj$C0{la(R*puH`GPI2-%PgLli>TJ1=_`2h`dstOy?$!jJve#JHM)k3OC!0%_4!fO(+*|&VdI| zBh~7$!5%J0RCmj?;C=T;yH58aaMBDN1jeeDr!=@0L@UJR3*5&`O>Yj?R0_zrrP<3C zG?J|&e#kK%(HP(o5tPTnRog8l<)-0&eB`pBp+oXGn~rx&8g{P0SNNQ@&*n3_w-JAM zHPR`mOCg+ICih*WTGVrpTRt`lL*XVFNC3W>t%rU+5oZtV+G2Z*jo2>I+*n=_+`240 za!31mQ>BlZ+DzW62l~K1Q!V5ZcDsI=&E}?BKm@IUxP9q1+5kcEbKlK7$oI7%>^KLO zi=z2($cnvFeGh5I|5#~DkOEI5#8(?dj$)6%tLS27E*bf2SgKAw1k(Mm%;~$pb+$o{ zY*5SNk#x>~x>ViA9O+wfy}g%nvk)t26t8&FT>>J=0sBFg(pesRW0z{PG$8t{TMz}4 zakGsE1IVWwP&OTpebov*Zb%|@Xrrb*a^zmfGaLX>7wg>*^WqRjC8bX_dUcB2L|lj* zw~}^5{a{KAEta96)>U@a76HRH{hE(@{v`2;mwG_`(KEFbS;hecgwnZrBmB}f5&I{c zE%VbdvY&C#@tp;1rZFJ6Ir0Nmh?pdjLPxrqUJTc!jZ8=8I^F3lxwav-|lMm0&)%+KIvkfPpjPZC8*qrixo3EKY1zYX<1 zW=&8nI4qZ8PBInw)QUVAq}`^SRN()9zCxr8c+4F?UGTq}zU%wA1!sGbn;YKFRq^9l z(OF>~jQE}pMGtp)f%A|W)S*NI2a*BW04eDOtxTb50kGM?6O%U|rKYD@CgozfLrgA1 zAk?Z$wBiBi2h@{54%?aF3c=1UScrqnK(K`w@PTo-Npm#b!)yeR{UD?m?3DI0`{5J{^X}4_4sa?7a{5>zF;^ zW9h>e!nJI?S@f9&FNevyClnnLJ_^3cIdI5&jr6$|8KLoC_GX{JkgNIX2o< zto_#ko!TNw)XE{yB>l5-dxeK}Kyfn>4o9zdVYKk+$NXjq?c{ zm7T9Zq-ePiLvKRtKdoNz&+0sp&*v|){L*!H5GtR)xy07t^Zzj%h{i(aJ0iBUyx{Cd 
z_)`NG(B;7mGLua)lu4_cy%MtjLYyMfGxp=ioR{$3c$WQ}=K5Q~x%6+VJ8A=^LM1eDR^TK>THEMI~s>7xldy$AI1bjX++pIH>32deHj{j6zj*SWw zM=~*@l{HcPlE{Hm2tc4$?cwG@_IK_pbc^=xtgGzI;II709wWa;qg8w|0^N|fit!>T zv_Siar^iQ{Ng~PJ4ckP*yGVXunt3qFqXT~}NWV`r#)a{BGA9P@R$7;C4%zNb zyx`IC$7P6Nkk~oa9#MS_UoALZ_#@-`$Lh{^jbsB7~lXlQ!b9 zPK^2~`>#A>+jR`y9zXm?t6~n+#85yVZD-kUIE4OBzy_@<#bU=MadR2j?`3!B}#Av zL7dyNSGQ%fHc#bgooso;KujXzRg#UUET`-C>zEm_qQB^R{8j192N@|^ach1QVp&Vh z+7+m6>}Jr)j@&d-bB+Sq<9=hPE$+6*sC=XVG7eRe2iE3-yaAX2$|*S&wfaM`MaSU*t;Lo=qaq@JhB`49njm{qWNWaq=Sv;73bcS(P&w1 z?uwa|*|l`cBk3Dtp-a$$+l415H;UI?dcS9Y!M#@U0%jXuc=GDV+u8L9pKK~_QYNJdWDDYf|6|rOMO0hK5>4=&5r&1L74O#f8KwKb?F1ej zlWw3L>6+m;Lw@#i9i}yf^YHPpE*XcGqt3b_{o>Y04A#B7Nk>#)0PD~kLs8vgti|)` z2T>+y4bbi-CMu!l^Z5;}^^D?ewv@Z?;#ZpU!*B75A4IzdrhBX+-n?^m`5LN#4aM)z z%%Bl_SMd~;E?x|85Xt!Il8`LRwp6Kb)y|>9l5pm*_+rHMi4j(E3tG#qGX3JPrqGGT ztqccf+TPD>ki&}%1R^Kk}CPG1#j%U z6Y>He=#P!K1EHC3U9HYN-0bOLj1Ed(X{jnjq$T{^=?b7ShO!Lxwe5EruPbTJ_&YqK-tu#l;hHV{_n{__KbX);#^pFtM@qyKszU@ z*BA|!=V?{haDUCEt_hzMUhKJJ!@0%Hn~QE!GjX?ON-TMr$zWd`E|Rx%n$OsYUQ6`h zmN!h-a>y%=ah1~M9up>pg`c3=s4+3RZK;@ZGtU#pDlbG02CwnP0wLimB%670Z$e^g<^faLsm$Co zfn8CAw}+0P5(UX<{?oD$5wYaUgD4Txf&oSX({z&Qj34TS9(-W>jRt1YjkmBpGSSkC z=!s6a<@IirhoRRnza-*Gy1DbAYY{@7I zLlE>|XMIfL@tMhFyIxbM#RY-MdRn{W`$-|kUi_nz;NXuD0C|=`Ok7WvJH=PI6@;-3 zi(waI`v^z$i{U;WrTehNG|bwh59WKy8QtfvcoV6kHSf>KbTd$<;jVf7!%f0ULKoskaInCQT5tdp82 z-Fh>LiG0ij49vgySQ*oirJwY1pCI0RcTD7dqtCpRSaA-v2`LCe{sL%3OW%D5%_CJQ z3RDW0X_C4t_nJ6xz!2c#BD*-FXLzx9*qgEhpI^Jc@E>tS-g7zSv-l@tzzpxsxG*ra z1@*rWhCulmd2k(P(D;U$Or~J#$pO;ATxa;%deFyUJ6lPP`5O$pZm^uy_wg%dLsQz@ z?sStfo1g{@O+ff7*X@0xI+`Kl8s6E)FMmY-8>az9aaRI)!rg~H| zcO~3Vhe421HFYpI%xOL5jx&;D4GazNWGf;>F*YaD`Y&8e5(-DOr7+`Yv9*j;?eSL! zz8A5;lHI9APU7E&y($i9kXfm|TFmH z{4zL*S4GYIdrniT(kH6GuV=Myzf(koA@3me5B%JtG{mA2xD9)*L)-63`B zp~CLZp-s%86g$Gn|Hb$(+Ow7oGp!djZG)%jK9A865ZxH6=8G~0<8kd(*<(c$;riyqSAie2b2 zA5Ppc-S_@reK0QD#H05|%lTZL-}W)is|@j`pY-9vC6$vl%>qVj#}!&gkk@<9)q|VT zhah>z?tR@B#tiy5FkevL?7JR(lsFR%UmV}BO+;;IM?)WSRuLlGsju^rQ7B}*Ct4j$ zSr{3Xs|K!R`4D6=fgn8CAw_KPxXy$EA?^HI;`x7TbVFdA&^UCQ3jZvEv?H_rN zA`S|g>L1aF-Q#MI=9-y+O^UESF4Paj=OpZkoE6BOfrIy z&!RyK{aKGZ{OwHWkO3J&QlKuJ9IZ+-sU~_q?VL zr|r}2utObUKs-3R38%{$e#^ZDG<0-m*|Am1$((~yC1#$POX&nlr9sg^1DU!VD9A~uliE|wd+40R zAnl0Y7xEV!=p-Pehu!rjgn+iFEdLR_{Hl>tUkpnvayap$d?*HKW&SX6PDot=7 z#q1IsSw18N&>%Hvp4rHXOZg@K@@QU=cDZ>l>Qi)kd2Ps1hNJ8h-qAE&K$ks&g^p!0 zVx3+)3^3bk!8Gmm2 zqN9BdjY`dEVm#N7fIJ%<{Pt{{i{`inqJY*u@l^3>J^SsjicP+9vg<$tP1OvDTsMP^ zw8BYsVF4JGF?1u43x{rps7OGs0zz+ZhT6n8P}o9sDY&2Xt;?SH#B5sp6+F5N`P?pC zkv-ZX>KNbVR}UqJ@w>8oDSsg*VHAD~`P&7EtdRGtpVyFj&j1-VI+zL%UnTxJCNXq|s1If6FWuufz)q=ORN+1%Sma8-~7UFZtIPtBQY*@G?so234G9QA3FG(V2ok%!$ zJrf)W2Gl<=2XF2@r*%}Ki|@!sMoOz%q}3Ui_(Z9e+&b{H&>h_`WIVX13iwnUIiEC{ zvUpLH#o)(;H>-S7r>CnIqNgG3WdebXj4$N;y;HT{TiN6ov}c*I23<|z6!DOQbqwwR zOF*>0frLwT$*R@Gq=-;0iY4XFG|G~}(fZacqzz1#%r`Yj2|N?QpF4P22h`;&j~~%b z7uL2a)DRDeo#M90rs=e#lXeYsxzcVaz}AzFYhj|LO9r09QIHo6gIFd>t2nu z8OiK}ItNeZlIBa-_17)A$Lb0w29o+;Z)7o*NTn|Ilm7f}U7u&ot-YKco><(M`y#Ao z>6<3tIhhv!@|B)F;pJ0$86TvyFoCSf#1DuarQ7^winxbOeKrtmo;&PKum?6t)fJ4j z4RhNfA0fZ05mAjj|Ldg-M*fV0CsF-}!caJd_m)l(D3*7kn#d9Ra{hlfK*%wjTS1ca z7UNWR1g2YCGj^I^tiO1y{GP#`0x*{OE?FuS^c5sgVk2NEV@mRb_2Ap<{A5xUi{JDb zi#^*joI0l81`I2eW&O`{(yu4))iSF@PEi7}roya5KpPD4&jZk|mY4EmtLD{ymb!CVI z`C4fULS!WN8F-mRR5W?872lfw32sux-{8|oOj+=JJ}g!yXH8)^q`{3>Kl?FLTN^lf zO=u99Y*h<~0n#2mwPC~so?ff8@}HN1xPMFGJ~2X{8Sc4Iw~$i4L5 zyMY4v=TRYLy}?irE=vt9n5&2`SRXqh!C&ML-(m8$A1+>ktha;Mk`VHdQ=CpnH2Tu^ zK;U!HPzcPMaRVnmck>w4To-j`Qt}EZ{Z4i59GSNdvQZT9HYG5wxRSVrGjaQTZa`xH zGQZFauNJ`46~)&E54+NPCGbCE&JYKfS>RUTDj~v2oQ9G%614SY(j6QiepcMDS;fet 
zD+AdO#G~f+%cI7d0s@Uh8|&>l0=>2Zw*%-@a@}ak z?DhTdz{5xHRS)F(!!0Q*gUvC})=96=m|!g_najgrr?#@TGeP(KycMIy4v<<5od=H? zoT;U<+@0-|s*4!m0+K^z?_m(Wn5u?F`Jh6ihi=j=^d5divJ`zbqqO}Vp^@~MMxF)2 zf8dNOI9CF`B+X=*Jy|lfGuQk@nx;EOAU`BqdW_}L35D8~PRXN0l);y7G1OORo&klo zu9+fet$X0bm>Z)PUj+E1I8)F`GH2N0%tea=Vm-7vuiL^ERjr6=e=N;ANK=e`0gT!fjBL` zk&2xdB!AAdnS>t*0E+WRm_J`zDXpswzEG=M8R9A}4y+X=;wU+X_L|cOMA_u zPm#u806f?7cs-o0>ytAGa>Hk@CE!DRa<}T%>#wKJ(-Rpn zKogR=Go@=XLb-33csb&m8a#E~gCS5VSeQ$2{p_mqUx%uFi~%5qXFM+_UtIjn5)m7F zWym^fBr3N(?l*U89IMwEKy&{)1e`#w9H4AN(!ezRH(pSNbHkvnNl0KAV4vnmo+LlW z*cX7O5*_Sq*G|>WcS2?sBrn40gtYdfSDM~+^mGoY4uA(Z*X7;-+Sth|72MG^Ocv4$ zQMRp^WhIG7x#OT=#Bx? zecyf50Ti63^#jQEM2~Etfc>%bbOg|^{@hzfjRV{Ruz9@kO_p?qc~tW8nM<=2dPLfG z1$H~b8|1YSiJ%I8^McR}!f+W^}vu@E$Gehg*jbmw>o^A6ls^t<*-lV*b zR{wOzGr{F(=Bmwxm{YVR`0s7aer-WM0Z{I2P=)@uWXuZQ{p>7lN?Nl&XP4L{jO)8g zVylebxc>u74bUNIL=x~j%!G+1|4U$kvL$iUvv7VRG4^{8`)#T)-xO^mepC?XSR|}V z9bIejjSQgRY2dY+<}Xhx-)w~-=g9%?Axex1X#6I?;~TmBxqwJr!}G1*Z6WdzJBxCo zqZP89h@4sAJv-1|z{Q5yf8xpdnXYNmd-KoZk$*bU^pBwt@`j$u%kYo!z0o?#TH+tE z+-#t`q?j(Y*DLT=J}gCAD!AC+2J8b2p|2?2s?3rX;8~sEgZ4aYG0iI`qaBKDF7sj1 z;t)I&RlfB&C>=VC$mC5`KChfw?5(E)bi`&E&i_gi%g#$9`JM4Epgd?-dY#|G?++*6 zHwH6SmVtQ$-HoGsIOb90f!SDJoK=-AGkL_{a(GMsPcMRfN`aeOvg5#rJ^ft|H)_0u z#I0wgdki1M7#J*?HZGQhzDq{m6i47SM}6Ue-dWA8@IzAwzI4uQ8_dIL4}WfukQjH; z(FZxH9ON{)48bb_Cw2Gjeu?yOpyN0SA7z&v=WaD~_hm2hA=f@MkBtnJYeP#Nrb1LT zi#}>-7#fiu%p5x!tkU8Vcw$b^hJwx`0~OkZrNMg3m*7SfsY1mx`5h{7FHv0w)l@EY z4?Ie91N^KA6P9!Rf^GDYbn|lJumBsU$a^BXKcN%$-VE+()EBzTFm0}YH7uqp$SK<{WVL>>GEf~An6t)5NVmUBy6u+M_!`AvL z2=jRrBZDRRbe&LxiI_B!8-J=*;P07)_5+A+NJJ=dJ3W}@`0{rW9!|5Y_#UAc8qzm{ z@(qB@TL+5LmFzm{gGj4dF>fxP({5#N@{x^3kpV5zv&U)4UTyg4wG$h`9VLD? zPEM@2F098u^+cpZ*1plXQv?;5FnD6;Kp1FTv4bwPEi_>DC?bM}y-ksT=YI1x0b!YV zW`(!SD^5hep~g%czZ#OqR2#Uk@~*lJlC;L%=zsP{36SPf{N&b(g1z^Jkpfz`%L^UJ z`JmS{9zdhtj`=!1U;JDWUv_oIp;H>Uwwk{@IV6r-mf_QW%jKOSqQ*>In zcub(B@7Ydgp#g`$=!FO{<%ARTCna16v7qk?_ov^J=jtFD#?ShGUKw~-59&bBYHs{( zcID-SaI1v4Ozu)=YpDVp_XuSkSFpg0QXFlj ztYWhu?FBDOUNF`;sc-njXv=_qQ>B4#q4xm|O9QR)vb18}rIa*U#SxD-H4gW{>(TF* zrXUkGTP2VR_pyI%#;oPzrb1KsEi1^`C;GU&=OeWslIbAi%8esxp~~tXcECL?lNse3 zer@Cw5?Jy<+13nL$_MYl+{;nsI>s`Dho~7 zQ9>7`rYtAHKsCd@N3oDHPb$qQymTYkb5`9mw*0ACME}jn^NPm2w$10Ok*&-O%OAM^ z7>+x>OLyYuI_{poH{;0QkY4s_r0{s#Qd3%mvZ(UIf99{Xl z`2_cW^YvogUG#gcr2nb6W>-?9VlV=c?ij$yk#^^PdBB3+$rIMscRm#=m{4B@sDu$^ z4w7x$>lcDZ<6MbsvTzMQqrT<3FU>J)g{QamWnye&a3cHpi^DJ0$%ih2T{%ws+E+2Q z=c?e8B&zOOXY(gStgCIC#>}(EF-BDEO9`W$J5FuI0%NUI5g!6(paXy_H$W9;d+aU) z=4*sg%zfCOa;JL)<=~J4Bn(s$BGN%rBVomL*r@6dVeQs!rx0-JA+jAfK(PC32!X8!Rg-MHm(| zyn%d9WzUk)b5(l*eu!ssW?|BMAM&-)!Fs;1^ug-)W{=s^Hf4~{TvU6he)MljV;Im7 zc&wuD(1%4c#!?NAVhB&xYk6}*mLLx&IQ;FssYFI!jkV^~TFmcK<|aQQ-k z!sBkJ(tc=0Xa3yB$V#~GNZY!c5Ibf*6j?EouA@fhthhDO*L=i0?R1zj+9y6#d>4uu zU;L2Z#bLsI99Vwy{sFjS7iwK5!T#EfR-p&>%jfXejX7u$212wZxQuID-x#wjcwu$d zrr!;NCYis=67=qkWfgZqd-cMyq~r@o8Gfv=6Rz3Z8SaL}?5jP1=!2P`w+=3|d6s_3 zuhw<+XdV=anBuwO9byM|^U2RrjhpKP9yYZtD3HNFv@&7>-= zNOo+!56Tz-o0Nas-YQ)uS(ZaNHgwXH5!7FX07AW^d$z_d_TJDP6lsc*#3-h5+vID= zlE+DPXb%D4K|G&5o2^&dxZAI{8SZ~z(E*|E*U;NG!5jJ+s?d>I`!Ed$Ds^~t9fxxC zD419W7g-b{hoFi!cf!LVrEc_E|1B3p}B>4rvlX244dyt;CMe)Lo=Dem}}WA9#;y};K9l0u(E`o z01sx=HDQo^8teU0xcIXiA0hS1Ll@?HFjeFdJao>5HzM%2NkrBcF8*i1y4|&$EO`WY zAXN<%hpKD2a+Cr1loQT<9wae0iu~(LtfbpBiSwW)Y?U=OlfR z?X9EZEX~*{u zLy9Xo?9w#~RfIKH0C9?K?ky3@q5+hXU_)x6mzVN6Sz}3AlH2 zYpUW89r!E^wKw`}s!iv5Wd`vyJJK0l=N}oZwnyaoUCO;-8(lm zjXg&c;GM~Y{qp3?$HWdZqMCJ1S*8Kb8b>_k@BlC{WMk=B0000000BXo zJs4BpnGHVS3+O}6W)Cyp8lt5VyDVLH+9sXe_ciSmtniBGu4Dxr=VKIH>dx3~mG_dsexlolELaUEEyyO3=>2V(zIC2sQSSj?djmD92t%Phh{veL 
z7+85Jy?)!PRen^tZ0j9jju`)1NgYqoLQZ3kOPt+k3StEYB6%`-{VLQBx4ZLMf32`! zTGEl)vW`8vw>8LiDd`91sU#qy)Qb<{@wqJm1TdV=5@uNN%ejf)J>l(sBjb1(y z+y6J(;7^FV!solJ;hgAV56$BZAk1&BH{_+Z9?ygL-zhM8yDW$nG=I`GE7KMx`Yj4xzO@ym~~KiPn(uikg)-~o;EDY8wN zzd&YpU)oyABUYq^i<`&SActxxmH_SN{cs?|(~w;PJ(u9O{c8UYw_%%i`>5xwjM z65g}5mBTul0e@>dwXWsE1YbA5A6QB^5H~9~H)9En)U}_l7bH1M`CzYf{%fppY_{uF zB7u1Ob*I&8j9-x&&J%fyE0ed$%q5;A+8K>-QOa>sd9gmiHYF0~yES=06Huu|ZXKo)c_N7NGWXGBNEbb=ay(Zkqn>Yz&OiBMHwl;3F~7VjcKBu60r{kO=2~? z^{?HyXMFp+CCVk#%gyRz$O2OUFvT+UnZ13c?hY8w*T&8CHU6v|e%8452wU^W5Mf1M zXsD_A*w#VQC}rORviK`3n5^UyI|oVE=6WYBby!c~JulP)1LQg(PzHO21y+Zz?`P=b z%fmv|DCXyY<-OGiK7-D)_u)BOluwdZ5`j52}RAz=|q10?iUC2u2iZO-3QG}UYfXR{w(69*pZJYJ%eRR z;`K=YLOI@eq@PpmRr&Cml6_4Mj69wV{(5EW`!`vXJcu$%?}0+`mywi${)7ZgL-L!( zJvf6zFRf73w%;T3zW}eUQ17w>JA{RjNLfg-Y7mTCGu7t0#l>0yea8ZD^VkrOCm7gH zLeEKFE1gzI{$FkRdgGgGi$1Png!qHF$17c>`QDHg^Ff?7XU*0gsL2K$-0J z?e83*(5!^8>_w%JQcU} zNS8i=p4pK4x#o>x1)kgej@*okFcml|Y@y`cjD*Ve<hc?cUuZBRniN+o}fMUWlgCwwodhCBp`2X*a6sj&=6C5+8n1Sih zPprM=()CS}Qvvu^dzv_AwM;DG8(2J|1_E;TPe>zJnV$MmVuX`3w+I?Q#*4lOA;U>7 z*V2&CE%ozQV865M za?%GOt&#zu`iyw<&vI^;I0aj52$(+sz3ofyJ4r$TwqxwLpIHdTklZhW}E(!FE!zeSN-Q7J3L!?5KDDY#tkr}l((-ik6aXq#P< zdd&j{3B!u1^&6u9#}y@`qZoeko48$?Fz*rdkxD2v^hi@le=R?4QQc9S63sn-j{*sS zO8qNop{z>s!jSt|Ue**K0qhN3w~qXt8XO|9vNS5)B6!&4fi?}omD3Cbu3bi){0u~k zSJ&DK23%k}g2Eb~tUvjD{UVCfXWK3PO@-EG*!ml`h@L%daV6$_9cOlx< zpZCcm53k9b8zwcP{e}=RD{G-ViV?ci_6%S!pquIBgzNy0C!D(u!m(5bW>9hB-K+Je z7Y)8d_nK6NYLH6rn}fJzi*>FH9nV{JEH}=j=jCyWg0p0>=0LjDABb`htc&KM{(+Tg zE}0`fIn9|GIrXcj@v8O(r0f$ni(*{dkCfD<>st%pL}e(?2x4*dVcQLA_xw3)Ugq5o zqXsD}o8#1r{zZ87!8|Nu=_FAtcOP0~ByRjuAFbvnVe7*oiu!MQr`vET?F;;3({|gB5gNY+@{h% zG5!d8i=NcS8-e*n*H?^eZ~a`)oRjp$gZVh!^CsH~N|h)6_oZbx&{9{cGgMr&uq*)y z!U*nV&VmLUe6L!(ae8GC8h& za0xG{gpEyWoQ>=niqE>*llkT_`efHgE7aVj(8XkBFFG}B;~u2d+oT6jr?LoPPp#PK zn)x42X!(EZ!gBjuSJY|0gl#)gt|i9@-_Tq~)bBL;U_DS6Mxk}m6k+LGg$1<8WuJTq%>micGjp_s@4%=myKM~{R=#+PV^bJ z0uY4VX91*`GJ3-jmV;pS_QoX=``2_CbRFzVwhWrxNZU?rcsp%PR3MRO=L(*QX*0Tf zm|lwJg#Vn+i{r^r<@CU_HnqAr#@4UMk4O}4aiz}^OWd{&z8`}O4SfM<;8bnnQa^vm z^rUxb9hJgzxFMY95i!@Bl9QU{RbFi};5EjJ4{JNy;ROCLfel??ABZ7rK{n(i1Zbr? 
zRGM*uW2;E7=up+fAzV~DkuB{e*5ba#0CfyF!iiE=!>b^44Y=<`q#@kf%d$Jg=0#5Q zduxlDQ7r404zGduny>eKmjpnaUCcB8*ts#Thp?XDxFC$u7&ehs%bZ+iC?=!_>;Ahn zd$2T1!ur>`e8ruF5O56yYL9#w9ipS@D_*uE#D9kuo@LuwUtBRNE(=xau%e3Sen=?I zs$_Hw_bzp5w8D;vDFXyNhpFMeo^jHjfcFSoBAc9;NWyT=8#lspHEb16i0k3*vp}C~ z$0DYZ6o$uJIn-j$4Tt&WVZrA&r*SY}S2aq57I^a*6C7;`+rM)J6VH#kuCOCz!$zf_ zy)7hnZnM~HAvje=!U=?cS4LK18YB<@-qa zq(DI!)?(&_%dZ^nA4nPY$#4z8*lt2o9S-+xUr zZrJv9C9f@HiJ?H2598!=a(N5&^6qHs^1N@V@}oTQdMY2(Q*rmokF{V{GQ6Q8(WHZ_ zdGu&pFLfwBnM13A>@pm{7jCVb9Hf}XGtUNKI=KF#zf%lVv-``2jB8KzW*}GD?qC5z zUi|;xeX@od#9b{{T8IF_=Uww$0iEa5_zHh!Xi7bojOFulO% zdr4q0T8*4wXsQ!LFM=HHnZ+bX7iGPf)q#*Py67qkpF2@p1BmT>{G(!)YjNFavEH_# zCVJo2y!??3uiOaY$3Fw?&bn%2TJtIoiJaO_IuO%)++(#`9s50kaRjwpLesRZ36#}o z7p#pslIs0{Z4F6$TYjE<#IS$pnj@q)grBW9U}BSV`A#j}e^jFmUbUok!Wx9PwwH^U zowc)&qLf@}8#4?P3X2t#dWU*WJkFmEjT-hpLyu9RAE)2 zvu?2F^o04}D`(c&9Sw3|n4RAnvh^UWE`-J^s>WxgU7{=g)ZbVS0tQFEH#-zE+@deO zCF{mJbDz154I5rXRO}4toq|@z2G7nt_wXD8*Ms=f<%Z*uD2K0+gj55H_GtByiZl(Y zHhv}bCL_?C#afjvlLgD8&IdJ>=g?i`NjTqp@~5|$$B5aLTUl3AQ;9Z2OR%^^0N`n% z>e2RB26zRdHML>C5zSO_U@U`%9&i%^4v3M!@qBP+(bGr{W;W1)Y|+Hl1N=Iw#Ro)7 zy}{DmUH6%(KwUF^AU9=ZU=}%OZfcI6dIdRgRSLSTVildM^sx(oCt6vYA zV3ne7Pe)*CrKPF@!Su39&b%Cpgi~mFeBjU_NHOK*576lr9BiTXZtYEU0ng5y<)`J`dEp+QipHjDj4n zN;f#H6Koh}Vy=U~bXXXsJ2=jCRU#?gq@=;-931HIi3aa_{H6^8+oIxL7( zUk}Aznux}?4KG9GdaR}x{%xHo&H_Ua$od_njkPX0oE{ii@~jUi$-i+EDl>aE_N7TR z)(|(D-J8K-=Lo3j`l-p?)Gj)#>q%n2AA1wPo`ig(-Lq6ELC@Xc zcf1BZ@q~}f!)}sM)VAMK(L^?8gcM1hOp&!0WZklvHg#h;?jd$3jUrWAF(m~S881=8 z_12dh>+R=ggp_$^f5St+ZjiS_D zhQfj%{H)s?nFrT;L(%Kz4JN#7=kP^_L7z&DcR(`tm|mJ17UPjPVBb|9@&I9kyjpN; z=`l=Ol0YBZ8X{aV{*Ud|(d(^e$_ieWV!|0EVvp84Z4Mh|VdZmgu%9_Ok}7OtDbk&N z6JccbHj5C)%=mFJ0OW3ctYBS>-t?1A<8Yh4u+}1eo)iL~`bvayoe)D(3cH(Z9+oU< zthS(zKYC2nz8n;qqIg)pT6^}_b$WF$nBp8WqRZHPOON`-t^!m$w~t7%bZEK09m;GR$p}oz)>=M#^uWYl zHbh+<+Do+ihNpMi#zAW`hg66kG$4GPM?a4^D?(h6+4Rm)S4p9yU#28uY@4`XE8sbv z`!-hL=TU{2v$qh6SzeYNrKK9Y`%k$5x zdC>c>gPz@ZvJB+4}Sy zaEsj%c?SLsQqDx^)86A3Q89vS2**5macGFTW5Ad?YH#`_<6If4dY#Giq}F{xZ;o7Y zyNR?KB+l{GZ8&MIP569Bg8Da31-U&cDcE5QaUk=D@WzB(khlQn=AHxhnz`r7J&V2+ z!Uu)<#5WjZ!CxDqzON`lpz_~9>l!Cp-DsiSks7%6Wg_4+5#V|ZuV(h$Q+l7m_O1d5 z72b+`y%Myp$c1Qu9Pzv9zX}-nZmg4O`#_Kkbj}tbnc}v!R!gdE?)4Lp9w}!wHmM8| z&;~p0VY;fb7f-YEuRU;G82)!z)(vWZS4R}y=WDfFkk5XYj$7ft?BOTLyrH}Z*Bu=2 zrP|s%0>0kG`L!qh;05(rur$Qy&J1SKDXo9zU)T~is$-_eiTgzUY-temMK9IC>DSUy zAgSIQQNA5He1RcMXYB|F zO~@;ORU$Z-l(Uuo7zjo%9joq)1?HxM)7jE6^T(QKt_i>H~IoyuaQnz892e-SwB67ihKV z;m&{Bci{7@KI-}prAjr@ZU-EtU;Vz(fL_lrKUffiZ~mr? 
zp^$%e_C7`~lTKUVGv!4tjgTc%BgO#bg4#c@67VRVOrjhFN$KY}kv*{Ihs2RKl(VtH zY3~&185@-;sOk}m)rcHNYR#E9Czvi=MLjc}(@%}o)Pu_=fqO-5aWN}anY}b!Rc=0l z1uuU`S7n^39Vs+~<<5dv+(%kzmldQ#p6Hc!BCOhHPlj@5$s8tjsa0tGH{Arq-q?W3 z42B!T02d}$$vylzoLY^r>5USD$Gt%VDx#Uc(m=@cA5o@C$9S!n9)GK_eUR{3Yi3Bz z?dY1HW+Kmat{rdpf@bi|*}6WrZfr{s!fwom+TO$F2E6eVWs;L+*lJdiH{$3WnxsXT z1z};a+rH9bJV2FC2(m+5R?1l%10U{kbC_%+J#wQkZenV!-L!{{ucQibT_{9fx*m#r z6xf*|@CG(eG_B=IyOCo9{K#pYGX5$TFCce|h>cPEf*SP2I#11a#!^j*1 zW6*V1(*A$fjT}NV*&|9=g6V=aiw4)?b6bIfXDLUO^!XsdZBUo?PC?N70`AGFeG!{u z%THSQmCJZg8Q{*&LEfwugcCN0PVR zD7*om@0B#H~DvKfCP`J0M0ctzk$cA-nXlA z7P@4JQw^i~sr#k;X1EEwLIu8TG4)pw)&HvG`)EA9Mg;F{!vOU_czVB!H^Tb4ZOVGc z;5?vCd#s~;a#9ZL{o7f+JJp{dbcHh>na7B9 z??_Q;C$8jq>GTQqI8w8mfJ22Hj}0GlY1b}yGqgckcvJd))FClPH|F%MFF3{&+-fD! zjB-jYc!A^$sq~<;`Dv?s*pU4nPtE)6gHWL3(;1|TM?vxuqdqktKh6`Q>K;6H0wFSqnXAtt3n;OE76KztidjQ1f;o3fp)2LRZ=Hr*$^fKQ_^A+r zA9FObZKoi5)m5AVo&Bd$G3_uMquxh`+Nod9Gpz)W(=n2k?6;p@rlD&ALnI3pT@U37<^T4-ffDvAI+Rb;InggJxHdpd+7 z%Zetat#%9lDzDb2`Fs>w4hMxmotkrY4ZoIi;>j6lCu&wcLHUlhnQy|%Ns2|S(1S@L zH4no3J$-h{cut$wsC>e^O%s>35$A0@yZx6Nta=I2m<2WWC~z+l)_Z?qSD$Tx`NAJr z592|dWvM`Pp-vg!V{$x}_S=$KD z6cC>Cy6NHA9N5LSot#Bm;<^yFz&R|)2~^$qgf3N?FY*w6jiz9ToEtB%fHgipiguO= z`ny&N+dZ(dyLUMUI^yRh*;v;nnqaE{w1%+E_ta zLSN&cYg&$#d7IrLx4md(fey^ldWbkt{*X~6cIh%HF3rKwys6AOgi~6LCunv1hZe_h zR^G(QSp9rbhH;K|89kedM@XgijhEoyEI9IXu1K4bUN+p%Y;1gftkFDf32$EOxSW|* z@Z8hf-!9*=h!%mKs@;g76*{#jRt6lwsbWUsnh&dblX{O~W#3_L)mP z%|;43=wjNjVr{6tX3G`Kh}@Fcp3a8}So}UTEW3@_Gj8usL^SIMBfXbdrc3q!DMEiG ziI*})w6(Dcc|W>nYfoK=P1Y*a8Xk{-xY#~%6=Cr7_ae4s%}vV>;5elc@V z;5-AZlkmG!Lg+lbXd?;1Se!=O-|kD=N8WEWOnquBwD&ISR&<$9 zdOjD1m$ci64#P1W>ak6UGk8B7TrzqYYLAWG4YMJ=rKa;{X};KB-RS@rdPyxZ$DDIO z^dopM9iys4?y`Avl4Vi?YA{TwS*h#VvlY@n5_kHx%+4c6Tab5npo_W$G*lyBFMvGx zu5R+KdO5*E#P3x(Px!rsr4@wML&3>P=ApJb1hDo*N$eB5RuP2BJb(rpEV`~Zhw_*s z6uDe&!W&-s=O)I;lOe>^gSbCR~VE| z`a6VY?f3rqbFM0|_!_PR^}6$-V=|;`T6DG#m?_0KzvZOO6h##PMAAJ{5Uk@VaMn`f zYSV_q)4I7I)B(3>WHukuMZf;Cv_M%9gng`s+|`(>E7w*-3x>Q($hkh_>&@aJTgH-m zDt9}Vdl^^Gfm_XAKAfWB?Od5wwNu^4!32ok@B!1mSy+ZTAK2Fbt2fCcu+rbSz75f2xz} zY&giQ_v!I8A9-k%i;z$HtE!%%k#RJU+vfBR+Z+)30f+rJ%kEC6Ca`PT)OhhtnT!v! zswUVHrWwxsLbhJ%kPCW505GlfokfyAYf&m4sQXAW*GM6qTb`3lq%@uO z=;Omo6sk2qW|8&oS#R|XvZH+}&XyF~ZO(AR$Ak~G35+$GX%Jf()K2P^Ht;eG_jb}! 
zOh+Q@k5>DTS+N8I+aj&zex5cMJ0V(KLvbrKHEgXvVWL1S2VpdnX}kt3W})tHs(KaY zh(}iKxQ##0DwoCorBKH%)h3ec*-QJ}-E-IkG2ihAoS90rtUI0dh5mh$?87{$ue( zbS}wwC|kkD9d^EF!p0kFUD&9zS%-(LJylanC_ z=}((eSeWp=L#r{+YKz4Dp;pLJw(a!Gah=Tx+C3MHeLmkn3)d-bc}S$9x2XlFK9X83 zR#PPtH${R{6AFT&?E&f#C`wD>7lvLVEkh3#qq}&tZbT1L1dH_mLqZ7E=l`W~gT@?a z+dN7f2X&lUQRhg9y`yP`W^td6%TLFb^6qgB!(Rx^)ovWdR`!)RRD{V}r&T(<#5dVR zGC3?h?7{WiUT2b^oy2(jyRp?dKd2H*MAz(L7Q^=#RMr25Wiad{A+Ub#xj+ZWndO&3 zxB-T}6d+XAjSHjSlefQKUIAaQ1FqA9A;gy&3s+TqOxxY`w6dVl#1y0FYjVvg{NrPN zUNhD*564CnZjpj25iN`di{sFv>Y03c{#Wp*&HYj%mH-l%e+_5Ddd%3t@y@i28`Gcwa0JdU z5gJjx^jai?{~l-5;4b&Wjk4ADcXurZL8{+~I`SE3!K4|$l~O&KEO`Ndkr~VFlZhz2 zsfSj)xN+q_-84I}POqsCs_eC3)e2p5O840tdz?}P8Pg|?0dX(6U{)T_IRvjPuQs7ULxD+tAtD?*$^?_ zPWIftLhHxNc5kmarm|B7q#fbHQ5lP< zNc(8VxV#fg5lM&~vz$?nIxH+iMZ~Ko``4%6A-RS~V3iHzc1n@+0wyV%Z;!^o?>W)$ z4Y+2{{v55x|E5w>KA#iN!ERq)DrEQUBA^RIgm{oJZ z(BkSa`Qx;sm%cK-f7sN%Iq;?DzaAiRcR`1Z`2PFp__F`-{1>EotVXw&cIGo_s))xC zC|wREpnER2n&_Q?*+$~TbBh?nAk=~Xt$8GHdt8O)baxY=NQC7X!NyM`^1TRn31a@V zv*praNjsz;W7e69M~raH?)hN}O>kuUy|Hd*o>8|w^oOD}-jIT~Oeo;TbyQr;l``px zURIOzRmPa}Y(_S6>aQ2-?O!mT-v(6~-NP5g4uG-?KR9=CR$0b5)`*e@3!iGc+vy11 zCy|ftS#$d30Y>W!!48Dheqn|yZ7U*x&v{|vN1cbPofBqiBQ(Fb zi*MnbU2ow>G#T8-luXi^wPHgU6mPh8Gx8#Tdw2?2`o>Q%GLzCQ`!4ic7`Ap&GU{3H-o2Y(*D30~eL2ZNkF1!@4BN;Jn422r15>6h-zkd<5SQXOk z$JNJ!r_UZ8+wfr{`LGv3)dM4*K7YWdSg_B zXJ>-fGV}EGqmbR_0eN22uVLp7Tw4ErdKQ{=^=_xP%)aXa5(s-J$=Y;G!FGIc&i;Ix zXw>;~$C4-^2)c^l3@z+Yfw3IfZ56y$;OR&8D1|+r^!Za*Mj~=d)8@G$E3=G15?sKS zDzC41kFA82{OV!No4p_@Yg~!Ee2oQEQbO!RZ~}NP+e`l>J$-L>3mE$rv%**3iE`xF z%R(WpAF)~lXW2{^)Po(f!nr!(%5PNxuo2S;#9YlA+U)K}j;!031%k5pk=Uy{1-dKNkJ=3IUJ zUMEC?#^2DsfT?mS>CA;jW7*>FLZzDXJ^d|dELTU=^DW*VDf@G`alySmHfkQ$ZzKRDy9{n+t{akn^^ zI4pJP&s$&_f43|CXkL&g+7RtV&+p^K3n)0``p9y7pZXl9-M23$+}synq|rXxv;C;( zrUi6IWJisjajLYvt>YiNy!7ewQHei^@prnUWrj4!d!9u$K0C{EIP-qA#*3kzs-KkY z-?Dp*(Q!VgKCxoS5i5~WwpJWStB%VG_YafWfUw9mv1BfZZkKOudkY7}Erko<5hs~g z=u!snCh7v1Mz2T|phr;C+9iQ=={9eE#sq%9eiig3$kD1#^Y~_j8?vQjI$&J0ijDSC z?kV)aDMT!14P;^y=5Bg^iyaoP*xqaJ*a`ZN0-A=t;tw<8>-4ZPs;7fk`)<5Y}@)1t?5yEn_|!lb{l8@ zBKN(O6W)*xlpND{`iBF77E+3(C$>!a)RaY|M>#O(XA~|h`ImhL=!Pec)kf)s9qnT% z9<56R9Ge)DjMagB_)BN5AX}JAMi+jsZicpztoVV*8ca*TqDk@XA$rLxxyC0Y3%3*} z-tofT+4aOyU$*gOgAGxlVZ510kyk0`U2UciMy z0+d9>+Ak$P?yZ#*ADIBgsud1d^Bm;r3lPa-OUnT7W+=SrNv$2M;8jBi zhdZ%Xs*;hjagyht>~Xlth=>+xahSiPOqcRBm>_Fy`MJJk6W62^qN)?22yb&WQN1Wz#P3nGqDeW$vANvSHmq(qZ~(FclEk z(u*4Z1a_1)LFRJ%t%Bk+d5y4Naob86YJTE*RM*p^o|yAT^5pXeMw@@3Z0y6gfZ`%Pp;h1b^o#D6+~Y&ubCYt6`P|6?xB)o!QY&U4H_c!BMuS&pfy2X)Pc!;V zBTshoPFA^3@XYFI$By0>S080389l|i)h2=Jbr_6>AZCa5g&`oER{F{f*`s#`j*FUB zM6~X@LP8%*>&G@J;8rj(Ikf~)zV?vx02`p40G42ATrahgYp%9DF~6F^@iI~)OH)p5 zeDF>u%2QwiT|Arkd86!ZkE_hhlwir6Le;h(DC(7y+BKOV&g!mI%eYy9XhUQ)7H>`( zFlc_=yed(zIPp?*isS7R6~_AB7xvz{19uThQ5^ew%Btl6awnV4Wj>3HbCNzJdsgIB z4Xf*^Oq#WZ;`+FBjh%D}k}xmoxBa``U3sByKO&MAic9GDf*IXlQ}fvWg*%m(ic^U^ zp2M_+$^G3QP74V#e&uB!4B}>240icB9E%{q@VIp?m%~4JR))8!AuMk4Bg*0;7fCykjQn{*zB|ET!NfRQe zUI+1}AXCm^GJpx7U9>NGGKwG8aFj#JT>!9_jJC6gX`W2eo94SO`fa*jNQ4xDvCB-( z$q5slaWlur)iFX+&cV~6mw{aZs;CsbSfZ>$JhTqh76o+G_|(F?K2J3APz`7}VnTiS zqm|ATzhW)Tp*$j5rxUvXy6skF+&3i$29g3+6MK$Em>g?R^EMr2zvWFwy3EvZDsHpe z)B0=mCmoAfKre+tcE2P7#p4NR3Sx2}&G$OxPigyyMem&g?nOCGz{n9-7I~o}6vr7N zQLk8g?Wxo&31@-|Pt6?RQ-hg?b1)ITmBqbEeeoM2>u!*Ku4!+aQw(otiA6eq*b&bE z;#LhPV#~GYe4^pSm|niv>Z0xpfvC36V|1s}p?HD1F#GT)k#K6F9fap+BJ6fRd*yKd z*?mW)T}Xlm0x-|t85x2&YM?#(Nhzt`0_^_yC+DtERqx49q_-0#`-m`fU5e>n(4Zmx za;1+ym@FOOPW=sIxBIIKvk~1X2XdD8XSd&%P&((GT;n8C#Mz0uQ%7pl!o#yy@v+*E!9C-G0cpIGZkWRCB2S( zZpFb9NxA8t>4Z|CtQ4g+;nP;R?OR1Kj~x>E2XEQ<;-TR%2Y8?(67Q}D-qX@;e+VK2 
z_$V@`ykq|0i@L7~P@&(1Kz!5bkIh!r>P8#*iY+O`vaY~Nmfss zHPI~zj>R0k__)8Qe?_}K6GHwj8#LpNm}>QsVekU`BzxZ78hKkX12>AOo*DMNT)B$D zI1BprhDhmx4o26E26+%b{N#z8kjX!pSw6CaVtnll%Q!wF(1Cx8`qrSlkL~YG0d7?b zG7%xIvMIPBy%6Os_&fpji#or;VH?)x$w%< z1t5z>7w^R*71a|FFY_|Byz0isDl}uMPfvfU1`1f-_2~;Q2?e&j{cT+y_@}eqpVTob zEC0_?X3>ZEy}}ke1aVX^*?WE8UOI=Mo#lw$S%vvB5`WC#K*tqj3d6w^8Okl=!_-W_ z_8XGDCA7fh)RQ6*Oc+cjTQpQ5RIApYZVeO5wE`h(sJWX{#Q*+pA<@3n+V*{GRBm&0T8VHW)6=TgEx>Q1pco+ms7@!k_4*ZLCx!yab;XIjN{pEN#_bvv4Pv3v zqV0k>2Y#eVUpCj7KuDeUjD9RynT zl#b`--1UZ~Q1$+H5&O|G)2hSnO+u+p;gxxTwOeq1z{r^HMQ=sm$`}W;RX|^p2yEt} zyDS*beoDe@HUWOQTh8r5c!djgfH2fgMoSI$@M&^dtBA?-+pL%{+`;;8!{8S_7OC&9 zU&Luxd4!>jZl>`6tsf#u$=MFOZ+l#| z00JSO3v(?o#crM>doX!Z?`sFixz}c`0wOfC`2eizyACi^{j%~T|8h50+^In86a9aC zW*++lLBGQqC&?I#qp_fKc!eu}0&Z4$+J<^_x`A4kzeg>Tow;^vUhK9^P!@c))qq}z7M9fdq7xX0H0JUKWZ@E5}ZWRBp^#h10{Bn zQwvO!(D7BRvmBtBE`9_>_2wkwR4@>HlmO%oTfU&SOuFGe49_MWn%b}}KDf9j!*gTE z-%z=N4o_hcOWU^aG1(al%6oxEH&x_=$p8Gn!y&;I>5A0Qnz6^p?#esA+!Ke|@QM1N zpaf1I;k+0X9GSF(Dw{=UhGlIEiY%wDp77$0tl`_&+&06(l)BuaoU&#)%8Z7#<0$T$ z?9{}~`~H0P7b$5mrIf2kc%M%;BSEdLA%1uX_YeB>Knh!XF zAAt!&N-14=^YSY*_q`ISImsnv9^O<+nwz9#Wp@_VU;QJSaEbS(~Bu;#h>eHJ3je|2Hc18oPNNt#$Q`7M83vbx-J3Qm7`3lI%HK zoc`Olafi^czIAPO0Zaa#8hqUmH}HnXA4eQxLJ<{1iiPZIup1>OV;aBWv&~s@dD2I_ zopvy`-6$1WaK#HASo)?xEIJ_J}V6m+!zXSXutt3X=B#av6aCDD*H`BiN}IMcufpm!)rY=2?dk+*I_7 zjeN*?3UO&18E1P6wNl%Am{qQjq~45i&A2O@AkGU=s?CGoyxLKPc<2b~nyyiA4I`cg z(=H=hK1djUWy~dKl^yqC^+%Cp;CGfNNqvi5Fg7Q#@_rURXf-I6lkV=K8`{?FNVMe- z$C@=AE#eD#;tEU2Wh<>EVOM0b!N{Mpvc!veUf7aoj<}(!Y5NyQKZUs&o%S5bM!9iA z^~cbBY*kd~N6}}D&Wjv&>s4$|51Cy{sF&+ifMstoJ%GRT5tvubhKt9jRMqW6J~c_L zN+whIwxZk?(qk6_&C8yo<5d<SnJN zB6BTE3F|SI#uFE!lv|Y!2T_eR$22<+&JY`({|J5}tK^!m7_$+c_2`Mr4uu}mHsnf_ zVCUO&qcZTOc2e2x5I;CSLgZp)YT|hE^=d_IUW|JE@=BVlBCV~^K<@)l6{r8`j-t+s zy*h~a+!GK5ph%+OH%??2*lxH0l7^Zl%1JlB0E<`|-wEG-L6)4%>q_o5Wr2nI$LNps z@W$}uHDoo;LB8l0WKU&FO$q?0gyu@gCWqe5B_^Nf^g;Lb{p`-Qquk&7wvD`@5g9o= z6vQ)(6BiQ}In=6pZ%tWiiJZz%ezU0-xxgA|T@~xD6UTN^_)7_+=#9~{Lhnr8j+BJG zGm-u)3Yu_dj7*RtOXy(q*Iks30QDsoKbHdE9qq0xa75Lx^|q5C|34%|InYbF%KV)6 zqh&;CBoY=pvo{~mk_B^*YQffgT#TkIgYsM7Zr@c`zym!*$gxVDP)+}-0-qm_MwWu8 z*#~=ARwwuYC_R9L7ThT87f7n`!sI_n=Ip?F3B))%tI(WeZZ_sFE69n8Dj3EBXlI}H z`qq^3GglY$G@g&mlpg2432I8I6G}NzP#`V2izz$+@B|%RQCf9?=3*7~O?OoUS_*sMC69 zIiI+FB?B%rI0up+(+gHkkdAN4;*VoMD7p_gzKF{176hBRRf~21Pz31SyFs@HQ?4Q4 zU0>`!eTz6QTpPoLhV5GN=W$v8K{A;jq9r32>(^@+Z^8#CDsp>-j&u{?>+YxVLf_Jj zDOLzx)-su4ix7Y>^-I!-X47h|b9w}UmaYK9#{!|#7uryiPC-<~8m>cy_~r9HPU$z% z@R3DS+?iG9?Csu z+Lnoffekz7@#q=3TMKIkhE5paQeZMO)5S6QZ^A5Az@S!;mf+m)23vbc@|>4JMjCV6 zHvsn>Q&7LR@!R^Pc8V7&qDBpjyyX7YL#(7eoNcSW#Km<~H=O1L#q$ntD7o`Y!cDDi*sz1SyQ_`s?K|#+%v4>w zl3z=|n{XR&#^liJZRE7m!HEL7@-O0n>DJ(H;|f!it?8o}NxJsh`LB1L5^wG;Tf^HM zR@Hvva7GYCn{LUTPZkq4e<E6^#uCc4L&ovH=6cm~BGH#q;c1safWM(IZF zU8>3_10C<(#?u=*a5Gl*VLG>wp$aO)fecFKQYTEr#jxpL!vYd~0egtz)QO(skf1Fy zOm1vqN$gbQd z@dE{3i@ex5AFz0~l$#c|s`3P(3V3(RwHYE1nA48~C{xuGfQ-xox@jr2`BRVv(Qz`- zIYoBk2H8j5%E>T}QR5wl?Q#aeRC{cIo^Hb)h;jC?zsY{Kah=d13&1gq0#OC{sZexL zrQ;GQP*xU^^6k_1=1#Xl*&U>hyNjut(@D`oK^;Hg91=S4f6-Id6cW4nR(5~BEUeU? 
z*I6T(R-*6S1!HxyUNf?hNBn&NM5Z9&1B2WN2%CuX6qh4=H@aGr!4!LmsFon**s zx6pjiFe{kHJjcKPOfYB7S|6xc5cSBw6#NCNU$QB~R^eu2+g0!sb~a`|ZUeVbN&L+> zJS!P}c|k$=(3PD$`|BwjaTGB34L#K&KVWVGtjWuUPJ2&F)zXnQkE1BrMEJujMkUobygAK@h!{>p5ekNF}{g+c0Jucnmcl7lSf$uTAc0?IL-D) zx$XUx39l6T^)SP7A$j=$8~^+HUm1I>Dt%?xrmqyTd_AM2ZsyWhI}$C%B$90pLg`tG zw$6vFVk3fYj4C+A&`qlSeOdVda*)+>@e4t!#pk*&@bYVxlKR4)qq)*cS4w7uxG*@S z>9NFtkV3?FqvX+i zRFhitziJ!xrze0W9{w+4<#2wZPG6kRfhtYwQ?u}p00*$TI=V z9ITi3ZJ%nx4Tk+>IoeCvKwT-M$*!k)0FUYJ`TjHuO%HDKOPn34pSyps1|1A`4~q9Z z3Hrzq-?I1Q!T=>+fvCLi7#d3MpNbavEz3(KWW5?cU47W*LIA+t5Y(*^5GnB&>ylZlKqy#?tz;kz8P6GZwA_)rlzJ#*+1fIDs@vT0Gx7 z#T;g!Qr-Xt_C`6f<2q!DMLybrh}to>*8LM%7t2BTpZN<41YJxv%}t=XR0^N};Q>ZT zx{7-rtC~ut@j^U6K(Xr4>itK6cpSpgW~ZvveE#`;1oRVuZjFQE9l?T8&8s zu9)1kzdG7bj=e*(P}BM}0%=~NRPtZP*~j>ww`eO0DyUg;2Cs3V|JG?`7I0XEt%)>- zi%ewLqf?`#3NlB`(_j!L%H&!I1ZRLr@vL$Ov-nJ1N%~FqD&q;!CuLOw7%FPI?V^a{TMA3y6oDmuV(XiYIE;RfsRB5 zJ$GuILsblv?YlSSs)}#%)fPDclt@i50H4L-`}!ZJIzzm{4QeskXB<0lCJIrt$E{yQ zUvbLVywyg0)Boh`O(0Y)3-DX1T~B_fPwl~yCn;=#;R&4#^r&m7pTVG|e{^$vq-VUD zfR$wrFUOKm@Omt4^Xsx>sf3b=h6D6QU{Px!VI|6y5`o1!$nXG4f$(qhV9I%CqKEz6 zVy^|4$MN5FE`k{*a=-qV{ZQa|NQB{?yyHj zoq-1-u^q`Cr}S!lPvJvS`KfXXi9woSez+DmF;-&$wyT5YG^bs+o(Dh_ zME8?!Ca*cgGs5$fpW>vBV*VAFjX1ujWvK|1r3a}qH1-9Z5LrFd2u_9Qb=Z^SInQi+ zH~HfDOO5ajoxGHD!#1@nDtf3!}vphJ!Zb8cx(8K~v@syd-IK;(o9K1h)>GPk26 z=#cCHqd52_7rG&?ovU^g>#H?79H!JH&vuv==NOU?w=X4ltuO|qxo@c#qBzHRtc%iA zjD?@iExR5wk!S}K2DaR;~l>C zVVU!@`l!H9=4}iXo>zx!X(?ts8ZWP#oKnY1-fdO3)4O5bVnnUt%VFToTa)=GA8p`<+lE{MXg zw3%M<|6*A62<6?qRm+QQoeqQ5%3WwY<=p|k%nQi&f%?{GX9IVf@rUt4<`p{k5DIv? zg#7Ws05u}W;oJ<}Q3{B_vS)`l2tq?lt`{bXkfv$AVRxQTg4MwQF*yYXw-wt>WAxYu z-o$=$Z0ddBD_T$m?y0fU>=Z_i`DiYCVXKzKvE{ttuFL^*82q(rZ8H-9jpG> zk}=3oIfvrmTJRMXtfD21Z96O<*Gz~~{lyn>FK@`X1Q@zLBELDelmHPSjqQo3-P25e zP!&DF*i%%?+35^fFuM>M{7#*ejo4&6uztgt&qxYKP86j?H640YM7j^1ooOxsfUD5m z_tr=`HbrNj&9Splz?)gFU)D;e6>$gDz=F8TWBMe|>G-+f=xzh_IdGB(+YkfLKz5yA zOxgQj_wAP%zJpCzu}BBL@&va+{247Z>%kX!==DUK`U5=_UrCk);bP^hSg;qPp>kfc zR4ZV}91cBpg+`?2vEQV(0ulq#qJEQ9;3Ygu7M+WA9NWFlkR zA~1IowWKFoDPK6HRgpAKo=S;TP zBlWv+^1Pz=CMYrn$t>j(U$nlg;9&)5C=tV!LBIWbbB2#qDhOu8Pe6Jw9mSGKAk}hE zVK6)9-x9s7@wAIFdap|7Nub0RP^%@crZsgO8nRYz#nJ5tyS`k?0@0wkx~;F_CGutq z$s|o?22nSDzUuOF@lM|d=(2_IQnc(;M(p{k=)nQdqqt`y`!xKL_SLnaSPjzG|aQezhsHg&iTu!G~IS`jwt&V#M(A zt-!fFRmp_iF`tA&6m?`mWG0n^FAyQ3r?5WZv8I9)48MC;Qrec2yCOKd0_dY%g>~lb zT;%?Dqs1%7Uc2fTax(s&d4Mq`m%t(b;Di(+T(REkbbUoA$o~iPGneYCk{#Vrtk(_a zRfVl(_xKi?_k!Nh+269PnE4PZM5xKM;5mT$eLOy~e9Em!C9ui9v$bP>biB9$bJBvF zLF~ocb~J;u8x}DSg9%KdSAg1*T`wgynb|R9PA(P2yz*FbVU4~T?wM?Nt;Ffyied*z z;;;^d=cQ1?*?&@{Bd**e6etiRb3NI5P>H|Z^$&`Y1G#1TeRYj!J@QUPQqYi0)c=yE znBZ)>CuaD7Zrak_?!3RUB;RPa%S{w^_x@dl1@^NZQ9X*qGyf zQ~me_J(A9;Wsx|Do0n%v;t&UioH@aS*Cu^gsV%xKXw<0(suK_nMoZZjLXes#biy&- zY-#|Pyj0&iypPmS(Xc;#EvM(qt;53y#G2Rd$ux5p@6P?)DO z{~?gpHiuay`ZU^sb6LYa(@?7uXp7E}-U8HO51}0{T@x2cpl^l^S?dt4SEZ%N<_@sW z$n#%-v+!1I^VpZ#HRaZoWUJd~J%sYmGl6p*&(RNsGs(&UDSs*at(0joEA`Fo0K({O zrNQo8Ta_8tr!wzSKyyF~#)bWzr3);{!;o`Hz=Z;3DENx%DGoG>GIHX`(jkPOL61UZ zWbm6xu+6jG>;Mby#>5+VVbX2)JPA=*`a+9>GC|{P;qZ_u@CjjVl>VE_*1=nzmEJy8 zAT~+|{c}oqIey|avq(AUH-rv8PV7sH4Cc38v9^4QuZ(?8e!%*G(;$l%bq3Bai67-c zOSqodswXZIq+3fK->Eiv$3x4XtEk2r_dRo@e349j40h8r1kBNcopo zwpv$BcYDKLe(T1Y%NQ#S%_6-$C@t(Q;OOh`_K?QEWAG4Yr|wl3yoak|kRXKcJJlge zBD5bU-UXnQ)zPYDqZhJ%=7~pixK!x*!SpfzvZy**W=Zy#8(m{+CYXo@USnQ*cz%mZ zju#uRgje2qxA#%5KfteM>RHBH?9v!vI%Uj!%tm+xdPlKBS zl_;^Dm>Ww8PPY_QM0g7W>r>qN4gwm+DPJwuRpf0(d9uw1grz1!dYMMCvSHiq`ySK;l6TeI^6l_Yt2*z~njD!ul1%OTz@06LJf*S9#~LST zRg?3m;92>$Y;5dZhKPYn+qc;tu`3fSVv5?Ih;p5X28ufVe44|RXbUf13daEQxjpZc 
z(B(yvLe9Lc>*DwO;!o0_{3W3<3S*Pb8p+ZSHC-Oro8+=(uu^z80pleM%iz`P7agyN zcuD%9$L~@DBWV0Re2jOMwnx?KVat!kYyi1k9k_4&g(ukzIzLN=ubb`|N9+s*^nwPM z=;-wmhNow`u(0UPjgz8C3{J+!PV`gb0vv`g(MAMQEczGsY@tkmWtrYWkgNs=t+PtP zT~rYHAa(~KW26e1TWqTic7%|*Lg?6~4wbtJ!FONP?kA5fOd+8E4vK-_QJlz>(69TG z4k3Akb4z-|(6p8@z32t6xfyJQp2n~)UzX%~**CeMVMA~D?O>#01y(kFWw}nM=`hM?8NqiVOH{uyc=Uqc5s6vlJ_W}@;ycqf2B57U?XEya z$n?m;UdA>rF((}efA9@x-sy@sizz3xnQ{i7dg6Qy%M@)tuJn5SgWe3};_sKi;BbwW z+xqpk`e@=17x|sPnvgsT(3f@i|CTP9uu0mIr}jESsxh(b^4#)jo0!%xNRm7R#>-`^ z=C589WfgT^4jfF|t<@!inxa@#a(Z@PtXlLz{u&JtSD@)8&Tzzb@2*Ssq-omt`p+z| z8X^)=Yl$PGjsx2*<{tfOQ}LSTBkK(-30LwiRD@;NWIbo3~l)(yO5iY%R=(?7QT@J_u1GcY|{E-R8V* zR?y#5tw@2X8Ub@d-IaFZ<)2o!UJ6|F{ue6Eb5~>)rx*PdXEKA*#M**P67uOpTGOC< zMO~A?1kcC8!Ru|)iF2x@onB|jgnRs7RfL9Jcy&&<&bfuy_=nNOt4!@TM)sysk2#VC zTPmvZf9H4R-~0BvonU%^%UWE~xFBcYF&-HdKS5BXXH$25l=2pT5Y2#2Sxn3bkyR7BH z1Hii)ovG>|s6mun*t0C^M7hd91nE;)dCi>FB-9Od)v4l%{p`wsPljyspnmy_iiX%5 zBFs~8+caK2G^C+tK;$QY@;O{}JL;pet^rR|p>gVCmTDt;as^i!_z`yhI(z#3k$fPp ze#72h@2;`u8SFp;`8*=r6`$);ByFkG+_}N(TSFN%d{mIzaYR6A0COywy~(8_5;T_O zwEOjr2{kAh>A0Y?QtnxqCTmRL$UztEDR(Y8e90po6KMM>M+UC+x=rKx zUUnK?9uz8Q-ZWb?is^csAvx#{QJ1J$%#=i(M_6sGAZrZw3M4R)&}k`7*}JcnJ0LYK zA6jr7Do@1OAG2<9g__Tku83y%S;e@AAUEPRygI1oUoyBj*}wlY>qbHr;Uu@3Lrw<- zkrmv9!TcwDgL;++Bm&njpP}C|%iVBH9ipB(Q^2(Noc@t@yDQcr>m&{pelM%RY0q@3 z;Q=?W6Tq2Oe%0>*#x&Toskum!fXoyMJ}k?kP@=9Um&(rjq{9%Eh>_-95Vt&1XXA59HjRH}J0zV)Wi zQ_lS{`>KSIDW7&i^BMOS4~yM0Ujg|b5-y_CP!k!mQeFoz>;qG53XMcmoFqc>A{AZo z9$S#!)7HasNe8}U2e=&M>WvJhxa^ZiJ=a%m)5-cIC}Gv2Nl7;Ck5$Dx&pqyX%`;&KIdt=>NZ%J?qvDb6=&R5Qu*lCFV4{Yel zMka4Na9N=sk^WzOqWufnpagXcep-MHYVG?ndHF8Jj|SqTR@KJ@l{F1Ef!?#v(T8_} z5ez``Dm$ShHvxu;Z9`vlp9)q|mGimpUF+Ad9Qyh!C^{2@PQ^n|E1F<$>-!-o6|Vwq zd;$N99kX7l)mqal@WL4;@IMPD(RDPK1;Zf^raHU5E%rYih8;{5 zKY7oI>0Njz`{t~0GvaYMQ5~rSq9@%I@nJQF#0H;DY^Fg9qfd>eC31PY)q3}nT8b4s7@k=l&|vyKK|z%gqL%( zz#daDcu#K5?qt!?c0djU!c%1bk%e%MJX7D*vI5K{O*_#}vfQo5)>Jf)x7;bBxo6MO zkW=pFggVX^rHAb%&sO1_WVRHK8y`cS31WMnPo~2x?q$k`t}IPmJ+ z*3+{enYF-UC^b+x;`+}+&H4|@93|L_GR4SJnsBI&loH^@F(Qr8(+FySXCy#RStg(g z>d523xS&aT@iy|*50Rz)Yx^G_Uj=vctllmVko zP5UBP-%0l8ZOv4SnGH|JpOqRYwecVFrD+qgFhPO z2#Vc(dL=$zh^O!CL(-q?7J&0c!@0o%M>wy|bSuzD{s-v=$oPim=9fR1_orHu1{%W- z`+wFhQmsT57v(=nR}oHDUJCa~8RKz{Z07No9x``{Ktz-?ZJUgQS)BE4>*^*#qVvVC~amoQ{M`T$|nc!F7f1L9thikE-)+pirjS9S}m* zaKjM<<#6@GYE}T;rDv|4okIxTJEdtLsRjVvTWORqSLd)Snn*NAO`38)I+K*P5Ks-= ztQQ;Uy9J#eY0T`ltx{)BNu_SZCa@6u`lTw7rb*E#U+wOTfipx$ynyVE=}y2Sz-??l zBIe-k?>Lbl-iw3x`Eru{8rk_a&!cGO9|6j+)i1mj3Dh%_knTlKMZJ)V?$LpLGH>U- zS@UUs??Q|HD$4oyQ736~!Qr&t^*aHQkJK(emf>$^#FuxNL)WmnHYnVq3(dqIFP&@JWTh4VUGa~~zuYx~wnE;rWn z&DO4v_2g{RKQq|E|36TGC7~Zq<0bQ5Ld*qu!xyW)n~TW4YXi=s%wkrh+js%gsRok5 z&J&8!;%y^?;FF;$rGOAo1!t$`j3woB?yt8aicj=lpwsoK_-Ft`ZO8cvMY{dj@m1}< z>n<3yXYi`WU$A_8SP=1KR(eKRM_z@y>5|H^f2DjIc68`m$cz$53QhNA1-cI(#{&~q zs3XT@J8*C|r}ndVD}Oqa#_%2SphGW}fsDeSLID4sDG4!&pk{?jM%eaV@$JK*P+VXN z8xz8gC#U_rnb>ke@7MJDWWWIK^QmsfT!MaxyoQX1`(9oHb;E2uhj8<9Gk;vW5$SqA zNuaxiL&KlLIMF-~Obc{&OcXyKtb?vRc640KS~oO%=8_v9N_htHs=O>G4LDMS^y9j; zz5aEB#aRe(fDp5W2I*!czlXer*2&2j8T9@j12AFocj>*UyB=grT(YJYDx@lY!q3A; z^fCuxird3PdH4!6BX8q%(_BKeB$r4|0XG|m`&hAf^PI=-J?~gYZ&7Ku*vOnTRkf zeJ4LRuD7uN)GC3k=H<2+d1GJ4li@`rKoo{yB3}>c1+-6 z^xUGe?1aD?llR&tuE%s1XSjRcjaL7A8Zn8i=``GgtdwRBzkI=a#*##_@jfMbcuIq! 
zuk)BeO@>4FOj_I_^ezR5m!^D9>szJ!RqP&%oLyS%#fVq;BJ(OFs?G!uRoW?KPo7Q& zMF|G^!Ff5dkhYWq=b*{H|B_^_me>&n13yx<5$8*T(rNb!Vvu2C3g$c#KdH>(_a`@hB5x#7u{22B|itA#G#tV@dSf)r|h1&j-ZOr6u5J9=MyU}GtkT%4*%P^ z^mlryiUvaJ+uC!!xs^iWutGLJH?&zx=rvqfwrtP7HGQ{a_SWVC4#@h5+4yU=5HRN|@ zXCSK2&Z02(&vxJ%-gWZ|xDA?92`h&3h}WmYNlN6ZmMliXz?h7}*9!ngWVgqJzaq)5 zqX35l&afKl3@z_TD3=2|Q~38zI!IsQd1MOU86 zc~C8keGtDl-!cr;2`k!&XNP`|=CK82vcfs0$J%aMI$_}rB70V6Cab^90P=(pMG-XCEU zouXK$UqWkT-@>(w?d^&(EErGKI@|vGWJIr(mFc_}uH#0pon!sPRrrq&vDkzi zJ9|9eoqvXC_4FsrqJr+?^bn4=K`1x3bOD&|?<_A!n~2X(#@L(Z{SxqCl`bq|UEVT( z2Fr8_i<)9Nf}-;3mU!X~J|%F`WWCr@F4J*^go{PIOcO$`8%&^Qs=NtWYJ!0jHtDSw z7)HPqQ&o*=@^_yjoP#2}P0)6JMarG-j)E-942E4(uGH5~nfiW^b2JXmZ>Px$3e2Lt z*i?Y8f5Nw1gD!X2$p+)ZoM7z}KYJnN$V>BE=c~%&q=_Yp8@&KSObHCUa);D6+)S!FT@NAedqD&YwG!&Ng*X?pmM_ zzXoIbF;Wo?*~_4jbSYVr2B8-Np-Fz311xn{T=~Yx3+%SpgjVkRe$7wKGn`5C6)n?& zT=LKx=t1!fKsduRI)dDzWAMqmw=&Mh#%Yj9%PGW_x=qujMLi2gVHq@+uA%PmTIKYt zfQnL7tI$xAzkTwa=o_bumTdGtCh4Zw1`P?>t7~pp{&OfKFN%ZwB>_MmX}eEsAsV&`5;wwdc`%6Pg;B!eU1j?%ko|+Lzk__zM(fe zw)|lEM^NG}tZex+6HSa{Tx9S46^gjqG2GP!?& z508R<$Il8cOqLeRPLC~E25sMZoc1aM69UyF7ou-ewJZG9SmDe)CdXNb;qaLo8i6J} z<1+R3DAX+fF4M1`c69G*ARGw%5VM>q$+jX@yk4wonje);5|8tPgX3A;YW>dJpz;9< z(N+`)#UXCj8GIeP@5MP6DjIIO<3lZV z-)kXsXK}8G%n3u7phX<^3AKxu?^&R=7N=t;#aGz0_yrv{v}4S_NMTD&M^Un&ZF5D+ zp$XAZ#6;BdG4ldtD-7KqU8xjv*A?o&i&|K zd@GvRufWGcW{8@Oa2TqsTVG@fGX>tdN z3AbZ}p|wZ$0BiJA{Kg$}wkvx=U21rFmv2euxKaR#=-3CUVI=Qh5CSRm6tJEl|Fa)+ zU&3u#7um>9mNHNiaHT%-NtwpZ8ucM(4DG*&sVTJXB?TMp1vx`m)P34}bgrz-LQ`EL zT9q--e_A}^+>ONbVze{kOl}!FC2s$PSlmqCYQZO347tM(hbVf^b^=NMlQpi=*(A57 z3LQ_dcjV%qCE3%Cp=CK9@=kY$^pkNiFrcQ3%^wZzf)dtZ+kMJs8(E=4BJ({7Yhc0W zTGAp9;ox1RCv&T&KW8dEDj2*^N$*SC6oJt1#Lw7;K=VCrXWjR71fJ1?Db(z!)+tc|$iiQXOz+dS#!9P^zW%4O3p7;{%#~x|O5Pqlv&H zy7O}s$I&&mkQjiB24GVTmiHUV+>@1Gunmr|&NOa^<66Eshu%rnJbN^OUl z4Id*8;<9G{Sv=)(f*Jp%n!7R+)oH=j31X}3-h;=3)&z3#kJM>(zw>T+g zeSb|R_KvS9I~Ffv7-XG1zT&{qS~3y54M}-9KvO|422xJgGe-K!vhI=LH^DsxGm4 z7oPg)E^2bPR8-epSS-r%Z(@sqj0V^(28_(t{h)I{b^tPOeQ-?efWvCiSO=gy8rqx& zs%qd-XWi7@Fz}w#a7zCbGBM%K1d>24FeaXZ_-;PAyc|NZ)EC@oPMTjU-8iwo!Pm(E zVbK4}VSa8*tFL+QDC#@^Xl(v-d=}TbpIRU%QgKt(2%xzlTuc6H8MJw$cM3_(0w&(g z$gwlrxXx$-U|KUJQfxnGFZu!HNBqfyU!4kvT-%vJP~|qJFQSaqNzY&iwBvGcda}=N zGC4Yyy;1nQ_lJ#@fui{n>xKSZIf@=TdC?~`om3f?5^DcA)AZfFjpS16N8o9ts9beZ zZBVJs1elh`68@V)a%58ZkN>t>v@q1C>$?Tvv?=R!#O$fhHq6|m%LWWds<{_51wn!U z$xKq+7R6b6q00SkJQ2GAnW_Xh4g~ogT<#tJLM=Z4w$(kI)#;4u(iuns2Hz88Gdd^m z1==CzBCCs>?Mrvck-gN%ug)tT1`U2Yxc20vg2ehdkw&mm&pS(!C+N+T+){D46(JiR zk25?N)qeYfi!E402@@vka31L4&sWRlJ~&*11@QUNu{_CYtn_wJ9*RY~nf9-Fp@34S z3|@_JI+!jJKQ+Irajd5bTN$n4(h5!`0)T&1HuwB+Q`wvlRh22BSKuBB5jhXgE$VIP zqx9ne6G?+R45}@BXjf&0*-NUuzqEIRXk&H+9}#b6A6odiS=_>|XfKRC&Aji?n{{cQ zR5RE4eb)?`phvuzQz?poVd01+h&*26psR0O)>ZFj?%r8^s9yO(IoJAH<0&nV!=-_( zYY{bp7;^g|Bd!4keJDFws0p>7V5Fj{+#5e#>Hx%MCQ{xCOPr?vEQ(bXc8?+Ilwquk;? 
zc|o57LR@vKJEuL5vPv&*HmkC z6~+4jo8Rach=$rt|M`gGnV8}jMmc;1N!UycDv}$7g$N2G{U1fN&qtaoT2#Kk*@l|B z8$BK!UpBERB(>L$C9yRJ@*U#CdnWRxd#~+g96&_rm$(WG=RhXw@HttN`5j@WSkvLk z@Y+Sce;Y-S6sOFleCqFs7LQKS8Dcz7=x$SvJ2PlQd~&F?d#%HJCVWrb>9fFw(!69= zWHrfi%BM$~hqISzi_?PG?*dm>I+Z9W^i)ubAoj=gr~@AhFlFl<9u^)cDg6raHZUDb zZB9GnaWVKipA@;q+J?>ZrwcUh$OvkqPS{-^B5{)wAY0Y1fESTHSQF@oPL)GO>h?rC}q^6gttM57!Rl9(|#-sk`EvoGK9KZ$8*2aqI;4I zv|4n_fPs%NQW)X2L`YzMDkL8z*xbZy3wxH&KIovA9OINSG6}(DU;#)`HD7u1EhJaG z7WAyCt-mIvRq%TEAO^Ce=bscam1hrohg}D{zM2g z6Ap)nO0WbP7IrVK0P23H5!zh;!b@F;pHwMkWp2o=0!zw}E}Q+!`IuBN98bZ!ROvnd zx$H43DY;1RA%cD8vX3axUWN2Obv;QE6Kn&r0qRe{JXTmU5`ntVvFeaUDvu1>ETvmM zgf*9$APv<_28B4L`c?1h^*o%BZlohxq1n3(o3U}I;gR_)zlkGnekJ!iMqB|3K6k6Gr_T@-#q`)!u9N+R$5N0_ zzxCoGRIrsteEpL88zJ`~&;BXKW1rX`I6e%1g#X;#s%S~A0l03+4j-_XFunB=r#Dl6 zB`KkW*&<~+s{@6|O~)Nt}zy?+&c=K2cTs; zlX3(2bD&#Lf;6f>AC~6(KY5n|T=14}6@CikO;fVvRMj8clX~%7w0Gm$$Ylhv)?rpz9>{ix!GG2Ib zNqv&}w>uBn(#;W))8Y10IRzG8tjIR}f0dmVn4#tVZ?{r&MSGCXunI`Jr1Twc`-IxL zyVG4M|KQjs59-P=hQvepwO)iw!V(O)=9`V-BEAD^GOT=&6x)&9FX{X?W;e%PRp_h> zwRnI$=#}gK8nk#2Cur1?)?YMZM;l_Hlcr!J-1F_ud;(KHjtD6VQv4W$YlxJR$C z(g6LR#Oq?PsOpN9@V{*0WN~pcu~zSZ=Ij{?`(g|XxzP{+0Prjt_=Y`$!{HV?X`++Z zq9Y|qS5qbO`edoZ3@0fgDQfekt53C*H{UZrf?t^mk-Y%h1Tq;BaK_W;ql%;=rkN<4 zC>2?QGFfrQ#ZF9>P;Dw~qFNk>uxCu*#-m{@f2^Ag$MTg>+xO7z6FB>ELG(`Ph3|K# zhz62Z930J&qZH!r_N%-|6mLDI4d&L%Gjj8japn9K=~cNnL$^tD7_W^RR+K(zo|SD| z_S=*dz%lVrs@(Mq8CZ(Mdm!0l{JtYwdWBEmxe{u*VHb9w*N$M8>cI+-!{9G%AE3pr zC-%&2(uyz|B=+fB?hLAtZ3cj@nzj=+Wm7O6B^X!sum)J|=b@nGC?Uo6UbtK%+V|!` zxf9KEhHN5NxL%J-*A6Ywf>o3|wVo5j*DN@-)GK+0(vND-Z@AJlN{rGWy?Wu4g zg2itvF3H)|0V<}Hafq)W4v8eeMo~p&B_JVs{ zl~>@nQrd}ljha;~n^vc6O2h}5>m(IdZ&*85Iv_BF$2K=#s#;*op{!Cv|5 zRm00iZl|eB9MNlTNmtq|6-?xyMuVlbK6e(P&!C`* zy#M<7v~w%OoQL7dkjf8wJq@3Hy1Ps_z=4g2nH_7Q$Dk@1~7bApIpgP)u!dPPweSc7J7<2s8@6)fLDUSTZ0w=a8{! 
zV~JD@+{XaQ;lWx#9TahoP8xn;(f#kLhfl&f$Fv%P)_b;zQ}r$Ui3dDgL&__hTNa<= zS(A?SiB2{msfYQyT<-XyJy4WzA2R2p{R!2BoY0u^4{*7;M1}t81srmx$N#Z9ZG9sr z7cd>2oF|H4lj}8hXONJI?J%bTqv|1&n(Wloe7vkug%MAItw6 zGmb-k!%#4;d;m7OIT|_9T_d3{x=N<92)O}T8%2MgdP%K7jX$`^N*Qt=^E?llel~*c z-PiNRuVsxi{3`T%|0)>fe_rI#ie^wyfIkZkq4zExklqmJQ!);4FKB=6z@LV-z1!(6 zJ(CGpWi{&F9_--e-zl|+7J9J+shfn+QC_7DKyMX*ANOyNLH%&skW_+<0R#k^k&B@~ z@`b7&aEPrOIspRHYx0w~*nP9(DQ8zYh))*T72G+8i{vW8O|?S~bpqVCRF^gAj4(J78D5&^Ct7Wr1GZL>VTo*wMf^BM(q%r#821@0#ruWkQo=v<#9vc#p-i((=r zNOP2muTsk5>P0+C7|saFsTOI3acQfc(XKq@lEzl++tAK%=XgDXv6zCix;fwA=pyR4 z-u>c{1nC-^GKUPG@QBe0za(I)vUv4v(7<&X$f;m7dMnTOuWo6gx|iUskX4(%bpRE% z6zv?sR@CP~yi-p0J1T*&Rn1d#Cm!bKy3y8|-oN=9&}OeXtJ>W?@ycVGh+0aZ9*WVL z{g(?qBu0AgtEk}{^9#dw>_54}(*qR`64O{R7dr9xg-P6pM^&sM%$%KFP+Q^ELbp-C zeoVVcHP10bWE-He%B#RH=QP8_|Gid_c>}@nnsTu;-|;=jv`ovW+eMtcz1ntpN;pE3 zwLcBeUCD4a91s62N{Z;aEid!qp2EGSmMDWi$mc4ahG8beTKdjJf}$M>i%2|NeKj-u zfk-xTmL3tupx|5d^dEKSjEU?G_DkX13jHNv(`}2~rfw}ZsR`t#`2h+P+8KL{{%%qH z@~rpQ>`7~OhpIFw4F{`s-B3@k_=jfpRdUm_boIZ^r#BrFORRCaw?K1yofBEo(1{4n zw)X-_>3FG7&-|8 z@)*3{OgF<~cOG?;)p{x1NACZ@82WX2D_kQ6i-eeKO4BYz=snTA$j zr^$=?=9||ZKv4IE^8`TJ2S^?-d_L)Zv#nL>Hv&*D6`*Shs3L`N9PGJrSLQa`Vh93A z1Uk;aQ?~r^s)G08yG&`;A%9jWFn13Hfq}{CCdO$U|Kk{EvcD?5bK%_*D{C(iCjOKo zk^CFyb_8+zv`oqp886k`w$;mf&@WOZlDkHX5`y3tx$-U&A$K{!NDuS1YT+9EMtW3F z?uqSFSxBjY@$MpnkooD&$1BK5u}$^UH9~nEt;-VMuvS@104tJHIL)5_Nw=_+t*rgl zuCM8o1)J0Kjl<)eHA|zRi;8&TTl%iZW6eBaa?k8%Y^8l1W~gjnV%??wy`hEGCwcu0 zMhKh+FELJmj_Mb+0_@uvBPBDbRD^|41k*Nu@%W84neA|DPALw=1jZV8$(uUt3x-=a zhPs~Ww(2pLxOtf_>Vk4d=mL35)W?%{j0t%(5u9mZ!om(h@_~utKiLe$Kh)Jvk;Qp`RKHsFGqX;$}-6bfiN`afq6vr|UONHtiNPUyF1 z{5SN$*Yxq+Hm;*DK1&C6jpoiAO|@0|SyAcG9w$LJ9QsARM9>PD#;MS+C++&9{WCp1@w@tmwEhwW{@J3TruUCPR-M5ywtQe$9s zbN-3F?6$0Wy7$P`v6yl_Fs?V7xl8xyMrZzkukS<&I7PaR3as0Y3Vl5DK zcIri6ehKtgT2px40zy)@Km0xgP(JIbNhV9JAgm478#FKuC(3E)wa*^*d~w`hHB*ZS zy569--oA`x6f&=qefznDD?7eQMcX}a8*6+PdxvtSX&=c$8|P4Id7dUML7If6@$L80 z<~%|b*32q8)HvT!a8$|ZR_lbF(8KujLW&=e#YkuB24!H5PudjuGelx$-5ihB$vbI(Hzj{YL17=O|lqg{vEbw!^u>5$yB0WnsBAu(x2|Y{*^oX$4gJ+ zESB}6Wgum;@38zis>#x^U&x;w4!orkP*ewSo+HA z+B3Kio(08)6F5xiN&~~c9k$Jv;?pV3%L`!R8r(v-hJ`zBY~7(A7vX(9<@)98ov9$Qj6HrFFvr-Hs=16luF z8vs$l;XY%m!uv5l+-1#tLyG#`-pSFh>?UU$NDSUJO1G-cihm)vL?K{^`1jzR`L%e8 zc~HI|mTwuc#q>ZuKw9Q}dZ3~E7*e_}fMrci>`D|cztmGwyPH;6r&5GT@Ary}`n~6i z_u;v$(4*(7z<2^Wiu)1ML(>Y1Iw9dFuw(_kpNKsY6tren1b4UuLL_0>w^r6r6j7?h zlZ1E*;hY%>KyI*ZqcbauW}Xt4bPp-v^5y3i01SmV-_aH8UFumNS23ln3h!?5_*HMn zF|=wrJq8t`hi=CF#OS>X!?@l*fxN|4RY0k%WT`)UCkqOwXi6NR`wu-B(bn@cxtY^G zmJ;YCb;1#ZI_9Lz#`eB&)fG%_O%t}h zqKmy04qH_PjW>h2)w#pYJP-gdtS867cn<)@J9L5Mn{FOS{HtB@TvJk~T2!pC8osazBAoXwq8v6njoD)hixe>)e1+wl9MKKH76B~M}K&F zzPRGJ2L3upL6%fFuZkcUd#6Q86-7F)WT?K(B&q_FL|hDGC!zO{8GugePBqtKqgcwE z_NZSPd?{snH2@3U+wa#ZnJmgD{0EpPwiGGID_9=v_eoDhgLln(c7Oh~#&o%8RQz_IZFr(B?orGSABTZ_N3@pvjD1hk8q%Ho&xy26rNuC~EMv|>0A8M!;D zBDtFX6#kiFhI`jfmk;IwU5X^4abEr7t^xsbWI~Sx&*Tmq&cyk|5?+C%Yrz2MZ>9zy z*mSGwh4`agE_;BoSD0xxod)HfQNrL)J)bA$!>Ucd2ctnui}@1$v5E)WWVdfPh#W`p zXJS-_9ba%ZIU_Nbjo#w7rzE6Ssmu<(tm^1aZuzs;xVLEJ_71{yvz?7`K=14U z@u>7xQ$f^Tms4C}rcq9Hd&9`bMQ zmw(ZHj-`aE7w2)IS{P7x;FnbLMoS_ioEa$;Ji=Mt`Gz`+3SE$0*7-SDDEh;)Xm)I^-1#%sZ!`AWx2u+n zker~g_E*0BH};`a3tU60{%aH(uu7`Lz$sZI zlY-Bg$27)Adr}!Yu=v40>BM3LzE}L~5?WftI-56^&!c!7tB}UnYpj~vqMK6ov^EPh zl!rOjC3jxBm4Mxj44Ls2b5+mwxhtg+LypL_AN7rpkItGCCJ4t~lt?luG}cOLt*E8 zh;-uHq(HaPrE!yHp9K$lCs`0q0`KC+Wgpw8> z^V)*)1{MKn#--R_cLjN$1cZvkBC8tSarFc64uyJ>8(0l$oX(!M*8sjJ?Qh9#wBUhJ z7JobqJ;z%lgd^EKPQiA>&Dyph-jy|Bv2j1_7?)EBB8<2ZqUM@D9TH;dbp(g3F_MrD 
z$?$cVgtac7a9*?{wHpY$(m>cGvvFXy+r;CQlzShB#-69*)&u3DZ?iIN$Q5jU`yK)kLhS6%yW)#U$==nz-qWkm7ot0t;gpL%B9yV%cU4#x8A+jQxe~JlQRO_aP zAI(h8`%hdY6*T$Bo(w8ZBSCZ1IXrLfaNmkpNW>IkA?o!Ihw5wP(AL$6Wg~Qcm{AGxd9=ofeFbMB@4de$>d|OiY-a z$!K6cD_rRHtux}pzhV!P_4O#+@!jFSKv2fbwD{dNITyF=$q4xP;sxW@0%_ zuvoOWK~7}mQ%_yB>Sj=L29?yab%WjQw_=kpkYQZgOc*)OMMkdY`fOzvTA&YwZrMWV zRNqA|AZ$Wo5fx0FB)Lmx8AjkC<`FeC@)Ae?vKc}n`*&gPU=1i1tSoylo((XRB^I>CjwmK(4&N*<; zI`}PDmbMKzASFI|S14(_UtF-eV^)Nieo1;KOg(;qPU0*O0@M8FIG)h-N@yL*PrnJX zW{_LcNezNfagawb z+!f2gQyXGB@6Gk;n|^@^ifp=z7Tb`EaN>i}l(c*gGCMOu84-sLZaGWV<1Pk)jZ1s) zfzT$!kQ36?F4JVaP}-BqsTmCU$8;meW%LzdcmDFc(n++=C&3Qd06(Oyt`)k8It8HC z?bT!nG51vO`unjw-5a^_`#$Pd!=bt?M~jOk8pR|dgDTCts(~|nJ_h%V(+lVc(`;XB&F895{lL#PZ_VRjtO0XT#6^uhnj;K&nnK zQ<1Zch{Z+sDOsDh;Ieuj(AY?|r_yMf<+#12)k~4u^a}v#MF#r1I3&OYc?I6p+|rFO zt<=X|m@{px*#ElR{=u*g5ndZ5@*S-GBF0qh*UD^&^HnSm=IqH2oom4%SaKX4br3%E z!lxE)99xc_431Q>*m9o|F~VdkW?dGvP6P^On^CsifBl@pPpnX)%9Cj#-C>LQh zcVJAgm2NO?MknQ-Ea;0cbh`go(wM9AbKw6bq7F|7lvUuaOpJDDJ)sS)>=MM42A?;# z0fJ98J}VkUNhMjlkULD%aEAvM`=-@;E^o)(3RLI;5_jAK1=mHn$Nipqn9afSBlPrW zJK4I%s?@{ZLG5iFkzQPOOVf5>^~9CdjobCTT%AT{{bqR=uOb8jHQ1RR5Sz*;>b@Kx zglUqiTBj#|m5T7wnvwP1+{Adv$mOMe8Fl?%itp@0t>dG-Jupow z&Km&u>-D?xCN!QOj_}}0*|r+uH_<@B8?`14t@*fE^Twiz4{#-sf>A9F)t$EH?l;x` zx!(3u?EANX7fCpqriDI^Lrxpq8Tt5amRLR8%J#}S@b@C zY_oc06KMne~^IRE7Mj}_Z>82v@ z%u4x_7{cm@K;8u(5q^^3$vpL3{{oNQ-3|C*y4R5dWgWYIju&bXq>uiIJ+~k0i|~+7 zaMeEvpe&P1@|#8&|JE->`xm8ff7>t9zHC!3tV@UlQ%#o?tiW1m2P&A5g768=7ZZn4 zD^L}GL=G<&)(>P(o<{WlrnF`aeLF*Po}n@*t>jTE?1~Dr z`;d9Ecf6^7_FnGuR=vwVYRLn+cEk&K#K3kq*%&{x|GwTi0s12u*kYEuxK{a%JU`ji zo^(42X!U*0>VO4(**K?md>cb`j|QV>j;S3rdz-y&`yMk|0UxiK*^mid(7-Hpfcukt zs`xx{Ay#0bwfep!N;(sgs_3FPe*})U%DWoHNnO9`MVR0wj5`Lz zs*r zu^rTL3H=E1W*LOrboThxEQBzLbzhIieV0nvQ)IJ!VByp$KQ=1v(GNm=_ z>pMIY)v1HdgxXRL%`XFS6y3Uv{>T;opYFW;mygighMz-vfYB2z{|tu2G|o|2!&%!T z35JvFg1KEp4tNTyBzCt>#tcEJflLc_0xZpB8`WsUK7YM~KE`yc46@bBtGgLm|JMv6 zREguv?3pUR>qT4?<5O0wQQt8P_L56JPH!pAm1gM7W4W8o#+0Km$Mdn)Qa*Yycj06U zsQo65I%O_sjm^jYZVS-HludlphcsH>gPx*WT=__h2%VY^T|7&V^DwrQsPcRYA7-WU z6p-K2o=p{SjMUXh`LY0>KQ`DTaEASC!P! zZT1^GiWF#|E!J8;wM=27aUn9@W0K7|Kr)f`Q_HUDnb}M*K*hnXm$b<#N9snU3@?++ zz8RQGcoy;&c7)}U@7(qM{n4y3+|$|?zI0RxVjRR{Di{_v@I5^OP?8h{RcqFviI@Jn zYVxTngY|S}E7s_)l~#F3MRs>(#yA7}Xv}6J8by3%|NDwngo=+++M%m=ziUQCo9Wx? 
z@!7jpB51jVTIqek?P3Fl;lZiTmi9Ywgy%-7{X_&Ad6wf2k5gkavpFZdqm6%aS||aL z*fZfyA^HO}1$F~>2K~+q|Mi7+^cv+z!54^TE{FePsixPP=i38GYWV)fwwnnrNv}P> zp4h29b-xjmXDC^rsZWjqzcojZ5at{v@ewFg3ms5=SJK~yvy2lzcRvx^fA3*}pv{}| zPG^6_x&JPhI1XB&>mB;LMInRhM~HlSHJJypEFn97t!=FB`Cy{KYv-X>gUZd^xZ`6h z4Na5&6zcUNo~89xhsCWVnzoRMe?Vx5KZ(VifP~g49|x{q<m6nP$KlPz{Xh z88G%HoVp*i3e^ujOf)9Xo^f;T^{;FxGdn<3I{2-0?Ck*WJLBP8iEt-JZNet5Y`Bq6 zg?9*`P{99&)(OzKDV;GG;hJ4hnh8d_bw0@E2|5cwrn%P0{|os&Sc0ujA(*;a=MBrQ zE?jxOoO_@#<=&QgX2i`t5I>YC)q;rf(4<2Z1^j`(6N?7ITX3(ERhsj_@h)Y%KY~oGw3>P1iE|jkJZ7MGzsKr#!PtXL zc_G3w@}R9T`&pymCtchFv^P0v-=bE9?r7&#zXU)T)3ho*JXm%rl1+e z&weur zJAc)*&&%Kw_|u4ABZ@rg5L+)5&en@V3}+rdv;fbb^2~mneUw4WY&i@!U4$;$TGpKS zOtIpZpi&@&52c@GoiTBB-YMNqXcE3T3*jTUYT&lQo$$_ z300Xo!J}8!?IG{l$61QwfehZ7j-BWTfo3!ui4yp)ch-1NZl;$P;iZ|EL{|Ax>&+x+ z<6(FO#)X$gsGkuV==fSo3S8R{=&oeCK(>5yau5E2y;!jGel;ml+xe^K{S9NN+(Qwz zFr8Tj1zOR_=yNI$X0#vKC3_fVC5~yr;J7M!zK?Tx8CjdO>7t&W;Ch0+I_pk78FW;= zF7=)=JUNEUjv+L0W>UDLch`*$EAN73K2E-M+EPnih)#v0E&n#BF4QB=T9+k`{&Dci zR&-8>u*pU7=o>HL55g^S=c^JC_md~viS zwYNpUzW;~ZGcO1fS&(zIyPgcFG0OCzXw^JFo<=I}z_T}jv0h1xUT9ltUJBbz`_Zei z+w#nFUV>LIvuYaKsQXjwJr0m=RT?WdfP`8Pd6TF*udV!MKVIy8;_*PJUJY_&J-=#+ zUadrY({6FR;GH%5!$KoQR=;Z!8K{h2w{+rlwvjJ9B|&$vVopDF->1?n6|eFWN7CT_ z8<`*UcMpM(Evdf(HO(iqa@;C=x$2TmVxA!rnU=L%6D-lS5IgM#Yj%xPcv%wdLPZag ziF03WW~A?OpAzi7lH$}!MpOBxZ3~6%2aMC%0ljqbw7-qwMo~(xv~YbgtD#sspY1;H zfXl(_dnCp9^iu^O6**Gp?yHA^IX5`Tgm6Zz-I!sQ5=n}1#$R&eXf@jVZ5c^_eARFI z`3ZvnI(O{A2v`bPXl_FIzD~=9H@BpORfZYd~766LqHs%R6haiLeinn&HO?M{?n@B z-Qbo3pr|4b3EWF=vPng%cV9Vi@Avs8o!!R1(+%WU~SY-hr!3)Hld+Id>wVFVujBihFAh$m4?R$&BT-QA5c$gkMR;b7Gz5 zc@8rwNhCd4GoEoX8MXUAxj?=$lKg!040s?10MIOEFbwi2RzGz0^v>-LqraR>ojxSn zg?TG`8u%5ty)cifzPLaIWHTZP#&W!MqJj(CvdNTrj^INpAQ;Al>qOn;l)3ceQU({c z8gkL1G1}*w-2ua)I1suw=3pK^&9xOXTzy4Pp&qI$Iug32VoQN{>-eD& z>A);jVOC45dLxfC+4e=R7Zt`2sv%`Le-$Urgr`D&lOttVhmf}8>|-L??`-W`;w^k; zeHU4>C(%0#kwnqd)nCE&8{~r63Nhb51fNUQlyePj%wdgTY0!-B)2uJcdcuGiOl)4M zK%VhKJCY1<5NcVAQ+6=C!hwNP*1tu3*+*)wtq>56#VA(1()sY|Q0`ll@N)81Rvyk4 zvq_M_sZDnhLiZ4?o#2?$#04Rg%)g$qHK2$C;7mxzb$x>1S%qElZpJ-W0%r&;_^JR$ zOP@zv(2hiz5XW2Th0sD16T7{f0k9f#EKpJhMkk19VK8m|ZdN2@PReh2BHt@?1iA`* z8hrhCY(4@T!pI_X?af(SZ-jE)xe?<^h6D5{k80EMIz(*aEGTq2RWemRDxo(4&=F2s zQmcgCFMrlsjR5Xc{24oxMg01-N$su%EUPnxs!MJpUhY0}F7}-j4{c$9f1{kOpDgum zNecao&lsx?ip5euY~D#=ZTY%G%s6LM!Rj6=Sjx%HC>&I&O5Q;HnfRhZ3~#j);_ zk=5_4cLDJ$ht~0tRo{PtA>|sq1p7rF?;tKIiq)-mdGjoEU&$&5!w1_~#Ub$!4d@nG znXO1JdFttmcSm^kYu6_Q0WZI$HlY7cMx^H~=Y~4x∈!S5~|Eh1>K|ts8<2bnae?X|T%dA!sX7orMA3Gs3zu!DAJ2|rt#6OcC_%F?x zp$z`Lg4?M0hdBPY>U$A1pGf~BS z@4~7$Dq%&R{`F>>fWJ1xs;Clk2PZ+u zVqZoQMt{#A$e~DQ`6HfvpR+m9uU<<1_YB2%S9B;F>tSkps>R{=$8)TFd^n5do&2vs zMe12(E@dvs0N6tPFt&0l2GK>t=BoXFmzPf_t*8(MCRMS{``o0U`o+4rBZ$|Flt{;O zhwooOOVb$({rfyUD-oBPGPmQ;-iX(6j;g#^brG3wKno7y?=Iiyp}m1gZ-Xqhh?=@5 zGkFT^aXOYF#-Rp-82Sr5z+enRYMt++aOK;*O*T*(j4rU@2x>6V0INGL*e)m~VE{+{ z@;M9^&SRc#j&tF;a@alLbhxp#Nj$A$J0(h;MEe~G8jTHsFm=qg{eCLvLvvaF5O#2c6HrFg$Y&O;ndC zdq|uy*M7D+rI8V_C~OzbIxIBl1F?8vUx}g%iMTo+aYzy?5nfi}|5YDfJxSh$&*QtX zZi^`RX~@41mLwT02BUkTA}sEv;55HXQ7_Ck<%fH*#?ncjgeBpXW|r~v-_23{jW;TE zp_zNmj!u;0$7~&v8qqihkbsb?nlZ`wqpz?9T)sdF<818F3sUPQ;xFi&--s{2Y8=!v z!}z7?hq$WvQ4cleB!FoWO?a-peJ*AX2V2fIH9mmC<)x{PU?qQT2VPvtUuZfB)ZM3G z?NFdO>O)PjvAt;aMe#EfIbrB7vGcQHZU%R7fXF@R=N(}9-mpGf)~t08)j(V0WbNQK zntqtK7sAAd9{fQ*jbbGX-PI91hhpaYJg11GpY7+l;B=|2vscQAaO`SH|CHT)EULWH z{HPL4^;e&Lyxq*HM4KS_P&$7F`NPkkTx0&NQT+dpz?yavjogYVjG}%ZxuP-iXd6t* zkq~)RDZiZC&ql9T65;?p5tp@BztXaSl#5l;tAmQ$dZWEq1U|+Cg^q;P(1r?WNpnss zXJ~I|7R*w+V0VwWnu1% z1m{~_`iPRe@y~5$yNbbkH-Lbz*bU!KqAcwW`r8KWq;j4OZ~L!vWkmve3J3vZ2u+)1 
z`3dV}%~gr;>$HF#>Cz#&Q|3u~d^)=TcogTuVzYXkfcSpP~oaIyu8C;4*W(IL) zf21KB*M<>$mA?c3!FUSn39HuSl00`xR$QNt&4s9bHk7-yNIgK%r z<}4qsBpG;df_v}{9b4lx*OEZlP9D1Au5ef7Fs%oJ%DfKFL`tgWlo(X?ZJ`#92y4^1n@|a}kEh@Et%$M-35T;24R}-CugR1z(3$=fWo(Zs=as3N3 zB)0*LS`lFGu3uv3d_aC~_+=#ejW7t)blQNkZK56Ktir*T{5TNoNSpDQA^&9iL1ZD+6cPqL3k2ostjx=W#lm1vqFOnk!n~?)dn>0G36yfI}18s zOPwtp?3M>8b$$rNdoUjWEL4d?-^ca)8Rvi|;aCd;tJ~IA8(v(`E29fQFD>%b%}I$w z9zuRJ=zJ`%z46Xz``6%+_Xwj3z~J=M0u((v%!Y&QUdrYdYH6bag~#n+LB`HP_cxL+yY}2$iYVrao-VO3TrDn}M6me4_sIQQLPxS~jVPZVS1C zs|!;^D)ogCmRA&-%4E5n_kfLqdb|+ zxblxZr}Vj9%lF~wI&|8KM?*{^)t}$Uvzj&QI|oM*$AL?`dj%cb&*nHiaDU4y6_FEeqkB zo35VF^yQNVu%HO&-}MtK{Z>Bq;HPw)y&%uq!hc;1bla57!d(_&_R`u-je(*)0GczE z%9GDDd^>NWNdGP?-nM|VVB&c)4b<_44g(MaXp!(wfPpK;!CuCz3^wVO&-oqiq@kHS zfRcQL*HM$`B1nGpX~3`Di29LlD&8L=!6KR4R7X~zT0qBrQ;q+xuKljYNpAs6$Ueu2 z%ox;_(u8dO2{24dE14@_gD(I6#7r08Fb{*{9APswvAMmPNG*%m%b~_pUldzkw2b}= zhY(0-iC7uO55$6RiGefPR=%pF_gRJyOp6nf0)HfqiFthz!0f%hEpFZrkE3Dst4Y1N zy(+6a*{2?Bv|H*EkQ&buaepz?lbe;`LArAa__hg1MsrXOO@PkJq>A`(Mwcf~FYiRG zB)wRDWB-NwgGBb4b+|pz{)B{(OLvsSBhxsB5pCRUPJ!G}A((x;hm1CKDyR6AhaT&M zOIu&hJJ8SP9rlEN6K%E^)1OkvM;@0B-vS`M3%uUG z)1u6_s?2qRXNYIEP_UY_4q^|5p-XEuB$JkQ?K!-g9-g2T!v^5Mi%ND?2iX%nvZh03 zy79W1?vtI1QA^?-v{BDGHj;$l%1nIvNv)f-Imo^_gA8cm8%!+BhtQ!d1;Mz|S_8^J zpP`v^(K>LzMTZjC3SR!l9$pm$(LgNpKrH4+hJ{h1afz zo{i~+n>zWB-cIh9g9$$21Fo(;Hcl$jc-_2Q3u%Zz7nymaY+z+aY{l?> zngyq^3D4ltkeim*)w)n|K|`u{P4ic3W%4?%rHQj(WWOcVbZIiax~;e2wdVu|U^5IS z^w`b*{cIX=y{HH(f&oD0UFT5eco6C9OnnFmNq}!}Z3J<>Xst3djc>swMb`EZEy0l3 zerPd(3Ei<-qIBT;*`LGI(0|pS5|x?}Pp3V$k1zNSe+AR`&lTV0LVgbXoGMn9MD=?* zG(@;n2CeF((6l7M0WhvY5@+YrYU|_^-yzZMLVR}xj$G{yrQ4#rLd$PO#mdZ7LvrhL z>^@=2*vAN1bvSn;3RBWVMSvRT|3janPqp4Y13fv3eJ@a}Nk|sL$P<9a!MY>ONmbx3 zZkQuX#bFB$JdPciBj9wbO2cBvfECt#rqd%y+G8*qg`~8iQtp?}{Vux5iSqz2=!ks;4s1`4j=ER6dhNc2ZCg(xeU(9k3O ztS(~@c^zjie;1U15q` zwo)8mF8rc}P{?Z`&fajtj&pT3Hf#L3GI28&#nQtqVJ$>#M^vk@w&5ERCxM5D=*~*{(~p?S7eM%}uD2DWg3#rb0qDDw`biiid%4T@#0T2?ygCAq1T*x)SX4)@b1GqTdPg0$ z@ge#3D9A%Yyz(;gl*CuPVPX0i8%{Y%nP&mmFQp8XPs=)UZqn1-O+s}!B!8Q!%Q7?+ zBIM8{y%e=d#`b5tRkW8wL5vxgmT7&>L9)UhLEJ)KJ>%%iZ|KG z6E8J!G~VW(2|JA)LvOomuZy?o6meD`080HS03K*UcFF-sMu=sWLZf=Z9SoA@Origr zz1(~*Ph>AFt>3>yy&fON?$4WKL&Ie4bZ(V-{e^MP2osb;5wN9MnFbs6V*KahnDWc$ zDAbl1=6Xv97gyH?*`=Jj{(XIQVbk|oQZ0QFgj&?ofIt|}HRpp+Ei>uRq-Gkii*Om} zuf${WUBz z$AC%asVdK4*-ckx>VnprAh7i^Oi3LIcV2C1XCw9?!y@tug^Y{59vlA#4I;*`Rrccpc0`7SH z)L%hOHn2QH6ztF%47CqvSRCG{K9HUPzQ5HE+{TRni!dPRMC(+{o*0DI6arE*+i6pG zer5nbJlF}^GXvpbY>ASZI+0EsF{Kx|(aSHJF*0WX^^a{RYt%SoJMvvy0`H>&t)y8 zuNX%!-KGSUX>O+UA}?g`{Z*7fvP6t*T26Pq3Dv$L7p9VO^u^Mv7>L%GF3tEZ>8` zFM>6qVXkY3DTy@Rz{HXnnKcZjpm(6eZK%{sYDr1G@c^^f0%Ts0(b*@_pyKa9zINWL zXe(B3cz`Y5?E`f|(5e|DYAPm+j%w6{Xx*~wUNT<3K6kN}2aw3;lnxw8RD0`l@dna= z;xxr{6(VG}>hf5>(1#PJU&9K_vtGLcYLx}8lc%znB<>-TxQBu-;TKrrK{lN!H2*6w zf?d)UGz%9Y9mme~)>#kSB|c8~>R*N=c`eHfS6pEJH(}QsT(Waa)(P{b| zNqBPOs--{st^|(M%%fy4%wn~E2WS@BEbnXFynU(1kSQfo({jXur13bwx0mq+Jb9vI zT_3~WZS*@|R}M;tdF|n?C>ofEBHCD`7pql%zv7x~9nc%+^U=r*UvZ5)IK?pM_XfMR9fXM#61!t zxxU9#8Kf-^Z!ym}oY9Pl9-G+8^tIvpxYMC#jqCIbpOzq4TPgi5Z_^#IjPBcU8YWRO z5rV+gPAvQeq0$o<{a}7_Y45jSftLRrVv&(?T)QyU9%f9(jMk-%$7#94EZ+uSpAh7@ zg7kUo8dX}LKys^L3A8c$_opMn#i%e_4;%ZNhE0U<<1tAyRCn+6n0zN;6mHG*sMBK3^U z_D_E6!oEI^X|*n2)Tw`0n6s$++8LTzvZ*kqnHmIpY0JTdZO9~G1Gp&1W^2ybKEuKx z%RbWS#M@46m9Pjx0Qof)f1fy^g_>ov>mRPHU*CR(ifqW>hSJ(e$+1e`*Osb|?j09F z`CA?|`}CjSq$?d?^~f=aUl~vG^m{^dqAqySg{2D&&7`n%Xp%6IdD7H1Wf51`dB4a^ zE(-g1@*5Y3LrP>W=5^gl*S&`C_l(G7avlSHmwO3?H(+0aNgi#4>a|hND3TbY|5o-| zU(=s{LO9!47yKwY*@M{Lj8vt0H)2X==VG@8C*~Wl1f3()gNvu4(yLb!Z(TUpyX(2s z{&fBd80J&Ja8k~NP@QUmQZ?pLE%^>Y1CB|$g>VPfs0hO!z7?r72s2sS0%b+s6iE&y 
z1z43wtQ}sNQYW7OBLX>Um8OU_-xXUA!5BuV8~!IPh@{*JsEM65SOheKQ8D^~l{J?^ z_R`ux}XYxt^DBEdJ(I~bg)N>gnX5D5Gx z4*;KSnXq17wDdf3d4&Y8aG1#Mk=dc9=XXHP3%%kBg2E1Zq5}QHwd>K=4DAy5D%U`V zS=Tz{@5E`{k_8Q}zA?Jdf?05$(c(-%t1xikkQ{`{J04(tmf1x?D}s5^!46Nh+v%m3U<3Dwce< zWZH0E9GF$oSN2)8_C>YN#6Djc?u{_>)sF5r_O)aB@*~+nYp|B~XSxUT>U?c`-PEKHG5jdrPN)*+yRg+lKGHBj zbeKr6ylHi*ssvB|-c>qtE1FFC6x;bkyLZ_~RrPGhn<8TV^sMJ6;&x~-jGHxR@{-@d z#&QV&fU$i_K7u-CpJrOa3v^^n*kaS1xlxF1PvE1&2oFhTUG80F@h*g+Kz3FFgFjF~ zN(ELnw7ho0awsI+L^g}EG}!9jW95^2iL~$#O#4};!NSG~I9Ad$`_Sl@gRp@_83tI~Rm>+ql}nNz~k&W?Y^6kHtX$G7po zYGs$D*kFt&3C;H>lYb^tALj8=ma~%Y88ZgbIKwoW{O!2<6MN9pdYu6@C zmovPuF}C`KR@ZKL45*br%mPf%t&QJ4%@7uPIZ|H8&*M2&O0HqyT-PH*LcQ1ng;IzD z&u>D8OrGbEE>weNgFR#k`#@!-y@EBy1JJl)>397PKK$4B&OOk9rLT=1m4$g)OE*!kI+1+b#%uiE$`QA9}7P4|H#xpl1|(nlaI3w6G7dG0|e zcy2Qy0VtLKQsywWD1UPSDP$J(zH`rFD^hfJW zTD?2`UKnAVl|ganV8pz*lqTON)CB0)>aqZvWZv>#dLMrZv8ei<7?#WmWgnhZc4a~OomI5lcy_VhRj9(2gcfotT; zoVYV-;_;RJSDp&~mo5nJ!d}1soDoa{%d_-L2i)@MH3mCQ7yf$CCrg5hw+Z1^vi~!m z-S>T>pyuXFhVcQ3!?Tn(`h?TqH0Em$h#B2!0OKFbS<^AA8*a}ZmJsAD|F@EFxTce7 zJp-rWs>SQ5HdRDDGoGBnV-9SCaI?AAxD^WwYFfH&B*AzMu1fsH7a3*=`?nPO1vH1s zIwa4KbqawnT7r0?Trzc0AAx3cFBKvr00T)FVVe*ZhU%$4@y_p8+eb;GXf$X zAb5|iWsE!n;6O=p`nCj>hxy*=N$RHMZWYaGac^H=8q8ztNA|I>&&p*S21H@tE1jyiP zs~aQZhUL3+r!^OzN{zuRV!9;xm7dA)!)m1h+OH5ayqMw*AO(s5g*V&Z=jdzFDNRi4%5@Gh z^CDT^;;zx+d~-*q>0c~1lg`SED}1Ky3{>7h^IiCbh$SGO2v?*U>&JDTO@SqHWjcum z<-FJ!C7Ps29pJGSu_^XbMXC5(wPn0^2lq>8K;d1BdK1Jo=_E!|@Gd5WWTB+Z`! z>!K637#h-Q)#tAwg*}+J=;9AM8=Ue?7P`IDp*t7AE7``7m2^UFQHRvexU}c^R?v{m zELTHTHJV00zwRw20j>u2+bZI#W&lW9j)%-~W$0j6KYYO>mMX;=^iaN`djVjH@ks}i zkGCu=JygaI7PMg|d||AKZ&X02R=e)RaFj}=G3E1blmdAC32BJqp&3Gm0;&3e+?>C5-J0C zCjzv7uWAYr$GSFAVC$OvvO&c6ZM=+KiUo!$z3`YE+d{TzD{-cP*|Gfd{GEh>`mZkX zjaPTX{%;y~%6QN89Hp{ab`wkV0aR(YlGE zpOAba*yFqP8X1DZE!NIfqQdtD)A{UY@3=<1)Uw>5#{O< za@C}Z!Uoyf?Hg}_Hs954i=H+$k=i-))(ihPJHyE}CodIR38hN7_8QQ#!FXt~8bJ`I z3fI+|Gc7WnjWol-n8E-weKimUxfI^GG2S$#AWV2se9{95kQt!dN8+&)eGip(Mo23z z0Zu}rVI`8aE)_%FR(+g+-oud?&hxVNFZDD={!PcI(68Ph^>7eRT=vyiA*`5}JW+Qo zL0Gi_v)0~={hOCZ=0byXMtYaC%pGi6D&`G`$ki`dax)RN5oG7r2Jwk@+Mx=@l(Zf*ah>p7@g+AA#PC?l-6y&wV(JSiz? 
zDb+T3^6!DLHZ`OkD|=Wp+G0-HO@q|V+Ei5UDdw_Rf}$96CS3}56aaaFz6g)m3x+%}4Q; z2=wf8U5bgEq{KFnCPKWLxq8JQN^_8E3w4fOk$d*2lQ90HJd7#!Nys~{dLB^uRDZ1i zy5!Iqe^cJkEIa>#0bBVpBkz^b{_y!~H~{0Vv2p&_GFZ&H29$Eg$3dDt$po6{7^!L` zy5uW6l$Oh}C5b?!v6YOBtZh%f^U@a+d>6E1pgV^+iUv0$I;-r%7qfou-+MecS%<*j zT9K%Hv)&Vpd&y^ev?L2?1+C_8=$f|Ee}QuL0p|Yq*8xfsp4b||cA^NIe#|RO+*=Vd zjt%`A>teP*#2+9cA33R9h+~KPA({fR2)^DHY)7hhdr7*`o$hA6Amq$)bfF#RpNpcC zPE^-~j45Nhw>*i;wVIaJ6^@Uy`J{#f307LTwpab`&+^lAIGQ+LEx`@E??k6Qfmzg>{UN6yNXu$+usqC0^~4 zfjyEbzs-1xXE3#9V3d1VJq$?KOu>$8b^U?<*qsjdze_6JuHbWm)?UO)qT>#5p9d?L zJ9YgWNo~JBW?Z4}bfu+#FreLbE4qCJ_KZVBRIMD{x z^xTWLD~)G)N_5|9ZqvO7#kl>X);ZzV*wEt(lgPn0`}QJFM9Ak4lJwNMS=j|5YGf$= zxGRDN>Vq*m-~~tnE0N%f85l23j3V3v-SRm@q5-?vQi;XlKk*lbFi3`o%@=kTnQH{3 zy3Gtiyc_E4E`}3;lVeVQFI;ynmeN@1cK@lV^+mm81ucrrHy(%y9FW;H+@>#Gp@63qb)c4LqOEklF(oD=-GGcjB zZ|kt92p&E~1g9XaH=1JRC@Sl(4xe|=T7-a>A@aTpe0{<>+ReO^qC z19_o-qpWDkkF1B8#)74AYa=)dMcx5PL~PzE_QnT2qsA#T^;6}ttN7WBa&m^jMa<)W zbD;oGwqf$MFE{x>|HJB1B)n^K3}yn#OzL3F2+KD3C$j`GB~RU$beRkiZQ3Pke9^pd z#t83{m|0bb6HuM}yF}&tQiUz@K(g%<`%oOVli*Ix5d%EXpcLv7?rM{+Q(KaBTbU zFdGdaDGA%PqyGTkb*$EbYUQnVRdI4TwvB&C|I`dJ37-9$VeI7{x9&9HY@?8+xVWGX zlm`eP>`vw_T)6UDcw@fF2m~SkUN_`9WOud2e4CY?2`B$Utus(vFk=h_Eo87fG}&d0x(fj{N&C zusa7sG|=^n(;k-M<5>G-f0I#7U?|nx#dpk1UsGf)dSt=%>Sa?gAo{f9txm0s#8>TI^s9`X-vGQ1Q?9GPr> zAACKcU+6HTTjnbyCVa#k?amSnUwY&EJk~BJn6O&Ma7@t^OAv-bTS+A8y{uRCI-cSt2;Fl$+vOOEy(yRrxf^l5f z<6&`DEv{xE1GF!6yDIssuogYWyjP)Q^Zk$Vwm0a`2{Z~>4J%X88v6sQMjnBCwLg>; zr6ia9tjL}9uCC@LdXFj;XOQ(8nT~FXZgQ1j>EL;2Y=JKfme~GVI}Z)0OiG6)FSipG(4W-IWF|21sTVmR)sS)y)Js^Gs2!op;+|=K<1fV>fmNNH3ia*uCslx>u ziBZH2I>s8xV}yxTVXOJOe^alib8t#n8GC3Lp+J0=XO4SMrrtJ`2Kp#MEi{@>&^QRV zK3YfrdD^KVWj`Pf=hjp!)y?~Uw?F{I{n?{ITki%d0H(z+SCEbJ5}jykRSPv1H6A8-#F@Q-njM?z?>A0>nR!GyqNrR27?Rct${8S1eGrtui} zQPxw&KVH{$5Dt9whP#&1J!ble0P-6*v4Ke}FthyuHfm^G6!ttaKGi$npPxtD$u?D( zgurI4xaunO+Y3lBC`TuUP#J!nrhn*TA__H~h3Xr^ST{qt(>FTHjKbvYS8ji?Jq3qp z3fT?*&=P6$T1A#A;;+sV3H#`NbOE#@^Dm`>+MvJZ=4O&}!tEa$2hIKoF$gWFYL9JNB;o1DeDq<7M zGmqS$hWa4c^I6JlEsX7=@29^X3H8z8j*>*|Ll5w+VfB(O-LKQBDqAbSx8 zyyNlLxZ+@I)IVfJ{q9~^QkF~Wc~ywLH!$A$&3u{Bj@-&~un5Q8DL^ ztGnMww%F}3X=Y#E&Z@R6#7Cdh+ZvnG&E#oGxqS4xe>h)@kIkRMsGdw( z!LE>zmSPv|Z1zC6zeu!|*_BXpIfsSqY2u`P(jPjwx6-Wuk~ zy^}=pUnGIWdI3PM%{%k@-W_A+{tvOw{N4lPUF$8rPV7qipag}W+4YrxFoeF7V>3E4 zD)dDz(5{v$Rv+CB$FxfzDew(tNM+T&>cXzkyMt2SRJ z>lOI&=u!k{@jGT46M_DGXcFBfHP%cmAzFNi*kbXji4>Y%I1AA8twT?hbNwnyh_Iq- z>M{~^mEsWT*cd$e#2P8F*lPUX&&Mr+!jktpxB&O-n}3DcpOZ`euF`8GO5QH%prw9Kq(O$OtiJrsJTYR}Ha!kfp0Ji>sZ4TK zfbpr5r>*p4UEBr1CZA)C-SMZfGbW~=c}z4%P6DSz+1ziI=nBF~_O@@TNQ$Z0bNu9r zCcc@l@xjft{OXZEfg{wL-x}DN!$7Wb&RIP3K8o?VkLuCbR9x~brDclzmL9$8P!Ux^ zkRMHM3>5*p?UE@c^hmy;-+E)X6E;Oq&<*COqq?w=hFog)t7d#o)aDh(AgL5LGzcN= zinU72+EGzwN#x<|gQ#o}hY`9LJB7FPoQnNoo@HPz`m(cGjok)7Qb}ZBvB3BKsAua{ z4CzoruWM6dsOwH_B{oHR{_P^HNF)P2)p>$)POpK0mju;TVuB&yfXtrz4(t-x%r(wf z3b6(pYf(r9dRK;y5bkgiy zG2WL|!_Uold&Ysp#F>%Q&;y92m+ZXMBs_QMo0tVY4f57h3ZUbg4(r0^cA+ReQv&+v zwFw6BS=Xob>FGnAET{gb#zq`KRQB1+bf*Bl1 zF!4Gu6YpV!T{jKbxVbCexDjd=(^66>C$0G4kj_JYZtFG zOgw^PaCz6jmZENowMt#N?FFKI8jj4YCf=R5+B6=UN~Xp*`6Jk z!#0g{>_(DlQ`J`wvF-TR}$NuJR`rYw!Pn|R)85%qV5 z?o!>lQl6aw|<`TQnxYItZHUG|A>OAr*f}58442aC1v@$j0TlI{tP#%YcN7QPi7XUbMU{V=VFx8wh(Eq?@s(~u}T?0L9tOi+J_+% z+i+sJ!;IH|tcaUjGVn7NYtG}n4hhrNk~!3qQ${qv^;aOCCV&bFzT@J+O$eUX>)IyFkA+(X67IprOfR@-VEa=& z8V9O0)kYV>t<3bIj{MpI;V!%`hkl!qz=wTG@Y`K9-a7J}19Q|>E;Iu`g%HDB6Gsa! 
ze6T~vWQz&tu|;qHWYtEmiAV8FbT7g-Bf-N7g9G5y?x@8P33^k%rH5w(BS`wb$(1VNKc0 z4HhlpyAYIP!jw-cGTdaRPaBk<(MVNn-}{uBq*JqWd=DO0F16S}0C;1&-tir&QckS zgrwAjN(y&rdw6fXbfatk!+|CH@S6v9lJLV{joc>MFkv(#Xv#3J4G6@wpU`Zb7NPiV zOfuKy;`)AT)EsJf1%Bf8yrtI0^TtOkSUP#6PRJk&{&x)ZefuN-rt7KFnG;*PIpVib zgEVD{tWekC@7Nw0XuqjUNCj3AQ7f)}Q(sbvnh!F<^j=ds%DX@byEO)g4TL zu0+Hw|bjlu3c>26e#*h7SWUlD=vQ|74{Y8hj6~2+nvnzhnX3DT@2- zmiZ+nNEc3*P8H(RerggQ_89zP)!^>hN8j^)FY_3&%b_lG-z>{_6N%G&Z^W|S$wW`f zKqVg?YCU4M9Y?2Ygj5oGT?-whF(vgRipftZCH4=dCAv8xnH|?|U58$8P%-zw0Z|@7 z4rd7=Br;u6j2BY*w-m0`5bCA9uW1;T@HL>B3>3smuU0^b|;SOMVs` zGfV%86Wrr8$E10(Asa22goPhSJQ4a5RzXSfR?(7~-LOfBN3WZ057L#cegn2#AaEd* zRecFj?uXe0J+(!B6`5vIyDeN}%z{Y9JhZ*3J71`q9-0-|30)DTb|d(sB+oy28$^03W|i0 ziL=A{pTkPzE~n&rtEPTl-P;AprR`?{b_Jh%7X*6~9WT?X(fQi2(yuq~&IsX|yDP$C z{4Nm7&Q0UH54KWCiP45bc7LIkq92Oz{-Na?sB4`S#Vi*4s+(@lZ{BAj5ySMg`UoE< zeG~E&Jpoy%69iR7J=~7F?*RvkVw5rl_I|u!@rp=XQ#ah*bC?I=dwDiVUp6nQ`!MiV zaoTXa=sy>bMy1Z{;`{rWy> zOdgSS^ElNdye0c!va(o9KLhRFhzS1uoOk9bUlxq`X5+FtMBhQm-h2Uvt03BteloHs zQ)Xl39g+LVFy}r7VvR{qnrs-1oD^3}A94kxRqGDLe*~gSD-n0*^HxOQKv(M;Hq*qD zwm`)99N?ICDJvK+mH;T&}Pxq94pS!)_zt!!Z7WeBD?eY+Z*R zkPY%m)c0wDtKkz}590kSKtK1kbwfMb{PRcRR8T3@U1NrOr?=xiL)5}g(1=k%Eim#^{K8#D(2aa(%a{mRqY?TqoF!$$jj zE+fzT9R=AX!62dwA4*y<=XLD~2n}<4+gVx=w?b{XC-pxGxqW%N?J2S_65>36eV(PU z?B_FQ3m^(qnzFsI%t({?=N_!s(4*NCk{1Xk8(}C@fayqK7Z+V|*!@lMZpq@&JS>&_%X$jx5 znfQ8g`=-LPC#(N@|JDaq*rPQ^m(&=}heVg_5^@E_h`tuFKL5yjeQ`eS4+qZWOr)!i za?%~zeE6xURA*to{d~tt^L!UtzscWe(yd(n(vE^CY9%fd=QCSG9i8oI?|wn4G(;Y9q`EjLd2rqXu07B)-GoXtMw#z8=wETLe;j+ z;bFdVqQ4|{Z~lMI4C!SoDBA=lf!~836vZiu($cvC7v5_K8M-;8tH9LYa1bXosrkg> zPJir=lG=qO?-JsLRSwiQusjwhjXkCCz@sl2e=)B?k!Dj!G$8Kpe-63O+_MvZZeS2D zdqkmjhg`f##`r9vbU)WNL{m;8=$yUh$o;@?j(ya#vvhed?}INjqM2oqz2fQ>P$BPC z8{GP&xSPqF?SwrAI%)?lYa3~vKsn8BZ_v@RFa5uX5e+t6`?f`zriG*%Gi?@Ll8cf- zy=Yy!K0amslDtX^_*y-=zDN{@_Zpe)U`&e`-3wIZh@ubggLk8+9JyhIwh$eUcp!-7^7}cHQ9$%d z%eWtJhhalfx8_|?o=D(|Jr^PyEaj!~H^((4%dlLk7-3Hwfh!_~6AyTM9tU0N4u1YO z%vz=i&5kjP8&_LLQMkuOSLXs`$H6YnSnyhaLmU=x;+s|i+Vzjn0a^Qc(cGVrH8H$nzdNCGR=1|ih&tR_ zlG;alg@xcoN+oV@yQ1B!q&a9oGSj@^otL98<{de3#K3X&?gZh2Jh!MpA_jX?V}e;# zaj{C5Ly?Xv+8$DlgGKoE;905S5S)0uq|ayDay z(<7@!SkbNk2)&_;2=Y#3m~u~G&O#IGCSsG1B_e}?5yzR2Tu2W!-F8^dGF2>yJoQXT z+28&{1&|0gECnEGWx0ArUeF(@Guwj{Sc%t6T#U5&i8xgps z{tv-35H^ro|F}Vv>|C6&#rz5{>z~YrgOg!&ZQg1f+_x+;?fj-wi0HLBh!5x&A0C3w zrpuwE!vE8p$&K76AG{uq<+o%Brm%ah53K79{fcSTP-&&G>&G^Kw zYEGKC#*@zr1y+4Ve?AIsbG^qM7Q3n_>}OdepvE=f#{P(4mQ#~Hz(+f$SCoy~3|^h8 z7w`M35p#{|x;XA6cw7GDnFw@sM_eY-bWQ3421sx4Or)g&OxTTo>5pl6fZPCcx7bwT zRVL4oIsF2Yt+qeY@{tuf9-}#8A!mF=cg@k}6te2|Zw-@2AeavPLrQHX?&OS!8R{e9&ikK9q3yA?K(IVh>-+|T1sAsfT?PUa*}TPLdx zO8~`bBA5BGBeK5h7+pLXupVaR^3Izn?Q)vBz72Pnx zHILwue$0k-h%Fn-v$|27>aDsOB(R$L{mShq$~QX{_RU2-N=-mC!6`rXFYp5Cad zu36EqQI}bZEYf@WVor{*pUt|bNHTQ?-64Ye4P^B-;MQE8jL!18Z=d2K9((2V( z<*A`3rbZ~&M})3P%GSwPGCE6+uT`%;lp#G&S=q3p5qCWyjmms{e-pSB*cWs$@F?=7 zJ;N2tF;XDY!ydE7=#Zk%b)2L<88c(q=b7>Ay->vR9}TTyn8{kS#8Y@Pe&=M+GUo$H zGKFV{soCdh{D6LCZx^7Z{WqW;Y$}X>NNAuLE-C5#Zj2=ah1@xiaH0%;reUC*glk;> zY~x_9%}gbVJU(gZtGmV~c^eMh?j#_cR=7$7akmL-fE7_rk*V-HG5sj?mpvZ45m`6#5FeE&BfU2&wJ>EAGk_S zfOaLjX_w_7XNf2ghCx7wfOFh$6NWFenQs0=NB1FoGVf;V!Z{kK2rr?ET1tDIa=EGT z$l}hI?kwavQ6X{NaQXw;wJH1l!qgss6QoV@H6E;mJ`&MX-9P2zl97T{HCTO^+e=6G zBtSa|C;pM~GJLhnf9;&A9cvF2yjxC^Rw`4xer057b5^Ta|F{H^4ltY`rs)a`40?;r zaB))@127;ifseh94NWtJR`9}vv1hk_g(lkG4fa*|L!Lk}3XxOw`8xMTe*J_6EsXh<-i+@r*U?_)K5X>2?R zHdVO|D=w|7B&wZ9M965Q8&tQ+*TffUV&3gG-T?KxEeZAeb8E>)ssJFLQ z!=0kWGc7gw$Z}6ixAbig{L5U&?(4WLd1xEN3y;6Rm8H9eDY$ue?3_tz_{BwUA(S`K zITYt&l`e5;p8~+JBQb0dMf^x!o|n?}O<@M&YV7b>{eNy0-J@*535j<&RWp_{s0|z> 
zuCPfLYXdZvFc9m$=zF4%9~93Qa1(9E+dBE$>*UEh?!y!R(CI8dFDKZ zjE5}F)8%P8$=wzV8hwb!0l2 z=mgB9^CB%bTxU!m|3wQ_p#Qkkz2X=y5j^V-HJz4uZE{9tM-}0&8|#lvOB70qp3dyATaBwhk|u(aXI*j&mZwoDR%WDWMd#UeWD8tB z4hYYbbET*!`j@-LZh52 z{@$U&n+i6P}CiiBkPT0NImKU8L!!^1%@ytP!ofRO8%^;nAO|QDlpN7?b2Vthr zKh`!ajJoh{aTIehqjdFH8ExO?B*#42X2?$K*mcS9 zMb8TL7x zcJ{<$^~3fzcP@@)o1E#a(*qBL)E3QD78u?h(U~r%P-D!iA?<*Pe@JN485IJ2D!X8? z{l@oYGU~^?2y_j}Efdhp{IPaRG+g^e0uOEdIQ`@oUHP1?1CcPY-fN;ks;b}06j z;QGOiMVuAA^u3;@Ng_!VHdvS9nA<&0Ngr3V1ACjr_bY^HQEyX}bzAn9wKEVoB~)9Z zX|W@}Kd8X6ZLr}j6pF<*Pw#0sfq%#0GAB*}uo0tD!-=}Yw(P!==%Fc0?-Q4hACT~6 zc;|C7CYJ+QIh(Me({kbF*KJz*w$RgK3qtT-0##s}^CtSm--W3_r3aF<(o}&c2GU7X zirT!Y+PT{Y9Tr?>^S;PB@Bf*QRmubd+3GivY*MK$x`?2ilC)l+@Ei z)N3D534&EY{tis~)8G^J3?mUMJx}1sSGcY3WG|Zbc?z>7qRxawh@@z)Z&Cx+A7Y&9 zMzF#S$`oU!i1$@-Ic|f{WPjbds4}1S?iv?|4vOk`u50aiJACIB;GVo8Luwzq*TqK; zP2$7K2G@&0t)^UEEU&aV4`(H3+vPV|pIIB5bjOyH+iDF!OK;>KSaO$8`sdIg@DEZHZFjF#YH%(2-3*j54XW;PTi}8}cMdgF5l2U;Hd9e~nc>%5Epbh}ynb77t zD7R-mX>-R@6F*!e9O1EUfh5wK0Paylp)xVjk6kDb$OglraY&^+>TKKelAMNl6!Y{a+pW|OKk=ETs zxF9)A0lsC7qq}lx^0-us(FB&wWMB&O$fX9g^lhB_S%1xjoPWd|duU@#v>XIfeszf*8_p%x7$;((SiN;k zAvGY6OIQ=N`<-KF(1U-aHYP)fi#-!Zu8DYhy>z5h9|Rr21u+F-SYc*k=D}%B!c2UT z!i4R1DLJ!-rFi;@(D@R%iI>-VKa4l07(mUt*MhR0&(0o$M*Qg1kkg%zHW7bDm|n~) zc`tn}g!-fN6dlfQ4m~E=svQMI=>2qKTS-)~lKUzT39@7uK|&~yHX`MCA;isKQQZI0 zd`)?gz5h8oNGK1@Z;(Q z;+VErUxM-ZH!R8ByX-oi(4mue_jDYZs7}u~#puI@j=0CTX)IQIx0@_NP5_z9PYmsS zC!qDtx1_OU;y#rGnBpVupS!CU^C9nQlQ7FY)7q zGo~XOHI_EUf&lXnJE^rp8IT2rhviA4cb!dQOXqjh3Ow3`mibR4EQJ(^(qCfXvv^)O z&dS*`{)E$FT7683;!`sn$)(bQn9|{{FB3i@Kq20tlHisj4LWb*K2}}$)0oCV*4=q5 zggZ;81*@-hvVS0tcU>%^H_1XJkb;)}heVuikRRHNLzWAAVS8*xT#x%}?E6s}jI{UH z98u8B?+eHSNi;*cufn=;@tzv{|7OPRUT6BL`GMvGn2=hYaN}W(t~S|7 z^%K9_ltK;$D9T>TF+x&D`PR-JEMDE%M8$!CaT}?2Xi{kD-VufMt?Hj%swc?KLW(l< zh$wj<9JUsx{(^4|Q);>AuxF|;Xs&!rN>n`f4E4+rHG2JH=$A4)ap!NjSowBgTx4bs z3^3!3odImx=w#G4QY_S!t;y$p-ywkZlOtztxHwKk$+^)ZRfy-dD0?I(2>L0|b(GTZ zSI?Jj1YLOtMvS(uI&#PfnG|Ju8d!cJBLQF$bGgL>-7Z3M#=z?!(qFqizTQt1JuZmO z=Rr&MlV=ty$VNn!EJN;EI+l(@0tOXS?y60!L-q%_xww`Y6kA!XgU8zhR7?WrBZ?lW zJu(A;Wjwj;9xP3vve5Rfe>&2!lFXqKM{=;N%}CwbK{P(=d-w7bIUjsIj6%Cc?j+QW zL4g#1o3!9JAZ>++rSY4#3uc;RnV7M=GvkRK6V1a1n-t&@U@js{DJ)NXY5uDkRINNP ztdg1@Z6J+_u`7wBL}=&UO~!vEe$hbg^XeS;<42}s8I0)YD6R}X|G25`JE^zROKh;)FgaazV=YkhySw^IiZjhx(s@o5xFYE6r$ z43DG&c}AZGYsWg;(z`Xo0w74KZS}a^d_{~vi46(sKr77=Ph*XH(2Q&&re-Lz!(b73 zZjI$-i=Elql+a*)t-L)l6K^Xh7O(VM7#`I=dP z8~POl8{@8ZcJ(;0m5Q-K-)wjSJ03>Oh6VxL|N7HLe!Ni!G77+Si$v}gEapnk$-;IN zewMJx!M576JJT|G29*q4?*{TnEvpE<(5O1TpD~#H!^%?2n+RK)TIb`&8T`vmjsD*L za>~hU?i2$lZxshYsw{Ok3xNynS?3{jQ0XnY8&$8*;$9UUi_&1}0+wGW-Ns>C&t6RY zvonjN%G`Q}s1vBi@6STv=qi_LMJ{nBN!u4Tk`Wps72 zPu69jP2L=+ixEexhatEQ?Z6=QkR+P=WWsO1g#^cF^j&+gL9yfatf9!loYkt&xBigT zgaVJSccYFgnv+U`@(>_={sj941im4x)CXoG{ukGL^%%MEBTQG#L{zEjQ1eN&UNZO}Y_$UnQAM(I1i2%2RNo|_Vb^O6etOJsX%r-2Vy^-j zda!~6mWOgPKuIXTAM$9}nasMdy1+cVOh~lLBiP!_J zdhmuM8gkOHs<0bh{D!@earhQnj3z#pjOEw zdmJMGwYz$ij%Zsa|Lf4mc4MOnl~SnLwKy?B$?56a+D=Be*GDa_%7{3es;ghcc!1rl zrOQ4*bKoq(v=`){!!97VD-XVoI*SX9%Q;-Jh~aMJ^|0(6;(z@dv|wEG)Z3Z8bM+tCm+={sSoiLm^&0UdddPj_R?>D7goQDBdc?`rcLf9)3x zT(Hg5#*xy_Lt^pPXujIc=+z%dJ2>J)!?qy?fT$CgA!e1Fd%zbZAHFIoZj_N7_XNM5 z2)xK$n=edDI1Tu;yt-?HS&l@Gf`ybsDN7h7kt$$p7)0yYFd(TE>03x*&z9hO9`V~d z_KOqc|Agu*A{mYlV+mU0fg_^%ZkN|3jzd3J>BKD?(3XZ3@*Z$3!=W@1FYQt4KH!X@ zmT&I;iG#qi2Nm zJknoPtJ~G=v@MRc-%w0|s>086&EK!Qzx6AH##B$thuWF>?XZEEuFZiM72NMH}69WK#82@XeK%eRy`4jp-O_M8o(tU_G zvMzCMK~&mWg%loKcUA|TepxW!$RXo?FWRl#_voT7nMWD+hSYhRm)9q~!AY#Z43V;_ z>|!ZuzJ>R^0(tL8l?wxX(l&q2P~SetQieArVB*##nw^8T83~`#lH!O2yqUK9D`djd 
z2+5B@b~jdwA-A#3m#th8#RhdTFiPN0l84{Q1L2S@FQ!gum!3R^5_hF1Us;>lrul;T zke-z2-am?OXb>08=E8BIND+nnW*(IwSu;L*T-~nlb!{cu24q`FVUH_N{CEciF~U%; zr+A!l+)@n>W!*5FZQA@42Ft;2XLANcxG0;8=1gGBi=F@9GAM42@NOB-!I+cp+>TDz5{efIj}lKbC!&=rKAo*ZGiFA@S0eoxyhpuz zjj=0FR6Yj)k!SK(bNf#4V!Q|6*o)lyJz0D@kIEgrwv9(ShT%+o2M_t7FkmZd

zODeyFWLA1z33)aVNR?fUyymVTZ&g(X^Lew~!R2EuBT-oZ z`j<869Y|pu>LI%qAjIVTXfsZHpdrou2GdK8#7Xq_pjStv4K^mtvH%Fa@zT&Id?WP8 zN~$!Lb56KEGD+N9Vj}hZ3invU!b4$>p{Z~1W7uQ|b8^H3R@q8bUd6P)K%y z0UTnR5Z_}a)TB|9sZXl4zphR@p_FX%_C%M^C4?Xl=Qm2SG5S?OA+97|9ESSyhu ztNQD%@lCp?dsKAj0TV(;**1+?@tKlCAO7IY z(ICz_it`b7EWhA7qFr!3G##AYK+^xUJ^<}ubFds$pQnMLswevlT2&jG_=Qy+*N$&ux~D6%v&l0h^YZ;_rU z8}+&%1jN$6=@^OqygY3m!|9`S=sMpYoUlbC?FOdmWj2Det0O1`FZn0mSMzRCNW&x& zi=b~jPMgxYk{!YxBq;b0G+mN&uY-~jip4=zsI_alHtvy0o|#|=b35RO4J`AosSoy= zcM%3HURdV5^xzKdtAzi7`H3RmD{IKVo{+Z!-hz*+dq`E}xhRh90(1L-KflUb-8@%83J&q6gnuJ_OEJX=5n9viYej zh;Xi0`RF+=b0o_52$&cyKC*MTyF*?6V*4CsWd$JD*(+E$Wxq|Hqop{H6T*Jl0xl^2v}br{1J^< z%O^KT4fxJ;DOYR{lpwbF)vya#p#2>6Gd{lad}9JCJS3z)sdK}Qiyi}eDNP0#d z$?%A|DTQ@3VNeD~jM+ya#^b2DFD>AZ<7KY^zB?o1duk6Rc%vit>h}{j(%N#irV5AU z(S-S1^O1>-D-^#+Qd{io)CjTC1i2_U1#3e)XgIed!n=Kk=It( zP*KAiOp^yJVqAxR!m0u(g80F&PT*Hlw@Wslmyqk9AspqUJ9}i{IG$ZV@p;R-p%u*S z5}y7ma&}6xOkJDiksA7vhx^*&5ijnPTgArUc208f(tIMej}WjlA2M9|KFcWZxx$>$ zhc)n>lam(8^&#obh#@&}T&C`157;bjkA{(JjINgG+)2ttK29w7#&}sjLC#(Pm%vL( z;}q=wHtKkk`FSg<%?~4hV#R~t(_+ekpvT?4IL^VJ9m|5oD1NSm$7b{xkt=P@V`zq&VUzdTXM=IfKc^^5!+l;MKp62o;~qBj6qJw`UCCRAcbupNl9Tqxo2b{s9l)`ff;A(ZfeBq2&z2UsgAaFr$1 z32+g47tUDTW|Yue@{hK=`fBZ`4g^^I)h?IIck+R{cbz%@zxi@!!MsD=Wk2o2_(aq< z#hE?_Ev&W$37O87<0=Y`SP`<=B6cPKPuz(?xdNhPN={#lZqAQLc=^6aPkN}(W#EL% zrryR91l`R2J*8mMBWA2E;7sdg9PP!G-jBGcDO(k{^x!4j?n0;Z?BQI_s`@#c#pD4f zh9^-d)6D6>eLrr&AzLb4s6K7lvJoEXkD#84y2m?@?>{*xj3xKH|GmEa*Y zjX~jsRU8uTG|wz_!Nq=kaTv4Xq`&*u!2fWp6x?eOb}G049_Ummb=AOe3x=rD#>oIM zFl1x&82|tP0003&n;|r$!5#l@CLLCZ$~Q60^KFyNODIL^&z;pNF0^IOTF7ohDJ@Mp z;74hS$9-&Kt~@%Gg?&vVwm#ibeWBgo>Rafn;xU2(%oI%cpNhLL%9vS(Z0mz^8avQmD*9?^BFBQv6KH@E`J=Fj^)OkVO0 zvZz2^sdNAgd3{gi9|>cXZ>qP1mG*npyN(oTXc2TPqAL)vpZqxUs4rH)P3UJaNt0D? zk)|_@@k5jGM1@YESJE(>-V4d`uEy-9`ZL7?$2~3eKJ8dxVc;=e{r}B|FJ0H{oU&7f ztiQ5E?c@>OBhDUpT7p_&`uaZTEA{W{WqdJvpR7DnknrWhb0Yfa z41UozFcXLA`icV6eLcVkgrTSQMC%;0;tXmn>@(jC*@WEuoGrxC`PzbJ_w2!Aw}(_S zSl$@GqIy7+pA~mVyS0&TIN<=Jw?L&0Oco#9-GN> z#id+%GkXc}4VieI1u@U!Ujq{TH~&-lJGaM|2Q8ce_Y~MZcPooJZdx zP1?*+{2i;nNf$BGF2QamKOlaSOhF*}R~T=M<1CD&hR=>{xf;e#cQm0~yw&0EMJ1IP zh&+%RtmFB#VIAOXm+C@GOjScUu5ns&`>S2mxlR_+nZ9(`HB~53rirI4N3QxP+vCp{ z)kuxvq`s7w_9eo^OHuAgcHjHm(#wib5oC#9jl=TwqCCe}ail)PrxI{x8cJJ?Yi^IF zd7oYz5Tn^s98sE8?mCIR4Z$`XeRJ&K|9CO=Fi?3Vwn`8(_(RErwHyHArQ5y5sA6eq zL{D`Y+k~^cP8)JzK;<{=I#G7R7%hNRjju_{vFW{lgupqT>j$AfLWCW>ZLsF2dZ{W+ zK+a@#gwsT#qy~*MYzoiSAkGl1Gd_=woOCqo& z^%8hj|ibW>qqe(!F_&0Kso;>-5WMd?IJ71SgU|Cc_8L70lepJJsTg6Fhi}`rW3`vlL^2 zDVX9nC5``L6}u52)-k6^WUjfERpwWDVH4A4)ym~m-j7jfq0%lciAvRAl%D5z>@fbo;?qPCZ+RgGNzpoYEf zz#6W#57MBj&2AVvP??Iu^#yw=w1)Z>BJ?c}8hX{h%8)NcqP@reLl`&VUG79HuEFKY z!%PF01Ow2f9$HoE+r!DSjiM#BlK@vKCp-@rl}^s1X5v%gfd>iL17lo+E|s{jW7kBg ziNm6*ofGWL?0Wq{Lker;dn#@h;lVxOs0JGXdWTteAuTxR)i|L+bQ#`6#bRLdU@W6% zXmkx0NF;o@8wLg@zvw5KTj-r0m6sK0xBq_M8g8uM^IzZ{djnNKZh*B;XkEQsP%Yx# zK1C%>GYOCxUWOwG6BW;yZd|jm`O^fNoXFvauGKgK6ZvuGf;67aH zNzknkPn7eZJ2mTRLm0LGQfmsX&*45Bu*=OK(@u%$8-e`c_>LW18^wSqlukCwvmI(l zq-hlqZUh37hx&Khf6;HNsFLYtaGbc?n{*HY4sdm38}Su>UO84Ot`b7#kK$meon&#f zINyCARjH`JYQvSi!lP}QFz?VPl69j_r|Xl+7K6I}e*zprghSFW&r6VoTkO1+yGuNM zU$Y*Cd&Shdv}*&nN!#dsb$5wwJv`tE9$JoMTaVvO^_)1-7`bAQd;M5+RG3t6dL3TUP8Uo5^EL?ndbW6fu3l|ci;JV zZVctH5JS+<2@lSVB`nR8nyy6cRwJF{1sBq4jl#oVf2{^ZLuj}^a7?o4KNgk`n;CKr zh0|)(7iba_MAyu@7F#;#`fydENE#jJ9w}h>j#{kf6aMGf%fZMs1wUZ`1QfJ}9~i2<{*{bD zDiS?H2NCL@bFN2E0C`8+lDW`!-%(U8p8-fgt((#Us<7^BicXtmNdFI8tYg&5wdJM) zIJB75^__ahQ|kzhI`H#OOu<~Q+UAn|ZKw)8^-&@|z|AfaK)BwL4WKgIg_Bzi`Zw67 zPI10Jy+492a{8>Se9gZ?qH+wU1(pK^*b{D?&no+8piNxcNDQEo75yzaAwZhLLXqN* zSu&XxHYl) 
zJSM3=JmZsrarw)%mpeKb0i%72tf(Ma63K?ddA@>`yO=u14<1F>MmFE}`il;Q8?^hy zkfjjUQvZ%ZYojtD^ybFYND-B`Nq|8QO=s?}3+uKSr0{BYC6@IPGDRTxE2z5}+OtJ0 z-`iQN<4(IFvB#I|nF28A`!rK-;A=@QXB2jmPnN6Ti2dFTQJFtlNYPp0nz_4p#wCHS zAaJH#iy31+W0%9rx?@d${*i;l8wkDVv2>f&N%rt`dD`C4eSp^O} ze=F~x|d!#a0cKwnoqzy2TBQ&N}93gDSr`JB+)xy*Ci0*5DEduKPH#Xz}6RHL?K>@wG z>z^b-VTVuy{xV#bS!D251?D}-tZU9n> zIB~fvo$N!N=(CvAE8g(DD(B|6`antWOL1hGOl z!#u8!XOF+$XB80E&!Y-Gf(z7Mez#hM0Va4PKZS8KaN9%I&6mHDZSCTJZU{Ep>J7af z7SjN~II#ZqE`lqZPbL%H8QM)gU`Vi<>vlOIHL-t<0s2tAVMLJ;$CDPSAYkSTl|<{OD9VO@~_-HjhjXhx8+Mk~g8 z6P)IZi}k!a4=yIvn;?%k!#-CTizc=C7*nY>LZ!uYSoVYu)LgV3b08-8p`5M}LFHYj zxTAmlh>&~~_||^<%jjOzx~|_N!qiZzj`!$Dw&7p-+O?4Nz;Bkk;^;p$fxQrpasqBA zb9$;->OK}0yQ&2}o-NwT`XrQ*4cPlBY*#GR8_HCrBEj}5=Le0H1A$^xT53`*Kik8- zdfoEn=u)V>69#X~I4+_X9CK>Zaak_}3S#uS$kjJbldhk5|^~_$htB(=EPBls6yAc18%gKN`9Vwi}4rjnMVRA< z2VqIL``7S$CIIsp!9D-oYUYrZ8IK@b*Xf|ds{HF@Lm?mDfZGUGg7v#A)_}0Tem;<= zG;T@3TjCXUNU#PD{88F#J9pBh>0ygiJeC-HNX=6Ll^E4rT8Y<%YoDC6Nz9XPS9@lH z=f5M?9sJCz@gmXXrB6RFinssmY%1S+7{kQ@4J4rQwgiBvf37dwjoheZUW@)FLPF6q ztNrV~9fj=&^$`zu68im4+@%B+p1-=mL3Z^-tV%ZI2WMff*dXC6)T#Kio&MspNGbQ(wq-oqaqAxXWuC2rC)#yMGDp z1uM%$<%H)LV69Yjr}m5(iM`t(71iC#NvdBCYu6sc2};FAu9f7&u3Z0q*8sx3)=2={ zoYlUgqiSN~Ddtm5Gj2;i_&CS3p9Hot#B6HsShDk|R!@r-d%eCCy2EwAtgIgKC}kSV@#t|2yVNT->p?L37u?JtiTq47Q{?AoWmzPhn5zIT`F>ERB~ z+rnu~8xWpuQtCQ|hjPQ^C{oHu!+pHJQx=jgT}3o^^=vG_7TsM_sjpY1r4=YQ*BegX zny$2rEVKjGb!gH|=OYIj!T7W|h#cfelHaJA8Tm5u)tMOHpRl56Cko5q&&rz3f=X(s z&3;R~>Z#zvoa%z!2mQu7vbOMSV-ia*&W<_F)k{S%j@+|u$M)I3fKOgPu9I7`OfuY3 zflY#jy%&NMo^0Klq)j>P;Y?8jd)S>D)8880*Zk>N8wgTy9lV`6{~W{b0r~d zv^Q@_d*tC**m1FQEImiJd(Z3w->Nh55K`tETYf(lcT~c6MnR2xMedkmeTj{~yqY{B zy^46Tg6c4Gtd|gG5>{GGhV4Kc?<7Uv@sF%t8kZ9kmb(ke#nNw34b^Of;}|tOkS(>1 zQ)PuM{t=ao)RIutt35-Ck#U@td zJFL6xIn^Uj%d7}{0A6pDuA{cdJFCG!TJ?-xvsJ6vSz~IZSZ`th-W$!7U)RfyxqAAF zW?+@bvQSAA-6YcH2e_UF*QY_iFi*aT*A$hE{e^_TU$`c0UedrA&XA*AcVuOB26t>e z3MT=kH4{4o9%2keK5E0gv9Fwv<-S=>A_PFEOMSsR&Lg z_jzE+PUfUOfR!9Cr<$-u^*xaCzQxw?foU{{ibxATzCM)KY^yxF?nIx=Nkt`+=&cKP zlWfDV8WqscR1Oo>6tt-0h&I4%ei_w#@sWWbwzE~z%)tZoHV<=z%gcyt5?oJ|1xcrX z_R^F$wk=k#;~}hMT3To)&7rBozbj zFZ_f!l|Sz=5(iA308Cb6e&TLW|HUW5aR`fI@}&g+sugI7?jXm9b1{nJ1ZJ`r%>m2` z$?X}0PILdS^;XU?kG+OL`Dc(Ej`u(h*TV!D;SEwgFpeRXan1dv(!*V-nzn1Go zv{Ibe>@f34Jk|ZmXXk$UdJXP|r#9L?_5>q3St)I8FIn~z2N2H12P#<;7(c;YM;^RI z{Eb^5p_)Gft#bnS%O7AZTWXa9B$RrMT)c4LFs!qfz!T}-GU@-3bFBR^hI43ram@%M zWf4()&D%DxKeSM6YiKyeqXCm~Y=g7Sn_2k&FlawBFvl zIFspHzn2M{qyBtLXcP$E_h&;D>)In0D%2&odNUcw`UHDG%H-`F8hGCDk?_i};XYEL z&?Y#qmL*F?lA;NL4itKXps9OpiR$?Y*Eq=Wp(sFNbFvO|A3_+_Gs`iRCD7*1`IZ{? 
z`|O~93>8*-UU43d0|zIdh=d1vq^NKDsxj(l%^E096R`*_A>XdCmsgfYQtL^f-pwk_$PHK2LU2^8iKyS z%Z2reU|AI9wfhMm%$DkQTB%RZ2fFB)H9q{FE%w7lL4yN$9sqN3&vYhki0xCKCHmzV4L>38HM1o@{t~dp z8#o_%RYs~tK9400001L7PK86ACI=??+>7%ZP2I|47o|0{!a%^rL0&LOWMSkB7_m_yJwmwmw|)I zKY^69tCf&tF>7l*w6{-}_le+5l9;HfK?&-0x#u72LbU%nXWt?B8HeB^B3Z zek&8Jw=qUUjNLXx05N7BPmbwbU-PVVf!$68NAA@Jo)vRbb6$n#UD^ z4ASxmKr_|q)n5c!>j-5@)$qeUeW?zRJHo8L!H5siJ<(vO1HTp_*NkB>o=NLxeJI?{ zh8lT31CW;K5$4kJUpf-)FBSR3*?zz0U0TYVmI@nRxdH2^=FxM;nyj+h)xG6;%H5m_ zo%6m7PIC6aL6bnC3-i(3!_h#idmY{e*F*yYs!Mzd6`|*C=kEf34?>jYD4JH4+%C(w z7&V|eZ#l)F&l86>*?`tT+H#zW`)zMyk!8e8Sn2QH6j-C6kBFIrC`*WQ!@SVVjbsB= zJ5U?6Sn^uCqriat_nPD5={m*y{TZ#S8#azE969#yHtaD=~ySLUB@o~5U@FxAJqqi^gK*_Cz;w>agk)#nw4b(iSde5@OGdU zWOVf8MbO}+P zQiE(q;#=I6g^D%FYtn7xK@8rI_LVJ=&57<%Jia*ZfF+F&r~8<&H4+GD?NT^xD@k}v zFBrwo-w5sqJc?7SBjEm7TLEj~ zzE-Efl__wtM@E=1OghA#WE|5e6~O+JQA=cgRIl?amW# zRf9bTOX+4BJL!^V*gH8dWbOiq?)4=0NrJU5EMZWX#f=OU?yn$`Rpi|RqsNn{ksOkJ zj**I*K5*`13^Vpt68+{Ii+|~Elk}P;TkRj7uDeNJRQHZzGDSJW)5gfFpCedzkpkAJ zr?3mW(c3`$6jn^KV72Igs6{evO35IYW9t-ENqhqabBIXr;I>hBKh!OcKusCQ?UgIX zYO(UKX;a~(8)nBV;HI$&KJPblFXq#rHsNQX*c&KnVg*wZTm3RRGP+{~H?g$fD6`mt zOLd~eXWF{lYq*vXh${PbJ=guv@5rq`ziN4@-cLuZm(g3hG%q>q=>jUKR29Tx$lK+# znZ*v3X^(ct8)Z-7eT&AG~<}sm2u6MY#btyQFgWB1w_?83ec7)Yo5clKPa} z*x!Gagj*rihTKAqfhZIm2X7-_5z*^qZrt@~XK_vhORVzyAd2$NNRlqYt#eIVyUf3eZ*shVbkSqg32}Cv zKYWP}_4Zq^YB(dT??ESF4}4hdP6F_)Ul=&|F-!u$&Pi%A?|^l4MSPj!KWDCjDfxH2 zIsZ>FXf5l|!O0cg6Fdc|Ff+?QF&6)dy*mJh zXLAkCM)_y!sM2I-f!;Qo#0$3Sjq*YUDk75}no&vzx2Y}ki#!49WpKWJusN71&t1&g zsqsuj7c_z_;|ize{?$qihkAZUgQi@`mFcvDEP$)lVxVVFJfNZ%uP-7Q@{eD z_)WT1`^qhyTcYh;TxPb^B18TC!EAy-e2oLnJ!}un5Eyv8LKd21UWXbGlwNjKpr?oC zrF6f4h6Pn~r@z)zCCOGV(HN8&rU1kx8ea|#aBw1Mkr-IaYp{Z(Wf+-5dxuMg&iU;G zRu1yGbqah0sf1Pj0IUy6u$B_$wrQhsQqU3Nv|}8HRv>7JsX2zwV!-k#m7a5g`EC~i zyM3O`vd8zr{v<&D8{^RNAA3}w=K2~b^o2Br1u7H#{&y=(qi|V~!d1ta+7R_YL;fN+ zLa4wKUxPIVP>*GsoQ@Y>HNqsq6*lWdo&>_3=Q<+?4$krmfZYYOVt_bU5BgOweW)yt zri{UkWLp-h?`G#3D#LWd)6pn{=)I5CmkwMCrR2n`vqZmze#GyQwe@|XGM#WFzJxwTjh2-Ah2-DnbO ztHbTt!+9H@|HrpT7(HlxsDtGkyVdyJE63OPRDTmSOQ?@blutHV7Di}&b0;7!j1Ks! 
zZk5Ow=JGIS5|~QE8wG>`dDEzOOi}=I*v33m_f262z!;)c4wZru*l-zGZG1YUbo&zl zX(Tc2cYTjqg`Hr45@)&siJVSaA{B8z8Gt#SQ2wcUt&dSTc83y@G0&oJ*5xccZw;Wz<%j#Oi6vJrWpT@EWg6QGxJN;iQ zPN8w>&FmOCcw43m`@3s}+$${3Z<^RQ5x2ba?z9C}Hvrg$re-V2K#Y`;K35wW`p>c+ ztx(-I=b@RtS%=1je^4>hsLzGyQ#Te4T*_pmPw&&H&=mnuH#BrD#XUL+z<**{0l8eR zr&@;+y(6+t0XZ?=IpQbyI*BuMx9W&44>N&zS;EiS%b0lxG@RWuiKz8btFltxX1?+u z=ahqagroz4@DaZa`9yOs2TD7m%6Bh0TN*Evm}-hutB6QugnrTWQIR>W)dM+byuYU9 z3A_5-a0^t8W=Si*cZ0mYoDizsKkBBc&l~aSKdPY*w(U6Q_xlK31g>N=+&jr*Be>Zn zzUmjdgSqOIh9f^91cUpkT_+7KW5!|gqCE4Fhj5;@Xaj*eODn3v=`8B*-Z*f@;tMb5 z<2VBBgHZ4%oK;^}yRkIwkcg1f!dLFR5JCxmeJZU`QOCOdThSKH9RHv zl9Rf<+aA?Yn9E>bi-w8CaIUCTW-&ZAHPevtPj*%l!Ul9-??)Zt4n4n!!dCeR0E97&HrFt|s5XhDInXAp3%QA}w z+_5JR$a39CRH)^LY@7#3>*WMwxGyO>?RU4kRDyJe%Cxg#!01T)HF@#qFg{KL_v_~n z0f+T4f~SGB00tbDvB$cyeAdUof;A$`M*Q8C7qiU-8xp?e#YT13_BFiGk*6AT3+wIG ztt>4Q17~^q@``P9+gIp9oYzeW2pj-DJ{zQ@DaC9;wh{ z!s#{AK6zSTO^|KrS)jmXpIeKM^`cRpIG!RG#ix~-u1H1|U@_E(%6YK_n$@|Ep5hFH zRQL&I(Q$H0`TeA_PuVVQI{Em8ZJkPHM(}3e52+-}Y{zWWa?OJGL>`Zb^?6zOu_zqp*7o?6k8`juG?T6~oF z{9U>nAY^O{e_`PhrKoTs+yFma;6__3V>wzln4!J9RbRX7jUT@6yJG)Cg?35tTPR@( zXz+vnxzSawv7@Jss=lsiwO3h4kaCID#(k$Z*2eH53=}7vnCn>m<$Rt&jRmOe;q z9;{EHl1V9rk3FIBO^S3jOF=5n7y%}UDH;C%^9b-ds=wvvcw0w3#1hpvGl6 zulL!I6-OdhewwX!E3wFpmovTuHO-sT0f7K`Y(!7>H^30?@%xkh@{EjZQ}|H_PxKx; zvjt8YQiDAvGVn#%(eVK<8qQ7rWSwdla&Tm)aENDOuke$r>u$#aGRs~5Le%z*jut%~ z`919{&C#*F&U1x0Ef>db)yS1{rEB@iAw(M-xfmwDGlZ%dAx48C!Vq9MR_uMdA7mQP zro0LK_b~#%3Me}LTgc(F(G!vMuHx4CNvIpFL;LO{$<@iET|-`I`kF$t0v?7~ZB-Kj zGy)Sa4Pzs&QvKPjOz9o8MLo#zd*YvZR1C{u(NHfJ3B!sTDJ-><@!4kURQ)^=YH!V+ z2VGh%q^4Vk`=tl+GN45jMT+Zv_Vu}CF@F$hl>!+O@u~e2CHQH{km2J->fy52hWshu zRV`cFsMnnl95h-%9Mj~C#>a0)%ONnX3p3iWxQv$My0%)6q1a*_qvKqC7VX#Pfy<** z`*42BZyw&&bq*`3peM zkr|VgpB=KrszBrn%zRgClPt*-pgrdHcCNwAy$~OvtS{Kxf8*#Tx(mlR5A8ul$)w^TEOZ3M-2LvNIQwEsQB zs5DnsmRX=ryYTR?1C>2jwLwlzwVqXy3bG+zO^xLA`MOItD6Dz-DrsP~=zoZ zYMb9I#`C$<7o8Hra{0t6OpR}0+DvQI!Ge$kXSmYw>&K{?gS-n8lILKwGDZv;aOz55 zbc}=esKk=QYQafYxWk#QRV{Cerlcjt$h5(>1{N{$t~2I;ylm28#*iRfr_cQwL*P+x z*V6QWzC1VDl`?pIjGRH6eI`9X-9BJBB0~Zek0a+MLWT|YWaOf~S%JfB%AG4u(7}g0uJlOYvPh5Jt)_2?+pC3n`327$zzw3 z+vswJ|I*$BLboS-vNi6uBmIpi`#eBjZnW21uO0`ZD+B9AKDS(Q5{1>0Mj$F3Z2xqg zZLK3@7YXF(!~f~66IqX_nAABxHWMe42Wu=l2K{Pt*aeF+{F$s*vb$fC;SbPM}C z-!9~QS4na$8}}6OZlyXHj=fSPt2}Z7bhG;FFAhSUE!F(uzrEQ^jq_ohnx#t@;#gz% zoN)Bj^gqP-Ie`vXyVzL&vo9>>>qM`KAUMT1CA;$Hp&nXp)M2OI52d99m0u`^sXMk1~PaS5GoVaM~D({uCE&5P4-pYp+N7Ka&W)H)W&Y*1h-z_Uj z{A@6)GOR^Jqwy*)j~LjSt?@`BiobbZBGJ(SmElj7Qw#_ixuz|?G@xPAD5q!8ZmPJG(CZmX6k`!@sdRom0W zn7)QHJem7qpPl2vQYu5I?sSZ0&r~|y*Pxkm<#j?C?j%rjPtP5lcwU6IXv#WTj-m!3 zW7H#O7K3uUjP+&hy>s1DoXH0;4Vad%raEf5m*%gqetMFrb!4_B6;C9+Nr1_xodUYA z_95X2C|L-hSaRE`;JBJIUtvB#kjD32yUD%9Vh2q!x)T~{u`VCZ!mM%ow`7qpC;;QB zM@iH&?QPLzVG=9D5j5Q%9APj$SzU!#TQFG4F0V*w|sfT6I1JKvSEdb;l!T zP><#Na#t|1P|DG-$Axvp;Es4f@K)rWL zFbKsJ6DX-{Bkw5zbI))dBHjqKM;{1tHj5kn73;CS0cf(LC@%v-+HarGp&yjjaohY^ zKu$9pA^(|xpnm50i!Ox3{4YLoOcouDg4H2wh^(i$gygiFjnyYsC&9uB;B8Hc>tJ33 z(*Bz%@TlJCnLA^Cks#gPM`o99Cz!(;WmaBpcZ$jB^dC>(T_)VG{Ph^WM8agCMRsbv zsJFKUvJ*<+h4WrLBjso!s4v6GB~gQVum*)$lopoLv7bp%=_{-Vmq&Jfvtshl*>a&{MeAm=j!I4kH1`;cH4A*47#x~kP|tph z$Y~f&D5&O?YPBp2HPt5{xH2hLTbmCcqix_f#^OXZ#{tn`{YErlfvCcV%X$Ufe*@7v z9m@9Lv91#Ti6Swzcj};#NZ0Tk^E#+G3SJP@bVIq1cfV!<4XdJRld`o15!2UIfX4Pa zU5=T?%fSk)dC2vux8b(EM4RX7(JJ>s<||6xCS67%1gz;eMEuG)Yp>p7sHa7uZ1pZ z{WIh*$TyGS)fskYPeo}^U^ajfGAlmWPxw6qvtdw{3Rc6ouX{l(x)(5jH&`YeX0te7 zSx*Re$f3YU*tU$8>`RbuD9*5;91%IIKz4w%bCvyQ?)>D__v%`GSXFcRc#6eeYJxp3 zw@jI?M5`*%E6w(jkfE=Pn&AN=ts1B~P08f3L9z2tx7V(RZ=D2-N(n`&a|F+K{`((>7m>Dpi&x3j3Sex8?IAU~7;A 
zy8%A>pS5b4bPwu2jL`QvlfEG0)|xsi(U4@t*;zO##Qu&APK8X?IbTtpYI)Q1ecyl)ID^qDe>z)UFKAF@xUQ{u^)dk3XwD4X?#j{Z4w0pj> zC%dd}H@cTQ5@v?C!4SnV9_=hnxHWdeEy{-2MTzcUYB>5d{X5tvX{^mL*!rcfnPTZg zd;gt^^KztEjJt#1`bH546*X%wP?^vquKB)aYf{%zrF(f@NuE~_wCz{FhkXcVlcja* zi@Wvic=2p@CZjGiEEtQo64oIu_g7LNleN71s^L6Z_pPFcAQSH5e6Ln6Q}@PyB%c=6 z|8}I_HR;tl$NFZ)X**;Wp4(?EZU_)G^1I`7*0?nxx+~t}oksKdoUF=(p^bTa^14 zu$4^m-k~>9i;~yz@#&ZWf<3UL;@>s-%j$bKQkL$e4yWCLVQ2+67R3}k7>Ya0&EYr7 z39t7(k{hwd3s0{-F0EYXSp>98Z(E36?kK={UC3!6RmYFCwOys}fP_55!M%~oj}AiQ z%LlFR8AK3*R)J|9sMLJm;t{ijJhN^`)@%S)fYR~JOJda? zaFyRdHwU)Joi3kwjKL5Gc)Gt{`{V4s)bvuycKV97agyAFSiSs?5i(IT#5gb|>uOC~ zXhOp+T{pBqQL&g)>~YGfR58N=V?CDU=D9L{ML_RNQyaKc1iboiB=h_=$w>0QKFG7j zf9fk$2G_|m!9@j*Qq0FtJ0LL8!1m~6ai^fcm8d`~xd1C=M73#@69wssmkS}!Aw19D(i97v5%C{yIT$Ux6Whcrq4Jl#r(|Ah(w1n`Jme4=yCWQe&;J%)p)CFFh_GF z{}Nkie?9PW?7?DoVDZ}Zhm>lE;y)K+9u$UEn-s|5@SK5QGfU!|1goGSKF9O^j-7Lo zfiO3@wGud%jMYMm#V+m4eq2xIxMF2MnOVjc(?La=_D2rwQDfPV&k(-nk04wmYVNohc4xKz3h{|IN!`}=JN_4wALc7EC@m%~4q3Dmnt*h!x#E+Gp%@C}nVG zCx?NrTok|-3-g2nTcyn$hf%z08osqkxJ6GLh!=pn&=sfKg=kFA}CW(I83$(=CmD$}u>5(i=1(t)!wavO}VTsN4J0bLH7o>i}pO*QG*PpOP8( z7o2TT>CsZykOHXNT1Z8_y~;N8%()w8M2}%0Jf3Ti%lgtq1Qp-R#2@g}0svgY(sV%|zm3F3WmyH#Z zb?B$$cn>0gICG=0lTVzOu0WHYC^p))9Lg31m@6a)5gw4lXB1IzM9rdDDDm>lxPh$? z`q5pw_TMhDogO=C%-G%QNO&rc7j;5Db+xVDl78#|qR<=v3l9FxN~NuMim(*-{<)tu zbhIvEGZHml=8Z5YkG!&!XYA}N;dfpw4JZzhT)oKs;&>;VPVvtt`~8SdZ?9eFL7-js zpaG}tng?soH5pg(jeFyiJJx5c(Oubja0{-fw<{|_1EWNdk{t(3NgLnbrrZ8H;$=T` zwDwdBqaKJmy&H>}FCJWW*xh-Q<3>SdaDtT_Km-QaXHhM0WJL>YR2NDt&UF4=l>z~H9d`Z}e1%_s9l5>BPa)qVl zyPppUNqdz*v=%EC-cb$zQH@nJ1Kjd;!fhemiM++7Z_pTC$bI<)U%^{Q$7#B{C15Ve zNO~!2s{Wk-D`I4E&xGYmpDBJ)*mV*`fXa>?5zN_Y)Fi$Dd%3H3S31int^JANQFjPW z4oF)@OSt5b(DTDrV1#z~#Wv6hl=Q-sjS_$u4Lf~pa{hjWa5Q4>C-x~VH1O%8=HD_a z6Zt_jW5w7*WspxpizvH56Wf|r5bJ8y!xz9T?NG@WX68>)1${&x6#Qy|I2W7v*1(#P zNphur>g(lF?6lz_%@d)4=c#=_{yn84QMuhWe;){N%K6JaZ5v?#eZK*OBpjK^As3#c z@(g;@z=h&s2DU-qR(7)55Y_l~Q$Mq|*R1a9Dr0fsxKJep757h!JH%}3EnHGzo46hJ zY;H7#lXFsx2>lXq6izi>=?q?u_$P#;HEptA695@g#hSX>wEE|-n;eB(7!&ZH393TZ z)qRcOJ*+0@?{~ke0Gn)Jb7pXP_x#&M#Z8@MP+ZLy<}VU_28Td`y99?2g1fsr1h?Ss z?i$?Pf;$9vcXtWy5IkY?{&%zRVXEd=&uKe-r>47acONLO>-JzSDH&+&P1mbSvy2h( zwC`H@{*Zx6)3SQ>%05UYY2#XjR}MNyBBTVdrCnx(mF`{zhb?T)bEN1ED?%5VpH?VT zg$95GPH5N7>6%T36R%}rI-;8LdS-G3ve*-s!J2L4qM zwNSWmp9tVMIIp9>>unlOLg%yTdy<$-Ep%>U5N3plr;VDV*9GE@GbBT^OPfZ+=^e|G zM7Lxhc+_`5$$v(#Opj=Yep+j|Yq0LIcBiC#>jc4u=9eD{yPqs99V0R3t16l)^9LJ7 zC-BsyH!C=Z+{pJ=#$g%2)HpZvs$?I=6+|YCjxj( z4r_m^u%ukPUB$#FYVti(Cd-e$~1%vaTd0)9~fuxbP70sPKR`qEFkGz2(^sz z%981BOJq8AM;?UQ&@4{JP-`t2cP#NpARRK)57s^)7IZB0-L#uN?hrMYX7(0yFMFazKP=-4tm zY>FD;jwyIp)fWfGQpb;t2W3Y(J9Z&6e*7353d~J169`LbI{dBLBWPKH{4|YSh@V_b ztujqxn(LjIfV%LWEC7r?rfyUU6~?-TszFX`p&-jBZ2Ymb>jBL_+ymoNhJ{woF%2@3 zr^7c@x;1n&e7r8R4}|qOxSC7d=EvhT1YDK8YIeOt z>ca!?9diE$h?VZWjX86%mifpk0OH+FH}V~KiBaDtW_K7EWjWaba~cPD2>ZC|956!XqcUyOQ(gzy*&cS z`%*R0v0m+Ef%lVrCC)eInH|4f4eh69?CtVxTT(hMW~anRkA`AKobmT( z@DPTH9SuWj?1SRY>{6DThJ+RJbM-BydIGX(JZ=_=-%{OVdD}!KI2pWv$c0> zJv*E-yiY6I5h#ko$fsI^sp!C=+SgTdQk@xN+Vkuhs;ypBT4ilvN3DLyYje8#r6z6? 
zx0R*!qXZ2V^JihVc!fa_W}dGVE$5af7Aaj{WJLa!JI&;Dnjkca(Ge z9rOyJk7{+r&L_LiYV)^>UJ^0l?&{ zoojcfqVw;GBliuMC(>_oZfp^pct=>+SUTxAF~-18A8b%cyTcHE`4obKzcI-)l{IW6 z7W4wJE2<4`ro#BPW_D6&=3qSz$gsi8pU^0o3R`&iK9k=`u^pimx`Ohn(`moW;L&R8 z7B>%HX%k|x_0x@0mf*$4WZG z%tL$H=7!lAy&cR8QaQ$4=u^&iphz!n9pAa|f$~k8##Pq48w+v%Nkk?vnQNCO2X3mq zn7AY>sq+pVQ`mtG9i;5u3F?Eu@Os5Z(6X(^5Vv6!T^-&q)#0kY4&mA=mGdbi zhe?ul7?BChc(qf0+~peVdT!eFL;5~cxP&vqD38?(owxB7>>!Ce#>6d87uHGKxOB>P znYB*a%bw`6k{}QgkR*BZTMjHDz*ht)?d5bVCY|J~d z7=vg&xM;cW*{1aG;m(P^+NEj%VXxVPmF6nxmFqwXrh((BDbg_@T|Igy^lzuF-jV%u zJa;`Rgu*=uT+(K5bCXVCUlewjQo>4CsAwkp;z{!{ew;tB-?|bqp~nxwsCafe9zE?0 z|BBsE_sK#~V^hNJ0(y9kUr`_%`WNpLIhHnBg7M@jJT`50QME;R0~!}0iR!6g+LeGs ze!(t*s6y^H+!{}9h@?27lg}C`QpvUc^}>c1u`Nq91&UIHIx3@s9h#U-KIr# z{`NJ$jcM@*}Ip7KuyQsKl<`1eFA{QKgp& zh1O-bG9EVq)Gv(A1U%vxZd2KSw?aM-oN2Mn@Jg|^;Z~rHTPCboTpmkw0uFkA{^lYg zEr_R+><8gJr6d!ZhU)_hJuO(jc_JqZ3Z4z=+taZr$vk1gP*1A8Ju#HD6;e0w$JeNY2!qX>JW55b4i*`Y-ssAWEin zqBa$W%Z%#{98rPAMnfDb0mg~^01;w(pa0!|wvP1w)I{!54o?bgNZB^yRB|`!inI~kdxwxFgEOO5FrW5otS8@R;!`+=+$r`89<}!@|*4~1c~~-uU!Ss-MoC5)F&L9(=$Yx;fyJ0pg--g6}AfYx~CiK$(|tY{uS_q@76oCAjm?5DSn zXRU)j63giu@F(>WX9prw953K2d_gZ|z9p8%u{}|3N*$xlu zJ|Q7|7Ts&B zC={a!S;byEPT{xu)ek8B=#qzn+`_EIE5HylOL(~cO8q_Wv17Nr7mxS|qiPBo+X+CW z_~*#;`==G`ZQUAESyE~O*5t{M`fH8`_?v1BJF-?C>b)(UGRM3jkuwi5!8AFW|ul)AAeWEt1ZMFLL*SiVWrVD47izwfK zQK&{6X+^0aMqDa;&3#6yQS5_MM&?uI1QipttFV;j@$Vk0g-D7PF#V&NSC4C6C7^HY zN{W0ED8k>(v7QtB2w7-5czETf{b=3omzh@T5ILz_KeY3=a$-;3s=udsH|8oh`|u`i z+}d_&<~>6`4^7cnDS1~@oxO74Nn@c3#WwEyxXCBzSwyX9B8HQ_Eu^ZYBy(4epa{RW z>u-mz-XXldmK#(Q{R4*RudkrD-IhT=H@k3k!X^mMiIfT^@_d|3Bqvd?rM9eyIo1iB zc#k7?eU0f^dYCG^53j;UJ%}#V@_ZM-HMY7SMQNjYwO z-IVHvjeN4a$z^NWlP2`1RYsz#N}fW?6<^$&`b60T$C3KCB}J|oD+E)7%OpXfjdvzd zsj=)q2o_p?xGZdY^|iy^lPi$YcWu%a&@i1zWoBMiEaUQhb^Z~lIVdGLgNO6k?^HAkkDle?;}P%8^`_@$UV-#e zlv|_<(Sqdo(=prkV=*}GJX)*5Wyum#tE>@b9!fgpr)TUERUxC>i&HtIp&3m}v5+d_ zJi!ld)xKExqidV_Z>3towvVqa9JZggZJWzw_vB^nbzFPNZKL5`w_`m#+frO}AZ6Oy zRBi=xA*|Y#x0Sp1&u6avH1J@=NN~n$Vr;h4v%GLY7z82R;1-VLKHTOEz5Pix>Ff_L zS$#TSn`;KkuJ@ZK*?L7{ToEtvQEZEzDXRg)-1}t`!m~C3*PAI_U4F=2 z*ssYvUM?BHr)n#wc`RZy7{}!FEPGbKXg~w!V=;R$`-2CQN#!RM@ldg?o6x%xrDT27 z(aS!LYirQFH$k$-_wbMX;`N&Jt4%B>6yV#V=rS(_h~fgVS{(=1s|pN~x>NM`^Fnh9 zSyu94gNIM;NPX00b0=dtNdQIzb+AzVkn6eC{Z~229ZN4`r5ki5dm)AHT2X$?zbYSC#asWobbn%EcrzlCFYXKXI-y z6_})M@X>-2$$sC+xLH)A5SXbP%@Hr?h-&+BpRwMeq8oX#fn-|1s)E7yao#~+TinBukBl~OHpGroN5OfiRDs*yw5h&LpNdH_padl4D>7QM;cTtb75_SY@kFQ{-=P?$KwSerG-l=T(T%qa@c%= z-{DEtzM1#l@Id=kPbyFdLMxLUgDjjPq3QRz^c;A7LmrBam^t-VQfENhfZ`EM=O`8GrD`h73ES_|PnpgvKxL zW5sF=THhv{o4-x(cq3NgIZK8~5I}dB>!D6rEiM!!fSH>}?W9n4gg{Ong z`#{LJgyCCh^CXj(4U-6**TEOE&zgNi;VEBO!*@Buw3y0fRP_4QDSiIFG)#F?ks{v; zu#~V^{DJ*>G@ylzJP=F#7(;0hMAsN)Tkk5G@T;Bp$oirmGHGjoxG*9-0i#z{(q*41N&9xQ;emq{2 zgReM&OB6q31fORx)W#L2N+<*ym&`c@7i~&COvLBO4hhb?%ac>xhV|J;)<=GH`D=dM zR~1bmysoe#Cq2CndX(bwzS5Pu%!aHC5xtZtNTFtDbLhpZ`Z%qqQtuC>c%FUJAYVi) zIA(Xmu%1A3YPA~aZI&{3(J)66!na6AkvL>8Q(J5I>mzcH!xPB1z6m=}#@GfA-@_rw zTO1g9X(k2TkqpS4!`hyyzn|DB(fp)KJY+GIvEm*}@4 zinUkYc_YpIA^P7uXAAz{F2MZ1y8w6K?Q~jILO$dPJoduCHFW$v_ExGDL}A`5W}-9B zE05Dzk3h#Q3o+F0OK?BStdu`2K&yB%&drWP4_#xR~`SbJF=^8wZB>D7GdD zwi;P2cK5rD`p$rR5q~p%uNWJ9{r6<#&}8B5Etw(tS>N4Fr{_yXe}uZY>5;Da=%_jc z^L@QV%gQ?@YSRNSvWzuf;o~C&bb^c);O`V5YbADFWlTY z`oX9j2HIR$Qgg>sk`_tvusaNRF8Z6_FW{lpiJ%EIPVB}_2;AR0zGdIBD&HjhN|MYc zlvlvm$C@|C-qOTkkYSOTn8$y_AiKQklPdVL>Vr8ieDLl<>{VEZ!z!833e?G`8UxBX z0^Z>hlWi?yk;~#C@1jK>8q}rdG{~uS6?W1u+=f&ISqVC^Wp7eK2FXn6FA6iVmIWii zM&Xmuv+$tTp_{ew0iR9%O*xEEzF!BtkqND?{OQP-`22eO_r=a?DPtI-viBeMRsBBU zXF4YGEVN(c#R_3HZZnD@_>^Fi)uDcYzV^22r!O7~kmDO+Afm?j1 
ze^4?FZ{;{T_)DrD&P@@qO%QzVI*BkD6zR0> zS9Z{$OZ4Je@*R~2Wnri1xxieV&w9A&Y*h)P(#)S$rrbK=WNCZE9LNCqBe>p6HSce@ zkwePZYh22F2tjWwN&E#6J& z4mlx*kdYHuV)h}yshV_ybH-U#{@Todg+A)wAzE()cCbazqQeE(p6RpswWMTemPUl? z=FwW@mr5<4jyktndb!wA}w&dj7$`rMZ!P_lx z>2uic0G50an7@BCRqnnC#!M}J`6E+k_b8zF)JR@6@WI4~Y+hM(!SVaD5n1wpP5YJm z)wq#)1|K_Iv&B7SpVEw*l*yK#rL2F}Nq*}!kF(*voLeMGKO`HNEjy%v*cuvioqMnPD8)0vI;?Iv1wn}Du3ueKxa==k6>j%V56(W33h?5wWPcpX z)(YovjO|AeLX=$L2V+K&A8mCxBzBv8s@v!z2JMwiES!re$Bv#vw#_lDN-glmjgC9> z1fM9pDto+vWmBR`v;2psiWy~}3=GR{oHD$J(KGvK+g0MFh1X8AJYPOC>0=_$ z38L=t_2-7|iB{;rKdm6%mv6Bl}N=(*@oBo?S>G7s-=G+ zDE(Nu&+rjPPtY*>0-$TAlbAE)E37G@8XbPL9iPOkr7W9y`hqLgm9(dhWTgz98*v

+        result = result[0][result[0][:, 4] >= args.det_score_thr]
+        results.append(result)
+        prog_bar.update()
+    return results
+
+
+def intersection(b0, b1):
+    l, r = max(b0[0], b1[0]), min(b0[2], b1[2])
+    u, d = max(b0[1], b1[1]), min(b0[3], b1[3])
+    return max(0, r - l) * max(0, d - u)
+
+
+def iou(b0, b1):
+    i = intersection(b0, b1)
+    u = area(b0) + area(b1) - i
+    return i / u
+
+
+def area(b):
+    return (b[2] - b[0]) * (b[3] - b[1])
+
+
+def removedup(bbox):
+
+    def inside(box0, box1, thre=0.8):
+        return intersection(box0, box1) / area(box0) > thre
+
+    num_bboxes = bbox.shape[0]
+    if num_bboxes == 1 or num_bboxes == 0:
+        return bbox
+    valid = []
+    for i in range(num_bboxes):
+        flag = True
+        for j in range(num_bboxes):
+            if i != j and inside(bbox[i],
+                                 bbox[j]) and bbox[i][4] <= bbox[j][4]:
+                flag = False
+                break
+        if flag:
+            valid.append(i)
+    return bbox[valid]
+
+
+def is_easy_example(det_results, num_person):
+    threshold = 0.95
+
+    def thre_bbox(bboxes, thre=threshold):
+        shape = [sum(bbox[:, -1] > thre) for bbox in bboxes]
+        ret = np.all(np.array(shape) == shape[0])
+        return shape[0] if ret else -1
+
+    if thre_bbox(det_results) == num_person:
+        det_results = [x[x[..., -1] > 0.95] for x in det_results]
+        return True, np.stack(det_results)
+    return False, thre_bbox(det_results)
+
+
+def bbox2tracklet(bbox):
+    iou_thre = 0.6
+    tracklet_id = -1
+    tracklet_st_frame = {}
+    tracklets = defaultdict(list)
+    for t, box in enumerate(bbox):
+        for idx in range(box.shape[0]):
+            matched = False
+            for tlet_id in range(tracklet_id, -1, -1):
+                cond1 = iou(tracklets[tlet_id][-1][-1], box[idx]) >= iou_thre
+                cond2 = (
+                    t - tracklet_st_frame[tlet_id] - len(tracklets[tlet_id]) <
+                    10)
+                cond3 = tracklets[tlet_id][-1][0] != t
+                if cond1 and cond2 and cond3:
+                    matched = True
+                    tracklets[tlet_id].append((t, box[idx]))
+                    break
+            if not matched:
+                tracklet_id += 1
+                tracklet_st_frame[tracklet_id] = t
+                tracklets[tracklet_id].append((t, box[idx]))
+    return tracklets
+
+
+def drop_tracklet(tracklet):
+    tracklet = {k: v for k, v in tracklet.items() if len(v) > 5}
+
+    def meanarea(track):
+        boxes = np.stack([x[1] for x in track]).astype(np.float32)
+        areas = (boxes[..., 2] - boxes[..., 0]) * (
+            boxes[..., 3] - boxes[..., 1])
+        return np.mean(areas)
+
+    tracklet = {k: v for k, v in tracklet.items() if meanarea(v) > 5000}
+    return tracklet
+
+
+def distance_tracklet(tracklet):
+    dists = {}
+    for k, v in tracklet.items():
+        bboxes = np.stack([x[1] for x in v])
+        c_x = (bboxes[..., 2] + bboxes[..., 0]) / 2.
+        c_y = (bboxes[..., 3] + bboxes[..., 1]) / 2.
+ c_x -= 480 + c_y -= 270 + c = np.concatenate([c_x[..., None], c_y[..., None]], axis=1) + dist = np.linalg.norm(c, axis=1) + dists[k] = np.mean(dist) + return dists + + +def tracklet2bbox(track, num_frame): + # assign_prev + bbox = np.zeros((num_frame, 5)) + trackd = {} + for k, v in track: + bbox[k] = v + trackd[k] = v + for i in range(num_frame): + if bbox[i][-1] <= 0.5: + mind = np.Inf + for k in trackd: + if np.abs(k - i) < mind: + mind = np.abs(k - i) + bbox[i] = bbox[k] + return bbox + + +def tracklets2bbox(tracklet, num_frame): + dists = distance_tracklet(tracklet) + sorted_inds = sorted(dists, key=lambda x: dists[x]) + dist_thre = np.Inf + for i in sorted_inds: + if len(tracklet[i]) >= num_frame / 2: + dist_thre = 2 * dists[i] + break + + dist_thre = max(50, dist_thre) + + bbox = np.zeros((num_frame, 5)) + bboxd = {} + for idx in sorted_inds: + if dists[idx] < dist_thre: + for k, v in tracklet[idx]: + if bbox[k][-1] < 0.01: + bbox[k] = v + bboxd[k] = v + bad = 0 + for idx in range(num_frame): + if bbox[idx][-1] < 0.01: + bad += 1 + mind = np.Inf + mink = None + for k in bboxd: + if np.abs(k - idx) < mind: + mind = np.abs(k - idx) + mink = k + bbox[idx] = bboxd[mink] + return bad, bbox + + +def bboxes2bbox(bbox, num_frame): + ret = np.zeros((num_frame, 2, 5)) + for t, item in enumerate(bbox): + if item.shape[0] <= 2: + ret[t, :item.shape[0]] = item + else: + inds = sorted( + list(range(item.shape[0])), key=lambda x: -item[x, -1]) + ret[t] = item[inds[:2]] + for t in range(num_frame): + if ret[t, 0, -1] <= 0.01: + ret[t] = ret[t - 1] + elif ret[t, 1, -1] <= 0.01: + if t: + if ret[t - 1, 0, -1] > 0.01 and ret[t - 1, 1, -1] > 0.01: + if iou(ret[t, 0], ret[t - 1, 0]) > iou( + ret[t, 0], ret[t - 1, 1]): + ret[t, 1] = ret[t - 1, 1] + else: + ret[t, 1] = ret[t - 1, 0] + return ret + + +def ntu_det_postproc(vid, det_results): + det_results = [removedup(x) for x in det_results] + label = int(vid.split('/')[-1].split('A')[1][:3]) + mpaction = list(range(50, 61)) + list(range(106, 121)) + n_person = 2 if label in mpaction else 1 + is_easy, bboxes = is_easy_example(det_results, n_person) + if is_easy: + print('\nEasy Example') + return bboxes + + tracklets = bbox2tracklet(det_results) + tracklets = drop_tracklet(tracklets) + + print(f'\nHard {n_person}-person Example, found {len(tracklets)} tracklet') + if n_person == 1: + if len(tracklets) == 1: + tracklet = list(tracklets.values())[0] + det_results = tracklet2bbox(tracklet, len(det_results)) + return np.stack(det_results) + else: + bad, det_results = tracklets2bbox(tracklets, len(det_results)) + return det_results + # n_person is 2 + if len(tracklets) <= 2: + tracklets = list(tracklets.values()) + bboxes = [] + for tracklet in tracklets: + bboxes.append(tracklet2bbox(tracklet, len(det_results))[:, None]) + bbox = np.concatenate(bboxes, axis=1) + return bbox + else: + return bboxes2bbox(det_results, len(det_results)) + + +def pose_inference(args, frame_paths, det_results): + model = init_pose_model(args.pose_config, args.pose_checkpoint, + args.device) + print('Performing Human Pose Estimation for each frame') + prog_bar = mmcv.ProgressBar(len(frame_paths)) + + num_frame, num_person = det_results.shape[:2] + kp = np.zeros((num_person, num_frame, 17, 3), dtype=np.float32) + + for i, (f, d) in enumerate(zip(frame_paths, det_results)): + # Align input format + d = [dict(bbox=x) for x in list(d) if x[-1] > 0.5] + pose = inference_top_down_pose_model(model, f, d, format='xyxy')[0] + for j, item in enumerate(pose): + kp[j, i] = item['keypoints'] + 
prog_bar.update() + return kp + + +def ntu_pose_extraction(vid): + frame_paths = extract_frame(vid) + det_results = detection_inference(args, frame_paths) + det_results = ntu_det_postproc(vid, det_results) + pose_results = pose_inference(args, frame_paths, det_results) + anno = dict() + anno['keypoint'] = pose_results[..., :2] + anno['keypoint_score'] = pose_results[..., 2] + anno['frame_dir'] = osp.splitext(osp.basename(vid))[0] + anno['img_shape'] = (1080, 1920) + anno['original_shape'] = (1080, 1920) + anno['total_frames'] = pose_results.shape[1] + anno['label'] = int(osp.basename(vid).split('A')[1][:3]) + shutil.rmtree(osp.dirname(frame_paths[0])) + + return anno + + +def parse_args(): + parser = argparse.ArgumentParser( + description='Generate Pose Annotation for a single NTURGB-D video') + parser.add_argument('video', type=str, help='source video') + parser.add_argument('output', type=str, help='output pickle name') + parser.add_argument('--device', type=str, default='cuda:0') + args = parser.parse_args() + return args + + +if __name__ == '__main__': + global_args = parse_args() + args.device = global_args.device + args.video = global_args.video + args.output = global_args.output + anno = ntu_pose_extraction(args.video) + mmcv.dump(anno, args.output) From 0210ffdfe433e9b3d7abf7e0c2532c46dd069ede Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Mon, 9 Aug 2021 11:23:43 +0800 Subject: [PATCH 225/414] [Improvement] Cache in RawFrameDecode (#1078) * cache in rawframedecode * update * fix bug * update * fix --- mmaction/datasets/pipelines/loading.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/mmaction/datasets/pipelines/loading.py b/mmaction/datasets/pipelines/loading.py index 7365eb7054..1f212ee09d 100644 --- a/mmaction/datasets/pipelines/loading.py +++ b/mmaction/datasets/pipelines/loading.py @@ -1,3 +1,4 @@ +import copy as cp import io import os import os.path as osp @@ -1275,7 +1276,19 @@ def __call__(self, results): offset = results.get('offset', 0) - for frame_idx in results['frame_inds']: + cache = {} + for i, frame_idx in enumerate(results['frame_inds']): + # Avoid loading duplicated frames + if frame_idx in cache: + if modality == 'RGB': + imgs.append(cp.deepcopy(imgs[cache[frame_idx]])) + else: + imgs.append(cp.deepcopy(imgs[2 * cache[frame_idx]])) + imgs.append(cp.deepcopy(imgs[2 * cache[frame_idx] + 1])) + continue + else: + cache[frame_idx] = i + frame_idx += offset if modality == 'RGB': filepath = osp.join(directory, filename_tmpl.format(frame_idx)) From 3af9e1dbf65d4e64df0e1a89988f485bc9fa6f27 Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Wed, 11 Aug 2021 20:39:52 +0800 Subject: [PATCH 226/414] [Fix] Fix bugs in dataset (metrics) (#1088) * master * master 0721 * fix metric bugs 0811 --- mmaction/datasets/base.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/mmaction/datasets/base.py b/mmaction/datasets/base.py index e4f753388c..189354305b 100644 --- a/mmaction/datasets/base.py +++ b/mmaction/datasets/base.py @@ -223,16 +223,17 @@ def evaluate(self, if metric in [ 'mean_average_precision', 'mmit_mean_average_precision' ]: - gt_labels = [ + gt_labels_arrays = [ self.label2array(self.num_classes, label) for label in gt_labels ] if metric == 'mean_average_precision': - mAP = mean_average_precision(results, gt_labels) + mAP = mean_average_precision(results, gt_labels_arrays) eval_results['mean_average_precision'] = mAP log_msg = f'\nmean_average_precision\t{mAP:.4f}' 
elif metric == 'mmit_mean_average_precision': - mAP = mmit_mean_average_precision(results, gt_labels) + mAP = mmit_mean_average_precision(results, + gt_labels_arrays) eval_results['mmit_mean_average_precision'] = mAP log_msg = f'\nmmit_mean_average_precision\t{mAP:.4f}' print_log(log_msg, logger=logger) From 075464c9157a20a33ccecf11ed3a532ecfbb3b78 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Wed, 11 Aug 2021 20:58:06 +0800 Subject: [PATCH 227/414] Update Version Checking (#1090) --- mmaction/datasets/builder.py | 5 ++--- mmaction/datasets/pipelines/augmentations.py | 6 +++--- mmaction/models/common/transformer.py | 7 +++---- tests/test_data/test_compose.py | 5 ++--- .../test_pipelines/test_augmentations/test_pytorchvideo.py | 5 ++--- 5 files changed, 12 insertions(+), 16 deletions(-) diff --git a/mmaction/datasets/builder.py b/mmaction/datasets/builder.py index bbdf1bc7ef..5fac34b690 100644 --- a/mmaction/datasets/builder.py +++ b/mmaction/datasets/builder.py @@ -1,13 +1,12 @@ import platform import random -from distutils.version import LooseVersion from functools import partial import numpy as np import torch from mmcv.parallel import collate from mmcv.runner import get_dist_info -from mmcv.utils import Registry, build_from_cfg +from mmcv.utils import Registry, build_from_cfg, digit_version from torch.utils.data import DataLoader from .samplers import ClassSpecificDistributedSampler, DistributedSampler @@ -111,7 +110,7 @@ def build_dataloader(dataset, worker_init_fn, num_workers=num_workers, rank=rank, seed=seed) if seed is not None else None - if LooseVersion(torch.__version__) >= LooseVersion('1.8.0'): + if digit_version(torch.__version__) >= digit_version('1.8.0'): kwargs['persistent_workers'] = persistent_workers data_loader = DataLoader( diff --git a/mmaction/datasets/pipelines/augmentations.py b/mmaction/datasets/pipelines/augmentations.py index f79f312b1d..c134eb678b 100644 --- a/mmaction/datasets/pipelines/augmentations.py +++ b/mmaction/datasets/pipelines/augmentations.py @@ -1,11 +1,11 @@ import random import warnings from collections.abc import Sequence -from distutils.version import LooseVersion import cv2 import mmcv import numpy as np +from mmcv.utils import digit_version from torch.nn.modules.utils import _pair from ..builder import PIPELINES @@ -68,7 +68,7 @@ def __init__(self, type, **kwargs): import torchvision.transforms as tv_trans except ImportError: raise RuntimeError('Install torchvision to use TorchvisionTrans') - if LooseVersion(torchvision.__version__) < LooseVersion('0.8.0'): + if digit_version(torchvision.__version__) < digit_version('0.8.0'): raise RuntimeError('The version of torchvision should be at least ' '0.8.0') @@ -105,7 +105,7 @@ def __init__(self, type, **kwargs): import pytorchvideo.transforms as ptv_trans except ImportError: raise RuntimeError('Install pytorchvideo to use PytorchVideoTrans') - if LooseVersion(torch.__version__) < LooseVersion('1.8.0'): + if digit_version(torch.__version__) < digit_version('1.8.0'): raise RuntimeError( 'The version of PyTorch should be at least 1.8.0') diff --git a/mmaction/models/common/transformer.py b/mmaction/models/common/transformer.py index edc95e5a92..56be980bb5 100644 --- a/mmaction/models/common/transformer.py +++ b/mmaction/models/common/transformer.py @@ -1,5 +1,3 @@ -from distutils.version import LooseVersion - import torch import torch.nn as nn from einops import rearrange @@ -7,6 +5,7 @@ from mmcv.cnn.bricks.registry import ATTENTION, FEEDFORWARD_NETWORK from mmcv.cnn.bricks.transformer import 
FFN, build_dropout from mmcv.runner.base_module import BaseModule +from mmcv.utils import digit_version @ATTENTION.register_module() @@ -46,7 +45,7 @@ def __init__(self, self.num_frames = num_frames self.norm = build_norm_layer(norm_cfg, self.embed_dims)[1] - if LooseVersion(torch.__version__) < LooseVersion('1.9.0'): + if digit_version(torch.__version__) < digit_version('1.9.0'): kwargs.pop('batch_first', None) self.attn = nn.MultiheadAttention(embed_dims, num_heads, attn_drop, **kwargs) @@ -123,7 +122,7 @@ def __init__(self, self.num_heads = num_heads self.num_frames = num_frames self.norm = build_norm_layer(norm_cfg, self.embed_dims)[1] - if LooseVersion(torch.__version__) < LooseVersion('1.9.0'): + if digit_version(torch.__version__) < digit_version('1.9.0'): kwargs.pop('batch_first', None) self.attn = nn.MultiheadAttention(embed_dims, num_heads, attn_drop, **kwargs) diff --git a/tests/test_data/test_compose.py b/tests/test_data/test_compose.py index 86379fe933..b4a866b6a3 100644 --- a/tests/test_data/test_compose.py +++ b/tests/test_data/test_compose.py @@ -1,14 +1,13 @@ import numpy as np import pytest -from mmcv.utils import assert_keys_equal +from mmcv.utils import assert_keys_equal, digit_version from mmaction.datasets.pipelines import Compose, ImageToTensor try: import torchvision - from distutils.version import LooseVersion torchvision_ok = False - if LooseVersion(torchvision.__version__) >= LooseVersion('0.8.0'): + if digit_version(torchvision.__version__) >= digit_version('0.8.0'): torchvision_ok = True except (ImportError, ModuleNotFoundError): torchvision_ok = False diff --git a/tests/test_data/test_pipelines/test_augmentations/test_pytorchvideo.py b/tests/test_data/test_pipelines/test_augmentations/test_pytorchvideo.py index df1f6a9fb2..ce7fbe6c8b 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_pytorchvideo.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_pytorchvideo.py @@ -1,13 +1,12 @@ import numpy as np import pytest -from mmcv.utils import assert_dict_has_keys +from mmcv.utils import assert_dict_has_keys, digit_version try: import torch - from distutils.version import LooseVersion from mmaction.datasets.pipelines import PytorchVideoTrans pytorchvideo_ok = False - if LooseVersion(torch.__version__) >= LooseVersion('1.8.0'): + if digit_version(torch.__version__) >= digit_version('1.8.0'): pytorchvideo_ok = True except (ImportError, ModuleNotFoundError): pytorchvideo_ok = False From 4242f64692da43fc9b92f66e5d74733a1da2c804 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Fri, 13 Aug 2021 18:34:33 +0800 Subject: [PATCH 228/414] Update tsn_r50_video_1x1x16_100e_diving48_rgb.py --- .../tsn/tsn_r50_video_1x1x16_100e_diving48_rgb.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/configs/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb.py b/configs/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb.py index c0ce9e9e04..9bea133f7b 100644 --- a/configs/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb.py +++ b/configs/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb.py @@ -18,7 +18,7 @@ train_pipeline = [ dict(type='DecordInit'), - dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8), + dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=16), dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict( @@ -40,7 +40,7 @@ type='SampleFrames', clip_len=1, frame_interval=1, - num_clips=8, + num_clips=16, test_mode=True), dict(type='DecordDecode'), 
     dict(type='Resize', scale=(-1, 256)),
@@ -56,7 +56,7 @@
         type='SampleFrames',
         clip_len=1,
         frame_interval=1,
-        num_clips=8,
+        num_clips=16,
         test_mode=True),
     dict(type='DecordDecode'),
     dict(type='Resize', scale=(-1, 256)),

From 97528d0dddf9b3b4d39c12f7b3dc460524dd7390 Mon Sep 17 00:00:00 2001
From: Haodong Duan
Date: Sun, 15 Aug 2021 14:04:05 +0800
Subject: [PATCH 229/414] [Fix] Update ntu_pose_extraction.py (#1096)

---
 tools/data/skeleton/ntu_pose_extraction.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tools/data/skeleton/ntu_pose_extraction.py b/tools/data/skeleton/ntu_pose_extraction.py
index c0d4b604d9..132a553f1c 100644
--- a/tools/data/skeleton/ntu_pose_extraction.py
+++ b/tools/data/skeleton/ntu_pose_extraction.py
@@ -23,7 +23,7 @@
 mmdet_root = ''
 mmpose_root = ''
 
-args = abc.ABC()
+args = abc.abstractproperty()
 args.det_config = f'{mmdet_root}/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person.py'  # noqa: E501
 args.det_checkpoint = 'https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco-person/faster_rcnn_r50_fpn_1x_coco-person_20201216_175929-d022e227.pth'  # noqa: E501
 args.det_score_thr = 0.5

From c9090c32b2bb9f2ee3bf1a8d968f0dbf587c27e4 Mon Sep 17 00:00:00 2001
From: Haodong Duan
Date: Sun, 15 Aug 2021 17:34:09 +0800
Subject: [PATCH 230/414] [Doc] AVA annotations explained (#1097)

---
 tools/data/ava/AVA_annotation_explained.md | 49 ++++++++++++++++++++++
 1 file changed, 49 insertions(+)
 create mode 100644 tools/data/ava/AVA_annotation_explained.md

diff --git a/tools/data/ava/AVA_annotation_explained.md b/tools/data/ava/AVA_annotation_explained.md
new file mode 100644
index 0000000000..9d9a3ec39b
--- /dev/null
+++ b/tools/data/ava/AVA_annotation_explained.md
@@ -0,0 +1,49 @@
+# AVA Annotation Explained
+
+In this section, we explain the annotation format of AVA in detail:
+
+```
+mmaction2
+├── data
+│   ├── ava
+│   │   ├── annotations
+│   │   │   ├── ava_dense_proposals_train.FAIR.recall_93.9.pkl
+│   │   │   ├── ava_dense_proposals_val.FAIR.recall_93.9.pkl
+│   │   │   ├── ava_dense_proposals_test.FAIR.recall_93.9.pkl
+│   │   │   ├── ava_train_v2.1.csv
+│   │   │   ├── ava_val_v2.1.csv
+│   │   │   ├── ava_train_excluded_timestamps_v2.1.csv
+│   │   │   ├── ava_val_excluded_timestamps_v2.1.csv
+│   │   │   ├── ava_action_list_v2.1_for_activitynet_2018.pbtxt
+```
+
+## The proposals generated by human detectors
+
+In the annotation folder, `ava_dense_proposals_[train/val/test].FAIR.recall_93.9.pkl` are proposals generated by a human detector; they are used in training, validation and testing respectively. Take `ava_dense_proposals_train.FAIR.recall_93.9.pkl` as an example: it is a dictionary with 203626 entries. Each key consists of the `videoID` and the `timestamp`. For example, the key `-5KQ66BBWC4,0902` means that its value holds the detection results for the frame at the $$902_{nd}$$ second in the video `-5KQ66BBWC4`. The values in the dictionary are numpy arrays with shape $$N \times 5$$, where $$N$$ is the number of detected human bounding boxes in the corresponding frame. The format of a bounding box is $$[x_1, y_1, x_2, y_2, score], 0 \le x_1, y_1, x_2, y_2, score \le 1$$. $$(x_1, y_1)$$ indicates the top-left corner of the bounding box and $$(x_2, y_2)$$ the bottom-right corner; $$(0, 0)$$ indicates the top-left corner of the image, while $$(1, 1)$$ indicates the bottom-right corner of the image.
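+
+A minimal sketch of how one of these proposal files can be inspected (the key below is the example key from above; the relative path assumes the directory layout at the top of this document):
+
+```python
+import mmcv
+
+# Load the proposal dict: keys are '<videoID>,<timestamp>' strings.
+path = 'data/ava/annotations/ava_dense_proposals_train.FAIR.recall_93.9.pkl'
+proposals = mmcv.load(path)
+
+# Each value is an (N, 5) array: [x1, y1, x2, y2, score], normalized to [0, 1].
+dets = proposals['-5KQ66BBWC4,0902']
+for x1, y1, x2, y2, score in dets:
+    print(f'box=({x1:.3f}, {y1:.3f}, {x2:.3f}, {y2:.3f}), score={score:.3f}')
+```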
+
+## The ground-truth labels for spatio-temporal action detection
+
+In the annotation folder, `ava_[train/val]_v[2.1/2.2].csv` are ground-truth labels for spatio-temporal action detection, which are used during training & validation. Take `ava_train_v2.1.csv` as an example: it is a CSV file with 837318 lines, and each line is the annotation for one human instance in one frame. For example, the first line in `ava_train_v2.1.csv` is `'-5KQ66BBWC4,0902,0.077,0.151,0.283,0.811,80,1'`: the first two items `-5KQ66BBWC4` and `0902` indicate that it corresponds to the $$902_{nd}$$ second in the video `-5KQ66BBWC4`. The next four items ($$[0.077(x_1), 0.151(y_1), 0.283(x_2), 0.811(y_2)]$$) indicate the location of the bounding box; the bbox format is the same as for the human proposals. The next item `80` is the action label. The last item `1` is the ID of this bounding box.
+
+## Excluded timestamps
+
+`ava_[train/val]_excluded_timestamps_v[2.1/2.2].csv` contains excluded timestamps which are not used during training or validation. The format is `video_id, second_idx`.
+
+## Label map
+
+`ava_action_list_v[2.1/2.2]_for_activitynet_[2018/2019].pbtxt` contains the label map of the AVA dataset, which maps each action name to a label index.

From a067e27b3b5e399f054b89cf2cacd2c50274e0d2 Mon Sep 17 00:00:00 2001
From: Haodong Duan
Date: Tue, 17 Aug 2021 13:20:05 +0800
Subject: [PATCH 231/414] [Docs] add copyright (#1099)

---
 demo/demo.py | 1 +
 demo/demo_gradcam.py | 1 +
 demo/demo_posec3d.py | 1 +
 demo/demo_spatiotemporal_det.py | 1 +
 demo/long_video_demo.py | 1 +
 demo/webcam_demo.py | 1 +
 demo/webcam_demo_spatiotemporal_det.py | 1 +
 docs/conf.py | 1 +
 docs/stat.py | 1 +
 docs_zh_CN/conf.py | 1 +
 docs_zh_CN/stat.py | 1 +
 mmaction/__init__.py | 1 +
 mmaction/apis/__init__.py | 1 +
 mmaction/apis/inference.py | 1 +
 mmaction/apis/test.py | 1 +
 mmaction/apis/train.py | 1 +
 mmaction/core/__init__.py | 1 +
 mmaction/core/bbox/__init__.py | 1 +
 mmaction/core/bbox/assigners/__init__.py | 1 +
 mmaction/core/bbox/assigners/max_iou_assigner_ava.py | 1 +
 mmaction/core/bbox/bbox_target.py | 1 +
 mmaction/core/bbox/transforms.py | 1 +
 mmaction/core/evaluation/__init__.py | 1 +
 mmaction/core/evaluation/accuracy.py | 1 +
 mmaction/core/evaluation/ava_evaluation/__init__.py | 1 +
 mmaction/core/evaluation/ava_utils.py | 1 +
 mmaction/core/evaluation/eval_detection.py | 1 +
 mmaction/core/evaluation/eval_hooks.py | 1 +
 mmaction/core/hooks/__init__.py | 1 +
 mmaction/core/hooks/output.py | 1 +
 mmaction/core/optimizer/__init__.py | 1 +
 mmaction/core/optimizer/copy_of_sgd.py | 1 +
 mmaction/core/optimizer/tsm_optimizer_constructor.py | 1 +
 mmaction/core/runner/__init__.py | 1 +
 mmaction/core/scheduler/__init__.py | 1 +
 mmaction/core/scheduler/lr_updater.py | 1 +
 mmaction/datasets/__init__.py | 1 +
 mmaction/datasets/activitynet_dataset.py | 1 +
 mmaction/datasets/audio_dataset.py | 1 +
 mmaction/datasets/audio_feature_dataset.py | 1 +
 mmaction/datasets/audio_visual_dataset.py | 1 +
 mmaction/datasets/ava_dataset.py | 1 +
 mmaction/datasets/base.py | 1 +
 mmaction/datasets/blending_utils.py | 1 +
 mmaction/datasets/builder.py | 1 +
 mmaction/datasets/dataset_wrappers.py | 1 +
 mmaction/datasets/hvu_dataset.py | 1 +
 mmaction/datasets/image_dataset.py | 1 +
 mmaction/datasets/pipelines/__init__.py | 1 +
 mmaction/datasets/pipelines/augmentations.py | 1 +
 mmaction/datasets/pipelines/compose.py | 1 +
 mmaction/datasets/pipelines/formating.py | 1 +
 mmaction/datasets/pipelines/loading.py | 1 +
 mmaction/datasets/pipelines/pose_loading.py | 1 +
mmaction/datasets/pose_dataset.py | 1 + mmaction/datasets/rawframe_dataset.py | 1 + mmaction/datasets/rawvideo_dataset.py | 1 + mmaction/datasets/samplers/__init__.py | 1 + mmaction/datasets/samplers/distributed_sampler.py | 1 + mmaction/datasets/ssn_dataset.py | 1 + mmaction/datasets/video_dataset.py | 1 + mmaction/localization/__init__.py | 1 + mmaction/localization/bsn_utils.py | 1 + mmaction/localization/proposal_utils.py | 1 + mmaction/localization/ssn_utils.py | 1 + mmaction/models/__init__.py | 1 + mmaction/models/backbones/__init__.py | 1 + mmaction/models/backbones/c3d.py | 1 + mmaction/models/backbones/mobilenet_v2.py | 1 + mmaction/models/backbones/mobilenet_v2_tsm.py | 1 + mmaction/models/backbones/resnet.py | 1 + mmaction/models/backbones/resnet2plus1d.py | 1 + mmaction/models/backbones/resnet3d.py | 1 + mmaction/models/backbones/resnet3d_csn.py | 1 + mmaction/models/backbones/resnet3d_slowfast.py | 1 + mmaction/models/backbones/resnet3d_slowonly.py | 1 + mmaction/models/backbones/resnet_audio.py | 1 + mmaction/models/backbones/resnet_tin.py | 1 + mmaction/models/backbones/resnet_tsm.py | 1 + mmaction/models/backbones/tanet.py | 1 + mmaction/models/backbones/timesformer.py | 1 + mmaction/models/backbones/x3d.py | 1 + mmaction/models/builder.py | 1 + mmaction/models/common/__init__.py | 1 + mmaction/models/common/conv2plus1d.py | 1 + mmaction/models/common/conv_audio.py | 1 + mmaction/models/common/lfb.py | 1 + mmaction/models/common/tam.py | 1 + mmaction/models/common/transformer.py | 1 + mmaction/models/heads/__init__.py | 1 + mmaction/models/heads/audio_tsn_head.py | 1 + mmaction/models/heads/base.py | 1 + mmaction/models/heads/bbox_head.py | 1 + mmaction/models/heads/fbo_head.py | 1 + mmaction/models/heads/i3d_head.py | 1 + mmaction/models/heads/lfb_infer_head.py | 1 + mmaction/models/heads/misc_head.py | 1 + mmaction/models/heads/roi_head.py | 1 + mmaction/models/heads/slowfast_head.py | 1 + mmaction/models/heads/ssn_head.py | 1 + mmaction/models/heads/timesformer_head.py | 1 + mmaction/models/heads/tpn_head.py | 1 + mmaction/models/heads/trn_head.py | 1 + mmaction/models/heads/tsm_head.py | 1 + mmaction/models/heads/tsn_head.py | 1 + mmaction/models/heads/x3d_head.py | 1 + mmaction/models/localizers/__init__.py | 1 + mmaction/models/localizers/base.py | 1 + mmaction/models/localizers/bmn.py | 1 + mmaction/models/localizers/bsn.py | 1 + mmaction/models/localizers/ssn.py | 1 + mmaction/models/localizers/utils/__init__.py | 1 + mmaction/models/localizers/utils/post_processing.py | 1 + mmaction/models/losses/__init__.py | 1 + mmaction/models/losses/base.py | 1 + mmaction/models/losses/binary_logistic_regression_loss.py | 1 + mmaction/models/losses/bmn_loss.py | 1 + mmaction/models/losses/cross_entropy_loss.py | 1 + mmaction/models/losses/hvu_loss.py | 1 + mmaction/models/losses/nll_loss.py | 1 + mmaction/models/losses/ohem_hinge_loss.py | 1 + mmaction/models/losses/ssn_loss.py | 1 + mmaction/models/necks/__init__.py | 1 + mmaction/models/necks/tpn.py | 1 + mmaction/models/recognizers/__init__.py | 1 + mmaction/models/recognizers/audio_recognizer.py | 1 + mmaction/models/recognizers/base.py | 1 + mmaction/models/recognizers/recognizer2d.py | 1 + mmaction/models/recognizers/recognizer3d.py | 1 + mmaction/models/roi_extractors/__init__.py | 1 + mmaction/models/roi_extractors/single_straight3d.py | 1 + mmaction/utils/__init__.py | 1 + mmaction/utils/collect_env.py | 1 + mmaction/utils/decorators.py | 1 + mmaction/utils/gradcam_utils.py | 1 + mmaction/utils/logger.py | 1 + 
mmaction/utils/misc.py | 1 + mmaction/utils/module_hooks.py | 1 + setup.py | 1 + tests/test_data/test_blending.py | 1 + tests/test_data/test_compose.py | 1 + tests/test_data/test_datasets/__init__.py | 1 + tests/test_data/test_datasets/base.py | 1 + tests/test_data/test_datasets/test_activitynet_dataset.py | 1 + tests/test_data/test_datasets/test_audio_dataset.py | 1 + tests/test_data/test_datasets/test_audio_feature_dataset.py | 1 + tests/test_data/test_datasets/test_audio_visual_dataset.py | 1 + tests/test_data/test_datasets/test_ava_dataset.py | 1 + tests/test_data/test_datasets/test_concat_dataset.py | 1 + tests/test_data/test_datasets/test_hvu_dataset.py | 1 + tests/test_data/test_datasets/test_pose_dataset.py | 1 + tests/test_data/test_datasets/test_rawframe_dataset.py | 1 + tests/test_data/test_datasets/test_rawvideo_dataset.py | 1 + tests/test_data/test_datasets/test_repeat_dataset.py | 1 + tests/test_data/test_datasets/test_ssn_dataset.py | 1 + tests/test_data/test_datasets/test_video_dataset.py | 1 + tests/test_data/test_formating.py | 1 + tests/test_data/test_pipelines/test_augmentations/__init__.py | 1 + tests/test_data/test_pipelines/test_augmentations/base.py | 1 + tests/test_data/test_pipelines/test_augmentations/test_audio.py | 1 + tests/test_data/test_pipelines/test_augmentations/test_color.py | 1 + tests/test_data/test_pipelines/test_augmentations/test_crop.py | 1 + tests/test_data/test_pipelines/test_augmentations/test_flip.py | 1 + tests/test_data/test_pipelines/test_augmentations/test_imgaug.py | 1 + tests/test_data/test_pipelines/test_augmentations/test_lazy.py | 1 + tests/test_data/test_pipelines/test_augmentations/test_misc.py | 1 + .../test_pipelines/test_augmentations/test_normalization.py | 1 + .../test_pipelines/test_augmentations/test_pytorchvideo.py | 1 + .../test_pipelines/test_augmentations/test_transform.py | 1 + tests/test_data/test_pipelines/test_loadings/__init__.py | 1 + tests/test_data/test_pipelines/test_loadings/base.py | 1 + tests/test_data/test_pipelines/test_loadings/test_decode.py | 1 + tests/test_data/test_pipelines/test_loadings/test_load.py | 1 + .../test_data/test_pipelines/test_loadings/test_localization.py | 1 + .../test_data/test_pipelines/test_loadings/test_pose_loading.py | 1 + tests/test_data/test_pipelines/test_loadings/test_sampling.py | 1 + tests/test_data/test_sampler.py | 1 + tests/test_metrics/test_accuracy.py | 1 + tests/test_metrics/test_losses.py | 1 + tests/test_models/__init__.py | 1 + tests/test_models/base.py | 1 + tests/test_models/test_backbones.py | 1 + tests/test_models/test_common.py | 1 + tests/test_models/test_common_modules/__init__.py | 1 + tests/test_models/test_common_modules/test_base_head.py | 1 + tests/test_models/test_common_modules/test_base_recognizers.py | 1 + tests/test_models/test_common_modules/test_mobilenet_v2.py | 1 + tests/test_models/test_common_modules/test_resnet.py | 1 + tests/test_models/test_common_modules/test_resnet3d.py | 1 + tests/test_models/test_detectors/__init__.py | 1 + tests/test_models/test_detectors/test_detectors.py | 1 + tests/test_models/test_gradcam.py | 1 + tests/test_models/test_head.py | 1 + tests/test_models/test_localizers/__init__.py | 1 + tests/test_models/test_localizers/test_bmn.py | 1 + tests/test_models/test_localizers/test_localizers.py | 1 + tests/test_models/test_localizers/test_pem.py | 1 + tests/test_models/test_localizers/test_ssn.py | 1 + tests/test_models/test_localizers/test_tem.py | 1 + tests/test_models/test_neck.py | 1 + 
tests/test_models/test_recognizers/__init__.py | 1 + tests/test_models/test_recognizers/test_audio_recognizer.py | 1 + tests/test_models/test_recognizers/test_recognizer2d.py | 1 + tests/test_models/test_recognizers/test_recognizer3d.py | 1 + tests/test_models/test_roi_extractor.py | 1 + tests/test_runtime/test_apis_test.py | 1 + tests/test_runtime/test_config.py | 1 + tests/test_runtime/test_eval_hook.py | 1 + tests/test_runtime/test_inference.py | 1 + tests/test_runtime/test_lr.py | 1 + tests/test_runtime/test_optimizer.py | 1 + tests/test_runtime/test_precise_bn.py | 1 + tests/test_runtime/test_train.py | 1 + tests/test_utils/__init__.py | 1 + tests/test_utils/test_bbox.py | 1 + tests/test_utils/test_decorator.py | 1 + tests/test_utils/test_localization_utils.py | 1 + tests/test_utils/test_module_hooks.py | 1 + tests/test_utils/test_onnx.py | 1 + tools/__init__.py | 1 + tools/analysis/analyze_logs.py | 1 + tools/analysis/bench_processing.py | 1 + tools/analysis/benchmark.py | 1 + tools/analysis/check_videos.py | 1 + tools/analysis/eval_metric.py | 1 + tools/analysis/get_flops.py | 1 + tools/analysis/print_config.py | 1 + tools/analysis/report_accuracy.py | 1 + tools/analysis/report_map.py | 1 + tools/data/activitynet/activitynet_feature_postprocessing.py | 1 + tools/data/activitynet/convert_proposal_format.py | 1 + tools/data/activitynet/download.py | 1 + tools/data/activitynet/generate_rawframes_filelist.py | 1 + tools/data/activitynet/process_annotations.py | 1 + tools/data/activitynet/tsn_feature_extraction.py | 1 + tools/data/anno_txt2json.py | 1 + tools/data/ava/download_videos_parallel.py | 1 + tools/data/build_audio_features.py | 1 + tools/data/build_file_list.py | 1 + tools/data/build_rawframes.py | 1 + tools/data/build_videos.py | 1 + tools/data/denormalize_proposal_file.py | 1 + tools/data/extract_audio.py | 1 + tools/data/gym/download.py | 1 + tools/data/gym/generate_file_list.py | 1 + tools/data/gym/trim_event.py | 1 + tools/data/gym/trim_subaction.py | 1 + tools/data/hvu/generate_file_list.py | 1 + tools/data/hvu/generate_sub_file_list.py | 1 + tools/data/hvu/parse_tag_list.py | 1 + tools/data/omnisource/trim_raw_video.py | 1 + tools/data/parse_file_list.py | 1 + tools/data/resize_videos.py | 1 + tools/data/skeleton/ntu_pose_extraction.py | 1 + tools/deployment/publish_model.py | 1 + tools/deployment/pytorch2onnx.py | 1 + tools/misc/bsn_proposal_generation.py | 1 + tools/misc/clip_feature_extraction.py | 1 + tools/misc/flow_extraction.py | 1 + tools/test.py | 1 + tools/train.py | 1 + 261 files changed, 261 insertions(+) diff --git a/demo/demo.py b/demo/demo.py index e470502875..1b299d32fe 100644 --- a/demo/demo.py +++ b/demo/demo.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import os import os.path as osp diff --git a/demo/demo_gradcam.py b/demo/demo_gradcam.py index 0b68cb8071..6c8631f4a3 100644 --- a/demo/demo_gradcam.py +++ b/demo/demo_gradcam.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import os import os.path as osp diff --git a/demo/demo_posec3d.py b/demo/demo_posec3d.py index 03b79ce2d5..b5d3f7cfd9 100644 --- a/demo/demo_posec3d.py +++ b/demo/demo_posec3d.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
import argparse import os import os.path as osp diff --git a/demo/demo_spatiotemporal_det.py b/demo/demo_spatiotemporal_det.py index 7d188e13e2..15f1b6fb46 100644 --- a/demo/demo_spatiotemporal_det.py +++ b/demo/demo_spatiotemporal_det.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import copy as cp import os diff --git a/demo/long_video_demo.py b/demo/long_video_demo.py index 0fab771005..45df202dbb 100644 --- a/demo/long_video_demo.py +++ b/demo/long_video_demo.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import json import random diff --git a/demo/webcam_demo.py b/demo/webcam_demo.py index 4e1bb0a729..575a503a9c 100644 --- a/demo/webcam_demo.py +++ b/demo/webcam_demo.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import time from collections import deque diff --git a/demo/webcam_demo_spatiotemporal_det.py b/demo/webcam_demo_spatiotemporal_det.py index b5e407cffd..3245af2d2e 100644 --- a/demo/webcam_demo_spatiotemporal_det.py +++ b/demo/webcam_demo_spatiotemporal_det.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. """Webcam Spatio-Temporal Action Detection Demo. Some codes are based on https://github.com/facebookresearch/SlowFast diff --git a/docs/conf.py b/docs/conf.py index 00802c1661..1bf9f44d53 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. # Configuration file for the Sphinx documentation builder. # # This file only contains a selection of the most common options. For a full diff --git a/docs/stat.py b/docs/stat.py index e0445737b9..cc5c8f4fcb 100755 --- a/docs/stat.py +++ b/docs/stat.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +# Copyright (c) OpenMMLab. All rights reserved. import functools as func import glob import re diff --git a/docs_zh_CN/conf.py b/docs_zh_CN/conf.py index 40f17d03ef..8338f28df2 100644 --- a/docs_zh_CN/conf.py +++ b/docs_zh_CN/conf.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. # Configuration file for the Sphinx documentation builder. # # This file only contains a selection of the most common options. For a full diff --git a/docs_zh_CN/stat.py b/docs_zh_CN/stat.py index 8a6cdcffe7..ca37f56c22 100755 --- a/docs_zh_CN/stat.py +++ b/docs_zh_CN/stat.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +# Copyright (c) OpenMMLab. All rights reserved. import functools as func import glob import re diff --git a/mmaction/__init__.py b/mmaction/__init__.py index c949258a06..afd7d5a336 100644 --- a/mmaction/__init__.py +++ b/mmaction/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import mmcv from mmcv import digit_version diff --git a/mmaction/apis/__init__.py b/mmaction/apis/__init__.py index f720d75421..8a68055e70 100644 --- a/mmaction/apis/__init__.py +++ b/mmaction/apis/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .inference import inference_recognizer, init_recognizer from .test import multi_gpu_test, single_gpu_test from .train import train_model diff --git a/mmaction/apis/inference.py b/mmaction/apis/inference.py index e31685b7f5..1c721d78eb 100644 --- a/mmaction/apis/inference.py +++ b/mmaction/apis/inference.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
import os import os.path as osp import re diff --git a/mmaction/apis/test.py b/mmaction/apis/test.py index fea43cde65..fa6291cdc0 100644 --- a/mmaction/apis/test.py +++ b/mmaction/apis/test.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os.path as osp import pickle import shutil diff --git a/mmaction/apis/train.py b/mmaction/apis/train.py index 4c1b62d988..0154bf85d8 100644 --- a/mmaction/apis/train.py +++ b/mmaction/apis/train.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import copy as cp import os.path as osp diff --git a/mmaction/core/__init__.py b/mmaction/core/__init__.py index f5f617cdf2..a86055476a 100644 --- a/mmaction/core/__init__.py +++ b/mmaction/core/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .bbox import * # noqa: F401, F403 from .evaluation import * # noqa: F401, F403 from .hooks import * # noqa: F401, F403 diff --git a/mmaction/core/bbox/__init__.py b/mmaction/core/bbox/__init__.py index 594c485a71..27d8fe0532 100644 --- a/mmaction/core/bbox/__init__.py +++ b/mmaction/core/bbox/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .assigners import MaxIoUAssignerAVA from .bbox_target import bbox_target from .transforms import bbox2result diff --git a/mmaction/core/bbox/assigners/__init__.py b/mmaction/core/bbox/assigners/__init__.py index bd62c1cbf2..0e9112412b 100644 --- a/mmaction/core/bbox/assigners/__init__.py +++ b/mmaction/core/bbox/assigners/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .max_iou_assigner_ava import MaxIoUAssignerAVA __all__ = ['MaxIoUAssignerAVA'] diff --git a/mmaction/core/bbox/assigners/max_iou_assigner_ava.py b/mmaction/core/bbox/assigners/max_iou_assigner_ava.py index ab9bff874f..d347282369 100644 --- a/mmaction/core/bbox/assigners/max_iou_assigner_ava.py +++ b/mmaction/core/bbox/assigners/max_iou_assigner_ava.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch from mmaction.utils import import_module_error_class diff --git a/mmaction/core/bbox/bbox_target.py b/mmaction/core/bbox/bbox_target.py index 958fe80215..2d9f099e1b 100644 --- a/mmaction/core/bbox/bbox_target.py +++ b/mmaction/core/bbox/bbox_target.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch import torch.nn.functional as F diff --git a/mmaction/core/bbox/transforms.py b/mmaction/core/bbox/transforms.py index b051e2275e..6d9bb4eb30 100644 --- a/mmaction/core/bbox/transforms.py +++ b/mmaction/core/bbox/transforms.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import numpy as np diff --git a/mmaction/core/evaluation/__init__.py b/mmaction/core/evaluation/__init__.py index a4694017cc..354d525c74 100644 --- a/mmaction/core/evaluation/__init__.py +++ b/mmaction/core/evaluation/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .accuracy import (average_precision_at_temporal_iou, average_recall_at_avg_proposals, confusion_matrix, get_weighted_score, interpolated_precision_recall, diff --git a/mmaction/core/evaluation/accuracy.py b/mmaction/core/evaluation/accuracy.py index 06a38de241..a125a22c5c 100644 --- a/mmaction/core/evaluation/accuracy.py +++ b/mmaction/core/evaluation/accuracy.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
import numpy as np diff --git a/mmaction/core/evaluation/ava_evaluation/__init__.py b/mmaction/core/evaluation/ava_evaluation/__init__.py index e69de29bb2..ef101fec61 100644 --- a/mmaction/core/evaluation/ava_evaluation/__init__.py +++ b/mmaction/core/evaluation/ava_evaluation/__init__.py @@ -0,0 +1 @@ +# Copyright (c) OpenMMLab. All rights reserved. diff --git a/mmaction/core/evaluation/ava_utils.py b/mmaction/core/evaluation/ava_utils.py index 159297fb7d..e80708d9a5 100644 --- a/mmaction/core/evaluation/ava_utils.py +++ b/mmaction/core/evaluation/ava_utils.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import csv import logging import time diff --git a/mmaction/core/evaluation/eval_detection.py b/mmaction/core/evaluation/eval_detection.py index 3dfd8cd94a..604ba4fb7e 100644 --- a/mmaction/core/evaluation/eval_detection.py +++ b/mmaction/core/evaluation/eval_detection.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import json import numpy as np diff --git a/mmaction/core/evaluation/eval_hooks.py b/mmaction/core/evaluation/eval_hooks.py index 9ef5a8ad34..c870054e98 100644 --- a/mmaction/core/evaluation/eval_hooks.py +++ b/mmaction/core/evaluation/eval_hooks.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os import os.path as osp import warnings diff --git a/mmaction/core/hooks/__init__.py b/mmaction/core/hooks/__init__.py index 405aa4e130..42ce6c6c0e 100644 --- a/mmaction/core/hooks/__init__.py +++ b/mmaction/core/hooks/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .output import OutputHook __all__ = ['OutputHook'] diff --git a/mmaction/core/hooks/output.py b/mmaction/core/hooks/output.py index 9a87644cdb..fb30bebaac 100644 --- a/mmaction/core/hooks/output.py +++ b/mmaction/core/hooks/output.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import functools import warnings diff --git a/mmaction/core/optimizer/__init__.py b/mmaction/core/optimizer/__init__.py index 40a70dd3b5..9b96eb660f 100644 --- a/mmaction/core/optimizer/__init__.py +++ b/mmaction/core/optimizer/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .copy_of_sgd import CopyOfSGD from .tsm_optimizer_constructor import TSMOptimizerConstructor diff --git a/mmaction/core/optimizer/copy_of_sgd.py b/mmaction/core/optimizer/copy_of_sgd.py index 9dd3d27ff8..daec4851db 100644 --- a/mmaction/core/optimizer/copy_of_sgd.py +++ b/mmaction/core/optimizer/copy_of_sgd.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from mmcv.runner import OPTIMIZERS from torch.optim import SGD diff --git a/mmaction/core/optimizer/tsm_optimizer_constructor.py b/mmaction/core/optimizer/tsm_optimizer_constructor.py index be819ea4aa..340e37bcbb 100644 --- a/mmaction/core/optimizer/tsm_optimizer_constructor.py +++ b/mmaction/core/optimizer/tsm_optimizer_constructor.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch from mmcv.runner import OPTIMIZER_BUILDERS, DefaultOptimizerConstructor from mmcv.utils import SyncBatchNorm, _BatchNorm, _ConvNd diff --git a/mmaction/core/runner/__init__.py b/mmaction/core/runner/__init__.py index 3cd541b3b3..c870e1da44 100644 --- a/mmaction/core/runner/__init__.py +++ b/mmaction/core/runner/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
from .omnisource_runner import OmniSourceDistSamplerSeedHook, OmniSourceRunner __all__ = ['OmniSourceRunner', 'OmniSourceDistSamplerSeedHook'] diff --git a/mmaction/core/scheduler/__init__.py b/mmaction/core/scheduler/__init__.py index 9ac369faf8..55757c4357 100644 --- a/mmaction/core/scheduler/__init__.py +++ b/mmaction/core/scheduler/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .lr_updater import TINLrUpdaterHook __all__ = ['TINLrUpdaterHook'] diff --git a/mmaction/core/scheduler/lr_updater.py b/mmaction/core/scheduler/lr_updater.py index 215694de3c..a36f2bb70d 100644 --- a/mmaction/core/scheduler/lr_updater.py +++ b/mmaction/core/scheduler/lr_updater.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from mmcv.runner import HOOKS, LrUpdaterHook from mmcv.runner.hooks.lr_updater import annealing_cos diff --git a/mmaction/datasets/__init__.py b/mmaction/datasets/__init__.py index ecd1be57c0..2c2bc8966c 100644 --- a/mmaction/datasets/__init__.py +++ b/mmaction/datasets/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .activitynet_dataset import ActivityNetDataset from .audio_dataset import AudioDataset from .audio_feature_dataset import AudioFeatureDataset diff --git a/mmaction/datasets/activitynet_dataset.py b/mmaction/datasets/activitynet_dataset.py index db04a8cfb2..811d059c16 100644 --- a/mmaction/datasets/activitynet_dataset.py +++ b/mmaction/datasets/activitynet_dataset.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import copy import os import os.path as osp diff --git a/mmaction/datasets/audio_dataset.py b/mmaction/datasets/audio_dataset.py index 041695e9b2..df19b1806a 100644 --- a/mmaction/datasets/audio_dataset.py +++ b/mmaction/datasets/audio_dataset.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os.path as osp import torch diff --git a/mmaction/datasets/audio_feature_dataset.py b/mmaction/datasets/audio_feature_dataset.py index 89e7c06e3d..eaa54642f0 100644 --- a/mmaction/datasets/audio_feature_dataset.py +++ b/mmaction/datasets/audio_feature_dataset.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os.path as osp import torch diff --git a/mmaction/datasets/audio_visual_dataset.py b/mmaction/datasets/audio_visual_dataset.py index e3d5fabfbf..15a31240d8 100644 --- a/mmaction/datasets/audio_visual_dataset.py +++ b/mmaction/datasets/audio_visual_dataset.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os.path as osp from .builder import DATASETS diff --git a/mmaction/datasets/ava_dataset.py b/mmaction/datasets/ava_dataset.py index 4a314cd96f..b071698e34 100644 --- a/mmaction/datasets/ava_dataset.py +++ b/mmaction/datasets/ava_dataset.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import copy import os import os.path as osp diff --git a/mmaction/datasets/base.py b/mmaction/datasets/base.py index 189354305b..8d2589ca12 100644 --- a/mmaction/datasets/base.py +++ b/mmaction/datasets/base.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import copy import os.path as osp import warnings diff --git a/mmaction/datasets/blending_utils.py b/mmaction/datasets/blending_utils.py index 8ef35b0e73..bd8ded3674 100644 --- a/mmaction/datasets/blending_utils.py +++ b/mmaction/datasets/blending_utils.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
from abc import ABCMeta, abstractmethod import torch diff --git a/mmaction/datasets/builder.py b/mmaction/datasets/builder.py index 5fac34b690..821123f1cc 100644 --- a/mmaction/datasets/builder.py +++ b/mmaction/datasets/builder.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import platform import random from functools import partial diff --git a/mmaction/datasets/dataset_wrappers.py b/mmaction/datasets/dataset_wrappers.py index dd00b9f32d..7868e40709 100644 --- a/mmaction/datasets/dataset_wrappers.py +++ b/mmaction/datasets/dataset_wrappers.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import numpy as np from .builder import DATASETS, build_dataset diff --git a/mmaction/datasets/hvu_dataset.py b/mmaction/datasets/hvu_dataset.py index 59a4c855a8..7049944a21 100644 --- a/mmaction/datasets/hvu_dataset.py +++ b/mmaction/datasets/hvu_dataset.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import copy import os.path as osp from collections import OrderedDict diff --git a/mmaction/datasets/image_dataset.py b/mmaction/datasets/image_dataset.py index a359277264..6d84b35f85 100644 --- a/mmaction/datasets/image_dataset.py +++ b/mmaction/datasets/image_dataset.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .builder import DATASETS from .video_dataset import VideoDataset diff --git a/mmaction/datasets/pipelines/__init__.py b/mmaction/datasets/pipelines/__init__.py index 72fe6afeca..c942084cbb 100644 --- a/mmaction/datasets/pipelines/__init__.py +++ b/mmaction/datasets/pipelines/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .augmentations import (AudioAmplify, CenterCrop, ColorJitter, Flip, Fuse, Imgaug, MelSpectrogram, MultiGroupCrop, MultiScaleCrop, Normalize, PytorchVideoTrans, diff --git a/mmaction/datasets/pipelines/augmentations.py b/mmaction/datasets/pipelines/augmentations.py index c134eb678b..6172afadec 100644 --- a/mmaction/datasets/pipelines/augmentations.py +++ b/mmaction/datasets/pipelines/augmentations.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import random import warnings from collections.abc import Sequence diff --git a/mmaction/datasets/pipelines/compose.py b/mmaction/datasets/pipelines/compose.py index eb5a79bd11..61fc5c5645 100644 --- a/mmaction/datasets/pipelines/compose.py +++ b/mmaction/datasets/pipelines/compose.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from collections.abc import Sequence from mmcv.utils import build_from_cfg diff --git a/mmaction/datasets/pipelines/formating.py b/mmaction/datasets/pipelines/formating.py index 045d54fbd0..b12978a7e0 100644 --- a/mmaction/datasets/pipelines/formating.py +++ b/mmaction/datasets/pipelines/formating.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from collections.abc import Sequence import mmcv diff --git a/mmaction/datasets/pipelines/loading.py b/mmaction/datasets/pipelines/loading.py index 1f212ee09d..e5b08bd812 100644 --- a/mmaction/datasets/pipelines/loading.py +++ b/mmaction/datasets/pipelines/loading.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import copy as cp import io import os diff --git a/mmaction/datasets/pipelines/pose_loading.py b/mmaction/datasets/pipelines/pose_loading.py index ae198d42ed..9fac43fceb 100644 --- a/mmaction/datasets/pipelines/pose_loading.py +++ b/mmaction/datasets/pipelines/pose_loading.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
import copy as cp import pickle diff --git a/mmaction/datasets/pose_dataset.py b/mmaction/datasets/pose_dataset.py index a3c12dfc05..b9fb509090 100644 --- a/mmaction/datasets/pose_dataset.py +++ b/mmaction/datasets/pose_dataset.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os.path as osp import mmcv diff --git a/mmaction/datasets/rawframe_dataset.py b/mmaction/datasets/rawframe_dataset.py index 5bcf678cfe..060fca83e5 100644 --- a/mmaction/datasets/rawframe_dataset.py +++ b/mmaction/datasets/rawframe_dataset.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import copy import os.path as osp diff --git a/mmaction/datasets/rawvideo_dataset.py b/mmaction/datasets/rawvideo_dataset.py index ada7b4aa66..7199f1dff1 100644 --- a/mmaction/datasets/rawvideo_dataset.py +++ b/mmaction/datasets/rawvideo_dataset.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import copy import os.path as osp import random diff --git a/mmaction/datasets/samplers/__init__.py b/mmaction/datasets/samplers/__init__.py index 72cb2f686f..00dfae83de 100644 --- a/mmaction/datasets/samplers/__init__.py +++ b/mmaction/datasets/samplers/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .distributed_sampler import (ClassSpecificDistributedSampler, DistributedSampler) diff --git a/mmaction/datasets/samplers/distributed_sampler.py b/mmaction/datasets/samplers/distributed_sampler.py index aefb6eaf2e..15b2c9d248 100644 --- a/mmaction/datasets/samplers/distributed_sampler.py +++ b/mmaction/datasets/samplers/distributed_sampler.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import math from collections import defaultdict diff --git a/mmaction/datasets/ssn_dataset.py b/mmaction/datasets/ssn_dataset.py index 8a7f1dd0d2..cd0f0b908f 100644 --- a/mmaction/datasets/ssn_dataset.py +++ b/mmaction/datasets/ssn_dataset.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import copy import os.path as osp import warnings diff --git a/mmaction/datasets/video_dataset.py b/mmaction/datasets/video_dataset.py index 7c1b681b99..21c47808b6 100644 --- a/mmaction/datasets/video_dataset.py +++ b/mmaction/datasets/video_dataset.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os.path as osp from .base import BaseDataset diff --git a/mmaction/localization/__init__.py b/mmaction/localization/__init__.py index 2a0fd9e1ae..64ebdaab9f 100644 --- a/mmaction/localization/__init__.py +++ b/mmaction/localization/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .bsn_utils import generate_bsp_feature, generate_candidate_proposals from .proposal_utils import soft_nms, temporal_iop, temporal_iou from .ssn_utils import (eval_ap, load_localize_proposal_file, diff --git a/mmaction/localization/bsn_utils.py b/mmaction/localization/bsn_utils.py index d748227df1..6938fb8eab 100644 --- a/mmaction/localization/bsn_utils.py +++ b/mmaction/localization/bsn_utils.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os.path as osp import numpy as np diff --git a/mmaction/localization/proposal_utils.py b/mmaction/localization/proposal_utils.py index 0cf0111fcf..a3e2f4cf0d 100644 --- a/mmaction/localization/proposal_utils.py +++ b/mmaction/localization/proposal_utils.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
import numpy as np diff --git a/mmaction/localization/ssn_utils.py b/mmaction/localization/ssn_utils.py index 0c1f528d18..51f434b0ff 100644 --- a/mmaction/localization/ssn_utils.py +++ b/mmaction/localization/ssn_utils.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from itertools import groupby import numpy as np diff --git a/mmaction/models/__init__.py b/mmaction/models/__init__.py index 2ecdcb9619..5aa3ccca1d 100644 --- a/mmaction/models/__init__.py +++ b/mmaction/models/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .backbones import (C3D, X3D, MobileNetV2, MobileNetV2TSM, ResNet, ResNet2Plus1d, ResNet3d, ResNet3dCSN, ResNet3dLayer, ResNet3dSlowFast, ResNet3dSlowOnly, ResNetAudio, diff --git a/mmaction/models/backbones/__init__.py b/mmaction/models/backbones/__init__.py index 53704dd404..7262bb86db 100644 --- a/mmaction/models/backbones/__init__.py +++ b/mmaction/models/backbones/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .c3d import C3D from .mobilenet_v2 import MobileNetV2 from .mobilenet_v2_tsm import MobileNetV2TSM diff --git a/mmaction/models/backbones/c3d.py b/mmaction/models/backbones/c3d.py index cfb203988a..5221314d0c 100644 --- a/mmaction/models/backbones/c3d.py +++ b/mmaction/models/backbones/c3d.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch.nn as nn from mmcv.cnn import ConvModule, constant_init, kaiming_init, normal_init from mmcv.runner import load_checkpoint diff --git a/mmaction/models/backbones/mobilenet_v2.py b/mmaction/models/backbones/mobilenet_v2.py index 5dce73502b..d86a80149c 100644 --- a/mmaction/models/backbones/mobilenet_v2.py +++ b/mmaction/models/backbones/mobilenet_v2.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch.nn as nn import torch.utils.checkpoint as cp from mmcv.cnn import ConvModule, constant_init, kaiming_init diff --git a/mmaction/models/backbones/mobilenet_v2_tsm.py b/mmaction/models/backbones/mobilenet_v2_tsm.py index af9f9d5e18..a7050e559d 100644 --- a/mmaction/models/backbones/mobilenet_v2_tsm.py +++ b/mmaction/models/backbones/mobilenet_v2_tsm.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from ..builder import BACKBONES from .mobilenet_v2 import InvertedResidual, MobileNetV2 from .resnet_tsm import TemporalShift diff --git a/mmaction/models/backbones/resnet.py b/mmaction/models/backbones/resnet.py index 5004e0f4c1..d8f697a001 100644 --- a/mmaction/models/backbones/resnet.py +++ b/mmaction/models/backbones/resnet.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch.nn as nn from mmcv.cnn import ConvModule, constant_init, kaiming_init from mmcv.runner import _load_checkpoint, load_checkpoint diff --git a/mmaction/models/backbones/resnet2plus1d.py b/mmaction/models/backbones/resnet2plus1d.py index 4329ba404c..1055343b0c 100644 --- a/mmaction/models/backbones/resnet2plus1d.py +++ b/mmaction/models/backbones/resnet2plus1d.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from ..builder import BACKBONES from .resnet3d import ResNet3d diff --git a/mmaction/models/backbones/resnet3d.py b/mmaction/models/backbones/resnet3d.py index b8d8bfbb06..68da5a43ef 100644 --- a/mmaction/models/backbones/resnet3d.py +++ b/mmaction/models/backbones/resnet3d.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
import warnings import torch.nn as nn diff --git a/mmaction/models/backbones/resnet3d_csn.py b/mmaction/models/backbones/resnet3d_csn.py index aa190a2888..8b7a5feebc 100644 --- a/mmaction/models/backbones/resnet3d_csn.py +++ b/mmaction/models/backbones/resnet3d_csn.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch.nn as nn from mmcv.cnn import ConvModule from mmcv.utils import _BatchNorm diff --git a/mmaction/models/backbones/resnet3d_slowfast.py b/mmaction/models/backbones/resnet3d_slowfast.py index c41c2b6936..aaea09e625 100644 --- a/mmaction/models/backbones/resnet3d_slowfast.py +++ b/mmaction/models/backbones/resnet3d_slowfast.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import warnings import torch diff --git a/mmaction/models/backbones/resnet3d_slowonly.py b/mmaction/models/backbones/resnet3d_slowonly.py index 89275809e2..b983b2a1f9 100644 --- a/mmaction/models/backbones/resnet3d_slowonly.py +++ b/mmaction/models/backbones/resnet3d_slowonly.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from ..builder import BACKBONES from .resnet3d_slowfast import ResNet3dPathway diff --git a/mmaction/models/backbones/resnet_audio.py b/mmaction/models/backbones/resnet_audio.py index 63c0ff0d8a..2245219a60 100644 --- a/mmaction/models/backbones/resnet_audio.py +++ b/mmaction/models/backbones/resnet_audio.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch.nn as nn import torch.utils.checkpoint as cp from mmcv.cnn import ConvModule, constant_init, kaiming_init diff --git a/mmaction/models/backbones/resnet_tin.py b/mmaction/models/backbones/resnet_tin.py index 229c387cc3..dfada1614d 100644 --- a/mmaction/models/backbones/resnet_tin.py +++ b/mmaction/models/backbones/resnet_tin.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch import torch.nn as nn diff --git a/mmaction/models/backbones/resnet_tsm.py b/mmaction/models/backbones/resnet_tsm.py index d1a383b33d..0fbc20ed10 100644 --- a/mmaction/models/backbones/resnet_tsm.py +++ b/mmaction/models/backbones/resnet_tsm.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch import torch.nn as nn from mmcv.cnn import NonLocal3d diff --git a/mmaction/models/backbones/tanet.py b/mmaction/models/backbones/tanet.py index bb446ea23d..8cbaa8fcd9 100644 --- a/mmaction/models/backbones/tanet.py +++ b/mmaction/models/backbones/tanet.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from copy import deepcopy import torch.nn as nn diff --git a/mmaction/models/backbones/timesformer.py b/mmaction/models/backbones/timesformer.py index 52e82a62c3..26a9d7ad68 100644 --- a/mmaction/models/backbones/timesformer.py +++ b/mmaction/models/backbones/timesformer.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import numpy as np import torch import torch.nn as nn diff --git a/mmaction/models/backbones/x3d.py b/mmaction/models/backbones/x3d.py index 4d8e39b641..357af53ae5 100644 --- a/mmaction/models/backbones/x3d.py +++ b/mmaction/models/backbones/x3d.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import math import torch.nn as nn diff --git a/mmaction/models/builder.py b/mmaction/models/builder.py index ef6792fac2..08e06b1bcc 100644 --- a/mmaction/models/builder.py +++ b/mmaction/models/builder.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
import warnings from mmcv.cnn import MODELS as MMCV_MODELS diff --git a/mmaction/models/common/__init__.py b/mmaction/models/common/__init__.py index 5523e6d7ce..7ed60859c5 100644 --- a/mmaction/models/common/__init__.py +++ b/mmaction/models/common/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .conv2plus1d import Conv2plus1d from .conv_audio import ConvAudio from .lfb import LFB diff --git a/mmaction/models/common/conv2plus1d.py b/mmaction/models/common/conv2plus1d.py index 675b0e2261..72965617b2 100644 --- a/mmaction/models/common/conv2plus1d.py +++ b/mmaction/models/common/conv2plus1d.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch.nn as nn from mmcv.cnn import CONV_LAYERS, build_norm_layer, constant_init, kaiming_init from torch.nn.modules.utils import _triple diff --git a/mmaction/models/common/conv_audio.py b/mmaction/models/common/conv_audio.py index 3b8a606753..54f04c9cad 100644 --- a/mmaction/models/common/conv_audio.py +++ b/mmaction/models/common/conv_audio.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch import torch.nn as nn from mmcv.cnn import CONV_LAYERS, ConvModule, constant_init, kaiming_init diff --git a/mmaction/models/common/lfb.py b/mmaction/models/common/lfb.py index f54ae36e31..a80ff1785e 100644 --- a/mmaction/models/common/lfb.py +++ b/mmaction/models/common/lfb.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import io import os.path as osp import warnings diff --git a/mmaction/models/common/tam.py b/mmaction/models/common/tam.py index db15bd8049..5574213de0 100644 --- a/mmaction/models/common/tam.py +++ b/mmaction/models/common/tam.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch.nn as nn import torch.nn.functional as F diff --git a/mmaction/models/common/transformer.py b/mmaction/models/common/transformer.py index 56be980bb5..f7b6796859 100644 --- a/mmaction/models/common/transformer.py +++ b/mmaction/models/common/transformer.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch import torch.nn as nn from einops import rearrange diff --git a/mmaction/models/heads/__init__.py b/mmaction/models/heads/__init__.py index 9040674adf..17c8c38643 100644 --- a/mmaction/models/heads/__init__.py +++ b/mmaction/models/heads/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .audio_tsn_head import AudioTSNHead from .base import BaseHead from .bbox_head import BBoxHeadAVA diff --git a/mmaction/models/heads/audio_tsn_head.py b/mmaction/models/heads/audio_tsn_head.py index 4fc0359216..9f5f35efa8 100644 --- a/mmaction/models/heads/audio_tsn_head.py +++ b/mmaction/models/heads/audio_tsn_head.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch.nn as nn from mmcv.cnn import normal_init diff --git a/mmaction/models/heads/base.py b/mmaction/models/heads/base.py index 7815f6838d..2f6555c191 100644 --- a/mmaction/models/heads/base.py +++ b/mmaction/models/heads/base.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from abc import ABCMeta, abstractmethod import torch diff --git a/mmaction/models/heads/bbox_head.py b/mmaction/models/heads/bbox_head.py index 3f3bfeead0..4d4d2d70c7 100644 --- a/mmaction/models/heads/bbox_head.py +++ b/mmaction/models/heads/bbox_head.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
import torch import torch.nn as nn import torch.nn.functional as F diff --git a/mmaction/models/heads/fbo_head.py b/mmaction/models/heads/fbo_head.py index 7aeec0ca86..95dee4884a 100644 --- a/mmaction/models/heads/fbo_head.py +++ b/mmaction/models/heads/fbo_head.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import copy import torch diff --git a/mmaction/models/heads/i3d_head.py b/mmaction/models/heads/i3d_head.py index f86b978661..a5fe18e526 100644 --- a/mmaction/models/heads/i3d_head.py +++ b/mmaction/models/heads/i3d_head.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch.nn as nn from mmcv.cnn import normal_init diff --git a/mmaction/models/heads/lfb_infer_head.py b/mmaction/models/heads/lfb_infer_head.py index 69bdf8ae2a..6dc7099b5f 100644 --- a/mmaction/models/heads/lfb_infer_head.py +++ b/mmaction/models/heads/lfb_infer_head.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os.path as osp import mmcv diff --git a/mmaction/models/heads/misc_head.py b/mmaction/models/heads/misc_head.py index 66e1b2c3b7..88f9f203da 100644 --- a/mmaction/models/heads/misc_head.py +++ b/mmaction/models/heads/misc_head.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch import torch.nn as nn from mmcv.cnn import ConvModule, constant_init, kaiming_init diff --git a/mmaction/models/heads/roi_head.py b/mmaction/models/heads/roi_head.py index 9c167d46f8..2969d6fb40 100644 --- a/mmaction/models/heads/roi_head.py +++ b/mmaction/models/heads/roi_head.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import numpy as np from mmaction.core.bbox import bbox2result diff --git a/mmaction/models/heads/slowfast_head.py b/mmaction/models/heads/slowfast_head.py index f8cb7d6964..62ff22c0c1 100644 --- a/mmaction/models/heads/slowfast_head.py +++ b/mmaction/models/heads/slowfast_head.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch import torch.nn as nn from mmcv.cnn import normal_init diff --git a/mmaction/models/heads/ssn_head.py b/mmaction/models/heads/ssn_head.py index d51f921ce5..239e349d69 100644 --- a/mmaction/models/heads/ssn_head.py +++ b/mmaction/models/heads/ssn_head.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch import torch.nn as nn from mmcv.cnn import normal_init diff --git a/mmaction/models/heads/timesformer_head.py b/mmaction/models/heads/timesformer_head.py index d8d640d1ce..72ccf562bd 100644 --- a/mmaction/models/heads/timesformer_head.py +++ b/mmaction/models/heads/timesformer_head.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch.nn as nn from mmcv.cnn import trunc_normal_init diff --git a/mmaction/models/heads/tpn_head.py b/mmaction/models/heads/tpn_head.py index 34d476c144..051feaa217 100644 --- a/mmaction/models/heads/tpn_head.py +++ b/mmaction/models/heads/tpn_head.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch.nn as nn from ..builder import HEADS diff --git a/mmaction/models/heads/trn_head.py b/mmaction/models/heads/trn_head.py index dbc080f0d8..7a2a21bb6a 100644 --- a/mmaction/models/heads/trn_head.py +++ b/mmaction/models/heads/trn_head.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
import itertools import numpy as np diff --git a/mmaction/models/heads/tsm_head.py b/mmaction/models/heads/tsm_head.py index 3d6a5f6e00..b181f3db43 100644 --- a/mmaction/models/heads/tsm_head.py +++ b/mmaction/models/heads/tsm_head.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch import torch.nn as nn from mmcv.cnn import normal_init diff --git a/mmaction/models/heads/tsn_head.py b/mmaction/models/heads/tsn_head.py index 998e9b7e61..5b34bfa430 100644 --- a/mmaction/models/heads/tsn_head.py +++ b/mmaction/models/heads/tsn_head.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch.nn as nn from mmcv.cnn import normal_init diff --git a/mmaction/models/heads/x3d_head.py b/mmaction/models/heads/x3d_head.py index 816c45a2b9..4007744ff8 100644 --- a/mmaction/models/heads/x3d_head.py +++ b/mmaction/models/heads/x3d_head.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch.nn as nn from mmcv.cnn import normal_init diff --git a/mmaction/models/localizers/__init__.py b/mmaction/models/localizers/__init__.py index 523d3f20c2..4befe6f0b2 100644 --- a/mmaction/models/localizers/__init__.py +++ b/mmaction/models/localizers/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .base import BaseTAGClassifier, BaseTAPGenerator from .bmn import BMN from .bsn import PEM, TEM diff --git a/mmaction/models/localizers/base.py b/mmaction/models/localizers/base.py index 893678f6bf..65b5c6f304 100644 --- a/mmaction/models/localizers/base.py +++ b/mmaction/models/localizers/base.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import warnings from abc import ABCMeta, abstractmethod from collections import OrderedDict diff --git a/mmaction/models/localizers/bmn.py b/mmaction/models/localizers/bmn.py index cb9bdc4477..420495940c 100644 --- a/mmaction/models/localizers/bmn.py +++ b/mmaction/models/localizers/bmn.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import math import numpy as np diff --git a/mmaction/models/localizers/bsn.py b/mmaction/models/localizers/bsn.py index e65f7ecf8c..1f8e7983cd 100644 --- a/mmaction/models/localizers/bsn.py +++ b/mmaction/models/localizers/bsn.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import numpy as np import torch import torch.nn as nn diff --git a/mmaction/models/localizers/ssn.py b/mmaction/models/localizers/ssn.py index 32c0dedbcc..a92ce1cf68 100644 --- a/mmaction/models/localizers/ssn.py +++ b/mmaction/models/localizers/ssn.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch import torch.nn as nn diff --git a/mmaction/models/localizers/utils/__init__.py b/mmaction/models/localizers/utils/__init__.py index bb2fc92762..13f70f35f5 100644 --- a/mmaction/models/localizers/utils/__init__.py +++ b/mmaction/models/localizers/utils/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .post_processing import post_processing __all__ = ['post_processing'] diff --git a/mmaction/models/localizers/utils/post_processing.py b/mmaction/models/localizers/utils/post_processing.py index 74a6f87c3d..4ac81e2f07 100644 --- a/mmaction/models/localizers/utils/post_processing.py +++ b/mmaction/models/localizers/utils/post_processing.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
from mmaction.localization import soft_nms diff --git a/mmaction/models/losses/__init__.py b/mmaction/models/losses/__init__.py index 1c5f139412..2e94b7b240 100644 --- a/mmaction/models/losses/__init__.py +++ b/mmaction/models/losses/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .base import BaseWeightedLoss from .binary_logistic_regression_loss import BinaryLogisticRegressionLoss from .bmn_loss import BMNLoss diff --git a/mmaction/models/losses/base.py b/mmaction/models/losses/base.py index eb1a43f6ee..9e1df07d7d 100644 --- a/mmaction/models/losses/base.py +++ b/mmaction/models/losses/base.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from abc import ABCMeta, abstractmethod import torch.nn as nn diff --git a/mmaction/models/losses/binary_logistic_regression_loss.py b/mmaction/models/losses/binary_logistic_regression_loss.py index ab23651cfa..74ed294f53 100644 --- a/mmaction/models/losses/binary_logistic_regression_loss.py +++ b/mmaction/models/losses/binary_logistic_regression_loss.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch import torch.nn as nn diff --git a/mmaction/models/losses/bmn_loss.py b/mmaction/models/losses/bmn_loss.py index 9ba312cad9..5641847626 100644 --- a/mmaction/models/losses/bmn_loss.py +++ b/mmaction/models/losses/bmn_loss.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch import torch.nn as nn import torch.nn.functional as F diff --git a/mmaction/models/losses/cross_entropy_loss.py b/mmaction/models/losses/cross_entropy_loss.py index 836b950c66..40072c9718 100644 --- a/mmaction/models/losses/cross_entropy_loss.py +++ b/mmaction/models/losses/cross_entropy_loss.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch import torch.nn.functional as F diff --git a/mmaction/models/losses/hvu_loss.py b/mmaction/models/losses/hvu_loss.py index 9fdbbfd45b..25388b98d5 100644 --- a/mmaction/models/losses/hvu_loss.py +++ b/mmaction/models/losses/hvu_loss.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch import torch.nn.functional as F diff --git a/mmaction/models/losses/nll_loss.py b/mmaction/models/losses/nll_loss.py index 7bd57a50db..754b498ac4 100644 --- a/mmaction/models/losses/nll_loss.py +++ b/mmaction/models/losses/nll_loss.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch.nn.functional as F from ..builder import LOSSES diff --git a/mmaction/models/losses/ohem_hinge_loss.py b/mmaction/models/losses/ohem_hinge_loss.py index 75086b2048..8804a194ee 100644 --- a/mmaction/models/losses/ohem_hinge_loss.py +++ b/mmaction/models/losses/ohem_hinge_loss.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch diff --git a/mmaction/models/losses/ssn_loss.py b/mmaction/models/losses/ssn_loss.py index 030ab3cd0e..02c03e3efa 100644 --- a/mmaction/models/losses/ssn_loss.py +++ b/mmaction/models/losses/ssn_loss.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch import torch.nn as nn import torch.nn.functional as F diff --git a/mmaction/models/necks/__init__.py b/mmaction/models/necks/__init__.py index 76535d5e73..4ffd340960 100644 --- a/mmaction/models/necks/__init__.py +++ b/mmaction/models/necks/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
from .tpn import TPN __all__ = ['TPN'] diff --git a/mmaction/models/necks/tpn.py b/mmaction/models/necks/tpn.py index 7264b3c366..5770ffa98e 100644 --- a/mmaction/models/necks/tpn.py +++ b/mmaction/models/necks/tpn.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import numpy as np import torch import torch.nn as nn diff --git a/mmaction/models/recognizers/__init__.py b/mmaction/models/recognizers/__init__.py index 9d0bccd56f..47c06f879a 100644 --- a/mmaction/models/recognizers/__init__.py +++ b/mmaction/models/recognizers/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .audio_recognizer import AudioRecognizer from .base import BaseRecognizer from .recognizer2d import Recognizer2D diff --git a/mmaction/models/recognizers/audio_recognizer.py b/mmaction/models/recognizers/audio_recognizer.py index b17e44680a..6d5c828207 100644 --- a/mmaction/models/recognizers/audio_recognizer.py +++ b/mmaction/models/recognizers/audio_recognizer.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from ..builder import RECOGNIZERS from .base import BaseRecognizer diff --git a/mmaction/models/recognizers/base.py b/mmaction/models/recognizers/base.py index 41164f3bd2..b9f97f0241 100644 --- a/mmaction/models/recognizers/base.py +++ b/mmaction/models/recognizers/base.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import warnings from abc import ABCMeta, abstractmethod from collections import OrderedDict diff --git a/mmaction/models/recognizers/recognizer2d.py b/mmaction/models/recognizers/recognizer2d.py index 6b4bedba04..7626918f51 100644 --- a/mmaction/models/recognizers/recognizer2d.py +++ b/mmaction/models/recognizers/recognizer2d.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch from torch import nn diff --git a/mmaction/models/recognizers/recognizer3d.py b/mmaction/models/recognizers/recognizer3d.py index 27f25fd715..8133e7c12e 100644 --- a/mmaction/models/recognizers/recognizer3d.py +++ b/mmaction/models/recognizers/recognizer3d.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch from torch import nn diff --git a/mmaction/models/roi_extractors/__init__.py b/mmaction/models/roi_extractors/__init__.py index f62d1a96b7..62d6814196 100644 --- a/mmaction/models/roi_extractors/__init__.py +++ b/mmaction/models/roi_extractors/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .single_straight3d import SingleRoIExtractor3D __all__ = ['SingleRoIExtractor3D'] diff --git a/mmaction/models/roi_extractors/single_straight3d.py b/mmaction/models/roi_extractors/single_straight3d.py index b0ce5266e0..a06f5ec0ed 100644 --- a/mmaction/models/roi_extractors/single_straight3d.py +++ b/mmaction/models/roi_extractors/single_straight3d.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch import torch.nn as nn import torch.nn.functional as F diff --git a/mmaction/utils/__init__.py b/mmaction/utils/__init__.py index 8cb60fcd7a..7987cc8148 100644 --- a/mmaction/utils/__init__.py +++ b/mmaction/utils/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
from .collect_env import collect_env from .decorators import import_module_error_class, import_module_error_func from .gradcam_utils import GradCAM diff --git a/mmaction/utils/collect_env.py b/mmaction/utils/collect_env.py index b2e2a6cdfe..fb8e264095 100644 --- a/mmaction/utils/collect_env.py +++ b/mmaction/utils/collect_env.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from mmcv.utils import collect_env as collect_basic_env from mmcv.utils import get_git_hash diff --git a/mmaction/utils/decorators.py b/mmaction/utils/decorators.py index 727fa61df3..ce923620f2 100644 --- a/mmaction/utils/decorators.py +++ b/mmaction/utils/decorators.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from types import MethodType diff --git a/mmaction/utils/gradcam_utils.py b/mmaction/utils/gradcam_utils.py index d5048427f7..06d0c78b8e 100644 --- a/mmaction/utils/gradcam_utils.py +++ b/mmaction/utils/gradcam_utils.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch import torch.nn.functional as F diff --git a/mmaction/utils/logger.py b/mmaction/utils/logger.py index 7950860220..6b4a3fc0ee 100644 --- a/mmaction/utils/logger.py +++ b/mmaction/utils/logger.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import logging from mmcv.utils import get_logger diff --git a/mmaction/utils/misc.py b/mmaction/utils/misc.py index 5ec9550ee2..cc1efc9598 100644 --- a/mmaction/utils/misc.py +++ b/mmaction/utils/misc.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import ctypes import random import string diff --git a/mmaction/utils/module_hooks.py b/mmaction/utils/module_hooks.py index ef0e5020b4..6ee6227d3c 100644 --- a/mmaction/utils/module_hooks.py +++ b/mmaction/utils/module_hooks.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch from mmcv.utils import Registry, build_from_cfg diff --git a/setup.py b/setup.py index f23072f05f..a1417abdc0 100644 --- a/setup.py +++ b/setup.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os import os.path as osp import shutil diff --git a/tests/test_data/test_blending.py b/tests/test_data/test_blending.py index 53f4e7bcfe..cff88e161e 100644 --- a/tests/test_data/test_blending.py +++ b/tests/test_data/test_blending.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch from mmaction.datasets import CutmixBlending, MixupBlending diff --git a/tests/test_data/test_compose.py b/tests/test_data/test_compose.py index b4a866b6a3..5e782b80e1 100644 --- a/tests/test_data/test_compose.py +++ b/tests/test_data/test_compose.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import numpy as np import pytest from mmcv.utils import assert_keys_equal, digit_version diff --git a/tests/test_data/test_datasets/__init__.py b/tests/test_data/test_datasets/__init__.py index dc0d7fde5b..1a8f940809 100644 --- a/tests/test_data/test_datasets/__init__.py +++ b/tests/test_data/test_datasets/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .base import BaseTestDataset __all__ = ['BaseTestDataset'] diff --git a/tests/test_data/test_datasets/base.py b/tests/test_data/test_datasets/base.py index a8d6cfe001..3b4604c5bb 100644 --- a/tests/test_data/test_datasets/base.py +++ b/tests/test_data/test_datasets/base.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
import os.path as osp from mmcv import ConfigDict diff --git a/tests/test_data/test_datasets/test_activitynet_dataset.py b/tests/test_data/test_datasets/test_activitynet_dataset.py index 0a83b93fae..02ae3fdf96 100644 --- a/tests/test_data/test_datasets/test_activitynet_dataset.py +++ b/tests/test_data/test_datasets/test_activitynet_dataset.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os.path as osp import tempfile diff --git a/tests/test_data/test_datasets/test_audio_dataset.py b/tests/test_data/test_datasets/test_audio_dataset.py index f45c1e8f1f..bb7015223d 100644 --- a/tests/test_data/test_datasets/test_audio_dataset.py +++ b/tests/test_data/test_datasets/test_audio_dataset.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os.path as osp import numpy as np diff --git a/tests/test_data/test_datasets/test_audio_feature_dataset.py b/tests/test_data/test_datasets/test_audio_feature_dataset.py index 6a3946a7ea..ceb4946133 100644 --- a/tests/test_data/test_datasets/test_audio_feature_dataset.py +++ b/tests/test_data/test_datasets/test_audio_feature_dataset.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os.path as osp import numpy as np diff --git a/tests/test_data/test_datasets/test_audio_visual_dataset.py b/tests/test_data/test_datasets/test_audio_visual_dataset.py index 19fcec06b9..34fedabb58 100644 --- a/tests/test_data/test_datasets/test_audio_visual_dataset.py +++ b/tests/test_data/test_datasets/test_audio_visual_dataset.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os.path as osp from mmaction.datasets import AudioVisualDataset diff --git a/tests/test_data/test_datasets/test_ava_dataset.py b/tests/test_data/test_datasets/test_ava_dataset.py index 270b40fa42..aa7babde6d 100644 --- a/tests/test_data/test_datasets/test_ava_dataset.py +++ b/tests/test_data/test_datasets/test_ava_dataset.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os.path as osp import mmcv diff --git a/tests/test_data/test_datasets/test_concat_dataset.py b/tests/test_data/test_datasets/test_concat_dataset.py index 062037eb13..4c9b6ed782 100644 --- a/tests/test_data/test_datasets/test_concat_dataset.py +++ b/tests/test_data/test_datasets/test_concat_dataset.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import numpy as np from mmaction.datasets import ConcatDataset diff --git a/tests/test_data/test_datasets/test_hvu_dataset.py b/tests/test_data/test_datasets/test_hvu_dataset.py index bb6436227c..eb449778da 100644 --- a/tests/test_data/test_datasets/test_hvu_dataset.py +++ b/tests/test_data/test_datasets/test_hvu_dataset.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os.path as osp import numpy as np diff --git a/tests/test_data/test_datasets/test_pose_dataset.py b/tests/test_data/test_datasets/test_pose_dataset.py index 2aa5707b97..3449cc878b 100644 --- a/tests/test_data/test_datasets/test_pose_dataset.py +++ b/tests/test_data/test_datasets/test_pose_dataset.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import numpy as np import pytest diff --git a/tests/test_data/test_datasets/test_rawframe_dataset.py b/tests/test_data/test_datasets/test_rawframe_dataset.py index 2c333f641e..43fbeec100 100644 --- a/tests/test_data/test_datasets/test_rawframe_dataset.py +++ b/tests/test_data/test_datasets/test_rawframe_dataset.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
import os.path as osp import numpy as np diff --git a/tests/test_data/test_datasets/test_rawvideo_dataset.py b/tests/test_data/test_datasets/test_rawvideo_dataset.py index 1ca14ddf4a..86fd4b0c15 100644 --- a/tests/test_data/test_datasets/test_rawvideo_dataset.py +++ b/tests/test_data/test_datasets/test_rawvideo_dataset.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os.path as osp from mmaction.datasets import RawVideoDataset diff --git a/tests/test_data/test_datasets/test_repeat_dataset.py b/tests/test_data/test_datasets/test_repeat_dataset.py index 5aa42ec649..736fcc3998 100644 --- a/tests/test_data/test_datasets/test_repeat_dataset.py +++ b/tests/test_data/test_datasets/test_repeat_dataset.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import numpy as np from mmaction.datasets import RepeatDataset diff --git a/tests/test_data/test_datasets/test_ssn_dataset.py b/tests/test_data/test_datasets/test_ssn_dataset.py index 0ab33e177f..c3d04f9ada 100644 --- a/tests/test_data/test_datasets/test_ssn_dataset.py +++ b/tests/test_data/test_datasets/test_ssn_dataset.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import numpy as np import pytest from mmcv.utils import assert_dict_has_keys diff --git a/tests/test_data/test_datasets/test_video_dataset.py b/tests/test_data/test_datasets/test_video_dataset.py index 20a7c596e0..36d280b3ef 100644 --- a/tests/test_data/test_datasets/test_video_dataset.py +++ b/tests/test_data/test_datasets/test_video_dataset.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os.path as osp import numpy as np diff --git a/tests/test_data/test_formating.py b/tests/test_data/test_formating.py index c840a6c20a..a75c9d49fa 100644 --- a/tests/test_data/test_formating.py +++ b/tests/test_data/test_formating.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import numpy as np import pytest import torch diff --git a/tests/test_data/test_pipelines/test_augmentations/__init__.py b/tests/test_data/test_pipelines/test_augmentations/__init__.py index 7ebd6357e2..949b51e923 100644 --- a/tests/test_data/test_pipelines/test_augmentations/__init__.py +++ b/tests/test_data/test_pipelines/test_augmentations/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .base import check_crop, check_flip, check_normalize __all__ = ['check_crop', 'check_flip', 'check_normalize'] diff --git a/tests/test_data/test_pipelines/test_augmentations/base.py b/tests/test_data/test_pipelines/test_augmentations/base.py index 24c8a9f62f..cc75917bfc 100644 --- a/tests/test_data/test_pipelines/test_augmentations/base.py +++ b/tests/test_data/test_pipelines/test_augmentations/base.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import numpy as np from numpy.testing import assert_array_almost_equal diff --git a/tests/test_data/test_pipelines/test_augmentations/test_audio.py b/tests/test_data/test_pipelines/test_augmentations/test_audio.py index e78b328ffa..cf1a53e14c 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_audio.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_audio.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
import numpy as np import pytest from mmcv.utils import assert_dict_has_keys diff --git a/tests/test_data/test_pipelines/test_augmentations/test_color.py b/tests/test_data/test_pipelines/test_augmentations/test_color.py index 0aa65c65af..ebf849cc16 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_color.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_color.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import numpy as np from mmcv.utils import assert_dict_has_keys diff --git a/tests/test_data/test_pipelines/test_augmentations/test_crop.py b/tests/test_data/test_pipelines/test_augmentations/test_crop.py index 036d10029c..b8d754505b 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_crop.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_crop.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import numpy as np import pytest from mmcv.utils import assert_dict_has_keys diff --git a/tests/test_data/test_pipelines/test_augmentations/test_flip.py b/tests/test_data/test_pipelines/test_augmentations/test_flip.py index b11409c1d8..fd62e13f00 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_flip.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_flip.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import copy import mmcv diff --git a/tests/test_data/test_pipelines/test_augmentations/test_imgaug.py b/tests/test_data/test_pipelines/test_augmentations/test_imgaug.py index 9ef533f1b7..646e0fb813 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_imgaug.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_imgaug.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import numpy as np import pytest from mmcv.utils import assert_dict_has_keys diff --git a/tests/test_data/test_pipelines/test_augmentations/test_lazy.py b/tests/test_data/test_pipelines/test_augmentations/test_lazy.py index 74d3d1e296..34d535c502 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_lazy.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_lazy.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import numpy as np import pytest from mmcv.utils import assert_dict_has_keys diff --git a/tests/test_data/test_pipelines/test_augmentations/test_misc.py b/tests/test_data/test_pipelines/test_augmentations/test_misc.py index 9710624de9..a3ad2c6abc 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_misc.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_misc.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from mmaction.datasets.pipelines.augmentations import (_combine_quadruple, _flip_quadruple) diff --git a/tests/test_data/test_pipelines/test_augmentations/test_normalization.py b/tests/test_data/test_pipelines/test_augmentations/test_normalization.py index f28f7607ce..ee3bb1cee3 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_normalization.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_normalization.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
import numpy as np import pytest from mmcv.utils import assert_dict_has_keys diff --git a/tests/test_data/test_pipelines/test_augmentations/test_pytorchvideo.py b/tests/test_data/test_pipelines/test_augmentations/test_pytorchvideo.py index ce7fbe6c8b..f8dfda0e32 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_pytorchvideo.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_pytorchvideo.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import numpy as np import pytest from mmcv.utils import assert_dict_has_keys, digit_version diff --git a/tests/test_data/test_pipelines/test_augmentations/test_transform.py b/tests/test_data/test_pipelines/test_augmentations/test_transform.py index adb0d94618..67fe310d66 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_transform.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_transform.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import copy import numpy as np diff --git a/tests/test_data/test_pipelines/test_loadings/__init__.py b/tests/test_data/test_pipelines/test_loadings/__init__.py index bd8d6ff56b..fe54e15c46 100644 --- a/tests/test_data/test_pipelines/test_loadings/__init__.py +++ b/tests/test_data/test_pipelines/test_loadings/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .base import BaseTestLoading __all__ = ['BaseTestLoading'] diff --git a/tests/test_data/test_pipelines/test_loadings/base.py b/tests/test_data/test_pipelines/test_loadings/base.py index ba9e3ebb5f..3c74628779 100644 --- a/tests/test_data/test_pipelines/test_loadings/base.py +++ b/tests/test_data/test_pipelines/test_loadings/base.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os.path as osp import mmcv diff --git a/tests/test_data/test_pipelines/test_loadings/test_decode.py b/tests/test_data/test_pipelines/test_loadings/test_decode.py index ae86e444a3..2fc927bd80 100644 --- a/tests/test_data/test_pipelines/test_loadings/test_decode.py +++ b/tests/test_data/test_pipelines/test_loadings/test_decode.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import copy import numpy as np diff --git a/tests/test_data/test_pipelines/test_loadings/test_load.py b/tests/test_data/test_pipelines/test_loadings/test_load.py index 5d30242713..560edd0903 100644 --- a/tests/test_data/test_pipelines/test_loadings/test_load.py +++ b/tests/test_data/test_pipelines/test_loadings/test_load.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import copy import numpy as np diff --git a/tests/test_data/test_pipelines/test_loadings/test_localization.py b/tests/test_data/test_pipelines/test_loadings/test_localization.py index 2716b579fa..40005965b6 100644 --- a/tests/test_data/test_pipelines/test_loadings/test_localization.py +++ b/tests/test_data/test_pipelines/test_loadings/test_localization.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import copy import numpy as np diff --git a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py index 720580a23f..7d984601e5 100644 --- a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py +++ b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
import copy as cp import os.path as osp import tempfile diff --git a/tests/test_data/test_pipelines/test_loadings/test_sampling.py b/tests/test_data/test_pipelines/test_loadings/test_sampling.py index 2cd7a60116..ff08436ac9 100644 --- a/tests/test_data/test_pipelines/test_loadings/test_sampling.py +++ b/tests/test_data/test_pipelines/test_loadings/test_sampling.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import copy import numpy as np diff --git a/tests/test_data/test_sampler.py b/tests/test_data/test_sampler.py index 8cba48455d..19bfd64a95 100644 --- a/tests/test_data/test_sampler.py +++ b/tests/test_data/test_sampler.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from torch.utils.data import DataLoader, Dataset from mmaction.datasets.samplers import (ClassSpecificDistributedSampler, diff --git a/tests/test_metrics/test_accuracy.py b/tests/test_metrics/test_accuracy.py index 6d2a647b5e..6769c2e6bc 100644 --- a/tests/test_metrics/test_accuracy.py +++ b/tests/test_metrics/test_accuracy.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os.path as osp import random diff --git a/tests/test_metrics/test_losses.py b/tests/test_metrics/test_losses.py index 5705da28a7..1c0d657798 100644 --- a/tests/test_metrics/test_losses.py +++ b/tests/test_metrics/test_losses.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import numpy as np import pytest import torch diff --git a/tests/test_models/__init__.py b/tests/test_models/__init__.py index 30c817b141..86888bd8a3 100644 --- a/tests/test_models/__init__.py +++ b/tests/test_models/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .base import (check_norm_state, generate_backbone_demo_inputs, generate_detector_demo_inputs, generate_gradcam_inputs, generate_recognizer_demo_inputs, get_audio_recognizer_cfg, diff --git a/tests/test_models/base.py b/tests/test_models/base.py index 1e6b475f43..6d9e9eedfc 100644 --- a/tests/test_models/base.py +++ b/tests/test_models/base.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os.path as osp import mmcv diff --git a/tests/test_models/test_backbones.py b/tests/test_models/test_backbones.py index cf8b63cee0..b962917471 100644 --- a/tests/test_models/test_backbones.py +++ b/tests/test_models/test_backbones.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import copy import pytest diff --git a/tests/test_models/test_common.py b/tests/test_models/test_common.py index a3ea192c3a..0ee6e8abbe 100644 --- a/tests/test_models/test_common.py +++ b/tests/test_models/test_common.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os.path as osp import pytest diff --git a/tests/test_models/test_common_modules/__init__.py b/tests/test_models/test_common_modules/__init__.py index e69de29bb2..ef101fec61 100644 --- a/tests/test_models/test_common_modules/__init__.py +++ b/tests/test_models/test_common_modules/__init__.py @@ -0,0 +1 @@ +# Copyright (c) OpenMMLab. All rights reserved. diff --git a/tests/test_models/test_common_modules/test_base_head.py b/tests/test_models/test_common_modules/test_base_head.py index 6611657468..cff9eb4a7f 100644 --- a/tests/test_models/test_common_modules/test_base_head.py +++ b/tests/test_models/test_common_modules/test_base_head.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
import torch import torch.nn.functional as F from mmcv.utils import assert_dict_has_keys diff --git a/tests/test_models/test_common_modules/test_base_recognizers.py b/tests/test_models/test_common_modules/test_base_recognizers.py index 9d0a72bf19..7a145701d0 100644 --- a/tests/test_models/test_common_modules/test_base_recognizers.py +++ b/tests/test_models/test_common_modules/test_base_recognizers.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import pytest import torch import torch.nn.functional as F diff --git a/tests/test_models/test_common_modules/test_mobilenet_v2.py b/tests/test_models/test_common_modules/test_mobilenet_v2.py index cdb4cd9ec4..5589b03b3e 100644 --- a/tests/test_models/test_common_modules/test_mobilenet_v2.py +++ b/tests/test_models/test_common_modules/test_mobilenet_v2.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import pytest import torch from mmcv.utils import _BatchNorm diff --git a/tests/test_models/test_common_modules/test_resnet.py b/tests/test_models/test_common_modules/test_resnet.py index 4f33f7ac34..7f4a46ecd0 100644 --- a/tests/test_models/test_common_modules/test_resnet.py +++ b/tests/test_models/test_common_modules/test_resnet.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import pytest import torch import torch.nn as nn diff --git a/tests/test_models/test_common_modules/test_resnet3d.py b/tests/test_models/test_common_modules/test_resnet3d.py index 1da93c55d3..d0c354eaa4 100644 --- a/tests/test_models/test_common_modules/test_resnet3d.py +++ b/tests/test_models/test_common_modules/test_resnet3d.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import pytest import torch import torch.nn as nn diff --git a/tests/test_models/test_detectors/__init__.py b/tests/test_models/test_detectors/__init__.py index e69de29bb2..ef101fec61 100644 --- a/tests/test_models/test_detectors/__init__.py +++ b/tests/test_models/test_detectors/__init__.py @@ -0,0 +1 @@ +# Copyright (c) OpenMMLab. All rights reserved. diff --git a/tests/test_models/test_detectors/test_detectors.py b/tests/test_models/test_detectors/test_detectors.py index 9afdb60032..e1590be442 100644 --- a/tests/test_models/test_detectors/test_detectors.py +++ b/tests/test_models/test_detectors/test_detectors.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import pytest import torch diff --git a/tests/test_models/test_gradcam.py b/tests/test_models/test_gradcam.py index 04267db03f..f80333deee 100644 --- a/tests/test_models/test_gradcam.py +++ b/tests/test_models/test_gradcam.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import pytest import torch diff --git a/tests/test_models/test_head.py b/tests/test_models/test_head.py index 8e0dd32cee..7a02829652 100644 --- a/tests/test_models/test_head.py +++ b/tests/test_models/test_head.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os.path as osp import tempfile from unittest.mock import Mock, patch diff --git a/tests/test_models/test_localizers/__init__.py b/tests/test_models/test_localizers/__init__.py index e69de29bb2..ef101fec61 100644 --- a/tests/test_models/test_localizers/__init__.py +++ b/tests/test_models/test_localizers/__init__.py @@ -0,0 +1 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
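The hunks in this patch repeat one mechanical change: the line "# Copyright (c) OpenMMLab. All rights reserved." is inserted at the top of every Python file, and previously empty __init__.py files gain it as their only line. A sweep of this breadth is normally scripted rather than hand-edited; the snippet below is a minimal sketch of such a helper, assuming only the standard library (the script, its add_header function, and its command-line interface are illustrative and are not part of this patch or of OpenMMLab's actual tooling):
# Hypothetical helper, not part of this patch: prepend the OpenMMLab
# license header to every .py file under a directory that lacks it.
import argparse
from pathlib import Path

HEADER = '# Copyright (c) OpenMMLab. All rights reserved.\n'

def add_header(path):
    """Prepend HEADER to the file at path; return True if it was modified."""
    text = path.read_text(encoding='utf-8')
    if text.startswith(HEADER.rstrip('\n')):
        return False  # header already present, leave the file untouched
    path.write_text(HEADER + text, encoding='utf-8')
    return True

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Bulk-add license headers.')
    parser.add_argument('root', help='directory tree to scan, e.g. tests/')
    args = parser.parse_args()
    # rglob walks the tree recursively; only files actually missing the
    # header are rewritten, so the script is safe to re-run.
    changed = [p for p in sorted(Path(args.root).rglob('*.py'))
               if add_header(p)]
    print(f'updated {len(changed)} files')
Run against the repository root, a script along these lines would reproduce the bulk of the one-line hunks in this patch in a single pass; the resulting diffs still warrant review, since files beginning with shebang lines or existing notices may need the header placed by hand.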
diff --git a/tests/test_models/test_localizers/test_bmn.py b/tests/test_models/test_localizers/test_bmn.py index 7c650a9114..d97efd35c6 100644 --- a/tests/test_models/test_localizers/test_bmn.py +++ b/tests/test_models/test_localizers/test_bmn.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import numpy as np import torch diff --git a/tests/test_models/test_localizers/test_localizers.py b/tests/test_models/test_localizers/test_localizers.py index 9920f98199..98df755126 100644 --- a/tests/test_models/test_localizers/test_localizers.py +++ b/tests/test_models/test_localizers/test_localizers.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import numpy as np from mmaction.models.localizers.utils import post_processing diff --git a/tests/test_models/test_localizers/test_pem.py b/tests/test_models/test_localizers/test_pem.py index a20e42dbf2..f1f4a6f97b 100644 --- a/tests/test_models/test_localizers/test_pem.py +++ b/tests/test_models/test_localizers/test_pem.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch from mmaction.models import build_localizer diff --git a/tests/test_models/test_localizers/test_ssn.py b/tests/test_models/test_localizers/test_ssn.py index 45be3c7247..1e67455a32 100644 --- a/tests/test_models/test_localizers/test_ssn.py +++ b/tests/test_models/test_localizers/test_ssn.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import copy import mmcv diff --git a/tests/test_models/test_localizers/test_tem.py b/tests/test_models/test_localizers/test_tem.py index 741ed76d22..179362e190 100644 --- a/tests/test_models/test_localizers/test_tem.py +++ b/tests/test_models/test_localizers/test_tem.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch from mmaction.models import build_localizer diff --git a/tests/test_models/test_neck.py b/tests/test_models/test_neck.py index 0847a0f9b7..6fc97fd19f 100644 --- a/tests/test_models/test_neck.py +++ b/tests/test_models/test_neck.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import copy import pytest diff --git a/tests/test_models/test_recognizers/__init__.py b/tests/test_models/test_recognizers/__init__.py index e69de29bb2..ef101fec61 100644 --- a/tests/test_models/test_recognizers/__init__.py +++ b/tests/test_models/test_recognizers/__init__.py @@ -0,0 +1 @@ +# Copyright (c) OpenMMLab. All rights reserved. diff --git a/tests/test_models/test_recognizers/test_audio_recognizer.py b/tests/test_models/test_recognizers/test_audio_recognizer.py index c2401063b1..b2d0b2ef04 100644 --- a/tests/test_models/test_recognizers/test_audio_recognizer.py +++ b/tests/test_models/test_recognizers/test_audio_recognizer.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch from mmaction.models import build_recognizer diff --git a/tests/test_models/test_recognizers/test_recognizer2d.py b/tests/test_models/test_recognizers/test_recognizer2d.py index 8d4cf23744..21c3a725d7 100644 --- a/tests/test_models/test_recognizers/test_recognizer2d.py +++ b/tests/test_models/test_recognizers/test_recognizer2d.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
import torch from mmaction.models import build_recognizer diff --git a/tests/test_models/test_recognizers/test_recognizer3d.py b/tests/test_models/test_recognizers/test_recognizer3d.py index ad26926cf8..3fcdea7337 100644 --- a/tests/test_models/test_recognizers/test_recognizer3d.py +++ b/tests/test_models/test_recognizers/test_recognizer3d.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch from mmaction.models import build_recognizer diff --git a/tests/test_models/test_roi_extractor.py b/tests/test_models/test_roi_extractor.py index 25853287bf..6448019845 100644 --- a/tests/test_models/test_roi_extractor.py +++ b/tests/test_models/test_roi_extractor.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch from mmaction.models import SingleRoIExtractor3D diff --git a/tests/test_runtime/test_apis_test.py b/tests/test_runtime/test_apis_test.py index eb1ea1ecec..c3b853d3e7 100644 --- a/tests/test_runtime/test_apis_test.py +++ b/tests/test_runtime/test_apis_test.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import sys import warnings from unittest.mock import MagicMock, Mock, patch diff --git a/tests/test_runtime/test_config.py b/tests/test_runtime/test_config.py index e11a334bef..21c7cb43b7 100644 --- a/tests/test_runtime/test_config.py +++ b/tests/test_runtime/test_config.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import glob import os import os.path as osp diff --git a/tests/test_runtime/test_eval_hook.py b/tests/test_runtime/test_eval_hook.py index 21d7c927d2..b10798159b 100644 --- a/tests/test_runtime/test_eval_hook.py +++ b/tests/test_runtime/test_eval_hook.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os.path as osp import shutil import tempfile diff --git a/tests/test_runtime/test_inference.py b/tests/test_runtime/test_inference.py index f378b75d5f..bd100e6be7 100644 --- a/tests/test_runtime/test_inference.py +++ b/tests/test_runtime/test_inference.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import mmcv import numpy as np import pytest diff --git a/tests/test_runtime/test_lr.py b/tests/test_runtime/test_lr.py index 89dfab6cd2..7a530fecdf 100644 --- a/tests/test_runtime/test_lr.py +++ b/tests/test_runtime/test_lr.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import logging import os.path as osp import shutil diff --git a/tests/test_runtime/test_optimizer.py b/tests/test_runtime/test_optimizer.py index 0ff8ef9da3..f0c06fe768 100644 --- a/tests/test_runtime/test_optimizer.py +++ b/tests/test_runtime/test_optimizer.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch import torch.nn as nn from mmcv.runner import build_optimizer_constructor diff --git a/tests/test_runtime/test_precise_bn.py b/tests/test_runtime/test_precise_bn.py index aaa29b6681..c4825bcca3 100644 --- a/tests/test_runtime/test_precise_bn.py +++ b/tests/test_runtime/test_precise_bn.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import copy import numpy as np diff --git a/tests/test_runtime/test_train.py b/tests/test_runtime/test_train.py index 509fe33eac..14c3db30ce 100644 --- a/tests/test_runtime/test_train.py +++ b/tests/test_runtime/test_train.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
import copy import tempfile from collections import OrderedDict diff --git a/tests/test_utils/__init__.py b/tests/test_utils/__init__.py index e69de29bb2..ef101fec61 100644 --- a/tests/test_utils/__init__.py +++ b/tests/test_utils/__init__.py @@ -0,0 +1 @@ +# Copyright (c) OpenMMLab. All rights reserved. diff --git a/tests/test_utils/test_bbox.py b/tests/test_utils/test_bbox.py index db1553db44..f3aba07840 100644 --- a/tests/test_utils/test_bbox.py +++ b/tests/test_utils/test_bbox.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os.path as osp from abc import abstractproperty diff --git a/tests/test_utils/test_decorator.py b/tests/test_utils/test_decorator.py index 9666778814..b962bb1214 100644 --- a/tests/test_utils/test_decorator.py +++ b/tests/test_utils/test_decorator.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import pytest from mmaction.utils import import_module_error_class, import_module_error_func diff --git a/tests/test_utils/test_localization_utils.py b/tests/test_utils/test_localization_utils.py index 54de3acb4e..b4709fe869 100644 --- a/tests/test_utils/test_localization_utils.py +++ b/tests/test_utils/test_localization_utils.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os.path as osp import numpy as np diff --git a/tests/test_utils/test_module_hooks.py b/tests/test_utils/test_module_hooks.py index f022b27175..6cd9fc1f3d 100644 --- a/tests/test_utils/test_module_hooks.py +++ b/tests/test_utils/test_module_hooks.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import copy import os.path as osp diff --git a/tests/test_utils/test_onnx.py b/tests/test_utils/test_onnx.py index 472d247fa9..7e8168ddaa 100644 --- a/tests/test_utils/test_onnx.py +++ b/tests/test_utils/test_onnx.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os.path as osp import tempfile diff --git a/tools/__init__.py b/tools/__init__.py index dedc85c424..fd77cb4ce5 100644 --- a/tools/__init__.py +++ b/tools/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .analysis import * # noqa: F401, F403 from .data import * # noqa: F401, F403 from .deployment import * # noqa: F401, F403 diff --git a/tools/analysis/analyze_logs.py b/tools/analysis/analyze_logs.py index 84380d23de..d0e1a02608 100644 --- a/tools/analysis/analyze_logs.py +++ b/tools/analysis/analyze_logs.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import json from collections import defaultdict diff --git a/tools/analysis/bench_processing.py b/tools/analysis/bench_processing.py index 3a44603dd4..b2f7034091 100644 --- a/tools/analysis/bench_processing.py +++ b/tools/analysis/bench_processing.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. """This file is for benchmark dataloading process. The command line to run this file is: diff --git a/tools/analysis/benchmark.py b/tools/analysis/benchmark.py index 7965158d67..2e546552f9 100644 --- a/tools/analysis/benchmark.py +++ b/tools/analysis/benchmark.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import time diff --git a/tools/analysis/check_videos.py b/tools/analysis/check_videos.py index 6a75322f74..323f1c3cb9 100644 --- a/tools/analysis/check_videos.py +++ b/tools/analysis/check_videos.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
import argparse import os import warnings diff --git a/tools/analysis/eval_metric.py b/tools/analysis/eval_metric.py index 4335ea8c84..7841a4cb66 100644 --- a/tools/analysis/eval_metric.py +++ b/tools/analysis/eval_metric.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import mmcv diff --git a/tools/analysis/get_flops.py b/tools/analysis/get_flops.py index 12907077ad..d4c8e9732e 100644 --- a/tools/analysis/get_flops.py +++ b/tools/analysis/get_flops.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse from mmcv import Config diff --git a/tools/analysis/print_config.py b/tools/analysis/print_config.py index 2ba994fb38..c3538ef56b 100644 --- a/tools/analysis/print_config.py +++ b/tools/analysis/print_config.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse from mmcv import Config, DictAction diff --git a/tools/analysis/report_accuracy.py b/tools/analysis/report_accuracy.py index fc42eb1e7d..329434d13f 100644 --- a/tools/analysis/report_accuracy.py +++ b/tools/analysis/report_accuracy.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse from mmcv import load diff --git a/tools/analysis/report_map.py b/tools/analysis/report_map.py index 2dce04fba6..2aa46a1c50 100644 --- a/tools/analysis/report_map.py +++ b/tools/analysis/report_map.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import os import os.path as osp diff --git a/tools/data/activitynet/activitynet_feature_postprocessing.py b/tools/data/activitynet/activitynet_feature_postprocessing.py index a822b3d1bc..8dcd7bfe26 100644 --- a/tools/data/activitynet/activitynet_feature_postprocessing.py +++ b/tools/data/activitynet/activitynet_feature_postprocessing.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import multiprocessing import os diff --git a/tools/data/activitynet/convert_proposal_format.py b/tools/data/activitynet/convert_proposal_format.py index e942c2af78..f2f8613eb4 100644 --- a/tools/data/activitynet/convert_proposal_format.py +++ b/tools/data/activitynet/convert_proposal_format.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. """This file converts the output proposal file of proposal generator (BSN, BMN) into the input proposal file of action classifier (Currently supports SSN and P-GCN, not including TSN, I3D etc.).""" diff --git a/tools/data/activitynet/download.py b/tools/data/activitynet/download.py index 7d837c1fd1..b604e65d23 100644 --- a/tools/data/activitynet/download.py +++ b/tools/data/activitynet/download.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. # This scripts is copied from # https://github.com/activitynet/ActivityNet/blob/master/Crawler/Kinetics/download.py # noqa: E501 # The code is licensed under the MIT licence. diff --git a/tools/data/activitynet/generate_rawframes_filelist.py b/tools/data/activitynet/generate_rawframes_filelist.py index 784925635e..4be9262288 100644 --- a/tools/data/activitynet/generate_rawframes_filelist.py +++ b/tools/data/activitynet/generate_rawframes_filelist.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
import json import os import os.path as osp diff --git a/tools/data/activitynet/process_annotations.py b/tools/data/activitynet/process_annotations.py index 0bd87ebe9b..09ed5b5c8f 100644 --- a/tools/data/activitynet/process_annotations.py +++ b/tools/data/activitynet/process_annotations.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. """This file processes the annotation files and generates proper annotation files for localizers.""" import json diff --git a/tools/data/activitynet/tsn_feature_extraction.py b/tools/data/activitynet/tsn_feature_extraction.py index 0461c013af..525f44deee 100644 --- a/tools/data/activitynet/tsn_feature_extraction.py +++ b/tools/data/activitynet/tsn_feature_extraction.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import os import os.path as osp diff --git a/tools/data/anno_txt2json.py b/tools/data/anno_txt2json.py index 9f3d3ea357..fcefc7778e 100644 --- a/tools/data/anno_txt2json.py +++ b/tools/data/anno_txt2json.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import mmcv diff --git a/tools/data/ava/download_videos_parallel.py b/tools/data/ava/download_videos_parallel.py index 027e9cecfe..7be4b1b883 100644 --- a/tools/data/ava/download_videos_parallel.py +++ b/tools/data/ava/download_videos_parallel.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import os.path as osp import subprocess diff --git a/tools/data/build_audio_features.py b/tools/data/build_audio_features.py index 3fcc410eed..05f5978083 100644 --- a/tools/data/build_audio_features.py +++ b/tools/data/build_audio_features.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import glob import os diff --git a/tools/data/build_file_list.py b/tools/data/build_file_list.py index 340d61d3ff..c8438098e9 100644 --- a/tools/data/build_file_list.py +++ b/tools/data/build_file_list.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import glob import json diff --git a/tools/data/build_rawframes.py b/tools/data/build_rawframes.py index 6f39fc1520..066141a9e3 100644 --- a/tools/data/build_rawframes.py +++ b/tools/data/build_rawframes.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import glob import os diff --git a/tools/data/build_videos.py b/tools/data/build_videos.py index b9f00a868d..77a3a0bd39 100644 --- a/tools/data/build_videos.py +++ b/tools/data/build_videos.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import glob import os diff --git a/tools/data/denormalize_proposal_file.py b/tools/data/denormalize_proposal_file.py index ded7e91156..3d7706c0ef 100644 --- a/tools/data/denormalize_proposal_file.py +++ b/tools/data/denormalize_proposal_file.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import os.path as osp diff --git a/tools/data/extract_audio.py b/tools/data/extract_audio.py index 1840bc4884..0249e4957a 100644 --- a/tools/data/extract_audio.py +++ b/tools/data/extract_audio.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import glob import os diff --git a/tools/data/gym/download.py b/tools/data/gym/download.py index 51b54c24a8..cfcb954c35 100644 --- a/tools/data/gym/download.py +++ b/tools/data/gym/download.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
# This scripts is copied from # https://github.com/activitynet/ActivityNet/blob/master/Crawler/Kinetics/download.py # noqa: E501 # The code is licensed under the MIT licence. diff --git a/tools/data/gym/generate_file_list.py b/tools/data/gym/generate_file_list.py index bbbaeae209..5f4295d2ed 100644 --- a/tools/data/gym/generate_file_list.py +++ b/tools/data/gym/generate_file_list.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os import os.path as osp diff --git a/tools/data/gym/trim_event.py b/tools/data/gym/trim_event.py index 9ae22262b5..bf1fc97ade 100644 --- a/tools/data/gym/trim_event.py +++ b/tools/data/gym/trim_event.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os import os.path as osp import subprocess diff --git a/tools/data/gym/trim_subaction.py b/tools/data/gym/trim_subaction.py index fa705e97a4..bbff90a839 100644 --- a/tools/data/gym/trim_subaction.py +++ b/tools/data/gym/trim_subaction.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os import os.path as osp import subprocess diff --git a/tools/data/hvu/generate_file_list.py b/tools/data/hvu/generate_file_list.py index e76706a7ec..83e99b1482 100644 --- a/tools/data/hvu/generate_file_list.py +++ b/tools/data/hvu/generate_file_list.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import fnmatch import glob diff --git a/tools/data/hvu/generate_sub_file_list.py b/tools/data/hvu/generate_sub_file_list.py index 41279c445b..8313a9b3c9 100644 --- a/tools/data/hvu/generate_sub_file_list.py +++ b/tools/data/hvu/generate_sub_file_list.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import os.path as osp diff --git a/tools/data/hvu/parse_tag_list.py b/tools/data/hvu/parse_tag_list.py index 41e8db1951..0871491ef8 100644 --- a/tools/data/hvu/parse_tag_list.py +++ b/tools/data/hvu/parse_tag_list.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import mmcv tag_list = '../../../data/hvu/annotations/hvu_categories.csv' diff --git a/tools/data/omnisource/trim_raw_video.py b/tools/data/omnisource/trim_raw_video.py index 32cfca8897..81aef77140 100644 --- a/tools/data/omnisource/trim_raw_video.py +++ b/tools/data/omnisource/trim_raw_video.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import os import os.path as osp import sys diff --git a/tools/data/parse_file_list.py b/tools/data/parse_file_list.py index e22009479b..f649ab411c 100644 --- a/tools/data/parse_file_list.py +++ b/tools/data/parse_file_list.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import csv import fnmatch import glob diff --git a/tools/data/resize_videos.py b/tools/data/resize_videos.py index 1986cda7b5..d221437569 100644 --- a/tools/data/resize_videos.py +++ b/tools/data/resize_videos.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import glob import os diff --git a/tools/data/skeleton/ntu_pose_extraction.py b/tools/data/skeleton/ntu_pose_extraction.py index 132a553f1c..d53abaa44c 100644 --- a/tools/data/skeleton/ntu_pose_extraction.py +++ b/tools/data/skeleton/ntu_pose_extraction.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import abc import argparse import os diff --git a/tools/deployment/publish_model.py b/tools/deployment/publish_model.py index c20e7e38b6..5d3912e45e 100644 --- a/tools/deployment/publish_model.py +++ b/tools/deployment/publish_model.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. 
All rights reserved. import argparse import subprocess diff --git a/tools/deployment/pytorch2onnx.py b/tools/deployment/pytorch2onnx.py index f27e02b434..8852c765d4 100644 --- a/tools/deployment/pytorch2onnx.py +++ b/tools/deployment/pytorch2onnx.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import mmcv diff --git a/tools/misc/bsn_proposal_generation.py b/tools/misc/bsn_proposal_generation.py index 794fb2e3c2..04e3cc7244 100644 --- a/tools/misc/bsn_proposal_generation.py +++ b/tools/misc/bsn_proposal_generation.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import os import os.path as osp diff --git a/tools/misc/clip_feature_extraction.py b/tools/misc/clip_feature_extraction.py index 90724a9ccf..1829bf9b5c 100644 --- a/tools/misc/clip_feature_extraction.py +++ b/tools/misc/clip_feature_extraction.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import os import os.path as osp diff --git a/tools/misc/flow_extraction.py b/tools/misc/flow_extraction.py index 2a2cad8d73..b8763430b5 100644 --- a/tools/misc/flow_extraction.py +++ b/tools/misc/flow_extraction.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import os import os.path as osp diff --git a/tools/test.py b/tools/test.py index cafed2b3b5..849005562b 100644 --- a/tools/test.py +++ b/tools/test.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import os import os.path as osp diff --git a/tools/train.py b/tools/train.py index 1bd5a0d6be..dcbda35c6c 100644 --- a/tools/train.py +++ b/tools/train.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import copy import os From 28ac80cb1a96c208a3e95f76f051ebafc74390aa Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Tue, 17 Aug 2021 20:00:24 +0800 Subject: [PATCH 232/414] [Fix] Works on Regression in July (#1081) * set videos_per_gpu to 1 for testing * replace top5_accuracy * update * fix * fix * workers_per_gpu -> 2 * fix bug * accurate testing for sthv1 * fix bug * add cache in rawframedecode * Fix SlowOnly Sthv1 checkpoint * update * update * update * update * remove lambda in AVADataset * update * update --- .gitignore | 5 + configs/_base_/models/ircsn_r152.py | 2 +- ...etrained_r50_8x8x1_cosine_10e_ava22_rgb.py | 4 +- ...pretrained_r50_8x8x1_cosine_10e_ava_rgb.py | 4 +- ...etics_pretrained_r50_4x16x1_20e_ava_rgb.py | 2 +- ...etics_pretrained_r50_4x16x1_20e_ava_rgb.py | 2 +- ...d_r50_4x16x1_20e_ava_rgb_custom_classes.py | 2 +- ...netics_pretrained_r50_8x8x1_20e_ava_rgb.py | 3 +- ...etrained_r50_8x8x1_cosine_10e_ava22_rgb.py | 4 +- ...etrained_r50_8x8x1_cosine_10e_ava22_rgb.py | 4 +- ...etrained_r50_8x8x1_cosine_10e_ava22_rgb.py | 4 +- ...etics_pretrained_r101_8x8x1_20e_ava_rgb.py | 2 +- ...etics_pretrained_r50_4x16x1_20e_ava_rgb.py | 2 +- ...d_r50_4x16x1_20e_ava_rgb_custom_classes.py | 2 +- ...etics_pretrained_r50_4x16x1_10e_ava_rgb.py | 2 +- ...netics_pretrained_r50_8x8x1_10e_ava_rgb.py | 2 +- ...ource_pretrained_r101_8x8x1_20e_ava_rgb.py | 2 +- ...ource_pretrained_r50_4x16x1_20e_ava_rgb.py | 2 +- ...trained_slowonly_r50_4x16x1_20e_ava_rgb.py | 2 +- ...trained_slowonly_r50_4x16x1_20e_ava_rgb.py | 2 +- ...trained_slowonly_r50_4x16x1_20e_ava_rgb.py | 2 +- configs/recognition/c3d/metafile.yml | 2 +- ...frozen_r152_32x2x1_180e_kinetics400_rgb.py | 3 +- ...frozen_r152_32x2x1_180e_kinetics400_rgb.py | 3 +- ...nfrozen_r50_32x2x1_180e_kinetics400_rgb.py | 3 +- 
...nfrozen_r152_32x2x1_58e_kinetics400_rgb.py | 3 +- ...bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py | 3 +- ...trained_r152_32x2x1_58e_kinetics400_rgb.py | 3 +- configs/recognition/csn/metafile.yml | 16 ++-- ...product_r50_32x2x1_100e_kinetics400_rgb.py | 3 +- .../i3d_r50_32x2x1_100e_kinetics400_rgb.py | 3 +- ...d_r50_dense_32x2x1_100e_kinetics400_rgb.py | 3 +- ...3d_r50_heavy_8x8x1_100e_kinetics400_rgb.py | 3 +- ...3d_r50_lazy_32x2x1_100e_kinetics400_rgb.py | 3 +- ...d_r50_video_32x2x1_100e_kinetics400_rgb.py | 3 +- ..._video_heavy_8x8x1_100e_kinetics400_rgb.py | 3 +- ...ideo_imgaug_32x2x1_100e_kinetics400_rgb.py | 3 +- configs/recognition/i3d/metafile.yml | 20 ++-- configs/recognition/omnisource/metafile.yml | 32 +++---- ...8x8x1_256e_minikinetics_googleimage_rgb.py | 1 + ...50_8x8x1_256e_minikinetics_insvideo_rgb.py | 1 + ...8x8x1_256e_minikinetics_kineticsraw_rgb.py | 1 + ..._8x8x1_256e_minikinetics_omnisource_rgb.py | 3 +- ...lowonly_r50_8x8x1_256e_minikinetics_rgb.py | 3 +- ...50_8x8x1_256e_minikinetics_webimage_rgb.py | 1 + ...1x1x8_100e_minikinetics_googleimage_rgb.py | 1 + ...50_1x1x8_100e_minikinetics_insvideo_rgb.py | 1 + ...1x1x8_100e_minikinetics_kineticsraw_rgb.py | 1 + ..._1x1x8_100e_minikinetics_omnisource_rgb.py | 3 +- .../tsn_r50_1x1x8_100e_minikinetics_rgb.py | 3 +- ...50_1x1x8_100e_minikinetics_webimage_rgb.py | 1 + configs/recognition/r2plus1d/metafile.yml | 8 +- ...2plus1d_r34_32x2x1_180e_kinetics400_rgb.py | 3 +- ...r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py | 3 +- ...1d_r34_video_8x8x1_180e_kinetics400_rgb.py | 3 +- configs/recognition/slowfast/metafile.yml | 18 ++-- ...lowfast_r101_8x8x1_256e_kinetics400_rgb.py | 10 +- ...st_r101_r50_4x16x1_256e_kinetics400_rgb.py | 10 +- ...st_r152_r50_4x16x1_256e_kinetics400_rgb.py | 10 +- .../slowfast_r50_16x8x1_22e_sthv1_rgb.py | 3 +- ...lowfast_r50_4x16x1_256e_kinetics400_rgb.py | 3 +- ...slowfast_r50_8x8x1_256e_kinetics400_rgb.py | 2 +- ...t_r50_video_4x16x1_256e_kinetics400_rgb.py | 3 +- configs/recognition/slowonly/README.md | 2 +- configs/recognition/slowonly/README_zh-CN.md | 2 +- ...edcrop_256p_4x16x1_256e_kinetics400_rgb.py | 3 +- ...edcrop_320p_4x16x1_256e_kinetics400_rgb.py | 3 +- ...rop_340x256_4x16x1_256e_kinetics400_rgb.py | 3 +- configs/recognition/slowonly/metafile.yml | 48 +++++----- ...et_pretrained_r50_4x16x1_120e_gym99_rgb.py | 3 +- ...trained_r50_4x16x1_150e_kinetics400_rgb.py | 3 +- ...net_pretrained_r50_8x4x1_64e_hmdb51_rgb.py | 4 +- ...enet_pretrained_r50_8x4x1_64e_sthv1_rgb.py | 4 +- ...enet_pretrained_r50_8x4x1_64e_sthv2_rgb.py | 4 +- ...net_pretrained_r50_8x4x1_64e_ucf101_rgb.py | 4 +- ...etrained_r50_8x8x1_150e_kinetics400_rgb.py | 3 +- ...net_pretrained_r50_8x8x1_64e_jester_rgb.py | 2 +- ...0_pretrained_r50_4x16x1_120e_gym99_flow.py | 3 +- ...400_pretrained_r50_8x4x1_40e_hmdb51_rgb.py | 4 +- ...400_pretrained_r50_8x4x1_40e_ucf101_rgb.py | 4 +- ...aussian_r50_4x16x1_150e_kinetics400_rgb.py | 2 +- ...gaussian_r50_8x8x1_150e_kinetics400_rgb.py | 2 +- ...owonly_r50_4x16x1_256e_kinetics400_flow.py | 3 +- ...lowonly_r50_4x16x1_256e_kinetics400_rgb.py | 3 +- ...lowonly_r50_8x8x1_256e_kinetics400_flow.py | 3 +- ...slowonly_r50_8x8x1_256e_kinetics400_rgb.py | 3 +- ...y_r50_video_4x16x1_256e_kinetics400_rgb.py | 3 +- ...ly_r50_video_8x8x1_256e_kinetics600_rgb.py | 3 +- ...ly_r50_video_8x8x1_256e_kinetics700_rgb.py | 3 +- configs/recognition/tanet/metafile.yml | 10 +- .../tanet/tanet_r50_1x1x16_50e_sthv1_rgb.py | 6 +- .../tanet/tanet_r50_1x1x8_50e_sthv1_rgb.py | 6 +- ...et_r50_dense_1x1x8_100e_kinetics400_rgb.py 
| 4 +- ...former_divST_8x32x1_15e_kinetics400_rgb.py | 3 +- ...rmer_jointST_8x32x1_15e_kinetics400_rgb.py | 3 +- ...er_spaceOnly_8x32x1_15e_kinetics400_rgb.py | 3 +- configs/recognition/tin/metafile.yml | 6 +- .../tin/tin_r50_1x1x8_40e_sthv1_rgb.py | 3 +- .../tin/tin_r50_1x1x8_40e_sthv2_rgb.py | 3 +- ..._finetune_r50_1x1x8_50e_kinetics400_rgb.py | 3 +- configs/recognition/tpn/metafile.yml | 2 +- ...ed_slowonly_r50_8x8x1_150e_kinetics_rgb.py | 1 + .../tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py | 2 +- configs/recognition/trn/metafile.yml | 8 +- .../trn/trn_r50_1x1x8_50e_sthv1_rgb.py | 2 +- .../trn/trn_r50_1x1x8_50e_sthv2_rgb.py | 2 +- configs/recognition/tsm/metafile.yml | 94 +++++++++---------- ...00_pretrained_r50_1x1x16_25e_hmdb51_rgb.py | 3 +- ...00_pretrained_r50_1x1x16_25e_ucf101_rgb.py | 3 +- ...400_pretrained_r50_1x1x8_25e_hmdb51_rgb.py | 3 +- ...400_pretrained_r50_1x1x8_25e_ucf101_rgb.py | 3 +- ...enetv2_dense_1x1x8_100e_kinetics400_rgb.py | 3 +- ..._video_dense_1x1x8_100e_kinetics400_rgb.py | 3 +- ...erence_dense_1x1x8_100e_kinetics400_rgb.py | 2 +- ...t_product_r50_1x1x8_50e_kinetics400_rgb.py | 3 +- ..._gaussian_r50_1x1x8_50e_kinetics400_rgb.py | 3 +- ..._gaussian_r50_1x1x8_50e_kinetics400_rgb.py | 3 +- .../tsm/tsm_r101_1x1x8_50e_sthv2_rgb.py | 6 +- .../tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py | 3 +- .../tsm/tsm_r50_1x1x16_50e_sthv1_rgb.py | 6 +- .../tsm/tsm_r50_1x1x16_50e_sthv2_rgb.py | 6 +- .../tsm/tsm_r50_1x1x8_100e_kinetics400_rgb.py | 2 +- .../tsm/tsm_r50_1x1x8_50e_jester_rgb.py | 2 +- .../tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py | 3 +- .../tsm/tsm_r50_1x1x8_50e_sthv1_rgb.py | 6 +- .../tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py | 6 +- .../tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.py | 3 +- ...sm_r50_dense_1x1x8_100e_kinetics400_rgb.py | 5 +- .../tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb.py | 6 +- ...50_flip_randaugment_1x1x8_50e_sthv1_rgb.py | 6 +- ...gpu_normalize_1x1x8_50e_kinetics400_rgb.py | 3 +- .../tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.py | 3 +- .../tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.py | 4 +- ...r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.py | 4 +- ...tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.py | 6 +- .../tsm_r50_video_1x1x16_50e_diving48_rgb.py | 3 +- .../tsm_r50_video_1x1x8_50e_diving48_rgb.py | 3 +- ...tsm_r50_video_1x1x8_50e_kinetics400_rgb.py | 3 +- ...ense161_320p_1x1x3_100e_kinetics400_rgb.py | 3 +- ...1_32x4d_320p_1x1x3_100e_kinetics400_rgb.py | 3 +- ...r_video_320p_1x1x3_100e_kinetics400_rgb.py | 4 +- ...alecrop_256p_1x1x3_100e_kinetics400_rgb.py | 3 +- ...alecrop_320p_1x1x3_100e_kinetics400_rgb.py | 3 +- ...crop_340x256_1x1x3_100e_kinetics400_rgb.py | 3 +- ...zedcrop_256p_1x1x3_100e_kinetics400_rgb.py | 3 +- ...zedcrop_320p_1x1x3_100e_kinetics400_rgb.py | 3 +- ...crop_340x256_1x1x3_100e_kinetics400_rgb.py | 3 +- ...256p_1x1x25_10crop_100e_kinetics400_rgb.py | 3 +- ..._256p_1x1x25_3crop_100e_kinetics400_rgb.py | 3 +- ...320p_1x1x25_10crop_100e_kinetics400_rgb.py | 3 +- ..._320p_1x1x25_3crop_100e_kinetics400_rgb.py | 3 +- ...x256_1x1x25_10crop_100e_kinetics400_rgb.py | 3 +- ...0x256_1x1x25_3crop_100e_kinetics400_rgb.py | 3 +- .../hvu/tsn_r18_1x1x8_100e_hvu_action_rgb.py | 1 + .../tsn_r18_1x1x8_100e_hvu_attribute_rgb.py | 1 + .../hvu/tsn_r18_1x1x8_100e_hvu_concept_rgb.py | 1 + .../hvu/tsn_r18_1x1x8_100e_hvu_event_rgb.py | 1 + .../hvu/tsn_r18_1x1x8_100e_hvu_object_rgb.py | 1 + .../hvu/tsn_r18_1x1x8_100e_hvu_scene_rgb.py | 1 + configs/recognition/tsn/metafile.yml | 72 +++++++------- ...tsn_fp16_r50_1x1x3_100e_kinetics400_rgb.py | 3 +- .../tsn/tsn_r101_1x1x5_50e_mmit_rgb.py | 10 +- 
.../tsn/tsn_r50_1x1x16_50e_sthv1_rgb.py | 3 +- .../tsn/tsn_r50_1x1x16_50e_sthv2_rgb.py | 19 +++- .../tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py | 3 +- .../tsn/tsn_r50_1x1x3_75e_ucf101_rgb.py | 3 +- .../tsn/tsn_r50_1x1x6_100e_mit_rgb.py | 3 +- .../tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb.py | 3 +- ...sn_r50_1x1x8_50e_hmdb51_kinetics400_rgb.py | 3 +- .../tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb.py | 1 + .../tsn/tsn_r50_1x1x8_50e_sthv1_rgb.py | 3 +- .../tsn/tsn_r50_1x1x8_50e_sthv2_rgb.py | 17 +++- ...tsn_r50_320p_1x1x3_100e_kinetics400_rgb.py | 3 +- ...sn_r50_320p_1x1x3_110e_kinetics400_flow.py | 3 +- ...tsn_r50_320p_1x1x8_100e_kinetics400_rgb.py | 3 +- ...sn_r50_320p_1x1x8_110e_kinetics400_flow.py | 3 +- ...0_320p_1x1x8_150e_activitynet_clip_flow.py | 3 +- ..._320p_1x1x8_150e_activitynet_video_flow.py | 3 +- ...r50_320p_1x1x8_50e_activitynet_clip_rgb.py | 3 +- ...50_320p_1x1x8_50e_activitynet_video_rgb.py | 3 +- ...n_r50_clip_feature_extraction_1x1x3_rgb.py | 1 + ...sn_r50_dense_1x1x5_100e_kinetics400_rgb.py | 5 +- ...sn_r50_dense_1x1x8_100e_kinetics400_rgb.py | 3 +- .../tsn_r50_video_1x1x16_100e_diving48_rgb.py | 3 +- .../tsn_r50_video_1x1x8_100e_diving48_rgb.py | 3 +- ...sn_r50_video_1x1x8_100e_kinetics400_rgb.py | 3 +- ...sn_r50_video_1x1x8_100e_kinetics600_rgb.py | 3 +- ...sn_r50_video_1x1x8_100e_kinetics700_rgb.py | 3 +- ...0_video_320p_1x1x3_100e_kinetics400_rgb.py | 3 +- ..._video_dense_1x1x8_100e_kinetics400_rgb.py | 3 +- ...video_imgaug_1x1x8_100e_kinetics400_rgb.py | 10 +- ..._video_mixup_1x1x8_100e_kinetics400_rgb.py | 3 +- ...0_64x1x1_100e_kinetics400_audio_feature.py | 2 +- configs/recognition_audio/resnet/metafile.yml | 2 +- ...8_64x1x1_100e_kinetics400_audio_feature.py | 2 +- .../tsn_r50_64x1x1_100e_kinetics400_audio.py | 2 +- ...wonly_r50_u48_240e_ntu120_xsub_keypoint.py | 2 +- .../slowonly_r50_u48_240e_ntu120_xsub_limb.py | 2 +- ...owonly_r50_u48_240e_ntu60_xsub_keypoint.py | 2 +- .../slowonly_r50_u48_240e_ntu60_xsub_limb.py | 2 +- demo/mmaction2_tutorial.ipynb | 2 +- demo/mmaction2_tutorial_zh-CN.ipynb | 2 +- docs/tutorials/1_config.md | 6 +- docs/tutorials/3_new_dataset.md | 2 +- docs_zh_CN/tutorials/1_config.md | 6 +- docs_zh_CN/tutorials/3_new_dataset.md | 2 +- mmaction/datasets/ava_dataset.py | 19 ++-- mmaction/models/heads/misc_head.py | 4 +- tools/data/jester/README.md | 2 +- tools/data/jester/README_zh-CN.md | 2 +- tools/data/sthv1/README.md | 2 +- tools/data/sthv1/README_zh-CN.md | 2 +- 212 files changed, 589 insertions(+), 407 deletions(-) diff --git a/.gitignore b/.gitignore index 4eba3d0d8e..587b296482 100644 --- a/.gitignore +++ b/.gitignore @@ -132,4 +132,9 @@ work_dirs/ # avoid soft links created by MIM mmaction/configs/* mmaction/tools/* + +*.ipynb + +# unignore ipython notebook files in demo +!demo/*.ipynb mmaction/.mim diff --git a/configs/_base_/models/ircsn_r152.py b/configs/_base_/models/ircsn_r152.py index fcab416cbd..36e700c384 100644 --- a/configs/_base_/models/ircsn_r152.py +++ b/configs/_base_/models/ircsn_r152.py @@ -19,4 +19,4 @@ init_std=0.01), # model training and testing settings train_cfg=None, - test_cfg=dict(average_clips='prob')) + test_cfg=dict(average_clips='prob', max_testing_views=10)) diff --git a/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py b/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py index 7bd1ea779d..442165082f 100644 --- a/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py +++ 
b/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py @@ -70,7 +70,7 @@ exclude_file_train = f'{anno_root}/ava_train_excluded_timestamps_v2.2.csv' exclude_file_val = f'{anno_root}/ava_val_excluded_timestamps_v2.2.csv' -label_file = f'{anno_root}/ava_action_list_v2.2.pbtxt' +label_file = f'{anno_root}/ava_action_list_v2.2_for_activitynet_2019.pbtxt' proposal_file_train = (f'{anno_root}/ava_dense_proposals_train.FAIR.' 'recall_93.9.pkl') @@ -118,7 +118,7 @@ data = dict( videos_per_gpu=6, - workers_per_gpu=4, + workers_per_gpu=2, val_dataloader=dict(videos_per_gpu=1), test_dataloader=dict(videos_per_gpu=1), train=dict( diff --git a/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.py b/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.py index 50806ddacb..d199598628 100644 --- a/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.py +++ b/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.py @@ -70,7 +70,7 @@ exclude_file_train = f'{anno_root}/ava_train_excluded_timestamps_v2.1.csv' exclude_file_val = f'{anno_root}/ava_val_excluded_timestamps_v2.1.csv' -label_file = f'{anno_root}/ava_action_list_v2.1.pbtxt' +label_file = f'{anno_root}/ava_action_list_v2.1_for_activitynet_2018.pbtxt' proposal_file_train = (f'{anno_root}/ava_dense_proposals_train.FAIR.' 'recall_93.9.pkl') @@ -118,7 +118,7 @@ data = dict( videos_per_gpu=6, - workers_per_gpu=4, + workers_per_gpu=2, val_dataloader=dict(videos_per_gpu=1), test_dataloader=dict(videos_per_gpu=1), train=dict( diff --git a/configs/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py b/configs/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py index 6b5796425c..27b5637276 100644 --- a/configs/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py +++ b/configs/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py @@ -119,7 +119,7 @@ data = dict( videos_per_gpu=9, - workers_per_gpu=4, + workers_per_gpu=2, val_dataloader=dict(videos_per_gpu=1), test_dataloader=dict(videos_per_gpu=1), train=dict( diff --git a/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py b/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py index 22020db977..3f1fadc720 100644 --- a/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py +++ b/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py @@ -118,7 +118,7 @@ data = dict( videos_per_gpu=9, - workers_per_gpu=4, + workers_per_gpu=2, val_dataloader=dict(videos_per_gpu=1), test_dataloader=dict(videos_per_gpu=1), train=dict( diff --git a/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py b/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py index 3b14fabd04..713136ca3e 100644 --- a/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py +++ b/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py @@ -124,7 +124,7 @@ data = dict( videos_per_gpu=9, - workers_per_gpu=4, + workers_per_gpu=2, val_dataloader=dict(videos_per_gpu=1), test_dataloader=dict(videos_per_gpu=1), train=dict( diff --git a/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py 
b/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py index 9106fa8d29..89e83a0b8b 100644 --- a/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py +++ b/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py @@ -12,6 +12,7 @@ depth=50, pretrained=None, lateral=True, + fusion_kernel=7, conv1_kernel=(1, 7, 7), dilations=(1, 1, 1, 1), conv1_stride_t=1, @@ -118,7 +119,7 @@ data = dict( videos_per_gpu=5, - workers_per_gpu=4, + workers_per_gpu=2, val_dataloader=dict(videos_per_gpu=1), test_dataloader=dict(videos_per_gpu=1), train=dict( diff --git a/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py b/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py index f7898d0b61..1b02c1a205 100644 --- a/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py +++ b/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py @@ -68,7 +68,7 @@ exclude_file_train = f'{anno_root}/ava_train_excluded_timestamps_v2.2.csv' exclude_file_val = f'{anno_root}/ava_val_excluded_timestamps_v2.2.csv' -label_file = f'{anno_root}/ava_action_list_v2.2.pbtxt' +label_file = f'{anno_root}/ava_action_list_v2.2_for_activitynet_2019.pbtxt' proposal_file_train = (f'{anno_root}/ava_dense_proposals_train.FAIR.' 'recall_93.9.pkl') @@ -116,7 +116,7 @@ data = dict( videos_per_gpu=6, - workers_per_gpu=4, + workers_per_gpu=2, val_dataloader=dict(videos_per_gpu=1), test_dataloader=dict(videos_per_gpu=1), train=dict( diff --git a/configs/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py b/configs/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py index b59e0008d8..5c167e9bee 100644 --- a/configs/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py +++ b/configs/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py @@ -69,7 +69,7 @@ exclude_file_train = f'{anno_root}/ava_train_excluded_timestamps_v2.2.csv' exclude_file_val = f'{anno_root}/ava_val_excluded_timestamps_v2.2.csv' -label_file = f'{anno_root}/ava_action_list_v2.2.pbtxt' +label_file = f'{anno_root}/ava_action_list_v2.2_for_activitynet_2019.pbtxt' proposal_file_train = (f'{anno_root}/ava_dense_proposals_train.FAIR.' 'recall_93.9.pkl') @@ -117,7 +117,7 @@ data = dict( videos_per_gpu=6, - workers_per_gpu=4, + workers_per_gpu=2, val_dataloader=dict(videos_per_gpu=1), test_dataloader=dict(videos_per_gpu=1), train=dict( diff --git a/configs/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py b/configs/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py index e77496a39a..4bea67b696 100644 --- a/configs/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py +++ b/configs/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py @@ -69,7 +69,7 @@ exclude_file_train = f'{anno_root}/ava_train_excluded_timestamps_v2.2.csv' exclude_file_val = f'{anno_root}/ava_val_excluded_timestamps_v2.2.csv' -label_file = f'{anno_root}/ava_action_list_v2.2.pbtxt' +label_file = f'{anno_root}/ava_action_list_v2.2_for_activitynet_2019.pbtxt' proposal_file_train = (f'{anno_root}/ava_dense_proposals_train.FAIR.' 
'recall_93.9.pkl') @@ -117,7 +117,7 @@ data = dict( videos_per_gpu=6, - workers_per_gpu=4, + workers_per_gpu=2, val_dataloader=dict(videos_per_gpu=1), test_dataloader=dict(videos_per_gpu=1), train=dict( diff --git a/configs/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb.py b/configs/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb.py index ce12865cd0..4967ea3679 100644 --- a/configs/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb.py +++ b/configs/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb.py @@ -102,7 +102,7 @@ data = dict( videos_per_gpu=6, - workers_per_gpu=4, + workers_per_gpu=2, # During testing, each video may have different shape val_dataloader=dict(videos_per_gpu=1), test_dataloader=dict(videos_per_gpu=1), diff --git a/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py b/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py index 7ff769e7a8..d42c6b67c0 100644 --- a/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py +++ b/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py @@ -102,7 +102,7 @@ data = dict( videos_per_gpu=16, - workers_per_gpu=4, + workers_per_gpu=2, val_dataloader=dict(videos_per_gpu=1), test_dataloader=dict(videos_per_gpu=1), train=dict( diff --git a/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py b/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py index 1f81b01afa..0e6ff25105 100644 --- a/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py +++ b/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py @@ -109,7 +109,7 @@ data = dict( videos_per_gpu=16, - workers_per_gpu=4, + workers_per_gpu=2, val_dataloader=dict(videos_per_gpu=1), test_dataloader=dict(videos_per_gpu=1), train=dict( diff --git a/configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb.py b/configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb.py index b92faa6b17..c18273bbd4 100644 --- a/configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb.py +++ b/configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb.py @@ -60,7 +60,7 @@ data = dict( videos_per_gpu=12, - workers_per_gpu=4, + workers_per_gpu=2, # During testing, each video may have different shape val_dataloader=dict(videos_per_gpu=1), test_dataloader=dict(videos_per_gpu=1), diff --git a/configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb.py b/configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb.py index 5bd3489bb8..bd05e864cc 100644 --- a/configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb.py +++ b/configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb.py @@ -60,7 +60,7 @@ data = dict( videos_per_gpu=6, - workers_per_gpu=3, + workers_per_gpu=2, # During testing, each video may have different shape val_dataloader=dict(videos_per_gpu=1), test_dataloader=dict(videos_per_gpu=1), diff --git a/configs/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb.py b/configs/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb.py index 0113a42751..3c5adc3e77 100644 --- a/configs/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb.py +++ 
b/configs/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb.py @@ -101,7 +101,7 @@ ] data = dict( videos_per_gpu=6, - workers_per_gpu=4, + workers_per_gpu=2, # During testing, each video may have different shape val_dataloader=dict(videos_per_gpu=1), test_dataloader=dict(videos_per_gpu=1), diff --git a/configs/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb.py b/configs/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb.py index 37af19e945..4aa7e72ef6 100644 --- a/configs/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb.py +++ b/configs/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb.py @@ -102,7 +102,7 @@ data = dict( videos_per_gpu=16, - workers_per_gpu=4, + workers_per_gpu=2, # During testing, each video may have different shape val_dataloader=dict(videos_per_gpu=1), test_dataloader=dict(videos_per_gpu=1), diff --git a/configs/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py b/configs/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py index f9832276bd..09f5ba43c5 100644 --- a/configs/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py +++ b/configs/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py @@ -81,7 +81,7 @@ data = dict( videos_per_gpu=12, - workers_per_gpu=4, + workers_per_gpu=2, val_dataloader=dict(videos_per_gpu=1), test_dataloader=dict(videos_per_gpu=1), train=dict( diff --git a/configs/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py b/configs/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py index ba1c5a3025..8e7434c2fe 100644 --- a/configs/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py +++ b/configs/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py @@ -81,7 +81,7 @@ data = dict( videos_per_gpu=12, - workers_per_gpu=4, + workers_per_gpu=2, val_dataloader=dict(videos_per_gpu=1), test_dataloader=dict(videos_per_gpu=1), train=dict( diff --git a/configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py b/configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py index 862ec19498..f2d11ff3a5 100644 --- a/configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py +++ b/configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py @@ -91,7 +91,7 @@ data = dict( videos_per_gpu=12, - workers_per_gpu=4, + workers_per_gpu=2, val_dataloader=dict(videos_per_gpu=1), test_dataloader=dict(videos_per_gpu=1), train=dict( diff --git a/configs/recognition/c3d/metafile.yml b/configs/recognition/c3d/metafile.yml index 781d39a4ca..4fa40ddd54 100644 --- a/configs/recognition/c3d/metafile.yml +++ b/configs/recognition/c3d/metafile.yml @@ -20,7 +20,7 @@ Models: - Dataset: UCF101 Metrics: Top 1 Accuracy: 83.27 - top5 accuracy: 95.9 + Top 5 Accuracy: 95.9 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/20201021_140429.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/20201021_140429.log diff --git a/configs/recognition/csn/ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py b/configs/recognition/csn/ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py index 1a5b54c056..6c0792f2eb 100644 --- 
a/configs/recognition/csn/ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py +++ b/configs/recognition/csn/ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py @@ -59,7 +59,8 @@ ] data = dict( videos_per_gpu=4, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/csn/ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py b/configs/recognition/csn/ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py index 8ce5fb5180..19873781b2 100644 --- a/configs/recognition/csn/ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py +++ b/configs/recognition/csn/ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py @@ -59,7 +59,8 @@ ] data = dict( videos_per_gpu=4, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb.py b/configs/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb.py index ebb3d92856..cef9d5dea7 100644 --- a/configs/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb.py +++ b/configs/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb.py @@ -63,7 +63,8 @@ ] data = dict( videos_per_gpu=4, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py b/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py index d25736ba76..54bc5b012f 100644 --- a/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py +++ b/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py @@ -63,7 +63,8 @@ ] data = dict( videos_per_gpu=3, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py b/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py index 9e39011374..fc44dc4251 100644 --- a/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py +++ b/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py @@ -64,7 +64,8 @@ ] data = dict( videos_per_gpu=3, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py b/configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py index eba08ca20b..015526ccf6 100644 --- a/configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py +++ b/configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py @@ -61,7 +61,8 @@ ] data = dict( videos_per_gpu=3, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/csn/metafile.yml b/configs/recognition/csn/metafile.yml index 10edad13c2..6ad1d82831 100644 --- a/configs/recognition/csn/metafile.yml +++ b/configs/recognition/csn/metafile.yml @@ -20,7 +20,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 
80.14 - top5 accuracy: 94.93 + Top 5 Accuracy: 94.93 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/20200728_031952.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/20200728_031952.log @@ -43,7 +43,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 82.76 - top5 accuracy: 95.68 + Top 5 Accuracy: 95.68 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log @@ -64,7 +64,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 77.8 - top5 accuracy: 92.8 + Top 5 Accuracy: 92.8 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_from_scratch_r152_32x2x1_180e_kinetics400_rgb_20210617-d565828d.pth - Config: configs/recognition/csn/ipcsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py @@ -83,7 +83,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 82.5 - top5 accuracy: 95.3 + Top 5 Accuracy: 95.3 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-c3be9793.pth inference_time(video/s): x @@ -103,7 +103,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 78.8 - top5 accuracy: 93.5 + Top 5 Accuracy: 93.5 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_sports1m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-3367437a.pth inference_time(video/s): x @@ -123,7 +123,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 76.5 - top5 accuracy: 92.1 + Top 5 Accuracy: 92.1 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_from_scratch_r152_32x2x1_180e_kinetics400_rgb_20210617-5c933ae1.pth inference_time(video/s): x @@ -143,7 +143,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 79.0 - top5 accuracy: 94.2 + Top 5 Accuracy: 94.2 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_ig65m_pretrained_r50_32x2x1_58e_kinetics400_rgb_20210617-86d33018.pth inference_time(video/s): x @@ -163,7 +163,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 78.2 - top5 accuracy: 93.0 + Top 5 Accuracy: 93.0 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_sports1m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-b9b10241.pth inference_time(video/s): x diff --git a/configs/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb.py b/configs/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb.py index 8ff1e2ff1e..466285006a 100644 --- a/configs/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb.py +++ b/configs/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb.py @@ -71,7 +71,8 @@ ] data = dict( videos_per_gpu=8, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py 
b/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py index 894e8196be..aa0e523f14 100644 --- a/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py +++ b/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py @@ -61,7 +61,8 @@ ] data = dict( videos_per_gpu=8, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb.py b/configs/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb.py index f1bdc4f4a1..17ea4303b9 100644 --- a/configs/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb.py +++ b/configs/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb.py @@ -58,7 +58,8 @@ ] data = dict( videos_per_gpu=8, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/i3d/i3d_r50_heavy_8x8x1_100e_kinetics400_rgb.py b/configs/recognition/i3d/i3d_r50_heavy_8x8x1_100e_kinetics400_rgb.py index b4688d4c13..f21feb2a01 100644 --- a/configs/recognition/i3d/i3d_r50_heavy_8x8x1_100e_kinetics400_rgb.py +++ b/configs/recognition/i3d/i3d_r50_heavy_8x8x1_100e_kinetics400_rgb.py @@ -66,7 +66,8 @@ ] data = dict( videos_per_gpu=8, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/i3d/i3d_r50_lazy_32x2x1_100e_kinetics400_rgb.py b/configs/recognition/i3d/i3d_r50_lazy_32x2x1_100e_kinetics400_rgb.py index eb285c89e9..de84b8feb5 100644 --- a/configs/recognition/i3d/i3d_r50_lazy_32x2x1_100e_kinetics400_rgb.py +++ b/configs/recognition/i3d/i3d_r50_lazy_32x2x1_100e_kinetics400_rgb.py @@ -62,7 +62,8 @@ ] data = dict( videos_per_gpu=8, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb.py b/configs/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb.py index 968d6c9e77..1477ac2a99 100644 --- a/configs/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb.py +++ b/configs/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb.py @@ -61,7 +61,8 @@ ] data = dict( videos_per_gpu=8, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/i3d/i3d_r50_video_heavy_8x8x1_100e_kinetics400_rgb.py b/configs/recognition/i3d/i3d_r50_video_heavy_8x8x1_100e_kinetics400_rgb.py index 2ee3ff7b28..973f7fb88f 100644 --- a/configs/recognition/i3d/i3d_r50_video_heavy_8x8x1_100e_kinetics400_rgb.py +++ b/configs/recognition/i3d/i3d_r50_video_heavy_8x8x1_100e_kinetics400_rgb.py @@ -61,7 +61,8 @@ ] data = dict( videos_per_gpu=8, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/i3d/i3d_r50_video_imgaug_32x2x1_100e_kinetics400_rgb.py b/configs/recognition/i3d/i3d_r50_video_imgaug_32x2x1_100e_kinetics400_rgb.py index 68b1bc9971..86baa0289d 100644 --- a/configs/recognition/i3d/i3d_r50_video_imgaug_32x2x1_100e_kinetics400_rgb.py +++ b/configs/recognition/i3d/i3d_r50_video_imgaug_32x2x1_100e_kinetics400_rgb.py @@ -68,7 +68,8 @@ ] data = dict( videos_per_gpu=8, - workers_per_gpu=4, + workers_per_gpu=2, + 
test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/i3d/metafile.yml b/configs/recognition/i3d/metafile.yml index 02f5704cd7..404a5334ff 100644 --- a/configs/recognition/i3d/metafile.yml +++ b/configs/recognition/i3d/metafile.yml @@ -20,7 +20,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 72.68 - top5 accuracy: 90.78 + Top 5 Accuracy: 90.78 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/20200614_060456.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/20200614_060456.log @@ -43,7 +43,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 73.27 - top5 accuracy: 90.92 + Top 5 Accuracy: 90.92 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/20200725_031555.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/20200725_031555.log @@ -66,7 +66,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 72.85 - top5 accuracy: 90.75 + Top 5 Accuracy: 90.75 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/20200706_143014.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/20200706_143014.log @@ -89,7 +89,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 72.77 - top5 accuracy: 90.57 + Top 5 Accuracy: 90.57 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/20200616_230011.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/20200616_230011.log @@ -112,7 +112,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 73.48 - top5 accuracy: 91.0 + Top 5 Accuracy: 91.0 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/20200725_031604.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/20200725_031604.log @@ -135,7 +135,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 72.32 - top5 accuracy: 90.72 + Top 5 Accuracy: 90.72 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/20200612_233836.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/20200612_233836.log @@ -158,7 +158,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 73.24 - top5 accuracy: 90.99 + Top 5 Accuracy: 90.99 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/20200725_031457.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/20200725_031457.log @@ -181,7 +181,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 74.71 - top5 accuracy: 91.81 + Top 5 Accuracy: 91.81 Task: Action Recognition Training Json Log: 
https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034054.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034054.log @@ -204,7 +204,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 73.37 - top5 accuracy: 91.26 + Top 5 Accuracy: 91.26 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034909.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034909.log @@ -227,7 +227,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 73.92 - top5 accuracy: 91.59 + Top 5 Accuracy: 91.59 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/20200814_044208.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/20200814_044208.log diff --git a/configs/recognition/omnisource/metafile.yml b/configs/recognition/omnisource/metafile.yml index f0ce133aa9..71fb7e6ed6 100644 --- a/configs/recognition/omnisource/metafile.yml +++ b/configs/recognition/omnisource/metafile.yml @@ -21,7 +21,7 @@ Models: - Dataset: MiniKinetics Metrics: Top 1 Accuracy: 77.4 - top5 accuracy: 93.6 + Top 5 Accuracy: 93.6 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030.json Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030.log @@ -45,7 +45,7 @@ Models: - Dataset: MiniKinetics Metrics: Top 1 Accuracy: 78.0 - top5 accuracy: 93.6 + Top 5 Accuracy: 93.6 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030.json Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030.log @@ -69,7 +69,7 @@ Models: - Dataset: MiniKinetics Metrics: Top 1 Accuracy: 78.6 - top5 accuracy: 93.6 + Top 5 Accuracy: 93.6 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030.json Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030.log @@ -93,7 +93,7 @@ Models: - Dataset: MiniKinetics Metrics: Top 1 Accuracy: 80.6 - top5 accuracy: 95.0 + Top 5 Accuracy: 95.0 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030.json Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030.log @@ -117,7 +117,7 @@ Models: - Dataset: MiniKinetics Metrics: Top 1 Accuracy: 78.6 - 
top5 accuracy: 93.2 + Top 5 Accuracy: 93.2 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030.json Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030.log @@ -141,7 +141,7 @@ Models: - Dataset: MiniKinetics Metrics: Top 1 Accuracy: 81.3 - top5 accuracy: 94.8 + Top 5 Accuracy: 94.8 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030.json Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030.log @@ -165,7 +165,7 @@ Models: - Dataset: MiniKinetics Metrics: Top 1 Accuracy: 78.6 - top5 accuracy: 93.9 + Top 5 Accuracy: 93.9 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030.json Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030.log @@ -189,7 +189,7 @@ Models: - Dataset: MiniKinetics Metrics: Top 1 Accuracy: 80.8 - top5 accuracy: 95.0 + Top 5 Accuracy: 95.0 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030.json Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030.log @@ -213,7 +213,7 @@ Models: - Dataset: MiniKinetics Metrics: Top 1 Accuracy: 81.3 - top5 accuracy: 95.2 + Top 5 Accuracy: 95.2 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030.json Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030.log @@ -237,7 +237,7 @@ Models: - Dataset: MiniKinetics Metrics: Top 1 Accuracy: 82.4 - top5 accuracy: 95.6 + Top 5 Accuracy: 95.6 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030.json Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030.log @@ -261,7 +261,7 @@ Models: - Dataset: MiniKinetics Metrics: Top 1 Accuracy: 80.3 - top5 accuracy: 94.5 + Top 5 Accuracy: 94.5 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030.json Training Log: 
https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030.log @@ -285,7 +285,7 @@ Models: - Dataset: MiniKinetics Metrics: Top 1 Accuracy: 82.9 - top5 accuracy: 95.8 + Top 5 Accuracy: 95.8 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030.json Training Log: https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030.log @@ -307,7 +307,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 73.6 - top5 accuracy: 91.0 + Top 5 Accuracy: 91.0 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_imagenet_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-54192355.pth - Config: configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py @@ -327,7 +327,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 75.7 - top5 accuracy: 91.9 + Top 5 Accuracy: 91.9 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-2863fed0.pth - Config: configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py @@ -347,7 +347,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 76.8 - top5 accuracy: 92.5 + Top 5 Accuracy: 92.5 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r50_omni_4x16x1_kinetics400_rgb_20200926-51b1f7ea.pth - Config: configs/recognition/slowonly/slowonly_r101_8x8x1_196e_kinetics400_rgb.py @@ -367,6 +367,6 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 80.4 - top5 accuracy: 94.4 + Top 5 Accuracy: 94.4 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_omni_8x8x1_kinetics400_rgb_20200926-b5dbb701.pth diff --git a/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb.py b/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb.py index 171965ca4e..0aee7f2c2c 100644 --- a/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb.py +++ b/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb.py @@ -88,6 +88,7 @@ data = dict( videos_per_gpu=12, workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=[ dict( type=dataset_type, diff --git a/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb.py b/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb.py index 0f5f430b06..06195d431c 100644 --- a/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb.py +++ b/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb.py @@ -89,6 +89,7 @@ data = dict( videos_per_gpu=12, workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=[ dict( type=dataset_type, diff --git 
a/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb.py b/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb.py index a59abf4653..35263134cd 100644 --- a/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb.py +++ b/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb.py @@ -89,6 +89,7 @@ data = dict( videos_per_gpu=12, workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=[ dict( type=dataset_type, diff --git a/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb.py b/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb.py index 2f442d0f3d..4ef38005bc 100644 --- a/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb.py +++ b/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb.py @@ -119,7 +119,8 @@ data = dict( videos_per_gpu=12, - workers_per_gpu=1, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train_ratio=[2, 1, 1, 1], train=[ dict( diff --git a/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_rgb.py b/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_rgb.py index 0707487bcb..38f7be651b 100644 --- a/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_rgb.py +++ b/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_rgb.py @@ -69,7 +69,8 @@ data = dict( videos_per_gpu=12, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb.py b/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb.py index 68f679ee7c..4acf708c5b 100644 --- a/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb.py +++ b/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb.py @@ -88,6 +88,7 @@ data = dict( videos_per_gpu=12, workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=[ dict( type=dataset_type, diff --git a/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb.py b/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb.py index d4c69bdcf3..447b7cb6c4 100644 --- a/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb.py +++ b/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb.py @@ -90,6 +90,7 @@ videos_per_gpu=12, omni_videos_per_gpu=[12, 64], workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=[ dict( type=dataset_type, diff --git 
a/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb.py b/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb.py index 30c65e4481..89d369403c 100644 --- a/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb.py +++ b/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb.py @@ -91,6 +91,7 @@ data = dict( videos_per_gpu=12, workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=[ dict( type=dataset_type, diff --git a/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb.py b/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb.py index 0f0454d410..f86eaa5f69 100644 --- a/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb.py +++ b/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb.py @@ -91,6 +91,7 @@ data = dict( videos_per_gpu=12, workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=[ dict( type=dataset_type, diff --git a/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb.py b/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb.py index 7832b953b8..e87c726b47 100644 --- a/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb.py +++ b/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb.py @@ -122,7 +122,8 @@ videos_per_gpu=12, omni_videos_per_gpu=[12, 64, 12, 12], train_ratio=[2, 1, 1, 1], - workers_per_gpu=1, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=[ dict( type=dataset_type, diff --git a/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_rgb.py b/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_rgb.py index 012a7ea51a..6ec9e1dc65 100644 --- a/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_rgb.py +++ b/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_rgb.py @@ -71,7 +71,8 @@ data = dict( videos_per_gpu=12, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb.py b/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb.py index 2ae15da4f7..070aa8571e 100644 --- a/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb.py +++ b/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb.py @@ -90,6 +90,7 @@ videos_per_gpu=12, omni_videos_per_gpu=[12, 64], workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=[ dict( type=dataset_type, diff --git a/configs/recognition/r2plus1d/metafile.yml b/configs/recognition/r2plus1d/metafile.yml index 9a95900717..a88409b3bb 100644 --- a/configs/recognition/r2plus1d/metafile.yml +++ 
b/configs/recognition/r2plus1d/metafile.yml @@ -20,7 +20,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 67.3 - top5 accuracy: 87.65 + Top 5 Accuracy: 87.65 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb/20200728_021421.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb/20200728_021421.log @@ -43,7 +43,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 67.3 - top5 accuracy: 87.8 + Top 5 Accuracy: 87.8 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb/20200724_201360.log Training Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb/20200724_201360.log.json @@ -66,7 +66,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 68.68 - top5 accuracy: 88.36 + Top 5 Accuracy: 88.36 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_8x8_69.58_88.36.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r21d_8x8.log @@ -89,7 +89,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 74.6 - top5 accuracy: 91.59 + Top 5 Accuracy: 91.59 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r2plus1d_r34_32x2_74.6_91.6.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r21d_32x2.log diff --git a/configs/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb.py b/configs/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb.py index fc5514a9be..53b1763099 100644 --- a/configs/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb.py +++ b/configs/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb.py @@ -53,7 +53,8 @@ ] data = dict( videos_per_gpu=6, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py b/configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py index a4c25d7f69..f06d5696a2 100644 --- a/configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py +++ b/configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py @@ -55,7 +55,8 @@ ] data = dict( videos_per_gpu=8, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb.py b/configs/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb.py index c3744dcf46..49c85c2ae7 100644 --- a/configs/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb.py +++ b/configs/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb.py @@ -59,7 +59,8 @@ ] data = dict( videos_per_gpu=16, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/slowfast/metafile.yml 
b/configs/recognition/slowfast/metafile.yml index a3841f2f0c..a55a9fdec0 100644 --- a/configs/recognition/slowfast/metafile.yml +++ b/configs/recognition/slowfast/metafile.yml @@ -20,7 +20,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 74.75 - top5 accuracy: 91.73 + Top 5 Accuracy: 91.73 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log @@ -43,7 +43,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 74.34 - top5 accuracy: 91.58 + Top 5 Accuracy: 91.58 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log @@ -66,7 +66,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 75.64 - top5 accuracy: 92.3 + Top 5 Accuracy: 92.3 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/20200704_232901.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/20200704_232901.log @@ -89,7 +89,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 75.61 - top5 accuracy: 92.34 + Top 5 Accuracy: 92.34 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log @@ -112,7 +112,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 76.94 - top5 accuracy: 92.8 + Top 5 Accuracy: 92.8 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log @@ -135,7 +135,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 76.69 - top5 accuracy: 93.07 + Top 5 Accuracy: 93.07 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log @@ -158,7 +158,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 77.9 - top5 accuracy: 93.51 + Top 5 Accuracy: 93.51 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log @@ -181,7 +181,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 77.13 - top5 accuracy: 93.2 + Top 5 Accuracy: 93.2 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log.json 
Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log @@ -204,7 +204,7 @@ Models: - Dataset: SthV1 Metrics: Top 1 Accuracy: 49.24 - top5 accuracy: 78.79 + Top 5 Accuracy: 78.79 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/20210606_225114.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/20210606_225114.log diff --git a/configs/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb.py b/configs/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb.py index e484296692..31c52441e8 100644 --- a/configs/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb.py +++ b/configs/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb.py @@ -33,9 +33,10 @@ in_channels=2304, # 2048+256 num_classes=400, spatial_type='avg', - dropout_ratio=0.5)) -train_cfg = None -test_cfg = dict(average_clips='prob') + dropout_ratio=0.5), + train_cfg=None, + test_cfg=dict(average_clips='prob', max_testing_views=10)) + dataset_type = 'RawframeDataset' data_root = 'data/kinetics400/rawframes_train' data_root_val = 'data/kinetics400/rawframes_val' @@ -88,7 +89,8 @@ ] data = dict( videos_per_gpu=8, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py b/configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py index dc2f1b898a..b8da9030e6 100644 --- a/configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py +++ b/configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py @@ -32,9 +32,10 @@ in_channels=2304, # 2048+256 num_classes=400, spatial_type='avg', - dropout_ratio=0.5)) -train_cfg = None -test_cfg = dict(average_clips='prob') + dropout_ratio=0.5), + train_cfg=None, + test_cfg=dict(average_clips='prob', max_testing_views=10)) + dataset_type = 'RawframeDataset' data_root = 'data/kinetics400/rawframes_train' data_root_val = 'data/kinetics400/rawframes_val' @@ -87,7 +88,8 @@ ] data = dict( videos_per_gpu=8, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/slowfast/slowfast_r152_r50_4x16x1_256e_kinetics400_rgb.py b/configs/recognition/slowfast/slowfast_r152_r50_4x16x1_256e_kinetics400_rgb.py index 06a9792ddd..0d9cd7ee10 100644 --- a/configs/recognition/slowfast/slowfast_r152_r50_4x16x1_256e_kinetics400_rgb.py +++ b/configs/recognition/slowfast/slowfast_r152_r50_4x16x1_256e_kinetics400_rgb.py @@ -32,9 +32,10 @@ in_channels=2304, # 2048+256 num_classes=400, spatial_type='avg', - dropout_ratio=0.5)) -train_cfg = None -test_cfg = dict(average_clips='prob', max_testing_views=8) + dropout_ratio=0.5), + train_cfg=None, + test_cfg=dict(average_clips='prob', max_testing_views=8)) + dataset_type = 'RawframeDataset' data_root = 'data/kinetics400/rawframes_train' data_root_val = 'data/kinetics400/rawframes_val' @@ -87,7 +88,8 @@ ] data = dict( videos_per_gpu=8, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb.py 
b/configs/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb.py index d97cc8f613..f1e692c050 100644 --- a/configs/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb.py +++ b/configs/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb.py @@ -66,7 +66,8 @@ data = dict( videos_per_gpu=4, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py b/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py index a0de3fe8ca..7e455a7ca6 100644 --- a/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py +++ b/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py @@ -54,7 +54,8 @@ ] data = dict( videos_per_gpu=8, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py b/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py index 49a30be628..ee68e80e05 100644 --- a/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py +++ b/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py @@ -5,6 +5,6 @@ resample_rate=4, # tau speed_ratio=4, # alpha channel_ratio=8, # beta_inv - slow_pathway=dict(fusion_kernel=7))) + slow_pathway=dict(fusion_kernel=5))) work_dir = './work_dirs/slowfast_r50_3d_8x8x1_256e_kinetics400_rgb' diff --git a/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py b/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py index 57108548d4..7335b3e7b4 100644 --- a/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py +++ b/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py @@ -63,7 +63,8 @@ ] data = dict( videos_per_gpu=8, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/slowonly/README.md b/configs/recognition/slowonly/README.md index 292bc42581..92672e5ec6 100644 --- a/configs/recognition/slowonly/README.md +++ b/configs/recognition/slowonly/README.md @@ -95,7 +95,7 @@ In data benchmark, we compare two different data preprocessing methods: (1) Resi |config | gpus | backbone | pretrain | top1 acc| top5 acc | gpu_mem(M) | ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.py)|8|ResNet50|ImageNet|46.63|77.19|7759|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb_20210630-807a9a9a.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/20210605_235410.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/20210605_235410.log.json)| 
+|[slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.py)|8|ResNet50|ImageNet|46.63|77.19|7759|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_r50_8x4x1_64e_sthv1_rgb-34901d23.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_r50_8x4x1_64e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_r50_8x4x1_64e_sthv1_rgb.json)| Notes: diff --git a/configs/recognition/slowonly/README_zh-CN.md b/configs/recognition/slowonly/README_zh-CN.md index 726a2cabe4..a8e87e4174 100644 --- a/configs/recognition/slowonly/README_zh-CN.md +++ b/configs/recognition/slowonly/README_zh-CN.md @@ -95,7 +95,7 @@ |配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率| top5 准确率 | GPU 显存占用 (M) | ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.py)|8|ResNet50|ImageNet|46.63|77.19|7759|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb_20210630-807a9a9a.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/20210605_235410.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/20210605_235410.log.json)| +|[slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.py)|8|ResNet50|ImageNet|46.63|77.19|7759|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_r50_8x4x1_64e_sthv1_rgb-34901d23.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_r50_8x4x1_64e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_r50_8x4x1_64e_sthv1_rgb.json)| 注: diff --git a/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_256p_4x16x1_256e_kinetics400_rgb.py b/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_256p_4x16x1_256e_kinetics400_rgb.py index 2624e00be6..e79543a59a 100644 --- a/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_256p_4x16x1_256e_kinetics400_rgb.py +++ b/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_256p_4x16x1_256e_kinetics400_rgb.py @@ -71,7 +71,8 @@ ] data = dict( videos_per_gpu=16, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_320p_4x16x1_256e_kinetics400_rgb.py b/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_320p_4x16x1_256e_kinetics400_rgb.py index 7aea6956cb..b2d55cefae 100644 --- a/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_320p_4x16x1_256e_kinetics400_rgb.py +++ 
b/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_320p_4x16x1_256e_kinetics400_rgb.py @@ -70,7 +70,8 @@ ] data = dict( videos_per_gpu=16, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb.py b/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb.py index 638324ae81..d5c38635b2 100644 --- a/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb.py +++ b/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb.py @@ -70,7 +70,8 @@ ] data = dict( videos_per_gpu=16, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/slowonly/metafile.yml b/configs/recognition/slowonly/metafile.yml index 34d52f70db..0e8d1e8196 100644 --- a/configs/recognition/slowonly/metafile.yml +++ b/configs/recognition/slowonly/metafile.yml @@ -19,7 +19,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 76.8 - top5 accuracy: 92.5 + Top 5 Accuracy: 92.5 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r50_omni_4x16x1_kinetics400_rgb_20200926-51b1f7ea.pth - Config: configs/recognition/slowonly/slowonly_r101_8x8x1_196e_kinetics400_rgb.py @@ -39,7 +39,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 76.5 - top5 accuracy: 92.7 + Top 5 Accuracy: 92.7 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_without_omni_8x8x1_kinetics400_rgb_20200926-0c730aef.pth - Config: configs/recognition/slowonly/slowonly_r101_8x8x1_196e_kinetics400_rgb.py @@ -59,7 +59,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 80.4 - top5 accuracy: 94.4 + Top 5 Accuracy: 94.4 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_omni_8x8x1_kinetics400_rgb_20200926-b5dbb701.pth - Config: configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py @@ -80,7 +80,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 72.76 - top5 accuracy: 90.51 + Top 5 Accuracy: 90.51 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log @@ -103,7 +103,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 72.9 - top5 accuracy: 90.82 + Top 5 Accuracy: 90.82 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014.json Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014.log @@ -126,7 +126,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 74.42 - top5 accuracy: 91.49 + Top 5 Accuracy: 91.49 Task: Action Recognition Training Json Log: 
https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/20200817_003320.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/20200817_003320.log @@ -149,7 +149,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 73.02 - top5 accuracy: 90.77 + Top 5 Accuracy: 90.77 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16_73.02_90.77.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/so_4x16.log @@ -172,7 +172,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 74.93 - top5 accuracy: 91.92 + Top 5 Accuracy: 91.92 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/slowonly_r50_8x8_74.93_91.92.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/so_8x8.log @@ -195,7 +195,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 73.39 - top5 accuracy: 91.12 + Top 5 Accuracy: 91.12 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912.json Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912.log @@ -218,7 +218,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 75.55 - top5 accuracy: 92.04 + Top 5 Accuracy: 92.04 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912.json Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912.log @@ -241,7 +241,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 74.54 - top5 accuracy: 91.73 + Top 5 Accuracy: 91.73 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/20210305_152630.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/20210305_152630.log @@ -264,7 +264,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 76.07 - top5 accuracy: 92.42 + Top 5 Accuracy: 92.42 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/20210308_212250.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/20210308_212250.log @@ -287,7 +287,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 61.79 - top5 accuracy: 83.62 + Top 5 Accuracy: 83.62 Task: Action Recognition Training Json Log: 
https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_61.8_83.6.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_61.8_83.6.log
@@ -310,7 +310,7 @@ Models:
   - Dataset: Kinetics-400
     Metrics:
       Top 1 Accuracy: 65.76
-      top5 accuracy: 86.25
+      Top 5 Accuracy: 86.25
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_196e_kinetics400_flow_65.8_86.3.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_196e_kinetics400_flow_65.8_86.3.log
@@ -333,7 +333,7 @@ Models:
   - Dataset: Kinetics-600
     Metrics:
       Top 1 Accuracy: 77.5
-      top5 accuracy: 93.7
+      Top 5 Accuracy: 93.7
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb/slowonly_r50_video_8x8x1_256e_kinetics600_rgb_20201015.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb/slowonly_r50_video_8x8x1_256e_kinetics600_rgb_20201015.log
@@ -356,7 +356,7 @@ Models:
   - Dataset: Kinetics-700
     Metrics:
       Top 1 Accuracy: 65.0
-      top5 accuracy: 86.1
+      Top 5 Accuracy: 86.1
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb/slowonly_r50_video_8x8x1_256e_kinetics700_rgb_20201015.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb/slowonly_r50_video_8x8x1_256e_kinetics700_rgb_20201015.log
@@ -446,7 +446,7 @@ Models:
   - Dataset: HMDB51
     Metrics:
       Top 1 Accuracy: 37.52
-      top5 accuracy: 71.5
+      Top 5 Accuracy: 71.5
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb/20210605_185256.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb/20210605_185256.log
@@ -468,7 +468,7 @@ Models:
   - Dataset: HMDB51
     Metrics:
       Top 1 Accuracy: 65.95
-      top5 accuracy: 91.05
+      Top 5 Accuracy: 91.05
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb/20210606_010153.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb/20210606_010153.log
@@ -490,7 +490,7 @@ Models:
   - Dataset: UCF101
     Metrics:
       Top 1 Accuracy: 71.35
-      top5 accuracy: 89.35
+      Top 5 Accuracy: 89.35
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb/20210605_213503.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb/20210605_213503.log
@@ -512,7 +512,7 @@ Models:
   - Dataset: UCF101
     Metrics:
       Top 1 Accuracy: 92.78
-      top5 accuracy: 99.42
+      Top 5 Accuracy: 99.42
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/20210606_010231.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/20210606_010231.log
@@ -534,8 +534,8 @@ Models:
   - Dataset: SthV1
     Metrics:
       Top 1 Accuracy: 46.63
-      top5 accuracy: 77.19
+      Top 5 Accuracy: 77.19
     Task: Action Recognition
-  Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/20210605_235410.log.json
-  Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/20210605_235410.log
-  Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb_20210630-807a9a9a.pth
+  Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_r50_8x4x1_64e_sthv1_rgb.json
+  Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_r50_8x4x1_64e_sthv1_rgb.log
+  Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_r50_8x4x1_64e_sthv1_rgb-34901d23.pth
diff --git a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb.py b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb.py
index 8bfcd77d39..9ef7dfbe44 100644
--- a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb.py
+++ b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb.py
@@ -55,7 +55,8 @@
 ]
 data = dict(
     videos_per_gpu=24,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb.py b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb.py
index 5ed60a91eb..750d01b8b4 100644
--- a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb.py
+++ b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb.py
@@ -55,7 +55,8 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb.py b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb.py
index 1a95cc0155..0305527d3d 100644
--- a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb.py
+++ b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb.py
@@ -63,8 +63,8 @@
 
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
-    test_dataloader=dict(videos_per_gpu=2),
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.py b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.py
index 588c6b7803..89457ddf04 100644
--- a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.py
+++ b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.py
@@ -62,8 +62,8 @@
 
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
-    test_dataloader=dict(videos_per_gpu=2),
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv2_rgb.py b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv2_rgb.py
index db92d92e67..65720cffbc 100644
--- a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv2_rgb.py
+++ b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv2_rgb.py
@@ -62,8 +62,8 @@
 
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
-    test_dataloader=dict(videos_per_gpu=2),
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb.py b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb.py
index 3dd5808b34..48df87cc32 100644
--- a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb.py
+++ b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb.py
@@ -63,8 +63,8 @@
 
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
-    test_dataloader=dict(videos_per_gpu=2),
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb.py b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb.py
index 9a09622804..0e34eda9fd 100644
--- a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb.py
+++ b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb.py
@@ -55,7 +55,8 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.py b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.py
index d2517e8391..6e4e7fbc33 100644
--- a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.py
+++ b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.py
@@ -58,7 +58,7 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
     val_dataloader=dict(videos_per_gpu=1),
     test_dataloader=dict(videos_per_gpu=1),
     train=dict(
diff --git a/configs/recognition/slowonly/slowonly_k400_pretrained_r50_4x16x1_120e_gym99_flow.py b/configs/recognition/slowonly/slowonly_k400_pretrained_r50_4x16x1_120e_gym99_flow.py
index 8ad75bace6..7ac7a0bedd 100644
--- a/configs/recognition/slowonly/slowonly_k400_pretrained_r50_4x16x1_120e_gym99_flow.py
+++ b/configs/recognition/slowonly/slowonly_k400_pretrained_r50_4x16x1_120e_gym99_flow.py
@@ -57,7 +57,8 @@
 ]
 data = dict(
     videos_per_gpu=24,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb.py b/configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb.py
index b59a759a9f..53832d7dc1 100644
--- a/configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb.py
+++ b/configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb.py
@@ -59,8 +59,8 @@
 
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
-    test_dataloader=dict(videos_per_gpu=2),
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb.py b/configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb.py
index da2341030c..c4e5be479d 100644
--- a/configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb.py
+++ b/configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb.py
@@ -62,8 +62,8 @@
 
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
-    test_dataloader=dict(videos_per_gpu=2),
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb.py b/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb.py
index e5e33a126d..85d8b7f237 100644
--- a/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb.py
+++ b/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb.py
@@ -68,7 +68,7 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
     test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
diff --git a/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb.py b/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb.py
index 8331fdac8f..4f71e890c5 100644
--- a/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb.py
+++ b/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb.py
@@ -68,7 +68,7 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
     test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
diff --git a/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow.py b/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow.py
index 04c0a25ca8..02a3faf696 100644
--- a/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow.py
+++ b/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow.py
@@ -57,7 +57,8 @@
 ]
 data = dict(
     videos_per_gpu=24,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py b/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py
index 2fbab6150d..a68c8efa88 100644
--- a/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py
+++ b/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py
@@ -58,7 +58,8 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow.py b/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow.py
index 5d55e3386e..2cba67d9e1 100644
--- a/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow.py
+++ b/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow.py
@@ -57,7 +57,8 @@
 ]
 data = dict(
     videos_per_gpu=12,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py b/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py
index 5764678c84..eec3694e7a 100644
--- a/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py
+++ b/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py
@@ -58,7 +58,8 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py b/configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py
index 3a89d35785..202fa4e330 100644
--- a/configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py
+++ b/configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py
@@ -61,7 +61,8 @@
 ]
 data = dict(
     videos_per_gpu=24,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb.py b/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb.py
index 311216fd9c..4b2b987b68 100644
--- a/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb.py
+++ b/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb.py
@@ -58,7 +58,8 @@
 ]
 data = dict(
     videos_per_gpu=12,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb.py b/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb.py
index 79edfb6db2..4cbc901850 100644
--- a/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb.py
+++ b/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb.py
@@ -57,7 +57,8 @@
 ]
 data = dict(
     videos_per_gpu=12,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tanet/metafile.yml b/configs/recognition/tanet/metafile.yml
index 76b5867a14..19a5d47839 100644
--- a/configs/recognition/tanet/metafile.yml
+++ b/configs/recognition/tanet/metafile.yml
@@ -20,7 +20,7 @@ Models:
   - Dataset: Kinetics-400
     Metrics:
       Top 1 Accuracy: 76.28
-      top5 accuracy: 92.6
+      Top 5 Accuracy: 92.6
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.log
@@ -44,8 +44,8 @@ Models:
     Metrics:
       Top 1 Accuracy: 49.69
       Top 1 Accuracy (efficient): 47.45
-      top5 accuracy: 77.62
-      top5 accuracy (efficient): 76.0
+      Top 5 Accuracy: 77.62
+      Top 5 Accuracy (efficient): 76.0
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log
@@ -69,8 +69,8 @@ Models:
     Metrics:
       Top 1 Accuracy: 50.41
       Top 1 Accuracy (efficient): 47.73
-      top5 accuracy: 78.47
-      top5 accuracy (efficient): 77.31
+      Top 5 Accuracy: 78.47
+      Top 5 Accuracy (efficient): 77.31
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/20210607_155335.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/20210607_155335.log
diff --git a/configs/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb.py b/configs/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb.py
index d6ff915721..741bd4db65 100644
--- a/configs/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb.py
+++ b/configs/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb.py
@@ -59,10 +59,11 @@
         clip_len=1,
         frame_interval=1,
         num_clips=16,
+        twice_sample=True,
         test_mode=True),
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
-    dict(type='CenterCrop', crop_size=224),
+    dict(type='ThreeCrop', crop_size=256),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -70,7 +71,8 @@
 ]
 data = dict(
     videos_per_gpu=4,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb.py b/configs/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb.py
index 987336e081..2aa497dca9 100644
--- a/configs/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb.py
+++ b/configs/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb.py
@@ -57,10 +57,11 @@
         clip_len=1,
         frame_interval=1,
         num_clips=8,
+        twice_sample=True,
         test_mode=True),
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
-    dict(type='CenterCrop', crop_size=224),
+    dict(type='ThreeCrop', crop_size=256),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -68,7 +69,8 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb.py b/configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb.py
index a651ecf7f7..3ac78366c2 100644
--- a/configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb.py
+++ b/configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb.py
@@ -63,8 +63,8 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
-    test_dataloader=dict(videos_per_gpu=2),
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb.py b/configs/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb.py
index f4bd1614d1..8772ad953b 100644
--- a/configs/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb.py
+++ b/configs/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb.py
@@ -75,7 +75,8 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb.py b/configs/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb.py
index 66eec25eca..4f4fdf7cbc 100644
--- a/configs/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb.py
+++ b/configs/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb.py
@@ -75,7 +75,8 @@
 ]
 data = dict(
     videos_per_gpu=7,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.py b/configs/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.py
index 6d859e8b37..a6207d9542 100644
--- a/configs/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.py
+++ b/configs/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.py
@@ -75,7 +75,8 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tin/metafile.yml b/configs/recognition/tin/metafile.yml
index 539b8899c1..2fe338d80d 100644
--- a/configs/recognition/tin/metafile.yml
+++ b/configs/recognition/tin/metafile.yml
@@ -20,7 +20,7 @@ Models:
   - Dataset: SthV1
     Metrics:
       Top 1 Accuracy: 44.25
-      top5 accuracy: 73.94
+      Top 5 Accuracy: 73.94
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb/20200729_034132.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb/20200729_034132.log
@@ -43,7 +43,7 @@ Models:
   - Dataset: SthV2
     Metrics:
       Top 1 Accuracy: 56.7
-      top5 accuracy: 83.62
+      Top 5 Accuracy: 83.62
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb/20200912_225451.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb/20200912_225451.log
@@ -66,7 +66,7 @@ Models:
   - Dataset: Kinetics-400
     Metrics:
       Top 1 Accuracy: 70.89
-      top5 accuracy: 89.89
+      Top 5 Accuracy: 89.89
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/20200809_142447.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/20200809_142447.log
diff --git a/configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py b/configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py
index f6bcf82807..3ba652479b 100644
--- a/configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py
+++ b/configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py
@@ -60,7 +60,8 @@
 ]
 data = dict(
     videos_per_gpu=6,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb.py b/configs/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb.py
index 8d1a93d561..35bbd26b00 100644
--- a/configs/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb.py
+++ b/configs/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb.py
@@ -60,7 +60,8 @@
 ]
 data = dict(
     videos_per_gpu=6,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb.py b/configs/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb.py
index 9277fe9ac8..81f03a7344 100644
--- a/configs/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb.py
+++ b/configs/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb.py
@@ -64,7 +64,8 @@
 ]
 data = dict(
     videos_per_gpu=6,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tpn/metafile.yml b/configs/recognition/tpn/metafile.yml
index bb70db3cd3..ecf75fe278 100644
--- a/configs/recognition/tpn/metafile.yml
+++ b/configs/recognition/tpn/metafile.yml
@@ -66,7 +66,7 @@ Models:
   - Dataset: SthV1
     Metrics:
       Top 1 Accuracy: 50.8
-      top5 accuracy: 79.05
+      Top 5 Accuracy: 79.05
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/20210311_162636.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/20210311_162636.log
diff --git a/configs/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.py b/configs/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.py
index a84a0b1895..3b1738fdcf 100644
--- a/configs/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.py
+++ b/configs/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.py
@@ -57,6 +57,7 @@
 data = dict(
     videos_per_gpu=8,
     workers_per_gpu=8,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py b/configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py
index 8783f550fb..0258f4a3d4 100644
--- a/configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py
+++ b/configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py
@@ -57,7 +57,7 @@
 data = dict(
     videos_per_gpu=8,
     workers_per_gpu=8,
-    test_dataloader=dict(videos_per_gpu=12),
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/trn/metafile.yml b/configs/recognition/trn/metafile.yml
index 1f999f63cf..aa47950de6 100644
--- a/configs/recognition/trn/metafile.yml
+++ b/configs/recognition/trn/metafile.yml
@@ -20,8 +20,8 @@ Models:
     Metrics:
       Top 1 Accuracy: 33.88
       Top 1 Accuracy (efficient): 31.62
-      top5 accuracy: 62.12
-      top5 accuracy (efficient): 60.01
+      Top 5 Accuracy: 62.12
+      Top 5 Accuracy (efficient): 60.01
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log
@@ -44,8 +44,8 @@ Models:
     Metrics:
       Top 1 Accuracy: 47.96
       Top 1 Accuracy (efficient): 45.14
-      top5 accuracy: 75.97
-      top5 accuracy (efficient): 73.21
+      Top 5 Accuracy: 75.97
+      Top 5 Accuracy (efficient): 73.21
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210326_103951.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210326_103951.log
diff --git a/configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py b/configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py
index 0578748296..dac55c03b7 100644
--- a/configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py
+++ b/configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py
@@ -68,7 +68,7 @@
 ]
 data = dict(
     videos_per_gpu=16,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
     test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
diff --git a/configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py b/configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py
index a3e2615db2..09e8e2f6d5 100644
--- a/configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py
+++ b/configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py
@@ -68,7 +68,7 @@
 ]
 data = dict(
     videos_per_gpu=16,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
     test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
diff --git a/configs/recognition/tsm/metafile.yml b/configs/recognition/tsm/metafile.yml
index 1f0bc3c81b..1a0ccf4072 100644
--- a/configs/recognition/tsm/metafile.yml
+++ b/configs/recognition/tsm/metafile.yml
@@ -20,7 +20,7 @@ Models:
   - Dataset: Kinetics-400
     Metrics:
       Top 1 Accuracy: 70.24
-      top5 accuracy: 89.56
+      Top 5 Accuracy: 89.56
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log
@@ -43,7 +43,7 @@ Models:
   - Dataset: Kinetics-400
     Metrics:
       Top 1 Accuracy: 70.59
-      top5 accuracy: 89.52
+      Top 5 Accuracy: 89.52
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log
@@ -66,7 +66,7 @@ Models:
   - Dataset: Kinetics-400
     Metrics:
       Top 1 Accuracy: 70.73
-      top5 accuracy: 89.81
+      Top 5 Accuracy: 89.81
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20210616_021451.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20210616_021451.log
@@ -89,7 +89,7 @@ Models:
   - Dataset: Kinetics-400
     Metrics:
       Top 1 Accuracy: 71.9
-      top5 accuracy: 90.03
+      Top 5 Accuracy: 90.03
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb/20210617_103543.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb/20210617_103543.log
@@ -112,7 +112,7 @@ Models:
   - Dataset: Kinetics-400
     Metrics:
       Top 1 Accuracy: 70.48
-      top5 accuracy: 89.4
+      Top 5 Accuracy: 89.4
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.log
@@ -135,7 +135,7 @@ Models:
   - Dataset: Kinetics-400
     Metrics:
       Top 1 Accuracy: 70.25
-      top5 accuracy: 89.66
+      Top 5 Accuracy: 89.66
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log
@@ -158,7 +158,7 @@ Models:
   - Dataset: Kinetics-400
     Metrics:
       Top 1 Accuracy: 73.46
-      top5 accuracy: 90.84
+      Top 5 Accuracy: 90.84
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb/20210617_103245.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb/20210617_103245.log
@@ -181,7 +181,7 @@ Models:
   - Dataset: Kinetics-400
     Metrics:
       Top 1 Accuracy: 74.55
-      top5 accuracy: 91.74
+      Top 5 Accuracy: 91.74
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20210613_034931.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20210613_034931.log
@@ -204,7 +204,7 @@ Models:
   - Dataset: Kinetics-400
     Metrics:
       Top 1 Accuracy: 72.09
-      top5 accuracy: 90.37
+      Top 5 Accuracy: 90.37
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log
@@ -227,7 +227,7 @@ Models:
   - Dataset: Kinetics-400
     Metrics:
       Top 1 Accuracy: 71.89
-      top5 accuracy: 90.73
+      Top 5 Accuracy: 90.73
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log
@@ -250,7 +250,7 @@ Models:
   - Dataset: Kinetics-400
     Metrics:
       Top 1 Accuracy: 72.8
-      top5 accuracy: 90.75
+      Top 5 Accuracy: 90.75
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20210621_115844.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20210621_115844.log
@@ -273,7 +273,7 @@ Models:
   - Dataset: Kinetics-400
     Metrics:
       Top 1 Accuracy: 72.03
-      top5 accuracy: 90.25
+      Top 5 Accuracy: 90.25
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log
@@ -296,7 +296,7 @@ Models:
   - Dataset: Kinetics-400
     Metrics:
       Top 1 Accuracy: 70.7
-      top5 accuracy: 89.9
+      Top 5 Accuracy: 89.9
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log
@@ -319,7 +319,7 @@ Models:
   - Dataset: Kinetics-400
     Metrics:
       Top 1 Accuracy: 71.6
-      top5 accuracy: 90.34
+      Top 5 Accuracy: 90.34
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log
@@ -342,7 +342,7 @@ Models:
   - Dataset: Kinetics-400
     Metrics:
       Top 1 Accuracy: 68.46
-      top5 accuracy: 88.64
+      Top 5 Accuracy: 88.64
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log
@@ -364,7 +364,7 @@ Models:
   - Dataset: Diving48
     Metrics:
       Top 1 Accuracy: 75.99
-      top5 accuracy: 97.16
+      Top 5 Accuracy: 97.16
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log
@@ -386,7 +386,7 @@ Models:
   - Dataset: Diving48
     Metrics:
       Top 1 Accuracy: 81.62
-      top5 accuracy: 97.66
+      Top 5 Accuracy: 97.66
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log
@@ -410,8 +410,8 @@ Models:
     Metrics:
       Top 1 Accuracy: 47.7
       Top 1 Accuracy (efficient): 45.58
-      top5 accuracy: 76.12
-      top5 accuracy (efficient): 75.02
+      Top 5 Accuracy: 76.12
+      Top 5 Accuracy (efficient): 75.02
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/20210203_150227.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/20210203_150227.log
@@ -437,8 +437,8 @@ Models:
     Metrics:
       Top 1 Accuracy: 48.51
       Top 1 Accuracy (efficient): 47.1
-      top5 accuracy: 77.56
-      top5 accuracy (efficient): 76.02
+      Top 5 Accuracy: 77.56
+      Top 5 Accuracy (efficient): 76.02
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/20210203_145829.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/20210203_145829.log
@@ -464,8 +464,8 @@ Models:
     Metrics:
       Top 1 Accuracy: 48.9
       Top 1 Accuracy (efficient): 47.16
-      top5 accuracy: 77.92
-      top5 accuracy (efficient): 76.07
+      Top 5 Accuracy: 77.92
+      Top 5 Accuracy (efficient): 76.07
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.log
@@ -491,8 +491,8 @@ Models:
     Metrics:
       Top 1 Accuracy: 50.31
       Top 1 Accuracy (efficient): 47.85
-      top5 accuracy: 78.18
-      top5 accuracy (efficient): 76.78
+      Top 5 Accuracy: 78.18
+      Top 5 Accuracy (efficient): 76.78
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.log
@@ -518,8 +518,8 @@ Models:
     Metrics:
       Top 1 Accuracy: 49.28
       Top 1 Accuracy (efficient): 47.62
-      top5 accuracy: 77.82
-      top5 accuracy (efficient): 76.63
+      Top 5 Accuracy: 77.82
+      Top 5 Accuracy (efficient): 76.63
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/20201010_221240.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/20201010_221240.log
@@ -545,8 +545,8 @@ Models:
     Metrics:
       Top 1 Accuracy: 48.43
       Top 1 Accuracy (efficient): 45.72
-      top5 accuracy: 76.72
-      top5 accuracy (efficient): 74.67
+      Top 5 Accuracy: 76.72
+      Top 5 Accuracy (efficient): 74.67
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/20201010_224055.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/20201010_224055.log
@@ -572,8 +572,8 @@ Models:
     Metrics:
      Top 1 Accuracy: 61.12
       Top 1 Accuracy (efficient): 57.86
-      top5 accuracy: 86.26
-      top5 accuracy (efficient): 84.67
+      Top 5 Accuracy: 86.26
+      Top 5 Accuracy (efficient): 84.67
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20200912_140737.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20200912_140737.log
@@ -599,8 +599,8 @@ Models:
     Metrics:
       Top 1 Accuracy: 63.84
       Top 1 Accuracy (efficient): 60.79
-      top5 accuracy: 88.3
-      top5 accuracy (efficient): 86.6
+      Top 5 Accuracy: 88.3
+      Top 5 Accuracy (efficient): 86.6
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210401_143656.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210401_143656.log
@@ -626,8 +626,8 @@ Models:
     Metrics:
       Top 1 Accuracy: 62.04
       Top 1 Accuracy (efficient): 59.93
-      top5 accuracy: 87.35
-      top5 accuracy (efficient): 86.1
+      Top 5 Accuracy: 87.35
+      Top 5 Accuracy (efficient): 86.1
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20201010_224215.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20201010_224215.log
@@ -653,8 +653,8 @@ Models:
     Metrics:
       Top 1 Accuracy: 63.19
       Top 1 Accuracy (efficient): 61.06
-      top5 accuracy: 87.93
-      top5 accuracy (efficient): 86.66
+      Top 5 Accuracy: 87.93
+      Top 5 Accuracy (efficient): 86.66
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20210331_134458.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20210331_134458.log
@@ -680,8 +680,8 @@ Models:
     Metrics:
       Top 1 Accuracy: 61.51
       Top 1 Accuracy (efficient): 58.59
-      top5 accuracy: 86.9
-      top5 accuracy (efficient): 85.07
+      Top 5 Accuracy: 86.9
+      Top 5 Accuracy (efficient): 85.07
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20201010_224100.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20201010_224100.log
@@ -707,8 +707,8 @@ Models:
     Metrics:
       Top 1 Accuracy: 48.49
       Top 1 Accuracy (efficient): 46.35
-      top5 accuracy: 76.88
-      top5 accuracy (efficient): 75.07
+      Top 5 Accuracy: 76.88
+      Top 5 Accuracy (efficient): 75.07
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.log
@@ -734,8 +734,8 @@ Models:
     Metrics:
       Top 1 Accuracy: 47.46
       Top 1 Accuracy (efficient): 45.92
-      top5 accuracy: 76.71
-      top5 accuracy (efficient): 75.23
+      Top 5 Accuracy: 76.71
+      Top 5 Accuracy (efficient): 75.23
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.log
@@ -782,7 +782,7 @@ Models:
   - Dataset: HMDB51
     Metrics:
       Top 1 Accuracy: 72.68
-      top5 accuracy: 92.03
+      Top 5 Accuracy: 92.03
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/20210605_182554.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/20210605_182554.log
@@ -805,7 +805,7 @@ Models:
   - Dataset: HMDB51
     Metrics:
       Top 1 Accuracy: 74.77
-      top5 accuracy: 93.86
+      Top 5 Accuracy: 93.86
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/20210605_182505.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/20210605_182505.log
@@ -828,7 +828,7 @@ Models:
   - Dataset: UCF101
     Metrics:
       Top 1 Accuracy: 94.5
-      top5 accuracy: 99.58
+      Top 5 Accuracy: 99.58
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/20210605_182720.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/20210605_182720.log
@@ -851,7 +851,7 @@ Models:
   - Dataset: UCF101
     Metrics:
       Top 1 Accuracy: 94.58
-      top5 accuracy: 99.37
+      Top 5 Accuracy: 99.37
     Task: Action Recognition
   Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/20210605_182720.log.json
   Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/20210605_182720.log
diff --git a/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb.py b/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb.py
index c73cc685ed..9a6535b3ed 100644
--- a/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb.py
+++ b/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb.py
@@ -68,7 +68,8 @@
 ]
 data = dict(
     videos_per_gpu=6,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb.py b/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb.py
index 8fa456dd9d..92ef9bfe4c 100644
--- a/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb.py
+++ b/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb.py
@@ -68,7 +68,8 @@
 ]
 data = dict(
     videos_per_gpu=6,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb.py b/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb.py
index bdc430804b..5169eda3a9 100644
--- a/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb.py
+++ b/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb.py
@@ -68,7 +68,8 @@
 ]
 data = dict(
     videos_per_gpu=12,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb.py b/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb.py
index 5b6c07d478..84317727a4 100644
--- a/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb.py
+++ b/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb.py
@@ -68,7 +68,8 @@
 ]
 data = dict(
     videos_per_gpu=12,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py
index 57628cfc23..b6df2b32d1 100644
--- a/configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py
@@ -63,7 +63,8 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_mobilenetv2_video_dense_1x1x8_100e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_mobilenetv2_video_dense_1x1x8_100e_kinetics400_rgb.py
index b62990f5b9..9442e1d700 100644
--- a/configs/recognition/tsm/tsm_mobilenetv2_video_dense_1x1x8_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsm/tsm_mobilenetv2_video_dense_1x1x8_100e_kinetics400_rgb.py
@@ -66,7 +66,8 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_mobilenetv2_video_inference_dense_1x1x8_100e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_mobilenetv2_video_inference_dense_1x1x8_100e_kinetics400_rgb.py
index 71b9ed8f52..15a3edd5f4 100644
--- a/configs/recognition/tsm/tsm_mobilenetv2_video_inference_dense_1x1x8_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsm/tsm_mobilenetv2_video_inference_dense_1x1x8_100e_kinetics400_rgb.py
@@ -25,7 +25,7 @@
 
 data = dict(
     videos_per_gpu=4,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
     test=dict(
         type=dataset_type,
         ann_file=ann_file_test,
diff --git a/configs/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb.py
index f04cba0e46..884a2d663c 100644
--- a/configs/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb.py
+++ b/configs/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb.py
@@ -72,7 +72,8 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb.py
index f7d7360c0f..738043ac04 100644
--- a/configs/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb.py
+++ b/configs/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb.py
@@ -72,7 +72,8 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb.py
index 5770e50c92..9516e93b05 100644
--- a/configs/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb.py
+++ b/configs/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb.py
@@ -72,7 +72,8 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb.py b/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb.py
index a4c5ce7d41..1926a975ba 100644
--- a/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb.py
+++ b/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb.py
@@ -50,10 +50,11 @@
         clip_len=1,
         frame_interval=1,
         num_clips=8,
+        twice_sample=True,
         test_mode=True),
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
-    dict(type='CenterCrop', crop_size=224),
+    dict(type='ThreeCrop', crop_size=256),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -61,7 +62,8 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py
index cf067d6728..7dcf579f21 100644
--- a/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py
+++ b/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py
@@ -65,7 +65,8 @@
 ]
 data = dict(
     videos_per_gpu=6,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb.py b/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb.py
index 590a8d1ac9..8ca1b6b0c4 100644
--- a/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb.py
+++ b/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb.py
@@ -55,10 +55,11 @@
         clip_len=1,
         frame_interval=1,
         num_clips=16,
+        twice_sample=True,
         test_mode=True),
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
-    dict(type='CenterCrop', crop_size=224),
+    dict(type='ThreeCrop', crop_size=256),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -66,7 +67,8 @@
 ]
 data = dict(
     videos_per_gpu=6,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb.py b/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb.py
index 04bd982d77..f930f1c244 100644
--- a/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb.py
+++ b/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb.py
@@ -55,10 +55,11 @@
         clip_len=1,
         frame_interval=1,
         num_clips=16,
+        twice_sample=True,
         test_mode=True),
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
-    dict(type='CenterCrop', crop_size=224),
+    dict(type='ThreeCrop', crop_size=256),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -66,7 +67,8 @@
 ]
 data = dict(
     videos_per_gpu=6,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb.py
index bff76cf13d..88b28924f6 100644
--- a/configs/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb.py
@@ -1,4 +1,4 @@
-_base_ = ['./tsm_r50_1x1x8_50e_kinetics400_rgb']
+_base_ = ['./tsm_r50_1x1x8_50e_kinetics400_rgb.py']
 
 optimizer_config = dict(grad_clip=dict(max_norm=20, norm_type=2))
 lr_config = dict(policy='step', step=[40, 80])
diff --git a/configs/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb.py b/configs/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb.py
index 2893df0bd1..4c1daf1d49 100644
--- a/configs/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb.py
+++ b/configs/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb.py
@@ -60,7 +60,7 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
     val_dataloader=dict(videos_per_gpu=1),
     test_dataloader=dict(videos_per_gpu=1),
     train=dict(
diff --git a/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py
index 2e0a0520ac..76195eb83e 100644
--- a/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py
+++ b/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py
@@ -62,7 +62,8 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb.py b/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb.py
index 4967fa23ac..e57a5b020c 100644
--- a/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb.py
+++ b/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb.py
@@ -53,10 +53,11 @@
         clip_len=1,
         frame_interval=1,
         num_clips=8,
+        twice_sample=True,
         test_mode=True),
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
-    dict(type='CenterCrop', crop_size=224),
+    dict(type='ThreeCrop', crop_size=256),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -64,7 +65,8 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py b/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py
index ba60196089..c51ac187c5 100644
--- a/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py
+++ b/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py
@@ -53,10 +53,11 @@
         clip_len=1,
         frame_interval=1,
         num_clips=8,
+        twice_sample=True,
         test_mode=True),
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
-    dict(type='CenterCrop', crop_size=224),
+    dict(type='ThreeCrop', crop_size=256),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -64,7 +65,8 @@
 ]
 data = dict(
     videos_per_gpu=6,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.py b/configs/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.py
index abf672adc2..cac9dbb75c 100644
--- a/configs/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.py
+++ b/configs/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.py
@@ -85,7 +85,8 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py
index 96c050633c..8955c8a74f 100644
--- a/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py
@@ -62,8 +62,9 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
-    val_dataloader=dict(videos_per_gpu=4),
+    workers_per_gpu=2,
+    val_dataloader=dict(videos_per_gpu=1),
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb.py b/configs/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb.py
index 00f40cbd58..9b5199a7d0 100644
--- a/configs/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb.py
+++ b/configs/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb.py
@@ -57,10 +57,11 @@
         clip_len=1,
         frame_interval=1,
         num_clips=8,
+        twice_sample=True,
         test_mode=True),
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
-    dict(type='CenterCrop', crop_size=224),
+    dict(type='ThreeCrop', crop_size=256),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -68,7 +69,8 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.py b/configs/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.py
index d03ba632b4..11ae99c946 100644
--- a/configs/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.py
+++ b/configs/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.py
@@ -58,10 +58,11 @@
         clip_len=1,
         frame_interval=1,
         num_clips=8,
+        twice_sample=True,
         test_mode=True),
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
-    dict(type='CenterCrop', crop_size=224),
+    dict(type='ThreeCrop', crop_size=256),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -69,7 +70,8 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py
index 9b600feda8..61004a5bd4 100644
--- a/configs/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py
+++ b/configs/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py
@@ -68,7 +68,8 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.py b/configs/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.py
index 73d6321081..24864ec229 100644
--- a/configs/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.py
+++ b/configs/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.py
@@ -84,7 +84,8 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.py b/configs/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.py
index c4540ee855..7b39be4964 100644
--- a/configs/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.py
+++ b/configs/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.py
@@ -54,6 +54,7 @@
         clip_len=1,
         frame_interval=1,
         num_clips=8,
+        twice_sample=True,
         test_mode=True),
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
@@ -65,7 +66,8 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.py b/configs/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.py
index 75eac07094..a7a8346a78 100644
--- a/configs/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.py
+++ b/configs/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.py
@@ -54,6 +54,7 @@
         clip_len=1,
         frame_interval=1,
         num_clips=8,
+        twice_sample=True,
         test_mode=True),
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
@@ -65,7 +66,8 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.py b/configs/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.py
index 448908c5b4..83ba457bb0 100644
--- a/configs/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.py
+++ b/configs/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.py
@@ -54,10 +54,11 @@
         clip_len=1,
         frame_interval=1,
         num_clips=8,
+        twice_sample=True,
         test_mode=True),
     dict(type='RawFrameDecode'),
     dict(type='Resize', scale=(-1, 256)),
-    dict(type='CenterCrop', crop_size=224),
+    dict(type='ThreeCrop', crop_size=256),
     dict(type='Normalize', **img_norm_cfg),
     dict(type='FormatShape', input_format='NCHW'),
     dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
@@ -65,7 +66,8 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb.py b/configs/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb.py
index ee348e8999..6871f53817 100644
--- a/configs/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb.py
+++ b/configs/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb.py
@@ -71,7 +71,8 @@
 ]
 data = dict(
     videos_per_gpu=4,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb.py b/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb.py
index 3d4a439439..65609d21ec 100644
--- a/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb.py
+++ b/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb.py
@@ -69,7 +69,8 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_kinetics400_rgb.py
index 9cc6fc34fc..3e34c822c9 100644
--- a/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_kinetics400_rgb.py
+++ b/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_kinetics400_rgb.py
@@ -65,7 +65,8 @@
 ]
 data = dict(
     videos_per_gpu=8,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.py
index 41f1257364..d4b5051083 100644
--- a/configs/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.py
@@ -72,7 +72,8 @@
 ]
 data = dict(
     videos_per_gpu=12,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.py
index e0d249f63f..978cb5bc9d 100644
--- a/configs/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.py
@@ -79,7 +79,8 @@
 ]
 data = dict(
     videos_per_gpu=16,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py
index 296aa194b1..dfe70170ee 100644
--- a/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py
@@ -74,8 +74,8 @@
 ]
 data = dict(
     videos_per_gpu=24,
-    workers_per_gpu=4,
-    test_dataloader=dict(videos_per_gpu=4),
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_256p_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_256p_1x1x3_100e_kinetics400_rgb.py
index 10e74c3791..bb0a5fe333 100644
--- a/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_256p_1x1x3_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_256p_1x1x3_100e_kinetics400_rgb.py
@@ -62,7 +62,8 @@
 ]
 data = dict(
     videos_per_gpu=32,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb.py
index ab6a31d04d..6b77944ee0 100644
--- a/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb.py
@@ -62,7 +62,8 @@
 ]
 data = dict(
     videos_per_gpu=32,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_340x256_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_340x256_1x1x3_100e_kinetics400_rgb.py
index 6ab7806e35..897fb05f90 100644
--- a/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_340x256_1x1x3_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_340x256_1x1x3_100e_kinetics400_rgb.py
@@ -62,7 +62,8 @@
 ]
 data = dict(
     videos_per_gpu=32,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_256p_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_256p_1x1x3_100e_kinetics400_rgb.py
index 061cc6db3d..3d9e8ca547 100644
--- a/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_256p_1x1x3_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_256p_1x1x3_100e_kinetics400_rgb.py
@@ -57,7 +57,8 @@
 ]
 data = dict(
     videos_per_gpu=32,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_320p_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_320p_1x1x3_100e_kinetics400_rgb.py
index 957dd20165..c35a32e4e7 100644
--- a/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_320p_1x1x3_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_320p_1x1x3_100e_kinetics400_rgb.py
@@ -57,7 +57,8 @@
 ]
 data = dict(
     videos_per_gpu=32,
-    workers_per_gpu=4,
+    workers_per_gpu=2,
+    test_dataloader=dict(videos_per_gpu=1),
     train=dict(
         type=dataset_type,
         ann_file=ann_file_train,
diff --git a/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb.py
index 02dccc7f3d..968bfc6f38 100644
--- a/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb.py
+++ b/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb.py
@@ -57,7 +57,8 @@
 ]
 data = dict(
     videos_per_gpu=32,
-    workers_per_gpu=4,
+
workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/data_benchmark/tsn_r50_test_256p_1x1x25_10crop_100e_kinetics400_rgb.py b/configs/recognition/tsn/data_benchmark/tsn_r50_test_256p_1x1x25_10crop_100e_kinetics400_rgb.py index 378572d72f..bb4da3990f 100644 --- a/configs/recognition/tsn/data_benchmark/tsn_r50_test_256p_1x1x25_10crop_100e_kinetics400_rgb.py +++ b/configs/recognition/tsn/data_benchmark/tsn_r50_test_256p_1x1x25_10crop_100e_kinetics400_rgb.py @@ -22,7 +22,8 @@ dict(type='ToTensor', keys=['imgs']) ] data = dict( - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), test=dict( type=dataset_type, ann_file=ann_file_test, diff --git a/configs/recognition/tsn/data_benchmark/tsn_r50_test_256p_1x1x25_3crop_100e_kinetics400_rgb.py b/configs/recognition/tsn/data_benchmark/tsn_r50_test_256p_1x1x25_3crop_100e_kinetics400_rgb.py index 4f9f39073c..82f1d3eabe 100644 --- a/configs/recognition/tsn/data_benchmark/tsn_r50_test_256p_1x1x25_3crop_100e_kinetics400_rgb.py +++ b/configs/recognition/tsn/data_benchmark/tsn_r50_test_256p_1x1x25_3crop_100e_kinetics400_rgb.py @@ -22,7 +22,8 @@ dict(type='ToTensor', keys=['imgs']) ] data = dict( - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), test=dict( type=dataset_type, ann_file=ann_file_test, diff --git a/configs/recognition/tsn/data_benchmark/tsn_r50_test_320p_1x1x25_10crop_100e_kinetics400_rgb.py b/configs/recognition/tsn/data_benchmark/tsn_r50_test_320p_1x1x25_10crop_100e_kinetics400_rgb.py index fa84d042d5..74aeac51e0 100644 --- a/configs/recognition/tsn/data_benchmark/tsn_r50_test_320p_1x1x25_10crop_100e_kinetics400_rgb.py +++ b/configs/recognition/tsn/data_benchmark/tsn_r50_test_320p_1x1x25_10crop_100e_kinetics400_rgb.py @@ -22,7 +22,8 @@ dict(type='ToTensor', keys=['imgs']) ] data = dict( - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), test=dict( type=dataset_type, ann_file=ann_file_test, diff --git a/configs/recognition/tsn/data_benchmark/tsn_r50_test_320p_1x1x25_3crop_100e_kinetics400_rgb.py b/configs/recognition/tsn/data_benchmark/tsn_r50_test_320p_1x1x25_3crop_100e_kinetics400_rgb.py index 8f37ff246c..ba35eb5922 100644 --- a/configs/recognition/tsn/data_benchmark/tsn_r50_test_320p_1x1x25_3crop_100e_kinetics400_rgb.py +++ b/configs/recognition/tsn/data_benchmark/tsn_r50_test_320p_1x1x25_3crop_100e_kinetics400_rgb.py @@ -22,7 +22,8 @@ dict(type='ToTensor', keys=['imgs']) ] data = dict( - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), test=dict( type=dataset_type, ann_file=ann_file_test, diff --git a/configs/recognition/tsn/data_benchmark/tsn_r50_test_340x256_1x1x25_10crop_100e_kinetics400_rgb.py b/configs/recognition/tsn/data_benchmark/tsn_r50_test_340x256_1x1x25_10crop_100e_kinetics400_rgb.py index eb38dc9f29..ad900cd342 100644 --- a/configs/recognition/tsn/data_benchmark/tsn_r50_test_340x256_1x1x25_10crop_100e_kinetics400_rgb.py +++ b/configs/recognition/tsn/data_benchmark/tsn_r50_test_340x256_1x1x25_10crop_100e_kinetics400_rgb.py @@ -22,7 +22,8 @@ dict(type='ToTensor', keys=['imgs']) ] data = dict( - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), test=dict( type=dataset_type, ann_file=ann_file_test, diff --git a/configs/recognition/tsn/data_benchmark/tsn_r50_test_340x256_1x1x25_3crop_100e_kinetics400_rgb.py 
b/configs/recognition/tsn/data_benchmark/tsn_r50_test_340x256_1x1x25_3crop_100e_kinetics400_rgb.py index 95584c8ef6..980259ecbd 100644 --- a/configs/recognition/tsn/data_benchmark/tsn_r50_test_340x256_1x1x25_3crop_100e_kinetics400_rgb.py +++ b/configs/recognition/tsn/data_benchmark/tsn_r50_test_340x256_1x1x25_3crop_100e_kinetics400_rgb.py @@ -22,7 +22,8 @@ dict(type='ToTensor', keys=['imgs']) ] data = dict( - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), test=dict( type=dataset_type, ann_file=ann_file_test, diff --git a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_action_rgb.py b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_action_rgb.py index 067063dfd6..77df841a88 100644 --- a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_action_rgb.py +++ b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_action_rgb.py @@ -71,6 +71,7 @@ data = dict( videos_per_gpu=32, workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_attribute_rgb.py b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_attribute_rgb.py index 2e8369a79e..bdee7f32b7 100644 --- a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_attribute_rgb.py +++ b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_attribute_rgb.py @@ -71,6 +71,7 @@ data = dict( videos_per_gpu=32, workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_concept_rgb.py b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_concept_rgb.py index 4f33d7706b..11b369bb0f 100644 --- a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_concept_rgb.py +++ b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_concept_rgb.py @@ -71,6 +71,7 @@ data = dict( videos_per_gpu=32, workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_event_rgb.py b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_event_rgb.py index be910fa5f4..bcf5a40171 100644 --- a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_event_rgb.py +++ b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_event_rgb.py @@ -71,6 +71,7 @@ data = dict( videos_per_gpu=32, workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_object_rgb.py b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_object_rgb.py index 4f33d7706b..11b369bb0f 100644 --- a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_object_rgb.py +++ b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_object_rgb.py @@ -71,6 +71,7 @@ data = dict( videos_per_gpu=32, workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_scene_rgb.py b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_scene_rgb.py index fd088f8f82..305e9f27c1 100644 --- a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_scene_rgb.py +++ b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_scene_rgb.py @@ -71,6 +71,7 @@ data = dict( videos_per_gpu=32, workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/metafile.yml 
b/configs/recognition/tsn/metafile.yml index 4a60bd9261..700174155e 100644 --- a/configs/recognition/tsn/metafile.yml +++ b/configs/recognition/tsn/metafile.yml @@ -19,7 +19,7 @@ Models: - Dataset: UCF101 Metrics: Top 1 Accuracy: 83.03 - top5 accuracy: 96.78 + Top 5 Accuracy: 96.78 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb/tsn_r50_1x1x3_75e_ucf101_rgb_20201023.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb/tsn_r50_1x1x3_75e_ucf101_rgb_20201023.log @@ -41,7 +41,7 @@ Models: - Dataset: Diving48 Metrics: Top 1 Accuracy: 71.27 - top5 accuracy: 95.74 + Top 5 Accuracy: 95.74 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/20210426_014138.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/20210426_014138.log @@ -63,7 +63,7 @@ Models: - Dataset: Diving48 Metrics: Top 1 Accuracy: 76.75 - top5 accuracy: 96.95 + Top 5 Accuracy: 96.95 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/20210426_014103.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/20210426_014103.log @@ -85,7 +85,7 @@ Models: - Dataset: HMDB51 Metrics: Top 1 Accuracy: 48.95 - top5 accuracy: 80.19 + Top 5 Accuracy: 80.19 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb/20201025_231108.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb/20201025_231108.log @@ -107,7 +107,7 @@ Models: - Dataset: HMDB51 Metrics: Top 1 Accuracy: 56.08 - top5 accuracy: 84.31 + Top 5 Accuracy: 84.31 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb/20201108_190805.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb/20201108_190805.log @@ -128,7 +128,7 @@ Models: - Dataset: HMDB51 Metrics: Top 1 Accuracy: 54.25 - top5 accuracy: 83.86 + Top 5 Accuracy: 83.86 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb/20201112_170135.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb/20201112_170135.log @@ -151,7 +151,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 70.6 - top5 accuracy: 89.26 + Top 5 Accuracy: 89.26 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log @@ -174,7 +174,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 70.42 - top5 accuracy: 89.03 + Top 5 Accuracy: 89.03 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log @@ -197,7 +197,7 @@ Models: - Dataset: 
Kinetics-400 Metrics: Top 1 Accuracy: 70.18 - top5 accuracy: 89.1 + Top 5 Accuracy: 89.1 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb/20200627_105310.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb/20200627_105310.log @@ -220,7 +220,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 70.91 - top5 accuracy: 89.51 + Top 5 Accuracy: 89.51 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log @@ -243,7 +243,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 55.7 - top5 accuracy: 79.85 + Top 5 Accuracy: 79.85 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow/tsn_r50_f3_kinetics400_flow_shortedge_55.7_79.9.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow/tsn_r50_f3_kinetics400_flow_shortedge_55.7_79.9.log @@ -266,7 +266,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 71.8 - top5 accuracy: 90.17 + Top 5 Accuracy: 90.17 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x8_100e_kinetics400_rgb/20200815_173413.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x8_100e_kinetics400_rgb/20200815_173413.log @@ -289,7 +289,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 72.41 - top5 accuracy: 90.55 + Top 5 Accuracy: 90.55 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb/tsn_r50_f8_kinetics400_shortedge_72.4_90.6.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb/tsn_r50_f8_kinetics400_shortedge_72.4_90.6.log @@ -312,7 +312,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 57.76 - top5 accuracy: 80.99 + Top 5 Accuracy: 80.99 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow/tsn_r50_f8_kinetics400_flow_shortedge_57.8_81.0.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow/tsn_r50_f8_kinetics400_flow_shortedge_57.8_81.0.log @@ -335,7 +335,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 71.11 - top5 accuracy: 90.04 + Top 5 Accuracy: 90.04 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014.log @@ -358,7 +358,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 70.77 - top5 accuracy: 89.3 + Top 5 Accuracy: 89.3 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/20200606_003901.log.json 
Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/20200606_003901.log @@ -381,7 +381,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 71.79 - top5 accuracy: 90.25 + Top 5 Accuracy: 90.25 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log @@ -404,7 +404,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 70.4 - top5 accuracy: 89.12 + Top 5 Accuracy: 89.12 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_dense_100e_kinetics400_rgb.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_dense_100e_kinetics400_rgb.log @@ -427,7 +427,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 73.43 - top5 accuracy: 91.01 + Top 5 Accuracy: 91.01 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.log @@ -450,7 +450,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 72.78 - top5 accuracy: 90.75 + Top 5 Accuracy: 90.75 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.log @@ -472,7 +472,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 73.6 - top5 accuracy: 91.0 + Top 5 Accuracy: 91.0 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_imagenet_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-54192355.pth - Config: configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py @@ -492,7 +492,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 73.1 - top5 accuracy: 90.4 + Top 5 Accuracy: 90.4 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_without_omni_1x1x3_kinetics400_rgb_20200926-c133dd49.pth - Config: configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py @@ -512,7 +512,7 @@ Models: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 75.7 - top5 accuracy: 91.9 + Top 5 Accuracy: 91.9 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-2863fed0.pth - Config: configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb.py @@ -533,7 +533,7 @@ Models: - Dataset: Kinetics-600 Metrics: Top 1 Accuracy: 74.8 - top5 accuracy: 92.3 + Top 5 Accuracy: 92.3 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb/tsn_r50_video_1x1x8_100e_kinetics600_rgb_20201015.json Training Log: 
https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb/tsn_r50_video_1x1x8_100e_kinetics600_rgb_20201015.log @@ -556,7 +556,7 @@ Models: - Dataset: Kinetics-700 Metrics: Top 1 Accuracy: 61.7 - top5 accuracy: 83.6 + Top 5 Accuracy: 83.6 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb/tsn_r50_video_1x1x8_100e_kinetics700_rgb_20201015.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb/tsn_r50_video_1x1x8_100e_kinetics700_rgb_20201015.log @@ -579,7 +579,7 @@ Models: - Dataset: SthV1 Metrics: Top 1 Accuracy: 18.55 - top5 accuracy: 44.8 + Top 5 Accuracy: 44.8 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_r50_f8_sthv1_18.1_45.0.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_sthv1.log @@ -602,7 +602,7 @@ Models: - Dataset: SthV1 Metrics: Top 1 Accuracy: 15.77 - top5 accuracy: 39.85 + Top 5 Accuracy: 39.85 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/20200614_211932.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/20200614_211932.log @@ -625,7 +625,7 @@ Models: - Dataset: SthV2 Metrics: Top 1 Accuracy: 32.97 - top5 accuracy: 63.62 + Top 5 Accuracy: 63.62 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20200915_114139.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20200915_114139.log @@ -648,7 +648,7 @@ Models: - Dataset: SthV2 Metrics: Top 1 Accuracy: 27.21 - top5 accuracy: 55.84 + Top 5 Accuracy: 55.84 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20200917_105855.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20200917_105855.log @@ -671,7 +671,7 @@ Models: - Dataset: MiT Metrics: Top 1 Accuracy: 26.84 - top5 accuracy: 51.6 + Top 5 Accuracy: 51.6 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb/tsn_r50_f6_mit_26.8_51.6.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb/tsn_mit.log @@ -716,7 +716,7 @@ Models: - Dataset: ActivityNet v1.3 Metrics: Top 1 Accuracy: 73.93 - top5 accuracy: 93.44 + Top 5 Accuracy: 93.44 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb/20210228_223327.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb/20210228_223327.log @@ -739,7 +739,7 @@ Models: - Dataset: ActivityNet v1.3 Metrics: Top 1 Accuracy: 76.9 - top5 accuracy: 94.47 + Top 5 Accuracy: 94.47 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb/20210217_181313.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb/20210217_181313.log @@ -762,7 +762,7 @@ Models: - Dataset: ActivityNet v1.3 Metrics: Top 
1 Accuracy: 57.51 - top5 accuracy: 83.02 + Top 5 Accuracy: 83.02 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow/tsn_r50_320p_1x1x8_150e_activitynet_video_flow_20200804.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow/tsn_r50_320p_1x1x8_150e_activitynet_video_flow_20200804.log @@ -785,7 +785,7 @@ Models: - Dataset: ActivityNet v1.3 Metrics: Top 1 Accuracy: 59.51 - top5 accuracy: 82.69 + Top 5 Accuracy: 82.69 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow_20200804.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow_20200804.log @@ -941,10 +941,10 @@ Models: Training Resources: 8 GPUs Name: tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb Results: - - Dataset: Kinetics400 + - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 77.51 - top5 accuracy: 92.92 + Top 5 Accuracy: 92.92 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.log diff --git a/configs/recognition/tsn/tsn_fp16_r50_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_fp16_r50_1x1x3_100e_kinetics400_rgb.py index a7ca319174..5f73da4ae0 100644 --- a/configs/recognition/tsn/tsn_fp16_r50_1x1x3_100e_kinetics400_rgb.py +++ b/configs/recognition/tsn/tsn_fp16_r50_1x1x3_100e_kinetics400_rgb.py @@ -61,7 +61,8 @@ ] data = dict( videos_per_gpu=32, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb.py b/configs/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb.py index f6a938a938..4fcaf4b796 100644 --- a/configs/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb.py +++ b/configs/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb.py @@ -20,10 +20,9 @@ dropout_ratio=0.5, init_std=0.01, multi_class=True, - label_smooth_eps=0)) -# model training and testing settings -train_cfg = None -test_cfg = dict(average_clips=None) + label_smooth_eps=0), + train_cfg=None, + test_cfg=dict(average_clips=None)) # dataset settings dataset_type = 'RawframeDataset' @@ -84,7 +83,8 @@ data = dict( videos_per_gpu=16, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb.py b/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb.py index 8ddb99f79e..9b5de9f691 100644 --- a/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb.py +++ b/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb.py @@ -61,7 +61,8 @@ ] data = dict( videos_per_gpu=4, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb.py 
b/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb.py index 410dbe6b1b..1d8b3e0143 100644 --- a/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb.py +++ b/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb.py @@ -29,7 +29,12 @@ dict(type='ToTensor', keys=['imgs', 'label']) ] val_pipeline = [ - dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=16), + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=16, + test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), @@ -39,10 +44,15 @@ dict(type='ToTensor', keys=['imgs']) ] test_pipeline = [ - dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=16), + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=16, + test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), - dict(type='TenCrop', crop_size=224), + dict(type='ThreeCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), @@ -50,7 +60,8 @@ ] data = dict( videos_per_gpu=4, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py index 3705de2a2c..1eca1ae6aa 100644 --- a/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py +++ b/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py @@ -61,7 +61,8 @@ ] data = dict( videos_per_gpu=32, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb.py b/configs/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb.py index 0c98df7039..e902eba955 100644 --- a/configs/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb.py +++ b/configs/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb.py @@ -57,7 +57,8 @@ ] data = dict( videos_per_gpu=32, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb.py b/configs/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb.py index de706a4278..5f8a15419d 100644 --- a/configs/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb.py +++ b/configs/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb.py @@ -64,7 +64,8 @@ ] data = dict( videos_per_gpu=16, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb.py b/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb.py index 9b92b5cfad..b881817293 100644 --- a/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb.py +++ b/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb.py @@ -61,7 +61,8 @@ ] data = dict( videos_per_gpu=32, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb.py index 93588f034f..6b3230ec2d 100644 --- a/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb.py +++ 
b/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb.py @@ -61,7 +61,8 @@ ] data = dict( videos_per_gpu=32, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb.py b/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb.py index 51fb545379..83081300ed 100644 --- a/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb.py +++ b/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb.py @@ -60,6 +60,7 @@ dict(type='ToTensor', keys=['imgs']) ] data = dict( + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb.py b/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb.py index 54bfb8fb59..0147490a42 100644 --- a/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb.py +++ b/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb.py @@ -67,7 +67,8 @@ ] data = dict( videos_per_gpu=16, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb.py b/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb.py index b9035e12b8..6b33b98a1e 100644 --- a/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb.py +++ b/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb.py @@ -32,7 +32,12 @@ dict(type='ToTensor', keys=['imgs', 'label']) ] val_pipeline = [ - dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8), + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=8, + test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), @@ -42,10 +47,15 @@ dict(type='ToTensor', keys=['imgs']) ] test_pipeline = [ - dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8), + dict( + type='SampleFrames', + clip_len=1, + frame_interval=1, + num_clips=8, + test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), - dict(type='TenCrop', crop_size=224), + dict(type='ThreeCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), @@ -54,6 +64,7 @@ data = dict( videos_per_gpu=16, workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb.py index 5ca6bf89d8..64554a7934 100644 --- a/configs/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb.py +++ b/configs/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb.py @@ -53,7 +53,8 @@ ] data = dict( videos_per_gpu=32, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow.py b/configs/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow.py index 00b18daf9b..761d214aad 100644 --- a/configs/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow.py +++ b/configs/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow.py @@ -56,7 +56,8 @@ ] data = dict( videos_per_gpu=32, - workers_per_gpu=4, + workers_per_gpu=2, + 
test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb.py index b3b341baa9..7641b9771f 100644 --- a/configs/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb.py +++ b/configs/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb.py @@ -56,7 +56,8 @@ ] data = dict( videos_per_gpu=12, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow.py b/configs/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow.py index 710416878a..3ca87c708c 100644 --- a/configs/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow.py +++ b/configs/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow.py @@ -56,7 +56,8 @@ ] data = dict( videos_per_gpu=12, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow.py b/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow.py index 5a019ab8ac..ebb9982850 100644 --- a/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow.py +++ b/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow.py @@ -58,7 +58,8 @@ ] data = dict( videos_per_gpu=8, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow.py b/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow.py index f5f39ad68c..dfab68032f 100644 --- a/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow.py +++ b/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow.py @@ -58,7 +58,8 @@ ] data = dict( videos_per_gpu=8, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb.py b/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb.py index 9321b6e3c4..7ccb2beed5 100644 --- a/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb.py +++ b/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb.py @@ -58,7 +58,8 @@ ] data = dict( videos_per_gpu=8, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb.py b/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb.py index 6c3bcc8f29..17f1a7e79c 100644 --- a/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb.py +++ b/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb.py @@ -58,7 +58,8 @@ ] data = dict( videos_per_gpu=8, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_clip_feature_extraction_1x1x3_rgb.py b/configs/recognition/tsn/tsn_r50_clip_feature_extraction_1x1x3_rgb.py index fec95d8c2f..a64608acfe 100644 --- 
a/configs/recognition/tsn/tsn_r50_clip_feature_extraction_1x1x3_rgb.py +++ b/configs/recognition/tsn/tsn_r50_clip_feature_extraction_1x1x3_rgb.py @@ -32,6 +32,7 @@ data = dict( videos_per_gpu=1, workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), test=dict( type=dataset_type, ann_file=None, diff --git a/configs/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb.py index fa462447fe..e8e498e9df 100644 --- a/configs/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb.py +++ b/configs/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb.py @@ -65,8 +65,9 @@ ] data = dict( videos_per_gpu=16, - workers_per_gpu=4, - val_dataloader=dict(videos_per_gpu=4), + workers_per_gpu=2, + val_dataloader=dict(videos_per_gpu=1), + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb.py index a177a0035f..70affa8382 100644 --- a/configs/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb.py +++ b/configs/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb.py @@ -62,7 +62,8 @@ ] data = dict( videos_per_gpu=12, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb.py b/configs/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb.py index 9bea133f7b..a2a3e61e1c 100644 --- a/configs/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb.py +++ b/configs/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb.py @@ -68,7 +68,8 @@ ] data = dict( videos_per_gpu=4, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb.py b/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb.py index d9ba6bf97a..57a8614fd5 100644 --- a/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb.py +++ b/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb.py @@ -68,7 +68,8 @@ ] data = dict( videos_per_gpu=8, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py index bec1b85f13..7e3cf98476 100644 --- a/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py +++ b/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py @@ -63,7 +63,8 @@ ] data = dict( videos_per_gpu=32, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb.py b/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb.py index 80d54e2a54..687ce2018f 100644 --- a/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb.py +++ b/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb.py @@ -61,7 +61,8 @@ ] data = dict( videos_per_gpu=12, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git 
a/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb.py b/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb.py index 00e47c6431..62390025f4 100644 --- a/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb.py +++ b/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb.py @@ -61,7 +61,8 @@ ] data = dict( videos_per_gpu=12, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py index 862d287899..ad67dcb74d 100644 --- a/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py +++ b/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py @@ -58,7 +58,8 @@ ] data = dict( videos_per_gpu=32, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb.py index 824df78dc6..7c6d5e820e 100644 --- a/configs/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb.py +++ b/configs/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb.py @@ -63,8 +63,9 @@ ] data = dict( videos_per_gpu=32, - workers_per_gpu=4, + workers_per_gpu=2, val_dataloader=dict(videos_per_gpu=1), + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_video_imgaug_1x1x8_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_video_imgaug_1x1x8_100e_kinetics400_rgb.py index fab95ceb14..c16f7a3001 100644 --- a/configs/recognition/tsn/tsn_r50_video_imgaug_1x1x8_100e_kinetics400_rgb.py +++ b/configs/recognition/tsn/tsn_r50_video_imgaug_1x1x8_100e_kinetics400_rgb.py @@ -13,10 +13,9 @@ spatial_type='avg', consensus=dict(type='AvgConsensus', dim=1), dropout_ratio=0.4, - init_std=0.01)) -# model training and testing settings -train_cfg = None -test_cfg = dict(average_clips=None) + init_std=0.01), + train_cfg=None, + test_cfg=dict(average_clips=None)) # dataset settings dataset_type = 'VideoDataset' data_root = 'data/kinetics400/videos_train' @@ -84,7 +83,8 @@ ] data = dict( videos_per_gpu=32, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition/tsn/tsn_r50_video_mixup_1x1x8_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_video_mixup_1x1x8_100e_kinetics400_rgb.py index bef969ad45..4f5f2a3a03 100644 --- a/configs/recognition/tsn/tsn_r50_video_mixup_1x1x8_100e_kinetics400_rgb.py +++ b/configs/recognition/tsn/tsn_r50_video_mixup_1x1x8_100e_kinetics400_rgb.py @@ -83,7 +83,8 @@ ] data = dict( videos_per_gpu=32, - workers_per_gpu=4, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition_audio/audioonly/audioonly_r50_64x1x1_100e_kinetics400_audio_feature.py b/configs/recognition_audio/audioonly/audioonly_r50_64x1x1_100e_kinetics400_audio_feature.py index cef00ef22b..d8be216e99 100644 --- a/configs/recognition_audio/audioonly/audioonly_r50_64x1x1_100e_kinetics400_audio_feature.py +++ 
b/configs/recognition_audio/audioonly/audioonly_r50_64x1x1_100e_kinetics400_audio_feature.py @@ -45,7 +45,7 @@ ] data = dict( videos_per_gpu=160, - workers_per_gpu=4, + workers_per_gpu=2, train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition_audio/resnet/metafile.yml b/configs/recognition_audio/resnet/metafile.yml index 3891bd4b16..42ebc2bdce 100644 --- a/configs/recognition_audio/resnet/metafile.yml +++ b/configs/recognition_audio/resnet/metafile.yml @@ -18,7 +18,7 @@ Models: Top 1 Accuracy: 19.7 Top 1 Accuracy [w. RGB]: 71.5 Top 1 Accuracy delta [w. RGB]: 0.39 - top5 accuracy: 35.75 + Top 5 Accuracy: 35.75 top5 accuracy [w. RGB]: 90.18 top5 accuracy delta [w. RGB]: 0.14 Task: Action Recognition diff --git a/configs/recognition_audio/resnet/tsn_r18_64x1x1_100e_kinetics400_audio_feature.py b/configs/recognition_audio/resnet/tsn_r18_64x1x1_100e_kinetics400_audio_feature.py index e2fbd7c777..d8b5c1e6f3 100644 --- a/configs/recognition_audio/resnet/tsn_r18_64x1x1_100e_kinetics400_audio_feature.py +++ b/configs/recognition_audio/resnet/tsn_r18_64x1x1_100e_kinetics400_audio_feature.py @@ -56,7 +56,7 @@ ] data = dict( videos_per_gpu=320, - workers_per_gpu=4, + workers_per_gpu=2, train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/recognition_audio/resnet/tsn_r50_64x1x1_100e_kinetics400_audio.py b/configs/recognition_audio/resnet/tsn_r50_64x1x1_100e_kinetics400_audio.py index f4e42f6cc4..a806dea747 100644 --- a/configs/recognition_audio/resnet/tsn_r50_64x1x1_100e_kinetics400_audio.py +++ b/configs/recognition_audio/resnet/tsn_r50_64x1x1_100e_kinetics400_audio.py @@ -51,7 +51,7 @@ ] data = dict( videos_per_gpu=320, - workers_per_gpu=4, + workers_per_gpu=2, train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py b/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py index a9a3aa5f04..640c67485a 100644 --- a/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py +++ b/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py @@ -85,7 +85,7 @@ ] data = dict( videos_per_gpu=16, - workers_per_gpu=4, + workers_per_gpu=2, test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, diff --git a/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb.py b/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb.py index 31cb0da388..978bb2adcf 100644 --- a/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb.py +++ b/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb.py @@ -91,7 +91,7 @@ ] data = dict( videos_per_gpu=16, - workers_per_gpu=4, + workers_per_gpu=2, test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, diff --git a/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint.py b/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint.py index 3863ef7159..47e541115e 100644 --- a/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint.py +++ b/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint.py @@ -85,7 +85,7 @@ ] data = dict( videos_per_gpu=16, - workers_per_gpu=4, + workers_per_gpu=2, test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, diff --git a/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb.py b/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb.py index 2c3f47682f..7e98d22dd6 100644 --- 
a/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb.py +++ b/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb.py @@ -91,7 +91,7 @@ ] data = dict( videos_per_gpu=16, - workers_per_gpu=4, + workers_per_gpu=2, test_dataloader=dict(videos_per_gpu=1), train=dict( type=dataset_type, diff --git a/demo/mmaction2_tutorial.ipynb b/demo/mmaction2_tutorial.ipynb index 8671ab34d4..24981795f4 100644 --- a/demo/mmaction2_tutorial.ipynb +++ b/demo/mmaction2_tutorial.ipynb @@ -840,7 +840,7 @@ "]\n", "data = dict(\n", " videos_per_gpu=2,\n", - " workers_per_gpu=4,\n", + " workers_per_gpu=2,\n", " train=dict(\n", " type='VideoDataset',\n", " ann_file='kinetics400_tiny/kinetics_tiny_train_video.txt',\n", diff --git a/demo/mmaction2_tutorial_zh-CN.ipynb b/demo/mmaction2_tutorial_zh-CN.ipynb index 0d7bf32b08..501f2b8d50 100644 --- a/demo/mmaction2_tutorial_zh-CN.ipynb +++ b/demo/mmaction2_tutorial_zh-CN.ipynb @@ -629,7 +629,7 @@ "]\n", "data = dict(\n", " videos_per_gpu=2,\n", - " workers_per_gpu=4,\n", + " workers_per_gpu=2,\n", " train=dict(\n", " type='VideoDataset',\n", " ann_file='kinetics400_tiny/kinetics_tiny_train_video.txt',\n", diff --git a/docs/tutorials/1_config.md b/docs/tutorials/1_config.md index 0133ca9ce9..20abeeba3d 100644 --- a/docs/tutorials/1_config.md +++ b/docs/tutorials/1_config.md @@ -370,7 +370,7 @@ which is convenient to conduct various experiments. ] data = dict( # Config of data videos_per_gpu=32, # Batch size of each single GPU - workers_per_gpu=4, # Workers to pre-fetch data for each single GPU + workers_per_gpu=2, # Workers to pre-fetch data for each single GPU train_dataloader=dict( # Additional config of train dataloader drop_last=True), # Whether to drop out the last batch of data in training val_dataloader=dict( # Additional config of validation dataloader @@ -593,7 +593,7 @@ We incorporate modular design into our config system, which is convenient to con data = dict( # Config of data videos_per_gpu=16, # Batch size of each single GPU - workers_per_gpu=4, # Workers to pre-fetch data for each single GPU + workers_per_gpu=2, # Workers to pre-fetch data for each single GPU val_dataloader=dict( # Additional config of validation dataloader videos_per_gpu=1), # Batch size of each single GPU during evaluation train=dict( # Training dataset config @@ -738,7 +738,7 @@ test_pipeline = [ data = dict( videos_per_gpu=8, - workers_per_gpu=4, + workers_per_gpu=2, train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/docs/tutorials/3_new_dataset.md b/docs/tutorials/3_new_dataset.md index 1b1d18d37e..223117aa57 100644 --- a/docs/tutorials/3_new_dataset.md +++ b/docs/tutorials/3_new_dataset.md @@ -128,7 +128,7 @@ ann_file_test = 'data/custom/custom_val_list.txt' ... 
data = dict( videos_per_gpu=32, - workers_per_gpu=4, + workers_per_gpu=2, train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/docs_zh_CN/tutorials/1_config.md b/docs_zh_CN/tutorials/1_config.md index a59d765786..d3f5ae9ad0 100644 --- a/docs_zh_CN/tutorials/1_config.md +++ b/docs_zh_CN/tutorials/1_config.md @@ -368,7 +368,7 @@ MMAction2 将模块化设计整合到配置文件系统中,以便执行各类 ] data = dict( # 数据的配置 videos_per_gpu=32, # 单个 GPU 的批大小 - workers_per_gpu=4, # 单个 GPU 的 dataloader 的进程 + workers_per_gpu=2, # 单个 GPU 的 dataloader 的进程 train_dataloader=dict( # 训练过程 dataloader 的额外设置 drop_last=True), # 在训练过程中是否丢弃最后一个批次 val_dataloader=dict( # 验证过程 dataloader 的额外设置 @@ -587,7 +587,7 @@ MMAction2 将模块化设计整合到配置文件系统中,以便于执行各 data = dict( # 数据的配置 videos_per_gpu=16, # 单个 GPU 的批大小 - workers_per_gpu=4, # 单个 GPU 的 dataloader 的进程 + workers_per_gpu=2, # 单个 GPU 的 dataloader 的进程 val_dataloader=dict( # 验证过程 dataloader 的额外设置 videos_per_gpu=1), # 单个 GPU 的批大小 train=dict( # 训练数据集的设置 @@ -729,7 +729,7 @@ test_pipeline = [ data = dict( videos_per_gpu=8, - workers_per_gpu=4, + workers_per_gpu=2, train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/docs_zh_CN/tutorials/3_new_dataset.md b/docs_zh_CN/tutorials/3_new_dataset.md index 19402cb41e..172d73b00d 100644 --- a/docs_zh_CN/tutorials/3_new_dataset.md +++ b/docs_zh_CN/tutorials/3_new_dataset.md @@ -123,7 +123,7 @@ ann_file_test = 'data/custom/custom_val_list.txt' ... data = dict( videos_per_gpu=32, - workers_per_gpu=4, + workers_per_gpu=2, train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/mmaction/datasets/ava_dataset.py b/mmaction/datasets/ava_dataset.py index b071698e34..547cd37204 100644 --- a/mmaction/datasets/ava_dataset.py +++ b/mmaction/datasets/ava_dataset.py @@ -165,15 +165,18 @@ def parse_img_record(self, img_records): while len(img_records) > 0: img_record = img_records[0] num_img_records = len(img_records) - selected_records = list( - filter( - lambda x: np.array_equal(x['entity_box'], img_record[ - 'entity_box']), img_records)) + + selected_records = [ + x for x in img_records + if np.array_equal(x['entity_box'], img_record['entity_box']) + ] + num_selected_records = len(selected_records) - img_records = list( - filter( - lambda x: not np.array_equal(x['entity_box'], img_record[ - 'entity_box']), img_records)) + img_records = [ + x for x in img_records if + not np.array_equal(x['entity_box'], img_record['entity_box']) + ] + assert len(img_records) + num_selected_records == num_img_records bboxes.append(img_record['entity_box']) diff --git a/mmaction/models/heads/misc_head.py b/mmaction/models/heads/misc_head.py index 88f9f203da..a2888a26d8 100644 --- a/mmaction/models/heads/misc_head.py +++ b/mmaction/models/heads/misc_head.py @@ -31,6 +31,7 @@ class ACRNHead(nn.Module): `requires_grad`. Default: dict(type='BN2d', requires_grad=True). act_cfg (dict): Config for activate layers. Default: dict(type='ReLU', inplace=True). + kwargs (dict): Other new arguments, to be compatible with MMDet update. 
""" def __init__(self, @@ -40,7 +41,8 @@ def __init__(self, num_convs=1, conv_cfg=dict(type='Conv3d'), norm_cfg=dict(type='BN3d', requires_grad=True), - act_cfg=dict(type='ReLU', inplace=True)): + act_cfg=dict(type='ReLU', inplace=True), + **kwargs): super().__init__() self.in_channels = in_channels diff --git a/tools/data/jester/README.md b/tools/data/jester/README.md index 7acdbe13d9..2e054ab33d 100644 --- a/tools/data/jester/README.md +++ b/tools/data/jester/README.md @@ -39,7 +39,7 @@ we add `"filename_tmpl='{:05}.jpg'"` to the dict of `data.train`, `data.val` and ``` data = dict( videos_per_gpu=16, - workers_per_gpu=4, + workers_per_gpu=2, train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/tools/data/jester/README_zh-CN.md b/tools/data/jester/README_zh-CN.md index fdfda97d65..4b3fb17f0b 100644 --- a/tools/data/jester/README_zh-CN.md +++ b/tools/data/jester/README_zh-CN.md @@ -39,7 +39,7 @@ cd $MMACTION2/tools/data/jester/ ```python data = dict( videos_per_gpu=16, - workers_per_gpu=4, + workers_per_gpu=2, train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/tools/data/sthv1/README.md b/tools/data/sthv1/README.md index 5b93de2a2f..75f4c11134 100644 --- a/tools/data/sthv1/README.md +++ b/tools/data/sthv1/README.md @@ -40,7 +40,7 @@ Since the prefix of official JPGs is "%05d.jpg" (e.g., "00001.jpg"), users need ``` data = dict( videos_per_gpu=16, - workers_per_gpu=4, + workers_per_gpu=2, train=dict( type=dataset_type, ann_file=ann_file_train, diff --git a/tools/data/sthv1/README_zh-CN.md b/tools/data/sthv1/README_zh-CN.md index 262f51dabf..11cc9318be 100644 --- a/tools/data/sthv1/README_zh-CN.md +++ b/tools/data/sthv1/README_zh-CN.md @@ -38,7 +38,7 @@ cd $MMACTION2/tools/data/sthv1/ ``` data = dict( videos_per_gpu=16, - workers_per_gpu=4, + workers_per_gpu=2, train=dict( type=dataset_type, ann_file=ann_file_train, From a892aae0a750554208a1f0d80edcb14c0bc49ac2 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Wed, 18 Aug 2021 12:01:05 +0800 Subject: [PATCH 233/414] Update README.md --- tools/data/thumos14/README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tools/data/thumos14/README.md b/tools/data/thumos14/README.md index eaddb60cbe..ba62bf0aee 100644 --- a/tools/data/thumos14/README.md +++ b/tools/data/thumos14/README.md @@ -75,6 +75,8 @@ bash extract_frames.sh tvl1 ## Step 4. Fetch File List +Note: These files are not available temporarily, we will update the download link soon. + This part is **optional** if you do not use SSN model. You can run the follow script to fetch pre-computed tag proposals. From acce52d21a2545d9351b1060853c3bcd171b7158 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Sat, 21 Aug 2021 20:33:33 +0800 Subject: [PATCH 234/414] Fix lint (#1105) --- tools/data/thumos14/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/data/thumos14/README.md b/tools/data/thumos14/README.md index ba62bf0aee..7c3c7533c0 100644 --- a/tools/data/thumos14/README.md +++ b/tools/data/thumos14/README.md @@ -75,7 +75,7 @@ bash extract_frames.sh tvl1 ## Step 4. Fetch File List -Note: These files are not available temporarily, we will update the download link soon. +Note: These files are not available temporarily, we will update the download link soon. This part is **optional** if you do not use SSN model. 
From c1d463ff6cfcf0e05422bd9f8c3458e1cb5293bc Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Sat, 28 Aug 2021 22:31:08 +0800 Subject: [PATCH 235/414] [ModelZoo] Add 256h ssv2 related ckpts (#1101) * add 256h ssv2 related ckpts * fix lint * add tsn * remove 240h * update metafile --- configs/recognition/trn/README.md | 2 +- configs/recognition/trn/README_zh-CN.md | 4 +- configs/recognition/trn/metafile.yml | 16 ++--- configs/recognition/tsm/README.md | 6 +- configs/recognition/tsm/README_zh-CN.md | 6 +- configs/recognition/tsm/metafile.yml | 92 +++++-------------------- configs/recognition/tsn/README.md | 4 +- configs/recognition/tsn/README_zh-CN.md | 8 +-- configs/recognition/tsn/metafile.yml | 24 +++---- 9 files changed, 52 insertions(+), 110 deletions(-) diff --git a/configs/recognition/trn/README.md b/configs/recognition/trn/README.md index 2034d08e1c..223d769168 100644 --- a/configs/recognition/trn/README.md +++ b/configs/recognition/trn/README.md @@ -25,7 +25,7 @@ |config | resolution | gpus | backbone| pretrain | top1 acc (efficient/accurate)| top5 acc (efficient/accurate)| gpu_mem(M) | ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[trn_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 45.14 / 47.96 |73.21 / 75.97 | 11010 | [ckpt](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/trn_r50_1x1x8_50e_sthv2_rgb_20210401-773eca7b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210326_103951.log)| [json](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210326_103951.log.json)| +|[trn_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py) | height 256 | 8 | ResNet50 | ImageNet | 48.39 / 51.28 |76.58 / 78.65 | 11010 | [ckpt](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/trn_r50_1x1x8_50e_sthv2_rgb_20210816-7abbc4c1.pth) | [log](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210816_221356.log)| [json](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210816_221356.log.json)| Notes: diff --git a/configs/recognition/trn/README_zh-CN.md b/configs/recognition/trn/README_zh-CN.md index 79a2e92e50..d0e85f015c 100644 --- a/configs/recognition/trn/README_zh-CN.md +++ b/configs/recognition/trn/README_zh-CN.md @@ -19,13 +19,13 @@ |配置文件 | 分辨率 | GPU 数量 | 主干网络| 预训练 | top1 准确率 (efficient/accurate)| top5 准确率 (efficient/accurate)| GPU 显存占用 (M)| ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[trn_r50_1x1x8_50e_sthv1_rgb](configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 31.62 / 33.88 |60.01 / 62.12| 11010 | [ckpt](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/trn_r50_1x1x8_50e_sthv1_rgb_20210401-163704a8.pth) | [log](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log)| [json](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log.json)| +|[trn_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 31.62 / 33.88 |60.01 / 62.12| 11010 | 
[ckpt](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/trn_r50_1x1x8_50e_sthv1_rgb_20210401-163704a8.pth) | [log](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log)| [json](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log.json)| ### Something-Something V2 |配置文件 | 分辨率 | GPU 数量 | 主干网络| 预训练 | top1 准确率 (efficient/accurate)| top5 准确率 (efficient/accurate)| GPU 显存占用 (M)| ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[trn_r50_1x1x8_50e_sthv2_rgb](configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 45.14 / 47.96 |73.21 / 75.97 | 11010 | [ckpt](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/trn_r50_1x1x8_50e_sthv2_rgb_20210401-773eca7b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210326_103951.log)| [json](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210326_103951.log.json)| +|[trn_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py) | 高 256 | 8 | ResNet50 | ImageNet | 48.39 / 51.28 |76.58 / 78.65 | 11010 | [ckpt](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/trn_r50_1x1x8_50e_sthv2_rgb_20210816-7abbc4c1.pth) | [log](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210816_221356.log)| [json](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210816_221356.log.json)| 注: diff --git a/configs/recognition/trn/metafile.yml b/configs/recognition/trn/metafile.yml index aa47950de6..81578d8437 100644 --- a/configs/recognition/trn/metafile.yml +++ b/configs/recognition/trn/metafile.yml @@ -34,7 +34,7 @@ Models: Epochs: 50 Parameters: 26641154 Pretrained: ImageNet - Resolution: height 100 + Resolution: height 256 Training Data: SthV2 Training Resources: 8 GPUs Modality: RGB @@ -42,11 +42,11 @@ Models: Results: - Dataset: SthV2 Metrics: - Top 1 Accuracy: 47.96 - Top 1 Accuracy (efficient): 45.14 - Top 5 Accuracy: 75.97 - Top 5 Accuracy (efficient): 73.21 + Top 1 Accuracy: 51.28 + Top 1 Accuracy (efficient): 48.39 + Top 5 Accuracy: 78.65 + Top 5 Accuracy (efficient): 76.58 Task: Action Recognition - Training Json Log: https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210326_103951.log.json - Training Log: https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210326_103951.log - Weights: https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/trn_r50_1x1x8_50e_sthv2_rgb_20210401-773eca7b.pth + Training Json Log: https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210816_221356.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210816_221356.log + Weights: https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/trn_r50_1x1x8_50e_sthv2_rgb_20210816-7abbc4c1.pth diff --git a/configs/recognition/tsm/README.md b/configs/recognition/tsm/README.md index 06e57e4119..d9b2c0fcfe 100644 --- a/configs/recognition/tsm/README.md +++ b/configs/recognition/tsm/README.md @@ -70,11 +70,9 @@ |config | resolution | gpus | backbone | pretrain| top1 acc (efficient/accurate)| top5 acc (efficient/accurate)| reference top1 acc 
(efficient/accurate)| reference top5 acc (efficient/accurate)| gpu_mem(M) | ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tsm_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py) |height 240|8| ResNet50| ImageNet |57.86 / 61.12|84.67 / 86.26|[57.98 / 60.69](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[84.57 / 86.28](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7069 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/tsm_r50_1x1x8_50e_sthv2_rgb_20200912-033c4ac6.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20200912_140737.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20200912_140737.log.json)| -|[tsm_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py) |height 256|8| ResNet50| ImageNet |60.79 / 63.84|86.60 / 88.30|[xx / 61.2](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7069 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/tsm_r50_256h_1x1x8_50e_sthv2_rgb_20210401-df97f3e1.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210401_143656.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210401_143656.log.json)| -|[tsm_r50_1x1x16_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb.py) |height 240|8| ResNet50| ImageNet |59.93 / 62.04|86.10 / 87.35|[58.90 / 60.98](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[85.29 / 86.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 10400| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/tsm_r50_1x1x16_50e_sthv2_rgb_20201010-16469c6f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20201010_224215.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20201010_224215.log.json)| +|[tsm_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py) |height 256|8| ResNet50| ImageNet |59.11 / 61.82|85.39 / 86.80|[xx / 61.2](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7069 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/tsm_r50_256h_1x1x8_50e_sthv2_rgb_20210816-032aa4da.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210816_224310.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210816_224310.log.json)| |[tsm_r50_1x1x16_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py) |height 256|8| ResNet50| ImageNet |61.06 / 63.19|86.66 / 87.93|[xx / 
63.1](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 10400 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/tsm_r50_256h_1x1x16_50e_sthv2_rgb_20210331-0a45549c.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20210331_134458.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20210331_134458.log.json)| -|[tsm_r101_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb.py) |height 240|8| ResNet101 | ImageNet|58.59 / 61.51|85.07 / 86.90|[58.89 / 61.36](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[85.14 / 87.00](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 9784 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/tsm_r101_1x1x8_50e_sthv2_rgb_20201010-98cdedb8.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20201010_224100.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20201010_224100.log.json)| +|[tsm_r101_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb.py) |height 256|8| ResNet101 | ImageNet|60.88 / 63.84|86.56 / 88.30|[xx / 63.3](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 9727 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/tsm_r101_256h_1x1x8_50e_sthv2_rgb_20210401-df97f3e1.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20210401_143656.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20210401_143656.log.json)| ### MixUp & CutMix on Something-Something V1 diff --git a/configs/recognition/tsm/README_zh-CN.md b/configs/recognition/tsm/README_zh-CN.md index 814a8a63c2..8f65a3bd14 100644 --- a/configs/recognition/tsm/README_zh-CN.md +++ b/configs/recognition/tsm/README_zh-CN.md @@ -70,11 +70,9 @@ |配置文件 | 分辨率 | GPU 数量 | 主干网络| 预训练 | top1 准确率 (efficient/accurate)| top5 准确率 (efficient/accurate)| 参考代码的 top1 准确率 (efficient/accurate)| 参考代码的 top5 准确率 (efficient/accurate)| GPU 显存占用 (M)| ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tsm_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py) |高 240|8| ResNet50| ImageNet |57.86 / 61.12|84.67 / 86.26|[57.98 / 60.69](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[84.57 / 86.28](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7069 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/tsm_r50_1x1x8_50e_sthv2_rgb_20200912-033c4ac6.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20200912_140737.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20200912_140737.log.json)| 
-|[tsm_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py) |高 256|8| ResNet50| ImageNet |60.79 / 63.84|86.60 / 88.30|[xx / 61.2](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7069 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/tsm_r50_256h_1x1x8_50e_sthv2_rgb_20210401-df97f3e1.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210401_143656.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210401_143656.log.json)| -|[tsm_r50_1x1x16_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb.py) |高 240|8| ResNet50| ImageNet |59.93 / 62.04|86.10 / 87.35|[58.90 / 60.98](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[85.29 / 86.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 10400| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/tsm_r50_1x1x16_50e_sthv2_rgb_20201010-16469c6f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20201010_224215.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20201010_224215.log.json)| +|[tsm_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py) |高 256|8| ResNet50| ImageNet |59.11 / 61.82|85.39 / 86.80|[xx / 61.2](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7069 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/tsm_r50_256h_1x1x8_50e_sthv2_rgb_20210816-032aa4da.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210816_224310.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210816_224310.log.json)| |[tsm_r50_1x1x16_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py) |高 256|8| ResNet50| ImageNet |61.06 / 63.19|86.66 / 87.93|[xx / 63.1](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 10400 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/tsm_r50_256h_1x1x16_50e_sthv2_rgb_20210331-0a45549c.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20210331_134458.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20210331_134458.log.json)| -|[tsm_r101_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb.py) |高 240|8| ResNet101 | ImageNet|58.59 / 61.51|85.07 / 86.90|[58.89 / 61.36](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[85.14 / 87.00](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 9784 | 
[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/tsm_r101_1x1x8_50e_sthv2_rgb_20201010-98cdedb8.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20201010_224100.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20201010_224100.log.json)| +|[tsm_r101_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb.py) |高 256|8| ResNet101 | ImageNet|60.88 / 63.84|86.56 / 88.30|[xx / 63.3](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 9727 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/tsm_r101_256h_1x1x8_50e_sthv2_rgb_20210401-df97f3e1.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20210401_143656.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20210401_143656.log.json)| ### Diving48 diff --git a/configs/recognition/tsm/metafile.yml b/configs/recognition/tsm/metafile.yml index 1a0ccf4072..3dd07ec59b 100644 --- a/configs/recognition/tsm/metafile.yml +++ b/configs/recognition/tsm/metafile.yml @@ -562,7 +562,7 @@ Models: FLOPs: 32961859584 Parameters: 23864558 Pretrained: ImageNet - Resolution: height 240 + Resolution: height 256 Training Data: SthV2 Training Resources: 8 GPUs Modality: RGB @@ -570,71 +570,17 @@ Models: Results: - Dataset: SthV2 Metrics: - Top 1 Accuracy: 61.12 - Top 1 Accuracy (efficient): 57.86 - Top 5 Accuracy: 86.26 - Top 5 Accuracy (efficient): 84.67 + Top 1 Accuracy: 61.82 + Top 1 Accuracy (efficient): 59.11 + Top 5 Accuracy: 86.80 + Top 5 Accuracy (efficient): 85.39 Task: Action Recognition - Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20200912_140737.log.json - Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20200912_140737.log - Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/tsm_r50_1x1x8_50e_sthv2_rgb_20200912-033c4ac6.pth + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210816_224310.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210816_224310.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/tsm_r50_256h_1x1x8_50e_sthv2_rgb_20210816-032aa4da.pth reference top1 acc (efficient/accurate): '[57.98 / 60.69](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' reference top5 acc (efficient/accurate): '[84.57 / 86.28](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' -- Config: configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py - In Collection: TSM - Metadata: - Architecture: ResNet50 - Batch Size: 6 - Epochs: 50 - FLOPs: 32961859584 - Parameters: 23864558 - Pretrained: ImageNet - Resolution: height 256 - Training Data: SthV2 - Training Resources: 8 GPUs - Modality: RGB - Name: tsm_r50_1x1x8_50e_sthv2_rgb - Results: - - Dataset: SthV2 - Metrics: - Top 1 Accuracy: 63.84 - Top 1 Accuracy (efficient): 60.79 - Top 5 Accuracy: 88.3 - Top 5 Accuracy (efficient): 86.6 
- Task: Action Recognition - Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210401_143656.log.json - Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210401_143656.log - Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/tsm_r50_256h_1x1x8_50e_sthv2_rgb_20210401-df97f3e1.pth - reference top1 acc (efficient/accurate): '[xx / 61.2](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' - reference top5 acc (efficient/accurate): '[xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' - Config: configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb.py - In Collection: TSM - Metadata: - Architecture: ResNet50 - Batch Size: 6 - Epochs: 50 - FLOPs: 65923719168 - Parameters: 23864558 - Pretrained: ImageNet - Resolution: height 240 - Training Data: SthV2 - Training Resources: 8 GPUs - Modality: RGB - Name: tsm_r50_1x1x16_50e_sthv2_rgb - Results: - - Dataset: SthV2 - Metrics: - Top 1 Accuracy: 62.04 - Top 1 Accuracy (efficient): 59.93 - Top 5 Accuracy: 87.35 - Top 5 Accuracy (efficient): 86.1 - Task: Action Recognition - Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20201010_224215.log.json - Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20201010_224215.log - Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/tsm_r50_1x1x16_50e_sthv2_rgb_20201010-16469c6f.pth - reference top1 acc (efficient/accurate): '[58.90 / 60.98](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' - reference top5 acc (efficient/accurate): '[85.29 / 86.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' -- Config: configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py In Collection: TSM Metadata: Architecture: ResNet50 @@ -670,7 +616,7 @@ Models: FLOPs: 62782459904 Parameters: 42856686 Pretrained: ImageNet - Resolution: height 240 + Resolution: height 256 Training Data: SthV2 Training Resources: 8 GPUs Modality: RGB @@ -678,16 +624,16 @@ Models: Results: - Dataset: SthV2 Metrics: - Top 1 Accuracy: 61.51 - Top 1 Accuracy (efficient): 58.59 - Top 5 Accuracy: 86.9 - Top 5 Accuracy (efficient): 85.07 - Task: Action Recognition - Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20201010_224100.log.json - Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20201010_224100.log - Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/tsm_r101_1x1x8_50e_sthv2_rgb_20201010-98cdedb8.pth - reference top1 acc (efficient/accurate): '[58.89 / 61.36](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' - reference top5 acc (efficient/accurate): '[85.14 / 87.00](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' + Top 1 Accuracy: 63.84 + Top 1 Accuracy (efficient): 60.88 + Top 5 Accuracy: 88.30 + Top 5 Accuracy (efficient): 86.56 + Task: Action Recognition + Training Json Log: 
https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20210401_143656.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20210401_143656.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/tsm_r101_256h_1x1x8_50e_sthv2_rgb_20210401-df97f3e1.pth + reference top1 acc (efficient/accurate): '[xx / 63.3](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' + reference top5 acc (efficient/accurate): '[xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' - Config: configs/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.py In Collection: TSM Metadata: diff --git a/configs/recognition/tsn/README.md b/configs/recognition/tsn/README.md index fe2c41bef8..f20034ebbc 100644 --- a/configs/recognition/tsn/README.md +++ b/configs/recognition/tsn/README.md @@ -134,8 +134,8 @@ In data benchmark, we compare: |config |resolution| gpus| backbone| pretrain | top1 acc| top5 acc | reference top1 acc | reference top5 acc | gpu_mem(M) | ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tsn_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb.py)|height 240 |8| ResNet50| ImageNet |32.97 |63.62 |[30.56](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[58.49](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 10966 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/tsn_r50_1x1x8_50e_sthv2_rgb_20200915-f3b381a5.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20200915_114139.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20200915_114139.log.json)| -|[tsn_r50_1x1x16_50e_sthv2_rgb](/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb.py)| height 240 |8|ResNet50| ImageNet |27.21 |55.84 |[21.91](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[46.87](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|8337| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/tsn_r50_1x1x16_50e_sthv2_rgb_20200917-80bc3611.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20200917_105855.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20200917_105855.log.json)| +|[tsn_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb.py)|height 256 |8| ResNet50| ImageNet |28.59 |59.56 | x | x | 10966 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/tsn_r50_1x1x8_50e_sthv2_rgb_20210816-1aafee8f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20210816_221116.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20210816_221116.log.json)| +|[tsn_r50_1x1x16_50e_sthv2_rgb](/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb.py)| height 256 |8|ResNet50| ImageNet |20.89 |49.16 | x | x |8337| 
[ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/tsn_r50_1x1x16_50e_sthv2_rgb_20210816-5d23ac6e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20210816_225256.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20210816_225256.log.json)| ### Moments in Time diff --git a/configs/recognition/tsn/README_zh-CN.md b/configs/recognition/tsn/README_zh-CN.md index 3bd6ecd749..1e3e89bc94 100644 --- a/configs/recognition/tsn/README_zh-CN.md +++ b/configs/recognition/tsn/README_zh-CN.md @@ -127,15 +127,15 @@ |配置文件|分辨率 | GPU 数量| 主干网络 |预训练| top1 准确率| top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | GPU 显存占用 (M) | ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tsn_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb.py)|height 100 |8| ResNet50 | ImageNet|18.55 |44.80 |[17.53](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[44.29](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 10978 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_r50_1x1x8_50e_sthv1_rgb_20200618-061b9195.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_sthv1.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_r50_f8_sthv1_18.1_45.0.log.json)| -|[tsn_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb.py)| height 100 |8| ResNet50| ImageNet |15.77 |39.85 |[13.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[35.58](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 5691 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/tsn_r50_1x1x16_50e_sthv1_rgb_20200614-7e2fe4f1.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/20200614_211932.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/20200614_211932.log.json)| +|[tsn_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb.py)|高 100 |8| ResNet50 | ImageNet|18.55 |44.80 |[17.53](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[44.29](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 10978 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_r50_1x1x8_50e_sthv1_rgb_20200618-061b9195.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_sthv1.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_r50_f8_sthv1_18.1_45.0.log.json)| +|[tsn_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb.py)| 高 100 |8| ResNet50| ImageNet |15.77 |39.85 |[13.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[35.58](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 5691 | 
[ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/tsn_r50_1x1x16_50e_sthv1_rgb_20200614-7e2fe4f1.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/20200614_211932.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/20200614_211932.log.json)| ### Something-Something V2 |配置文件 |分辨率| GPU 数量| 主干网络| 预训练 | top1 准确率| top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | GPU 显存占用 (M) | ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tsn_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb.py)|height 240 |8| ResNet50| ImageNet |32.97 |63.62 |[30.56](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[58.49](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 10966 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/tsn_r50_1x1x8_50e_sthv2_rgb_20200915-f3b381a5.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20200915_114139.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20200915_114139.log.json)| -|[tsn_r50_1x1x16_50e_sthv2_rgb](/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb.py)| height 240 |8|ResNet50| ImageNet |27.21 |55.84 |[21.91](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[46.87](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|8337| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/tsn_r50_1x1x16_50e_sthv2_rgb_20200917-80bc3611.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20200917_105855.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20200917_105855.log.json)| +|[tsn_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb.py)|高 256 |8| ResNet50| ImageNet |28.59 |59.56 | x | x | 10966 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/tsn_r50_1x1x8_50e_sthv2_rgb_20210816-1aafee8f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20210816_221116.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20210816_221116.log.json)| +|[tsn_r50_1x1x16_50e_sthv2_rgb](/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb.py)|高 256 |8|ResNet50| ImageNet |20.89 |49.16 | x | x |8337| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/tsn_r50_1x1x16_50e_sthv2_rgb_20210816-5d23ac6e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20210816_225256.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20210816_225256.log.json)| ### Moments in Time diff --git a/configs/recognition/tsn/metafile.yml b/configs/recognition/tsn/metafile.yml index 700174155e..dfb6774980 100644 --- a/configs/recognition/tsn/metafile.yml +++ b/configs/recognition/tsn/metafile.yml @@ -616,7 +616,7 @@ Models: FLOPs: 32959365120 Parameters: 23864558 Pretrained: ImageNet - Resolution: height 240 + Resolution: height 256 Training Data: SthV2 Training 
Resources: 8 GPUs Modality: RGB @@ -624,12 +624,12 @@ Models: Results: - Dataset: SthV2 Metrics: - Top 1 Accuracy: 32.97 - Top 5 Accuracy: 63.62 + Top 1 Accuracy: 28.59 + Top 5 Accuracy: 59.56 Task: Action Recognition - Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20200915_114139.log.json - Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20200915_114139.log - Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/tsn_r50_1x1x8_50e_sthv2_rgb_20200915-f3b381a5.pth + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20210816_221116.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20210816_221116.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/tsn_r50_1x1x8_50e_sthv2_rgb_20210816-1aafee8f.pth - Config: configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb.py In Collection: TSN Metadata: @@ -639,7 +639,7 @@ Models: FLOPs: 65918373888 Parameters: 23864558 Pretrained: ImageNet - Resolution: height 240 + Resolution: height 256 Training Data: SthV2 Training Resources: 8 GPUs Modality: RGB @@ -647,12 +647,12 @@ Models: Results: - Dataset: SthV2 Metrics: - Top 1 Accuracy: 27.21 - Top 5 Accuracy: 55.84 + Top 1 Accuracy: 20.89 + Top 5 Accuracy: 49.16 Task: Action Recognition - Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20200917_105855.log.json - Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20200917_105855.log - Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/tsn_r50_1x1x16_50e_sthv2_rgb_20200917-80bc3611.pth + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20210816_225256.log.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20210816_225256.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/tsn_r50_1x1x16_50e_sthv2_rgb_20210816-5d23ac6e.pth - Config: configs/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb.py In Collection: TSN Metadata: From c2c7b47724063fde562c1cb01386ac5646c6127f Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Sat, 28 Aug 2021 22:32:27 +0800 Subject: [PATCH 236/414] update csn noramlization (#1116) --- ...frozen_r152_32x2x1_180e_kinetics400_rgb.py | 4 +- ...nfrozen_r152_32x2x1_58e_kinetics400_rgb.py | 73 +++++++++++++++++++ ...frozen_r152_32x2x1_180e_kinetics400_rgb.py | 4 +- ...nfrozen_r152_32x2x1_58e_kinetics400_rgb.py | 73 +++++++++++++++++++ 4 files changed, 152 insertions(+), 2 deletions(-) diff --git a/configs/recognition/csn/ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py b/configs/recognition/csn/ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py index 6c0792f2eb..7cd96b726e 100644 --- a/configs/recognition/csn/ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py +++ b/configs/recognition/csn/ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py @@ -14,7 +14,9 @@ ann_file_val = 'data/kinetics400/kinetics400_val_list_rawframes.txt' ann_file_test = 'data/kinetics400/kinetics400_val_list_rawframes.txt' img_norm_cfg = dict( - mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) + mean=[110.2008, 100.63983, 95.99475], + 
std=[58.14765, 56.46975, 55.332195], + to_bgr=False) train_pipeline = [ dict(type='SampleFrames', clip_len=32, frame_interval=2, num_clips=1), dict(type='RawFrameDecode'), diff --git a/configs/recognition/csn/ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py b/configs/recognition/csn/ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py index 0cc11366ba..fc5372a82b 100644 --- a/configs/recognition/csn/ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py +++ b/configs/recognition/csn/ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py @@ -12,4 +12,77 @@ 'https://download.openmmlab.com/mmaction/recognition/csn/ipcsn_from_scratch_r152_sports1m_20210617-7a7cc5b9.pth' # noqa: E501 )) +# dataset settings +dataset_type = 'RawframeDataset' +data_root = 'data/kinetics400/rawframes_train' +data_root_val = 'data/kinetics400/rawframes_val' +ann_file_train = 'data/kinetics400/kinetics400_train_list_rawframes.txt' +ann_file_val = 'data/kinetics400/kinetics400_val_list_rawframes.txt' +ann_file_test = 'data/kinetics400/kinetics400_val_list_rawframes.txt' +img_norm_cfg = dict( + mean=[110.2008, 100.63983, 95.99475], + std=[58.14765, 56.46975, 55.332195], + to_bgr=False) +train_pipeline = [ + dict(type='SampleFrames', clip_len=32, frame_interval=2, num_clips=1), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='RandomResizedCrop'), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=32, + frame_interval=2, + num_clips=1, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=32, + frame_interval=2, + num_clips=10, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='ThreeCrop', crop_size=256), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=3, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=test_pipeline)) + work_dir = './work_dirs/ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb' # noqa: E501 diff --git a/configs/recognition/csn/ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py b/configs/recognition/csn/ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py index 19873781b2..777b2c0c71 100644 --- a/configs/recognition/csn/ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py +++ b/configs/recognition/csn/ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py @@ -14,7 +14,9 @@ ann_file_val = 
'data/kinetics400/kinetics400_val_list_rawframes.txt' ann_file_test = 'data/kinetics400/kinetics400_val_list_rawframes.txt' img_norm_cfg = dict( - mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) + mean=[110.2008, 100.63983, 95.99475], + std=[58.14765, 56.46975, 55.332195], + to_bgr=False) train_pipeline = [ dict(type='SampleFrames', clip_len=32, frame_interval=2, num_clips=1), dict(type='RawFrameDecode'), diff --git a/configs/recognition/csn/ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py b/configs/recognition/csn/ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py index d0803f68ab..b4601839b9 100644 --- a/configs/recognition/csn/ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py +++ b/configs/recognition/csn/ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py @@ -12,4 +12,77 @@ 'https://download.openmmlab.com/mmaction/recognition/csn/ircsn_from_scratch_r152_sports1m_20210617-bcc9c0dd.pth' # noqa: E501 )) +# dataset settings +dataset_type = 'RawframeDataset' +data_root = 'data/kinetics400/rawframes_train' +data_root_val = 'data/kinetics400/rawframes_val' +ann_file_train = 'data/kinetics400/kinetics400_train_list_rawframes.txt' +ann_file_val = 'data/kinetics400/kinetics400_val_list_rawframes.txt' +ann_file_test = 'data/kinetics400/kinetics400_val_list_rawframes.txt' +img_norm_cfg = dict( + mean=[110.2008, 100.63983, 95.99475], + std=[58.14765, 56.46975, 55.332195], + to_bgr=False) +train_pipeline = [ + dict(type='SampleFrames', clip_len=32, frame_interval=2, num_clips=1), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='RandomResizedCrop'), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=32, + frame_interval=2, + num_clips=1, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=32, + frame_interval=2, + num_clips=10, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='ThreeCrop', crop_size=256), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=3, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=test_pipeline)) + work_dir = './work_dirs/ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb' # noqa: E501 From c0b4ff8721792dd9c98225615ac691617acd041c Mon Sep 17 00:00:00 2001 From: Devbrat Anuragi <39759685+AslanDevbrat@users.noreply.github.com> 
Date: Wed, 1 Sep 2021 14:45:04 +0530 Subject: [PATCH 237/414] [Doc] Fixed Typo (#1121) --- docs/tutorials/2_finetune.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/tutorials/2_finetune.md b/docs/tutorials/2_finetune.md index 1ec072ce58..f29263601e 100644 --- a/docs/tutorials/2_finetune.md +++ b/docs/tutorials/2_finetune.md @@ -1,7 +1,7 @@ # Tutorial 2: Finetuning Models This tutorial provides instructions for users to use the pre-trained models -to finetune them on other datasets, so that better performance can be get. +to finetune them on other datasets, so that better performance can be achieved. From 926d5fcde4338b8b738dbf95a6339295b672b9e4 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Thu, 2 Sep 2021 11:52:48 +0800 Subject: [PATCH 238/414] [Doc] Add Citations (#1125) * update * use ubuntu18.04 * fix sphinx version * support python 3.9 --- .github/workflows/build.yml | 15 +++++++++++---- CITATION.cff | 8 ++++++++ requirements/docs.txt | 4 ++-- setup.py | 9 ++++++--- 4 files changed, 27 insertions(+), 9 deletions(-) create mode 100644 CITATION.cff diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index ebd18a81fc..9b16ea2f29 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -4,7 +4,7 @@ on: [push, pull_request] jobs: lint: - runs-on: ubuntu-latest + runs-on: ubuntu-18.04 steps: - uses: actions/checkout@v2 - name: Set up Python 3.7 @@ -22,7 +22,7 @@ jobs: pip install interrogate interrogate -v --ignore-init-method --ignore-module --ignore-nested-functions --ignore-regex "__repr__" --fail-under 80 mmaction build_cpu: - runs-on: ubuntu-latest + runs-on: ubuntu-18.04 strategy: matrix: python-version: [3.7] @@ -34,6 +34,13 @@ jobs: torchvision: 0.8.1 - torch: 1.9.0 torchvision: 0.10.0 + python-version: 3.7 + - torch: 1.9.0 + torchvision: 0.10.0 + python-version: 3.8 + - torch: 1.9.0 + torchvision: 0.10.0 + python-version: 3.9 steps: - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} @@ -73,7 +80,7 @@ jobs: coverage xml coverage report -m build_cu101: - runs-on: ubuntu-latest + runs-on: ubuntu-18.04 env: CUDA: 10.1.105-1 @@ -148,7 +155,7 @@ jobs: fail_ci_if_error: false build_cu102: - runs-on: ubuntu-latest + runs-on: ubuntu-18.04 env: CUDA: 10.2.89-1 diff --git a/CITATION.cff b/CITATION.cff new file mode 100644 index 0000000000..93a03304ab --- /dev/null +++ b/CITATION.cff @@ -0,0 +1,8 @@ +cff-version: 1.2.0 +message: "If you use this software, please cite it as below." +authors: + - name: "MMAction2 Contributors" +title: "OpenMMLab's Next Generation Video Understanding Toolbox and Benchmark" +date-released: 2020-07-21 +url: "https://github.com/open-mmlab/mmaction2" +license: Apache-2.0 diff --git a/requirements/docs.txt b/requirements/docs.txt index 1e973b21e9..8a342a3717 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -2,6 +2,6 @@ einops opencv-python recommonmark scipy -sphinx +sphinx==4.0.2 sphinx_markdown_tables -sphinx_rtd_theme +sphinx_rtd_theme==0.5.2 diff --git a/setup.py b/setup.py index a1417abdc0..8a4accb7e1 100644 --- a/setup.py +++ b/setup.py @@ -100,7 +100,7 @@ def gen_packages_items(): return packages -def add_mim_extention(): +def add_mim_extension(): """Add extra files that are required to support MIM into the package. 
These files will be added by creating a symlink to the originals if the @@ -149,14 +149,16 @@ def add_mim_extention(): if __name__ == '__main__': - add_mim_extention() + add_mim_extension() setup( name='mmaction2', version=get_version(), description='OpenMMLab Video Understanding Toolbox and Benchmark', long_description=readme(), long_description_content_type='text/markdown', - maintainer='MMAction2 Authors', + author='MMAction2 Contributors', + author_email='openmmlab@gmail.com', + maintainer='MMAction2 Contributors', maintainer_email='openmmlab@gmail.com', packages=find_packages(exclude=('configs', 'tools', 'demo')), keywords='computer vision, video understanding', @@ -169,6 +171,7 @@ def add_mim_extention(): 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', ], url='https://github.com/open-mmlab/mmaction2', license='Apache License 2.0', From 6293e54126bfc6a013fdacb9a8b140c9f65a0f98 Mon Sep 17 00:00:00 2001 From: ninja <2402224349@qq.com> Date: Thu, 2 Sep 2021 13:55:24 +0800 Subject: [PATCH 239/414] [Fix] Fix new_crop_quadruple bug (#1108) Co-authored-by: Kenny --- mmaction/datasets/pipelines/augmentations.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/mmaction/datasets/pipelines/augmentations.py b/mmaction/datasets/pipelines/augmentations.py index 6172afadec..dc411dd9e8 100644 --- a/mmaction/datasets/pipelines/augmentations.py +++ b/mmaction/datasets/pipelines/augmentations.py @@ -716,7 +716,7 @@ def __call__(self, results): new_crop_quadruple = [ old_x_ratio + x_ratio * old_w_ratio, old_y_ratio + y_ratio * old_h_ratio, w_ratio * old_w_ratio, - h_ratio * old_x_ratio + h_ratio * old_h_ratio ] results['crop_quadruple'] = np.array( new_crop_quadruple, dtype=np.float32) @@ -879,7 +879,7 @@ def __call__(self, results): new_crop_quadruple = [ old_x_ratio + x_ratio * old_w_ratio, old_y_ratio + y_ratio * old_h_ratio, w_ratio * old_w_ratio, - h_ratio * old_x_ratio + h_ratio * old_h_ratio ] results['crop_quadruple'] = np.array( new_crop_quadruple, dtype=np.float32) @@ -1060,7 +1060,7 @@ def __call__(self, results): new_crop_quadruple = [ old_x_ratio + x_ratio * old_w_ratio, old_y_ratio + y_ratio * old_h_ratio, w_ratio * old_w_ratio, - h_ratio * old_x_ratio + h_ratio * old_h_ratio ] results['crop_quadruple'] = np.array( new_crop_quadruple, dtype=np.float32) @@ -1677,7 +1677,7 @@ def __call__(self, results): new_crop_quadruple = [ old_x_ratio + x_ratio * old_w_ratio, old_y_ratio + y_ratio * old_h_ratio, w_ratio * old_w_ratio, - h_ratio * old_x_ratio + h_ratio * old_h_ratio ] results['crop_quadruple'] = np.array( new_crop_quadruple, dtype=np.float32) From 4c7bc917332118ffde20fa67177178d3f1b3e819 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Thu, 2 Sep 2021 14:37:30 +0800 Subject: [PATCH 240/414] [Improvement] Use VideoDataset for training instead of frames (#1126) * mmit frames to videos * hvu frames to videos * update * mit frames to videos --- .../hvu/tsn_r18_1x1x8_100e_hvu_action_rgb.py | 24 +++++++++---------- .../tsn_r18_1x1x8_100e_hvu_attribute_rgb.py | 24 +++++++++---------- .../hvu/tsn_r18_1x1x8_100e_hvu_concept_rgb.py | 24 +++++++++---------- .../hvu/tsn_r18_1x1x8_100e_hvu_event_rgb.py | 24 +++++++++---------- .../hvu/tsn_r18_1x1x8_100e_hvu_object_rgb.py | 24 +++++++++---------- .../hvu/tsn_r18_1x1x8_100e_hvu_scene_rgb.py | 24 +++++++++---------- .../tsn/tsn_r101_1x1x5_50e_mmit_rgb.py | 21 +++++++++------- .../tsn/tsn_r50_1x1x6_100e_mit_rgb.py 
| 21 +++++++++------- 8 files changed, 96 insertions(+), 90 deletions(-) diff --git a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_action_rgb.py b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_action_rgb.py index 77df841a88..3e2a4bfa8e 100644 --- a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_action_rgb.py +++ b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_action_rgb.py @@ -18,17 +18,18 @@ loss_cls=dict(type='BCELossWithLogits', loss_weight=333.))) # dataset settings -dataset_type = 'RawframeDataset' -data_root = 'data/hvu/rawframes_train' -data_root_val = 'data/hvu/rawframes_val' +dataset_type = 'VideoDataset' +data_root = 'data/hvu/videos_train' +data_root_val = 'data/hvu/videos_val' ann_file_train = f'data/hvu/hvu_{target_cate}_train.json' ann_file_val = f'data/hvu/hvu_{target_cate}_val.json' ann_file_test = f'data/hvu/hvu_{target_cate}_val.json' img_norm_cfg = dict( mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) train_pipeline = [ + dict(type='DecordInit'), dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8), - dict(type='RawFrameDecode'), + dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='RandomResizedCrop'), dict(type='Resize', scale=(224, 224), keep_ratio=False), @@ -39,13 +40,14 @@ dict(type='ToTensor', keys=['imgs', 'label']) ] val_pipeline = [ + dict(type='DecordInit'), dict( type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8, test_mode=True), - dict(type='RawFrameDecode'), + dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), @@ -54,13 +56,14 @@ dict(type='ToTensor', keys=['imgs']) ] test_pipeline = [ + dict(type='DecordInit'), dict( type='SampleFrames', clip_len=1, frame_interval=1, num_clips=25, test_mode=True), - dict(type='RawFrameDecode'), + dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), @@ -78,24 +81,21 @@ data_prefix=data_root, pipeline=train_pipeline, multi_class=True, - num_classes=category_nums[target_cate], - filename_tmpl='img_{:05d}.jpg'), + num_classes=category_nums[target_cate]), val=dict( type=dataset_type, ann_file=ann_file_val, data_prefix=data_root_val, pipeline=val_pipeline, multi_class=True, - num_classes=category_nums[target_cate], - filename_tmpl='img_{:05d}.jpg'), + num_classes=category_nums[target_cate]), test=dict( type=dataset_type, ann_file=ann_file_test, data_prefix=data_root_val, pipeline=test_pipeline, multi_class=True, - num_classes=category_nums[target_cate], - filename_tmpl='img_{:05d}.jpg')) + num_classes=category_nums[target_cate])) evaluation = dict(interval=2, metrics=['mean_average_precision']) # runtime settings diff --git a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_attribute_rgb.py b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_attribute_rgb.py index bdee7f32b7..f4ebc6ff19 100644 --- a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_attribute_rgb.py +++ b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_attribute_rgb.py @@ -18,17 +18,18 @@ loss_cls=dict(type='BCELossWithLogits', loss_weight=333.))) # dataset settings -dataset_type = 'RawframeDataset' -data_root = 'data/hvu/rawframes_train' -data_root_val = 'data/hvu/rawframes_val' +dataset_type = 'VideoDataset' +data_root = 'data/hvu/videos_train' +data_root_val = 'data/hvu/videos_val' ann_file_train = f'data/hvu/hvu_{target_cate}_train.json' ann_file_val = 
f'data/hvu/hvu_{target_cate}_val.json' ann_file_test = f'data/hvu/hvu_{target_cate}_val.json' img_norm_cfg = dict( mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) train_pipeline = [ + dict(type='DecordInit'), dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8), - dict(type='RawFrameDecode'), + dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='RandomResizedCrop'), dict(type='Resize', scale=(224, 224), keep_ratio=False), @@ -39,13 +40,14 @@ dict(type='ToTensor', keys=['imgs', 'label']) ] val_pipeline = [ + dict(type='DecordInit'), dict( type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8, test_mode=True), - dict(type='RawFrameDecode'), + dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), @@ -54,13 +56,14 @@ dict(type='ToTensor', keys=['imgs']) ] test_pipeline = [ + dict(type='DecordInit'), dict( type='SampleFrames', clip_len=1, frame_interval=1, num_clips=25, test_mode=True), - dict(type='RawFrameDecode'), + dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), @@ -78,24 +81,21 @@ data_prefix=data_root, pipeline=train_pipeline, multi_class=True, - num_classes=category_nums[target_cate], - filename_tmpl='img_{:05d}.jpg'), + num_classes=category_nums[target_cate]), val=dict( type=dataset_type, ann_file=ann_file_val, data_prefix=data_root_val, pipeline=val_pipeline, multi_class=True, - num_classes=category_nums[target_cate], - filename_tmpl='img_{:05d}.jpg'), + num_classes=category_nums[target_cate]), test=dict( type=dataset_type, ann_file=ann_file_test, data_prefix=data_root_val, pipeline=test_pipeline, multi_class=True, - num_classes=category_nums[target_cate], - filename_tmpl='img_{:05d}.jpg')) + num_classes=category_nums[target_cate])) evaluation = dict(interval=2, metrics=['mean_average_precision']) # runtime settings diff --git a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_concept_rgb.py b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_concept_rgb.py index 11b369bb0f..350d256ab0 100644 --- a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_concept_rgb.py +++ b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_concept_rgb.py @@ -18,17 +18,18 @@ loss_cls=dict(type='BCELossWithLogits', loss_weight=333.))) # dataset settings -dataset_type = 'RawframeDataset' -data_root = 'data/hvu/rawframes_train' -data_root_val = 'data/hvu/rawframes_val' +dataset_type = 'VideoDataset' +data_root = 'data/hvu/videos_train' +data_root_val = 'data/hvu/videos_val' ann_file_train = f'data/hvu/hvu_{target_cate}_train.json' ann_file_val = f'data/hvu/hvu_{target_cate}_val.json' ann_file_test = f'data/hvu/hvu_{target_cate}_val.json' img_norm_cfg = dict( mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) train_pipeline = [ + dict(type='DecordInit'), dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8), - dict(type='RawFrameDecode'), + dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='RandomResizedCrop'), dict(type='Resize', scale=(224, 224), keep_ratio=False), @@ -39,13 +40,14 @@ dict(type='ToTensor', keys=['imgs', 'label']) ] val_pipeline = [ + dict(type='DecordInit'), dict( type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8, test_mode=True), - dict(type='RawFrameDecode'), + dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', 
crop_size=256), dict(type='Normalize', **img_norm_cfg), @@ -54,13 +56,14 @@ dict(type='ToTensor', keys=['imgs']) ] test_pipeline = [ + dict(type='DecordInit'), dict( type='SampleFrames', clip_len=1, frame_interval=1, num_clips=25, test_mode=True), - dict(type='RawFrameDecode'), + dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), @@ -78,24 +81,21 @@ data_prefix=data_root, pipeline=train_pipeline, multi_class=True, - num_classes=category_nums[target_cate], - filename_tmpl='img_{:05d}.jpg'), + num_classes=category_nums[target_cate]), val=dict( type=dataset_type, ann_file=ann_file_val, data_prefix=data_root_val, pipeline=val_pipeline, multi_class=True, - num_classes=category_nums[target_cate], - filename_tmpl='img_{:05d}.jpg'), + num_classes=category_nums[target_cate]), test=dict( type=dataset_type, ann_file=ann_file_test, data_prefix=data_root_val, pipeline=test_pipeline, multi_class=True, - num_classes=category_nums[target_cate], - filename_tmpl='img_{:05d}.jpg')) + num_classes=category_nums[target_cate])) evaluation = dict(interval=2, metrics=['mean_average_precision']) # runtime settings diff --git a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_event_rgb.py b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_event_rgb.py index bcf5a40171..4073e5994c 100644 --- a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_event_rgb.py +++ b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_event_rgb.py @@ -18,17 +18,18 @@ loss_cls=dict(type='BCELossWithLogits', loss_weight=333.))) # dataset settings -dataset_type = 'RawframeDataset' -data_root = 'data/hvu/rawframes_train' -data_root_val = 'data/hvu/rawframes_val' +dataset_type = 'VideoDataset' +data_root = 'data/hvu/videos_train' +data_root_val = 'data/hvu/videos_val' ann_file_train = f'data/hvu/hvu_{target_cate}_train.json' ann_file_val = f'data/hvu/hvu_{target_cate}_val.json' ann_file_test = f'data/hvu/hvu_{target_cate}_val.json' img_norm_cfg = dict( mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) train_pipeline = [ + dict(type='DecordInit'), dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8), - dict(type='RawFrameDecode'), + dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='RandomResizedCrop'), dict(type='Resize', scale=(224, 224), keep_ratio=False), @@ -39,13 +40,14 @@ dict(type='ToTensor', keys=['imgs', 'label']) ] val_pipeline = [ + dict(type='DecordInit'), dict( type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8, test_mode=True), - dict(type='RawFrameDecode'), + dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), @@ -54,13 +56,14 @@ dict(type='ToTensor', keys=['imgs']) ] test_pipeline = [ + dict(type='DecordInit'), dict( type='SampleFrames', clip_len=1, frame_interval=1, num_clips=25, test_mode=True), - dict(type='RawFrameDecode'), + dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), @@ -78,24 +81,21 @@ data_prefix=data_root, pipeline=train_pipeline, multi_class=True, - num_classes=category_nums[target_cate], - filename_tmpl='img_{:05d}.jpg'), + num_classes=category_nums[target_cate]), val=dict( type=dataset_type, ann_file=ann_file_val, data_prefix=data_root_val, pipeline=val_pipeline, multi_class=True, - num_classes=category_nums[target_cate], - filename_tmpl='img_{:05d}.jpg'), 
+ num_classes=category_nums[target_cate]), test=dict( type=dataset_type, ann_file=ann_file_test, data_prefix=data_root_val, pipeline=test_pipeline, multi_class=True, - num_classes=category_nums[target_cate], - filename_tmpl='img_{:05d}.jpg')) + num_classes=category_nums[target_cate])) evaluation = dict(interval=2, metrics=['mean_average_precision']) # runtime settings diff --git a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_object_rgb.py b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_object_rgb.py index 11b369bb0f..350d256ab0 100644 --- a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_object_rgb.py +++ b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_object_rgb.py @@ -18,17 +18,18 @@ loss_cls=dict(type='BCELossWithLogits', loss_weight=333.))) # dataset settings -dataset_type = 'RawframeDataset' -data_root = 'data/hvu/rawframes_train' -data_root_val = 'data/hvu/rawframes_val' +dataset_type = 'VideoDataset' +data_root = 'data/hvu/videos_train' +data_root_val = 'data/hvu/videos_val' ann_file_train = f'data/hvu/hvu_{target_cate}_train.json' ann_file_val = f'data/hvu/hvu_{target_cate}_val.json' ann_file_test = f'data/hvu/hvu_{target_cate}_val.json' img_norm_cfg = dict( mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) train_pipeline = [ + dict(type='DecordInit'), dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8), - dict(type='RawFrameDecode'), + dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='RandomResizedCrop'), dict(type='Resize', scale=(224, 224), keep_ratio=False), @@ -39,13 +40,14 @@ dict(type='ToTensor', keys=['imgs', 'label']) ] val_pipeline = [ + dict(type='DecordInit'), dict( type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8, test_mode=True), - dict(type='RawFrameDecode'), + dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), @@ -54,13 +56,14 @@ dict(type='ToTensor', keys=['imgs']) ] test_pipeline = [ + dict(type='DecordInit'), dict( type='SampleFrames', clip_len=1, frame_interval=1, num_clips=25, test_mode=True), - dict(type='RawFrameDecode'), + dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), @@ -78,24 +81,21 @@ data_prefix=data_root, pipeline=train_pipeline, multi_class=True, - num_classes=category_nums[target_cate], - filename_tmpl='img_{:05d}.jpg'), + num_classes=category_nums[target_cate]), val=dict( type=dataset_type, ann_file=ann_file_val, data_prefix=data_root_val, pipeline=val_pipeline, multi_class=True, - num_classes=category_nums[target_cate], - filename_tmpl='img_{:05d}.jpg'), + num_classes=category_nums[target_cate]), test=dict( type=dataset_type, ann_file=ann_file_test, data_prefix=data_root_val, pipeline=test_pipeline, multi_class=True, - num_classes=category_nums[target_cate], - filename_tmpl='img_{:05d}.jpg')) + num_classes=category_nums[target_cate])) evaluation = dict(interval=2, metrics=['mean_average_precision']) # runtime settings diff --git a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_scene_rgb.py b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_scene_rgb.py index 305e9f27c1..ff60a65934 100644 --- a/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_scene_rgb.py +++ b/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_scene_rgb.py @@ -18,17 +18,18 @@ loss_cls=dict(type='BCELossWithLogits', loss_weight=333.))) # dataset settings -dataset_type = 'RawframeDataset' 
-data_root = 'data/hvu/rawframes_train' -data_root_val = 'data/hvu/rawframes_val' +dataset_type = 'VideoDataset' +data_root = 'data/hvu/videos_train' +data_root_val = 'data/hvu/videos_val' ann_file_train = f'data/hvu/hvu_{target_cate}_train.json' ann_file_val = f'data/hvu/hvu_{target_cate}_val.json' ann_file_test = f'data/hvu/hvu_{target_cate}_val.json' img_norm_cfg = dict( mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) train_pipeline = [ + dict(type='DecordInit'), dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8), - dict(type='RawFrameDecode'), + dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='RandomResizedCrop'), dict(type='Resize', scale=(224, 224), keep_ratio=False), @@ -39,13 +40,14 @@ dict(type='ToTensor', keys=['imgs', 'label']) ] val_pipeline = [ + dict(type='DecordInit'), dict( type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8, test_mode=True), - dict(type='RawFrameDecode'), + dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), @@ -54,13 +56,14 @@ dict(type='ToTensor', keys=['imgs']) ] test_pipeline = [ + dict(type='DecordInit'), dict( type='SampleFrames', clip_len=1, frame_interval=1, num_clips=25, test_mode=True), - dict(type='RawFrameDecode'), + dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), @@ -78,24 +81,21 @@ data_prefix=data_root, pipeline=train_pipeline, multi_class=True, - num_classes=category_nums[target_cate], - filename_tmpl='img_{:05d}.jpg'), + num_classes=category_nums[target_cate]), val=dict( type=dataset_type, ann_file=ann_file_val, data_prefix=data_root_val, pipeline=val_pipeline, multi_class=True, - num_classes=category_nums[target_cate], - filename_tmpl='img_{:05d}.jpg'), + num_classes=category_nums[target_cate]), test=dict( type=dataset_type, ann_file=ann_file_test, data_prefix=data_root_val, pipeline=test_pipeline, multi_class=True, - num_classes=category_nums[target_cate], - filename_tmpl='img_{:05d}.jpg')) + num_classes=category_nums[target_cate])) evaluation = dict(interval=2, metrics=['mean_average_precision']) # runtime settings diff --git a/configs/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb.py b/configs/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb.py index 4fcaf4b796..85e22a12bf 100644 --- a/configs/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb.py +++ b/configs/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb.py @@ -25,17 +25,18 @@ test_cfg=dict(average_clips=None)) # dataset settings -dataset_type = 'RawframeDataset' -data_root = 'data/mmit/rawframes' -data_root_val = '/data/mmit/rawframes' -ann_file_train = 'data/mmit/mmit_train_rawframes.txt' -ann_file_val = 'data/mmit/mmit_val_rawframes.txt' -ann_file_test = 'data/mmit/mmit_val_rawframes.txt' +dataset_type = 'VideoDataset' +data_root = 'data/mmit/videos' +data_root_val = '/data/mmit/videos' +ann_file_train = 'data/mmit/mmit_train_list_videos.txt' +ann_file_val = 'data/mmit/mmit_val_list_videos.txt' +ann_file_test = 'data/mmit/mmit_val_list_videos.txt' img_norm_cfg = dict( mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) train_pipeline = [ + dict(type='DecordInit'), dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=5), - dict(type='RawFrameDecode'), + dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict( type='MultiScaleCrop', @@ -51,13 +52,14 @@ dict(type='ToTensor', 
 keys=['imgs', 'label'])
 ]
 val_pipeline = [
+    dict(type='DecordInit'),
     dict(
         type='SampleFrames',
         clip_len=1,
         frame_interval=1,
         num_clips=5,
         test_mode=True),
-    dict(type='RawFrameDecode'),
+    dict(type='DecordDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
     dict(type='Normalize', **img_norm_cfg),
@@ -66,13 +68,14 @@
     dict(type='ToTensor', keys=['imgs'])
 ]
 test_pipeline = [
+    dict(type='DecordInit'),
     dict(
         type='SampleFrames',
         clip_len=1,
         frame_interval=1,
         num_clips=5,
         test_mode=True),
-    dict(type='RawFrameDecode'),
+    dict(type='DecordDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='MultiGroupCrop', crop_size=256, groups=1),
     dict(type='Normalize', **img_norm_cfg),
diff --git a/configs/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb.py b/configs/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb.py
index 5f8a15419d..d11f283237 100644
--- a/configs/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb.py
+++ b/configs/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb.py
@@ -7,17 +7,18 @@
 model = dict(cls_head=dict(num_classes=339))
 
 # dataset settings
-dataset_type = 'RawframeDataset'
-data_root = 'data/mit/rawframes/training'
-data_root_val = '/data/mit/rawframes/validation/'
-ann_file_train = 'data/mit/mit_train_list_rawframes.txt'
-ann_file_val = 'data/mit/mit_val_list_rawframes.txt'
-ann_file_test = 'data/mit/mit_val_list_rawframes.txt'
+dataset_type = 'VideoDataset'
+data_root = 'data/mit/videos/training'
+data_root_val = '/data/mit/videos/validation/'
+ann_file_train = 'data/mit/mit_train_list_videos.txt'
+ann_file_val = 'data/mit/mit_val_list_videos.txt'
+ann_file_test = 'data/mit/mit_val_list_videos.txt'
 img_norm_cfg = dict(
     mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False)
 train_pipeline = [
+    dict(type='DecordInit'),
     dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=6),
-    dict(type='RawFrameDecode'),
+    dict(type='DecordDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(
         type='MultiScaleCrop',
@@ -33,13 +34,14 @@
     dict(type='ToTensor', keys=['imgs', 'label'])
 ]
 val_pipeline = [
+    dict(type='DecordInit'),
     dict(
         type='SampleFrames',
         clip_len=1,
         frame_interval=1,
         num_clips=6,
         test_mode=True),
-    dict(type='RawFrameDecode'),
+    dict(type='DecordDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='CenterCrop', crop_size=224),
     dict(type='Normalize', **img_norm_cfg),
@@ -48,13 +50,14 @@
     dict(type='ToTensor', keys=['imgs'])
 ]
 test_pipeline = [
+    dict(type='DecordInit'),
     dict(
         type='SampleFrames',
         clip_len=1,
         frame_interval=1,
         num_clips=6,
         test_mode=True),
-    dict(type='RawFrameDecode'),
+    dict(type='DecordDecode'),
     dict(type='Resize', scale=(-1, 256)),
     dict(type='ThreeCrop', crop_size=256),
     dict(type='Normalize', **img_norm_cfg),

From 8623677d46c8569f39c3e45dd74ed98c4268eb6c Mon Sep 17 00:00:00 2001
From: Haodong Duan
Date: Thu, 2 Sep 2021 15:52:40 +0800
Subject: [PATCH 241/414] [Doc] update changelog (#1128)

---
 docs/changelog.md | 20 +++++++++++++++++++-
 1 file changed, 19 insertions(+), 1 deletion(-)

diff --git a/docs/changelog.md b/docs/changelog.md
index c382868898..7ee0375adf 100644
--- a/docs/changelog.md
+++ b/docs/changelog.md
@@ -1,6 +1,24 @@
 ## Changelog
 
-### Master
+### 0.18.0 (02/09/2021)
+
+**Improvements**
+
+- Add CopyRight ([#1099](https://github.com/open-mmlab/mmaction2/pull/1099))
+- Support NTU Pose Extraction ([#1076](https://github.com/open-mmlab/mmaction2/pull/1076))
+- Support Caching in RawFrameDecode ([#1078](https://github.com/open-mmlab/mmaction2/pull/1078))
+- Add citations & Support python3.9 CI & Use 
fixed-version sphinx ([#1125](https://github.com/open-mmlab/mmaction2/pull/1125)) + +**Documentations** + +- Add Descriptions of PoseC3D dataset ([#1053](https://github.com/open-mmlab/mmaction2/pull/1053)) + +**Bug and Typo Fixes** + +- Fix SSV2 checkpoints ([#1101](https://github.com/open-mmlab/mmaction2/pull/1101)) +- Fix CSN normalization ([#1116](https://github.com/open-mmlab/mmaction2/pull/1116)) +- Fix typo ([#1121](https://github.com/open-mmlab/mmaction2/pull/1121)) +- Fix new_crop_quadruple bug ([#1108](https://github.com/open-mmlab/mmaction2/pull/1108)) ### 0.17.0 (03/08/2021) From 42988f528459598869284e29201baa3f1a434525 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Thu, 2 Sep 2021 15:54:52 +0800 Subject: [PATCH 242/414] bump version to 0.18.0 (#1129) --- README.md | 2 +- README_zh-CN.md | 2 +- mmaction/version.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 2f0ae802a9..a9668aca51 100644 --- a/README.md +++ b/README.md @@ -59,7 +59,7 @@ The master branch works with **PyTorch 1.3+**. ## Changelog -v0.17.0 was released in 03/08/2021. Please refer to [changelog.md](docs/changelog.md) for details and release history. +v0.18.0 was released in 02/09/2021. Please refer to [changelog.md](docs/changelog.md) for details and release history. ## Benchmark diff --git a/README_zh-CN.md b/README_zh-CN.md index cac77e17c2..58c17b3764 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -53,7 +53,7 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLa ## 更新记录 -v0.17.0 版本已于 2021 年 8 月 3 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史 +v0.18.0 版本已于 2021 年 9 月 2 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史 ## 基准测试 diff --git a/mmaction/version.py b/mmaction/version.py index bf9fdb7351..bd2fd41acf 100644 --- a/mmaction/version.py +++ b/mmaction/version.py @@ -1,6 +1,6 @@ # Copyright (c) Open-MMLab. All rights reserved. 
-__version__ = '0.17.0' +__version__ = '0.18.0' def parse_version_info(version_str): From 430b0b66fcfc4d8e3d8d8766174e7d0612d6a40b Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Thu, 2 Sep 2021 17:27:59 +0800 Subject: [PATCH 243/414] [Doc] Add label_map (#1127) * AVA labelmap * K400 labelmap * ANet labelmap * Diving48 labelmap * GYM99 label_map * NTU120 label_map * hmdb label_map * jester label_map * Add label maps * hvu label_map --- demo/README.md | 40 +- demo/demo.ipynb | 2 +- demo/demo_posec3d.py | 2 +- demo/demo_spatiotemporal_det.py | 4 +- demo/mmaction2_tutorial.ipynb | 2 +- demo/mmaction2_tutorial_zh-CN.ipynb | 2 +- demo/webcam_demo_spatiotemporal_det.py | 4 +- docs/getting_started.md | 6 +- docs/install.md | 2 +- docs_zh_CN/demo.md | 40 +- docs_zh_CN/getting_started.md | 6 +- docs_zh_CN/install.md | 2 +- tests/test_runtime/test_inference.py | 2 +- tools/data/activitynet/label_map.txt | 200 +++++ .../data/ava/label_map.txt | 0 tools/data/diving48/label_map.txt | 48 ++ tools/data/gym/label_map_gym99.txt | 99 +++ tools/data/hmdb51/label_map.txt | 51 ++ tools/data/hvu/label_map.json | 1 + tools/data/jester/label_map.txt | 27 + .../data/kinetics}/label_map_k400.txt | 0 tools/data/kinetics/label_map_k600.txt | 600 +++++++++++++++ tools/data/kinetics/label_map_k700.txt | 700 ++++++++++++++++++ tools/data/mit/label_map.txt | 339 +++++++++ tools/data/mmit/label_map.txt | 313 ++++++++ tools/data/skeleton/label_map_gym99.txt | 99 +++ .../data/skeleton}/label_map_ntu120.txt | 0 tools/data/sthv1/label_map.txt | 174 +++++ tools/data/sthv2/label_map.txt | 174 +++++ tools/data/ucf101/label_map.txt | 101 +++ 30 files changed, 2985 insertions(+), 55 deletions(-) create mode 100644 tools/data/activitynet/label_map.txt rename demo/label_map_ava.txt => tools/data/ava/label_map.txt (100%) create mode 100644 tools/data/diving48/label_map.txt create mode 100644 tools/data/gym/label_map_gym99.txt create mode 100644 tools/data/hmdb51/label_map.txt create mode 100644 tools/data/hvu/label_map.json create mode 100644 tools/data/jester/label_map.txt rename {demo => tools/data/kinetics}/label_map_k400.txt (100%) create mode 100644 tools/data/kinetics/label_map_k600.txt create mode 100644 tools/data/kinetics/label_map_k700.txt create mode 100644 tools/data/mit/label_map.txt create mode 100644 tools/data/mmit/label_map.txt create mode 100644 tools/data/skeleton/label_map_gym99.txt rename {demo => tools/data/skeleton}/label_map_ntu120.txt (100%) create mode 100644 tools/data/sthv1/label_map.txt create mode 100644 tools/data/sthv2/label_map.txt create mode 100644 tools/data/ucf101/label_map.txt diff --git a/demo/README.md b/demo/README.md index 66bb7a5051..3ca4f08c1d 100644 --- a/demo/README.md +++ b/demo/README.md @@ -64,7 +64,7 @@ or use checkpoint url from `configs/` to directly load corresponding checkpoint, # The demo.mp4 and label_map_k400.txt are both from Kinetics-400 python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - demo/demo.mp4 demo/label_map_k400.txt + demo/demo.mp4 tools/data/kinetics/label_map_k400.txt ``` 2. Recognize a video file as input by using a TSN model on cuda by default, loading checkpoint from url. 
@@ -73,7 +73,7 @@ or use checkpoint url from `configs/` to directly load corresponding checkpoint, # The demo.mp4 and label_map_k400.txt are both from Kinetics-400 python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - demo/demo.mp4 demo/label_map_k400.txt + demo/demo.mp4 tools/data/kinetics/label_map_k400.txt ``` 3. Recognize a list of rawframes as input by using a TSN model on cpu. @@ -90,7 +90,7 @@ or use checkpoint url from `configs/` to directly load corresponding checkpoint, # The demo.mp4 and label_map_k400.txt are both from Kinetics-400 python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - demo/demo.mp4 demo/label_map_k400.txt --out-filename demo/demo_out.mp4 + demo/demo.mp4 tools/data/kinetics/label_map_k400.txt --out-filename demo/demo_out.mp4 ``` 5. Recognize a list of rawframes as input by using a TSN model and then generate a gif file. @@ -107,7 +107,7 @@ or use checkpoint url from `configs/` to directly load corresponding checkpoint, # The demo.mp4 and label_map_k400.txt are both from Kinetics-400 python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - demo/demo.mp4 demo/label_map_k400.txt --target-resolution 340 256 --resize-algorithm bilinear \ + demo/demo.mp4 tools/data/kinetics/label_map_k400.txt --target-resolution 340 256 --resize-algorithm bilinear \ --out-filename demo/demo_out.mp4 ``` @@ -117,7 +117,7 @@ or use checkpoint url from `configs/` to directly load corresponding checkpoint, # For --target-resolution 170 -1, original resolution (340, 256) -> target resolution (170, 128) python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - demo/demo.mp4 demo/label_map_k400.txt --target-resolution 170 -1 --resize-algorithm bilinear \ + demo/demo.mp4 tools/data/kinetics/label_map_k400.txt --target-resolution 170 -1 --resize-algorithm bilinear \ --out-filename demo/demo_out.mp4 ``` @@ -127,7 +127,7 @@ or use checkpoint url from `configs/` to directly load corresponding checkpoint, # The demo.mp4 and label_map_k400.txt are both from Kinetics-400 python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - demo/demo.mp4 demo/label_map_k400.txt --font-scale 1 --font-color red \ + demo/demo.mp4 tools/data/kinetics/label_map_k400.txt --font-scale 1 --font-color red \ --out-filename demo/demo_out.mp4 ``` @@ -167,7 +167,7 @@ Optional arguments: - `HUMAN_DETECTION_CHECKPOINT`: The human detection checkpoint URL. - `HUMAN_DETECTION_SCORE_THRE`: The score threshold for human detection. Default: 0.9. - `ACTION_DETECTION_SCORE_THRESHOLD`: The score threshold for action detection. Default: 0.5. -- `LABEL_MAP`: The label map used. Default: `demo/label_map_ava.txt`. +- `LABEL_MAP`: The label map used. Default: `tools/data/ava/label_map.txt`. - `DEVICE`: Type of device to run the demo. Allowed values are cuda device like `cuda:0` or `cpu`. Default: `cuda:0`. - `OUTPUT_FILENAME`: Path to the output file which is a video format. 
Default: `demo/stdet_demo.mp4`. - `PREDICT_STEPSIZE`: Make a prediction per N frames. Default: 8. @@ -188,7 +188,7 @@ python demo/demo_spatiotemporal_det.py --video demo/demo.mp4 \ --det-checkpoint http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth \ --det-score-thr 0.9 \ --action-score-thr 0.5 \ - --label-map demo/label_map_ava.txt \ + --label-map tools/data/ava/label_map.txt \ --predict-stepsize 8 \ --output-stepsize 4 \ --output-fps 6 @@ -265,7 +265,7 @@ or use checkpoint url from `configs/` to directly load corresponding checkpoint, ```shell python demo/webcam_demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth demo/label_map_k400.txt --average-size 5 \ + checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth tools/data/kinetics/label_map_k400.txt --average-size 5 \ --threshold 0.2 --device cpu ``` @@ -275,7 +275,7 @@ or use checkpoint url from `configs/` to directly load corresponding checkpoint, ```shell python demo/webcam_demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - demo/label_map_k400.txt --average-size 5 --threshold 0.2 --device cpu + tools/data/kinetics/label_map_k400.txt --average-size 5 --threshold 0.2 --device cpu ``` 3. Recognize the action from web camera as input by using a I3D model on gpu by default, averaging the score per 5 times @@ -283,7 +283,7 @@ or use checkpoint url from `configs/` to directly load corresponding checkpoint, ```shell python demo/webcam_demo.py configs/recognition/i3d/i3d_r50_video_inference_32x2x1_100e_kinetics400_rgb.py \ - checkpoints/i3d_r50_32x2x1_100e_kinetics400_rgb_20200614-c25ef9a4.pth demo/label_map_k400.txt \ + checkpoints/i3d_r50_32x2x1_100e_kinetics400_rgb_20200614-c25ef9a4.pth tools/data/kinetics/label_map_k400.txt \ --average-size 5 --threshold 0.2 ``` @@ -323,7 +323,7 @@ or use checkpoint url from `configs/` to directly load corresponding checkpoint, ```shell python demo/long_video_demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth PATH_TO_LONG_VIDEO demo/label_map_k400.txt PATH_TO_SAVED_VIDEO \ + checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth PATH_TO_LONG_VIDEO tools/data/kinetics/label_map_k400.txt PATH_TO_SAVED_VIDEO \ --input-step 3 --device cpu --threshold 0.2 ``` @@ -333,7 +333,7 @@ or use checkpoint url from `configs/` to directly load corresponding checkpoint, ```shell python demo/long_video_demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - PATH_TO_LONG_VIDEO demo/label_map_k400.txt PATH_TO_SAVED_VIDEO --input-step 3 --device cpu --threshold 0.2 + PATH_TO_LONG_VIDEO tools/data/kinetics/label_map_k400.txt PATH_TO_SAVED_VIDEO --input-step 3 --device cpu --threshold 0.2 ``` 3. 
Predict different labels in a long video from web by using a TSN model on cpu, with 3 frames for input steps (that is, random sample one from each 3 frames) @@ -343,14 +343,14 @@ or use checkpoint url from `configs/` to directly load corresponding checkpoint, python demo/long_video_demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ https://www.learningcontainer.com/wp-content/uploads/2020/05/sample-mp4-file.mp4 \ - demo/label_map_k400.txt PATH_TO_SAVED_VIDEO --input-step 3 --device cpu --threshold 0.2 + tools/data/kinetics/label_map_k400.txt PATH_TO_SAVED_VIDEO --input-step 3 --device cpu --threshold 0.2 ``` 4. Predict different labels in a long video by using a I3D model on gpu, with input_step=1, threshold=0.01 as default and print the labels in cyan. ```shell python demo/long_video_demo.py configs/recognition/i3d/i3d_r50_video_inference_32x2x1_100e_kinetics400_rgb.py \ - checkpoints/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth PATH_TO_LONG_VIDEO demo/label_map_k400.txt PATH_TO_SAVED_VIDEO \ + checkpoints/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth PATH_TO_LONG_VIDEO tools/data/kinetics/label_map_k400.txt PATH_TO_SAVED_VIDEO \ --label-color 255 255 0 ``` @@ -358,7 +358,7 @@ or use checkpoint url from `configs/` to directly load corresponding checkpoint, ```shell python demo/long_video_demo.py configs/recognition/i3d/i3d_r50_video_inference_32x2x1_100e_kinetics400_rgb.py \ - checkpoints/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth PATH_TO_LONG_VIDEO demo/label_map_k400.txt ./results.json + checkpoints/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth PATH_TO_LONG_VIDEO tools/data/kinetics/label_map_k400.txt ./results.json ``` ## SpatioTemporal Action Detection Webcam Demo @@ -394,7 +394,7 @@ Optional arguments: - `HUMAN_DETECTION_CHECKPOINT`: The human detection checkpoint URL. - `HUMAN_DETECTION_SCORE_THRE`: The score threshold for human detection. Default: 0.9. - `INPUT_VIDEO`: The webcam id or video path of the source. Default: `0`. -- `LABEL_MAP`: The label map used. Default: `demo/label_map_ava.txt`. +- `LABEL_MAP`: The label map used. Default: `tools/data/ava/label_map.txt`. - `DEVICE`: Type of device to run the demo. Allowed values are cuda device like `cuda:0` or `cpu`. Default: `cuda:0`. - `OUTPUT_FPS`: The FPS of demo video output. Default: 15. - `OUTPUT_FILENAME`: Path to the output file which is a video format. Default: None. @@ -434,7 +434,7 @@ python demo/webcam_demo_spatiotemporal_det.py \ --det-checkpoint http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth \ --det-score-thr 0.9 \ --action-score-thr 0.5 \ - --label-map demo/label_map_ava.txt \ + --label-map tools/data/ava/label_map.txt \ --predict-stepsize 40 \ --output-fps 20 \ --show @@ -467,7 +467,7 @@ Optional arguments: - `HUMAN_DETECTION_SCORE_THRE`: The score threshold for human detection. Default: 0.9. - `HUMAN_POSE_ESTIMATION_CONFIG_FILE`: The human pose estimation config file path (trained on COCO-Keypoint). - `HUMAN_POSE_ESTIMATION_CHECKPOINT`: The human pose estimation checkpoint URL (trained on COCO-Keypoint). -- `LABEL_MAP`: The label map used. Default: `demo/label_map_ava.txt`. +- `LABEL_MAP`: The label map used. Default: `tools/data/ava/label_map.txt`. 
- `DEVICE`: Type of device to run the demo. Allowed values are cuda device like `cuda:0` or `cpu`. Default: `cuda:0`. - `SHORT_SIDE`: The short side used for frame extraction. Default: 480. @@ -486,5 +486,5 @@ python demo/demo_posec3d.py demo/ntu_sample.avi demo/posec3d_demo.mp4 \ --det-score-thr 0.9 \ --pose-config demo/hrnet_w32_coco_256x192.py \ --pose-checkpoint https://download.openmmlab.com/mmpose/top_down/hrnet/hrnet_w32_coco_256x192-c78dce93_20200708.pth \ - --label-map demo/label_map_ntu120.txt + --label-map tools/data/skeleton/label_map_ntu120.txt ``` diff --git a/demo/demo.ipynb b/demo/demo.ipynb index 01bf96d241..0f7ff116ef 100644 --- a/demo/demo.ipynb +++ b/demo/demo.ipynb @@ -54,7 +54,7 @@ "source": [ "# test a single video and show the result:\n", "video = 'demo.mp4'\n", - "label = 'label_map_k400.txt'\n", + "label = '../tools/data/kinetics/label_map_k400.txt'\n", "results = inference_recognizer(model, video, label)" ] }, diff --git a/demo/demo_posec3d.py b/demo/demo_posec3d.py index b5d3f7cfd9..fd0d497fb7 100644 --- a/demo/demo_posec3d.py +++ b/demo/demo_posec3d.py @@ -96,7 +96,7 @@ def parse_args(): help='the threshold of human detection score') parser.add_argument( '--label-map', - default='demo/label_map_ntu120.txt', + default='tools/data/skeleton/label_map_ntu120.txt', help='label map file') parser.add_argument( '--device', type=str, default='cuda:0', help='CPU/CUDA device option') diff --git a/demo/demo_spatiotemporal_det.py b/demo/demo_spatiotemporal_det.py index 15f1b6fb46..bbaa06e0fb 100644 --- a/demo/demo_spatiotemporal_det.py +++ b/demo/demo_spatiotemporal_det.py @@ -149,7 +149,9 @@ def parse_args(): help='the threshold of human action score') parser.add_argument('--video', help='video file/url') parser.add_argument( - '--label-map', default='demo/label_map_ava.txt', help='label map file') + '--label-map', + default='tools/data/ava/label_map.txt', + help='label map file') parser.add_argument( '--device', type=str, default='cuda:0', help='CPU/CUDA device option') parser.add_argument( diff --git a/demo/mmaction2_tutorial.ipynb b/demo/mmaction2_tutorial.ipynb index 24981795f4..0b8f3ed146 100644 --- a/demo/mmaction2_tutorial.ipynb +++ b/demo/mmaction2_tutorial.ipynb @@ -385,7 +385,7 @@ "source": [ "# Use the recognizer to do inference\n", "video = 'demo/demo.mp4'\n", - "label = 'demo/label_map_k400.txt'\n", + "label = 'tools/data/kinetics/label_map_k400.txt'\n", "results = inference_recognizer(model, video, label)" ], "execution_count": 6, diff --git a/demo/mmaction2_tutorial_zh-CN.ipynb b/demo/mmaction2_tutorial_zh-CN.ipynb index 501f2b8d50..7c03cadc54 100644 --- a/demo/mmaction2_tutorial_zh-CN.ipynb +++ b/demo/mmaction2_tutorial_zh-CN.ipynb @@ -343,7 +343,7 @@ "source": [ "# 选择视频进行推理\n", "video = 'demo/demo.mp4'\n", - "label = 'demo/label_map_k400.txt'\n", + "label = 'tools/data/kinetics/label_map_k400.txt'\n", "results = inference_recognizer(model, video, label)" ] }, diff --git a/demo/webcam_demo_spatiotemporal_det.py b/demo/webcam_demo_spatiotemporal_det.py index 3245af2d2e..ac91e38c26 100644 --- a/demo/webcam_demo_spatiotemporal_det.py +++ b/demo/webcam_demo_spatiotemporal_det.py @@ -83,7 +83,9 @@ def parse_args(): type=str, help='webcam id or input video file/url') parser.add_argument( - '--label-map', default='demo/label_map_ava.txt', help='label map file') + '--label-map', + default='tools/data/ava/label_map.txt', + help='label map file') parser.add_argument( '--device', type=str, default='cuda:0', help='CPU/CUDA device option') parser.add_argument( diff 
--git a/docs/getting_started.md b/docs/getting_started.md index 43845b64a7..7fab90948d 100644 --- a/docs/getting_started.md +++ b/docs/getting_started.md @@ -147,7 +147,7 @@ model = init_recognizer(config_file, checkpoint_file, device=device) # test a single video and show the result: video = 'demo/demo.mp4' -labels = 'demo/label_map_k400.txt' +labels = 'tools/data/kinetics/label_map_k400.txt' results = inference_recognizer(model, video, labels) # show the results @@ -176,7 +176,7 @@ model = init_recognizer(config_file, checkpoint_file, device=device, use_frames= # test rawframe directory of a single video and show the result: video = 'SOME_DIR_PATH/' -labels = 'demo/label_map_k400.txt' +labels = 'tools/data/kinetics/label_map_k400.txt' results = inference_recognizer(model, video, labels, use_frames=True) # show the results @@ -205,7 +205,7 @@ model = init_recognizer(config_file, checkpoint_file, device=device) # test url of a single video and show the result: video = 'https://www.learningcontainer.com/wp-content/uploads/2020/05/sample-mp4-file.mp4' -labels = 'demo/label_map_k400.txt' +labels = 'tools/data/kinetics/label_map_k400.txt' results = inference_recognizer(model, video, labels) # show the results diff --git a/docs/install.md b/docs/install.md index 9ba611d3bb..aa051821dc 100644 --- a/docs/install.md +++ b/docs/install.md @@ -242,5 +242,5 @@ device = torch.device(device) model = init_recognizer(config_file, device=device) # inference the demo video -inference_recognizer(model, 'demo/demo.mp4', 'demo/label_map_k400.txt') +inference_recognizer(model, 'demo/demo.mp4', 'tools/data/kinetics/label_map_k400.txt') ``` diff --git a/docs_zh_CN/demo.md b/docs_zh_CN/demo.md index 2966a97ba7..6c6e4f8cfb 100644 --- a/docs_zh_CN/demo.md +++ b/docs_zh_CN/demo.md @@ -40,7 +40,7 @@ python demo/demo.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${VIDEO_FILE} {LABEL_FILE} # demo.mp4 及 label_map_k400.txt 均来自 Kinetics-400 数据集 python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - demo/demo.mp4 demo/label_map_k400.txt + demo/demo.mp4 tools/data/kinetics/label_map_k400.txt ``` 2. 在 cuda 设备上,使用 TSN 模型进行视频识别,并利用 URL 加载模型权重文件: @@ -49,7 +49,7 @@ python demo/demo.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${VIDEO_FILE} {LABEL_FILE} # demo.mp4 及 label_map_k400.txt 均来自 Kinetics-400 数据集 python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - demo/demo.mp4 demo/label_map_k400.txt + demo/demo.mp4 tools/data/kinetics/label_map_k400.txt ``` 3. 在 CPU 上,使用 TSN 模型进行视频识别,输入为视频抽好的帧: @@ -66,7 +66,7 @@ python demo/demo.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${VIDEO_FILE} {LABEL_FILE} # demo.mp4 及 label_map_k400.txt 均来自 Kinetics-400 数据集 python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - demo/demo.mp4 demo/label_map_k400.txt --out-filename demo/demo_out.mp4 + demo/demo.mp4 tools/data/kinetics/label_map_k400.txt --out-filename demo/demo_out.mp4 ``` 5. 
使用 TSN 模型进行视频识别,输入为视频抽好的帧,将识别结果存为 GIF 格式: @@ -83,7 +83,7 @@ python demo/demo.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${VIDEO_FILE} {LABEL_FILE} # demo.mp4 及 label_map_k400.txt 均来自 Kinetics-400 数据集 python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - demo/demo.mp4 demo/label_map_k400.txt --target-resolution 340 256 --resize-algorithm bilinear \ + demo/demo.mp4 tools/data/kinetics/label_map_k400.txt --target-resolution 340 256 --resize-algorithm bilinear \ --out-filename demo/demo_out.mp4 ``` @@ -93,7 +93,7 @@ python demo/demo.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${VIDEO_FILE} {LABEL_FILE} # 如设定 --target-resolution 为 170 -1,原先长宽为 (340, 256) 的视频帧将被缩放至 (170, 128) python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - demo/demo.mp4 demo/label_map_k400.txt --target-resolution 170 -1 --resize-algorithm bilinear \ + demo/demo.mp4 tools/data/kinetics/label_map_k400.txt --target-resolution 170 -1 --resize-algorithm bilinear \ --out-filename demo/demo_out.mp4 ``` @@ -103,7 +103,7 @@ python demo/demo.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${VIDEO_FILE} {LABEL_FILE} # demo.mp4 及 label_map_k400.txt 均来自 Kinetics-400 数据集 python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - demo/demo.mp4 demo/label_map_k400.txt --font-size 10 --font-color red \ + demo/demo.mp4 tools/data/kinetics/label_map_k400.txt --font-size 10 --font-color red \ --out-filename demo/demo_out.mp4 ``` @@ -143,7 +143,7 @@ python demo/demo_spatiotemporal_det.py --video ${VIDEO_FILE} \ - `HUMAN_DETECTION_CHECKPOINT`: 人体检测模型权重文件路径。 - `HUMAN_DETECTION_SCORE_THRE`: 人体检测分数阈值,默认为 0.9。 - `ACTION_DETECTION_SCORE_THRESHOLD`: 动作检测分数阈值,默认为 0.5。 -- `LABEL_MAP`: 所使用的标签映射文件,默认为 `demo/label_map_ava.txt`。 +- `LABEL_MAP`: 所使用的标签映射文件,默认为 `tools/data/ava/label_map.txt`。 - `DEVICE`: 指定脚本运行设备,支持 cuda 设备(如 `cuda:0`)或 cpu(`cpu`)。默认为 `cuda:0`。 - `OUTPUT_FILENAME`: 输出视频的路径,默认为 `demo/stdet_demo.mp4`。 - `PREDICT_STEPSIZE`: 每 N 帧进行一次预测(以节约计算资源),默认值为 8。 @@ -164,7 +164,7 @@ python demo/demo_spatiotemporal_det.py --video demo/demo.mp4 \ --det-checkpoint http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth \ --det-score-thr 0.9 \ --action-score-thr 0.5 \ - --label-map demo/label_map_ava.txt \ + --label-map tools/data/ava/label_map.txt \ --predict-stepsize 8 \ --output-stepsize 4 \ --output-fps 6 @@ -240,7 +240,7 @@ python demo/webcam_demo.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${LABEL_FILE} \ ```shell python demo/webcam_demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth demo/label_map_k400.txt --average-size 5 \ + checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth tools/data/kinetics/label_map_k400.txt --average-size 5 \ --threshold 0.2 --device cpu ``` @@ -249,14 +249,14 @@ python demo/webcam_demo.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${LABEL_FILE} \ ```shell python demo/webcam_demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ 
https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - demo/label_map_k400.txt --average-size 5 --threshold 0.2 --device cpu + tools/data/kinetics/label_map_k400.txt --average-size 5 --threshold 0.2 --device cpu ``` 3. 使用 I3D 模型在 GPU 上进行利用网络摄像头的实时动作识别,平均最近 5 个片段结果作为预测,输出大于阈值 0.2 的动作类别: ```shell python demo/webcam_demo.py configs/recognition/i3d/i3d_r50_video_inference_32x2x1_100e_kinetics400_rgb.py \ - checkpoints/i3d_r50_32x2x1_100e_kinetics400_rgb_20200614-c25ef9a4.pth demo/label_map_k400.txt \ + checkpoints/i3d_r50_32x2x1_100e_kinetics400_rgb_20200614-c25ef9a4.pth tools/data/kinetics/label_map_k400.txt \ --average-size 5 --threshold 0.2 ``` @@ -291,7 +291,7 @@ python demo/long_video_demo.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${VIDEO_FILE} $ ```shell python demo/long_video_demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth PATH_TO_LONG_VIDEO demo/label_map_k400.txt PATH_TO_SAVED_VIDEO \ + checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth PATH_TO_LONG_VIDEO tools/data/kinetics/label_map_k400.txt PATH_TO_SAVED_VIDEO \ --input-step 3 --device cpu --threshold 0.2 ``` @@ -300,7 +300,7 @@ python demo/long_video_demo.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${VIDEO_FILE} $ ```shell python demo/long_video_demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - PATH_TO_LONG_VIDEO demo/label_map_k400.txt PATH_TO_SAVED_VIDEO --input-step 3 --device cpu --threshold 0.2 + PATH_TO_LONG_VIDEO tools/data/kinetics/label_map_k400.txt PATH_TO_SAVED_VIDEO --input-step 3 --device cpu --threshold 0.2 ``` 3. 利用 TSN 模型在 CPU 上预测网络长视频(利用 URL 读取)中的不同动作类别,设置 `INPUT_STEP` 为 3,输出分值大于 0.2 的动作类别,此示例利用 URL 加载模型权重文件: @@ -309,14 +309,14 @@ python demo/long_video_demo.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${VIDEO_FILE} $ python demo/long_video_demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ https://www.learningcontainer.com/wp-content/uploads/2020/05/sample-mp4-file.mp4 \ - demo/label_map_k400.txt PATH_TO_SAVED_VIDEO --input-step 3 --device cpu --threshold 0.2 + tools/data/kinetics/label_map_k400.txt PATH_TO_SAVED_VIDEO --input-step 3 --device cpu --threshold 0.2 ``` 4. 
利用 I3D 模型在 GPU 上预测长视频中的不同动作类别,设置 `INPUT_STEP` 为 3,动作识别的分数阈值为 0.01: ```shell python demo/long_video_demo.py configs/recognition/i3d/i3d_r50_video_inference_32x2x1_100e_kinetics400_rgb.py \ - checkpoints/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth PATH_TO_LONG_VIDEO demo/label_map_k400.txt PATH_TO_SAVED_VIDEO \ + checkpoints/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth PATH_TO_LONG_VIDEO tools/data/kinetics/label_map_k400.txt PATH_TO_SAVED_VIDEO \ ``` ## 基于网络摄像头的实时时空动作检测 @@ -352,7 +352,7 @@ python demo/webcam_demo_spatiotemporal_det.py \ - `HUMAN_DETECTION_CHECKPOINT`: 人体检测模型权重文件路径。 - `HUMAN_DETECTION_SCORE_THRE`: 人体检测分数阈值,默认为 0.9。 - `INPUT_VIDEO`: 网络摄像头编号或本地视频文件路径,默认为 `0`。 -- `LABEL_MAP`: 所使用的标签映射文件,默认为 `demo/label_map_ava.txt`。 +- `LABEL_MAP`: 所使用的标签映射文件,默认为 `tools/data/ava/label_map.txt`。 - `DEVICE`: 指定脚本运行设备,支持 cuda 设备(如 `cuda:0`)或 cpu(`cpu`),默认为 `cuda:0`。 - `OUTPUT_FPS`: 输出视频的帧率,默认为 15。 - `OUTPUT_FILENAME`: 输出视频的路径,默认为 `None`。 @@ -392,7 +392,7 @@ python demo/webcam_demo_spatiotemporal_det.py \ --det-checkpoint http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth \ --det-score-thr 0.9 \ --action-score-thr 0.5 \ - --label-map demo/label_map_ava.txt \ + --label-map tools/data/ava/label_map.txt \ --predict-stepsize 40 \ --output-fps 20 \ --show @@ -425,7 +425,7 @@ python demo/demo_posec3d.py ${VIDEO_FILE} ${OUT_FILENAME} \ - `HUMAN_DETECTION_CHECKPOINT`: 人体检测模型权重文件路径。 - `HUMAN_DETECTION_SCORE_THRE`: 人体检测分数阈值,默认为 0.9。 - `INPUT_VIDEO`: 网络摄像头编号或本地视频文件路径,默认为 `0`。 -- `LABEL_MAP`: 所使用的标签映射文件,默认为 `demo/label_map_ava.txt`。 +- `LABEL_MAP`: 所使用的标签映射文件,默认为 `tools/data/ava/label_map.txt`。 - `DEVICE`: 指定脚本运行设备,支持 cuda 设备(如 `cuda:0`)或 cpu(`cpu`),默认为 `cuda:0`。 - `OUTPUT_FPS`: 输出视频的帧率,默认为 15。 - `OUTPUT_FILENAME`: 输出视频的路径,默认为 `None`。 @@ -442,7 +442,7 @@ python demo/demo_posec3d.py ${VIDEO_FILE} ${OUT_FILENAME} \ - `HUMAN_DETECTION_SCORE_THRE`: 人体检测分数阈值,默认为 0.9。 - `HUMAN_POSE_ESTIMATION_CONFIG_FILE`: 人体姿态估计模型配置文件路径 (需在 COCO-keypoint 数据集上训练)。 - `HUMAN_POSE_ESTIMATION_CHECKPOINT`: 人体姿态估计模型权重文件路径 (需在 COCO-keypoint 数据集上训练). 
-- `LABEL_MAP`: 所使用的标签映射文件,默认为 `demo/label_map_ntu120.txt`。 +- `LABEL_MAP`: 所使用的标签映射文件,默认为 `tools/data/skeleton/label_map_ntu120.txt`。 - `DEVICE`: 指定脚本运行设备,支持 cuda 设备(如 `cuda:0`)或 cpu(`cpu`),默认为 `cuda:0`。 - `SHORT_SIDE`: 视频抽帧时使用的短边长度,默认为 480。 @@ -461,5 +461,5 @@ python demo/demo_posec3d.py demo/ntu_sample.avi demo/posec3d_demo.mp4 \ --det-score-thr 0.9 \ --pose-config demo/hrnet_w32_coco_256x192.py \ --pose-checkpoint https://download.openmmlab.com/mmpose/top_down/hrnet/hrnet_w32_coco_256x192-c78dce93_20200708.pth \ - --label-map demo/label_map_ntu120.txt + --label-map tools/data/skeleton/label_map_ntu120.txt ``` diff --git a/docs_zh_CN/getting_started.md b/docs_zh_CN/getting_started.md index 70714d5d67..efd306b67b 100644 --- a/docs_zh_CN/getting_started.md +++ b/docs_zh_CN/getting_started.md @@ -146,7 +146,7 @@ model = init_recognizer(config_file, checkpoint_file, device=device) # 测试单个视频并显示其结果 video = 'demo/demo.mp4' -labels = 'demo/label_map_k400.txt' +labels = 'tools/data/kinetics/label_map_k400.txt' results = inference_recognizer(model, video, labels) # 显示结果 @@ -175,7 +175,7 @@ model = init_recognizer(config_file, checkpoint_file, device=device, use_frames= # 测试单个视频的帧文件夹并显示其结果 video = 'SOME_DIR_PATH/' -labels = 'demo/label_map_k400.txt' +labels = 'tools/data/kinetics/label_map_k400.txt' results = inference_recognizer(model, video, labels, use_frames=True) # 显示结果 @@ -204,7 +204,7 @@ model = init_recognizer(config_file, checkpoint_file, device=device) # 测试单个视频的 url 并显示其结果 video = 'https://www.learningcontainer.com/wp-content/uploads/2020/05/sample-mp4-file.mp4' -labels = 'demo/label_map_k400.txt' +labels = 'tools/data/kinetics/label_map_k400.txt' results = inference_recognizer(model, video, labels) # 根据配置文件和检查点来建立模型 diff --git a/docs_zh_CN/install.md b/docs_zh_CN/install.md index f5883403e5..f922536df0 100644 --- a/docs_zh_CN/install.md +++ b/docs_zh_CN/install.md @@ -240,5 +240,5 @@ device = torch.device(device) model = init_recognizer(config_file, device=device) # 进行演示视频的推理 -inference_recognizer(model, 'demo/demo.mp4', 'demo/label_map_k400.txt') +inference_recognizer(model, 'demo/demo.mp4', 'tools/data/kinetics/label_map_k400.txt') ``` diff --git a/tests/test_runtime/test_inference.py b/tests/test_runtime/test_inference.py index bd100e6be7..15acf740f8 100644 --- a/tests/test_runtime/test_inference.py +++ b/tests/test_runtime/test_inference.py @@ -10,7 +10,7 @@ video_config_file = 'configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py' # noqa: E501 frame_config_file = 'configs/recognition/tsn/tsn_r50_inference_1x1x3_100e_kinetics400_rgb.py' # noqa: E501 flow_frame_config_file = 'configs/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow.py' # noqa: E501 -label_path = 'demo/label_map_k400.txt' +label_path = 'tools/data/kinetics/label_map_k400.txt' video_path = 'demo/demo.mp4' frames_path = 'tests/data/imgs' diff --git a/tools/data/activitynet/label_map.txt b/tools/data/activitynet/label_map.txt new file mode 100644 index 0000000000..6b1bb01db4 --- /dev/null +++ b/tools/data/activitynet/label_map.txt @@ -0,0 +1,200 @@ +Applying sunscreen +Arm wrestling +Assembling bicycle +BMX +Baking cookies +Baton twirling +Beach soccer +Beer pong +Blow-drying hair +Blowing leaves +Playing ten pins +Braiding hair +Building sandcastles +Bullfighting +Calf roping +Camel ride +Canoeing +Capoeira +Carving jack-o-lanterns +Changing car wheel +Cleaning sink +Clipping cat claws +Croquet +Curling +Cutting the grass +Decorating the Christmas tree +Disc dog +Doing a powerbomb +Doing 
crunches +Drum corps +Elliptical trainer +Doing fencing +Fixing the roof +Fun sliding down +Futsal +Gargling mouthwash +Grooming dog +Hand car wash +Hanging wallpaper +Having an ice cream +Hitting a pinata +Hula hoop +Hurling +Ice fishing +Installing carpet +Kite flying +Kneeling +Knitting +Laying tile +Longboarding +Making a cake +Making a lemonade +Making an omelette +Mooping floor +Painting fence +Painting furniture +Peeling potatoes +Plastering +Playing beach volleyball +Playing blackjack +Playing congas +Playing drums +Playing ice hockey +Playing pool +Playing rubik cube +Powerbocking +Putting in contact lenses +Putting on shoes +Rafting +Raking leaves +Removing ice from car +Riding bumper cars +River tubing +Rock-paper-scissors +Rollerblading +Roof shingle removal +Rope skipping +Running a marathon +Scuba diving +Sharpening knives +Shuffleboard +Skiing +Slacklining +Snow tubing +Snowboarding +Spread mulch +Sumo +Surfing +Swimming +Swinging at the playground +Table soccer +Throwing darts +Trimming branches or hedges +Tug of war +Using the monkey bar +Using the rowing machine +Wakeboarding +Waterskiing +Waxing skis +Welding +Drinking coffee +Zumba +Doing kickboxing +Doing karate +Tango +Putting on makeup +High jump +Playing bagpipes +Cheerleading +Wrapping presents +Cricket +Clean and jerk +Preparing pasta +Bathing dog +Discus throw +Playing field hockey +Grooming horse +Preparing salad +Playing harmonica +Playing saxophone +Chopping wood +Washing face +Using the pommel horse +Javelin throw +Spinning +Ping-pong +Making a sandwich +Brushing hair +Playing guitarra +Doing step aerobics +Drinking beer +Playing polo +Snatch +Paintball +Long jump +Cleaning windows +Brushing teeth +Playing flauta +Tennis serve with ball bouncing +Bungee jumping +Triple jump +Horseback riding +Layup drill in basketball +Vacuuming floor +Cleaning shoes +Doing nails +Shot put +Fixing bicycle +Washing hands +Ironing clothes +Using the balance beam +Shoveling snow +Tumbling +Using parallel bars +Getting a tattoo +Rock climbing +Smoking hookah +Shaving +Getting a piercing +Springboard diving +Playing squash +Playing piano +Dodgeball +Smoking a cigarette +Sailing +Getting a haircut +Playing lacrosse +Cumbia +Tai chi +Painting +Mowing the lawn +Shaving legs +Walking the dog +Hammer throw +Skateboarding +Polishing shoes +Ballet +Hand washing clothes +Plataform diving +Playing violin +Breakdancing +Windsurfing +Hopscotch +Doing motocross +Mixing drinks +Starting a campfire +Belly dance +Removing curlers +Archery +Volleyball +Playing water polo +Playing racquetball +Kayaking +Polishing forniture +Playing kickball +Using uneven bars +Washing dishes +Pole vault +Playing accordion +Playing badminton diff --git a/demo/label_map_ava.txt b/tools/data/ava/label_map.txt similarity index 100% rename from demo/label_map_ava.txt rename to tools/data/ava/label_map.txt diff --git a/tools/data/diving48/label_map.txt b/tools/data/diving48/label_map.txt new file mode 100644 index 0000000000..e2f629dd4f --- /dev/null +++ b/tools/data/diving48/label_map.txt @@ -0,0 +1,48 @@ +Back+15som+05Twis+FREE +Back+15som+15Twis+FREE +Back+15som+25Twis+FREE +Back+15som+NoTwis+PIKE +Back+15som+NoTwis+TUCK +Back+25som+15Twis+PIKE +Back+25som+25Twis+PIKE +Back+25som+NoTwis+PIKE +Back+25som+NoTwis+TUCK +Back+2som+15Twis+FREE +Back+2som+25Twis+FREE +Back+35som+NoTwis+PIKE +Back+35som+NoTwis+TUCK +Back+3som+NoTwis+PIKE +Back+3som+NoTwis+TUCK +Back+Dive+NoTwis+PIKE +Back+Dive+NoTwis+TUCK +Forward+15som+1Twis+FREE +Forward+15som+2Twis+FREE 
+Forward+15som+NoTwis+PIKE +Forward+1som+NoTwis+PIKE +Forward+25som+1Twis+PIKE +Forward+25som+2Twis+PIKE +Forward+25som+3Twis+PIKE +Forward+25som+NoTwis+PIKE +Forward+25som+NoTwis+TUCK +Forward+35som+NoTwis+PIKE +Forward+35som+NoTwis+TUCK +Forward+45som+NoTwis+TUCK +Forward+Dive+NoTwis+PIKE +Forward+Dive+NoTwis+STR +Inward+15som+NoTwis+PIKE +Inward+15som+NoTwis+TUCK +Inward+25som+NoTwis+PIKE +Inward+25som+NoTwis+TUCK +Inward+35som+NoTwis+TUCK +Inward+Dive+NoTwis+PIKE +Reverse+15som+05Twis+FREE +Reverse+15som+15Twis+FREE +Reverse+15som+25Twis+FREE +Reverse+15som+35Twis+FREE +Reverse+15som+NoTwis+PIKE +Reverse+25som+15Twis+PIKE +Reverse+25som+NoTwis+PIKE +Reverse+25som+NoTwis+TUCK +Reverse+35som+NoTwis+TUCK +Reverse+Dive+NoTwis+PIKE +Reverse+Dive+NoTwis+TUCK diff --git a/tools/data/gym/label_map_gym99.txt b/tools/data/gym/label_map_gym99.txt new file mode 100644 index 0000000000..8bcf084d5f --- /dev/null +++ b/tools/data/gym/label_map_gym99.txt @@ -0,0 +1,99 @@ +Clabel: 0; set: 1; Glabel: 1; (VT) round-off, flic-flac with 0.5 turn on, stretched salto forward with 0.5 turn off +Clabel: 1; set: 1; Glabel: 19; (VT) round-off, flic-flac on, stretched salto backward with 2 turn off +Clabel: 2; set: 1; Glabel: 20; (VT) round-off, flic-flac on, stretched salto backward with 1 turn off +Clabel: 3; set: 1; Glabel: 21; (VT) round-off, flic-flac on, stretched salto backward with 1.5 turn off +Clabel: 4; set: 1; Glabel: 23; (VT) round-off, flic-flac on, stretched salto backward with 2.5 turn off +Clabel: 5; set: 1; Glabel: 24; (VT) round-off, flic-flac on, stretched salto backward off +Clabel: 6; set: 21; Glabel: 67; (FX) switch leap with 0.5 turn +Clabel: 7; set: 21; Glabel: 68; (FX) switch leap with 1 turn +Clabel: 8; set: 21; Glabel: 72; (FX) split leap with 1 turn +Clabel: 9; set: 21; Glabel: 73; (FX) split leap with 1.5 turn or more +Clabel: 10; set: 21; Glabel: 74; (FX) switch leap (leap forward with leg change to cross split) +Clabel: 11; set: 21; Glabel: 77; (FX) split jump with 1 turn +Clabel: 12; set: 21; Glabel: 81; (FX) split jump (leg separation 180 degree parallel to the floor) +Clabel: 13; set: 21; Glabel: 83; (FX) johnson with additional 0.5 turn +Clabel: 14; set: 21; Glabel: 88; (FX) straddle pike or side split jump with 1 turn +Clabel: 15; set: 21; Glabel: 96; (FX) switch leap to ring position +Clabel: 16; set: 21; Glabel: 104; (FX) stag jump +Clabel: 17; set: 22; Glabel: 134; (FX) 2 turn with free leg held upward in 180 split position throughout turn +Clabel: 18; set: 22; Glabel: 137; (FX) 2 turn in tuck stand on one leg, free leg straight throughout turn +Clabel: 19; set: 22; Glabel: 146; (FX) 3 turn on one leg, free leg optional below horizontal +Clabel: 20; set: 22; Glabel: 147; (FX) 2 turn on one leg, free leg optional below horizontal +Clabel: 21; set: 22; Glabel: 148; (FX) 1 turn on one leg, free leg optional below horizontal +Clabel: 22; set: 22; Glabel: 149; (FX) 2 turn or more with heel of free leg forward at horizontal throughout turn +Clabel: 23; set: 22; Glabel: 150; (FX) 1 turn with heel of free leg forward at horizontal throughout turn +Clabel: 24; set: 24; Glabel: 156; (FX) arabian double salto tucked +Clabel: 25; set: 24; Glabel: 163; (FX) salto forward tucked +Clabel: 26; set: 24; Glabel: 169; (FX) aerial walkover forward +Clabel: 27; set: 24; Glabel: 171; (FX) salto forward stretched with 2 twist +Clabel: 28; set: 24; Glabel: 172; (FX) salto forward stretched with 1 twist +Clabel: 29; set: 24; Glabel: 174; (FX) salto forward stretched with 1.5 twist +Clabel: 30; set: 
24; Glabel: 177; (FX) salto forward stretched, feet land together +Clabel: 31; set: 25; Glabel: 181; (FX) double salto backward stretched +Clabel: 32; set: 25; Glabel: 182; (FX) salto backward stretched with 3 twist +Clabel: 33; set: 25; Glabel: 183; (FX) salto backward stretched with 2 twist +Clabel: 34; set: 25; Glabel: 187; (FX) salto backward stretched with 2.5 twist +Clabel: 35; set: 25; Glabel: 188; (FX) salto backward stretched with 1.5 twist +Clabel: 36; set: 25; Glabel: 191; (FX) double salto backward tucked with 2 twist +Clabel: 37; set: 25; Glabel: 192; (FX) double salto backward tucked with 1 twist +Clabel: 38; set: 25; Glabel: 195; (FX) double salto backward tucked +Clabel: 39; set: 25; Glabel: 198; (FX) double salto backward piked with 1 twist +Clabel: 40; set: 25; Glabel: 199; (FX) double salto backward piked +Clabel: 41; set: 31; Glabel: 207; (BB) sissone (leg separation 180 degree on the diagonal to the floor, take off two feet, land on one foot) +Clabel: 42; set: 31; Glabel: 208; (BB) split jump with 0.5 turn in side position +Clabel: 43; set: 31; Glabel: 213; (BB) split jump +Clabel: 44; set: 31; Glabel: 219; (BB) straddle pike jump or side split jump +Clabel: 45; set: 31; Glabel: 222; (BB) split ring jump (ring jump with front leg horizontal to the floor) +Clabel: 46; set: 31; Glabel: 223; (BB) switch leap with 0.5 turn +Clabel: 47; set: 31; Glabel: 228; (BB) switch leap (leap forward with leg change) +Clabel: 48; set: 31; Glabel: 230; (BB) split leap forward +Clabel: 49; set: 31; Glabel: 232; (BB) johnson (leap forward with leg change and 0.25 turn to side split or straddle pike position) +Clabel: 50; set: 31; Glabel: 234; (BB) switch leap to ring position +Clabel: 51; set: 31; Glabel: 251; (BB) sheep jump (jump with upper back arch and head release with feet to head height/closed Ring) +Clabel: 52; set: 31; Glabel: 256; (BB) wolf hop or jump (hip angle at 45, knees together) +Clabel: 53; set: 32; Glabel: 268; (BB) 1 turn with heel of free leg forward at horizontal throughout turn +Clabel: 54; set: 32; Glabel: 270; (BB) 2 turn on one leg, free leg optional below horizontal +Clabel: 55; set: 32; Glabel: 272; (BB) 1 turn on one leg, free leg optional below horizontal +Clabel: 56; set: 32; Glabel: 279; (BB) 2 turn in tuck stand on one leg, free leg optional +Clabel: 57; set: 33; Glabel: 289; (BB) salto backward tucked with 1 twist +Clabel: 58; set: 33; Glabel: 290; (BB) salto backward tucked +Clabel: 59; set: 33; Glabel: 295; (BB) salto backward stretched-step out (feet land successively) +Clabel: 60; set: 33; Glabel: 297; (BB) salto backward stretched with legs together +Clabel: 61; set: 33; Glabel: 300; (BB) salto sideward tucked, take off from one leg to side stand +Clabel: 62; set: 33; Glabel: 306; (BB) free aerial cartwheel landing in cross position +Clabel: 63; set: 33; Glabel: 309; (BB) salto forward tucked to cross stand +Clabel: 64; set: 33; Glabel: 312; (BB) free aerial walkover forward, landing on one or both feet +Clabel: 65; set: 34; Glabel: 331; (BB) jump backward, flic-flac take-off with 0.5 twist through handstand to walkover forward, also with support on one arm +Clabel: 66; set: 34; Glabel: 334; (BB) flic-flac to land on both feet +Clabel: 67; set: 34; Glabel: 335; (BB) flic-flac with step-out, also with support on one arm +Clabel: 68; set: 34; Glabel: 336; (BB) round-off +Clabel: 69; set: 35; Glabel: 357; (BB) double salto backward tucked +Clabel: 70; set: 35; Glabel: 359; (BB) salto backward tucked +Clabel: 71; set: 35; Glabel: 363; (BB) double salto 
backward piked +Clabel: 72; set: 35; Glabel: 367; (BB) salto backward stretched with 2 twist +Clabel: 73; set: 35; Glabel: 370; (BB) salto backward stretched with 2.5 twist +Clabel: 74; set: 41; Glabel: 398; (UB) pike sole circle backward with 1 turn to handstand +Clabel: 75; set: 41; Glabel: 399; (UB) pike sole circle backward with 0.5 turn to handstand +Clabel: 76; set: 41; Glabel: 400; (UB) pike sole circle backward to handstand +Clabel: 77; set: 41; Glabel: 411; (UB) giant circle backward with 1 turn to handstand +Clabel: 78; set: 41; Glabel: 413; (UB) giant circle backward with 0.5 turn to handstand +Clabel: 79; set: 41; Glabel: 416; (UB) giant circle backward +Clabel: 80; set: 41; Glabel: 417; (UB) giant circle forward with 1 turn on one arm before handstand phase +Clabel: 81; set: 41; Glabel: 420; (UB) giant circle forward with 0.5 turn to handstand +Clabel: 82; set: 41; Glabel: 421; (UB) giant circle forward +Clabel: 83; set: 41; Glabel: 425; (UB) clear hip circle backward to handstand +Clabel: 84; set: 41; Glabel: 431; (UB) clear pike circle backward with 1 turn to handstand +Clabel: 85; set: 41; Glabel: 432; (UB) clear pike circle backward with 0.5 turn to handstand +Clabel: 86; set: 41; Glabel: 433; (UB) clear pike circle backward to handstand +Clabel: 87; set: 41; Glabel: 441; (UB) stalder backward with 1 turn to handstand +Clabel: 88; set: 41; Glabel: 443; (UB) stalder backward to handstand +Clabel: 89; set: 42; Glabel: 453; (UB) counter straddle over high bar to hang +Clabel: 90; set: 42; Glabel: 456; (UB) counter piked over high bar to hang +Clabel: 91; set: 42; Glabel: 462; (UB) (swing backward or front support) salto forward straddled to hang on high bar +Clabel: 92; set: 42; Glabel: 465; (UB) (swing backward) salto forward piked to hang on high bar +Clabel: 93; set: 42; Glabel: 466; (UB) (swing forward or hip circle backward) salto backward with 0.5 turn piked to hang on high bar +Clabel: 94; set: 43; Glabel: 471; (UB) transition flight from high bar to low bar +Clabel: 95; set: 43; Glabel: 472; (UB) transition flight from low bar to high bar +Clabel: 96; set: 44; Glabel: 481; (UB) (swing forward) double salto backward tucked with 1 turn +Clabel: 97; set: 44; Glabel: 484; (UB) (swing backward) double salto forward tucked +Clabel: 98; set: 44; Glabel: 516; (UB) (swing forward) double salto backward stretched diff --git a/tools/data/hmdb51/label_map.txt b/tools/data/hmdb51/label_map.txt new file mode 100644 index 0000000000..3217416f52 --- /dev/null +++ b/tools/data/hmdb51/label_map.txt @@ -0,0 +1,51 @@ +brush_hair +cartwheel +catch +chew +clap +climb +climb_stairs +dive +draw_sword +dribble +drink +eat +fall_floor +fencing +flic_flac +golf +handstand +hit +hug +jump +kick +kick_ball +kiss +laugh +pick +pour +pullup +punch +push +pushup +ride_bike +ride_horse +run +shake_hands +shoot_ball +shoot_bow +shoot_gun +sit +situp +smile +smoke +somersault +stand +swing_baseball +sword +sword_exercise +talk +throw +turn +walk +wave diff --git a/tools/data/hvu/label_map.json b/tools/data/hvu/label_map.json new file mode 100644 index 0000000000..a591a291db --- /dev/null +++ b/tools/data/hvu/label_map.json @@ -0,0 +1 @@ +{"action": ["abseiling", "acrobatics", "acting_in_play", "adjusting_glasses", "air_drumming", "alligator_wrestling", "alpine_skiing", "american_football", "angling", "answering_questions", "applauding", "applying_cream", "archaeological_excavation", "archery", "arguing", "arm_wrestling", "arranging_flowers", "assembling_bicycle", "assembling_computer", 
"attending_conference", "auctioning", "auto_racing", "backflip_human_", "baking_cookies", "ball_game", "bandaging", "barbequing", "bartending", "base_jumping", "baseball", "basketball_moves", "bathing", "bathing_dog", "baton_twirling", "battle_rope_training", "beach_soccer", "beatboxing", "bee_keeping", "belly_dancing", "bench_pressing", "bending_back", "bending_metal", "biking_through_snow", "blasting_sand", "blowdrying_hair", "blowing_bubble_gum", "blowing_glass", "blowing_leaves", "blowing_nose", "blowing_out_candles", "bmx", "boating", "bobsledding", "bodybuilding", "bodysurfing", "bookbinding", "bottling", "bouldering", "bouncing_on_bouncy_castle", "bouncing_on_trampoline", "bowling", "boxing", "braiding_hair", "breading_or_breadcrumbing", "breakdancing", "breaking_boards", "breathing_fire", "brush_painting", "brushing_hair", "brushing_teeth", "building_cabinet", "building_lego", "building_sandcastle", "building_shed", "bull_fighting", "bulldozing", "bungee_jumping", "burping", "busking", "calculating", "calf_roping", "calligraphy", "canoeing_or_kayaking", "capoeira", "capsizing", "card_game", "card_stacking", "card_throwing", "carrying_baby", "cartwheeling", "carving_ice", "carving_pumpkin", "casting_fishing_line", "catching_fish", "catching_or_throwing_baseball", "catching_or_throwing_frisbee", "catching_or_throwing_softball", "caving", "celebrating", "changing_gear_in_car", "changing_oil", "changing_wheel_not_on_bike_", "checking_tires", "cheering", "cheerleading", "chewing_gum", "chiseling_stone", "chiseling_wood", "chopping_meat", "chopping_vegetables", "chopping_wood", "choreography", "clam_digging", "clapping", "clay_pottery_making", "clean_and_jerk", "cleaning_gutters", "cleaning_pool", "cleaning_shoes", "cleaning_toilet", "cleaning_windows", "climbing", "climbing_a_rope", "climbing_ladder", "climbing_tree", "clipping_cat_claws", "coloring_in", "combing_hair", "contact_juggling", "contorting", "control", "cooking", "cooking_egg", "cooking_on_campfire", "cooking_sausages_not_on_barbeque_", "cooking_scallops", "cosplaying", "counting_money", "country_line_dancing", "cracking_back", "cracking_knuckles", "cracking_neck", "craft", "crawling_baby", "crochet", "croquet", "cross", "cross_country_cycling", "crossing_eyes", "crossing_river", "crying", "cumbia", "curling_hair", "curling_sport_", "cutting_apple", "cutting_nails", "cutting_orange", "cutting_pineapple", "cutting_the_grass", "cutting_watermelon", "cycling", "dance", "dancing_ballet", "dancing_charleston", "dancing_gangnam_style", "dancing_macarena", "deadlifting", "decorating_the_christmas_tree", "delivering_mail", "dining", "directing_traffic", "disc_dog", "disc_golfing", "diving", "diving_cliff", "docking_boat", "dodgeball", "doing_a_powerbomb", "doing_aerobics", "doing_jigsaw_puzzle", "doing_karate", "doing_kickboxing", "doing_laundry", "doing_motocross", "doing_nails", "downhill_mountain_biking", "drawing", "dribbling_basketball", "drinking", "drinking_shots", "driving_car", "driving_tractor", "drooling", "drop_kicking", "drum_corps", "drumming_fingers", "dumpster_diving", "dunking_basketball", "dyeing_eyebrows", "dyeing_hair", "eating", "eating_burger", "eating_cake", "eating_carrots", "eating_chips", "eating_doughnuts", "eating_hotdog", "eating_ice_cream", "eating_spaghetti", "eating_watermelon", "egg_hunting", "embroidering", "equitation", "exercising_with_an_exercise_ball", "extinguishing_fire", "faceplanting", "falling_off_bike", "falling_off_chair", "feeding_birds", "feeding_fish", "feeding_goats", 
"fencing_sport_", "fidgeting", "fight", "figure_skating", "finger_snapping", "fishing", "fixing_bicycle", "fixing_hair", "fixing_the_roof", "flint_knapping", "flipping_pancake", "fly_casting", "fly_fishing", "fly_tying", "flying_kite", "folding_clothes", "folding_napkins", "folding_paper", "folk_dance", "front_raises", "frying", "frying_vegetables", "futsal", "gambling", "geocaching", "getting_a_haircut", "getting_a_piercing", "getting_a_tattoo", "giving_or_receiving_award", "gliding", "gold_panning", "golf", "golf_chipping", "golf_driving", "golf_putting", "gospel_singing_in_church", "grappling", "grilling", "grinding_meat", "grooming_dog", "grooming_horse", "gymnastics", "gymnastics_tumbling", "hammer_throw", "hand_car_wash", "hand_washing_clothes", "harvest", "head_stand", "headbanging", "headbutting", "high_jump", "high_kick", "historical_reenactment", "hitting_a_pinata", "hitting_baseball", "hockey_stop", "holding_snake", "home_roasting_coffee", "hopscotch", "hoverboarding", "huddling", "hugging_baby", "hugging_not_baby_", "hula_hooping", "hunt_seat", "hurdling", "hurling_sport_", "ice_climbing", "ice_fishing", "ice_skating", "ice_swimming", "inflating_balloons", "inline_skating", "installing_carpet", "ironing", "ironing_hair", "javelin_throw", "jaywalking", "jetskiing", "jogging", "juggling_balls", "juggling_fire", "juggling_soccer_ball", "jumping", "jumping_bicycle", "jumping_into_pool", "jumping_jacks", "jumpstyle_dancing", "karaoke", "kicking_field_goal", "kicking_soccer_ball", "kissing", "kitesurfing", "knitting", "krumping", "land_sailing", "laughing", "lawn_mower_racing", "laying_bricks", "laying_concrete", "laying_stone", "laying_tiles", "layup_drill_in_basketball", "learning", "leatherworking", "licking", "lifting_hat", "lighting_fire", "lock_picking", "logging", "long_jump", "longboarding", "looking_at_phone", "luge", "lunge", "making_a_cake", "making_a_lemonade", "making_a_sandwich", "making_an_omelette", "making_balloon_shapes", "making_bubbles", "making_cheese", "making_horseshoes", "making_jewelry", "making_paper_aeroplanes", "making_pizza", "making_snowman", "making_sushi", "making_tea", "making_the_bed", "marching", "marching_percussion", "marriage_proposal", "massaging_back", "massaging_feet", "massaging_legs", "massaging_neck", "massaging_person_s_head", "milking_cow", "modern_dance", "moon_walking", "mopping_floor", "mosh_pit_dancing", "motorcycling", "mountain_biking", "mountain_climber_exercise_", "moving_furniture", "mowing_lawn", "mushroom_foraging", "needle_felting", "needlework", "news_anchoring", "opening_bottle_not_wine_", "opening_door", "opening_present", "opening_refrigerator", "opening_wine_bottle", "origami", "outdoor_recreation", "packing", "painting_fence", "painting_furniture", "pan_frying", "parachuting", "paragliding", "parasailing", "parkour", "passing_american_football_in_game_", "passing_american_football_not_in_game_", "passing_soccer_ball", "peeling_apples", "peeling_potatoes", "percussion", "person_collecting_garbage", "petting_animal_not_cat_", "petting_cat", "photobombing", "photocopying", "photograph", "physical_exercise", "picking_fruit", "pillow_fight", "pinching", "pirouetting", "pitch", "planing_wood", "planting_trees", "plastering", "plataform_diving", "playing_accordion", "playing_badminton", "playing_bagpipes", "playing_basketball", "playing_bass_guitar", "playing_beer_pong", "playing_blackjack", "playing_cello", "playing_chess", "playing_clarinet", "playing_congas", "playing_controller", "playing_cricket", "playing_cymbals", 
"playing_darts", "playing_didgeridoo", "playing_dominoes", "playing_drums", "playing_field_hockey", "playing_flute", "playing_gong", "playing_guitar", "playing_hand_clapping_games", "playing_harmonica", "playing_harp", "playing_ice_hockey", "playing_keyboard", "playing_kickball", "playing_lacrosse", "playing_laser_tag", "playing_lute", "playing_maracas", "playing_marbles", "playing_monopoly", "playing_netball", "playing_ocarina", "playing_organ", "playing_paintball", "playing_pan_pipes", "playing_piano", "playing_pinball", "playing_ping_pong", "playing_poker", "playing_polo", "playing_recorder", "playing_rubiks_cube", "playing_saxophone", "playing_scrabble", "playing_squash_or_racquetball", "playing_ten_pins", "playing_tennis", "playing_trombone", "playing_trumpet", "playing_ukulele", "playing_violin", "playing_volleyball", "playing_water_polo", "playing_with_trains", "playing_xylophone", "poking_bellybutton", "pole_vault", "polishing_forniture", "polishing_metal", "popping_balloons", "pouring_beer", "powerbocking", "preparing_pasta", "preparing_salad", "presenting_weather_forecast", "print", "public_speaking", "pull_ups", "pumping_fist", "pumping_gas", "punch", "punching_bag", "punching_person_boxing_", "purl", "push_up", "pushing_car", "pushing_cart", "pushing_wheelbarrow", "pushing_wheelchair", "putting_in_contact_lenses", "putting_on_eyeliner", "putting_on_foundation", "putting_on_lipstick", "putting_on_mascara", "putting_on_sari", "putting_on_shoes", "rafting", "raising_eyebrows", "raking_leaves", "reading", "reading_book", "reading_newspaper", "recording_music", "recreation", "recreational_fishing", "removing_curlers", "repairing_puncture", "riding_a_bike", "riding_bumper_cars", "riding_camel", "riding_elephant", "riding_mechanical_bull", "riding_mower", "riding_mule", "riding_or_walking_with_horse", "riding_scooter", "riding_snow_blower", "riding_unicycle", "ripping_paper", "river_tubing", "roasting", "roasting_marshmallows", "roasting_pig", "robot_dancing", "rock_climbing", "rock_scissors_paper", "rodeo", "roller_skating", "rollerblading", "rolling_pastry", "roof_shingle_removal", "rope_pushdown", "running", "running_on_treadmill", "sailing", "salsa_dancing", "sanding_floor", "sausage_making", "sawing_wood", "scrambling_eggs", "scrapbooking", "scrubbing_face", "scuba_diving", "separating_eggs", "setting_table", "sewing", "shaking_hands", "shaking_head", "shaping_bread_dough", "sharpening_knives", "sharpening_pencil", "shaving_head", "shaving_legs", "shearing_sheep", "shining_flashlight", "shining_shoes", "shooting", "shooting_basketball", "shooting_goal_soccer_", "shopping", "shot_put", "shoveling_snow", "shucking_oysters", "shuffling_cards", "shuffling_feet", "side_kick", "sign_language_interpreting", "singing", "sipping_cup", "sitting", "situp", "skateboarding", "ski_jumping", "skiing", "skiing_crosscountry", "skiing_mono", "skiing_slalom", "skipping_rope", "skipping_stone", "skydiving", "slacklining", "slapping", "sled_dog_racing", "sledding", "sleeping", "smashing", "smelling_feet", "smile", "smoking", "smoking_hookah", "smoking_pipe", "snatch_weight_lifting", "sneezing", "snorkeling", "snow_tubing", "snowboarding", "snowkiting", "snowmobiling", "soccer", "softball", "somersaulting", "sparring", "spelunking", "spinning_poi", "sports_training", "spray_painting", "spread_mulch", "springboard_diving", "sprint", "square_dancing", "squat", "standing", "standing_on_hands", "staring", "steer_roping", "sticking_tongue_out", "stitch", "stomping_grapes", "stone_carving", 
"strength_training", "stretching_arm", "stretching_leg", "sucking_lolly", "surf_fishing", "surfing_crowd", "surfing_water", "sweeping_floor", "swimming", "swimming_backstroke", "swimming_breast_stroke", "swimming_butterfly_stroke", "swimming_front_crawl", "swing_dancing", "swinging_baseball_bat", "swinging_on_something", "sword_fighting", "sword_swallowing", "table_soccer", "tackling", "tagging_graffiti", "tai_chi", "talking_on_cell_phone", "tango_dancing", "tap_dancing", "tapping_guitar", "tapping_pen", "tasting_beer", "tasting_food", "tasting_wine", "testifying", "texting", "threading_needle", "throwing_axe", "throwing_ball_not_baseball_or_american_football_", "throwing_discus", "throwing_knife", "throwing_snowballs", "throwing_tantrum", "throwing_water_balloon", "tickling", "tie_dying", "tightrope_walking", "tiptoeing", "tobogganing", "tossing_coin", "track_and_field", "trail_riding", "training_dog", "trapezing", "trimming_or_shaving_beard", "trimming_shrubs", "trimming_trees", "triple_jump", "twiddling_fingers", "tying_bow_tie", "tying_knot_not_on_a_tie_", "tying_necktie", "tying_shoe_laces", "unboxing", "underwater_diving", "unloading_truck", "using_a_microscope", "using_a_paint_roller", "using_a_power_drill", "using_a_sledge_hammer", "using_a_wrench", "using_atm", "using_bagging_machine", "using_circular_saw", "using_inhaler", "using_puppets", "using_remote_controller_not_gaming_", "using_segway", "using_the_monkey_bar", "using_the_pommel_horse", "vacuuming_floor", "visiting_the_zoo", "wading_through_mud", "wading_through_water", "waiting_in_line", "waking_up", "walking", "walking_the_dog", "walking_through_snow", "washing_dishes", "washing_feet", "washing_hair", "washing_hands", "waste", "watching_tv", "water_skiing", "water_sliding", "watering_plants", "waving_hand", "waxing_back", "waxing_chest", "waxing_eyebrows", "waxing_legs", "weaving", "weaving_basket", "weaving_fabric", "welding", "whistling", "wicker_weaving", "windsurfing", "winking", "wood_burning_art_", "worship", "wrapping_present", "wrestling", "writing", "yarn_spinning", "yawning", "yoga", "zumba"], "attribute": ["afro", "aggression", "al_dente", "angora", "art_paper", "asphalt", "azure", "bangs", "barechestedness", "beauty", "beige", "black", "black_and_white", "black_hair", "blond", "blue", "bmw", "boiling", "brass", "bricks_and_mortar", "brown", "brown_hair", "caffeine", "calm", "camouflage", "caramel_color", "cardboard", "ceramic", "citric_acid", "classic", "clay", "cleft", "cobalt_blue", "coca_cola", "complexion", "concrete", "cool", "dairy", "darkness", "daytime", "deciduous", "denim", "drama", "elder", "electric_blue", "emerald", "evergreen", "explosive_material", "floating", "fluid", "flyweight", "forward", "freezing", "fun", "glitter", "gold", "granite", "green", "happy", "human_hair_color", "hunky", "inflatable", "iron", "laminate", "layered_hair", "leather", "leisure", "lilac", "long_hair", "magenta", "maroon", "metal", "metropolis", "military", "moist", "monochrome", "multimedia", "neon", "orange", "origami_paper", "paper", "patchwork", "peach", "pigtail", "pink", "plane", "plastic", "platinum_blond", "plush", "plywood", "polka_dot", "pompadour", "purple", "rapid", "red", "red_hair", "reflection", "satin", "shade", "silk", "silver", "sweetness", "symmetry", "synthetic_rubber", "teal", "transparency_and_translucency", "turquoise", "velvet", "violet", "white", "wood", "wool", "woolen", "woven_fabric", "wrinkle", "yellow", "youth"], "concept": ["aerial_photography", "agriculture", "air_force", "air_sports", 
"american_food", "ancient_history", "angle", "animal_migration", "animal_source_foods", "animal_sports", "arch", "architecture", "army", "art", "artistic_gymnastics", "asian_food", "athletics", "audience", "automotive_design", "automotive_exterior", "aviation", "baked_goods", "ball_over_a_net_games", "bat_and_ball_games", "benthos", "blessing", "boardsport", "brand", "business", "cable_management", "cellular_network", "choir", "circle", "circus", "class", "classic_car", "classical_music", "clergy", "clip_art", "close_up", "collaboration", "color_guard", "combat_sport", "comfort", "comfort_food", "commodity", "community", "computer_program", "concert_band", "confectionery", "construction", "contact_sport", "convenience_food", "costume_design", "court", "court_game", "crew", "crowd", "cube", "cuisine", "currency", "cycle_sport", "cylinder", "decor", "design", "dialog_box", "diet_food", "display_advertising", "dog_breed", "dog_sports", "doubles", "dressage", "east_asian_food", "ecosystem", "electrical_network", "electricity", "electronics", "emergency", "emergency_service", "emotion", "endurance_sports", "energy", "engineering", "ensemble", "entertainment", "equestrian_sport", "erg", "european_food", "extreme_sport", "facial_expression", "family", "fashion_design", "fast_food", "fauna", "fictional_character", "field_game", "film", "finger_food", "fixed_link", "floral_design", "floristry", "font", "fried_food", "friendship", "frozen_food", "games", "geological_phenomenon", "geology", "german_food", "golf_club", "graffito", "graphic_design", "graphics", "grilled_food", "hairstyle", "handwriting", "health_care", "heart", "heat", "herd", "history", "human_behavior", "individual_sports", "indoor_games_and_sports", "industry", "infrastructure", "interaction", "interior_design", "inventory", "italian_food", "japanese_cuisine", "japanese_martial_arts", "job", "junk_food", "kite_sports", "land_vehicle", "laser", "laughter", "law_enforcement", "light_commercial_vehicle", "lighting", "line", "line_art", "local_food", "lockstitch", "logo", "love", "luxury_vehicle", "luxury_yacht", "major_appliance", "male", "management", "map", "marching_band", "marine_mammal", "martial_arts", "mass_production", "match_play", "meal", "medal_play", "medical", "medicine", "memorial", "mesh", "meteorological_phenomenon", "mid_size_car", "military_officer", "military_organization", "military_rank", "mineral", "mixture", "mode_of_transport", "modern_art", "money", "monochrome_photography", "motorsport", "music", "musical_ensemble", "natural_foods", "nature", "news", "non_sporting_group", "number", "off_road", "official", "orchestra", "organism", "pachyderm", "packaging_and_labeling", "painting", "party_supply", "pattern", "people", "performance", "performing_arts", "physical_fitness", "pint_us", "plaid", "plant_community", "plaster", "police", "pollinator", "pollution", "pop_music", "primate", "public_transport", "public_utility", "pyramid", "racquet_sport", "rapid_transit", "real_estate", "recipe", "rectangle", "religion", "research", "rock", "roller_sport", "romance", "rose_order", "seafood", "security", "selfie", "service", "shadow", "shelving", "shoal", "shooting_sport", "side_dish", "silhouette", "singles", "skin_care", "social_group", "software", "song", "spanish_cuisine", "sphere", "spiral", "spoor", "sport", "spotlight", "spring_break", "square", "star", "stick_and_ball_games", "stick_and_ball_sports", "still_life", "still_life_photography", "stock_photography", "street_art", "street_food", "striking_combat_sports", 
"stucco", "superfood", "surface_water_sports", "symbol", "tartan", "taste", "team", "team_sport", "technology", "telephony", "television_program", "tool", "tourism", "towed_water_sport", "tradition", "traditional_sport", "traffic", "tread", "triangle", "tribe", "troop", "underwater", "vegetarian_food", "vegetation", "video_game_software", "visual_arts", "war", "waste_containment", "water_ball_sports", "water_sport", "water_transportation", "watercraft", "weapon", "weapon_combat_sports", "website", "whole_food", "wildlife", "wind", "windsports", "winter_sport"], "event": ["800_metres", "adventure", "air_travel", "art_exhibition", "auto_show", "autumn", "award_ceremony", "banquet", "bedtime", "breakfast", "broad_jump", "brunch", "carnival", "ceremony", "championship", "christmas", "competition", "concert", "conference", "convention", "conversation", "decathlon", "demonstration", "dinner", "disaster", "evening", "exhibition", "festival", "flight", "freight_transport", "general_aviation", "graduation", "halloween", "heptathlon", "holiday", "lecture", "lunch", "manicure", "marathon", "massage", "meeting", "morning", "multi_sport_event", "news_conference", "night", "parade", "party", "photo_shoot", "picnic", "presentation", "protest", "public_event", "race", "ritual", "road_trip", "rock_concert", "safari", "seminar", "ski_cross", "speech", "spring", "summer", "sunrise_and_sunset", "supper", "tournament", "vacation", "wedding", "wedding_reception", "winter"], "object": ["abdomen", "academic_dress", "accordion", "accordionist", "acoustic_electric_guitar", "acoustic_guitar", "acrylic_paint", "action_figure", "active_undergarment", "adding_machine", "aegean_cat", "aerialist", "african_elephant", "agaric", "agaricaceae", "agaricomycetes", "agaricus", "agricultural_machinery", "agriculturist", "aioli", "air_bubble", "air_gun", "aircraft", "airliner", "alaskan_malamute", "album_cover", "alcoholic_beverage", "ale", "algae", "all_terrain_vehicle", "all_xbox_accessory", "alligator", "alloy_wheel", "alpinist", "alto_horn", "american_alligator", "american_pit_bull_terrier", "amusement_ride", "ananas", "anchor", "angle_grinder", "animal_fat", "ankle", "annual_plant", "antique", "antique_car", "appetizer", "apple", "aqua", "aqualung", "aquanaut", "aquarium", "aquatic_plant", "aquifoliaceae", "arabian_camel", "arcade_game", "archer", "arecales", "arm", "artifact", "artificial_fly", "artificial_turf", "artisan", "artwork", "athlete", "athletic_shoe", "audio_engineer", "audio_equipment", "auto_part", "automaton", "automotive_engine_part", "automotive_exhaust", "automotive_lighting", "automotive_mirror", "automotive_tire", "automotive_wheel_system", "automotive_window_part", "ax", "ax_handle", "baby_buggy", "baby_carrier", "baby_products", "baby_toys", "back", "backboard", "backhoe", "backseat", "bag", "bagel", "baggage", "bagpipes", "bait", "baker", "balance_beam", "balcony", "ball", "ballet_dancer", "ballet_skirt", "balloon", "baluster", "bandage", "banderillero", "bandoneon", "banjo", "banner", "barbell", "barber", "baritone_saxophone", "barramundi", "barrel", "barrow", "bartender", "barware", "baseball_bat", "baseball_cap", "baseball_equipment", "baseball_player", "basket", "basketball_player", "bass", "bass_drum", "bass_fiddle", "bass_guitar", "bass_oboe", "bassinet", "bassist", "bassoon", "bathing_cap", "bathroom_accessory", "bathroom_sink", "bathtub", "batter", "bayonne_ham", "bead", "beak", "beam", "bean", "beanie", "beard", "bed", "bed_frame", "bed_sheet", "bedding", "bedrock", "bee", "beef", 
"beef_tenderloin", "beehive", "beekeeper", "beer", "beer_cocktail", "beer_glass", "belay_device", "bell_peppers_and_chili_peppers", "bench", "berry", "beyaz_peynir", "bib", "bichon", "bicycle", "bicycle_accessory", "bicycle_chain", "bicycle_drivetrain_part", "bicycle_frame", "bicycle_handlebar", "bicycle_helmet", "bicycle_part", "bicycle_saddle", "bicycle_tire", "bicycle_wheel", "bidet", "big_cats", "bikini", "billboard", "bin", "birch", "bird", "birthday_cake", "biscuit", "black_belt", "black_cat", "blackboard", "blacksmith", "blade", "blazer", "blender", "block", "blood", "blossom", "blouse", "blue_collar_worker", "bmx_bike", "boa_constrictor", "board_game", "boas", "boat", "boats_and_boating_equipment_and_supplies", "bobsled", "bocce_ball", "bodybuilder", "bolete", "bonfire", "bongo", "bony_fish", "book", "bookcase", "boot", "bottle", "bottled_water", "boulder", "bouquet", "bow_and_arrow", "bow_tie", "bowed_string_instrument", "bowie_knife", "bowl", "bowler", "bowling_ball", "bowling_equipment", "bowling_pin", "box", "boxing_equipment", "boxing_glove", "boy", "bracelet", "brake_disk", "branch", "brass_instrument", "brassiere", "bratwurst", "bread", "bread_dough", "brick", "bricklayer", "brickwork", "bridal_clothing", "bride", "bridle", "briefs", "broccoli", "brochette", "bromeliaceae", "broom", "broth", "brush", "bubble", "bubble_gum", "bucket", "bugle", "bull", "bulldozer", "bullfighter", "bumper", "bumper_car", "bun", "bungee", "buoyancy_compensator", "bus", "businessperson", "butcher", "buttercream", "button", "button_accordion", "cab", "cabin_cruiser", "cabinet", "cabinetry", "cable", "caesar_salad", "cage", "cake", "calf", "camel", "camera", "camera_accessory", "camera_lens", "camera_operator", "camgirl", "campfire", "candle", "cannon", "canoe", "cap", "car", "car_mirror", "car_seat", "car_seat_cover", "car_tire", "car_wheel", "carbonara", "carbonated_soft_drinks", "cardboard_box", "caricaturist", "carnivoran", "carpenter", "carpet", "carriage", "carrot", "cart", "carton", "cartoon", "carving", "cash", "cash_machine", "cat", "catamaran", "cattle_like_mammal", "ceiling", "celesta", "cellist", "cello", "cellular_telephone", "center_console", "central_processing_unit", "centrepiece", "chain", "chain_link_fencing", "chain_saw", "chair", "chalk", "champagne", "champagne_stemware", "charcoal", "charcuterie", "chariot", "chassis", "cheek", "cheerleader", "cheerleading_uniform", "cheese", "cheese_pizza", "cheeseburger", "chef", "cherry", "chess_master", "chessboard", "chessman", "chest", "chest_hair", "chest_of_drawers", "chicken", "chihuahua", "child", "chin", "chip", "chocolate", "chocolate_brownie", "chocolate_cake", "chocolate_chip_cookie", "chocolate_spread", "choreographer", "christmas_decoration", "christmas_lights", "christmas_tree", "chute", "circuit", "circuit_component", "circular_saw", "circus_acrobat", "citrullus", "citrus", "city_car", "clam", "clams_oysters_mussels_and_scallops", "clarinet", "clarinet_family", "clavier", "clementine", "climber", "climbing_frame", "climbing_harness", "closet", "clothes_closet", "clothes_dryer", "clothes_hamper", "clothing", "cloud", "clown", "coat", "cobblestone", "cockapoo", "cocktail", "cocktail_dress", "cocktail_garnish", "coconut", "cod", "coffee", "coffee_bean", "coffee_cup", "coffee_table", "coin", "cola", "colander", "cold_weapon", "collage", "collar", "collection", "collie", "color_television", "colt", "colubridae", "column", "comb", "comforter", "commercial_vehicle", "common_pet_parakeet", "communication_device", "commuter", 
"compact_car", "compact_van", "companion_dog", "composite_material", "compound_microscope", "computer", "computer_accessory", "computer_case", "computer_component", "computer_cooling", "computer_hardware", "computer_keyboard", "concert_grand", "concertina", "condiment", "conifer", "construction_equipment", "construction_worker", "convertible", "cookie", "cookie_sheet", "cookies_and_crackers", "cookware_accessory", "cookware_and_bakeware", "cor_anglais", "coral", "coral_reef_fish", "cornet", "cosmetics", "costume", "couch", "countertop", "coverall", "cow_goat_family", "cowbarn", "cowboy", "cowboy_hat", "craftsman", "crampon", "crane", "cravat", "cream", "cream_cheese", "cricket_bat", "cricketer", "crochet_needle", "crocodile", "crocodilia", "crop", "croquet_mallet", "crossword_puzzle", "cruciferous_vegetables", "crystal", "cuatro", "cucumber", "cucumber_gourd_and_melon_family", "cucumis", "cucurbita", "cumulus", "cup", "cupboard", "curbstone", "curd", "curtain", "customer", "cut_flowers", "cutlery", "cymbal", "dairy_cattle", "dairy_cow", "dairy_product", "dance_dress", "dancer", "dashboard", "data_storage_device", "date_palm", "defenseman", "desk", "desktop_computer", "dessert", "dhow", "diaper", "diatonic_button_accordion", "digital_clock", "dining_table", "dinnerware_set", "dip", "discinaceae", "dish", "dishware", "dishwasher", "disk_jockey", "display_case", "display_device", "display_window", "distilled_beverage", "divemaster", "diver", "diving_equipment", "diving_mask", "dobok", "document", "dog", "dog_sled", "doll", "dolphin", "dome", "domestic_rabbit", "donkey", "door", "door_handle", "double_bass", "dough", "drawer", "dress", "dress_shirt", "drill", "drink", "drinker", "drinking_water", "drinkware", "drop", "drum", "drumhead", "drummer", "drumstick", "dry_suit", "dryer", "duck", "ducks_geese_and_swans", "dumbbell", "dump_truck", "duplicator", "dustpan", "ear", "earl_grey_tea", "earrings", "eating_apple", "edger", "edible_mushroom", "egg", "egg_yolk", "electric_guitar", "electric_organ", "electric_piano", "electrical_supply", "electrical_wiring", "electronic_component", "electronic_device", "electronic_keyboard", "electronic_musical_instrument", "electronic_signage", "electronics_accessory", "elephant", "elliptical_trainer", "emblem", "emergency_vehicle", "engine", "engineer", "envelope", "epee", "equestrian", "espresso", "euphonium", "executive_car", "exercise_bike", "exercise_equipment", "exercise_machine", "exhaust_system", "eye", "eye_shadow", "eyebrow", "eyelash", "eyewear", "facade", "face", "facial_hair", "family_car", "fan", "farm_machine", "farmer", "farmworker", "fashion_accessory", "fashion_model", "faucet", "feather", "feather_boa", "feature_phone", "fedora", "fence", "fencing_sword", "fencing_weapon", "fern", "ferry", "fiddle", "field_hockey_ball", "figure_skater", "figurine", "fin", "finger", "finger_paint", "fipple_flute", "fir", "fire", "firearm", "firefighter", "fireplace", "fish", "fish_feeder", "fisherman", "fishing_bait", "fishing_lure", "fishing_rod", "fishing_vessel", "fitness_professional", "flag", "flag_of_the_united_states", "flagstone", "flashlight", "flat_panel_display", "flatbread", "flautist", "flightless_bird", "flooring", "florist", "flour", "flourless_chocolate_cake", "flower", "flower_bouquet", "flowering_plant", "flowerpot", "flush_toilet", "flute", "flutist", "fly", "foal", "foil", "folk_dancer", "folk_instrument", "fondant", "food", "food_processor", "foot", "football_equipment_and_supplies", "football_helmet", "football_player", "footwear", 
"forehead", "fork", "forklift_truck", "formal_wear", "fortepiano", "foundation", "fountain", "fountain_pen", "free_reed_aerophone", "french_fries", "fret", "fried_egg", "fried_rice", "frost", "frozen_dessert", "fruit", "fruit_tree", "frying_pan", "fuel", "full_size_car", "fungus", "fur", "fur_clothing", "furniture", "gadget", "galliformes", "game_controller", "garbage_heap", "garbage_man", "garbage_truck", "garden_roses", "gardener", "garmon", "garnish", "gas_burner", "gas_pump", "gas_ring", "gate", "gauge", "gazebo", "gear", "gearshift", "gemstone", "german_shepherd_dog", "german_spitz", "gift", "gin_and_tonic", "giraffe", "girl", "glass", "glassblower", "glasses", "glider", "glockenspiel", "glove", "glutinous_rice", "go_kart", "goal", "goat", "goat_antelope", "goggles", "golden_retriever", "goldfish", "golf_ball", "golf_equipment", "golfcart", "golfer", "gourd", "gown", "graffiti", "grand_piano", "grape", "grapevine_family", "grass", "gravel", "great_dane", "greek_salad", "green_algae", "green_bean", "greenland_dog", "grenadier", "greyhound", "griddle", "grocer", "groom", "groundcover", "guard_dog", "guard_rail", "guitar", "guitar_accessory", "guitarist", "gymnast", "hair", "hair_accessory", "hair_coloring", "hair_dryer", "hairbrush", "hairdresser", "halter", "hamburger", "hammer", "hand", "hand_calculator", "hand_drum", "hand_glass", "handbag", "handcart", "handlebar", "handrail", "hang_glider", "hard_hat", "hardware", "hardware_accessory", "harmonica", "harp", "harvester", "hat", "hatchback", "hatchet", "havanese", "hay", "head", "head_restraint", "headgear", "headphones", "headpiece", "hearth", "heat_sink", "hedge", "heel", "helmet", "herb", "high_heeled_footwear", "highchair", "hip", "hockey_protective_equipment", "hockey_stick", "home_accessories", "home_appliance", "home_door", "home_fencing", "home_game_console_accessory", "honey_bee", "honeycomb", "hood", "hoodie", "horizontal_bar", "horn", "hors_d_oeuvre", "horse", "horse_and_buggy", "horse_harness", "horse_like_mammal", "horse_supplies", "horse_tack", "horse_trainer", "horseman", "hospital_bed", "hot_air_balloon", "hot_pot", "hot_tub", "household_cleaning_supply", "houseplant", "hub_gear", "hubcap", "human", "human_body", "human_leg", "hunting_dog", "hurdle", "hybrid_bicycle", "ice", "ice_cream", "ice_cream_cone", "ice_lolly", "ice_skate", "iceberg", "icing", "illustration", "indian_elephant", "infant", "infant_bed", "infantry", "inflatable_boat", "ingredient", "input_device", "insect", "invertebrate", "io_card", "iris", "ivy", "jack_o_lantern", "jacket", "jasmine_rice", "javelin", "jaw", "jeans", "jersey", "jewellery", "jigsaw_puzzle", "jockey", "joint", "jointer", "journalist", "joystick", "juggler", "juice", "jungle_gym", "kayak", "kettle", "keyboard_instrument", "keyboard_player", "kielbasa", "kilt", "kisser", "kitchen_appliance", "kitchen_knife", "kite", "kitten", "knackwurst", "knee", "knife", "knit_cap", "knitting_needle", "knot", "koi", "konghou", "lab_coat", "label", "labrador_retriever", "lace", "lacrosse_stick", "lacrosse_training_equipment", "ladder", "lamp", "laptop", "lasso", "latch", "lathe", "laundry", "lawn", "lcd_tv", "lead_pencil", "leaf", "leaf_vegetable", "leash", "led_backlit_lcd_display", "leggings", "lemon", "lemonade", "lens", "leotard", "lettuce", "lever", "ligament", "light_bulb", "light_fixture", "light_microscope", "lighter", "lighting_accessory", "lineman", "linens", "lingerie", "lip", "lip_gloss", "lipstick", "liquor_shelf", "litter", "little_black_dress", "livestock", "lobe", "lock", "locker", 
"locomotive", "loggerhead", "lollipop", "longboard", "loom", "lotion", "loudspeaker", "lovebird", "loveseat", "lumber", "lute", "macaw", "machine", "machine_tool", "magazine", "maillot", "makeup", "mallet", "maltese", "mammal", "man", "mandarin_orange", "mandolin", "mane", "maraca", "marcher", "mare", "marimba", "marine_invertebrates", "marines", "mask", "mason_jar", "mast", "mat", "matador", "matsutake", "mattress", "mattress_pad", "mcintosh", "measuring_instrument", "meat", "meat_grinder", "mechanic", "media_player", "medical_assistant", "medical_equipment", "medical_glove", "medicine_ball", "melee_weapon", "mellophone", "melon", "membrane_winged_insect", "mender", "metal_lathe", "metalsmith", "microcontroller", "microphone", "microscope", "microwave_oven", "miler", "military_camouflage", "military_person", "military_uniform", "milk", "miniature_poodle", "minibus", "minivan", "mirror", "mixer", "mixing_bowl", "mixing_console", "mobile_device", "mobile_phone", "model", "monument", "moped", "moss", "motherboard", "motocross_bike", "motor_scooter", "motor_ship", "motor_vehicle", "motorboat", "motorcycle", "motorcycle_accessories", "motorcyclist", "motorized_wheelchair", "mountain_bike", "mountaineer", "moustache", "mouth", "mower", "mud", "mug", "mule", "mural", "muscle", "musher", "mushroom", "musical_instrument", "musical_instrument_accessory", "musical_keyboard", "musician", "musket", "nail", "nail_polish", "neck", "necklace", "necktie", "needle", "neon_lamp", "neon_sign", "net", "newscaster", "newspaper", "nib", "nightwear", "non_alcoholic_beverage", "non_commissioned_officer", "non_skin_percussion_instrument", "noodle", "nose", "numeric_keypad", "oars", "oboist", "ocarina", "off_road_vehicle", "office_equipment", "office_supplies", "oil_paint", "open_wheel_car", "optical_instrument", "orator", "organ", "organ_pipe", "organist", "outdoor_furniture", "outdoor_grill", "outdoor_play_equipment", "outdoor_power_equipment", "outdoor_shoe", "outdoor_structure", "outerwear", "output_device", "overhead_power_line", "ox", "oxygen_mask", "oyster", "oyster_mushroom", "oyster_shell", "pack_animal", "paddle", "padlock", "paintball_equipment", "paintball_gun", "palm_tree", "pan", "panelist", "pantyhose", "paper_product", "paper_towel", "parachute", "parakeet", "parallel_bars", "park_bench", "parquet", "parrot", "parsley", "passenger", "passenger_ship", "pasta", "pastry", "patient", "paving", "paw", "pawn", "pearl", "pebble", "pedestrian", "peel", "pen", "pencil", "pencil_sharpener", "pepperoni", "percussion_accessory", "percussion_instrument", "percussionist", "performance_car", "perico", "personal_computer", "personal_digital_assistant", "personal_flotation_device", "personal_protective_equipment", "petal", "pezizales", "photocopier", "physical_therapist", "physician", "pianet", "pianist", "piano", "piano_keyboard", "picador", "picket_fence", "pickup_truck", "picnic_boat", "pig", "pig_like_mammal", "pigeon", "pigeons_and_doves", "pillow", "pilot_boat", "pinata", "pinball_machine", "pine", "pine_family", "pineapple", "pinscher", "pint_glass", "pipe", "pizza", "pizza_cheese", "plant", "plant_stem", "plastic_bag", "plate", "platter", "play_vehicle", "player", "playground_slide", "playpen", "playstation_3_accessory", "playstation_accessory", "pliers", "plimsoll", "plucked_string_instruments", "plumbing", "plumbing_fixture", "pocket", "pointer", "pole", "police_officer", "polo_mallet", "polo_pony", "polo_shirt", "pomeranian", "pommel_horse", "pontoon", "pony", "poodle", "porcelain", 
"portable_communications_device", "portable_media_player", "portrait", "poster", "potato", "potato_and_tomato_genus", "pothole", "powdered_sugar", "power_drill", "power_mower", "power_shovel", "printer", "produce", "professional_golfer", "propeller", "protective_equipment_in_gridiron_football", "protective_gear_in_sports", "pug", "pumpkin", "pungsan_dog", "puppy", "putter", "puzzle", "queen", "quill", "rabbit", "race_car", "racer", "racing_bicycle", "racket", "radial", "random_orbital_sander", "ranged_weapon", "rear_view_mirror", "recycling_bin", "red_carpet", "red_meat", "red_wine", "redhead", "reed_instrument", "refrigerator", "rein", "remote_control", "reptile", "researcher", "retaining_wall", "retriever", "ribbon", "rice", "rifle", "rim", "ring", "road_bicycle", "roast_beef", "robot", "rock_climbing_equipment", "rock_star", "rodent", "roller_blades", "roller_skates", "rolling_pin", "roof", "root", "root_vegetable", "rope", "rose", "rose_family", "rotisserie", "royal_icing", "rubber_boot", "rubble", "runner", "running_shoe", "saddle", "safe", "safety_belt", "safety_bicycle", "safety_glove", "sail", "sailboat", "sailing_ship", "salad", "salmon", "samoyed", "sand", "sand_wedge", "sandal", "sandbox", "sandwich", "sapsali", "sari", "sarong", "sash_window", "sashimi", "saucer", "sauces", "sausage", "saw", "saxhorn", "saxophone", "saxophonist", "scaffolding", "scale_model", "scaled_reptile", "scanner", "scarf", "schipperke", "schnoodle", "schooner", "scientific_instrument", "scissors", "scooter", "scoreboard", "scow", "scrap", "screen", "scuba_diver", "sculptor", "sculpture", "sea_ice", "sea_kayak", "sea_turtle", "seabird", "seaplane", "seat_belt", "seaweed", "sedan", "seed", "segway", "senior_citizen", "serger", "serpent", "serveware", "sewing_machine", "sewing_machine_needle", "shaving_cream", "shed", "sheep", "shelf", "shih_tzu", "ship", "shipwreck", "shirt", "shoe", "shopkeeper", "shopping_basket", "shopping_cart", "shorts", "shoulder", "shovel", "shower_curtain", "shrimp", "shrub", "siberian_husky", "sicilian_pizza", "sideboard", "siding", "sign", "singer", "singlet", "sink", "skateboard", "skateboarder", "skateboarding_equipment_and_supplies", "sketch", "skewer", "ski", "ski_binding", "ski_equipment", "ski_pole", "skidder", "skiff", "skin", "skin_head_percussion_instrument", "skirt", "slate_roof", "sled", "sled_dog", "sleeper", "sleeve", "sloop", "slot", "slot_machine", "small_appliance", "smartphone", "smoke", "snack", "snake", "snare_drum", "sneakers", "snorkel", "snout", "snow_thrower", "snowboard", "snowmobile", "snowplow", "snowshoe", "snowsuit", "soccer_ball", "soccer_player", "sock", "soft_drink", "soil", "soup", "space_bar", "spaghetti", "spaniel", "spatula", "speaker", "speedometer", "speleothem", "spice", "spin_dryer", "spinach", "spinach_salad", "spindle", "spinet", "spinning_wheel", "spitz", "spoke", "spokesperson", "spoon", "sport_kite", "sport_utility_vehicle", "sports_car", "sports_equipment", "sports_uniform", "sportswear", "spring_greens", "sprinkler", "spruce", "spume", "square_dancer", "squash", "stairs", "stalagmite", "stall", "stallion", "standard_poodle", "statue", "steak", "steam_iron", "steamed_rice", "steel", "steel_drum", "steering_part", "steering_wheel", "stemware", "stew", "stick", "stock_car", "stock_dove", "stocking", "stomach", "stone_wall", "stony_coral", "storage_basket", "stout", "stove_and_oven", "strainer", "straw", "streamer_fly", "street_light", "string_instrument", "string_instrument_accessory", "stubble", "student", "stuffed_toy", "stuffing", 
"stunt_performer", "subcompact_car", "subwoofer", "sugar_cake", "sugar_paste", "suit", "sun", "sun_hat", "sunbather", "sunglasses", "sunlight", "supercar", "superhero", "surfboard", "surfing_equipment_and_supplies", "sushi", "swab", "swan", "sweater", "sweet_grass", "swimmer", "swimsuit_bottom", "swimwear", "swing", "switch", "synthesizer", "t_shirt", "tabby_cat", "table", "table_knife", "table_tennis_racket", "tablecloth", "tabletop_game", "tableware", "tachometer", "taglierini", "tail", "tall_ship", "tank", "tarpaulin", "tattoo", "tea", "teacher", "teapot", "teddy_bear", "telephone", "television_presenter", "television_reporter", "television_set", "tennis_equipment_and_supplies", "tennis_player", "tennis_pro", "tennis_racket", "tenor_saxophonist", "tent", "terrestrial_animal", "terrestrial_plant", "terrier", "text", "textile", "theater_curtain", "therapist", "thigh", "thorns_spines_and_prickles", "thread", "thumb", "tights", "tile", "tiple", "tire", "toast", "toddler", "toe", "toilet", "toilet_tissue", "tom_tom_drum", "tomahawk", "tomato", "tongue", "tooth", "toothbrush", "top", "toppings", "torch", "torso", "torte", "tower", "toy", "toy_box", "toy_poodle", "track_spikes", "tractor", "traffic_cop", "traffic_light", "trail_bike", "trailer", "trailer_truck", "train", "trampoline", "trapeze", "travel_trailer", "tree", "tricycle", "trigger", "trombone", "trousers", "trowel", "truck", "trumpet", "trumpeter", "tub", "tudung", "tusk", "tuxedo", "twig", "uke", "umbrella", "undergarment", "underpants", "uneven_parallel_bars", "unicycle", "unicyclist", "uniform", "urinal", "vacuum_cleaner", "van", "vascular_plant", "vase", "vaulter", "vegetable", "vehicle", "vehicle_brake", "vehicle_door", "vehicle_registration_plate", "venison", "vertebrate", "vibraphone", "video_game_console", "vigil_light", "vintage_car", "vintage_clothing", "violin", "violin_family", "violinist", "violist", "vitis", "vizsla", "volleyball_net", "volleyball_player", "wagon", "waist", "waiter", "walk_behind_mower", "walker", "walking_shoe", "wall", "wardrobe", "washbasin", "washing_machine", "waste_container", "watch", "water", "water_bird", "water_feature", "water_polo_cap", "water_ski", "watercolor_paint", "waterfowl", "watering_can", "watermelon", "wave", "wedding_ceremony_supply", "wedding_dress", "wedding_ring", "weightlifter", "weights", "welder", "west_highland_white_terrier", "wetsuit", "whaler", "whales_dolphins_and_porpoises", "wheat_beer", "wheel", "wheelchair", "whipped_cream", "whippet", "whisk", "whiskers", "whisky", "whistle", "white_coat", "white_collar_worker", "white_rice", "wicker_basket", "wicket", "wig", "wildflower", "wildlife_biologist", "wind_instrument", "wind_wave", "window", "window_blind", "window_covering", "window_screen", "window_treatment", "windshield", "windshield_wiper", "wine", "wine_glass", "wing", "winter_squash", "wiper", "wire", "wire_fencing", "wok", "woman", "wood_burning_stove", "wood_stain", "woodwind_instrument", "woody_plant", "workman", "wrench", "wrestler", "wrestling_mat", "wrestling_singlet", "wrist", "xylophone", "yacht", "yakitori", "yolk"], "scene": ["aeolian_landform", "aisle", "alley", "amusement_park", "animal_shelter", "apartment", "apiary", "archaeological_site", "arena", "arroyo", "attic", "auditorium", "automobile_repair_shop", "backyard", "badlands", "bakery", "ballpark", "ballroom", "bank", "bar", "barbershop", "barn", "baseball_field", "baseball_positions", "basement", "basketball_court", "bathroom", "batting_cage", "bay", "bayou", "bazaar", "beach", "beauty_salon", 
"bedroom", "boardwalk", "body_of_water", "boutique", "bowling_alley", "boxing_ring", "bridge", "building", "bullring", "butcher_shop", "canyon", "cape", "carport", "casino", "cave", "channel", "chapel", "cityscape", "cliff", "clinic", "coast", "coastal_and_oceanic_landforms", "cockpit", "cocktail_lounge", "concert_hall", "condominium", "conference_hall", "coral_reef", "courtyard", "creek", "day_nursery", "deck", "desert", "dining_room", "dock", "downtown", "dune", "ecoregion", "escarpment", "estate", "factory", "fair", "farm", "fault", "field", "field_lacrosse", "fire_department", "fish_pond", "floor", "fluvial_landforms_of_streams", "football_stadium", "forest", "formation", "foundry", "function_hall", "garage", "garden", "garden_buildings", "glacial_lake", "golf_course", "grassland", "grocery_store", "grove", "gym", "hall", "harbor", "haze", "headland", "highland", "hill", "historic_site", "home", "horizon", "hospital", "hot_spring", "hotel", "hotel_room", "house", "hut", "ice_hockey_position", "ice_hockey_rink", "ice_rink", "inlet", "intersection", "kindergarten", "kitchen", "laboratory", "lake", "land_lot", "landmark", "landscape", "lane", "lecture_room", "leisure_centre", "littoral", "living_room", "log_cabin", "marina", "market", "marsh", "massif", "meadow", "meander", "metropolitan_area", "mountain", "mountain_pass", "mountain_range", "mountainous_landforms", "music_venue", "musical_theatre", "national_park", "natural_resources", "nature_reserve", "neighbourhood", "nightclub", "office", "opera", "outcrop", "paddy_field", "palace", "panorama", "park", "parking", "pasture", "path", "patio", "pavilion", "pedestrian_crossing", "performing_arts_center", "piste", "place_of_worship", "plain", "plateau", "playground", "plaza", "pond", "port", "property", "public_space", "race_track", "ranch", "reef", "religious_institute", "reservoir", "residential_area", "resort", "restaurant", "restroom", "retail", "ridge", "riparian_zone", "river", "riverbed", "road", "road_highway", "room", "rural_area", "sandbank", "sandbar", "school", "sea", "seashore", "seaside", "shack", "shooting_range", "shopping_mall", "shore", "sidewalk", "ski_slope", "sky", "skyline", "skyscraper", "snow_covered_landscape", "sport_venue", "stable", "stadium", "stage", "strand", "stream", "stream_bed", "street", "suburb", "summit", "supermarket", "swamp", "swimming_pool", "tavern", "television_room", "tennis_camp", "tennis_court", "terrain", "theatre", "toolroom", "tourist_attraction", "tower_block", "town", "town_square", "track", "tropical_beach", "tropics", "tunnel", "urban_area", "urban_design", "valley", "village", "walkway", "warehouse", "watercourse", "waterfall", "waterway", "wetland", "wildlife_region", "workshop", "yard", "zoo"]} diff --git a/tools/data/jester/label_map.txt b/tools/data/jester/label_map.txt new file mode 100644 index 0000000000..577e5a22e1 --- /dev/null +++ b/tools/data/jester/label_map.txt @@ -0,0 +1,27 @@ +Swiping Left +Swiping Right +Swiping Down +Swiping Up +Pushing Hand Away +Pulling Hand In +Sliding Two Fingers Left +Sliding Two Fingers Right +Sliding Two Fingers Down +Sliding Two Fingers Up +Pushing Two Fingers Away +Pulling Two Fingers In +Rolling Hand Forward +Rolling Hand Backward +Turning Hand Clockwise +Turning Hand Counterclockwise +Zooming In With Full Hand +Zooming Out With Full Hand +Zooming In With Two Fingers +Zooming Out With Two Fingers +Thumb Up +Thumb Down +Shaking Hand +Stop Sign +Drumming Fingers +No gesture +Doing other things diff --git a/demo/label_map_k400.txt 
b/tools/data/kinetics/label_map_k400.txt similarity index 100% rename from demo/label_map_k400.txt rename to tools/data/kinetics/label_map_k400.txt diff --git a/tools/data/kinetics/label_map_k600.txt b/tools/data/kinetics/label_map_k600.txt new file mode 100644 index 0000000000..639e9c91fa --- /dev/null +++ b/tools/data/kinetics/label_map_k600.txt @@ -0,0 +1,600 @@ +abseiling +acting in play +adjusting glasses +air drumming +alligator wrestling +answering questions +applauding +applying cream +archaeological excavation +archery +arguing +arm wrestling +arranging flowers +assembling bicycle +assembling computer +attending conference +auctioning +backflip (human) +baking cookies +bandaging +barbequing +bartending +base jumping +bathing dog +battle rope training +beatboxing +bee keeping +belly dancing +bench pressing +bending back +bending metal +biking through snow +blasting sand +blowdrying hair +blowing bubble gum +blowing glass +blowing leaves +blowing nose +blowing out candles +bobsledding +bodysurfing +bookbinding +bottling +bouncing on bouncy castle +bouncing on trampoline +bowling +braiding hair +breading or breadcrumbing +breakdancing +breaking boards +breathing fire +brush painting +brushing hair +brushing teeth +building cabinet +building lego +building sandcastle +building shed +bull fighting +bulldozing +bungee jumping +burping +busking +calculating +calligraphy +canoeing or kayaking +capoeira +capsizing +card stacking +card throwing +carrying baby +cartwheeling +carving ice +carving pumpkin +casting fishing line +catching fish +catching or throwing baseball +catching or throwing frisbee +catching or throwing softball +celebrating +changing gear in car +changing oil +changing wheel (not on bike) +checking tires +cheerleading +chewing gum +chiseling stone +chiseling wood +chopping meat +chopping vegetables +chopping wood +clam digging +clapping +clay pottery making +clean and jerk +cleaning gutters +cleaning pool +cleaning shoes +cleaning toilet +cleaning windows +climbing a rope +climbing ladder +climbing tree +coloring in +combing hair +contact juggling +contorting +cooking egg +cooking on campfire +cooking sausages (not on barbeque) +cooking scallops +cosplaying +counting money +country line dancing +cracking back +cracking knuckles +cracking neck +crawling baby +crossing eyes +crossing river +crying +cumbia +curling (sport) +curling hair +cutting apple +cutting nails +cutting orange +cutting pineapple +cutting watermelon +dancing ballet +dancing charleston +dancing gangnam style +dancing macarena +deadlifting +decorating the christmas tree +delivering mail +dining +directing traffic +disc golfing +diving cliff +docking boat +dodgeball +doing aerobics +doing jigsaw puzzle +doing laundry +doing nails +drawing +dribbling basketball +drinking shots +driving car +driving tractor +drooling +drop kicking +drumming fingers +dumpster diving +dunking basketball +dyeing eyebrows +dyeing hair +eating burger +eating cake +eating carrots +eating chips +eating doughnuts +eating hotdog +eating ice cream +eating spaghetti +eating watermelon +egg hunting +embroidering +exercising with an exercise ball +extinguishing fire +faceplanting +falling off bike +falling off chair +feeding birds +feeding fish +feeding goats +fencing (sport) +fidgeting +finger snapping +fixing bicycle +fixing hair +flint knapping +flipping pancake +fly tying +flying kite +folding clothes +folding napkins +folding paper +front raises +frying vegetables +geocaching +getting a haircut +getting a piercing +getting a tattoo 
+giving or receiving award +gold panning +golf chipping +golf driving +golf putting +gospel singing in church +grinding meat +grooming dog +grooming horse +gymnastics tumbling +hammer throw +hand washing clothes +head stand +headbanging +headbutting +high jump +high kick +historical reenactment +hitting baseball +hockey stop +holding snake +home roasting coffee +hopscotch +hoverboarding +huddling +hugging (not baby) +hugging baby +hula hooping +hurdling +hurling (sport) +ice climbing +ice fishing +ice skating +ice swimming +inflating balloons +installing carpet +ironing +ironing hair +javelin throw +jaywalking +jetskiing +jogging +juggling balls +juggling fire +juggling soccer ball +jumping bicycle +jumping into pool +jumping jacks +jumpstyle dancing +karaoke +kicking field goal +kicking soccer ball +kissing +kitesurfing +knitting +krumping +land sailing +laughing +lawn mower racing +laying bricks +laying concrete +laying stone +laying tiles +leatherworking +licking +lifting hat +lighting fire +lock picking +long jump +longboarding +looking at phone +luge +lunge +making a cake +making a sandwich +making balloon shapes +making bubbles +making cheese +making horseshoes +making jewelry +making paper aeroplanes +making pizza +making snowman +making sushi +making tea +making the bed +marching +marriage proposal +massaging back +massaging feet +massaging legs +massaging neck +massaging person's head +milking cow +moon walking +mopping floor +mosh pit dancing +motorcycling +mountain climber (exercise) +moving furniture +mowing lawn +mushroom foraging +needle felting +news anchoring +opening bottle (not wine) +opening door +opening present +opening refrigerator +opening wine bottle +packing +paragliding +parasailing +parkour +passing American football (in game) +passing american football (not in game) +passing soccer ball +peeling apples +peeling potatoes +person collecting garbage +petting animal (not cat) +petting cat +photobombing +photocopying +picking fruit +pillow fight +pinching +pirouetting +planing wood +planting trees +plastering +playing accordion +playing badminton +playing bagpipes +playing basketball +playing bass guitar +playing beer pong +playing blackjack +playing cello +playing chess +playing clarinet +playing controller +playing cricket +playing cymbals +playing darts +playing didgeridoo +playing dominoes +playing drums +playing field hockey +playing flute +playing gong +playing guitar +playing hand clapping games +playing harmonica +playing harp +playing ice hockey +playing keyboard +playing kickball +playing laser tag +playing lute +playing maracas +playing marbles +playing monopoly +playing netball +playing ocarina +playing organ +playing paintball +playing pan pipes +playing piano +playing pinball +playing ping pong +playing poker +playing polo +playing recorder +playing rubiks cube +playing saxophone +playing scrabble +playing squash or racquetball +playing tennis +playing trombone +playing trumpet +playing ukulele +playing violin +playing volleyball +playing with trains +playing xylophone +poking bellybutton +pole vault +polishing metal +popping balloons +pouring beer +preparing salad +presenting weather forecast +pull ups +pumping fist +pumping gas +punching bag +punching person (boxing) +push up +pushing car +pushing cart +pushing wheelbarrow +pushing wheelchair +putting in contact lenses +putting on eyeliner +putting on foundation +putting on lipstick +putting on mascara +putting on sari +putting on shoes +raising eyebrows +reading book +reading newspaper +recording 
music +repairing puncture +riding a bike +riding camel +riding elephant +riding mechanical bull +riding mule +riding or walking with horse +riding scooter +riding snow blower +riding unicycle +ripping paper +roasting marshmallows +roasting pig +robot dancing +rock climbing +rock scissors paper +roller skating +rolling pastry +rope pushdown +running on treadmill +sailing +salsa dancing +sanding floor +sausage making +sawing wood +scrambling eggs +scrapbooking +scrubbing face +scuba diving +separating eggs +setting table +sewing +shaking hands +shaking head +shaping bread dough +sharpening knives +sharpening pencil +shaving head +shaving legs +shearing sheep +shining flashlight +shining shoes +shooting basketball +shooting goal (soccer) +shopping +shot put +shoveling snow +shucking oysters +shuffling cards +shuffling feet +side kick +sign language interpreting +singing +sipping cup +situp +skateboarding +ski jumping +skiing crosscountry +skiing mono +skiing slalom +skipping rope +skipping stone +skydiving +slacklining +slapping +sled dog racing +sleeping +smashing +smelling feet +smoking +smoking hookah +smoking pipe +snatch weight lifting +sneezing +snorkeling +snowboarding +snowkiting +snowmobiling +somersaulting +spelunking +spinning poi +spray painting +springboard diving +square dancing +squat +standing on hands +staring +steer roping +sticking tongue out +stomping grapes +stretching arm +stretching leg +sucking lolly +surfing crowd +surfing water +sweeping floor +swimming backstroke +swimming breast stroke +swimming butterfly stroke +swimming front crawl +swing dancing +swinging baseball bat +swinging on something +sword fighting +sword swallowing +tackling +tagging graffiti +tai chi +talking on cell phone +tango dancing +tap dancing +tapping guitar +tapping pen +tasting beer +tasting food +tasting wine +testifying +texting +threading needle +throwing axe +throwing ball (not baseball or American football) +throwing discus +throwing knife +throwing snowballs +throwing tantrum +throwing water balloon +tickling +tie dying +tightrope walking +tiptoeing +tobogganing +tossing coin +training dog +trapezing +trimming or shaving beard +trimming shrubs +trimming trees +triple jump +twiddling fingers +tying bow tie +tying knot (not on a tie) +tying necktie +tying shoe laces +unboxing +unloading truck +using a microscope +using a paint roller +using a power drill +using a sledge hammer +using a wrench +using atm +using bagging machine +using circular saw +using inhaler +using puppets +using remote controller (not gaming) +using segway +vacuuming floor +visiting the zoo +wading through mud +wading through water +waiting in line +waking up +walking the dog +walking through snow +washing dishes +washing feet +washing hair +washing hands +watching tv +water skiing +water sliding +watering plants +waving hand +waxing back +waxing chest +waxing eyebrows +waxing legs +weaving basket +weaving fabric +welding +whistling +windsurfing +winking +wood burning (art) +wrapping present +wrestling +writing +yarn spinning +yawning +yoga +zumba diff --git a/tools/data/kinetics/label_map_k700.txt b/tools/data/kinetics/label_map_k700.txt new file mode 100644 index 0000000000..2ce7e6fa5c --- /dev/null +++ b/tools/data/kinetics/label_map_k700.txt @@ -0,0 +1,700 @@ +abseiling +acting in play +adjusting glasses +air drumming +alligator wrestling +answering questions +applauding +applying cream +archaeological excavation +archery +arguing +arm wrestling +arranging flowers +arresting +assembling bicycle +assembling computer 
+attending conference +auctioning +baby waking up +backflip (human) +baking cookies +bandaging +barbequing +bartending +base jumping +bathing dog +battle rope training +beatboxing +bee keeping +being excited +being in zero gravity +belly dancing +bench pressing +bending back +bending metal +biking through snow +blasting sand +blending fruit +blowdrying hair +blowing bubble gum +blowing glass +blowing leaves +blowing nose +blowing out candles +bobsledding +bodysurfing +bookbinding +bottling +bouncing ball (not juggling) +bouncing on bouncy castle +bouncing on trampoline +bowling +braiding hair +breading or breadcrumbing +breakdancing +breaking boards +breaking glass +breathing fire +brush painting +brushing floor +brushing hair +brushing teeth +building cabinet +building lego +building sandcastle +building shed +bulldozing +bungee jumping +burping +busking +calculating +calligraphy +canoeing or kayaking +capoeira +capsizing +card stacking +card throwing +carrying baby +carrying weight +cartwheeling +carving ice +carving marble +carving pumpkin +carving wood with a knife +casting fishing line +catching fish +catching or throwing baseball +catching or throwing frisbee +catching or throwing softball +celebrating +changing gear in car +changing oil +changing wheel (not on bike) +chasing +checking tires +checking watch +cheerleading +chewing gum +chiseling stone +chiseling wood +chopping meat +chopping wood +clam digging +clapping +clay pottery making +clean and jerk +cleaning gutters +cleaning pool +cleaning shoes +cleaning toilet +cleaning windows +climbing a rope +climbing ladder +climbing tree +closing door +coloring in +combing hair +contact juggling +contorting +cooking chicken +cooking egg +cooking on campfire +cooking sausages (not on barbeque) +cooking scallops +cosplaying +coughing +counting money +country line dancing +cracking back +cracking knuckles +cracking neck +crawling baby +crocheting +crossing eyes +crossing river +crying +cumbia +curling (sport) +curling eyelashes +curling hair +cutting apple +cutting cake +cutting nails +cutting orange +cutting pineapple +cutting watermelon +dancing ballet +dancing charleston +dancing gangnam style +dancing macarena +deadlifting +dealing cards +decorating the christmas tree +decoupage +delivering mail +digging +dining +directing traffic +disc golfing +diving cliff +docking boat +dodgeball +doing aerobics +doing jigsaw puzzle +doing laundry +doing nails +doing sudoku +drawing +dribbling basketball +drinking shots +driving car +driving tractor +drooling +drop kicking +drumming fingers +dumpster diving +dunking basketball +dyeing eyebrows +dyeing hair +eating burger +eating cake +eating carrots +eating chips +eating doughnuts +eating hotdog +eating ice cream +eating nachos +eating spaghetti +eating watermelon +egg hunting +embroidering +entering church +exercising arm +exercising with an exercise ball +extinguishing fire +faceplanting +falling off bike +falling off chair +feeding birds +feeding fish +feeding goats +fencing (sport) +fidgeting +filling cake +filling eyebrows +finger snapping +fixing bicycle +fixing hair +flint knapping +flipping bottle +flipping pancake +fly tying +flying kite +folding clothes +folding napkins +folding paper +front raises +frying vegetables +gargling +geocaching +getting a haircut +getting a piercing +getting a tattoo +giving or receiving award +gold panning +golf chipping +golf driving +golf putting +gospel singing in church +grinding meat +grooming cat +grooming dog +grooming horse +gymnastics tumbling +hammer 
throw +hand washing clothes +head stand +headbanging +headbutting +helmet diving +herding cattle +high fiving +high jump +high kick +historical reenactment +hitting baseball +hockey stop +holding snake +home roasting coffee +hopscotch +hoverboarding +huddling +hugging (not baby) +hugging baby +hula hooping +hurdling +hurling (sport) +ice climbing +ice fishing +ice skating +ice swimming +inflating balloons +installing carpet +ironing +ironing hair +javelin throw +jaywalking +jetskiing +jogging +juggling balls +juggling fire +juggling soccer ball +jumping bicycle +jumping into pool +jumping jacks +jumping sofa +jumpstyle dancing +karaoke +kicking field goal +kicking soccer ball +kissing +kitesurfing +knitting +krumping +land sailing +laughing +lawn mower racing +laying bricks +laying concrete +laying decking +laying stone +laying tiles +leatherworking +letting go of balloon +licking +lifting hat +lighting candle +lighting fire +listening with headphones +lock picking +long jump +longboarding +looking at phone +looking in mirror +luge +lunge +making a cake +making a sandwich +making balloon shapes +making bubbles +making cheese +making horseshoes +making jewelry +making latte art +making paper aeroplanes +making pizza +making slime +making snowman +making sushi +making tea +making the bed +marching +marriage proposal +massaging back +massaging feet +massaging legs +massaging neck +massaging person's head +metal detecting +milking cow +milking goat +mixing colours +moon walking +mopping floor +mosh pit dancing +motorcycling +mountain climber (exercise) +moving baby +moving child +moving furniture +mowing lawn +mushroom foraging +needle felting +news anchoring +opening bottle (not wine) +opening coconuts +opening door +opening present +opening refrigerator +opening wine bottle +packing +paragliding +parasailing +parkour +passing American football (in game) +passing American football (not in game) +passing soccer ball +peeling apples +peeling banana +peeling potatoes +person collecting garbage +petting animal (not cat) +petting cat +petting horse +photobombing +photocopying +picking apples +picking blueberries +pillow fight +pinching +pirouetting +planing wood +planting trees +plastering +playing accordion +playing american football +playing badminton +playing bagpipes +playing basketball +playing bass guitar +playing beer pong +playing billiards +playing blackjack +playing cards +playing cello +playing checkers +playing chess +playing clarinet +playing controller +playing cricket +playing cymbals +playing darts +playing didgeridoo +playing dominoes +playing drums +playing field hockey +playing flute +playing gong +playing guitar +playing hand clapping games +playing harmonica +playing harp +playing ice hockey +playing keyboard +playing kickball +playing laser tag +playing lute +playing mahjong +playing maracas +playing marbles +playing monopoly +playing netball +playing nose flute +playing oboe +playing ocarina +playing organ +playing paintball +playing pan pipes +playing piano +playing piccolo +playing pinball +playing ping pong +playing poker +playing polo +playing recorder +playing road hockey +playing rounders +playing rubiks cube +playing saxophone +playing scrabble +playing shuffleboard +playing slot machine +playing squash or racquetball +playing tennis +playing trombone +playing trumpet +playing ukulele +playing violin +playing volleyball +playing with trains +playing xylophone +poaching eggs +poking bellybutton +pole vault +polishing furniture +polishing metal +popping balloons 
+pouring beer +pouring milk +pouring wine +preparing salad +presenting weather forecast +pretending to be a statue +pull ups +pulling espresso shot +pulling rope (game) +pumping fist +pumping gas +punching bag +punching person (boxing) +push up +pushing car +pushing cart +pushing wheelbarrow +pushing wheelchair +putting in contact lenses +putting on eyeliner +putting on foundation +putting on lipstick +putting on mascara +putting on sari +putting on shoes +putting wallpaper on wall +raising eyebrows +reading book +reading newspaper +recording music +repairing puncture +riding a bike +riding camel +riding elephant +riding mechanical bull +riding mule +riding or walking with horse +riding scooter +riding snow blower +riding unicycle +ripping paper +roasting marshmallows +roasting pig +robot dancing +rock climbing +rock scissors paper +roller skating +rolling eyes +rolling pastry +rope pushdown +running on treadmill +sailing +salsa dancing +saluting +sanding floor +sanding wood +sausage making +sawing wood +scrambling eggs +scrapbooking +scrubbing face +scuba diving +seasoning food +separating eggs +setting table +sewing +shaking hands +shaking head +shaping bread dough +sharpening knives +sharpening pencil +shaving head +shaving legs +shearing sheep +shining flashlight +shining shoes +shoot dance +shooting basketball +shooting goal (soccer) +shooting off fireworks +shopping +shot put +shouting +shoveling snow +shredding paper +shucking oysters +shuffling cards +shuffling feet +side kick +sieving +sign language interpreting +silent disco +singing +sipping cup +situp +skateboarding +ski ballet +ski jumping +skiing crosscountry +skiing mono +skiing slalom +skipping rope +skipping stone +skydiving +slacklining +slapping +sled dog racing +sleeping +slicing onion +smashing +smelling feet +smoking +smoking hookah +smoking pipe +snatch weight lifting +sneezing +snorkeling +snowboarding +snowkiting +snowmobiling +somersaulting +spelunking +spinning plates +spinning poi +splashing water +spray painting +spraying +springboard diving +square dancing +squat +squeezing orange +stacking cups +stacking dice +standing on hands +staring +steer roping +steering car +sticking tongue out +stomping grapes +stretching arm +stretching leg +sucking lolly +surfing crowd +surfing water +surveying +sweeping floor +swimming backstroke +swimming breast stroke +swimming butterfly stroke +swimming front crawl +swimming with dolphins +swimming with sharks +swing dancing +swinging baseball bat +swinging on something +sword fighting +sword swallowing +tackling +tagging graffiti +tai chi +taking photo +talking on cell phone +tango dancing +tap dancing +tapping guitar +tapping pen +tasting beer +tasting food +tasting wine +testifying +texting +threading needle +throwing axe +throwing ball (not baseball or American football) +throwing discus +throwing knife +throwing snowballs +throwing tantrum +throwing water balloon +tickling +tie dying +tightrope walking +tiptoeing +tobogganing +tossing coin +tossing salad +training dog +trapezing +treating wood +trimming or shaving beard +trimming shrubs +trimming trees +triple jump +twiddling fingers +tying bow tie +tying knot (not on a tie) +tying necktie +tying shoe laces +unboxing +uncorking champagne +unloading truck +using a microscope +using a paint roller +using a power drill +using a sledge hammer +using a wrench +using atm +using bagging machine +using circular saw +using inhaler +using megaphone +using puppets +using remote controller (not gaming) +using segway +vacuuming car 
+vacuuming floor +visiting the zoo +wading through mud +wading through water +waiting in line +waking up +walking on stilts +walking the dog +walking through snow +walking with crutches +washing dishes +washing feet +washing hair +washing hands +watching tv +water skiing +water sliding +watering plants +waving hand +waxing armpits +waxing back +waxing chest +waxing eyebrows +waxing legs +weaving basket +weaving fabric +welding +whistling +windsurfing +winking +wood burning (art) +wrapping present +wrestling +writing +yarn spinning +yawning +yoga +zumba diff --git a/tools/data/mit/label_map.txt b/tools/data/mit/label_map.txt new file mode 100644 index 0000000000..0f7495ea5c --- /dev/null +++ b/tools/data/mit/label_map.txt @@ -0,0 +1,339 @@ +clapping,0 +praying,1 +dropping,2 +burying,3 +covering,4 +flooding,5 +leaping,6 +drinking,7 +slapping,8 +cuddling,9 +sleeping,10 +preaching,11 +raining,12 +stitching,13 +spraying,14 +twisting,15 +coaching,16 +submerging,17 +breaking,18 +tuning,19 +boarding,20 +running,21 +destroying,22 +competing,23 +giggling,24 +shoveling,25 +chasing,26 +flicking,27 +pouring,28 +buttoning,29 +hammering,30 +carrying,31 +surfing,32 +pulling,33 +squatting,34 +aiming,35 +crouching,36 +tapping,37 +skipping,38 +washing,39 +winking,40 +queuing,41 +locking,42 +stopping,43 +sneezing,44 +flipping,45 +sewing,46 +clipping,47 +working,48 +rocking,49 +asking,50 +playing+fun,51 +camping,52 +plugging,53 +pedaling,54 +constructing,55 +slipping,56 +sweeping,57 +screwing,58 +shrugging,59 +hitchhiking,60 +cracking,61 +scratching,62 +trimming,63 +selling,64 +marching,65 +stirring,66 +kissing,67 +jumping,68 +starting,69 +clinging,70 +socializing,71 +picking,72 +splashing,73 +licking,74 +kicking,75 +sliding,76 +filming,77 +driving,78 +handwriting,79 +steering,80 +filling,81 +crashing,82 +stealing,83 +pressing,84 +shouting,85 +hiking,86 +vacuuming,87 +pointing,88 +giving,89 +diving,90 +hugging,91 +building,92 +swerving,93 +dining,94 +floating,95 +cheerleading,96 +leaning,97 +sailing,98 +singing,99 +playing,100 +hitting,101 +bubbling,102 +joining,103 +bathing,104 +raising,105 +sitting,106 +drawing,107 +protesting,108 +rinsing,109 +coughing,110 +smashing,111 +slicing,112 +balancing,113 +rafting,114 +kneeling,115 +dunking,116 +brushing,117 +crushing,118 +rubbing,119 +punting,120 +watering,121 +playing+music,122 +removing,123 +tearing,124 +imitating,125 +teaching,126 +cooking,127 +reaching,128 +studying,129 +serving,130 +bulldozing,131 +shaking,132 +discussing,133 +dragging,134 +gardening,135 +performing,136 +officiating,137 +photographing,138 +sowing,139 +dripping,140 +writing,141 +clawing,142 +bending,143 +boxing,144 +mopping,145 +gripping,146 +flowing,147 +digging,148 +tripping,149 +cheering,150 +buying,151 +bicycling,152 +feeding,153 +emptying,154 +unpacking,155 +sketching,156 +standing,157 +weeding,158 +stacking,159 +drying,160 +crying,161 +spinning,162 +frying,163 +cutting,164 +paying,165 +eating,166 +lecturing,167 +dancing,168 +adult+female+speaking,169 +boiling,170 +peeling,171 +wrapping,172 +wetting,173 +attacking,174 +welding,175 +putting,176 +swinging,177 +carving,178 +walking,179 +dressing,180 +inflating,181 +climbing,182 +shredding,183 +reading,184 +sanding,185 +frowning,186 +closing,187 +hunting,188 +clearing,189 +launching,190 +packaging,191 +fishing,192 +spilling,193 +leaking,194 +knitting,195 +boating,196 +sprinkling,197 +baptizing,198 +playing+sports,199 +rolling,200 +spitting,201 +dipping,202 +riding,203 +chopping,204 +extinguishing,205 +applauding,206 +calling,207 +talking,208 
+adult+male+speaking,209 +snowing,210 +shaving,211 +marrying,212 +rising,213 +laughing,214 +crawling,215 +flying,216 +assembling,217 +injecting,218 +landing,219 +operating,220 +packing,221 +descending,222 +falling,223 +entering,224 +pushing,225 +sawing,226 +smelling,227 +overflowing,228 +fighting,229 +waking,230 +barbecuing,231 +skating,232 +painting,233 +drilling,234 +punching,235 +tying,236 +manicuring,237 +plunging,238 +grilling,239 +pitching,240 +towing,241 +telephoning,242 +crafting,243 +knocking,244 +playing+videogames,245 +storming,246 +placing,247 +turning,248 +barking,249 +child+singing,250 +opening,251 +waxing,252 +juggling,253 +mowing,254 +shooting,255 +sniffing,256 +interviewing,257 +stomping,258 +chewing,259 +arresting,260 +grooming,261 +rowing,262 +bowing,263 +gambling,264 +saluting,265 +fueling,266 +autographing,267 +throwing,268 +drenching,269 +waving,270 +signing,271 +repairing,272 +baking,273 +smoking,274 +skiing,275 +drumming,276 +child+speaking,277 +blowing,278 +cleaning,279 +combing,280 +spreading,281 +racing,282 +combusting,283 +adult+female+singing,284 +fencing,285 +swimming,286 +adult+male+singing,287 +snuggling,288 +shopping,289 +bouncing,290 +dusting,291 +stroking,292 +snapping,293 +biting,294 +roaring,295 +guarding,296 +unloading,297 +lifting,298 +instructing,299 +folding,300 +measuring,301 +whistling,302 +exiting,303 +stretching,304 +taping,305 +squinting,306 +catching,307 +draining,308 +massaging,309 +scrubbing,310 +handcuffing,311 +celebrating,312 +jogging,313 +colliding,314 +bowling,315 +resting,316 +blocking,317 +smiling,318 +tattooing,319 +erupting,320 +howling,321 +parading,322 +grinning,323 +sprinting,324 +hanging,325 +planting,326 +speaking,327 +ascending,328 +yawning,329 +cramming,330 +burning,331 +wrestling,332 +poking,333 +tickling,334 +exercising,335 +loading,336 +piloting,337 +typing,338 diff --git a/tools/data/mmit/label_map.txt b/tools/data/mmit/label_map.txt new file mode 100644 index 0000000000..efa36158ec --- /dev/null +++ b/tools/data/mmit/label_map.txt @@ -0,0 +1,313 @@ +crafting,0 +paddling,1 +raining,2 +weightlifting,3 +clawing,4 +hitchhiking,5 +autographing,6 +cooking,7 +gripping,8 +swerving,9 +frowning,10 +giving,11 +tattooing,12 +dipping,13 +leaking,14 +plunging,15 +barking,16 +stroking/petting,17 +piloting,18 +camping,19 +towing,20 +loading,21 +parading,22 +submerging,23 +squeezing,24 +sculpting,25 +stomping,26 +punting,27 +kissing,28 +smoking,29 +pouring,30 +texting,31 +adult+male+speaking,32 +adult+female+speaking,33 +crying,34 +unpacking,35 +pointing,36 +boating,37 +landing,38 +ironing,39 +crouching,40 +slapping,41 +typing,42 +ice+skating,43 +boiling,44 +chopping,45 +bowling,46 +fighting/attacking,47 +tapping,48 +applauding,49 +driving,50 +sprinting,51 +slicing,52 +approaching,53 +waving,54 +dusting,55 +wrapping,56 +knocking,57 +snapping,58 +gardening,59 +combing,60 +tickling,61 +carving,62 +smashing,63 +smiling/grinning,64 +dressing,65 +pressing,66 +lecturing,67 +telephoning,68 +exercising,69 +riding,70 +draining,71 +flying,72 +wrestling,73 +boxing,74 +rinsing,75 +overflowing,76 +inflating,77 +picking,78 +sowing,79 +shaving,80 +baking,81 +shaking,82 +running,83 +throwing,84 +stacking/piling,85 +buttoning,86 +leaping,87 +fueling,88 +pitching,89 +child+speaking,90 +breaking/destroying,91 +lifting,92 +filming/photographing,93 +singing,94 +reading,95 +chewing,96 +operating,97 +bubbling,98 +waxing,99 +cleaning/washing,100 +scooping,101 +erasing,102 +steering,103 +playing+videogames,104 +crashing,105 +constructing/assembling,106 
+flooding,107 +drinking,108 +praying,109 +shouting,110 +winking,111 +dining,112 +repairing,113 +tying,114 +juggling,115 +rolling,116 +studying,117 +marching,118 +socializing,119 +ascending/rising,120 +arresting,121 +cracking,122 +laying,123 +clinging,124 +frying,125 +vacuuming,126 +combusting/burning,127 +filling,128 +standing,129 +howling,130 +dunking,131 +spraying,132 +bandaging,133 +shivering,134 +slipping,135 +racing,136 +roaring,137 +planting,138 +yawning,139 +grilling,140 +squinting,141 +skiing,142 +taping,143 +trimming,144 +preaching,145 +resting,146 +descending/lowering,147 +clearing,148 +screwing,149 +chasing,150 +speaking,151 +manicuring,152 +tripping,153 +performing,154 +teaching/instructing,155 +blowing,156 +painting,157 +sneezing,158 +packaging,159 +punching,160 +clapping,161 +rotating/spinning,162 +skating,163 +cheerleading,164 +balancing,165 +child+singing,166 +covering,167 +snuggling/cuddling/hugging,168 +bulldozing,169 +jumping,170 +sliding,171 +barbecuing,172 +weeding,173 +swimming,174 +shooting,175 +dialing,176 +measuring,177 +pulling,178 +celebrating,179 +playing+fun,180 +knitting,181 +spreading,182 +erupting,183 +snowboarding,184 +swinging,185 +protesting,186 +sitting,187 +inserting,188 +bouncing,189 +surfing,190 +extinguishing,191 +unloading,192 +aiming,193 +bathing,194 +hammering,195 +fishing,196 +opening,197 +biting,198 +packing,199 +saluting,200 +rafting,201 +laughing,202 +bicycling,203 +rocking,204 +storming,205 +wetting,206 +shrugging,207 +handwriting,208 +gambling,209 +writing,210 +skipping,211 +dragging,212 +unplugging,213 +kicking,214 +sawing,215 +grooming,216 +whistling,217 +floating,218 +diving,219 +rubbing,220 +bending,221 +shoveling/digging,222 +peeling,223 +catching,224 +closing,225 +eating/feeding,226 +falling,227 +discussing,228 +sweeping,229 +massaging,230 +locking,231 +dancing,232 +mowing,233 +clipping,234 +hanging,235 +burying,236 +reaching,237 +kayaking,238 +snowing,239 +sleeping,240 +climbing,241 +flipping,242 +tearing/ripping,243 +folding,244 +signing,245 +cutting,246 +stretching,247 +stirring,248 +licking,249 +kneeling,250 +sewing,251 +dripping,252 +queuing,253 +pushing,254 +pedaling,255 +flossing,256 +buying/selling/shopping,257 +smelling/sniffing,258 +emptying,259 +sanding,260 +smacking,261 +carrying,262 +adult+male+singing,263 +poking,264 +brushing,265 +adult+female+singing,266 +scratching,267 +welding,268 +crawling,269 +skateboarding,270 +turning,271 +dropping,272 +hunting,273 +cheering,274 +drawing,275 +sprinkling,276 +spitting,277 +competing,278 +bowing,279 +hiking,280 +drying,281 +launching,282 +twisting,283 +crushing,284 +hitting/colliding,285 +shredding,286 +plugging,287 +gasping,288 +rowing,289 +calling,290 +drumming,291 +walking,292 +removing,293 +waking,294 +stitching,295 +coughing,296 +playing+music,297 +playing+sports,298 +interviewing,299 +scrubbing,300 +splashing,301 +officiating,302 +mopping,303 +flowing,304 +sailing,305 +drilling,306 +squatting,307 +handcuffing,308 +spilling,309 +marrying,310 +injecting,311 +jogging,312 diff --git a/tools/data/skeleton/label_map_gym99.txt b/tools/data/skeleton/label_map_gym99.txt new file mode 100644 index 0000000000..8bcf084d5f --- /dev/null +++ b/tools/data/skeleton/label_map_gym99.txt @@ -0,0 +1,99 @@ +Clabel: 0; set: 1; Glabel: 1; (VT) round-off, flic-flac with 0.5 turn on, stretched salto forward with 0.5 turn off +Clabel: 1; set: 1; Glabel: 19; (VT) round-off, flic-flac on, stretched salto backward with 2 turn off +Clabel: 2; set: 1; Glabel: 20; (VT) round-off, flic-flac on, stretched 
salto backward with 1 turn off +Clabel: 3; set: 1; Glabel: 21; (VT) round-off, flic-flac on, stretched salto backward with 1.5 turn off +Clabel: 4; set: 1; Glabel: 23; (VT) round-off, flic-flac on, stretched salto backward with 2.5 turn off +Clabel: 5; set: 1; Glabel: 24; (VT) round-off, flic-flac on, stretched salto backward off +Clabel: 6; set: 21; Glabel: 67; (FX) switch leap with 0.5 turn +Clabel: 7; set: 21; Glabel: 68; (FX) switch leap with 1 turn +Clabel: 8; set: 21; Glabel: 72; (FX) split leap with 1 turn +Clabel: 9; set: 21; Glabel: 73; (FX) split leap with 1.5 turn or more +Clabel: 10; set: 21; Glabel: 74; (FX) switch leap (leap forward with leg change to cross split) +Clabel: 11; set: 21; Glabel: 77; (FX) split jump with 1 turn +Clabel: 12; set: 21; Glabel: 81; (FX) split jump (leg separation 180 degree parallel to the floor) +Clabel: 13; set: 21; Glabel: 83; (FX) johnson with additional 0.5 turn +Clabel: 14; set: 21; Glabel: 88; (FX) straddle pike or side split jump with 1 turn +Clabel: 15; set: 21; Glabel: 96; (FX) switch leap to ring position +Clabel: 16; set: 21; Glabel: 104; (FX) stag jump +Clabel: 17; set: 22; Glabel: 134; (FX) 2 turn with free leg held upward in 180 split position throughout turn +Clabel: 18; set: 22; Glabel: 137; (FX) 2 turn in tuck stand on one leg, free leg straight throughout turn +Clabel: 19; set: 22; Glabel: 146; (FX) 3 turn on one leg, free leg optional below horizontal +Clabel: 20; set: 22; Glabel: 147; (FX) 2 turn on one leg, free leg optional below horizontal +Clabel: 21; set: 22; Glabel: 148; (FX) 1 turn on one leg, free leg optional below horizontal +Clabel: 22; set: 22; Glabel: 149; (FX) 2 turn or more with heel of free leg forward at horizontal throughout turn +Clabel: 23; set: 22; Glabel: 150; (FX) 1 turn with heel of free leg forward at horizontal throughout turn +Clabel: 24; set: 24; Glabel: 156; (FX) arabian double salto tucked +Clabel: 25; set: 24; Glabel: 163; (FX) salto forward tucked +Clabel: 26; set: 24; Glabel: 169; (FX) aerial walkover forward +Clabel: 27; set: 24; Glabel: 171; (FX) salto forward stretched with 2 twist +Clabel: 28; set: 24; Glabel: 172; (FX) salto forward stretched with 1 twist +Clabel: 29; set: 24; Glabel: 174; (FX) salto forward stretched with 1.5 twist +Clabel: 30; set: 24; Glabel: 177; (FX) salto forward stretched, feet land together +Clabel: 31; set: 25; Glabel: 181; (FX) double salto backward stretched +Clabel: 32; set: 25; Glabel: 182; (FX) salto backward stretched with 3 twist +Clabel: 33; set: 25; Glabel: 183; (FX) salto backward stretched with 2 twist +Clabel: 34; set: 25; Glabel: 187; (FX) salto backward stretched with 2.5 twist +Clabel: 35; set: 25; Glabel: 188; (FX) salto backward stretched with 1.5 twist +Clabel: 36; set: 25; Glabel: 191; (FX) double salto backward tucked with 2 twist +Clabel: 37; set: 25; Glabel: 192; (FX) double salto backward tucked with 1 twist +Clabel: 38; set: 25; Glabel: 195; (FX) double salto backward tucked +Clabel: 39; set: 25; Glabel: 198; (FX) double salto backward piked with 1 twist +Clabel: 40; set: 25; Glabel: 199; (FX) double salto backward piked +Clabel: 41; set: 31; Glabel: 207; (BB) sissone (leg separation 180 degree on the diagonal to the floor, take off two feet, land on one foot) +Clabel: 42; set: 31; Glabel: 208; (BB) split jump with 0.5 turn in side position +Clabel: 43; set: 31; Glabel: 213; (BB) split jump +Clabel: 44; set: 31; Glabel: 219; (BB) straddle pike jump or side split jump +Clabel: 45; set: 31; Glabel: 222; (BB) split ring jump (ring jump with 
front leg horizontal to the floor) +Clabel: 46; set: 31; Glabel: 223; (BB) switch leap with 0.5 turn +Clabel: 47; set: 31; Glabel: 228; (BB) switch leap (leap forward with leg change) +Clabel: 48; set: 31; Glabel: 230; (BB) split leap forward +Clabel: 49; set: 31; Glabel: 232; (BB) johnson (leap forward with leg change and 0.25 turn to side split or straddle pike position) +Clabel: 50; set: 31; Glabel: 234; (BB) switch leap to ring position +Clabel: 51; set: 31; Glabel: 251; (BB) sheep jump (jump with upper back arch and head release with feet to head height/closed Ring) +Clabel: 52; set: 31; Glabel: 256; (BB) wolf hop or jump (hip angle at 45, knees together) +Clabel: 53; set: 32; Glabel: 268; (BB) 1 turn with heel of free leg forward at horizontal throughout turn +Clabel: 54; set: 32; Glabel: 270; (BB) 2 turn on one leg, free leg optional below horizontal +Clabel: 55; set: 32; Glabel: 272; (BB) 1 turn on one leg, free leg optional below horizontal +Clabel: 56; set: 32; Glabel: 279; (BB) 2 turn in tuck stand on one leg, free leg optional +Clabel: 57; set: 33; Glabel: 289; (BB) salto backward tucked with 1 twist +Clabel: 58; set: 33; Glabel: 290; (BB) salto backward tucked +Clabel: 59; set: 33; Glabel: 295; (BB) salto backward stretched-step out (feet land successively) +Clabel: 60; set: 33; Glabel: 297; (BB) salto backward stretched with legs together +Clabel: 61; set: 33; Glabel: 300; (BB) salto sideward tucked, take off from one leg to side stand +Clabel: 62; set: 33; Glabel: 306; (BB) free aerial cartwheel landing in cross position +Clabel: 63; set: 33; Glabel: 309; (BB) salto forward tucked to cross stand +Clabel: 64; set: 33; Glabel: 312; (BB) free aerial walkover forward, landing on one or both feet +Clabel: 65; set: 34; Glabel: 331; (BB) jump backward, flic-flac take-off with 0.5 twist through handstand to walkover forward, also with support on one arm +Clabel: 66; set: 34; Glabel: 334; (BB) flic-flac to land on both feet +Clabel: 67; set: 34; Glabel: 335; (BB) flic-flac with step-out, also with support on one arm +Clabel: 68; set: 34; Glabel: 336; (BB) round-off +Clabel: 69; set: 35; Glabel: 357; (BB) double salto backward tucked +Clabel: 70; set: 35; Glabel: 359; (BB) salto backward tucked +Clabel: 71; set: 35; Glabel: 363; (BB) double salto backward piked +Clabel: 72; set: 35; Glabel: 367; (BB) salto backward stretched with 2 twist +Clabel: 73; set: 35; Glabel: 370; (BB) salto backward stretched with 2.5 twist +Clabel: 74; set: 41; Glabel: 398; (UB) pike sole circle backward with 1 turn to handstand +Clabel: 75; set: 41; Glabel: 399; (UB) pike sole circle backward with 0.5 turn to handstand +Clabel: 76; set: 41; Glabel: 400; (UB) pike sole circle backward to handstand +Clabel: 77; set: 41; Glabel: 411; (UB) giant circle backward with 1 turn to handstand +Clabel: 78; set: 41; Glabel: 413; (UB) giant circle backward with 0.5 turn to handstand +Clabel: 79; set: 41; Glabel: 416; (UB) giant circle backward +Clabel: 80; set: 41; Glabel: 417; (UB) giant circle forward with 1 turn on one arm before handstand phase +Clabel: 81; set: 41; Glabel: 420; (UB) giant circle forward with 0.5 turn to handstand +Clabel: 82; set: 41; Glabel: 421; (UB) giant circle forward +Clabel: 83; set: 41; Glabel: 425; (UB) clear hip circle backward to handstand +Clabel: 84; set: 41; Glabel: 431; (UB) clear pike circle backward with 1 turn to handstand +Clabel: 85; set: 41; Glabel: 432; (UB) clear pike circle backward with 0.5 turn to handstand +Clabel: 86; set: 41; Glabel: 433; (UB) clear pike circle backward 
to handstand +Clabel: 87; set: 41; Glabel: 441; (UB) stalder backward with 1 turn to handstand +Clabel: 88; set: 41; Glabel: 443; (UB) stalder backward to handstand +Clabel: 89; set: 42; Glabel: 453; (UB) counter straddle over high bar to hang +Clabel: 90; set: 42; Glabel: 456; (UB) counter piked over high bar to hang +Clabel: 91; set: 42; Glabel: 462; (UB) (swing backward or front support) salto forward straddled to hang on high bar +Clabel: 92; set: 42; Glabel: 465; (UB) (swing backward) salto forward piked to hang on high bar +Clabel: 93; set: 42; Glabel: 466; (UB) (swing forward or hip circle backward) salto backward with 0.5 turn piked to hang on high bar +Clabel: 94; set: 43; Glabel: 471; (UB) transition flight from high bar to low bar +Clabel: 95; set: 43; Glabel: 472; (UB) transition flight from low bar to high bar +Clabel: 96; set: 44; Glabel: 481; (UB) (swing forward) double salto backward tucked with 1 turn +Clabel: 97; set: 44; Glabel: 484; (UB) (swing backward) double salto forward tucked +Clabel: 98; set: 44; Glabel: 516; (UB) (swing forward) double salto backward stretched diff --git a/demo/label_map_ntu120.txt b/tools/data/skeleton/label_map_ntu120.txt similarity index 100% rename from demo/label_map_ntu120.txt rename to tools/data/skeleton/label_map_ntu120.txt diff --git a/tools/data/sthv1/label_map.txt b/tools/data/sthv1/label_map.txt new file mode 100644 index 0000000000..8e07166d8b --- /dev/null +++ b/tools/data/sthv1/label_map.txt @@ -0,0 +1,174 @@ +Holding something +Turning something upside down +Turning the camera left while filming something +Stacking number of something +Turning the camera right while filming something +Opening something +Approaching something with your camera +Picking something up +Pushing something so that it almost falls off but doesn't +Folding something +Moving something away from the camera +Closing something +Moving away from something with your camera +Turning the camera downwards while filming something +Pushing something so that it slightly moves +Turning the camera upwards while filming something +Pretending to pick something up +Showing something to the camera +Moving something up +Plugging something into something +Unfolding something +Putting something onto something +Showing that something is empty +Pretending to put something on a surface +Taking something from somewhere +Putting something next to something +Moving something towards the camera +Showing a photo of something to the camera +Pushing something with something +Throwing something +Pushing something from left to right +Something falling like a feather or paper +Throwing something in the air and letting it fall +Throwing something against something +Lifting something with something on it +Taking one of many similar things on the table +Showing something behind something +Putting something into something +Tearing something just a little bit +Moving something away from something +Tearing something into two pieces +Pushing something from right to left +Holding something next to something +Putting something, something and something on the table +Pretending to take something from somewhere +Moving something closer to something +Pretending to put something next to something +Uncovering something +Something falling like a rock +Putting something and something on the table +Pouring something into something +Moving something down +Pulling something from right to left +Throwing something in the air and catching it +Tilting something with something on it until it falls off +Putting 
something in front of something +Pretending to turn something upside down +Putting something on a surface +Pretending to throw something +Showing something on top of something +Covering something with something +Squeezing something +Putting something similar to other things that are already on the table +Lifting up one end of something, then letting it drop down +Taking something out of something +Moving part of something +Pulling something from left to right +Lifting something up completely without letting it drop down +Attaching something to something +Putting something behind something +Moving something and something closer to each other +Holding something in front of something +Pushing something so that it falls off the table +Holding something over something +Pretending to open something without actually opening it +Removing something, revealing something behind +Hitting something with something +Moving something and something away from each other +Touching (without moving) part of something +Pretending to put something into something +Showing that something is inside something +Lifting something up completely, then letting it drop down +Pretending to take something out of something +Holding something behind something +Laying something on the table on its side, not upright +Poking something so it slightly moves +Pretending to close something without actually closing it +Putting something upright on the table +Dropping something in front of something +Dropping something behind something +Lifting up one end of something without letting it drop down +Rolling something on a flat surface +Throwing something onto a surface +Showing something next to something +Dropping something onto something +Stuffing something into something +Dropping something into something +Piling something up +Letting something roll along a flat surface +Twisting something +Spinning something that quickly stops spinning +Putting number of something onto something +Putting something underneath something +Moving something across a surface without it falling down +Plugging something into something but pulling it right out as you remove your hand +Dropping something next to something +Poking something so that it falls over +Spinning something so it continues spinning +Poking something so lightly that it doesn't or almost doesn't move +Wiping something off of something +Moving something across a surface until it falls down +Pretending to poke something +Putting something that cannot actually stand upright upright on the table, so it falls on its side +Pulling something out of something +Scooping something up with something +Pretending to be tearing something that is not tearable +Burying something in something +Tipping something over +Tilting something with something on it slightly so it doesn't fall down +Pretending to put something onto something +Bending something until it breaks +Letting something roll down a slanted surface +Trying to bend something unbendable so nothing happens +Bending something so that it deforms +Digging something out of something +Pretending to put something underneath something +Putting something on a flat surface without letting it roll +Putting something on the edge of something so it is not supported and falls down +Spreading something onto something +Pretending to put something behind something +Sprinkling something onto something +Something colliding with something and both come to a halt +Pushing something off of something +Putting something that can't roll onto a slanted surface, so it 
stays where it is +Lifting a surface with something on it until it starts sliding down +Pretending or failing to wipe something off of something +Trying but failing to attach something to something because it doesn't stick +Pulling something from behind of something +Pushing something so it spins +Pouring something onto something +Pulling two ends of something but nothing happens +Moving something and something so they pass each other +Pretending to sprinkle air onto something +Putting something that can't roll onto a slanted surface, so it slides down +Something colliding with something and both are being deflected +Pretending to squeeze something +Pulling something onto something +Putting something onto something else that cannot support it so it falls down +Lifting a surface with something on it but not enough for it to slide down +Pouring something out of something +Moving something and something so they collide with each other +Tipping something with something in it over, so something in it falls out +Letting something roll up a slanted surface, so it rolls back down +Pretending to scoop something up with something +Pretending to pour something out of something, but something is empty +Pulling two ends of something so that it gets stretched +Failing to put something into something because something does not fit +Pretending or trying and failing to twist something +Trying to pour something into something, but missing so it spills next to it +Something being deflected from something +Poking a stack of something so the stack collapses +Spilling something onto something +Pulling two ends of something so that it separates into two pieces +Pouring something into something until it overflows +Pretending to spread air onto something +Twisting (wringing) something wet until water comes out +Poking a hole into something soft +Spilling something next to something +Poking a stack of something without the stack collapsing +Putting something onto a slanted surface but it doesn't glide down +Pushing something onto something +Poking something so that it spins around +Spilling something behind something +Poking a hole into some substance diff --git a/tools/data/sthv2/label_map.txt b/tools/data/sthv2/label_map.txt new file mode 100644 index 0000000000..7dbb309b34 --- /dev/null +++ b/tools/data/sthv2/label_map.txt @@ -0,0 +1,174 @@ +Approaching something with your camera +Attaching something to something +Bending something so that it deforms +Bending something until it breaks +Burying something in something +Closing something +Covering something with something +Digging something out of something +Dropping something behind something +Dropping something in front of something +Dropping something into something +Dropping something next to something +Dropping something onto something +Failing to put something into something because something does not fit +Folding something +Hitting something with something +Holding something +Holding something behind something +Holding something in front of something +Holding something next to something +Holding something over something +Laying something on the table on its side, not upright +Letting something roll along a flat surface +Letting something roll down a slanted surface +Letting something roll up a slanted surface, so it rolls back down +Lifting a surface with something on it but not enough for it to slide down +Lifting a surface with something on it until it starts sliding down +Lifting something up completely without letting it drop down +Lifting something up 
completely, then letting it drop down +Lifting something with something on it +Lifting up one end of something without letting it drop down +Lifting up one end of something, then letting it drop down +Moving away from something with your camera +Moving part of something +Moving something across a surface until it falls down +Moving something across a surface without it falling down +Moving something and something away from each other +Moving something and something closer to each other +Moving something and something so they collide with each other +Moving something and something so they pass each other +Moving something away from something +Moving something away from the camera +Moving something closer to something +Moving something down +Moving something towards the camera +Moving something up +Opening something +Picking something up +Piling something up +Plugging something into something +Plugging something into something but pulling it right out as you remove your hand +Poking a hole into some substance +Poking a hole into something soft +Poking a stack of something so the stack collapses +Poking a stack of something without the stack collapsing +Poking something so it slightly moves +Poking something so lightly that it doesn't or almost doesn't move +Poking something so that it falls over +Poking something so that it spins around +Pouring something into something +Pouring something into something until it overflows +Pouring something onto something +Pouring something out of something +Pretending or failing to wipe something off of something +Pretending or trying and failing to twist something +Pretending to be tearing something that is not tearable +Pretending to close something without actually closing it +Pretending to open something without actually opening it +Pretending to pick something up +Pretending to poke something +Pretending to pour something out of something, but something is empty +Pretending to put something behind something +Pretending to put something into something +Pretending to put something next to something +Pretending to put something on a surface +Pretending to put something onto something +Pretending to put something underneath something +Pretending to scoop something up with something +Pretending to spread air onto something +Pretending to sprinkle air onto something +Pretending to squeeze something +Pretending to take something from somewhere +Pretending to take something out of something +Pretending to throw something +Pretending to turn something upside down +Pulling something from behind of something +Pulling something from left to right +Pulling something from right to left +Pulling something onto something +Pulling something out of something +Pulling two ends of something but nothing happens +Pulling two ends of something so that it gets stretched +Pulling two ends of something so that it separates into two pieces +Pushing something from left to right +Pushing something from right to left +Pushing something off of something +Pushing something onto something +Pushing something so it spins +Pushing something so that it almost falls off but doesn't +Pushing something so that it falls off the table +Pushing something so that it slightly moves +Pushing something with something +Putting number of something onto something +Putting something and something on the table +Putting something behind something +Putting something in front of something +Putting something into something +Putting something next to something +Putting something on a flat surface without 
letting it roll +Putting something on a surface +Putting something on the edge of something so it is not supported and falls down +Putting something onto a slanted surface but it doesn't glide down +Putting something onto something +Putting something onto something else that cannot support it so it falls down +Putting something similar to other things that are already on the table +Putting something that can't roll onto a slanted surface, so it slides down +Putting something that can't roll onto a slanted surface, so it stays where it is +Putting something that cannot actually stand upright upright on the table, so it falls on its side +Putting something underneath something +Putting something upright on the table +Putting something, something and something on the table +Removing something, revealing something behind +Rolling something on a flat surface +Scooping something up with something +Showing a photo of something to the camera +Showing something behind something +Showing something next to something +Showing something on top of something +Showing something to the camera +Showing that something is empty +Showing that something is inside something +Something being deflected from something +Something colliding with something and both are being deflected +Something colliding with something and both come to a halt +Something falling like a feather or paper +Something falling like a rock +Spilling something behind something +Spilling something next to something +Spilling something onto something +Spinning something so it continues spinning +Spinning something that quickly stops spinning +Spreading something onto something +Sprinkling something onto something +Squeezing something +Stacking number of something +Stuffing something into something +Taking one of many similar things on the table +Taking something from somewhere +Taking something out of something +Tearing something into two pieces +Tearing something just a little bit +Throwing something +Throwing something against something +Throwing something in the air and catching it +Throwing something in the air and letting it fall +Throwing something onto a surface +Tilting something with something on it slightly so it doesn't fall down +Tilting something with something on it until it falls off +Tipping something over +Tipping something with something in it over, so something in it falls out +Touching (without moving) part of something +Trying but failing to attach something to something because it doesn't stick +Trying to bend something unbendable so nothing happens +Trying to pour something into something, but missing so it spills next to it +Turning something upside down +Turning the camera downwards while filming something +Turning the camera left while filming something +Turning the camera right while filming something +Turning the camera upwards while filming something +Twisting (wringing) something wet until water comes out +Twisting something +Uncovering something +Unfolding something +Wiping something off of something diff --git a/tools/data/ucf101/label_map.txt b/tools/data/ucf101/label_map.txt new file mode 100644 index 0000000000..dd41d095c7 --- /dev/null +++ b/tools/data/ucf101/label_map.txt @@ -0,0 +1,101 @@ +ApplyEyeMakeup +ApplyLipstick +Archery +BabyCrawling +BalanceBeam +BandMarching +BaseballPitch +Basketball +BasketballDunk +BenchPress +Biking +Billiards +BlowDryHair +BlowingCandles +BodyWeightSquats +Bowling +BoxingPunchingBag +BoxingSpeedBag +BreastStroke +BrushingTeeth +CleanAndJerk +CliffDiving +CricketBowling 
+CricketShot +CuttingInKitchen +Diving +Drumming +Fencing +FieldHockeyPenalty +FloorGymnastics +FrisbeeCatch +FrontCrawl +GolfSwing +Haircut +Hammering +HammerThrow +HandstandPushups +HandstandWalking +HeadMassage +HighJump +HorseRace +HorseRiding +HulaHoop +IceDancing +JavelinThrow +JugglingBalls +JumpingJack +JumpRope +Kayaking +Knitting +LongJump +Lunges +MilitaryParade +Mixing +MoppingFloor +Nunchucks +ParallelBars +PizzaTossing +PlayingCello +PlayingDaf +PlayingDhol +PlayingFlute +PlayingGuitar +PlayingPiano +PlayingSitar +PlayingTabla +PlayingViolin +PoleVault +PommelHorse +PullUps +Punch +PushUps +Rafting +RockClimbingIndoor +RopeClimbing +Rowing +SalsaSpin +ShavingBeard +Shotput +SkateBoarding +Skiing +Skijet +SkyDiving +SoccerJuggling +SoccerPenalty +StillRings +SumoWrestling +Surfing +Swing +TableTennisShot +TaiChi +TennisSwing +ThrowDiscus +TrampolineJumping +Typing +UnevenBars +VolleyballSpiking +WalkingWithDog +WallPushups +WritingOnBoard +YoYo From abf449a53dfd402f8425d65072dd31dc0d1c6cc2 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Thu, 2 Sep 2021 18:39:48 +0800 Subject: [PATCH 244/414] [Doc] fix doc (#1073) * fix doc * update --- docs/index.rst | 1 + docs/merge_docs.sh | 6 ++++++ docs/stat.py | 4 ++-- docs/supported_datasets.md | 2 ++ docs_zh_CN/index.rst | 1 + docs_zh_CN/merge_docs.sh | 4 ++++ docs_zh_CN/stat.py | 4 ++-- 7 files changed, 18 insertions(+), 4 deletions(-) diff --git a/docs/index.rst b/docs/index.rst index fd26e0e7c2..749113fe03 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -27,6 +27,7 @@ You can switch between Chinese and English documents in the lower-left corner of recognition_models.md localization_models.md detection_models.md + skeleton_models.md .. toctree:: :maxdepth: 2 diff --git a/docs/merge_docs.sh b/docs/merge_docs.sh index 884c95e231..1c988cdba6 100755 --- a/docs/merge_docs.sh +++ b/docs/merge_docs.sh @@ -7,6 +7,8 @@ cat ../configs/localization/*/README.md | sed "s/md###t/html#t/g" | sed "s/#/#& cat ../configs/recognition/*/README.md | sed "s/md###t/html#t/g" | sed "s/#/#&/" | sed '1i\# Action Recognition Models' | sed 's/](\/docs\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' > recognition_models.md cat ../configs/recognition_audio/*/README.md | sed "s/md###t/html#t/g" | sed "s/#/#&/" | sed 's/](\/docs\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' >> recognition_models.md cat ../configs/detection/*/README.md | sed "s/md###t/html#t/g" | sed "s/#/#&/" | sed '1i\# Spatio Temporal Action Detection Models' | sed 's/](\/docs\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' > detection_models.md +cat ../configs/skeleton/*/README.md | sed "s/md###t/html#t/g" | sed "s/#/#&/" | sed '1i\# Skeleton-based Action Recognition Models' | sed 's/](\/docs\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' > skeleton_models.md + # demo cat ../demo/README.md | sed "s/md###t/html#t/g" | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' > demo.md @@ -29,6 +31,10 @@ sed -i 's/(\/tools\/data\/hmdb51\/README.md/(#hmdb51/g' supported_datasets.md sed -i 's/(\/tools\/data\/jester\/README.md/(#jester/g' supported_datasets.md sed -i 's/(\/tools\/data\/ava\/README.md/(#ava/g' supported_datasets.md sed -i 's/(\/tools\/data\/gym\/README.md/(#gym/g' supported_datasets.md +sed -i 's/(\/tools\/data\/omnisource\/README.md/(#omnisource/g' supported_datasets.md +sed -i 's/(\/tools\/data\/diving48\/README.md/(#diving48/g' 
supported_datasets.md +sed -i 's/(\/tools\/data\/skeleton\/README.md/(#skeleton/g' supported_datasets.md + cat prepare_data.md >> supported_datasets.md sed -i 's/](\/docs\//](/g' supported_datasets.md diff --git a/docs/stat.py b/docs/stat.py index cc5c8f4fcb..82f5e5fc72 100755 --- a/docs/stat.py +++ b/docs/stat.py @@ -41,7 +41,7 @@ def anchor(name): print(p) q = p.replace('\\', '\\\\').replace('?', '\\?') paperlinks[p] = ' '.join( - (f'[⇨]({splitext(basename(f))[0]}.html#{anchor(paperlink)})' + (f'[->]({splitext(basename(f))[0]}.html#{anchor(paperlink)})' for paperlink in re.findall( rf'\btitle\s*=\s*{{\s*{q}\s*}}.*?\n## (.*?)\s*[,;]?\s*\n', revcontent, re.DOTALL | re.IGNORECASE))) @@ -122,7 +122,7 @@ def anchor(name): print(p) q = p.replace('\\', '\\\\').replace('?', '\\?') paperlinks[p] = ', '.join( - (f'[{p.strip()} ⇨]({splitext(basename(f))[0]}.html#{anchor(p)})' + (f'[{p.strip()} ->]({splitext(basename(f))[0]}.html#{anchor(p)})' for p in re.findall( rf'\btitle\s*=\s*{{\s*{q}\s*}}.*?\n## (.*?)\s*[,;]?\s*\n', revcontent, re.DOTALL | re.IGNORECASE))) diff --git a/docs/supported_datasets.md b/docs/supported_datasets.md index 1bacadaa16..ca449c1602 100644 --- a/docs/supported_datasets.md +++ b/docs/supported_datasets.md @@ -12,6 +12,8 @@ - [Jester](/tools/data/jester/README.md) \[ [Homepage](https://20bn.com/datasets/jester/v1) \] - [GYM](/tools/data/gym/README.md) \[ [Homepage](https://sdolivia.github.io/FineGym/) \] - [ActivityNet](/tools/data/activitynet/README.md) \[ [Homepage](http://activity-net.org/) \] + - [Diving48](/tools/data/diving48/README.md) \[ [Homepage](http://www.svcl.ucsd.edu/projects/resound/dataset.html) \] + - [OmniSource](/tools/data/omnisource/README.md) \[ [Homepage](https://kennymckormick.github.io/omnisource/) \] - Temporal Action Detection - [ActivityNet](/tools/data/activitynet/README.md) \[ [Homepage](http://activity-net.org/) \] diff --git a/docs_zh_CN/index.rst b/docs_zh_CN/index.rst index ae4d89ba49..71a9527095 100644 --- a/docs_zh_CN/index.rst +++ b/docs_zh_CN/index.rst @@ -27,6 +27,7 @@ recognition_models.md localization_models.md detection_models.md + skeleton_models.md .. 
toctree:: :maxdepth: 2 diff --git a/docs_zh_CN/merge_docs.sh b/docs_zh_CN/merge_docs.sh index 07252f8bc7..2bc75b8f89 100755 --- a/docs_zh_CN/merge_docs.sh +++ b/docs_zh_CN/merge_docs.sh @@ -4,6 +4,7 @@ cat ../configs/localization/*/README_zh-CN.md | sed "s/md###t/html#t/g" | sed " cat ../configs/recognition/*/README_zh-CN.md | sed "s/md###t/html#t/g" | sed "s/#/#&/" | sed '1i\# 动作识别模型' | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' > recognition_models.md cat ../configs/recognition_audio/*/README_zh-CN.md | sed "s/md###t/html#t/g" | sed "s/#/#&/" | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' >> recognition_models.md cat ../configs/detection/*/README_zh-CN.md | sed "s/md###t/html#t/g" | sed "s/#/#&/" | sed '1i\# 时空动作检测模型' | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' > detection_models.md +cat ../configs/skeleton/*/README_zh-CN.md | sed "s/md###t/html#t/g" | sed "s/#/#&/" | sed '1i\# 骨骼动作识别模型' | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' > skeleton_models.md # gather datasets cat ../tools/data/*/README_zh-CN.md | sed 's/# 准备/# /g' | sed 's/#/#&/' > prepare_data.md @@ -23,6 +24,9 @@ sed -i 's/(\/tools\/data\/hmdb51\/README_zh-CN.md/(#hmdb51/g' supported_datasets sed -i 's/(\/tools\/data\/jester\/README_zh-CN.md/(#jester/g' supported_datasets.md sed -i 's/(\/tools\/data\/ava\/README_zh-CN.md/(#ava/g' supported_datasets.md sed -i 's/(\/tools\/data\/gym\/README_zh-CN.md/(#gym/g' supported_datasets.md +sed -i 's/(\/tools\/data\/omnisource\/README_zh-CN.md/(#omnisource/g' supported_datasets.md +sed -i 's/(\/tools\/data\/diving48\/README_zh-CN.md/(#diving48/g' supported_datasets.md +sed -i 's/(\/tools\/data\/skeleton\/README_zh-CN.md/(#skeleton/g' supported_datasets.md cat prepare_data.md >> supported_datasets.md sed -i 's/](\/docs_zh_CN\//](/g' supported_datasets.md diff --git a/docs_zh_CN/stat.py b/docs_zh_CN/stat.py index ca37f56c22..bfb0bb4417 100755 --- a/docs_zh_CN/stat.py +++ b/docs_zh_CN/stat.py @@ -40,7 +40,7 @@ def anchor(name): print(p) q = p.replace('\\', '\\\\').replace('?', '\\?') paperlinks[p] = ' '.join( - (f'[⇨]({splitext(basename(f))[0]}.html#{anchor(paperlink)})' + (f'[->]({splitext(basename(f))[0]}.html#{anchor(paperlink)})' for paperlink in re.findall( rf'\btitle\s*=\s*{{\s*{q}\s*}}.*?\n## (.*?)\s*[,;]?\s*\n', revcontent, re.DOTALL | re.IGNORECASE))) @@ -120,7 +120,7 @@ def anchor(name): print(p) q = p.replace('\\', '\\\\').replace('?', '\\?') paperlinks[p] = ', '.join( - (f'[{p.strip()} ⇨]({splitext(basename(f))[0]}.html#{anchor(p)})' + (f'[{p.strip()} ->]({splitext(basename(f))[0]}.html#{anchor(p)})' for p in re.findall( rf'\btitle\s*=\s*{{\s*{q}\s*}}.*?\n## (.*?)\s*[,;]?\s*\n', revcontent, re.DOTALL | re.IGNORECASE))) From 3afcb049a73f46d9f8f530916be3f44ff2f4678a Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Fri, 3 Sep 2021 20:53:46 +0800 Subject: [PATCH 245/414] [Update] Fix Path (#1131) --- ...ly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb.py | 10 +++++----- ...wonly_k400_pretrained_r50_4x16x1_120e_gym99_flow.py | 10 +++++----- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb.py b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb.py index 9ef7dfbe44..f5c3d79fbe 100644 --- 
a/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb.py +++ b/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb.py @@ -4,11 +4,11 @@ # dataset settings dataset_type = 'RawframeDataset' -data_root = 'data/gym/subaction_frames' -data_root_val = 'data/gym/subaction_frames' -ann_file_train = 'data/gym/annotations/gym99_train_frame.txt' -ann_file_val = 'data/gym/annotations/gym99_val_frame.txt' -ann_file_test = 'data/gym/annotations/gym99_val_frame.txt' +data_root = 'data/gym/rawframes' +data_root_val = 'data/gym/rawframes' +ann_file_train = 'data/gym/annotations/gym99_train_list_rawframes.txt' +ann_file_val = 'data/gym/annotations/gym99_val_list_rawframes.txt' +ann_file_test = 'data/gym/annotations/gym99_val_list_rawframes.txt' img_norm_cfg = dict( mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) train_pipeline = [ diff --git a/configs/recognition/slowonly/slowonly_k400_pretrained_r50_4x16x1_120e_gym99_flow.py b/configs/recognition/slowonly/slowonly_k400_pretrained_r50_4x16x1_120e_gym99_flow.py index 7ac7a0bedd..b561287f32 100644 --- a/configs/recognition/slowonly/slowonly_k400_pretrained_r50_4x16x1_120e_gym99_flow.py +++ b/configs/recognition/slowonly/slowonly_k400_pretrained_r50_4x16x1_120e_gym99_flow.py @@ -7,11 +7,11 @@ # dataset settings dataset_type = 'RawframeDataset' -data_root = 'data/gym/subaction_frames' -data_root_val = 'data/gym/subaction_frames' -ann_file_train = 'data/gym/annotations/gym99_train_frame.txt' -ann_file_val = 'data/gym/annotations/gym99_val_frame.txt' -ann_file_test = 'data/gym/annotations/gym99_val_frame.txt' +data_root = 'data/gym/rawframes' +data_root_val = 'data/gym/rawframes' +ann_file_train = 'data/gym/annotations/gym99_train_list_rawframes.txt' +ann_file_val = 'data/gym/annotations/gym99_val_list_rawframes.txt' +ann_file_test = 'data/gym/annotations/gym99_val_list_rawframes.txt' img_norm_cfg = dict(mean=[128, 128], std=[128, 128]) train_pipeline = [ dict(type='SampleFrames', clip_len=4, frame_interval=16, num_clips=1), From 0b155c7c4fecba6a305052f37b7c637c375a973e Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Sun, 5 Sep 2021 19:56:39 +0800 Subject: [PATCH 246/414] Update .readthedocs.yml --- .readthedocs.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index e35725f209..2c98050982 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -1,7 +1,5 @@ version: 2 -formats: all - python: version: 3.7 install: From fa707f92c72bde1806edd2b80ed64597e56af1d0 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Sun, 5 Sep 2021 20:53:29 +0800 Subject: [PATCH 247/414] update metafile (#1134) --- configs/detection/acrn/metafile.yml | 3 + configs/detection/ava/metafile.yml | 3 + configs/detection/lfb/metafile.yml | 3 + configs/localization/bmn/metafile.yml | 3 + configs/localization/bsn/metafile.yml | 3 + configs/localization/ssn/metafile.yml | 3 + configs/recognition/c3d/metafile.yml | 3 + configs/recognition/csn/metafile.yml | 21 ++++++ configs/recognition/i3d/metafile.yml | 3 + configs/recognition/omnisource/metafile.yml | 16 +++++ configs/recognition/r2plus1d/metafile.yml | 3 + configs/recognition/slowfast/metafile.yml | 3 + configs/recognition/slowonly/metafile.yml | 9 +++ configs/recognition/tanet/metafile.yml | 3 + configs/recognition/timesformer/metafile.yml | 70 ++++++++++++++++++++ configs/recognition/tin/metafile.yml | 3 + configs/recognition/tpn/metafile.yml | 3 + configs/recognition/trn/metafile.yml | 3 + 
configs/recognition/tsm/metafile.yml | 3 + configs/recognition/tsn/metafile.yml | 9 +++ configs/recognition/x3d/metafile.yml | 11 ++- configs/skeleton/posec3d/metafile.yml | 3 + model-index.yml | 1 + 23 files changed, 183 insertions(+), 2 deletions(-) create mode 100644 configs/recognition/timesformer/metafile.yml diff --git a/configs/detection/acrn/metafile.yml b/configs/detection/acrn/metafile.yml index f0d40187f8..50cacc7ff9 100644 --- a/configs/detection/acrn/metafile.yml +++ b/configs/detection/acrn/metafile.yml @@ -1,6 +1,9 @@ Collections: - Name: ACRN README: configs/detection/acrn/README.md + Paper: + URL: https://arxiv.org/abs/1807.10982 + Title: Actor-Centric Relation Network Models: - Config: configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.py In Collection: ACRN diff --git a/configs/detection/ava/metafile.yml b/configs/detection/ava/metafile.yml index abb89ce202..971abd7bd4 100644 --- a/configs/detection/ava/metafile.yml +++ b/configs/detection/ava/metafile.yml @@ -1,6 +1,9 @@ Collections: - Name: AVA README: configs/detection/ava/README.md + Paper: + URL: https://arxiv.org/abs/1705.08421 + Title: "AVA: A Video Dataset of Spatio-temporally Localized Atomic Visual Actions" Models: - Config: configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py In Collection: AVA diff --git a/configs/detection/lfb/metafile.yml b/configs/detection/lfb/metafile.yml index e9b15fc94b..90ec931e97 100644 --- a/configs/detection/lfb/metafile.yml +++ b/configs/detection/lfb/metafile.yml @@ -1,6 +1,9 @@ Collections: - Name: LFB README: configs/detection/lfb/README.md + Paper: + URL: https://arxiv.org/abs/1812.05038 + Title: Long-Term Feature Banks for Detailed Video Understanding Models: - Config: configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py In Collection: LFB diff --git a/configs/localization/bmn/metafile.yml b/configs/localization/bmn/metafile.yml index f2d52ca714..40eafd4f94 100644 --- a/configs/localization/bmn/metafile.yml +++ b/configs/localization/bmn/metafile.yml @@ -1,6 +1,9 @@ Collections: - Name: BMN README: configs/localization/bmn/README.md + Paper: + URL: https://arxiv.org/abs/1907.09702 + Title: "BMN: Boundary-Matching Network for Temporal Action Proposal Generation" Models: - Config: configs/localization/bmn/bmn_400x100_2x8_9e_activitynet_feature.py In Collection: BMN diff --git a/configs/localization/bsn/metafile.yml b/configs/localization/bsn/metafile.yml index d3ccfbbb5d..e1bddeb9cd 100644 --- a/configs/localization/bsn/metafile.yml +++ b/configs/localization/bsn/metafile.yml @@ -1,6 +1,9 @@ Collections: - Name: BSN README: configs/localization/bsn/README.md + Paper: + URL: https://arxiv.org/abs/1806.02964 + Title: "BSN: Boundary Sensitive Network for Temporal Action Proposal Generation" Models: - Config: - configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py diff --git a/configs/localization/ssn/metafile.yml b/configs/localization/ssn/metafile.yml index 29da43cabe..7470ea852c 100644 --- a/configs/localization/ssn/metafile.yml +++ b/configs/localization/ssn/metafile.yml @@ -1,6 +1,9 @@ Collections: - Name: SSN README: configs/localization/ssn/README.md + Paper: + URL: https://arxiv.org/abs/1704.06228 + Title: Temporal Action Detection with Structured Segment Networks Models: - Config: configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py In Collection: SSN diff --git a/configs/recognition/c3d/metafile.yml b/configs/recognition/c3d/metafile.yml index 
4fa40ddd54..865e8020ac 100644 --- a/configs/recognition/c3d/metafile.yml +++ b/configs/recognition/c3d/metafile.yml @@ -1,6 +1,9 @@ Collections: - Name: C3D README: configs/recognition/c3d/README.md + Paper: + URL: https://arxiv.org/abs/1412.0767 + Title: Learning Spatiotemporal Features with 3D Convolutional Networks Models: - Config: configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb.py In Collection: C3D diff --git a/configs/recognition/csn/metafile.yml b/configs/recognition/csn/metafile.yml index 6ad1d82831..c598555b0e 100644 --- a/configs/recognition/csn/metafile.yml +++ b/configs/recognition/csn/metafile.yml @@ -1,6 +1,9 @@ Collections: - Name: CSN README: configs/recognition/csn/README.md + Paper: + URL: https://arxiv.org/abs/1904.02811 + Title: Video Classification with Channel-Separated Convolutional Networks Models: - Config: configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py In Collection: CSN @@ -60,6 +63,9 @@ Models: Training Data: Kinetics-400 Modality: RGB Name: ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py + Converted From: + Weights: https://www.dropbox.com/s/3fihu6ti60047mu/ipCSN_152_kinetics_from_scratch_f129594342.pkl?dl=0 + Code: https://github.com/facebookresearch/VMZ/tree/b61b08194bc3273bef4c45fdfdd36c56c8579ff3 Results: - Dataset: Kinetics-400 Metrics: @@ -79,6 +85,9 @@ Models: Training Data: Kinetics-400 Modality: RGB Name: ipcsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py + Converted From: + Weights: https://www.dropbox.com/s/zpp3p0vn2i7bibl/ipCSN_152_ft_kinetics_from_ig65m_f133090949.pkl?dl=0 + Code: https://github.com/facebookresearch/VMZ/tree/b61b08194bc3273bef4c45fdfdd36c56c8579ff3 Results: - Dataset: Kinetics-400 Metrics: @@ -99,6 +108,9 @@ Models: Training Data: Kinetics-400 Modality: RGB Name: ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py + Converted From: + Weights: https://www.dropbox.com/s/ir7cr0hda36knux/ipCSN_152_ft_kinetics_from_sports1m_f111279053.pkl?dl=0 + Code: https://github.com/facebookresearch/VMZ/tree/b61b08194bc3273bef4c45fdfdd36c56c8579ff3 Results: - Dataset: Kinetics-400 Metrics: @@ -119,6 +131,9 @@ Models: Training Data: Kinetics-400 Modality: RGB Name: ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py + Converted From: + Weights: https://www.dropbox.com/s/46gcm7up60ssx5c/irCSN_152_kinetics_from_scratch_f98268019.pkl?dl=0 + Code: https://github.com/facebookresearch/VMZ/tree/b61b08194bc3273bef4c45fdfdd36c56c8579ff3 Results: - Dataset: Kinetics-400 Metrics: @@ -139,6 +154,9 @@ Models: Training Data: Kinetics-400 Modality: RGB Name: ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py + Converted From: + Weights: https://www.dropbox.com/s/gmd8r87l3wmkn3h/irCSN_152_ft_kinetics_from_ig65m_f126851907.pkl?dl=0 + Code: https://github.com/facebookresearch/VMZ/tree/b61b08194bc3273bef4c45fdfdd36c56c8579ff3 Results: - Dataset: Kinetics-400 Metrics: @@ -159,6 +177,9 @@ Models: Training Data: Kinetics-400 Modality: RGB Name: ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py + Converted From: + Weights: https://www.dropbox.com/s/zuoj1aqouh6bo6k/irCSN_152_ft_kinetics_from_sports1m_f101599884.pkl?dl=0 + Code: https://github.com/facebookresearch/VMZ/tree/b61b08194bc3273bef4c45fdfdd36c56c8579ff3 Results: - Dataset: Kinetics-400 Metrics: diff --git a/configs/recognition/i3d/metafile.yml b/configs/recognition/i3d/metafile.yml index 404a5334ff..22a7bfe33c 100644 --- a/configs/recognition/i3d/metafile.yml +++ 
b/configs/recognition/i3d/metafile.yml @@ -1,6 +1,9 @@ Collections: - Name: I3D README: configs/recognition/i3d/README.md + Paper: + URL: https://arxiv.org/abs/1705.07750 + Title: Quo Vadis, Action Recognition? A New Model and the Kinetics Dataset Models: - Config: configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py In Collection: I3D diff --git a/configs/recognition/omnisource/metafile.yml b/configs/recognition/omnisource/metafile.yml index 71fb7e6ed6..ae3db16e52 100644 --- a/configs/recognition/omnisource/metafile.yml +++ b/configs/recognition/omnisource/metafile.yml @@ -1,6 +1,10 @@ Collections: - Name: OmniSource README: configs/recognition/omnisource/README.md + Paper: + URL: https://arxiv.org/abs/2003.13042 + Title: Omni-sourced Webly-supervised Learning for Video Recognition + Models: - Config: configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_rgb.py In Collection: OmniSource @@ -303,6 +307,9 @@ Models: Training Data: Kinetics-400 Modality: RGB Name: tsn_omnisource_r50_1x1x3_100e_kinetics_rgb + Converted From: + Weights: https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmaction/models/kinetics400/omnisource/tsn_OmniSource_kinetics400_se_rgb_r50_seg3_f1s1_imagenet-4066cb7e.pth + Code: https://github.com/open-mmlab/mmaction Results: - Dataset: Kinetics-400 Metrics: @@ -323,6 +330,9 @@ Models: Training Data: Kinetics-400 Modality: RGB Name: tsn_IG1B_pretrained_omnisource_r50_1x1x3_100e_kinetics_rgb + Converted From: + Weights: https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmaction/models/kinetics400/omnisource/tsn_OmniSource_kinetics400_se_rgb_r50_seg3_f1s1_IG1B-25fc136b.pth + Code: https://github.com/open-mmlab/mmaction/ Results: - Dataset: Kinetics-400 Metrics: @@ -343,6 +353,9 @@ Models: Training Data: Kinetics-400 Modality: RGB Name: slowonly_r50_omnisource_4x16x1_256e_kinetics400_rgb + Converted From: + Weights: https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmaction/models/kinetics400/omnisource/slowonly_OmniSource_kinetics400_se_rgb_r50_seg1_4x16_scratch-71f7b8ee.pth + Code: https://github.com/open-mmlab/mmaction/ Results: - Dataset: Kinetics-400 Metrics: @@ -363,6 +376,9 @@ Models: Training Data: Kinetics-400 Modality: RGB Name: slowonly_r101_omnisource_8x8x1_196e_kinetics400_rgb + Converted From: + Weights: https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmaction/models/kinetics400/omnisource/slowonly_OmniSource_kinetics400_se_rgb_r101_seg1_8x8_scratch-2f838cb0.pth + Code: https://github.com/open-mmlab/mmaction/ Results: - Dataset: Kinetics-400 Metrics: diff --git a/configs/recognition/r2plus1d/metafile.yml b/configs/recognition/r2plus1d/metafile.yml index a88409b3bb..f7056af424 100644 --- a/configs/recognition/r2plus1d/metafile.yml +++ b/configs/recognition/r2plus1d/metafile.yml @@ -1,6 +1,9 @@ Collections: - Name: R2Plus1D README: configs/recognition/r2plus1d/README.md + Paper: + URL: https://arxiv.org/abs/1711.11248 + Title: A Closer Look at Spatiotemporal Convolutions for Action Recognition Models: - Config: configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py In Collection: R2Plus1D diff --git a/configs/recognition/slowfast/metafile.yml b/configs/recognition/slowfast/metafile.yml index a55a9fdec0..9c2bfe5d14 100644 --- a/configs/recognition/slowfast/metafile.yml +++ b/configs/recognition/slowfast/metafile.yml @@ -1,6 +1,9 @@ Collections: - Name: SlowFast README: configs/recognition/slowfast/README.md + Paper: + URL: https://arxiv.org/abs/1812.03982 + Title: SlowFast Networks for Video 
Recognition Models: - Config: configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py In Collection: SlowFast diff --git a/configs/recognition/slowonly/metafile.yml b/configs/recognition/slowonly/metafile.yml index 0e8d1e8196..5e604c058a 100644 --- a/configs/recognition/slowonly/metafile.yml +++ b/configs/recognition/slowonly/metafile.yml @@ -1,6 +1,9 @@ Collections: - Name: SlowOnly README: configs/recognition/slowonly/README.md + Paper: + URL: https://arxiv.org/abs/1812.03982 + Title: SlowFast Networks for Video Recognition Models: - Config: configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py In Collection: SlowOnly @@ -15,6 +18,9 @@ Models: Training Data: Kinetics-400 Modality: RGB Name: slowonly_r50_omnisource_4x16x1_256e_kinetics400_rgb + Converted From: + Weights: https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmaction/models/kinetics400/omnisource/slowonly_OmniSource_kinetics400_se_rgb_r50_seg1_4x16_scratch-71f7b8ee.pth + Code: https://github.com/open-mmlab/mmaction/ Results: - Dataset: Kinetics-400 Metrics: @@ -55,6 +61,9 @@ Models: Training Data: Kinetics-400 Modality: RGB Name: slowonly_r101_omnisource_8x8x1_196e_kinetics400_rgb + Converted From: + Weights: https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmaction/models/kinetics400/omnisource/slowonly_OmniSource_kinetics400_se_rgb_r101_seg1_8x8_scratch-2f838cb0.pth + Code: https://github.com/open-mmlab/mmaction/ Results: - Dataset: Kinetics-400 Metrics: diff --git a/configs/recognition/tanet/metafile.yml b/configs/recognition/tanet/metafile.yml index 19a5d47839..d32d710b17 100644 --- a/configs/recognition/tanet/metafile.yml +++ b/configs/recognition/tanet/metafile.yml @@ -1,6 +1,9 @@ Collections: - Name: TANet README: configs/recognition/tanet/README.md + Paper: + URL: https://arxiv.org/abs/2005.06803 + Title: "TAM: Temporal Adaptive Module for Video Recognition" Models: - Config: configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb.py In Collection: TANet diff --git a/configs/recognition/timesformer/metafile.yml b/configs/recognition/timesformer/metafile.yml new file mode 100644 index 0000000000..a93c57b392 --- /dev/null +++ b/configs/recognition/timesformer/metafile.yml @@ -0,0 +1,70 @@ +Collections: +- Name: TimeSformer + README: configs/recognition/timesformer/README.md + Paper: + URL: https://arxiv.org/abs/2102.05095 + Title: Is Space-Time Attention All You Need for Video Understanding +Models: +- Config: configs/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb.py + In Collection: TimeSformer + Metadata: + Architecture: TimeSformer + Batch Size: 8 + Epochs: 15 + Pretrained: ImageNet-21K + Resolution: short-side 320 + Training Data: Kinetics-400 + Training Resources: 8 GPUs + Modality: RGB + Name: timesformer_divST_8x32x1_15e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + Top 1 Accuracy: 77.92 + Top 5 Accuracy: 93.29 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb/timesformer_divST_8x32x1_15e_kinetics400_rgb.json + Training Log: https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb/timesformer_divST_8x32x1_15e_kinetics400_rgb.log + Weights: https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb/timesformer_divST_8x32x1_15e_kinetics400_rgb-3f8e5d03.pth +- Config: 
configs/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb.py + In Collection: TimeSformer + Metadata: + Architecture: TimeSformer + Batch Size: 7 + Epochs: 15 + Pretrained: ImageNet-21K + Resolution: short-side 320 + Training Data: Kinetics-400 + Training Resources: 8 GPUs + Modality: RGB + Name: timesformer_jointST_8x32x1_15e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + Top 1 Accuracy: 77.01 + Top 5 Accuracy: 93.08 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb/timesformer_jointST_8x32x1_15e_kinetics400_rgb.json + Training Log: https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb/timesformer_jointST_8x32x1_15e_kinetics400_rgb.log + Weights: https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb/timesformer_jointST_8x32x1_15e_kinetics400_rgb-0d6e3984.pth +- Config: configs/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.py + In Collection: TimeSformer + Metadata: + Architecture: TimeSformer + Batch Size: 8 + Epochs: 15 + Pretrained: ImageNet-21K + Resolution: short-side 320 + Training Data: Kinetics-400 + Training Resources: 8 GPUs + Modality: RGB + Name: timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + Top 1 Accuracy: 76.93 + Top 5 Accuracy: 92.90 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.json + Training Log: https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.log + Weights: https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb-0cf829cd.pth diff --git a/configs/recognition/tin/metafile.yml b/configs/recognition/tin/metafile.yml index 2fe338d80d..a820f93c9c 100644 --- a/configs/recognition/tin/metafile.yml +++ b/configs/recognition/tin/metafile.yml @@ -1,6 +1,9 @@ Collections: - Name: TIN README: configs/recognition/tin/README.md + Paper: + URL: https://arxiv.org/abs/2001.06499 + Title: Temporal Interlacing Network Models: - Config: configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py In Collection: TIN diff --git a/configs/recognition/tpn/metafile.yml b/configs/recognition/tpn/metafile.yml index ecf75fe278..44b1130d6e 100644 --- a/configs/recognition/tpn/metafile.yml +++ b/configs/recognition/tpn/metafile.yml @@ -1,6 +1,9 @@ Collections: - Name: TPN README: configs/recognition/tpn/README.md + Paper: + URL: https://arxiv.org/abs/2004.03548 + Title: Temporal Pyramid Network for Action Recognition Models: - Config: configs/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb.py In Collection: TPN diff --git a/configs/recognition/trn/metafile.yml b/configs/recognition/trn/metafile.yml index 81578d8437..39bedaa26f 100644 --- a/configs/recognition/trn/metafile.yml +++ b/configs/recognition/trn/metafile.yml @@ -1,6 +1,9 @@ Collections: - Name: TRN README: configs/recognition/trn/README.md + Paper: + URL: https://arxiv.org/abs/1711.08496 + Title: Temporal Relational Reasoning in Videos Models: - Config: configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py In Collection: TRN 
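
The pattern is uniform across this patch: every `Collections` entry gains a `Paper` mapping with `URL` and `Title` keys, and converted checkpoints additionally record their provenance under `Converted From` with `Weights` and `Code`. A minimal sketch of how the new fields can be sanity-checked, assuming PyYAML is installed; the path and the check itself are illustrative and not part of the patch:

```python
import yaml

# Illustrative path; any metafile touched by this patch has the same layout.
with open('configs/recognition/tsm/metafile.yml') as f:
    meta = yaml.safe_load(f)

for collection in meta.get('Collections', []):
    # After this patch, every collection should carry both citation keys.
    paper = collection.get('Paper', {})
    assert {'URL', 'Title'} <= set(paper), f"incomplete Paper info: {collection['Name']}"

for model in meta.get('Models', []):
    converted = model.get('Converted From')
    if converted is not None:
        # Converted checkpoints record the upstream weights and code base.
        assert {'Weights', 'Code'} <= set(converted), model['Name']
```
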
diff --git a/configs/recognition/tsm/metafile.yml b/configs/recognition/tsm/metafile.yml index 3dd07ec59b..f8bfab65c6 100644 --- a/configs/recognition/tsm/metafile.yml +++ b/configs/recognition/tsm/metafile.yml @@ -1,6 +1,9 @@ Collections: - Name: TSM README: configs/recognition/tsm/README.md + Paper: + URL: https://arxiv.org/abs/1811.08383 + Title: "TSM: Temporal Shift Module for Efficient Video Understanding" Models: - Config: configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py In Collection: TSM diff --git a/configs/recognition/tsn/metafile.yml b/configs/recognition/tsn/metafile.yml index dfb6774980..40325d2e98 100644 --- a/configs/recognition/tsn/metafile.yml +++ b/configs/recognition/tsn/metafile.yml @@ -1,6 +1,9 @@ Collections: - Name: TSN README: configs/recognition/tsn/README.md + Paper: + URL: https://arxiv.org/abs/1608.00859 + Title: "Temporal Segment Networks: Towards Good Practices for Deep Action Recognition" Models: - Config: configs/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb.py In Collection: TSN @@ -468,6 +471,9 @@ Models: Training Data: Kinetics-400 Modality: RGB Name: tsn_omnisource_r50_1x1x3_100e_kinetics_rgb + Converted From: + Weights: https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmaction/models/kinetics400/omnisource/tsn_OmniSource_kinetics400_se_rgb_r50_seg3_f1s1_imagenet-4066cb7e.pth + Code: https://github.com/open-mmlab/mmaction Results: - Dataset: Kinetics-400 Metrics: @@ -488,6 +494,9 @@ Models: Training Data: Kinetics-400 Modality: RGB Name: tsn_IG1B_pretrained_r50_1x1x3_100e_kinetics_rgb + Converted From: + Weights: https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmaction/models/kinetics400/omnisource/tsn_OmniSource_kinetics400_se_rgb_r50_seg3_f1s1_IG1B-25fc136b.pth + Code: https://github.com/open-mmlab/mmaction/ Results: - Dataset: Kinetics-400 Metrics: diff --git a/configs/recognition/x3d/metafile.yml b/configs/recognition/x3d/metafile.yml index 38a9f69eb9..2608a6a910 100644 --- a/configs/recognition/x3d/metafile.yml +++ b/configs/recognition/x3d/metafile.yml @@ -1,6 +1,9 @@ Collections: - Name: X3D README: configs/recognition/x3d/README.md + Paper: + URL: https://arxiv.org/abs/2004.04730 + Title: "X3D: Expanding Architectures for Efficient Video Recognition" Models: - Config: configs/recognition/x3d/x3d_s_13x6x1_facebook_kinetics400_rgb.py In Collection: X3D @@ -13,11 +16,13 @@ Models: Training Data: Kinetics-400 Modality: RGB Name: x3d_s_13x6x1_facebook_kinetics400_rgb + Converted From: + Weights: https://dl.fbaipublicfiles.com/pyslowfast/x3d_models/x3d_s.pyth + Code: https://github.com/facebookresearch/SlowFast/ Results: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 73.2 - top1 10-view: 72.7 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/x3d/facebook/x3d_s_facebook_13x6x1_kinetics400_rgb_20201027-623825a0.pth reference top1 10-view: 73.1 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] @@ -33,11 +38,13 @@ Models: Training Data: Kinetics-400 Modality: RGB Name: x3d_m_16x5x1_facebook_kinetics400_rgb + Converted From: + Weights: https://dl.fbaipublicfiles.com/pyslowfast/x3d_models/x3d_s.pyth + Code: https://github.com/facebookresearch/SlowFast/ Results: - Dataset: Kinetics-400 Metrics: Top 1 Accuracy: 75.6 - top1 10-view: 75.0 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/x3d/facebook/x3d_m_facebook_16x5x1_kinetics400_rgb_20201027-3f42382a.pth reference top1 10-view: 75.1 
[[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] diff --git a/configs/skeleton/posec3d/metafile.yml b/configs/skeleton/posec3d/metafile.yml index a9b87a714d..cd3fc3e2f3 100644 --- a/configs/skeleton/posec3d/metafile.yml +++ b/configs/skeleton/posec3d/metafile.yml @@ -1,6 +1,9 @@ Collections: - Name: PoseC3D README: configs/skeleton/posec3d/README.md + Paper: + URL: https://arxiv.org/abs/2104.13586 + Title: Revisiting Skeleton-based Action Recognition Models: - Config: configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint.py In Collection: PoseC3D diff --git a/model-index.yml b/model-index.yml index 1336135c17..e76d6e5b17 100644 --- a/model-index.yml +++ b/model-index.yml @@ -8,6 +8,7 @@ Import: - configs/recognition/r2plus1d/metafile.yml - configs/recognition/slowfast/metafile.yml - configs/recognition/slowonly/metafile.yml +- configs/recognition/timesformer/metafile.yml - configs/recognition/tin/metafile.yml - configs/recognition/tpn/metafile.yml - configs/recognition/tsm/metafile.yml From 95d576ee19c242a1ce4d83c9958739d224c9d737 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Wed, 8 Sep 2021 19:46:46 +0800 Subject: [PATCH 248/414] [Fix] Compatible with new mmcls (#1139) --- mmaction/models/recognizers/base.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/mmaction/models/recognizers/base.py b/mmaction/models/recognizers/base.py index b9f97f0241..25356ef7d2 100644 --- a/mmaction/models/recognizers/base.py +++ b/mmaction/models/recognizers/base.py @@ -154,6 +154,11 @@ def extract_feat(self, imgs): x = self.backbone.features(imgs) elif self.backbone_from == 'timm': x = self.backbone.forward_features(imgs) + elif self.backbone_from == 'mmcls': + x = self.backbone(imgs) + if isinstance(x, tuple): + assert len(x) == 1 + x = x[0] else: x = self.backbone(imgs) return x From fc9f4c3029dac95148b2b5a6e61d706bd26cbad3 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Thu, 9 Sep 2021 14:35:13 +0800 Subject: [PATCH 249/414] [Fix] Fix performance number of some checkpoints (#1138) * fix x3d * update * update * performance number fix * debug * revert tsm change --- configs/recognition/slowfast/README.md | 2 +- configs/recognition/slowfast/README_zh-CN.md | 2 +- configs/recognition/slowfast/metafile.yml | 4 ++-- configs/recognition/tsm/metafile.yml | 2 +- configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py | 2 +- .../tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py | 2 +- configs/recognition/tsn/README.md | 2 +- configs/recognition/tsn/README_zh-CN.md | 2 +- configs/recognition/tsn/metafile.yml | 4 ++-- .../tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py | 2 +- .../tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py | 2 +- .../tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb.py | 2 +- .../recognition/x3d/x3d_m_16x5x1_facebook_kinetics400_rgb.py | 2 +- .../recognition/x3d/x3d_s_13x6x1_facebook_kinetics400_rgb.py | 2 +- mmaction/models/heads/tsn_head.py | 3 +++ 15 files changed, 19 insertions(+), 16 deletions(-) diff --git a/configs/recognition/slowfast/README.md b/configs/recognition/slowfast/README.md index 5f07c2cc31..09b055c478 100644 --- a/configs/recognition/slowfast/README.md +++ b/configs/recognition/slowfast/README.md @@ -21,7 +21,7 @@ |config | resolution | gpus | backbone |pretrain| top1 acc| top5 acc | inference_time(video/s) | gpu_mem(M) | ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| 
|[slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 256|8x4| ResNet50|None |74.75|91.73|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb_20200728-145f1097.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log.json)| -|[slowfast_r50_video_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py) |short-side 256|8| ResNet50|None |74.34|91.58|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/slowfast_r50_video_4x16x1_256e_kinetics400_rgb_20200826-f85b90c5.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log.json)| +|[slowfast_r50_video_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py) |short-side 256|8| ResNet50|None |73.95|91.50|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/slowfast_r50_video_4x16x1_256e_kinetics400_rgb_20200826-f85b90c5.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log.json)| |[slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 320|8x2| ResNet50|None |76.0|92.54|1.6 ((32+4)x10x3 frames)|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_256e_kinetics400_rgb_20210722-04e43ed4.pth)| [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log.json)| |[slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 320|8x2| ResNet50|None |76.34|92.67|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb_20210722-bb725050.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log.json)| |[slowfast_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |short-side 256|8x4| ResNet50 |None 
|75.61|92.34|x|9062|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb_20200810-863812c2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log.json)| diff --git a/configs/recognition/slowfast/README_zh-CN.md b/configs/recognition/slowfast/README_zh-CN.md index 64be756818..18441b1818 100644 --- a/configs/recognition/slowfast/README_zh-CN.md +++ b/configs/recognition/slowfast/README_zh-CN.md @@ -21,7 +21,7 @@ |配置文件 | 分辨率 | GPU 数量 | 主干网络 |预训练| top1 准确率| top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M)| ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| |[slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) |短边256|8x4| ResNet50|None |74.75|91.73|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb_20200728-145f1097.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log.json)| -|[slowfast_r50_video_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py) |短边256|8| ResNet50|None |74.34|91.58|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/slowfast_r50_video_4x16x1_256e_kinetics400_rgb_20200826-f85b90c5.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log.json)| +|[slowfast_r50_video_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py) |短边256|8| ResNet50|None |73.95|91.50|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/slowfast_r50_video_4x16x1_256e_kinetics400_rgb_20200826-f85b90c5.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log.json)| |[slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) |短边320|8x2| ResNet50|None |76.0|92.54|1.6 ((32+4)x10x3 frames)|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_256e_kinetics400_rgb_20210722-04e43ed4.pth)| [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log.json)| 
|[slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb.py) |短边320|8x2| ResNet50|None |76.34|92.67|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb_20210722-bb725050.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log.json)| |[slowfast_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |短边256|8x4| ResNet50 |None |75.61|92.34|x|9062|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb_20200810-863812c2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log.json)| diff --git a/configs/recognition/slowfast/metafile.yml b/configs/recognition/slowfast/metafile.yml index 9c2bfe5d14..f0da9e4f79 100644 --- a/configs/recognition/slowfast/metafile.yml +++ b/configs/recognition/slowfast/metafile.yml @@ -45,8 +45,8 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - Top 1 Accuracy: 74.34 - Top 5 Accuracy: 91.58 + Top 1 Accuracy: 73.95 + Top 5 Accuracy: 91.50 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log diff --git a/configs/recognition/tsm/metafile.yml b/configs/recognition/tsm/metafile.yml index f8bfab65c6..d9b23cecd5 100644 --- a/configs/recognition/tsm/metafile.yml +++ b/configs/recognition/tsm/metafile.yml @@ -252,7 +252,7 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - Top 1 Accuracy: 72.8 + Top 1 Accuracy: 72.80 Top 5 Accuracy: 90.75 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20210621_115844.log.json diff --git a/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py index 7dcf579f21..d28b979fd9 100644 --- a/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py +++ b/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py @@ -57,7 +57,7 @@ test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), - dict(type='CenterCrop', crop_size=224), + dict(type='TenCrop', crop_size=224), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py b/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py index 8955c8a74f..150e0f14e3 100644 --- a/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py +++ b/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py @@ -54,7 +54,7 @@ test_mode=True), 
dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), - dict(type='CenterCrop', crop_size=224), + dict(type='TenCrop', crop_size=224), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/configs/recognition/tsn/README.md b/configs/recognition/tsn/README.md index f20034ebbc..1752ad93a3 100644 --- a/configs/recognition/tsn/README.md +++ b/configs/recognition/tsn/README.md @@ -56,7 +56,7 @@ |tsn_r50_320p_1x1x8_kinetics400_twostream [1: 1]* |x|x| ResNet50| ImageNet |74.64|91.77| x | x | x | x | x|x|x| |[tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py) |short-side 320|8| ResNet50 | ImageNet |71.11|90.04| x | x | x | 8343 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014-5ae1ee79.pth) |[log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014.json)| |[tsn_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb.py) |340x256|8| ResNet50 | ImageNet|70.77|89.3|[68.75](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[88.42](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|12.2 (8x10 frames)|8344| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_dense_1x1x8_100e_kinetics400_rgb_20200606-e925e6e3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/20200606_003901.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/20200606_003901.log.json)| -|[tsn_r50_video_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet | 71.79 | 90.25 |x|x|x|21558| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_1x1x8_100e_kinetics400_rgb_20200702-568cde33.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log.json)| +|[tsn_r50_video_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet | 71.14 | 89.63 |x|x|x|21558| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_1x1x8_100e_kinetics400_rgb_20200702-568cde33.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log.json)| 
|[tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet | 70.40 | 89.12 |x|x|x|21553| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb_20200703-0f19175f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_dense_100e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_dense_100e_kinetics400_rgb.log.json)| Here, We use [1: 1] to indicate that we combine rgb and flow score with coefficients 1: 1 to get the two-stream prediction (without applying softmax). diff --git a/configs/recognition/tsn/README_zh-CN.md b/configs/recognition/tsn/README_zh-CN.md index 1e3e89bc94..69e95459a5 100644 --- a/configs/recognition/tsn/README_zh-CN.md +++ b/configs/recognition/tsn/README_zh-CN.md @@ -56,7 +56,7 @@ |tsn_r50_320p_1x1x8_kinetics400_twostream [1: 1]* |x|x| ResNet50| ImageNet |74.64|91.77| x | x | x | x | x|x|x| |[tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py) |短边 320|8| ResNet50 | ImageNet |71.11|90.04| x | x | x | 8343 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014-5ae1ee79.pth) |[log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014.json)| |[tsn_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb.py) |340x256|8| ResNet50 | ImageNet|70.77|89.3|[68.75](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[88.42](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|12.2 (8x10 frames)|8344| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_dense_1x1x8_100e_kinetics400_rgb_20200606-e925e6e3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/20200606_003901.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/20200606_003901.log.json)| -|[tsn_r50_video_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py) |短边 256|8| ResNet50| ImageNet | 71.79 | 90.25 |x|x|x|21558| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_1x1x8_100e_kinetics400_rgb_20200702-568cde33.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log.json)| +|[tsn_r50_video_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py) |短边 256|8| 
ResNet50| ImageNet | 71.14 | 89.63 |x|x|x|21558| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_1x1x8_100e_kinetics400_rgb_20200702-568cde33.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log.json)| |[tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb.py) |短边 256|8| ResNet50| ImageNet | 70.40 | 89.12 |x|x|x|21553| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb_20200703-0f19175f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_dense_100e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_dense_100e_kinetics400_rgb.log.json)| 这里,MMAction2 使用 [1: 1] 表示以 1: 1 的比例融合 RGB 和光流两分支的融合结果(融合前不经过 softmax) diff --git a/configs/recognition/tsn/metafile.yml b/configs/recognition/tsn/metafile.yml index 40325d2e98..fa941ed5b2 100644 --- a/configs/recognition/tsn/metafile.yml +++ b/configs/recognition/tsn/metafile.yml @@ -383,8 +383,8 @@ Models: Results: - Dataset: Kinetics-400 Metrics: - Top 1 Accuracy: 71.79 - Top 5 Accuracy: 90.25 + Top 1 Accuracy: 71.14 + Top 5 Accuracy: 89.63 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log diff --git a/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py index 7e3cf98476..6ca137a845 100644 --- a/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py +++ b/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py @@ -55,7 +55,7 @@ test_mode=True), dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), - dict(type='ThreeCrop', crop_size=256), + dict(type='TenCrop', crop_size=224), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py index ad67dcb74d..aff8c54d2a 100644 --- a/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py +++ b/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py @@ -50,7 +50,7 @@ test_mode=True), dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), - dict(type='TenCrop', crop_size=224), + dict(type='ThreeCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/configs/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb.py b/configs/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb.py index 
7c6d5e820e..e2f0107262 100644 --- a/configs/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb.py +++ b/configs/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb.py @@ -55,7 +55,7 @@ test_mode=True), dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), - dict(type='CenterCrop', crop_size=224), + dict(type='TenCrop', crop_size=224), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/configs/recognition/x3d/x3d_m_16x5x1_facebook_kinetics400_rgb.py b/configs/recognition/x3d/x3d_m_16x5x1_facebook_kinetics400_rgb.py index 24b0bb799e..baaed73fb4 100644 --- a/configs/recognition/x3d/x3d_m_16x5x1_facebook_kinetics400_rgb.py +++ b/configs/recognition/x3d/x3d_m_16x5x1_facebook_kinetics400_rgb.py @@ -15,7 +15,7 @@ test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), - dict(type='CenterCrop', crop_size=256), + dict(type='ThreeCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/configs/recognition/x3d/x3d_s_13x6x1_facebook_kinetics400_rgb.py b/configs/recognition/x3d/x3d_s_13x6x1_facebook_kinetics400_rgb.py index 5815d8ab44..0de5cf95ed 100644 --- a/configs/recognition/x3d/x3d_s_13x6x1_facebook_kinetics400_rgb.py +++ b/configs/recognition/x3d/x3d_s_13x6x1_facebook_kinetics400_rgb.py @@ -15,7 +15,7 @@ test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 192)), - dict(type='CenterCrop', crop_size=192), + dict(type='ThreeCrop', crop_size=192), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/mmaction/models/heads/tsn_head.py b/mmaction/models/heads/tsn_head.py index 5b34bfa430..73d9ae4f3b 100644 --- a/mmaction/models/heads/tsn_head.py +++ b/mmaction/models/heads/tsn_head.py @@ -74,6 +74,9 @@ def forward(self, x, num_segs): """ # [N * num_segs, in_channels, 7, 7] if self.avg_pool is not None: + if isinstance(x, tuple): + shapes = [y.shape for y in x] + assert 1 == 0, f'x is tuple {shapes}' x = self.avg_pool(x) # [N * num_segs, in_channels, 1, 1] x = x.reshape((-1, num_segs) + x.shape[1:]) From c6cc475ff810318ff9701b090b29d1bd83da7305 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Sat, 11 Sep 2021 16:44:31 +0800 Subject: [PATCH 250/414] add runtime dependency (#1144) --- requirements/runtime.txt | 3 +++ 1 file changed, 3 insertions(+) diff --git a/requirements/runtime.txt b/requirements/runtime.txt index e9fee58f79..9ab91d82ec 100644 --- a/requirements/runtime.txt +++ b/requirements/runtime.txt @@ -1,4 +1,7 @@ +decord +einops matplotlib numpy opencv-contrib-python Pillow +scipy From fe9fea0ea54f89d7f322650b22eaa2d00054e932 Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Sat, 11 Sep 2021 20:16:01 +0800 Subject: [PATCH 251/414] [Feature] Support st-gcn (#1123) * master * master 0721 * stgcn 0901 * stgcn 0901 * stgcn 0901 * stgcn 0901 * stgcn0906 * fix names 0907 * fix names 0907 * add unittest 0907 * add unittest 0907 * add docs 0908 * add docs 0908 * add docs 0908 * fix mmcls 0908 * add unittest 0909 * add unittest 0909 * add unittest 0909 * add unittest 0909 * add unittest 0909 * add unittest 0909 * add unittest 0909 * add meda readme 0910 * add meda readme 0910 * fix comments 0910 * fix comments 0910 * add unittest for 
graph 0910 * fix comments 0910 * fix comments 0910 * fix comments 0910 * fix comments 0910 * rename * rename * fix bug * fix bug * fix path Co-authored-by: Haodong Duan --- configs/skeleton/stgcn/README.md | 58 ++++ configs/skeleton/stgcn/metafile.yml | 22 ++ .../stgcn/stgcn_80e_ntu60_xsub_keypoint.py | 80 +++++ mmaction/datasets/pipelines/__init__.py | 10 +- mmaction/datasets/pipelines/formating.py | 51 ++++ mmaction/datasets/pipelines/pose_loading.py | 64 ++++ mmaction/models/__init__.py | 14 +- mmaction/models/backbones/__init__.py | 3 +- mmaction/models/backbones/stgcn.py | 280 ++++++++++++++++++ mmaction/models/heads/__init__.py | 4 +- mmaction/models/heads/stgcn_head.py | 61 ++++ mmaction/models/skeleton_gcn/__init__.py | 4 + mmaction/models/skeleton_gcn/base.py | 175 +++++++++++ mmaction/models/skeleton_gcn/skeletongcn.py | 29 ++ .../models/skeleton_gcn/utils/__init__.py | 3 + mmaction/models/skeleton_gcn/utils/graph.py | 165 +++++++++++ tests/test_data/test_formating.py | 34 ++- .../test_loadings/test_pose_loading.py | 36 ++- tests/test_models/__init__.py | 4 +- tests/test_models/base.py | 8 +- tests/test_models/test_backbones.py | 161 +++++++++- tests/test_models/test_head.py | 38 ++- .../test_recognizers/test_skeletongcn.py | 50 ++++ 23 files changed, 1334 insertions(+), 20 deletions(-) create mode 100644 configs/skeleton/stgcn/README.md create mode 100644 configs/skeleton/stgcn/metafile.yml create mode 100644 configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py create mode 100644 mmaction/models/backbones/stgcn.py create mode 100644 mmaction/models/heads/stgcn_head.py create mode 100644 mmaction/models/skeleton_gcn/__init__.py create mode 100644 mmaction/models/skeleton_gcn/base.py create mode 100644 mmaction/models/skeleton_gcn/skeletongcn.py create mode 100644 mmaction/models/skeleton_gcn/utils/__init__.py create mode 100644 mmaction/models/skeleton_gcn/utils/graph.py create mode 100644 tests/test_models/test_recognizers/test_skeletongcn.py diff --git a/configs/skeleton/stgcn/README.md b/configs/skeleton/stgcn/README.md new file mode 100644 index 0000000000..b4fca5d1e7 --- /dev/null +++ b/configs/skeleton/stgcn/README.md @@ -0,0 +1,58 @@ +# STGCN + +## Introduction + + + +```BibTeX +@inproceedings{yan2018spatial, + title={Spatial temporal graph convolutional networks for skeleton-based action recognition}, + author={Yan, Sijie and Xiong, Yuanjun and Lin, Dahua}, + booktitle={Thirty-second AAAI conference on artificial intelligence}, + year={2018} +} +``` + +## Model Zoo + +### NTU60_XSub + +| config | pseudo heatmap | gpus | backbone | Top-1 | ckpt | log | json | +| :----------------------------------------------------------- | :------------: | :---: | :----------: | :---: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [stgcn_80e_ntu60_xsub_keypoint](/configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py) | keypoint | 2 | STGCN | 86.91 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint-e7bb9653.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint.json) | + +## Train + +You can use the following command to train a model. 
+
+```shell
+python tools/train.py ${CONFIG_FILE} [optional arguments]
+```
+
+Example: train the STGCN model on the NTU60 dataset in a deterministic setup, with periodic validation.
+
+```shell
+python tools/train.py configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py \
+    --work-dir work_dirs/stgcn_80e_ntu60_xsub_keypoint \
+    --validate --seed 0 --deterministic
+```
+
+For more details, you can refer to the **Training setting** part in [getting_started](/docs/getting_started.md#training-setting).
+
+## Test
+
+You can use the following command to test a model.
+
+```shell
+python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments]
+```
+
+Example: test the STGCN model on the NTU60 dataset and dump the result to a pickle file.
+
+```shell
+python tools/test.py configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py \
+    checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy mean_class_accuracy \
+    --out result.pkl
+```
+
+For more details, you can refer to the **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset).
diff --git a/configs/skeleton/stgcn/metafile.yml b/configs/skeleton/stgcn/metafile.yml
new file mode 100644
index 0000000000..a5e8893a65
--- /dev/null
+++ b/configs/skeleton/stgcn/metafile.yml
@@ -0,0 +1,22 @@
+Collections:
+- Name: STGCN
+  README: configs/skeleton/stgcn/README.md
+Models:
+- Config: configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py
+  In Collection: STGCN
+  Metadata:
+    Architecture: STGCN
+    Batch Size: 16
+    Epochs: 80
+    Parameters: 3088704
+    Training Data: NTU60
+    Training Resources: 2 GPUs
+  Name: stgcn_80e_ntu60_xsub_keypoint
+  Results:
+  - Dataset: NTU60
+    Metrics:
+      mean Top 1 Accuracy: 86.91
+    Task: Skeleton-based Action Recognition
+  Training Json Log: https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint.json
+  Training Log: https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint.log
+  Weights: https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint-e7bb9653.pth
diff --git a/configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py b/configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py
new file mode 100644
index 0000000000..e23f501fe5
--- /dev/null
+++ b/configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py
@@ -0,0 +1,80 @@
+model = dict(
+    type='SkeletonGCN',
+    backbone=dict(
+        type='STGCN',
+        in_channels=3,
+        edge_importance_weighting=True,
+        graph_cfg=dict(layout='coco', strategy='spatial')),
+    cls_head=dict(
+        type='STGCNHead',
+        num_classes=60,
+        in_channels=256,
+        loss_cls=dict(type='CrossEntropyLoss')),
+    train_cfg=None,
+    test_cfg=None)
+
+dataset_type = 'PoseDataset'
+ann_file_train = 'data/posec3d/ntu60_xsub_train.pkl'
+ann_file_val = 'data/posec3d/ntu60_xsub_val.pkl'
+train_pipeline = [
+    dict(type='PaddingWithLoop', clip_len=300),
+    dict(type='PoseDecode'),
+    dict(type='FormatGCNInput', input_format='NCTVM'),
+    dict(type='PoseNormalize'),
+    dict(type='Collect', keys=['keypoint', 'label'], meta_keys=[]),
+    dict(type='ToTensor', keys=['keypoint'])
+]
+val_pipeline = [
+    dict(type='PaddingWithLoop', clip_len=300),
+    dict(type='PoseDecode'),
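+    # 'NCTVM' below: batch (N), channels (C: x, y and the confidence
+    # score), temporal length (T), keypoints per person (V) and person
+    # instances (M).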
dict(type='FormatGCNInput', input_format='NCTVM'), + dict(type='PoseNormalize'), + dict(type='Collect', keys=['keypoint', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['keypoint']) +] +data = dict( + videos_per_gpu=16, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix='', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix='', + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix='', + pipeline=test_pipeline)) + +# optimizer +optimizer = dict( + type='SGD', lr=0.1, momentum=0.9, weight_decay=0.0001, nesterov=True) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict(policy='step', step=[10, 50]) +total_epochs = 80 +checkpoint_config = dict(interval=5) +evaluation = dict(interval=5, metrics=['top_k_accuracy']) +log_config = dict(interval=100, hooks=[dict(type='TextLoggerHook')]) + +# runtime settings +dist_params = dict(backend='nccl') +log_level = 'INFO' +work_dir = './work_dirs/stgcn_80e_ntu60_xsub_keypoint/' +load_from = None +resume_from = None +workflow = [('train', 1)] diff --git a/mmaction/datasets/pipelines/__init__.py b/mmaction/datasets/pipelines/__init__.py index c942084cbb..09e3fa7649 100644 --- a/mmaction/datasets/pipelines/__init__.py +++ b/mmaction/datasets/pipelines/__init__.py @@ -6,8 +6,9 @@ RandomScale, Resize, TenCrop, ThreeCrop, TorchvisionTrans) from .compose import Compose -from .formating import (Collect, FormatAudioShape, FormatShape, ImageToTensor, - Rename, ToDataContainer, ToTensor, Transpose) +from .formating import (Collect, FormatAudioShape, FormatGCNInput, FormatShape, + ImageToTensor, Rename, ToDataContainer, ToTensor, + Transpose) from .loading import (AudioDecode, AudioDecodeInit, AudioFeatureSelector, BuildPseudoClip, DecordDecode, DecordInit, DenseSampleFrames, GenerateLocalizationLabels, @@ -17,7 +18,8 @@ PyAVDecodeMotionVector, PyAVInit, RawFrameDecode, SampleAVAFrames, SampleFrames, SampleProposalFrames, UntrimmedSampleFrames) -from .pose_loading import (GeneratePoseTarget, LoadKineticsPose, PoseDecode, +from .pose_loading import (GeneratePoseTarget, LoadKineticsPose, + PaddingWithLoop, PoseDecode, PoseNormalize, UniformSampleFrames) __all__ = [ @@ -36,5 +38,5 @@ 'RandomRescale', 'PyAVDecodeMotionVector', 'Rename', 'Imgaug', 'UniformSampleFrames', 'PoseDecode', 'LoadKineticsPose', 'GeneratePoseTarget', 'PIMSInit', 'PIMSDecode', 'TorchvisionTrans', - 'PytorchVideoTrans' + 'PytorchVideoTrans', 'PoseNormalize', 'FormatGCNInput', 'PaddingWithLoop' ] diff --git a/mmaction/datasets/pipelines/formating.py b/mmaction/datasets/pipelines/formating.py index b12978a7e0..be6fe12e9e 100644 --- a/mmaction/datasets/pipelines/formating.py +++ b/mmaction/datasets/pipelines/formating.py @@ -321,6 +321,7 @@ def __call__(self, results): # M = N_clips x L imgs = np.transpose(imgs, (0, 1, 4, 2, 3)) # P x M x C x H x W + if self.collapse: assert imgs.shape[0] == 1 imgs = imgs.squeeze(0) @@ -371,3 +372,53 @@ def __repr__(self): repr_str = self.__class__.__name__ repr_str += f"(input_format='{self.input_format}')" return repr_str + + +@PIPELINES.register_module() +class FormatGCNInput: + """Format final skeleton shape to the given input_format. + + Required keys are "keypoint" and "keypoint_score", added or modified + keys are "keypoint" and "input_shape". + + Args: + input_format (str): Define the final skeleton format. 
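+        num_person (int): The maximum number of persons to keep. Extra
+            persons are discarded and missing persons are zero-padded,
+            as implemented in ``__call__`` below. Default: 2.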
+    """
+
+    def __init__(self, input_format, num_person=2):
+        self.input_format = input_format
+        if self.input_format not in ['NCTVM']:
+            raise ValueError(
+                f'The input format {self.input_format} is invalid.')
+        self.num_person = num_person
+
+    def __call__(self, results):
+        """Performs the FormatGCNInput formatting.
+
+        Args:
+            results (dict): The resulting dict to be modified and passed
+                to the next transform in pipeline.
+        """
+        keypoint = results['keypoint']
+        keypoint_confidence = results['keypoint_score']
+        keypoint_confidence = np.expand_dims(keypoint_confidence, -1)
+        keypoint_3d = np.concatenate((keypoint, keypoint_confidence), axis=-1)
+        keypoint_3d = np.transpose(keypoint_3d,
+                                   (3, 1, 2, 0))  # M T V C -> C T V M
+
+        if keypoint_3d.shape[-1] < self.num_person:
+            pad_dim = self.num_person - keypoint_3d.shape[-1]
+            pad = np.zeros(
+                keypoint_3d.shape[:-1] + (pad_dim, ), dtype=keypoint_3d.dtype)
+            keypoint_3d = np.concatenate((keypoint_3d, pad), axis=-1)
+        elif keypoint_3d.shape[-1] > self.num_person:
+            keypoint_3d = keypoint_3d[:, :, :, :self.num_person]
+
+        results['keypoint'] = keypoint_3d
+        results['input_shape'] = keypoint_3d.shape
+        return results
+
+    def __repr__(self):
+        repr_str = self.__class__.__name__
+        repr_str += f"(input_format='{self.input_format}')"
+        return repr_str
diff --git a/mmaction/datasets/pipelines/pose_loading.py b/mmaction/datasets/pipelines/pose_loading.py
index 9fac43fceb..1f221f57db 100644
--- a/mmaction/datasets/pipelines/pose_loading.py
+++ b/mmaction/datasets/pipelines/pose_loading.py
@@ -629,3 +629,67 @@ def __repr__(self):
                     f'left_kp={self.left_kp}, '
                     f'right_kp={self.right_kp})')
         return repr_str
+
+
+@PIPELINES.register_module()
+class PaddingWithLoop:
+    """Sample frames from the video.
+
+    To sample an n-frame clip from the video, PaddingWithLoop samples
+    the frames from the zero index, and loops the frames if the length
+    of the video is less than the value of 'clip_len'.
+
+    Required keys are "total_frames", added or modified keys
+    are "frame_inds", "clip_len", "frame_interval" and "num_clips".
+
+    Args:
+        clip_len (int): Frames of each sampled output clip.
+        num_clips (int): Number of clips to be sampled. Default: 1.
+    """
+
+    def __init__(self, clip_len, num_clips=1):
+
+        self.clip_len = clip_len
+        self.num_clips = num_clips
+
+    def __call__(self, results):
+        num_frames = results['total_frames']
+
+        start = 0
+        inds = np.arange(start, start + self.clip_len)
+        inds = np.mod(inds, num_frames)
+
+        results['frame_inds'] = inds.astype(np.int)
+        results['clip_len'] = self.clip_len
+        results['frame_interval'] = None
+        results['num_clips'] = self.num_clips
+        return results
+
+
+@PIPELINES.register_module()
+class PoseNormalize:
+    """Normalize the range of keypoint values.
+
+    Args:
+        mean (list | tuple): The mean value of the keypoint values.
+        min_value (list | tuple): The minimum value of the keypoint values.
+        max_value (list | tuple): The maximum value of the keypoint values.
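+
+        With the default arguments, the values are those of 1080p frames:
+        mean (960., 540., 0.5) and max_value (1920, 1080, 1.) for the
+        (x, y, score) channels, with keypoint scores in [0, 1].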
+ """ + + def __init__(self, + mean=(960., 540., 0.5), + min_value=(0., 0., 0.), + max_value=(1920, 1080, 1.)): + self.mean = np.array(mean, dtype=np.float32).reshape(-1, 1, 1, 1) + self.min_value = np.array( + min_value, dtype=np.float32).reshape(-1, 1, 1, 1) + self.max_value = np.array( + max_value, dtype=np.float32).reshape(-1, 1, 1, 1) + + def __call__(self, results): + keypoint = results['keypoint'] + keypoint = (keypoint - self.mean) / (self.max_value - self.min_value) + results['keypoint'] = keypoint + results['keypoint_norm_cfg'] = dict( + mean=self.mean, min_value=self.min_value, max_value=self.max_value) + return results diff --git a/mmaction/models/__init__.py b/mmaction/models/__init__.py index 5aa3ccca1d..39b85a2e94 100644 --- a/mmaction/models/__init__.py +++ b/mmaction/models/__init__.py @@ -1,5 +1,5 @@ # Copyright (c) OpenMMLab. All rights reserved. -from .backbones import (C3D, X3D, MobileNetV2, MobileNetV2TSM, ResNet, +from .backbones import (C3D, STGCN, X3D, MobileNetV2, MobileNetV2TSM, ResNet, ResNet2Plus1d, ResNet3d, ResNet3dCSN, ResNet3dLayer, ResNet3dSlowFast, ResNet3dSlowOnly, ResNetAudio, ResNetTIN, ResNetTSM, TANet, TimeSformer) @@ -11,7 +11,7 @@ DividedSpatialAttentionWithNorm, DividedTemporalAttentionWithNorm, FFNWithNorm) from .heads import (ACRNHead, AudioTSNHead, AVARoIHead, BaseHead, BBoxHeadAVA, - FBOHead, I3DHead, LFBInferHead, SlowFastHead, + FBOHead, I3DHead, LFBInferHead, SlowFastHead, STGCNHead, TimeSformerHead, TPNHead, TRNHead, TSMHead, TSNHead, X3DHead) from .localizers import BMN, PEM, TEM @@ -22,13 +22,14 @@ from .recognizers import (AudioRecognizer, BaseRecognizer, Recognizer2D, Recognizer3D) from .roi_extractors import SingleRoIExtractor3D +from .skeleton_gcn import BaseGCN, SkeletonGCN __all__ = [ 'BACKBONES', 'HEADS', 'RECOGNIZERS', 'build_recognizer', 'build_head', - 'build_backbone', 'Recognizer2D', 'Recognizer3D', 'C3D', 'ResNet', + 'build_backbone', 'Recognizer2D', 'Recognizer3D', 'C3D', 'ResNet', 'STGCN', 'ResNet3d', 'ResNet2Plus1d', 'I3DHead', 'TSNHead', 'TSMHead', 'BaseHead', - 'BaseRecognizer', 'LOSSES', 'CrossEntropyLoss', 'NLLLoss', 'HVULoss', - 'ResNetTSM', 'ResNet3dSlowFast', 'SlowFastHead', 'Conv2plus1d', + 'STGCNHead', 'BaseRecognizer', 'LOSSES', 'CrossEntropyLoss', 'NLLLoss', + 'HVULoss', 'ResNetTSM', 'ResNet3dSlowFast', 'SlowFastHead', 'Conv2plus1d', 'ResNet3dSlowOnly', 'BCELossWithLogits', 'LOCALIZERS', 'build_localizer', 'PEM', 'TAM', 'TEM', 'BinaryLogisticRegressionLoss', 'BMN', 'BMNLoss', 'build_model', 'OHEMHingeLoss', 'SSNLoss', 'ResNet3dCSN', 'ResNetTIN', @@ -38,5 +39,6 @@ 'ConvAudio', 'AVARoIHead', 'MobileNetV2', 'MobileNetV2TSM', 'TANet', 'LFB', 'FBOHead', 'LFBInferHead', 'TRNHead', 'NECKS', 'TimeSformer', 'TimeSformerHead', 'DividedSpatialAttentionWithNorm', - 'DividedTemporalAttentionWithNorm', 'FFNWithNorm', 'ACRNHead' + 'DividedTemporalAttentionWithNorm', 'FFNWithNorm', 'ACRNHead', 'BaseGCN', + 'SkeletonGCN' ] diff --git a/mmaction/models/backbones/__init__.py b/mmaction/models/backbones/__init__.py index 7262bb86db..2304d3db77 100644 --- a/mmaction/models/backbones/__init__.py +++ b/mmaction/models/backbones/__init__.py @@ -11,6 +11,7 @@ from .resnet_audio import ResNetAudio from .resnet_tin import ResNetTIN from .resnet_tsm import ResNetTSM +from .stgcn import STGCN from .tanet import TANet from .timesformer import TimeSformer from .x3d import X3D @@ -19,5 +20,5 @@ 'C3D', 'ResNet', 'ResNet3d', 'ResNetTSM', 'ResNet2Plus1d', 'ResNet3dSlowFast', 'ResNet3dSlowOnly', 'ResNet3dCSN', 'ResNetTIN', 'X3D', 'ResNetAudio', 
    'ResNet3dLayer', 'MobileNetV2TSM', 'MobileNetV2', 'TANet',
-    'TimeSformer'
+    'TimeSformer', 'STGCN'
 ]
diff --git a/mmaction/models/backbones/stgcn.py b/mmaction/models/backbones/stgcn.py
new file mode 100644
index 0000000000..f360d95db6
--- /dev/null
+++ b/mmaction/models/backbones/stgcn.py
@@ -0,0 +1,280 @@
+import torch
+import torch.nn as nn
+from mmcv.cnn import constant_init, kaiming_init, normal_init
+from mmcv.runner import load_checkpoint
+from mmcv.utils import _BatchNorm
+
+from ...utils import get_root_logger
+from ..builder import BACKBONES
+from ..skeleton_gcn.utils import Graph
+
+
+def zero(x):
+    """return zero."""
+    return 0
+
+
+def identity(x):
+    """return input itself."""
+    return x
+
+
+class STGCNBlock(nn.Module):
+    """Applies a spatial temporal graph convolution over an input graph
+    sequence.
+
+    Args:
+        in_channels (int): Number of channels in the input sequence data
+        out_channels (int): Number of channels produced by the convolution
+        kernel_size (tuple): Size of the temporal convolving kernel and
+            graph convolving kernel
+        stride (int, optional): Stride of the temporal convolution. Default: 1
+        dropout (int, optional): Dropout rate of the final output. Default: 0
+        residual (bool, optional): If ``True``, applies a residual mechanism.
+            Default: ``True``
+
+    Shape:
+        - Input[0]: Input graph sequence in :math:`(N, in_channels, T_{in}, V)`
+          format
+        - Input[1]: Input graph adjacency matrix in :math:`(K, V, V)` format
+        - Output[0]: Output graph sequence in :math:`(N, out_channels, T_{out},
+          V)` format
+        - Output[1]: Graph adjacency matrix for output data in :math:`(K, V,
+          V)` format
+
+        where
+        :math:`N` is a batch size,
+        :math:`K` is the spatial kernel size, as :math:`K == kernel_size[1]`,
+        :math:`T_{in}/T_{out}` is a length of input/output sequence,
+        :math:`V` is the number of graph nodes.
+    """
+
+    def __init__(self,
+                 in_channels,
+                 out_channels,
+                 kernel_size,
+                 stride=1,
+                 dropout=0,
+                 residual=True):
+        super().__init__()
+
+        assert len(kernel_size) == 2
+        assert kernel_size[0] % 2 == 1
+        padding = ((kernel_size[0] - 1) // 2, 0)
+
+        self.gcn = ConvTemporalGraphical(in_channels, out_channels,
+                                         kernel_size[1])
+        self.tcn = nn.Sequential(
+            nn.BatchNorm2d(out_channels), nn.ReLU(inplace=True),
+            nn.Conv2d(out_channels, out_channels, (kernel_size[0], 1),
+                      (stride, 1), padding), nn.BatchNorm2d(out_channels),
+            nn.Dropout(dropout, inplace=True))
+
+        if not residual:
+            self.residual = zero
+
+        elif (in_channels == out_channels) and (stride == 1):
+            self.residual = identity
+
+        else:
+            self.residual = nn.Sequential(
+                nn.Conv2d(
+                    in_channels,
+                    out_channels,
+                    kernel_size=1,
+                    stride=(stride, 1)), nn.BatchNorm2d(out_channels))
+
+        self.relu = nn.ReLU(inplace=True)
+
+    def forward(self, x, adj_mat):
+        """Defines the computation performed at every call."""
+        res = self.residual(x)
+        x, adj_mat = self.gcn(x, adj_mat)
+        x = self.tcn(x) + res
+
+        return self.relu(x), adj_mat
+
+
+class ConvTemporalGraphical(nn.Module):
+    """The basic module for applying a graph convolution.
+
+    Args:
+        in_channels (int): Number of channels in the input sequence data
+        out_channels (int): Number of channels produced by the convolution
+        kernel_size (int): Size of the graph convolving kernel
+        t_kernel_size (int): Size of the temporal convolving kernel
+        t_stride (int, optional): Stride of the temporal convolution.
+            Default: 1
+        t_padding (int, optional): Temporal zero-padding added to both sides
+            of the input.
Default: 0 + t_dilation (int, optional): Spacing between temporal kernel elements. + Default: 1 + bias (bool, optional): If ``True``, adds a learnable bias to the + output. Default: ``True`` + + Shape: + - Input[0]: Input graph sequence in :math:`(N, in_channels, T_{in}, V)` + format + - Input[1]: Input graph adjacency matrix in :math:`(K, V, V)` format + - Output[0]: Output graph sequence in :math:`(N, out_channels, T_{out} + , V)` format + - Output[1]: Graph adjacency matrix for output data in :math:`(K, V, V) + ` format + + where + :math:`N` is a batch size, + :math:`K` is the spatial kernel size, as :math:`K == kernel_size[1] + `, + :math:`T_{in}/T_{out}` is a length of input/output sequence, + :math:`V` is the number of graph nodes. + """ + + def __init__(self, + in_channels, + out_channels, + kernel_size, + t_kernel_size=1, + t_stride=1, + t_padding=0, + t_dilation=1, + bias=True): + super().__init__() + + self.kernel_size = kernel_size + self.conv = nn.Conv2d( + in_channels, + out_channels * kernel_size, + kernel_size=(t_kernel_size, 1), + padding=(t_padding, 0), + stride=(t_stride, 1), + dilation=(t_dilation, 1), + bias=bias) + + def forward(self, x, adj_mat): + """Defines the computation performed at every call.""" + assert adj_mat.size(0) == self.kernel_size + + x = self.conv(x) + + n, kc, t, v = x.size() + x = x.view(n, self.kernel_size, kc // self.kernel_size, t, v) + x = torch.einsum('nkctv,kvw->nctw', (x, adj_mat)) + + return x.contiguous(), adj_mat + + +@BACKBONES.register_module() +class STGCN(nn.Module): + """Backbone of Spatial temporal graph convolutional networks. + + Args: + in_channels (int): Number of channels in the input data. + graph_cfg (dict): The arguments for building the graph. + edge_importance_weighting (bool): If ``True``, adds a learnable + importance weighting to the edges of the graph. Default: True. + data_bn (bool): If 'True', adds data normalization to the inputs. + Default: True. + pretrained (str | None): Name of pretrained model. + **kwargs (optional): Other parameters for graph convolution units. + + Shape: + - Input: :math:`(N, in_channels, T_{in}, V_{in}, M_{in})` + - Output: :math:`(N, num_class)` where + :math:`N` is a batch size, + :math:`T_{in}` is a length of input sequence, + :math:`V_{in}` is the number of graph nodes, + :math:`M_{in}` is the number of instance in a frame. 
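+
+        For the NTU60 config added in this PR, the input is typically of
+        shape :math:`(N, 3, 300, 17, 2)`: (x, y, score) channels, 300-frame
+        clips, the 17-joint COCO layout and at most 2 persons.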
+ """ + + def __init__(self, + in_channels, + graph_cfg, + edge_importance_weighting=True, + data_bn=True, + pretrained=None, + **kwargs): + super().__init__() + + # load graph + self.graph = Graph(**graph_cfg) + A = torch.tensor( + self.graph.A, dtype=torch.float32, requires_grad=False) + self.register_buffer('A', A) + + # build networks + spatial_kernel_size = A.size(0) + temporal_kernel_size = 9 + kernel_size = (temporal_kernel_size, spatial_kernel_size) + self.data_bn = nn.BatchNorm1d(in_channels * + A.size(1)) if data_bn else identity + + kwargs0 = {k: v for k, v in kwargs.items() if k != 'dropout'} + self.st_gcn_networks = nn.ModuleList(( + STGCNBlock( + in_channels, 64, kernel_size, 1, residual=False, **kwargs0), + STGCNBlock(64, 64, kernel_size, 1, **kwargs), + STGCNBlock(64, 64, kernel_size, 1, **kwargs), + STGCNBlock(64, 64, kernel_size, 1, **kwargs), + STGCNBlock(64, 128, kernel_size, 2, **kwargs), + STGCNBlock(128, 128, kernel_size, 1, **kwargs), + STGCNBlock(128, 128, kernel_size, 1, **kwargs), + STGCNBlock(128, 256, kernel_size, 2, **kwargs), + STGCNBlock(256, 256, kernel_size, 1, **kwargs), + STGCNBlock(256, 256, kernel_size, 1, **kwargs), + )) + + # initialize parameters for edge importance weighting + if edge_importance_weighting: + self.edge_importance = nn.ParameterList([ + nn.Parameter(torch.ones(self.A.size())) + for i in self.st_gcn_networks + ]) + else: + self.edge_importance = [1 for _ in self.st_gcn_networks] + + self.pretrained = pretrained + + def init_weights(self): + """Initiate the parameters either from existing checkpoint or from + scratch.""" + if isinstance(self.pretrained, str): + logger = get_root_logger() + logger.info(f'load model from: {self.pretrained}') + + load_checkpoint(self, self.pretrained, strict=False, logger=logger) + + elif self.pretrained is None: + for m in self.modules(): + if isinstance(m, nn.Conv2d): + kaiming_init(m) + elif isinstance(m, nn.Linear): + normal_init(m) + elif isinstance(m, _BatchNorm): + constant_init(m, 1) + else: + raise TypeError('pretrained must be a str or None') + + def forward(self, x): + """Defines the computation performed at every call. + Args: + x (torch.Tensor): The input data. + + Returns: + torch.Tensor: The output of the module. 
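+                The output has shape :math:`(N * M, 256, T / 4, V)`,
+                e.g. :math:`(2, 256, 75, 17)` for a
+                :math:`(1, 3, 300, 17, 2)` input, as checked in the unit
+                tests below.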
+        """
+        # data normalization
+        x = x.float()
+        n, c, t, v, m = x.size()  # bs 3 300 25(17) 2
+        x = x.permute(0, 4, 3, 1, 2).contiguous()  # N M V C T
+        x = x.view(n * m, v * c, t)
+        x = self.data_bn(x)
+        x = x.view(n, m, v, c, t)
+        x = x.permute(0, 1, 3, 4, 2).contiguous()
+        x = x.view(n * m, c, t, v)  # bsx2 3 300 25(17)
+
+        # forward
+        for gcn, importance in zip(self.st_gcn_networks, self.edge_importance):
+            x, _ = gcn(x, self.A * importance)
+
+        return x
diff --git a/mmaction/models/heads/__init__.py b/mmaction/models/heads/__init__.py
index 17c8c38643..edc3a0d553 100644
--- a/mmaction/models/heads/__init__.py
+++ b/mmaction/models/heads/__init__.py
@@ -9,6 +9,7 @@
 from .roi_head import AVARoIHead
 from .slowfast_head import SlowFastHead
 from .ssn_head import SSNHead
+from .stgcn_head import STGCNHead
 from .timesformer_head import TimeSformerHead
 from .tpn_head import TPNHead
 from .trn_head import TRNHead
@@ -19,5 +20,6 @@
 __all__ = [
     'TSNHead', 'I3DHead', 'BaseHead', 'TSMHead', 'SlowFastHead', 'SSNHead',
     'TPNHead', 'AudioTSNHead', 'X3DHead', 'BBoxHeadAVA', 'AVARoIHead',
-    'FBOHead', 'LFBInferHead', 'TRNHead', 'TimeSformerHead', 'ACRNHead'
+    'FBOHead', 'LFBInferHead', 'TRNHead', 'TimeSformerHead', 'ACRNHead',
+    'STGCNHead'
 ]
diff --git a/mmaction/models/heads/stgcn_head.py b/mmaction/models/heads/stgcn_head.py
new file mode 100644
index 0000000000..74b952cb6f
--- /dev/null
+++ b/mmaction/models/heads/stgcn_head.py
@@ -0,0 +1,61 @@
+import torch.nn as nn
+from mmcv.cnn import normal_init
+
+from ..builder import HEADS
+from .base import BaseHead
+
+
+@HEADS.register_module()
+class STGCNHead(BaseHead):
+    """The classification head for STGCN.
+
+    Args:
+        num_classes (int): Number of classes to be classified.
+        in_channels (int): Number of channels in input feature.
+        loss_cls (dict): Config for building loss.
+            Default: dict(type='CrossEntropyLoss')
+        spatial_type (str): Pooling type in spatial dimension. Default: 'avg'.
+        num_person (int): Number of persons. Default: 2.
+        init_std (float): Std value for weight initialization. Default: 0.01.
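+
+        The head pools features of shape :math:`(N * M, C, T, V)` and
+        averages the pooled features of the :math:`M` persons in each
+        sample before the final classification layer.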
+ """ + + def __init__(self, + num_classes, + in_channels, + loss_cls=dict(type='CrossEntropyLoss'), + spatial_type='avg', + num_person=2, + init_std=0.01): + super().__init__(num_classes, in_channels, loss_cls) + + self.spatial_type = spatial_type + self.in_channels = in_channels + self.num_classes = num_classes + self.num_person = num_person + self.init_std = init_std + + self.pool = None + if self.spatial_type == 'avg': + self.pool = nn.AdaptiveAvgPool2d((1, 1)) + elif self.spatial_type == 'max': + self.pool = nn.AdaptiveMaxPool2d((1, 1)) + else: + raise NotImplementedError + + self.fc = nn.Conv2d(self.in_channels, self.num_classes, kernel_size=1) + + def init_weights(self): + normal_init(self.fc, std=self.init_std) + + def forward(self, x): + # global pooling + assert self.pool is not None + x = self.pool(x) + x = x.view(x.shape[0] // self.num_person, self.num_person, -1, 1, + 1).mean(dim=1) + + # prediction + x = self.fc(x) + x = x.view(x.shape[0], -1) + + return x diff --git a/mmaction/models/skeleton_gcn/__init__.py b/mmaction/models/skeleton_gcn/__init__.py new file mode 100644 index 0000000000..b57750f018 --- /dev/null +++ b/mmaction/models/skeleton_gcn/__init__.py @@ -0,0 +1,4 @@ +from .base import BaseGCN +from .skeletongcn import SkeletonGCN + +__all__ = ['BaseGCN', 'SkeletonGCN'] diff --git a/mmaction/models/skeleton_gcn/base.py b/mmaction/models/skeleton_gcn/base.py new file mode 100644 index 0000000000..6a9d1bcaa4 --- /dev/null +++ b/mmaction/models/skeleton_gcn/base.py @@ -0,0 +1,175 @@ +from abc import ABCMeta, abstractmethod +from collections import OrderedDict + +import torch +import torch.distributed as dist +import torch.nn as nn + +from .. import builder + + +class BaseGCN(nn.Module, metaclass=ABCMeta): + """Base class for GCN-based action recognition. + + All GCN-based recognizers should subclass it. + All subclass should overwrite: + + - Methods:``forward_train``, supporting to forward when training. + - Methods:``forward_test``, supporting to forward when testing. + + Args: + backbone (dict): Backbone modules to extract feature. + cls_head (dict | None): Classification head to process feature. + Default: None. + train_cfg (dict | None): Config for training. Default: None. + test_cfg (dict | None): Config for testing. Default: None. + """ + + def __init__(self, backbone, cls_head=None, train_cfg=None, test_cfg=None): + super().__init__() + # record the source of the backbone + self.backbone_from = 'mmaction2' + self.backbone = builder.build_backbone(backbone) + self.cls_head = builder.build_head(cls_head) if cls_head else None + + self.train_cfg = train_cfg + self.test_cfg = test_cfg + + self.init_weights() + + @property + def with_cls_head(self): + """bool: whether the recognizer has a cls_head""" + return hasattr(self, 'cls_head') and self.cls_head is not None + + def init_weights(self): + """Initialize the model network weights.""" + if self.backbone_from in ['mmcls', 'mmaction2']: + self.backbone.init_weights() + else: + raise NotImplementedError('Unsupported backbone source ' + f'{self.backbone_from}!') + + if self.with_cls_head: + self.cls_head.init_weights() + + @abstractmethod + def forward_train(self, *args, **kwargs): + """Defines the computation performed at training.""" + + @abstractmethod + def forward_test(self, *args): + """Defines the computation performed at testing.""" + + @staticmethod + def _parse_losses(losses): + """Parse the raw outputs (losses) of the network. 
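+
+        Entries whose key contains ``'loss'`` are summed into the total
+        loss; the remaining entries are only logged.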
+ + Args: + losses (dict): Raw output of the network, which usually contain + losses and other necessary information. + + Returns: + tuple[Tensor, dict]: (loss, log_vars), loss is the loss tensor + which may be a weighted sum of all losses, log_vars contains + all the variables to be sent to the logger. + """ + log_vars = OrderedDict() + for loss_name, loss_value in losses.items(): + if isinstance(loss_value, torch.Tensor): + log_vars[loss_name] = loss_value.mean() + elif isinstance(loss_value, list): + log_vars[loss_name] = sum(_loss.mean() for _loss in loss_value) + else: + raise TypeError( + f'{loss_name} is not a tensor or list of tensors') + + loss = sum(_value for _key, _value in log_vars.items() + if 'loss' in _key) + + log_vars['loss'] = loss + for loss_name, loss_value in log_vars.items(): + # reduce loss when distributed training + if dist.is_available() and dist.is_initialized(): + loss_value = loss_value.data.clone() + dist.all_reduce(loss_value.div_(dist.get_world_size())) + log_vars[loss_name] = loss_value.item() + + return loss, log_vars + + def forward(self, keypoint, label=None, return_loss=True, **kwargs): + """Define the computation performed at every call.""" + if return_loss: + if label is None: + raise ValueError('Label should not be None.') + return self.forward_train(keypoint, label, **kwargs) + + return self.forward_test(keypoint, **kwargs) + + def extract_feat(self, skeletons): + """Extract features through a backbone. + + Args: + skeletons (torch.Tensor): The input skeletons. + + Returns: + torch.tensor: The extracted features. + """ + x = self.backbone(skeletons) + return x + + def train_step(self, data_batch, optimizer, **kwargs): + """The iteration step during training. + + This method defines an iteration step during training, except for the + back propagation and optimizer updating, which are done in an optimizer + hook. Note that in some complicated cases or models, the whole process + including back propagation and optimizer updating is also defined in + this method, such as GAN. + + Args: + data_batch (dict): The output of dataloader. + optimizer (:obj:`torch.optim.Optimizer` | dict): The optimizer of + runner is passed to ``train_step()``. This argument is unused + and reserved. + + Returns: + dict: It should contain at least 3 keys: ``loss``, ``log_vars``, + ``num_samples``. + ``loss`` is a tensor for back propagation, which can be a + weighted sum of multiple losses. + ``log_vars`` contains all the variables to be sent to the + logger. + ``num_samples`` indicates the batch size (when the model is + DDP, it means the batch size on each GPU), which is used for + averaging the logs. + """ + skeletons = data_batch['keypoint'] + label = data_batch['label'] + label = label.squeeze(-1) + + losses = self(skeletons, label, return_loss=True) + + loss, log_vars = self._parse_losses(losses) + outputs = dict( + loss=loss, log_vars=log_vars, num_samples=len(skeletons.data)) + + return outputs + + def val_step(self, data_batch, optimizer, **kwargs): + """The iteration step during validation. + + This method shares the same signature as :func:`train_step`, but used + during val epochs. Note that the evaluation after training epochs is + not implemented with this method, but an evaluation hook. 
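+
+        Unlike :func:`train_step`, the label is passed to the model
+        without the extra ``squeeze`` here.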
+        """
+        skeletons = data_batch['keypoint']
+        label = data_batch['label']
+
+        losses = self(skeletons, label, return_loss=True)
+
+        loss, log_vars = self._parse_losses(losses)
+        outputs = dict(
+            loss=loss, log_vars=log_vars, num_samples=len(skeletons.data))
+
+        return outputs
diff --git a/mmaction/models/skeleton_gcn/skeletongcn.py b/mmaction/models/skeleton_gcn/skeletongcn.py
new file mode 100644
index 0000000000..ffd14d9f47
--- /dev/null
+++ b/mmaction/models/skeleton_gcn/skeletongcn.py
@@ -0,0 +1,29 @@
+from ..builder import RECOGNIZERS
+from .base import BaseGCN
+
+
+@RECOGNIZERS.register_module()
+class SkeletonGCN(BaseGCN):
+    """Spatial temporal graph convolutional networks."""
+
+    def forward_train(self, skeletons, labels, **kwargs):
+        """Defines the computation performed at every call when training."""
+        assert self.with_cls_head
+        losses = dict()
+
+        x = self.extract_feat(skeletons)
+        output = self.cls_head(x)
+        gt_labels = labels.squeeze(-1)
+        loss = self.cls_head.loss(output, gt_labels)
+        losses.update(loss)
+
+        return losses
+
+    def forward_test(self, skeletons):
+        """Defines the computation performed at every call when evaluation and
+        testing."""
+        x = self.extract_feat(skeletons)
+        assert self.with_cls_head
+        output = self.cls_head(x)
+
+        return output.data.cpu().numpy()
diff --git a/mmaction/models/skeleton_gcn/utils/__init__.py b/mmaction/models/skeleton_gcn/utils/__init__.py
new file mode 100644
index 0000000000..b60b3a16ee
--- /dev/null
+++ b/mmaction/models/skeleton_gcn/utils/__init__.py
@@ -0,0 +1,3 @@
+from .graph import Graph
+
+__all__ = ['Graph']
diff --git a/mmaction/models/skeleton_gcn/utils/graph.py b/mmaction/models/skeleton_gcn/utils/graph.py
new file mode 100644
index 0000000000..04e8fc19f3
--- /dev/null
+++ b/mmaction/models/skeleton_gcn/utils/graph.py
@@ -0,0 +1,165 @@
+import numpy as np
+
+
+def get_hop_distance(num_node, edge, max_hop=1):
+    adj_mat = np.zeros((num_node, num_node))
+    for i, j in edge:
+        adj_mat[i, j] = 1
+        adj_mat[j, i] = 1
+
+    # compute hop steps
+    hop_dis = np.zeros((num_node, num_node)) + np.inf
+    transfer_mat = [
+        np.linalg.matrix_power(adj_mat, d) for d in range(max_hop + 1)
+    ]
+    arrive_mat = (np.stack(transfer_mat) > 0)
+    for d in range(max_hop, -1, -1):
+        hop_dis[arrive_mat[d]] = d
+    return hop_dis
+
+
+def normalize_digraph(adj_matrix):
+    Dl = np.sum(adj_matrix, 0)
+    num_nodes = adj_matrix.shape[0]
+    Dn = np.zeros((num_nodes, num_nodes))
+    for i in range(num_nodes):
+        if Dl[i] > 0:
+            Dn[i, i] = Dl[i]**(-1)
+    norm_matrix = np.dot(adj_matrix, Dn)
+    return norm_matrix
+
+
+class Graph:
+    """The Graph to model the skeletons extracted by openpose.
+
+    Args:
+        layout (str): must be one of the following candidates
+        - openpose: It consists of 18 joints. For more information, please
+          refer to
+          https://github.com/CMU-Perceptual-Computing-Lab/openpose#output
+        - ntu-rgb+d: It consists of 25 joints. For more information, please
+          refer to https://github.com/shahroudy/NTURGB-D
+        - ntu_edge: It consists of 24 joints.
+        - coco: It consists of 17 joints (the COCO keypoint layout).
+
+        strategy (str): must be one of the following candidates
+        - uniform: Uniform Labeling
+        - distance: Distance Partitioning
+        - spatial: Spatial Configuration
+        For more information, please refer to the section 'Partition
+        Strategies' in our paper (https://arxiv.org/abs/1801.07455).
+
+        max_hop (int): the maximal distance between two connected nodes.
+            Default: 1
+        dilation (int): controls the spacing between the kernel points.
+            Default: 1
+    """
+
+    def __init__(self,
+                 layout='openpose',
+                 strategy='uniform',
+                 max_hop=1,
+                 dilation=1):
+        self.max_hop = max_hop
+        self.dilation = dilation
+
+        assert layout in ['openpose', 'ntu-rgb+d', 'ntu_edge', 'coco']
+        assert strategy in ['uniform', 'distance', 'spatial']
+        self.get_edge(layout)
+        self.hop_dis = get_hop_distance(
+            self.num_node, self.edge, max_hop=max_hop)
+        self.get_adjacency(strategy)
+
+    def __str__(self):
+        # ``__str__`` must return a string, not an ndarray
+        return str(self.A)
+
+    def get_edge(self, layout):
+        """This method returns the edge pairs of the layout."""
+
+        if layout == 'openpose':
+            self.num_node = 18
+            self_link = [(i, i) for i in range(self.num_node)]
+            neighbor_link = [(4, 3), (3, 2), (7, 6), (6, 5),
+                             (13, 12), (12, 11), (10, 9), (9, 8), (11, 5),
+                             (8, 2), (5, 1), (2, 1), (0, 1), (15, 0), (14, 0),
+                             (17, 15), (16, 14)]
+            self.edge = self_link + neighbor_link
+            self.center = 1
+        elif layout == 'ntu-rgb+d':
+            self.num_node = 25
+            self_link = [(i, i) for i in range(self.num_node)]
+            neighbor_1base = [(1, 2), (2, 21), (3, 21),
+                              (4, 3), (5, 21), (6, 5), (7, 6), (8, 7), (9, 21),
+                              (10, 9), (11, 10), (12, 11), (13, 1), (14, 13),
+                              (15, 14), (16, 15), (17, 1), (18, 17), (19, 18),
+                              (20, 19), (22, 23), (23, 8), (24, 25), (25, 12)]
+            neighbor_link = [(i - 1, j - 1) for (i, j) in neighbor_1base]
+            self.edge = self_link + neighbor_link
+            self.center = 21 - 1
+        elif layout == 'ntu_edge':
+            self.num_node = 24
+            self_link = [(i, i) for i in range(self.num_node)]
+            neighbor_1base = [(1, 2), (3, 2), (4, 3), (5, 2), (6, 5), (7, 6),
+                              (8, 7), (9, 2), (10, 9), (11, 10), (12, 11),
+                              (13, 1), (14, 13), (15, 14), (16, 15), (17, 1),
+                              (18, 17), (19, 18), (20, 19), (21, 22), (22, 8),
+                              (23, 24), (24, 12)]
+            neighbor_link = [(i - 1, j - 1) for (i, j) in neighbor_1base]
+            self.edge = self_link + neighbor_link
+            self.center = 2
+        elif layout == 'coco':
+            self.num_node = 17
+            self_link = [(i, i) for i in range(self.num_node)]
+            neighbor_1base = [[16, 14], [14, 12], [17, 15], [15, 13], [12, 13],
+                              [6, 12], [7, 13], [6, 7], [8, 6], [9, 7],
+                              [10, 8], [11, 9], [2, 3], [2, 1], [3, 1], [4, 2],
+                              [5, 3], [4, 6], [5, 7]]
+            neighbor_link = [(i - 1, j - 1) for (i, j) in neighbor_1base]
+            self.edge = self_link + neighbor_link
+            self.center = 0
+        else:
+            raise ValueError(f'{layout} is not a supported layout.')
+
+    def get_adjacency(self, strategy):
+        """This method returns the adjacency matrix according to strategy."""
+
+        valid_hop = range(0, self.max_hop + 1, self.dilation)
+        adjacency = np.zeros((self.num_node, self.num_node))
+        for hop in valid_hop:
+            adjacency[self.hop_dis == hop] = 1
+        normalize_adjacency = normalize_digraph(adjacency)
+
+        if strategy == 'uniform':
+            A = np.zeros((1, self.num_node, self.num_node))
+            A[0] = normalize_adjacency
+            self.A = A
+        elif strategy == 'distance':
+            A = np.zeros((len(valid_hop), self.num_node, self.num_node))
+            for i, hop in enumerate(valid_hop):
+                A[i][self.hop_dis == hop] = normalize_adjacency[self.hop_dis ==
+                                                                hop]
+            self.A = A
+        elif strategy == 'spatial':
+            A = []
+            for hop in valid_hop:
+                a_root = np.zeros((self.num_node, self.num_node))
+                a_close = np.zeros((self.num_node, self.num_node))
+                a_further = np.zeros((self.num_node, self.num_node))
+                for i in range(self.num_node):
+                    for j in range(self.num_node):
+                        if self.hop_dis[j, i] == hop:
+                            if self.hop_dis[j, self.center] == self.hop_dis[
+                                    i, self.center]:
+                                a_root[j, i] = normalize_adjacency[j, i]
+                            elif self.hop_dis[j, self.center] > self.hop_dis[
+                                    i, self.center]:
+                                a_close[j, i] = normalize_adjacency[j, i]
+                            else:
+                                a_further[j, i] = normalize_adjacency[j, i]
+                if hop == 0:
+                    A.append(a_root)
+                else:
+                    A.append(a_root + a_close)
+                    A.append(a_further)
+            A = np.stack(A)
+            self.A = A
+        else:
+            raise ValueError(f'{strategy} is not a supported strategy.')
diff --git a/tests/test_data/test_formating.py b/tests/test_data/test_formating.py
index a75c9d49fa..b40be4a57a 100644
--- a/tests/test_data/test_formating.py
+++ b/tests/test_data/test_formating.py
@@ -6,7 +6,8 @@
 from mmcv.utils import assert_dict_has_keys
 
 from mmaction.datasets.pipelines import (Collect, FormatAudioShape,
-                                         FormatShape, ImageToTensor, Rename,
+                                         FormatGCNInput, FormatShape,
+                                         ImageToTensor, Rename,
                                          ToDataContainer, ToTensor, Transpose)
 
 
@@ -193,3 +194,34 @@
     assert format_shape(results)['input_shape'] == (3, 1, 128, 8)
     assert repr(format_shape) == format_shape.__class__.__name__ + \
         "(input_format='NCTF')"
+
+
+def test_format_gcn_input():
+    with pytest.raises(ValueError):
+        # invalid input format
+        FormatGCNInput('XXXX')
+
+    # 'NCTVM' input format
+    results = dict(
+        keypoint=np.random.randn(2, 300, 17, 2),
+        keypoint_score=np.random.randn(2, 300, 17))
+    format_shape = FormatGCNInput('NCTVM', num_person=2)
+    assert format_shape(results)['input_shape'] == (3, 300, 17, 2)
+    assert repr(format_shape) == format_shape.__class__.__name__ + \
+        "(input_format='NCTVM')"
+
+    # test real num_person < 2
+    results = dict(
+        keypoint=np.random.randn(1, 300, 17, 2),
+        keypoint_score=np.random.randn(1, 300, 17))
+    assert format_shape(results)['input_shape'] == (3, 300, 17, 2)
+    assert repr(format_shape) == format_shape.__class__.__name__ + \
+        "(input_format='NCTVM')"
+
+    # test real num_person > 2
+    results = dict(
+        keypoint=np.random.randn(3, 300, 17, 2),
+        keypoint_score=np.random.randn(3, 300, 17))
+    assert format_shape(results)['input_shape'] == (3, 300, 17, 2)
+    assert repr(format_shape) == format_shape.__class__.__name__ + \
+        "(input_format='NCTVM')"
diff --git a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py
index 7d984601e5..7b8119b00e 100644
--- a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py
+++ b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py
@@ -7,10 +7,12 @@
 import numpy as np
 import pytest
 from mmcv import dump
+from mmcv.utils import assert_dict_has_keys
 from numpy.testing import assert_array_almost_equal, assert_array_equal
 
 from mmaction.datasets.pipelines import (GeneratePoseTarget, LoadKineticsPose,
-                                         PoseDecode, UniformSampleFrames)
+                                         PaddingWithLoop, PoseDecode,
+                                         PoseNormalize, UniformSampleFrames)
 
 
 class TestPoseLoading:
@@ -355,3 +357,35 @@
             skeletons=((0, 1), (1, 2), (0, 2)))
         return_results = generate_pose_target(results)
         assert_array_almost_equal(return_results['imgs'], 0)
+
+    @staticmethod
+    def test_padding_with_loop():
+        results = dict(total_frames=3)
+        sampling = PaddingWithLoop(clip_len=6)
+        sampling_results = sampling(results)
+        assert sampling_results['clip_len'] == 6
+        assert sampling_results['frame_interval'] is None
+        assert sampling_results['num_clips'] == 1
+        assert_array_equal(sampling_results['frame_inds'],
+                           np.array([0, 1, 2, 0, 1, 2]))
+
+    @staticmethod
+    def test_pose_normalize():
+        target_keys = ['keypoint', 'keypoint_norm_cfg']
+        keypoints = np.random.randn(3, 300, 17, 2)
+        results = dict(keypoint=keypoints)
+        pose_normalize = PoseNormalize(
+            mean=[960., 540., 0.5],
+            min_value=[0., 0., 0.],
+            max_value=[1920, 1080, 1.])
+        normalize_results =
pose_normalize(results) + assert assert_dict_has_keys(normalize_results, target_keys) + check_pose_normalize(keypoints, normalize_results['keypoint'], + normalize_results['keypoint_norm_cfg']) + + +def check_pose_normalize(origin_keypoints, result_keypoints, norm_cfg): + target_keypoints = result_keypoints.copy() + target_keypoints *= (norm_cfg['max_value'] - norm_cfg['min_value']) + target_keypoints += norm_cfg['mean'] + assert_array_almost_equal(origin_keypoints, target_keypoints, decimal=4) diff --git a/tests/test_models/__init__.py b/tests/test_models/__init__.py index 86888bd8a3..7ae5f7087a 100644 --- a/tests/test_models/__init__.py +++ b/tests/test_models/__init__.py @@ -3,11 +3,11 @@ generate_detector_demo_inputs, generate_gradcam_inputs, generate_recognizer_demo_inputs, get_audio_recognizer_cfg, get_cfg, get_detector_cfg, get_localizer_cfg, - get_recognizer_cfg) + get_recognizer_cfg, get_skeletongcn_cfg) __all__ = [ 'check_norm_state', 'generate_backbone_demo_inputs', 'generate_recognizer_demo_inputs', 'generate_gradcam_inputs', 'get_cfg', 'get_recognizer_cfg', 'get_audio_recognizer_cfg', 'get_localizer_cfg', - 'get_detector_cfg', 'generate_detector_demo_inputs' + 'get_detector_cfg', 'generate_detector_demo_inputs', 'get_skeletongcn_cfg' ] diff --git a/tests/test_models/base.py b/tests/test_models/base.py index 6d9e9eedfc..49c1fd7ad9 100644 --- a/tests/test_models/base.py +++ b/tests/test_models/base.py @@ -46,7 +46,7 @@ def generate_recognizer_demo_inputs( imgs = np.random.random(input_shape) - if model_type == '2D': + if model_type == '2D' or model_type == 'skeleton': gt_labels = torch.LongTensor([2] * N) elif model_type == '3D': gt_labels = torch.LongTensor([2] * M) @@ -135,7 +135,7 @@ def get_cfg(config_type, fname): influencing other tests. 
""" config_types = ('recognition', 'recognition_audio', 'localization', - 'detection') + 'detection', 'skeleton') assert config_type in config_types repo_dpath = osp.dirname(osp.dirname(osp.dirname(__file__))) @@ -161,3 +161,7 @@ def get_localizer_cfg(fname): def get_detector_cfg(fname): return get_cfg('detection', fname) + + +def get_skeletongcn_cfg(fname): + return get_cfg('skeleton', fname) diff --git a/tests/test_models/test_backbones.py b/tests/test_models/test_backbones.py index b962917471..9cc7ae2884 100644 --- a/tests/test_models/test_backbones.py +++ b/tests/test_models/test_backbones.py @@ -6,7 +6,7 @@ import torch.nn as nn from mmcv.utils import _BatchNorm -from mmaction.models import (C3D, X3D, MobileNetV2TSM, ResNet2Plus1d, +from mmaction.models import (C3D, STGCN, X3D, MobileNetV2TSM, ResNet2Plus1d, ResNet3dCSN, ResNet3dSlowFast, ResNet3dSlowOnly, ResNetAudio, ResNetTIN, ResNetTSM, TANet, TimeSformer) @@ -769,3 +769,162 @@ def test_resnet_tin_backbone(): # resnet_tin with normal cfg inference feat = resnet_tin(imgs) assert feat.shape == torch.Size([8, 2048, 2, 2]) + + +def test_stgcn_backbone(): + """Test STGCN backbone.""" + # test coco layout, spatial strategy + input_shape = (1, 3, 300, 17, 2) + skeletons = generate_backbone_demo_inputs(input_shape) + + stgcn = STGCN( + in_channels=3, + edge_importance_weighting=True, + graph_cfg=dict(layout='coco', strategy='spatial')) + stgcn.init_weights() + stgcn.train() + feat = stgcn(skeletons) + assert feat.shape == torch.Size([2, 256, 75, 17]) + + # test openpose layout, spatial strategy + input_shape = (1, 3, 300, 18, 2) + skeletons = generate_backbone_demo_inputs(input_shape) + + stgcn = STGCN( + in_channels=3, + edge_importance_weighting=True, + graph_cfg=dict(layout='openpose', strategy='spatial')) + stgcn.init_weights() + stgcn.train() + feat = stgcn(skeletons) + assert feat.shape == torch.Size([2, 256, 75, 18]) + + # test ntu-rgb+d layout, spatial strategy + input_shape = (1, 3, 300, 25, 2) + skeletons = generate_backbone_demo_inputs(input_shape) + + stgcn = STGCN( + in_channels=3, + edge_importance_weighting=True, + graph_cfg=dict(layout='ntu-rgb+d', strategy='spatial')) + stgcn.init_weights() + stgcn.train() + feat = stgcn(skeletons) + assert feat.shape == torch.Size([2, 256, 75, 25]) + + # test ntu_edge layout, spatial strategy + input_shape = (1, 3, 300, 24, 2) + skeletons = generate_backbone_demo_inputs(input_shape) + + stgcn = STGCN( + in_channels=3, + edge_importance_weighting=True, + graph_cfg=dict(layout='ntu_edge', strategy='spatial')) + stgcn.init_weights() + stgcn.train() + feat = stgcn(skeletons) + assert feat.shape == torch.Size([2, 256, 75, 24]) + + # test coco layout, uniform strategy + input_shape = (1, 3, 300, 17, 2) + skeletons = generate_backbone_demo_inputs(input_shape) + + stgcn = STGCN( + in_channels=3, + edge_importance_weighting=True, + graph_cfg=dict(layout='coco', strategy='uniform')) + stgcn.init_weights() + stgcn.train() + feat = stgcn(skeletons) + assert feat.shape == torch.Size([2, 256, 75, 17]) + + # test openpose layout, uniform strategy + input_shape = (1, 3, 300, 18, 2) + skeletons = generate_backbone_demo_inputs(input_shape) + + stgcn = STGCN( + in_channels=3, + edge_importance_weighting=True, + graph_cfg=dict(layout='openpose', strategy='uniform')) + stgcn.init_weights() + stgcn.train() + feat = stgcn(skeletons) + assert feat.shape == torch.Size([2, 256, 75, 18]) + + # test ntu-rgb+d layout, uniform strategy + input_shape = (1, 3, 300, 25, 2) + skeletons = 
generate_backbone_demo_inputs(input_shape) + + stgcn = STGCN( + in_channels=3, + edge_importance_weighting=True, + graph_cfg=dict(layout='ntu-rgb+d', strategy='uniform')) + stgcn.init_weights() + stgcn.train() + feat = stgcn(skeletons) + assert feat.shape == torch.Size([2, 256, 75, 25]) + + # test ntu_edge layout, uniform strategy + input_shape = (1, 3, 300, 24, 2) + skeletons = generate_backbone_demo_inputs(input_shape) + + stgcn = STGCN( + in_channels=3, + edge_importance_weighting=True, + graph_cfg=dict(layout='ntu_edge', strategy='uniform')) + stgcn.init_weights() + stgcn.train() + feat = stgcn(skeletons) + assert feat.shape == torch.Size([2, 256, 75, 24]) + + # test coco layout, distance strategy + input_shape = (1, 3, 300, 17, 2) + skeletons = generate_backbone_demo_inputs(input_shape) + + stgcn = STGCN( + in_channels=3, + edge_importance_weighting=True, + graph_cfg=dict(layout='coco', strategy='distance')) + stgcn.init_weights() + stgcn.train() + feat = stgcn(skeletons) + assert feat.shape == torch.Size([2, 256, 75, 17]) + + # test openpose layout, distance strategy + input_shape = (1, 3, 300, 18, 2) + skeletons = generate_backbone_demo_inputs(input_shape) + + stgcn = STGCN( + in_channels=3, + edge_importance_weighting=True, + graph_cfg=dict(layout='openpose', strategy='distance')) + stgcn.init_weights() + stgcn.train() + feat = stgcn(skeletons) + assert feat.shape == torch.Size([2, 256, 75, 18]) + + # test ntu-rgb+d layout, distance strategy + input_shape = (1, 3, 300, 25, 2) + skeletons = generate_backbone_demo_inputs(input_shape) + + stgcn = STGCN( + in_channels=3, + edge_importance_weighting=True, + graph_cfg=dict(layout='ntu-rgb+d', strategy='distance')) + stgcn.init_weights() + stgcn.train() + feat = stgcn(skeletons) + assert feat.shape == torch.Size([2, 256, 75, 25]) + + # test ntu_edge layout, distance strategy + input_shape = (1, 3, 300, 24, 2) + skeletons = generate_backbone_demo_inputs(input_shape) + + stgcn = STGCN( + in_channels=3, + edge_importance_weighting=True, + graph_cfg=dict(layout='ntu_edge', strategy='distance')) + stgcn.init_weights() + stgcn.train() + feat = stgcn(skeletons) + assert feat.shape == torch.Size([2, 256, 75, 24]) diff --git a/tests/test_models/test_head.py b/tests/test_models/test_head.py index 7a02829652..7c9b42fd36 100644 --- a/tests/test_models/test_head.py +++ b/tests/test_models/test_head.py @@ -10,7 +10,7 @@ import mmaction from mmaction.models import (ACRNHead, AudioTSNHead, BBoxHeadAVA, FBOHead, - I3DHead, LFBInferHead, SlowFastHead, + I3DHead, LFBInferHead, SlowFastHead, STGCNHead, TimeSformerHead, TPNHead, TRNHead, TSMHead, TSNHead, X3DHead) from .base import generate_backbone_demo_inputs @@ -517,3 +517,39 @@ def test_acrn_head(): acrn_head = ACRNHead(32, 16, stride=2, num_convs=2) new_feat = acrn_head(roi_feat, feat, rois) assert new_feat.shape == (4, 16, 1, 8, 8) + + +def test_stgcn_head(): + """Test loss method, layer construction, attributes and forward function in + stgcn head.""" + with pytest.raises(NotImplementedError): + # spatial_type not in ['avg', 'max'] + stgcn_head = STGCNHead( + num_classes=60, in_channels=256, spatial_type='min') + stgcn_head.init_weights() + + # spatial_type='avg' + stgcn_head = STGCNHead(num_classes=60, in_channels=256, spatial_type='avg') + stgcn_head.init_weights() + + assert stgcn_head.num_classes == 60 + assert stgcn_head.in_channels == 256 + + input_shape = (2, 256, 75, 17) + feat = torch.rand(input_shape) + + cls_scores = stgcn_head(feat) + assert cls_scores.shape == torch.Size([1, 60]) + + # 
spatial_type='max' + stgcn_head = STGCNHead(num_classes=60, in_channels=256, spatial_type='max') + stgcn_head.init_weights() + + assert stgcn_head.num_classes == 60 + assert stgcn_head.in_channels == 256 + + input_shape = (2, 256, 75, 17) + feat = torch.rand(input_shape) + + cls_scores = stgcn_head(feat) + assert cls_scores.shape == torch.Size([1, 60]) diff --git a/tests/test_models/test_recognizers/test_skeletongcn.py b/tests/test_models/test_recognizers/test_skeletongcn.py new file mode 100644 index 0000000000..4f416d3283 --- /dev/null +++ b/tests/test_models/test_recognizers/test_skeletongcn.py @@ -0,0 +1,50 @@ +import pytest +import torch + +from mmaction.models import build_recognizer +from ..base import generate_recognizer_demo_inputs, get_skeletongcn_cfg + + +def test_skeletongcn(): + config = get_skeletongcn_cfg('stgcn/stgcn_80e_ntu60_xsub_keypoint.py') + with pytest.raises(TypeError): + # "pretrained" must be a str or None + config.model['backbone']['pretrained'] = ['None'] + recognizer = build_recognizer(config.model) + + config.model['backbone']['pretrained'] = None + recognizer = build_recognizer(config.model) + + input_shape = (1, 3, 300, 17, 2) + demo_inputs = generate_recognizer_demo_inputs(input_shape, 'skeleton') + + skeletons = demo_inputs['imgs'] + gt_labels = demo_inputs['gt_labels'] + + losses = recognizer(skeletons, gt_labels) + assert isinstance(losses, dict) + + # Test forward test + with torch.no_grad(): + skeleton_list = [skeleton[None, :] for skeleton in skeletons] + for one_skeleton in skeleton_list: + recognizer(one_skeleton, None, return_loss=False) + + # test stgcn without edge importance weighting + config.model['backbone']['edge_importance_weighting'] = False + recognizer = build_recognizer(config.model) + + input_shape = (1, 3, 300, 17, 2) + demo_inputs = generate_recognizer_demo_inputs(input_shape, 'skeleton') + + skeletons = demo_inputs['imgs'] + gt_labels = demo_inputs['gt_labels'] + + losses = recognizer(skeletons, gt_labels) + assert isinstance(losses, dict) + + # Test forward test + with torch.no_grad(): + skeleton_list = [skeleton[None, :] for skeleton in skeletons] + for one_skeleton in skeleton_list: + recognizer(one_skeleton, None, return_loss=False) From 176c12d4e382cc54e5e69fa5f87feb05bae1a455 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Wed, 15 Sep 2021 15:38:39 +0800 Subject: [PATCH 252/414] [Fix] Correct args name in extract_audio.py (#1148) --- tools/data/extract_audio.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/tools/data/extract_audio.py b/tools/data/extract_audio.py index 0249e4957a..6f56de2691 100644 --- a/tools/data/extract_audio.py +++ b/tools/data/extract_audio.py @@ -47,13 +47,12 @@ def parse_args(): if __name__ == '__main__': args = parse_args() - mmcv.mkdir_or_exist(args.out_dir) + mmcv.mkdir_or_exist(args.dst_root) - print('Reading videos from folder: ', args.src_dir) + print('Reading videos from folder: ', args.root) print('Extension of videos: ', args.ext) - fullpath_list = glob.glob(args.src_dir + '/*' * args.level + '.' + - args.ext) - done_fullpath_list = glob.glob(args.out_dir + '/*' * args.level + '.wav') + fullpath_list = glob.glob(args.root + '/*' * args.level + '.' 
+ args.ext) + done_fullpath_list = glob.glob(args.dst_root + '/*' * args.level + '.wav') print('Total number of videos found: ', len(fullpath_list)) print('Total number of videos extracted finished: ', len(done_fullpath_list)) From 8b1031ce91b23c259a6d95e65d7508e0780af483 Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Thu, 16 Sep 2021 18:32:04 +0800 Subject: [PATCH 253/414] [Doc] Add stgcn in README.md (#1153) * master * master 0721 * add stgcn in README * add stgcn in readme-CN --- README.md | 1 + README_zh-CN.md | 10 ++++++++++ 2 files changed, 11 insertions(+) diff --git a/README.md b/README.md index a9668aca51..4ea8484cfd 100644 --- a/README.md +++ b/README.md @@ -133,6 +133,7 @@ Supported methods for Skeleton-based Action Recognition:

 <details open>
 <summary>(click to collapse)</summary>
 
 - ✅ [PoseC3D](configs/skeleton/posec3d/README.md) (ArXiv'2021)
+- ✅ [STGCN](configs/skeleton/stgcn/README.md) (AAAI'2018)
 
diff --git a/README_zh-CN.md b/README_zh-CN.md
index 58c17b3764..7402bd9fe6 100644
--- a/README_zh-CN.md
+++ b/README_zh-CN.md
@@ -121,6 +121,16 @@ v0.18.0 版本已于 2021 年 9 月 2 日发布,可通过查阅 [更新日志]
+支持的基于骨骼点的动作识别方法:
+
+<details open>
+<summary>(点击收起)</summary>
+
+- ✅ [PoseC3D](configs/skeleton/posec3d/README.md) (ArXiv'2021)
+- ✅ [STGCN](configs/skeleton/stgcn/README.md) (AAAI'2018)
+
+</details>
+
    + 各个模型的结果和设置都可以在对应的 config 目录下的 *README_zh-CN.md* 中查看。整体的概况也可也在 [**模型库**](https://mmaction2.readthedocs.io/zh_CN/latest/recognition_models.html) 页面中查看 我们将跟进学界的最新进展,并支持更多算法和框架。如果您对 MMAction2 有任何功能需求,请随时在 [问题](https://github.com/open-mmlab/mmaction2/issues/19) 中留言。 From 66cd83741bbfd76f055b72df29b2357a96efb038 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Sat, 18 Sep 2021 10:22:59 +0800 Subject: [PATCH 254/414] [Script] Fix THUMOS TAG Proposals Path (#1156) * fix path * update --- tools/data/thumos14/fetch_tag_proposals.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/data/thumos14/fetch_tag_proposals.sh b/tools/data/thumos14/fetch_tag_proposals.sh index 5d692b7346..39f05fd1e4 100644 --- a/tools/data/thumos14/fetch_tag_proposals.sh +++ b/tools/data/thumos14/fetch_tag_proposals.sh @@ -7,5 +7,5 @@ if [[ ! -d "${PROP_DIR}" ]]; then mkdir -p ${PROP_DIR} fi -wget https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmaction/filelist/thumos14_tag_val_normalized_proposal_list.txt -P ${PROP_DIR} -wget https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmaction/filelist/thumos14_tag_test_normalized_proposal_list.txt -P ${PROP_DIR} +wget https://download.openmmlab.com/mmaction/dataset/thumos14/thumos14_tag_val_normalized_proposal_list.txt -P ${PROP_DIR} +wget https://download.openmmlab.com/mmaction/dataset/thumos14/thumos14_tag_test_normalized_proposal_list.txt -P ${PROP_DIR} From 459896110efe77fd71bdfd7ff311c1fcdcfa3700 Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Mon, 20 Sep 2021 15:44:07 +0800 Subject: [PATCH 255/414] [Improvement] Use docker in CI for speeding up (#1159) * master * master 0721 * add docker in CI 0919 * add docker in CI 0919 * add docker in CI 0919 * add docker in CI 0919 * add docker in CI 0919 * add docker in CI 0919 * add docker in CI 0920 * add docker in CI 0920 * add docker in CI 0920 * add docker in CI 0920 * add docker in CI 0920 * add docker in CI 0920 * add docker in CI 0920 * add docker in CI 0920 --- .github/workflows/build.yml | 87 +++++++++++++------------------------ 1 file changed, 31 insertions(+), 56 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 9b16ea2f29..c5c1436b50 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -81,19 +81,20 @@ jobs: coverage report -m build_cu101: runs-on: ubuntu-18.04 + container: + image: pytorch/pytorch:1.6.0-cuda10.1-cudnn7-devel - env: - CUDA: 10.1.105-1 - CUDA_SHORT: 10.1 - UBUNTU_VERSION: ubuntu1804 strategy: matrix: python-version: [3.7] - torch: [1.5.0+cu101, 1.7.0+cu101] + torch: [1.5.0+cu101, 1.6.0+cu101, 1.7.0+cu101] include: - torch: 1.5.0+cu101 torch_version: torch1.5.0 torchvision: 0.6.0+cu101 + - torch: 1.6.0+cu101 + torch_version: torch1.6.0 + torchvision: 0.7.0+cu101 - torch: 1.7.0+cu101 torch_version: torch1.7.0 torchvision: 0.8.1+cu101 @@ -107,34 +108,22 @@ jobs: run: pip install pip --upgrade - name: Install CUDA run: | - export INSTALLER=cuda-repo-${UBUNTU_VERSION}_${CUDA}_amd64.deb - wget http://developer.download.nvidia.com/compute/cuda/repos/${UBUNTU_VERSION}/x86_64/${INSTALLER} - sudo dpkg -i ${INSTALLER} - wget https://developer.download.nvidia.com/compute/cuda/repos/${UBUNTU_VERSION}/x86_64/7fa2af80.pub - sudo apt-key add 7fa2af80.pub - sudo apt update -qq - sudo apt install -y cuda-${CUDA_SHORT/./-} cuda-cufft-dev-${CUDA_SHORT/./-} - sudo apt clean - export CUDA_HOME=/usr/local/cuda-${CUDA_SHORT} - export 
LD_LIBRARY_PATH=${CUDA_HOME}/lib64:${CUDA_HOME}/include:${LD_LIBRARY_PATH} - export PATH=${CUDA_HOME}/bin:${PATH} - sudo apt-get install -y ninja-build - - name: Install TurboJpeg lib - run: sudo apt-get install -y libturbojpeg - - name: Install soundfile lib - run: sudo apt-get install -y libsndfile1 + apt-get update && apt-get install -y ffmpeg libsm6 libxext6 git ninja-build libglib2.0-0 libturbojpeg libsndfile1 libsm6 libxrender-dev libxext6 python${{matrix.python-version}}-dev + apt-get clean + rm -rf /var/lib/apt/lists/* - name: Install librosa and soundfile - run: pip install librosa soundfile + run: python -m pip install librosa soundfile - name: Install lmdb - run: pip install lmdb + run: python -m pip install lmdb - name: Install PyTorch - run: pip install torch==${{matrix.torch}} torchvision==${{matrix.torchvision}} -f https://download.pytorch.org/whl/torch_stable.html + run: python -m pip install torch==${{matrix.torch}} torchvision==${{matrix.torchvision}} -f https://download.pytorch.org/whl/torch_stable.html - name: Install mmaction dependencies run: | - pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cu101/${{matrix.torch_version}}/index.html - pip install -q git+https://github.com/open-mmlab/mmdetection/ - pip install -q git+https://github.com/open-mmlab/mmclassification/ - pip install -r requirements.txt + python -V + python -m pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cu101/${{matrix.torch_version}}/index.html + python -m pip install -q git+https://github.com/open-mmlab/mmdetection/ + python -m pip install -q git+https://github.com/open-mmlab/mmclassification/ + python -m pip install -r requirements.txt python -c 'import mmcv; print(mmcv.__version__)' - name: Build and install run: rm -rf .eggs && pip install -e . 
@@ -156,11 +145,9 @@ jobs: build_cu102: runs-on: ubuntu-18.04 + container: + image: pytorch/pytorch:1.9.0-cuda10.2-cudnn7-devel - env: - CUDA: 10.2.89-1 - CUDA_SHORT: 10.2 - UBUNTU_VERSION: ubuntu1804 strategy: matrix: python-version: [3.7] @@ -179,37 +166,25 @@ jobs: run: pip install pip --upgrade - name: Install CUDA run: | - export INSTALLER=cuda-repo-${UBUNTU_VERSION}_${CUDA}_amd64.deb - wget http://developer.download.nvidia.com/compute/cuda/repos/${UBUNTU_VERSION}/x86_64/${INSTALLER} - sudo dpkg -i ${INSTALLER} - wget https://developer.download.nvidia.com/compute/cuda/repos/${UBUNTU_VERSION}/x86_64/7fa2af80.pub - sudo apt-key add 7fa2af80.pub - sudo apt update -qq - sudo apt install -y cuda-${CUDA_SHORT/./-} cuda-cufft-dev-${CUDA_SHORT/./-} - sudo apt clean - export CUDA_HOME=/usr/local/cuda-${CUDA_SHORT} - export LD_LIBRARY_PATH=${CUDA_HOME}/lib64:${CUDA_HOME}/include:${LD_LIBRARY_PATH} - export PATH=${CUDA_HOME}/bin:${PATH} - sudo apt-get install -y ninja-build - - name: Install TurboJpeg lib - run: sudo apt-get install -y libturbojpeg - - name: Install soundfile lib - run: sudo apt-get install -y libsndfile1 + apt-get update && apt-get install -y ffmpeg libsm6 libxext6 git ninja-build libglib2.0-0 libturbojpeg libsndfile1 libsm6 libxrender-dev libxext6 python${{matrix.python-version}}-dev + apt-get clean + rm -rf /var/lib/apt/lists/* - name: Install librosa and soundfile - run: pip install librosa soundfile + run: python -m pip install librosa soundfile - name: Install lmdb - run: pip install lmdb + run: python -m pip install lmdb - name: Install PyTorch - run: pip install torch==${{matrix.torch}} torchvision==${{matrix.torchvision}} -f https://download.pytorch.org/whl/torch_stable.html + run: python -m pip install torch==${{matrix.torch}} torchvision==${{matrix.torchvision}} -f https://download.pytorch.org/whl/torch_stable.html - name: Install mmaction dependencies run: | - pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cu102/${{matrix.torch_version}}/index.html - pip install -q git+https://github.com/open-mmlab/mmdetection/ - pip install -q git+https://github.com/open-mmlab/mmclassification/ - pip install -r requirements.txt + python -V + python -m pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cu102/${{matrix.torch_version}}/index.html + python -m pip install -q git+https://github.com/open-mmlab/mmdetection/ + python -m pip install -q git+https://github.com/open-mmlab/mmclassification/ + python -m pip install -r requirements.txt python -c 'import mmcv; print(mmcv.__version__)' - name: Install PytorchVideo - run: pip install pytorchvideo + run: python -m pip install pytorchvideo if: ${{matrix.torchvision == '0.10.0+cu102'}} - name: Build and install run: rm -rf .eggs && pip install -e . 
From 07cefb8773b84a3b8d26f387e12294b56b0543dc Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Mon, 20 Sep 2021 18:19:58 +0800 Subject: [PATCH 256/414] [Doc] New Doc Style (#1160) * new doc style * update note * update notes * update path * update cn page --- configs/detection/acrn/README.md | 4 +- configs/detection/ava/README.md | 4 +- configs/detection/lfb/README.md | 4 +- configs/localization/bmn/README.md | 8 +- configs/localization/bsn/README.md | 8 +- configs/localization/ssn/README.md | 4 +- configs/recognition/c3d/README.md | 4 +- configs/recognition/csn/README.md | 4 +- configs/recognition/i3d/README.md | 4 +- configs/recognition/r2plus1d/README.md | 4 +- configs/recognition/slowfast/README.md | 4 +- configs/recognition/slowonly/README.md | 4 +- configs/recognition/tanet/README.md | 4 +- configs/recognition/timesformer/README.md | 4 +- configs/recognition/tin/README.md | 4 +- configs/recognition/tpn/README.md | 4 +- configs/recognition/trn/README.md | 4 +- configs/recognition/tsm/README.md | 4 +- configs/recognition/tsn/README.md | 4 +- configs/recognition/x3d/README.md | 4 +- configs/recognition_audio/resnet/README.md | 4 +- configs/skeleton/posec3d/README.md | 4 +- demo/README.md | 8 +- docs/_static/css/readthedocs.css | 6 ++ docs/_static/images/mmaction2.png | Bin 0 -> 31100 bytes docs/api.rst | 3 - docs/conf.py | 114 ++++++++++++++++++++- docs/getting_started.md | 36 ++++--- docs/install.md | 35 ++++--- docs/tutorials/7_customize_runtime.md | 45 ++++---- docs/useful_tools.md | 4 +- docs_zh_CN/api.rst | 3 - docs_zh_CN/conf.py | 109 +++++++++++++++++++- requirements/docs.txt | 4 + setup.cfg | 2 +- src/pytorch-sphinx-theme | 1 + tools/data/jhmdb/README.md | 5 +- tools/data/kinetics/README.md | 5 +- tools/data/thumos14/README.md | 2 - tools/data/ucf101_24/README.md | 8 +- 40 files changed, 387 insertions(+), 99 deletions(-) create mode 100644 docs/_static/css/readthedocs.css create mode 100644 docs/_static/images/mmaction2.png create mode 160000 src/pytorch-sphinx-theme diff --git a/configs/detection/acrn/README.md b/configs/detection/acrn/README.md index 014e6fc710..125184a28d 100644 --- a/configs/detection/acrn/README.md +++ b/configs/detection/acrn/README.md @@ -40,12 +40,14 @@ | :----------------------------------------------------------: | :------: | :----------: | :------: | :---: | :--: | :--: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | | [slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb](/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 27.8 | [log](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-2be32625.pth) | -- Notes: +:::{note} 1. The **gpus** indicates the number of gpu we used to get the checkpoint. 
According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, e.g., lr=0.01 for 4 GPUs x 2 video/gpu and lr=0.08 for 16 GPUs x 4 video/gpu. +::: + For more details on data preparation, you can refer to AVA in [Data Preparation](/docs/data_preparation.md). ## Train diff --git a/configs/detection/ava/README.md index c1f84f3fa5..79a7c4762a 100644 --- a/configs/detection/ava/README.md +++ b/configs/detection/ava/README.md @@ -65,13 +65,15 @@ | [slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb](/configs/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 26.4 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-874e0845.pth) | | [slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb](/configs/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 26.8 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-345618cd.pth) | -- Notes: +:::{note} 1. The **gpus** indicates the number of gpu we used to get the checkpoint. According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, e.g., lr=0.01 for 4 GPUs x 2 video/gpu and lr=0.08 for 16 GPUs x 4 video/gpu. 2. **Context** indicates that both the RoI feature and the global pooled feature are used for classification, which leads to around 1% mAP improvement in general. +::: + For more details on data preparation, you can refer to AVA in [Data Preparation](/docs/data_preparation.md).
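The Linear Scaling Rule cited throughout these notes is plain arithmetic: scale the base learning rate by the ratio of your total batch size to the reference batch size. Below is a minimal sketch of that calculation; `scaled_lr` is a hypothetical helper shown for illustration, not an MMAction2 API.

```python
# Minimal sketch of the Linear Scaling Rule arithmetic from the notes above.
# `scaled_lr` is a hypothetical helper, not part of MMAction2.
def scaled_lr(base_lr, base_total_batch, num_gpus, videos_per_gpu):
    """Scale the learning rate linearly with the total batch size."""
    return base_lr * (num_gpus * videos_per_gpu) / base_total_batch

# Reference setting from the note: lr=0.01 at 4 GPUs x 2 videos/gpu (batch 8).
print(scaled_lr(0.01, 8, 16, 4))  # -> 0.08, matching "lr=0.08 for 16 GPUs x 4 video/gpu"
```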
## Train diff --git a/configs/detection/lfb/README.md index d086343f4d..9f71cbf785 100644 --- a/configs/detection/lfb/README.md +++ b/configs/detection/lfb/README.md @@ -24,7 +24,7 @@ | [lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py](/configs/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | [slowonly_r50_4x16x1](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | 4x16 | 8 | short-side 256 | 20.17 | [log](https://download.openmmlab.com/mmaction/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log) | [json](https://download.openmmlab.com/mmaction/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb_20210301-19c330b7.pth) | | [lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py](/configs/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | [slowonly_r50_4x16x1](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | 4x16 | 8 | short-side 256 | 22.15 | [log](https://download.openmmlab.com/mmaction/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log) | [json](https://download.openmmlab.com/mmaction/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb_20210301-37efcd15.pth) | -- Notes: +:::{note} 1. The **gpus** indicates the number of gpu we used to get the checkpoint. According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, @@ -34,6 +34,8 @@ 4. Before training or testing LFB, you need to infer the feature bank with [lfb_slowonly_r50_ava_infer.py](/configs/detection/lfb/lfb_slowonly_r50_ava_infer.py). For more details on inferring the feature bank, you can refer to the [Train](#Train) part. 5. You can also download the long-term feature bank from [AVA_train_val_float32_lfb](https://download.openmmlab.com/mmaction/detection/lfb/AVA_train_val_float32_lfb.rar) or [AVA_train_val_float16_lfb](https://download.openmmlab.com/mmaction/detection/lfb/AVA_train_val_float16_lfb.rar), and then put them under `lfb_prefix_path`. +::: + ## Train ### a.
Infer long-term feature bank for training diff --git a/configs/localization/bmn/README.md index 7e25d192b7..3042ded98e 100644 --- a/configs/localization/bmn/README.md +++ b/configs/localization/bmn/README.md @@ -37,7 +37,7 @@ | |mmaction_clip |2|75.35|67.38|43.08|32.19|10.73|31.15|5420|3.27|[ckpt](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809-10d803ce.pth)| [log](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809.log) | [json](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809.json) | | [BMN-official](https://github.com/JJBOY/BMN-Boundary-Matching-Network) (for reference)* |cuhk_mean_100 |-|75.27|67.49|42.22|30.98|9.22|30.00|-|-|-| - | - | -- Notes: +:::{note} 1. The **gpus** indicates the number of gpu we used to get the checkpoint. According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, @@ -45,6 +45,8 @@ 2. For feature column, cuhk_mean_100 denotes the widely used cuhk activitynet feature extracted by [anet2016-cuhk](https://github.com/yjxiong/anet2016-cuhk), mmaction_video and mmaction_clip denote feature extracted by mmaction, with video-level activitynet finetuned model or clip-level activitynet finetuned model respectively. 3. We evaluate the action detection performance of BMN, using [anet_cuhk_2017](https://download.openmmlab.com/mmaction/localization/cuhk_anet17_pred.json) submission for ActivityNet2017 Untrimmed Video Classification Track to assign a label to each action proposal. +::: + *We train BMN with the [official repo](https://github.com/JJBOY/BMN-Boundary-Matching-Network), evaluate its proposal generation and action detection performance with [anet_cuhk_2017](https://download.openmmlab.com/mmaction/localization/cuhk_anet17_pred.json) for label assigning. For more details on data preparation, you can refer to ActivityNet feature in [Data Preparation](/docs/data_preparation.md). @@ -86,7 +88,7 @@ You can also test the action detection performance of the model, with [anet_cuhk python tools/analysis/report_map.py --proposal path/to/proposal_file ``` -Notes: +:::{note} 1. (Optional) You can use the following command to generate a formatted proposal file, which will be fed into the action classifier (currently supports SSN and P-GCN, not including TSN, I3D, etc.) to get the classification result of proposals. @@ -94,4 +96,6 @@ Notes: python tools/data/activitynet/convert_proposal_format.py ``` +::: + For more details and optional arguments, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset).
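To make the label-assignment step above concrete, here is a minimal sketch of how video-level classification scores (such as the anet_cuhk_2017 predictions) can be combined with class-agnostic proposals to produce detections: each proposal inherits the video's top predicted classes, scored as proposal score times class score. The data structures and the top-2 cutoff are illustrative assumptions, not the exact format used by `tools/analysis/report_map.py`.

```python
# Sketch of assigning video-level labels to class-agnostic temporal proposals.
# The dict layouts and the top-2 cutoff are illustrative assumptions.
proposals = {  # video id -> list of (start_sec, end_sec, proposal_score)
    'v_test': [(0.0, 12.5, 0.92), (30.1, 48.0, 0.61)],
}
video_cls_scores = {  # video id -> list of (class_label, class_score)
    'v_test': [('Surfing', 0.85), ('Sailing', 0.10)],
}

detections = []
for vid, props in proposals.items():
    # Keep the top-2 predicted classes per video; every proposal gets each
    # label, scored as proposal_score * class_score.
    top_classes = sorted(video_cls_scores[vid], key=lambda x: -x[1])[:2]
    for label, cls_score in top_classes:
        for start, end, prop_score in props:
            detections.append((vid, label, start, end, prop_score * cls_score))

print(detections)
```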
diff --git a/configs/localization/bsn/README.md b/configs/localization/bsn/README.md index a16767ad47..7c40b4d6af 100644 --- a/configs/localization/bsn/README.md +++ b/configs/localization/bsn/README.md @@ -24,13 +24,15 @@ | |mmaction_video |1| None |74.93|66.74|41(TEM)+25(PEM)|0.074(TEM)+0.036(PEM)|[ckpt_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_video/bsn_tem_400x100_1x16_20e_mmaction_video_20200809-ad6ec626.pth) [ckpt_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_video/bsn_pem_400x100_1x16_20e_mmaction_video_20200809-aa861b26.pth)| [log_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_video/bsn_tem_400x100_1x16_20e_mmaction_video_20200809.log) [log_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_video/bsn_pem_400x100_1x16_20e_mmaction_video_20200809.log) | [json_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_video/bsn_tem_400x100_1x16_20e_mmaction_video_20200809.json) [json_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_video/bsn_pem_400x100_1x16_20e_mmaction_video_20200809.json) | | |mmaction_clip |1| None |75.19|66.81|41(TEM)+25(PEM)|0.074(TEM)+0.036(PEM)|[ckpt_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_clip/bsn_tem_400x100_1x16_20e_mmaction_clip_20200809-0a563554.pth) [ckpt_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_clip/bsn_pem_400x100_1x16_20e_mmaction_clip_20200809-e32f61e6.pth)| [log_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_clip/bsn_tem_400x100_1x16_20e_mmaction_clip_20200809.log) [log_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_clip/bsn_pem_400x100_1x16_20e_mmaction_clip_20200809.log) | [json_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_clip/bsn_tem_400x100_1x16_20e_mmaction_clip_20200809.json) [json_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_clip/bsn_pem_400x100_1x16_20e_mmaction_clip_20200809.json) | -Notes: +:::{note} 1. The **gpus** indicates the number of gpu we used to get the checkpoint. According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, e.g., lr=0.01 for 4 GPUs x 2 video/gpu and lr=0.08 for 16 GPUs x 4 video/gpu. 2. For feature column, cuhk_mean_100 denotes the widely used cuhk activitynet feature extracted by [anet2016-cuhk](https://github.com/yjxiong/anet2016-cuhk), mmaction_video and mmaction_clip denote feature extracted by mmaction, with video-level activitynet finetuned model or clip-level activitynet finetuned model respectively. +::: + For more details on data preparation, you can refer to ActivityNet feature in [Data Preparation](/docs/data_preparation.md). ## Train @@ -144,7 +146,7 @@ Examples: python tools/test.py configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py checkpoints/SOME_CHECKPOINT.pth --eval AR@AN --out results.json ``` -Notes: +:::{note} 1. 
(Optional) You can use the following command to generate a formatted proposal file, which will be fed into the action classifier (currently supports only SSN and P-GCN, not including TSN, I3D, etc.) to get the classification result of proposals. @@ -152,4 +154,6 @@ Notes: python tools/data/activitynet/convert_proposal_format.py ``` +::: + For more details and optional arguments, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). diff --git a/configs/localization/ssn/README.md index d73600626a..61faa9108e 100644 --- a/configs/localization/ssn/README.md +++ b/configs/localization/ssn/README.md @@ -20,7 +20,7 @@ year = {2017} |:-:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:-:|:-:|:-:|:-:|---|:--:|:--:| |[ssn_r50_450e_thumos14_rgb](/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py) |8| ResNet50 | ImageNet |29.37|22.15|15.69|[27.61](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)|[21.28](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)|[14.57](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)|6352|[ckpt](https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/ssn_r50_450e_thumos14_rgb_20201012-1920ab16.pth)| [log](https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/20201005_144656.log)| [json](https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/20201005_144656.log.json)| [ckpt](https://download.openmmlab.com/mmaction/localization/ssn/mmaction_reference/ssn_r50_450e_thumos14_rgb_ref/ssn_r50_450e_thumos14_rgb_ref_20201014-b6f48f68.pth)| [json](https://download.openmmlab.com/mmaction/localization/ssn/mmaction_reference/ssn_r50_450e_thumos14_rgb_ref/20201008_103258.log.json)| -- Notes: +:::{note} 1. The **gpus** indicates the number of gpu we used to get the checkpoint. According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, @@ -29,6 +29,8 @@ year = {2017} 3. We evaluate the action detection performance of SSN, using action proposals of TAG. For more details on data preparation, you can refer to thumos14 TAG proposals in [Data Preparation](/docs/data_preparation.md). 4. The reference SSN is evaluated with the `ResNet50` backbone in MMAction, which is the same backbone as ours. Note that the original setting of MMAction SSN uses the `BNInception` backbone. +::: + ## Train You can use the following command to train a model.
diff --git a/configs/recognition/c3d/README.md b/configs/recognition/c3d/README.md index 39829a1408..066af1f100 100644 --- a/configs/recognition/c3d/README.md +++ b/configs/recognition/c3d/README.md @@ -23,7 +23,7 @@ eid = {arXiv:1412.0767} |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| |[c3d_sports1m_16x1x1_45e_ucf101_rgb.py](/configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb.py)|128x171|8| c3d | sports1m | 83.27 | 95.90 | 10 clips x 1 crop | x | 6053 | [ckpt](https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/c3d_sports1m_16x1x1_45e_ucf101_rgb_20201021-26655025.pth)|[log](https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/20201021_140429.log)|[json](https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/20201021_140429.log.json)| -Notes: +:::{note} 1. The author of C3D normalized UCF-101 with volume mean and used SVM to classify videos, while we normalized the dataset with RGB mean value and used a linear classifier. 2. The **gpus** indicates the number of gpu (32G V100) we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. @@ -32,6 +32,8 @@ Notes: 3. The **inference_time** is got by this [benchmark script](/tools/analysis/benchmark.py), where we use the sampling frames strategy of the test setting and only care about the model inference time, not including the IO time and pre-processing time. For each setting, we use 1 gpu and set batch size (videos per gpu) to 1 to calculate the inference time. +::: + For more details on data preparation, you can refer to UCF-101 in [Data Preparation](/docs/data_preparation.md). ## Train diff --git a/configs/recognition/csn/README.md b/configs/recognition/csn/README.md index acae7e42fb..32d3e240bc 100644 --- a/configs/recognition/csn/README.md +++ b/configs/recognition/csn/README.md @@ -43,7 +43,7 @@ doi = {10.1109/ICCV.2019.00565} |[ipcsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ipcsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py)|short-side 320|x| ResNet152 | IG65M | 82.5 | 95.3 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-c3be9793.pth) | x | x | |[ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py](/configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py)|short-side 320|8x4| ResNet152 | IG65M|80.14|94.93|x|8517|[ckpt](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20200803-fc66ce8d.pth)|[log](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/20200728_031952.log)|[json](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/20200728_031952.log.json)| -Notes: +:::{note} 1. The **gpus** indicates the number of gpu (32G V100) we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, @@ -53,6 +53,8 @@ Notes: 3. The validation set of Kinetics400 we used consists of 19796 videos. 
These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. 4. The **infer_ckpt** means those checkpoints are ported from [VMZ](https://github.com/facebookresearch/VMZ). +::: + For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). ## Train diff --git a/configs/recognition/i3d/README.md b/configs/recognition/i3d/README.md index d6fac8dd8e..d7bc5fe1ff 100644 --- a/configs/recognition/i3d/README.md +++ b/configs/recognition/i3d/README.md @@ -43,7 +43,7 @@ |[i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb.py)|short-side 256p|8x4| ResNet50 |ImageNet|73.37|91.26|x|4944|[ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb_20200815-17f84aa2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034909.log)|[json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034909.log.json)| |[i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb.py)|short-side 256p|8x4| ResNet50 |ImageNet|73.92|91.59|x|4832|[ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb_20200814-7c30d5bb.pth)|[log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/20200814_044208.log)|[json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/20200814_044208.log.json)| -Notes: +:::{note} 1. The **gpus** indicates the number of gpu we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, @@ -51,6 +51,8 @@ Notes: 2. The **inference_time** is got by this [benchmark script](/tools/analysis/benchmark.py), where we use the sampling frames strategy of the test setting and only care about the model inference time, not including the IO time and pre-processing time. For each setting, we use 1 gpu and set batch size (videos per gpu) to 1 to calculate the inference time. 3. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. 
+::: + For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). ## Train diff --git a/configs/recognition/r2plus1d/README.md b/configs/recognition/r2plus1d/README.md index bb2cf2ea50..0ed3e0241a 100644 --- a/configs/recognition/r2plus1d/README.md +++ b/configs/recognition/r2plus1d/README.md @@ -25,7 +25,7 @@ |[r2plus1d_r34_8x8x1_180e_kinetics400_rgb](/configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py) | short-side 320|8x2| ResNet34|None |68.68|88.36|1.6 (80x3 frames)|5019|[ckpt](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_8x8x1_180e_kinetics400_rgb_20200618-3fce5629.pth)| [log](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r21d_8x8.log)| [json](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_8x8_69.58_88.36.log.json)| |[r2plus1d_r34_32x2x1_180e_kinetics400_rgb](/configs/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb.py) |short-side 320|8x2| ResNet34|None |74.60|91.59|0.5 (320x3 frames)|12975| [ckpt](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r2plus1d_r34_32x2x1_180e_kinetics400_rgb_20200618-63462eb3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r21d_32x2.log)| [json](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r2plus1d_r34_32x2_74.6_91.6.log.json)| -Notes: +:::{note} 1. The **gpus** indicates the number of gpu we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, @@ -33,6 +33,8 @@ Notes: 2. The **inference_time** is got by this [benchmark script](/tools/analysis/benchmark.py), where we use the sampling frames strategy of the test setting and only care about the model inference time, not including the IO time and pre-processing time. For each setting, we use 1 gpu and set batch size (videos per gpu) to 1 to calculate the inference time. 3. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. +::: + For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). 
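The inference-time protocol described in note 2 above (one GPU, batch size 1, model forward only, IO and pre-processing excluded) boils down to the loop below. This is only a sketch of the protocol, not `tools/analysis/benchmark.py` itself; the stand-in model and the clip shape are illustrative assumptions.

```python
import time

import torch
import torch.nn as nn

# Stand-in 3D recognizer; a real benchmark builds the model from its config.
model = nn.Sequential(
    nn.Conv3d(3, 64, kernel_size=3, padding=1),
    nn.AdaptiveAvgPool3d(1),
    nn.Flatten(),
    nn.Linear(64, 400),
).eval()

# Batch size (videos per gpu) = 1, per the protocol; the clip shape
# (1 x 3 x 8 x 224 x 224) is an illustrative assumption.
clip = torch.randn(1, 3, 8, 224, 224)

with torch.no_grad():
    for _ in range(5):
        model(clip)  # warm-up iterations, excluded from the timing
    start = time.time()
    for _ in range(20):
        model(clip)  # forward pass only: no IO, decoding or augmentation
    # On GPU, call torch.cuda.synchronize() before reading the clock.
    print(f'avg forward time: {(time.time() - start) / 20:.4f} s/clip')
```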
## Train diff --git a/configs/recognition/slowfast/README.md b/configs/recognition/slowfast/README.md index 09b055c478..f7cad0827d 100644 --- a/configs/recognition/slowfast/README.md +++ b/configs/recognition/slowfast/README.md @@ -36,7 +36,7 @@ |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| |[slowfast_r50_16x8x1_22e_sthv1_rgb](/configs/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb.py)|height 100|8|ResNet50|Kinetics400|49.24|78.79|x|9293|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/slowfast_r50_16x8x1_22e_sthv1_rgb_20210630-53355c16.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/20210606_225114.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/20210606_225114.log.json)| -Notes: +:::{note} 1. The **gpus** indicates the number of gpu we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, @@ -44,6 +44,8 @@ Notes: 2. The **inference_time** is got by this [benchmark script](/tools/analysis/benchmark.py), where we use the sampling frames strategy of the test setting and only care about the model inference time, not including the IO time and pre-processing time. For each setting, we use 1 gpu and set batch size (videos per gpu) to 1 to calculate the inference time. 3. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. +::: + For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). ## Train diff --git a/configs/recognition/slowonly/README.md b/configs/recognition/slowonly/README.md index 92672e5ec6..974355056a 100644 --- a/configs/recognition/slowonly/README.md +++ b/configs/recognition/slowonly/README.md @@ -97,7 +97,7 @@ In data benchmark, we compare two different data preprocessing methods: (1) Resi |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| |[slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.py)|8|ResNet50|ImageNet|46.63|77.19|7759|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_r50_8x4x1_64e_sthv1_rgb-34901d23.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_r50_8x4x1_64e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_r50_8x4x1_64e_sthv1_rgb.json)| -Notes: +:::{note} 1. The **gpus** indicates the number of gpu we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. 
According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, @@ -105,6 +105,8 @@ Notes: 2. The **inference_time** is got by this [benchmark script](/tools/analysis/benchmark.py), where we use the sampling frames strategy of the test setting and only care about the model inference time, not including the IO time and pre-processing time. For each setting, we use 1 gpu and set batch size (videos per gpu) to 1 to calculate the inference time. 3. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. +::: + For more details on data preparation, you can refer to corresponding parts in [Data Preparation](/docs/data_preparation.md). ## Train diff --git a/configs/recognition/tanet/README.md index 52be55ed6a..45ef911cba 100644 --- a/configs/recognition/tanet/README.md +++ b/configs/recognition/tanet/README.md @@ -28,7 +28,7 @@ |[tanet_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb.py)|height 100|8|TANet|ImageNet|47.45/49.69|76.00/77.62|7127|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/tanet_r50_1x1x8_50e_sthv1_rgb_20210630-f4a48609.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log)|[json](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log.json)| |[tanet_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb.py)|height 100|8|TANet|ImageNet|47.73/50.41|77.31/78.47|7127|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb_20210630-7c19303c.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/20210607_155335.log)|[json](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/20210607_155335.log.json)| -Notes: +:::{note} 1. The **gpus** indicates the number of gpu we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, @@ -37,6 +37,8 @@ Notes: 3. The values in columns named after "reference" are the results got by testing on our dataset, using the checkpoints provided by the author with same model settings. The checkpoints for reference repo can be downloaded [here](https://drive.google.com/drive/folders/1sFfmP3yrfc7IzRshEELOby7-aEoymIFL?usp=sharing). 4. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB).
The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. +::: + For more details on data preparation, you can refer to corresponding parts in [Data Preparation](/docs/data_preparation.md). ## Train diff --git a/configs/recognition/timesformer/README.md b/configs/recognition/timesformer/README.md index 710d9a4f38..4c4ac9cedc 100644 --- a/configs/recognition/timesformer/README.md +++ b/configs/recognition/timesformer/README.md @@ -25,7 +25,7 @@ |[timesformer_jointST_8x32x1_15e_kinetics400_rgb](/configs/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb.py) | short-side 320 | 8 | TimeSformer | ImageNet-21K | 77.01 | 93.08 | x | 25658 | [ckpt](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb/timesformer_jointST_8x32x1_15e_kinetics400_rgb-0d6e3984.pth) | [log](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb/timesformer_jointST_8x32x1_15e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb/timesformer_jointST_8x32x1_15e_kinetics400_rgb.json)| |[timesformer_sapceOnly_8x32x1_15e_kinetics400_rgb](/configs/recognition/timesformer/timesformer_sapceOnly_8x32x1_15e_kinetics400_rgb.py) | short-side 320 | 8 | TimeSformer | ImageNet-21K | 76.93 | 92.90 | x | 12750 | [ckpt](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb-0cf829cd.pth) | [log](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.json)| -Notes: +:::{note} 1. The **gpus** indicates the number of gpu (32G V100) we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, @@ -33,6 +33,8 @@ Notes: 2. We keep the test setting with the [original repo](https://github.com/facebookresearch/TimeSformer) (three crop x 1 clip). 3. The pretrained model `vit_base_patch16_224.pth` used by TimeSformer was converted from [vision_transformer](https://github.com/google-research/vision_transformer). +::: + For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). ## Train diff --git a/configs/recognition/tin/README.md b/configs/recognition/tin/README.md index adf49412d8..b16224c676 100644 --- a/configs/recognition/tin/README.md +++ b/configs/recognition/tin/README.md @@ -35,7 +35,7 @@ Here, we use `finetune` to indicate that we use [TSM model](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20200607-af7fb746.pth) trained on Kinetics-400 to finetune the TIN model on Kinetics-400. -Notes: +:::{note} 1. 
The **reference topk acc** are got by training the [original repo #1aacd0c](https://github.com/deepcs233/TIN/tree/1aacd0c4c30d5e1d334bf023e55b855b59f158db) with no [AverageMeter issue](https://github.com/deepcs233/TIN/issues/4). The [AverageMeter issue](https://github.com/deepcs233/TIN/issues/4) will lead to incorrect performance, so we fix it before running. @@ -47,6 +47,8 @@ Notes: 4. The values in columns named after "reference" are the results got by training on the original repo, using the same model settings. 5. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. +::: + For more details on data preparation, you can refer to Kinetics400, Something-Something V1 and Something-Something V2 in [Data Preparation](/docs/data_preparation.md). ## Train diff --git a/configs/recognition/tpn/README.md b/configs/recognition/tpn/README.md index 0d65841127..d3d513118e 100644 --- a/configs/recognition/tpn/README.md +++ b/configs/recognition/tpn/README.md @@ -28,7 +28,7 @@ |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| |[tpn_tsm_r50_1x1x8_150e_sthv1_rgb](/configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py)|height 100|8x6| ResNet50 | TSM | 50.80 | 79.05 | 8828 |[ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/tpn_tsm_r50_1x1x8_150e_sthv1_rgb_20210311-28de4cd5.pth) |[log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/20210311_162636.log)|[json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/20210311_162636.log.json)| -Notes: +:::{note} 1. The **gpus** indicates the number of gpu we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, @@ -38,6 +38,8 @@ Notes: 3. The values in columns named after "reference" are the results got by testing the checkpoint released on the original repo and codes, using the same dataset with ours. 4. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. +::: + For more details on data preparation, you can refer to Kinetics400, Something-Something V1 and Something-Something V2 in [Data Preparation](/docs/data_preparation.md). 
## Train diff --git a/configs/recognition/trn/README.md index 223d769168..fa44f0b06f 100644 --- a/configs/recognition/trn/README.md +++ b/configs/recognition/trn/README.md @@ -27,7 +27,7 @@ |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| |[trn_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py) | height 256 | 8 | ResNet50 | ImageNet | 48.39 / 51.28 |76.58 / 78.65 | 11010 | [ckpt](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/trn_r50_1x1x8_50e_sthv2_rgb_20210816-7abbc4c1.pth) | [log](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210816_221356.log)| [json](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210816_221356.log.json)| -Notes: +:::{note} 1. The **gpus** indicates the number of gpu we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, @@ -36,6 +36,8 @@ Notes: 3. In the original [repository](https://github.com/zhoubolei/TRN-pytorch), the author augments data with random flipping on something-something dataset, but the augmentation method may be wrong due to direction-sensitive actions, such as `push left to right`. So, we replaced `flip` with `flip with label mapping`, and changed the testing method `TenCrop`, which has five flipped crops, to `Twice Sample & ThreeCrop`. 4. We use `ResNet50` instead of `BNInception` as the backbone of TRN. When training `TRN-ResNet50` on the sthv1 dataset in the original repository, we get top1 (top5) accuracy 30.542 (58.627) vs. ours 31.62 (60.01). +::: + For more details on data preparation, you can refer to - [preparing_sthv1](/tools/data/sthv1/README.md) diff --git a/configs/recognition/tsm/README.md index d9b2c0fcfe..e1042b2d74 100644 --- a/configs/recognition/tsm/README.md +++ b/configs/recognition/tsm/README.md @@ -101,7 +101,7 @@ |[tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb](/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb.py)|8|ResNet50|Kinetics400|94.50|99.58|10389|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb_20210630-1fae312b.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/20210605_182720.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/20210605_182720.log.json)| |[tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb](/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb.py)|8|ResNet50|Kinetics400|94.58|99.37|10389|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb_20210630-8df9c358.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/20210605_182720.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/20210605_182720.log.json)| -Notes: +:::{note} 1. The **gpus** indicates the number of gpu we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default.
According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, @@ -136,6 +136,8 @@ test_pipeline = [ 5. When applying Mixup and CutMix, we use the hyperparameter `alpha=0.2`. 6. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. +::: + For more details on data preparation, you can refer to corresponding parts in [Data Preparation](/docs/data_preparation.md). ## Train diff --git a/configs/recognition/tsn/README.md index 1752ad93a3..c12b582df8 100644 --- a/configs/recognition/tsn/README.md +++ b/configs/recognition/tsn/README.md @@ -174,7 +174,7 @@ In data benchmark, we compare: [2] The performance of HATNet and HATNet-multi are from the paper [Large Scale Holistic Video Understanding](https://pages.iai.uni-bonn.de/gall_juergen/download/HVU_eccv20.pdf). The proposed HATNet is a two-branch convolutional network (one 2D branch, one 3D branch) and shares the same backbone (ResNet18) with ours. The inputs of HATNet are 16 or 32 frames long video clips (which is much longer than ours), while the input resolution is coarser (112 instead of 224). HATNet is trained on each individual task (each tag category) while HATNet-multi is trained on multiple tasks. Since there are no released codes or models for HATNet, we just include the performance reported by the original paper. -Notes: +:::{note} 1. The **gpus** indicates the number of gpu we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, @@ -184,6 +184,8 @@ Notes: 3. The values in columns named after "reference" are the results got by training on the original repo, using the same model settings. 4. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. +::: + For more details on data preparation, you can refer to - [preparing_ucf101](/tools/data/ucf101/README.md) diff --git a/configs/recognition/x3d/README.md index 42509e6a09..ff6627f109 100644 --- a/configs/recognition/x3d/README.md +++ b/configs/recognition/x3d/README.md @@ -26,11 +26,13 @@ [1] The models are ported from the repo [SlowFast](https://github.com/facebookresearch/SlowFast/) and tested on our data. Currently, we only support the testing of X3D models; training will be available soon.
-Notes: +:::{note} 1. The values in columns named after "reference" are the results got by testing the checkpoint released on the original repo and codes, using the same dataset with ours. 2. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. +::: + For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). ## Test diff --git a/configs/recognition_audio/resnet/README.md b/configs/recognition_audio/resnet/README.md index 9acdb80591..a873edaeed 100644 --- a/configs/recognition_audio/resnet/README.md +++ b/configs/recognition_audio/resnet/README.md @@ -22,7 +22,7 @@ |[tsn_r18_64x1x1_100e_kinetics400_audio_feature](/configs/recognition_audio/resnet/tsn_r18_64x1x1_100e_kinetics400_audio_feature.py)|1024|8| ResNet18 | None |19.7|35.75|x|1897|[ckpt](https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/tsn_r18_64x1x1_100e_kinetics400_audio_feature_20201012-bf34df6c.pth)|[log](https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/20201010_144630.log)|[json](https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/20201010_144630.log.json)| |[tsn_r18_64x1x1_100e_kinetics400_audio_feature](/configs/recognition_audio/resnet/tsn_r18_64x1x1_100e_kinetics400_audio_feature.py) + [tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py)|1024|8| ResNet(18+50) | None |71.50(+0.39)|90.18(+0.14)|x|x|x|x|x| -Notes: +:::{note} 1. The **gpus** indicates the number of gpus we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, @@ -30,6 +30,8 @@ Notes: 2. The **inference_time** is got by this [benchmark script](/tools/analysis/benchmark.py), where we use the sampling frames strategy of the test setting and only care about the model inference time, not including the IO time and pre-processing time. For each setting, we use 1 gpu and set batch size (videos per gpu) to 1 to calculate the inference time. 3. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. +::: + For more details on data preparation, you can refer to ``Prepare audio`` in [Data Preparation](/docs/data_preparation.md). 
## Train diff --git a/configs/skeleton/posec3d/README.md index c99e109dcd..7ce507cdfd 100644 --- a/configs/skeleton/posec3d/README.md +++ b/configs/skeleton/posec3d/README.md @@ -75,13 +75,15 @@ | [slowonly_r50_u48_240e_ntu120_xsub_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb.py) | limb | 8 x 2 | SlowOnly-R50 | 85.7 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb-803c2317.pth?) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.json) | | Fusion | | | | 86.9 | | | | -Notes: +:::{note} 1. The **gpus** indicates the number of gpu we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, e.g., lr=0.01 for 8 GPUs x 8 videos/gpu and lr=0.04 for 16 GPUs x 16 videos/gpu. 2. The values in columns named after "reference" are the results got by testing on our dataset, using the checkpoints provided by the author with same model settings. The checkpoints for reference repo can be downloaded [here](https://drive.google.com/drive/folders/1sFfmP3yrfc7IzRshEELOby7-aEoymIFL?usp=sharing). +::: + ## Train You can use the following command to train a model. diff --git a/demo/README.md index 3ca4f08c1d..7fc362d1af 100644 --- a/demo/README.md +++ b/demo/README.md @@ -253,7 +253,9 @@ Optional arguments: - `DRAWING_FPS`: Upper bound FPS value of the output drawing. If not specified, it will be set to 20. - `INFERENCE_FPS`: Upper bound FPS value of the model inference. If not specified, it will be set to 4. -**Note**: If your hardware is good enough, increasing the value of `DRAWING_FPS` and `INFERENCE_FPS` will get a better experience. +:::{note} +If your hardware is good enough, increasing the value of `DRAWING_FPS` and `INFERENCE_FPS` will give a better experience. +::: Examples: @@ -287,12 +289,14 @@ or use checkpoint url from `configs/` to directly load corresponding checkpoint, --average-size 5 --threshold 0.2 ``` -**Note:** Considering the efficiency difference for users' hardware, Some modifications might be done to suit the case. +:::{note} +Considering the efficiency differences across users' hardware, some modifications might be needed to suit your case. Users can change: 1). `SampleFrames` step (especially the number of `clip_len` and `num_clips`) of `test_pipeline` in the config file, like `--cfg-options data.test.pipeline.0.num_clips=3`. 2). Change to the suitable Crop methods like `TenCrop`, `ThreeCrop`, `CenterCrop`, etc. in `test_pipeline` of the config file, like `--cfg-options data.test.pipeline.4.type=CenterCrop`. 3). Change the number of `--average-size`. The smaller, the faster.
+::: ## Long video demo diff --git a/docs/_static/css/readthedocs.css b/docs/_static/css/readthedocs.css new file mode 100644 index 0000000000..c8b2f6bdda --- /dev/null +++ b/docs/_static/css/readthedocs.css @@ -0,0 +1,6 @@ +.header-logo { + background-image: url("../images/mmaction2.png"); + background-size: 130px 40px; + height: 40px; + width: 130px; +} diff --git a/docs/_static/images/mmaction2.png new file mode 100644 index 0000000000000000000000000000000000000000..f0c759bb78c5424b4394d18a5ba833a8c9f43add GIT binary patch literal 31100 [base85-encoded binary payload for docs/_static/images/mmaction2.png omitted]
z<>GvG?YpkZ_NM7xVCKjrf+=8TV{{oRB>1lk9hq~5E86x`EK0f&aOSrpAmIZfZgLS# z+a?#G0-=1F_0I4R7(IaA#;Wi7cO=f51=`Fuu`qjczl|$L^u?4jhduKD#G;3~=?-zv z8*0SrZn!r5rf%u5{1fhR{lK|0hhf_WWz*;7PHhh(Iv;eXsJ!)4AGc#XwOd!Ugtt%6G3&QL zY(MhFf&g^w7oV9F0i=ILer<-4$(*v1WX~CzWC1j4{iwM|*#l@nOsT`}B23AmX~ED> zoWKRSNF5R?PxafP7!#B|{>0-lr(yBh^ zimrEHT@^Maj2Y-NaRWc_nRKp6vOIo57$k?S8)h6gafv+6soC+u2{G7}ACcQm4@CVx z&7Lb8qXa>5AL`HNkjv%rJ z{2`9Y_?g7~#`4*}$lu#T-mJG|8DltWAkGSxJsuQ7VXzJ$$CK?Lqs3}LvA~f zVgzPU*Ylm8CQvL z?f#@ka)3lXX+ng{{@XUOyXk)hK$2gpq`2ea*XS@W7iKTBUkE9Y2GfbpgauIg5vN?! z{8kU|hGlb=Jha96jUgAk`hPwFv6n*8|LD%>hjgP957Q$<9BVs&`+^MHLYBf79l`?q zfDn{mqvw#6R5!K%pI(qR5s8_fz;e#H;SMp=|c(X04|`bq#yA? zF147%(Hu2C+L7)5A%vY69j-1+07fGAv$qFLJ{=PR9^T%nWQ}I}Gf=4yfsI4{Fknxe z%cQE0(FyZkNo+RJZ-b+q{2?4)u=J3?lTF@S!Y zJ%gXfScQ35ARV{#yR?s@o~RD(Cx+Z9S(21eR(Qak`if=u032()urL_;wg36NNRm5_ zT;Ty>yv{i4Ba@fpk|6%7g*H`7gC<&79|g46o_&?z=Vf`Zm;Ty ze}vZQm>??5D69yk(Yr&8QL}bgg||UNK`A229r2suK>+n;Xhyb?t-@X2ig;3ZT(3=U?Wp>w4EfBP^2(>9Z7HywG{`6Fx9Ab-LdI$ezFJWYgE z0Yf9G8L`*nSBzL=uHu_f80bX=Ikhz*^u#$?k8rN-v};&FtErH2wy;1-iRhxSLPLGGHZWT*9Xu)THNFnz+Oq&90$UKRS{uM31qJmn zio=AcJnrs@VJxAKFiq`5j#SO~c+Q&+v-W?vt&_?R+3#y=M^_rk4^K`?ILK(wauS)(s>WSIVk-yD8l^9%v4A1)r9Z*fX&#m*!&+s}c$K=+~L<5s} zA4~MYZC6tf_j)~h1u8J&2lN!aXk%0l4ugeKSijmlWHVf)?5OaL%|E^fJ9PrYO9q$r zJr}KX7c9_#mmuf%P1D4hj4g{SJ@DD?Ps5={khyZ~z?KrlsqzeXP+|&N(gZ==QNH}_U`g=J77j5b*J`!zsUwvi zsg(q{-oR znf%@jO}0Rjo@RSynfZvvV~O?OZjC`N^?unZl?iw45UaT@?K! zt-QRn51}dT6vD9+viLm(Npy|uIb57}3uP89E0dmt>v0nXpv(=I`6jNnMwsF3la;_w zUyE)DBrS+PC2itn9rnSyJyR916jCoU8&lvE{CB)${SNrf#-0*B2KTfj25hGP`@;(W`aS&_u$<-*oPx`I+v-9n{GCPTBxRdF||UPP*}G4rk9Mxg)$EXIeO&xumyV z;CPM~2f}GO!pnXE<3B?AXl4m??g)S8ms1+~m#1+c22iib06X2#dY8EDc59)EhY!w* zO87BLnxMx1UGqq_=qB(r7$l6sZIxS60qZ|tf`e~R|I zrlg4v*23fOf_rLgOo_6I(fX>|^}t_Q1gQaYl$d3X=5?WC+m zE^1am{fHEM((2HJ*h*0%`Op}LonS~B&97O&$E;wM%AD4o0w8%pB+^FYw>U4 zK&RR({<*sV2t8D|jR|a*Tv5qNjavtsLX48qo{1PGm`QNeg5F4Yrc^D$*~TkyZ}Dc- zfS3mvng`!pv~qq35ukhV-wTAQQfE=O)O>7?fttT_rqY9Hg4qc_5h!!c)kbJ4;e!xi zr6ZbfHu+~49dO(oMCqu}wz>9rIBMqC6Z;{z{XeTj6*LE2;=HRz%*^Soz>d)ZA+gi# z6DdZ=NABurdOM-ev8(lVsOj6J^V%A_2B8zON6N;jj5O$c4P0uC7L8fqFjrX=fEW|a zD(~NraYmg>M6S4&LCwoF=@i1$pp9VUA-xM625@1N~e;Gp-=$H69q#t$12 z{~~dZa}x&=gTiA^7}p16mQ@B2cHlC8;iRv%;$DW35%`Hs{H*EAFUDrXKDfoy%Hr*Bc=CUZ2i#3jI z#8_!92CNIE#W}|?FLk>{dcpxgau8ufDCZfzzr)psZL;T@=bsg~jkcGrZmQZO#V#Do z?tjd#i&11*t!!C3`mY|nr?JS(<;1hES&EBsLTSc0AR__Vg9LCvI9&>`M3yF+80d<0 z?6G08sh#3QbL~;NyA49GBzq_z7wJjNWC|v4x^g&yNUdw>U&ugf5FrUHA%iV5XbnjC zp3xQOnnptdfCo*B6w&Po?q_7k1!3^@(QW-NCY{QD<>DM z!h4Pkbi`S=EOT51xWc+fcfzuvX>JVDGNj4{cwyK#7%YZG8%gCRAS3X>h+2NJ@}2!L zVn1tZoM-CE<&pZ!352HfZ37@FL`GC@`0q@+H zgjhZ42|rW8zKgT!RjdQfSeeLm;ippo3x&B0o~S8-Ew{vH#e8r61)*Cc+v?!}h9Aeo zduS0j2-{Q6xveJQ7S#ls&>+H+g|o@J1>MMO6o;PxXg;_hprGi^AV^j%*iUF5%=rfF zu0@Jebv)4e9NhEtH2>+&E1^Gff-)IJLl0&ka)kxs2c`*JL+%Kxf_Q|)I1VJ!(Y{CN z+@vgI+eqihuAON19N29`Vli#M^Kbohk7QwlxP{zH#^wr;3zgRhV(|4UwJ5&w^<>YJ zbcTuAAjx>&CBHAfM(u(7%~;6z%qaJb%D{KP0*uDueO$2)16p)$N7-6oU8!55>X2?N}dcVzC` zRX>)>>_}!++dz2};MB3ge;7Ps7#$npPs~!>0dNH!qKe=ZBIwfA(`8j8Wyf@x4rlrn zYM)s~daFj^vH#9uiXtSIQ#xxaYqMYS;abtD={X%7;jo-|U$TaudWis1Av%OyRx=iH zZ-{-KBc2z&c1P#hHY_F_>>$Z4oP z8j>ud1V6t(=ZKs9O@m+9kkQrD7%X+;yY0k@eAb8(mdi)hrHwFnKp*sn%UpETn+YS^ zW!(~GCVz^9>EhoSKRKeAAgo=Iq?5wWh<1O`kWhC>AJG}mpW?W%+P$ghvXW2GXS{8>%WI8WX_{HKwN*89l)&;)=g+)SbDjtAb^Vo_AYCv!JWvLt*V zL=Sxs3b3~CZc%{Y(jLAc4*>^cDp@0QsCb&CWHY~EY&LuDjKBDAh@pvUey6f@;jlL@ zxf92^Fh*raD?b?;cMx2xwWpr*r8V0~)R%0+AyS%qHip{G9mEbXpl)vJrDG2YigEBO z7D&aGO64garjqwBY!CgaqR$ZargW~&)UH1_tz*}>!n!6|oUT|N(1sgiHV^!=zsW?Z z`EDuHC2#!$M#axER6Gi^)i4-5lRAmkuMtmid-uvS)6jo5X0d^?siYy1cw=EM_RuJn 
zVIJE^+m9K~t6t8wGc(V3za_x|vB&wj5aJS%bfI0cBtT_xyYj6Xc;{G% zVswToUHeHMu*{^zp6Jjqtha9sKtGGUx@fjFiAEEEPRuG$y9NvGuonZ44-u7F)h;wn z;aVKUOCfDn^#`(TKcE=9o~Jd-x&OjkhR&jgB!vpepFl2Dw_PQ~olu>jrJb~scul5o zDb?=AlhB!^9l?r?S*capn$`^$sF$MI87&L}bz$`+#L~LE(+Pf@P?dmbEDI59-JX@E zQFnO3zA3-;UN>fMTBk95gvG#vL@mjqTWHswTJ0f3dXh2RBwT4ER3(-A=c*Z|%bHt> zqod2p;}Nljw^pgFR@d47R~m9zywHGt)4Q>;8S3K*+g$Q1dk;bIXvKxs=MVK*eOA4w zTS!Jr^<5U;TYR|L>Y;acx9s^wMHj4Z(B$lEu5OjB5`*J35N0pJio@S+0K_VJn&P-!)m7VDjVA?MYU~-WR2ThzvLf_ zTmPHj1xT(h5^c{blB-s+06+oq49QBcbl`zV}IFk)2PcqHmt!ntE+0Su@(tjS+W(z zG5M!w^684v`fqzuo%YrPFE$!|nfH%t9yyyPS_naF8HIF`wFq=Y`{L3`4(~8`c3c4)`31`>BeryOpINYRZ&pdQ2doR1(Z#h;T8B@HZxHrt-~FKHe!I(eXQuALaC3rgcMy3T8H4Pg6`+=x;4 zq(HQaQT-X(U?MN6?>5JmeUcAjba8_IoKqWV7!#@v=%63Rrg~}6t$vm@Sv6I?FhT2l zCay_$;PE2=$WspA_CneJT7I(5}&-f6iY9l>mIg`H}j#-v8pn{HjUDu-xt4a~N(e z%!mLMfp9PCg25BZdbCYl^Usbp!XX%oCa>hh?+^Q8Yl_f)qZ|sBznhb%z(teGf>KwP zS3b25BfIrInOpVKwD9sp z)$|PnRwrLGWZ7PwAU9SoQt|2t>Oi)b8z3z&8wPsG<9?I+NIM1w(|dk8r0-`g3y+uF z$WDB-%1X6H@4)Zdz3ZMH?#v=nzOjx6p!lIL<_On=oaVH#=cfb2t*>i-PN6YNI+3-z znugBR2m*aDM@yDi=wRU@ntl2bw!HnM$P$FQ18J=O)gfATv*NbdJ2e?rmBXQKC*~2= zL(BM+tDUL(A(l$t?RGG6iU|{<*25~0qE%WI$!6Pq4<~Y%*Q<-$FGlI)xlI-|O~H6C zz=qzPf(!Eeo9-I(4BnY>tcg#IO;|Mk`FBKh&-IMb3RGcG6uuYt0L2m3x!Pc6Cq^-e*&BALHHLNpmX@~pX#e$h@rFP_h0nM&!xn##U#VSJxIM8b;KWs5B~CYdB%??<9~eYIrd*0wqAL6eyQw zBlDgS4tG%jn{Xmyf?{!nEjO>}fv!r|mO0CY!3hs<;-~Z8s>bdsA-eSfn5=zPB`&H6 zSIS;0-C&Bo#y6QT*Q((o6yc&Cxp${mK8?v6kb)OOTdO*#GgvO;sQuJbe49Jlbx!F0 zPtH3bzXps}J3MUKKry}#lp!XGZn8Wi)p{}npT-wY$sB40_GV9*gIAJ{WWRY5@?m-; z(dSl7cmq-LW5M1q`_w3~{meFZl-@B%n9VS%la(x-u%jeQg;D-@=D&jk12LmMwIq+D zD6h?!pMB3IlI2xmxfyX-HpDSD6KJTkbld{Gi!{AcqrJrk&W;zgW8D!!&%7)9TO1WQ zx_v1AfhW8E&Iry7=-WNL^e_{${2+bwk&-Pi)m@lhiU4Ad3Zx= z^@>BSJl(@?jxm2LHb-z}dus3v4+v$4N0xwj<9D)}Bpl!a8X;(SAb!7wd9b{&V_=;2 zxg&&*_$_p1EwaK?Pq$+Mz5_!r-x8G`68^qAUbS>EM)Prb;=j>@nrXdT-TLT}?U;-? z043V=vZQrv&_T^Xu4lyOI&H0>R=16Xed=Fd(Kb*V3o;jxlcS4bI-L`$Np_NBs;wYK z@F!ogPqu(>DscKZV}OvDF>*#HJ*R&NPOn!7HrwrT>`Et~(av>L};g4>;DzUG1v zMKyL-)R^=v1z|gD=bO&?FyCC*aqN*e1cNraUEX-?T&cdsU3tut60(QWPsS%2aziX$ zb&GPOZZGyY$wp4G!Zt;=&Hx#=WBKDoSg+zYQPIA7FxPF917%uo)ET!CR}4ZbEQ8%? 
zH1X`Gbzv-*wto;O6{9X3Y`HSyILw1u29UPkf|M_9YV!_=@~osg@KUKQ=1RJ2Ii?;K=vGiwHRfm0LW$#As+?rZI;VH+&6I?dE-hDk4bmIW>MTXZ{EMCGhw@p@7 zb8?k-arqz!uLYB3wOs1Tt3E#a_rb4a#56A>c#V&X)NI~f4hQ48X>NX8%ng`AH*wYa ze8{?%Ker!?O|1U*p(62HjV-vSeFL9W-_96gOkUA^nv&tHGsosk@SBf9mMR@4CK0;O z7D^BTo~H6}1+=4DXIHM`mB9B0d4S4-=w3rM)Ago;-e9(C7GJhjsqf~h`eeZ?#*KS> zUZ4jqL^4uI(e_HoIi++=0*-n!48aOgqg7B2$^z)+f{{MjzrPKoKLpO@Pw`bBeW$tCI-LfWv$^yb0GTVZ=UcQZjHW3k9>P?0PDal*us@FV7 zEfuPzY5I9XTvNGVr;$cbj@3$01Eox3r_EOiNY7Gs{fw=~yF?2p%p~_6GR`JBr$ziZ zQR_>!#3P13DyYq2=f7a4`!^WFOH zc>-g=0`~~>VUD(iaLgO3*QxwQ?_2YI9mPl)KGvuG?U|04&fCKKw}SMD8feAJS9H#R z$hvLE${Q^wo)&DtAfEEKZ-x&c3ol$89XPpzD+e_Im@%hR0j6DU4a~m>m6>%W~ za^Fvnu2-`1N>C)5lpCR`%aaYx8?ed6Hw>Lxy-u{#6m^v?;);nKs|XiX{XVb^<=SUwXHvHa?TE8{JOLdb1CvPkp5f!_COnf``cPf7 zy#L-|D3rCs16j@%hB$Q@;%vIF;}i}p;g8)?E#oSp$*rW8I>Gjbz%*z(+;CTw2pGWT zg@nK1&=jEb5@>@rFid4Cx^X_C;WQx}d3Pn{ZCiUyNUf+CQiNnEVEwQkX5O3Js?ZuZ z32@eI>@+A#usg|)mWr5Q1*|7OjkKy;*5UB;kCow#N#iRz82Q4cyg`R^6y9@|;wg@m zD&!|7s@TL7KNzUtrycdD*4xhq`JoC*8>i&-xzw`3lK(P5`-K}jWG|Um_TcWo2h94c zsXsu%z5}nVZC~#qCdcE#x(HVo=Qb;J4-~HCMCivPqU3)xr^@P@Jei|0FaiW&`n|AX zl)%4o`PL*^(K-!AE7XPsM6X3MPIr^^PRN0D9|Y;U2~(We4r1vggyT})@PRMbZ&nWF zOdh+!3W%wS#vAR^O8de}JdZ&OW=^zD!;8Oe#KY#nan>qILy`LOMS*BJi)ESl<9Mv| z8cdQk**=%-Er-j>q_&S15TQsV?{_y5VjG*zE~H)5?wUmx!{3Hz9O0nHVXM=(8lrm# zTNOL`(rXI8ql);>7W<1ZGTR=wQ=7-Uij@LeLC=1rD-@%4cB($2V^hcUKN6606peSM z9x3Ho=mU|mkmD15Jc^QxhBC7qcp>i-bgB&6c2z#CsIHO?Ot${|&9mf?_8ABAt|$0e zgJaC=lQf|sRu?tU>6-BZJC$m)TF30PG7SxLJg_-Q`-)rGD>MmC;Qos~DzwNmYlB#Q z&5$8I8bu;D6trxBu*>tZ5!u}3i$kk~OccTD@YPGwt4|Nn(kfF1%2QCmxX56=P3a%k z8^uM}m=(#kAkxmbYtBK#)PsZ6VBv=C|8}l2ZpV*X%4O(UrLCAuXf#V$ZFJ{-=Q~g# zbB}F!J^JYQ0|O~U(vvf#%>_x<3zX3A)j!3ywQ~I8dGbMYAtG4x8+)0|*vW>vMY+f` z3K2}vA^Ue(6;VZd(NZ#W%QAqoMANX>`~&l72mU-!UQDiE4F;X1XbQXwdCEApKl9?@ z+JaDBE5Sp@^7^u(@ld63v)8s4C!0R1vFxxt@cXMW{oz-Z68#e^6{NpYD(}du0W@{K zNoS!KpD=wADW%Te?ASiHP~qXp3F>*JiYlPYi%6|l#Wo^bpFhPac@w!gCp|ukGcQXC zG#G1iX0c{QefbsM6OTzUvBAG5@a*L$0FWRhc!w$0#8Qt(UH~~z>VC3QkJxc zR*$sOPQ~M3m=vwm-sFYoN1hV#w>==($D67(YEYOS$&gfjcT&`QVyQBDw=i`q!oTt^ zYzygPaP2+)9G~a3PDC-zoKMPIf z<-%ikwDbX+ykfSlp4yWLkxJ`GzvYPne-UQfzFkQRTS5O+z9b%=jx>2>e>X^T5r#YmaKsddTi?3tzPAxp8BB%IB5~D{{-4rIGQvoQ7iE zKXGQkNbIVuZ6G6m%dJ(M3zRDCFv;LLq~$(?;HDIe4Zo|gplL6(MP@U?F<9lEQ^*SY z8wY*le2LQnlxZ|kKq^DvQ@8yDTIbKD-N7;&gy#6gaXB6OEhE`bTUsDE+a`I@pv+GR z4Mmk#UF!~ui%<4di>G}0G3`~Ipcy8;p=>FfFvBp8;kX@p6U#j4t`bf_v!>BadkTTT zIuGD72wH2kg!SXdxTU><{j1&&g|r8+Bv)F)FhiA#(I2T*O0B7K`zq_WK};J8hG_PN z3C(D15jhO)zpU4M^4S%N;5TPN<+hDHdo+{BB@3r{x#Z5~Dk_b^#@+6x1N7sk}2bMRg zdqr@1;MeJ_4eBv6uKgaQK)3ks@b%BJ4VtCVBy(OQ4twYm?(y9(`zOw@ty--j^ZAjS z@Z6LVn@TsKn)(i3U>e#@7ko}2p48zQLz;UubF+C5y+1LA*)g35l(+-!*0V-j>-PCC z&XFfY_fSDAu(LL#7fU)-_WfnqF7u8&^99{gpI`@D&6qF#^lwCH$>Vl3E%$)f+~dY9 zHAGaaJl--Z&{cI5@NR-~_3O~WeAVtWUEo3%Rr!d>?L}aEBa|NG_9CEUoC!`5#7{{t zp#w0#BbEFYOgKNq|2k#w*hPBH-k6q$WV-SqLmo=F@RO9@0@x=$o+7wm+xh@ z6o_cJNqjNm2!KWX#LJBFyOa7qb(>h}M|FF5;WDcR~6hbpV!|7n;n*WyN zLD>P{iyCfH`Xwz27PYc0m53*15_08>GenO}`y-#t9UTpOeZINNffmyM`Mq+%mVGj# zr0G{Rpa`}5XL$aDCkXD*`n7-^O(u*odqZ6GPE_+(?3eV_=wXcWo0~Hv^rg%n1}Gmj>XUp+JHg2>x3)H zv``vN$@5Ax?=fko&LzQ&*+<{_S9g83oKMK=TmY<5_2LaO!qgaLAwSwX}zE7 zt(bI%?#})=hu|*0`C(L+Iq}HhT9a^M>o1ObDNv1jNr|Hb5p^ahJ&_1p|J)9yXnA~i z5B(11H;1bfa%8%(519&q2}T7Edt=>DFn^!FyYDvsnDXXd5xldyh`&(OwhG3Q^SyT2 zE2>T(uLLuBFtS|k8dV9(`dZFN1;H{MuetQmI9a$(R!axo>B2N1X*fs#XK^Y#b}yJQ zQnI5gexV9K`fEo!QJ-W|fBT`hm3aw2%S%b_6t@AolG%SUYL1ZtPN;6`mv*c0BzImB zlc~levp+7Xt)Qm}IFD~iVwn$(*|XLzdI~#AY+-752%Sf_ooXVehY9Dcjd1aOw3CS0 z3ZP~g+zhK8tTb%O3K2iH%{r;`4EYr{KlK$X^q|JP{88s?)$vkBt&SODFIMLlwIc%! 
zfbPckmbu@UfDcHo^f%LmD_-ww3HHjwL|^_^8HxuYk`=vNmgKugjTbli^DnvmFT$bj z`+gE`o>3g>%x>yUh&zsER8?AU@_`b60=kivr-XXpeyJp~naNiK`A&%wffqh>fJL(o zI73gLO^|vCJK+RXE1uu&t=6PJ-v4rHn(5lEx~-I^O@2f>eg#-eZhh8=|GCDrZe0^8 z-R|`sa}(=VVDy7od_EIV_y+7$lBmwb9PN$2-K?|0^bAT54^=XxULsj^mDC|!mxQ9kX?~L&XpqTe5M)Ehhn|;z_ z|KQc5ZD^lP?H{sVV$9!L1*b;(OaQNRH=g%$oBAun$8A z;C>@CGaZ`}!b%{%@-Q!}0|pc-UbT|sI%$u6KCIR$m;1DuU0I2H)T+sgIqalQZL(3cF(BU_~j1qF(h;<`E3Dm_z_9V-NG3@p?1zzi;JRm z-ZG^W5D!YLiU(C0jQk-sER>7e^%9%FB~Uz-yiA9V@)KAIxiH3Eh18@GQnvq` zE=82LO5a|0A9RTuT=m9~kMu#MqCQMwy6X)CBGOL9*w6u!1otPQIyL2VR4ZQc7_3w4 z>UL3k{OE%wxBmMed!g`jEScU#U8APIsj19?Vd%v_Ue}@NGwLtyjI-H$bF*^~Cr`=C zBb}t#FO#3APZ=zfm$=a6X6tWjZpepftio}A08mA6bZt^Y?XGMR)+iIsMIb; z?t}407eW|McidIBzr|oux((vDi(ADQiJo~-{-9`-4eEoj5;Kfdon$e*Y)T~*YU-`I zmFo%zU8b3k|AC0-wGnF9{6@L?Q)$0&2A_8c|0rSH)?oYiC_{|Vs5dh-uLt`T$T zr#9~SqT6Y#Q}uO;m+A{#NS@LIQP0-wE&1j}QPiD5COFTQ%>qp}7`PZpgKNPb(wNZ9 z`%81~oTpl464l(f{>1LSt%oviz&YqpV642XOy6A_!GrXWWbQauGZh`@v%(YZ1ZxQ4 zR{s@F?ny)~uxsz)OF*Q8E9a06a*Xt(P2?%+uDZGK_rk9@av^IjR6kUZ@#9WbcaSS! z&gn_tebJPjlxuU6IxnU1g`ar!VhNhFlEr1PtDKahfdVR zzNbO)9_>G{zYsId9L766c8z2!o*ki7iv{f(95<%LN)_F$>Wwq!VJ|2GBPtxf(K61k zd_M;lRzJp=7KUyq`jkBBidyG58VWN2`56BWrUyqWkj=-&0ds;efh{@RpX{}CCRr!z zHBL3iV&Z`^iX)S+cho~tbk($3Gz1ljie}QxKj@XrRQKDr%jNMx=sr((os0xGNLvL$ zNI6tcJsgoTCP|B( z#Ma7sbc~CH=?&4kl|=sLW<^FUO$=9+#h53@#5a{JMo0wDzP>X!b0b6{054q{=N;wahwabkYtEg=E8*&*E!74TDt$=V^?e@C3hzxbGFf$c5tN3-;DtqH)n>o~hv9oLPsXht2egMLw1)OVAE0+x zNp%=%TJ$$aE#9gcXf2d9sO%vm!%?Sw_BQ%vp5|W?S@U^Ksk5OS2L%V#zd zwl=JK+)3!0_{99h=(9VYBlbEkwy4dLIgpkSSIqR(;*(v6DpD>~tR`)M8fPiA5bN2Y z%`{cGvgVzB`+|b~l7j)&n&;ozla*99{BvtSHgN+ns8~muzn-?3k66A#mp0)4tLZEl z+6!3iya(&EM4-5r9vyB3POOMxP#KyjDi?ryy~J?H*{?0%Wqot=5- z89TaMO1&acWjdlgfB7abJAC@i>Qn9VO)2XxG1rfuUKdnsS|a2Ne~8E_bJ(=B$4=;b z=j-cdeJTSb4>TIIhPf`ZS?6pxes|umvd(4iEf?&nAq9EtF5cjHj7ZBa(ubKgC1S?r zev6}VW9NAwGiY!OpuFo_*t3HQx1CeL-uElAg8S+TOtTc=2wsiG;q>2=#hG?dos=A3a zHWjVb^<+|YWaM7ETynKE@EqFr16h~`(&Q#>2pbN}$Ck+JJzd9lLikpBR7!3qNArpD z*sBoV@>5NnAj6#S)EV&GdY0$=-_UO)UXO)^_NeI%Jzj~AB|YnL>m6OI#lg-PiTg#- zmyyY{A7VTBs%4W6S9bUybySIecCcm%yfCka2>Sg{IGemhr(6RAe1@dTr>gU50) zoGhcy$653D1+tnt{A0`X5?Qx)V^iKH#axn3mrBh*n_+d;1q-77J2-jc@9zzGZ(ky& zyvYHTSyRAXmbyyL1TukSM(==$YHS$8c&~GpzEN$O1h1O!Je!RI{oKTtJe#eQOwrxF z4g&{}al$HC3As|8Hkk8yn+i?AU-jJQ(8y?G2&birG8qoQ8GNDu{2JBYf=3oM;$1mL zbiijT92ZfCW`rq2tG_D1N;cPUMgX`6IG0_!8V6f)Xt$_X@t`PR`cw}PrCLuk@=fPY zpQ$g$^k5a-?pkW+RYyvDC@aSqMRRI!Srt2R4u)48h2J$340s!27n-5>gNuckswsOj zv-$UYXTIx>*0SGy)$(CCPI~tqCLqzJ*CFc<$^esXi8aSIo#wanZ5&1$m7tCM5V~Gm zlS9`+X;_DRwvbd&FU&_WECaZ=`rJAO5Xlt68A87SaH1KHBvX+ny&> z_1P_49kPXCrTm154Xtb>%z4pdSItN>i?C4wx%S13o)w8#-qe3Sd(9LfnzQa>exg+h5}@VCqj^;ha9#)7)aCUsMb1%Slh)$T#f=W=V3R7d@sV z4l2hZoGp8AyXi^G1q;k>-v53NoS>(sIi4+xQaO`Dt&n6I# zC0r588_<)o;b<>8gHpRe219r2-mPbPw|M*&ec3Mde$=VXS^n#-r>{SG4_D2ZRu0Af zK4#d@8*Gx3P|r=OvoMMKGM%Q!`2_mdJno&3x&!xv?Bx z#AVRO7yO!+I`}9wG)lv{2b>989qkNS*jatK5AOB6T#K&2%|iv=(K$2v>laN!s7A<+ z*+f=l5VCH~82iKDMUW(}&EA-9ce_BujAf&9a}0f(=%Q)Z^?_Rr+U{<@J%5|cNDJ|k zy$ELj_?xY4>^vNhp7_J6bYu1W+7Btl!!BAF~Rzg08S zt@-o3|3Jws@xXIb{(@r+IM1Q?0#09%#9jrKzT^=!5cTA1X#NZ^qI=(~>b*yUDrt%O zYHNh%DVcf||0ALKqCJ4B?Ylu|*JqW|j*@)pckRhPc52CA(L6>5X#0^asd7r%a7NG} z6b^KfN`pgRVC&$Gn9FGGVT|x^x7N$Enn#8pPes8X_^-H+0!22bha5_gYNfM{ays5W zs@}&eu+CUMQK=fb%Pq=CDo_?Hokdx8<0rcIiYH3x@LkLL4*APgGx$1E43Xe6DRjSR zO8=3w>_)F976kVq{dc)ArS0ldV*T_2!va!d39vVido59@Bg!Ir-om?dCVzJ9oa zWK6;0kkxv#JI&YKdEMtE{ky%y!TkLr{yd1_vgQIEy7wjYrBbR>p@f4ZoWtu#nMEm; zXdCnu9mIwq0czl3J8lK(f_w{%C+;7cmP?wyp&5%|WUc-fk@!mUaWkg9#M);c7jk1d zK8ncc9L@J(%jY5RiB2_$YuKgN8huM+;MX2jQd!&*AQ>(JP0A@Oq}=l60-7f#?ukQA 
z;4(MBt>LaDg>zh=2wm9bR5}&Kee;wbfkW8m7F;#3kAT2bYp1N5zWC&g11i;yJE#?O z6P{M33_~}b0d4r@gh|(6`TH*QDmpH))YyY~1)9++G-{oH6@T@Kxfw8>5QkbvhYeB9 zpV|*B0gU^?0fZW3nZ!0vLLGO>J2U$54_L6m@r*_@N@ypjJe1>>!v;XLRwZwGd;uKo zxDj)pGOY}+Dbpy!)ZPK`To-@^c*wB-K8$Ovuc8;WD^a9}S=6RQU#Kb+9%bgA%l^%P z;M`+_R_E_hJdM6ICNI8KCqlKy;G$SFM{@GWi;?N@UX?wDNk+M!mV(6t zs|4K3SC0G3Y<=66bQbmY6InzpE>t=jdL7p ze;>PfOnuaM_74I`18PQ>Z=3!rMLsz|&lKkC^NuezNuO;io$Y|;;yqFPBg&itJ8^k-A zjfVIH1xWDTSG)s^F`M1 z$U*+$?^C{`mIfCPYlSo#GU*cG%W{Ot$=#hSM2rfFp=}`E@${i*|Cnb?>qrV94x6V$ za;JUQkY#s@D;hbi5Un~&CB5^uqiG)8?@R7Au2=>brn~Iu{bO>YI|GYrk2k&L{6$GM zSI7q2hz(H5TZ-X_Z*>fVpE3XFm}NyVW9!J3I5;1lE>;s}xn$HMb)dz9J!15ppQ*cI z9dbA{9`atD<-NO*dM?77;l%cuOle2?@c!SpoD~oZb=K5!{b9^jZtuhQoE5(~<;U|f zFI&m?TA%W?ss{N=e<9p?T;7xUyZ0u$96$QW&m(c=jb^)?8RI>)w+VI+%G_HP&E*V^ zUv=q=%;B>E8YmOPa4}JJaY-R|fr|Jw{KqyxELvwn_q7&+{%Y_B@`O;mr~!2Aw8 zf8*Mp=iP053@|~g7af)rZar>ASsCVDtM$4ZTc!2cLe)*;$5T>6j`fQHQ1JNYl8R93 zDy@z zr@4&#EOTVKjdrtg0zRiP;vPnpmqLG_4EI{+qyGESj%J1Z0R1zis!#eMSTm7OH@`p3 ze4f_y$4TZsAlM}(SyNZx@($bIzZ#(TxGL_eb*+y^-4r-W&=KfQQ(_iK`VCrCoHs(S zQi3g27(|k438`vI{$mW}kT#`}_?DK~7fHc&>xK#}qvKSMq=n2#Z$Ye-5BlMoj?F{7 zpLmBxy7QE6sXi6n5+&=X78{ann8<{ShlOh~^ObM++2P(W`6SX^w z=CG+p9adBevf?$_VgeDP%bnbJPHUUQZL;8*egibMUQ9N=uAKP?Pnpp^dz;@j>Cdj5rR4zodlLPq z1AUx}5Ri`8+@b6n{Rd)uiD)Y!Puf(q62nFYh%HwfIjvQ){MYh)I+@BndDQGn2H%ep z)xBz^UA|3p}hMT_*Ka27c zQv=$Q1>pkZyFkW7FO(OOjaAU|IT=+!2=CaEAR}d#w)6Mi{Za!bVLkj(TwT8>Z^Ot- zjhLn?<>wVDfZqout_JK!rZ}L=q|oFn#&Jtm%6Gx0%8IWFw;yiL6xplQ^UD=&i=)9X z+HhAqO}@FF7QuUdH+~wjS|fT2PdJPzJ&4Izep!RRI~uH3vxK!?J_{7gGP+FMQ8CM9 z=NV{e&Gm+#U6xq57nw-Ea9>mkx2!-|o?gAvhbN2GB+TblD4(duTx=WTm6!7WI7uYC zbk5C))!>V-Z!GHh1n#KSz`qeOyLkmT#82yu$uIVoRKNK)-oAd|6G;7H>dkD$TW z6jB+P@Yj_Y)#Zjs%O&UGGv8udT0b1(Tg)O*+FaF~)F$=Rpmk|82}K^e!+w0fdn;wt zt0thSX`MZ9p5m=coEF>#$4&={F%u5A)rl8zo}JAo{i@Kb(rA9lpc>`Mn)dMIKNH7V z&-T%-q@X2>6aF^6$!q=yK`n(lptd(p-g7_du`^Q5O?FX2y7Vbhf+lATujPkui}0rB z@0{<7adzg#6GxU4r)9clMGK*)s}yuLpNyxs2lP+OF+2r5MpU!~FTiw&N^l+D3Fs+! zrlhpxt3DpXPqVu8YVByjkai6QMAsGF;BUkGq8d_ML4eM6x;A6 zjj+q?fPqN7ls4Z?rcU-xS{t_`18#c4f1n$EN11)0pZ?5~JNC=Qt-e4hFvQKuCN?ZyfCMhjb_lfGZF=rix_o1ke@R zL^b0c*G!tIE6pv@AgN1l*O=fK)SV4y)?6xXbH$pcgwAG`e;6}4tygYTsi_d?k}au%x;_!5a$Vl-;pp>=)oc*dJ<$dxg1ppHWk43T;N)SE(`VM2P=5 z{#<0mjiBBF0l_0;8qhw!9{nY2(Y3GwXJK_ED!aK6XIT~tw`hkC=t<{U9vo?t>}Nsa z8IPyCVCBE}2~~dRZJ^T$;;74IGu9C{OeZ{Oo_CfzbABTJnpL`K~NFY({ zh(v-(1x5*5<=3Wbz;Z|vJ}FK~uO1fOaPC7+3eFO2m5c?>)Lig-9T^gGgZ%QH@$4nF zVxH;8t{mm#27Dv8?B;aFp-1Da+S^|Wr4z)SJAyS4WqPVipKXd$uwLWEe!P%dEPkfE z8jY7q3iNa+*dLLA=6%ylrVDgCR_A8=*sVR@rX!7Yz14mIKSL6pEk`I}%0$m`0Lc}J z&GRGi%7*E8bsvlIn|tEmgu=FPLy3)IhMS? 
zmQOIB7Cv{W%%a%6^?3=fY#_bJPuN}mWYI(S9ltE>QGkN`VsJJ6U6F>pPBA2V8zXu9 ziX`Z4qPa+O#-O{v-2_>C*sqAIEb1J2X*Ti6ooJT#H=Q8>BGj#wc)inMPw6@QUb!N9 zlaJIxhw*zU(U&@K7{Ug_MwK7@QWJXYVx)O++6=_p#FK7N{;s=YE}M!UofG0N#Z5_w zWE{S`jurN%+Yn6Cybd?AW<6}z2?di^7r4w+qnYJ#o{}d)dl;|ZMXHz#x)g_>%HHr1 zZxFY|DBqfC{`4ADxIU`rp2BUpPOCHOo&ALYwf^oL!y+|kNZg7r>XnbzYSF0jj?kQf$PDmpbUxzGxKS)cX44_+e zA{FGeSvw=Z?c{(ZO-|8t{Hjvx3mnk;C{4~PdE@;Ip3oex$d9mf3^p@mPl~v#9oY?FYHZR zklvgprP#I!BouDxbU+#^o7|6(Ju^ix;l01&@Zd*Ug&sMMoI|_K2?&%Ll__iRhx8-( zG$wY>Q~KH<2K?eZ5#$lD&-BMNqLTScH|Hvtm6&_BQpu_cGL^3_s$k%bHIQTIK04NZ zyhJXj>T~crlEWq&zMFc$0smOyO;#H$kmLJ3aqB}w%WA-y@^X`er1c`}Tzc|`Xye8u zEbH7v>ImB|6V0b5WBd#E;fN=yd-!DNN)wfDlSs+7C7}U^;M(bFR2AkI3Jiml-I}sM zkBT+l%1SkGwal2D~KRE-_ z_#*WSB&rLGi3I#6g<9-gZ}eQN$=@N{|7<^96K;JbWSs+Gy+R)iUD1lOoa24z<&_ETXBhOmriJjsK=e`FC%*2c~WiJjdsD2y&Xq`7R(!tr!KbpeWSIKW|37Kvd81&^-7zo{a9Fhlr+DWsw0Eqr8>YsQ0Gbr1c=6Q`|%yQ@SB9 z%dC3kyI6r@{6aLpQ9I2z>^=LCY@=o(lb^g&9$vn7VLrq|CF1yJLwf+5zuD^tAKo~d zvwynA&PC@b!tsKvF+Z*Fgk6*qPRHhZ0!@kKbgWstWuE!=O*ZFac7fPlr}9Z#fQajnQ_7n7$GgHv&g_ zZv+>}&`qLi{(Rq|yUgv6`7la#vdPyE#nrWUVVHuHK0Yt9}<~&Sevtp+nE9LhXkr}AiyHAdp z176%1%Tl9US;|KXZ*ZNBG67{2KsfD5^#qPM4KcNYvl@mb0^d*gYL+|=_w|m}@3NI^ zwra3Q&KVKxIkC?5BHE7%Z_#c}mTNk|z451BO;PEEEWq~4M&CQ5uVd=HFZC8py-Igd z?3Xm|g4F(Of3*pEuF#Z(YDsc)?p<&%vZY$5rvC2OTMSP%8zT7^B6mI^Z@M@8c<-up z&0DGsag6MsNMSCbJBw%xTV<_jwzyGC@6Z6=Vm|XW1Hbd=SkB?L2r0+%xVJ0DTQAaU z4$p%ZVRb`lTepq@J!^)048@GuD5{Uz_mvm7#z1#ko>_o{Z0Kt2gsB5z zW4ZqrWG_|(1Q#Hpl3$;n6L$S7m_hVvUwcc%I%hcf(MGEROQWJELiyHylMU6hI*z|j z5?t}dn0l0uon?aBe)FY&9nKdjCBUxGZ1r$_uK?ATV0jiHo1VI8{=DxXU&hsbmg`WF zTyWxatX$4jVJsU!(M?ef5b|ckGto9H#xbCMc{1Ak+)1gSx})?1p=B9hV`LE+E;p-D zVDQT^y5W5=D*-sU$t|+ww>5C9|L)_r*JZTR`ywVYkLhB+P=L9W)<>K<#4e=7F`V`Y zwO2B9D(_3NuH^lqVp{+sUaO;lyE8J{q;6Cih#74Us;K)YpYo_(!MFrAc)G?>0LYnx zztKIGNZvs;W^=-V|4H{oYG-|+E(Uk!`=*Ig7lB_(|qIMheWOe-^6*1Gy=W-5tn{S6w(5j4Z{-1Ip4L zcc%Po$?zC0|6I@#->9oQ<&E~fgO4B;MmQY>k0eHd-#*h`{GM?`7tR?~hf`+9b9lip z$bc(GP=qQ<4~<9c%gI-fPDiRDK^p7w#2PKQ(VUY0HU%V@J4_=!uc4-^p;0+oJd@k4gD}J$|M{M-2-jA|Ot3g-K zAw9Fd7R(}fYHwuTa7WiP?>|B6d8nV?J}nsxjlhZVf0%BAqbmBlF^SY=cb*{>V>wu- zLV#&mqh%sEUaf79Y*?I7{^~h)$Pt9UNmUOa>k-dSdhY|5RG5t9| z9;)Z}4#fCjUXU&-?SIB~^+p5*2p}u?OJ3x!Vg6E`mq2-0*Q4xBe}|#c;+<>xeO8=i zvK6H~rKDKHIYE7=MMHYEh52KQhRl2GMQ^G3;AUI@ z1OI2E+FzT;fBZfedW6Ng@1Q?N(hP)*AEXJyB7s?Tf>wV$xga(fgrrecQ+=JTDJEj9 zq11JqdJigcK+@V>y}P#%mob^H&!23+?JU>a-xWLa>pK$`Go&0MXcYx+Y)nqnA72A5 zs91NiI=wPSmRGp>>s;oK7i`cVa=EmR1bdiyK%wPt$Z0%uh><$kp$a&d?}wE_kpuBF zzqZB!Ea)Nk;Q-O^eqSj&jp*}f^2i_MYc6!+v)KEG?wfQp_zC`^Msgx!o{-|27+I`pV?WM5F0oe||iM(b_}O~!P6IY}5U z{H4~NpnB$|A0=>@vxMjFV@V9!Mh$WfDd)u!2i*;{s1;RlI@H!-s31i->PK)EL7O)_9wTeX9lS zk=+3JW=lX`%q2%z6w1!q=@>cVoQ`BZAr)h@oI&y@9qy^D9R+E_~^GkqdWDoI7bsBR1 zL2;y8)XOwLQmVA0WlZUH%_X!c7inFl^yQMLptMZWcj%~56poso78W3FggZJsckSMMF>Ngj+?W z{TojX`o)UXmBAth=@C!lJ}houm)0)Uihln~bcx?>ot7anbh2 zB0XavGNvHy^xP=N5r503dDv#5@nO&qcVgRK^jF+=>S_gExkzWI9>cc2{|)4gQk!UF??JMt zGB5DlHC0hQD&P#jGN>)L)E;~stXbJM6X^izf~) zM=UyLW^Voph6sOBPTDIRM@FBLs%(NU2rMe!EQ;aB*h>_)b2u7^XpH^at{XWvqW%HK;#k`Kr7<36`4Supdt~eDhmXuh_x)b7j#7SN$*6gvEYvJaf2D zy&CX)BE4e@%y(H?@fL_uenj0wis@O)({96pwr;?#l&47kDQn5YZ*_qA^yNjwf;H=j zbQ3BKj9yZUpxb|Xe7m*NUS>J{fw zZ?Pf#1ZY5=Pu_oPSMz5p^xoU$=#r(B3r!r%EHvUT*@}!!da&c4B!|_XZ$1BnYqwBr zj5XV*PH^w>E;O5s4_7d>={VI0ne}Mb{X_8O#%j{QQnXAZN$HSPXuvNgpRP=0m&S zP~CZwzZ#QA8LHoO2z(FS&OS$jEQV1sFLN`^dYf2ST`7Li_RuFy{ndU0%|Qezo1z*W zYWu`j<~y@1r<1I@3X~qRSB90ENT4C7T)Fo5p06dZNtIl%2h;FPc{L|Vwfsm+%p-yZ z<*YRW%}&u8N^^`c-)~z*diWnFZD1DPMWr%h6EKS^q6REf;|osq93mRrblZy8LQ52R5Y9@#ltT{@1`(I^YnJJ|Bo(P0@FCbah-yn_ zk{5gzG>-Y8-U%dsvWIpG++wjdTIBXQUSEevV_&$_c^_hZ_LG(U# 
z)z@1Sl8^@nFbj2lsn3(w)gyEcKzRL*Q!lB?>%-dkkpJsJ5|)<|?t|N=Sszj;h&{2+ zJXAU%bV(hLqj9V4wXS`gg0o+>CpP_%zQGypkJA^P>*|mCNqOb@ANA;2gI!r8*A-=m zYuR;J)+8`Nx~2Aagudmj36=|V9}{T_!-u^R4J1O`?D0)_`5$|n=A=R1tj@MF_2)E zB7*=V&qdW>QiqiN-RQcC&H4dTHh^In4G)+!RV#YTH;SHXNNt$flI#FU8FA3qj_(^4 zf09;zW=MP$to!(dOE1pix6Ps;-9=kg?pWTYqx#ji)gnt*0qt~yKany_xBM^ z5&w{5s|>cc4!rm98w0NT)yOq+JMyY10o=+U#)gOoj|t=cUK1P*&IOq*aP-h!(X1?H zW|(G!{JC`mID+Vl5v%IimH?U^ofGm2Tt}yVg?z3N*ED0$rqQPb50qqy&pFT82K%2w zmEdJ;>xiU#WD*Oywn?7Ee2%h}H8)nL=UtE!NJ(38N72qv2xW($Zr16a zArsL!b9hn3{IFVKuImm9tw1m)ptKW-GTs}6XNZ#uU&qK-AYa1ZBlf&&9_mPv;^?gQ zLeurxH5CG!-dc74P%gmu6&iIC=`Ig|TmO$YLmjnj^DMaEDN#BdAz zm3=?6!JewcMl}a;(2CraFM_J%QSx8f%&^BUR96ca-YHU5(5kkmQMizDxV9>Hc=<4QQDQgQC#|70zcQ$)uoMj@xu4kY z3hg1ten_j*kHh_a+V@nteXWaSt-<@Z3XH_a9-ol(REYpy)gEm$`7cqt5wr;2LhqY_ zh(#G|oQRRL<)A|!8LtH|q@?wVB9R)}N9h39{b@3il9{NxP?ro*3L^wh#1B7a-#xDtO*6X=9LtEe@|7WBkf0xEc2yXNn489K=2R0-v;--Xk;`{TKU z*N-=bp5BkzYA`4|k#rer@$g|(Nj(>teha^2`c5rXQh}WTt}Mp^W}6 zAPNd`e*^%~xhaDeU_ZgH1eHSGebpv`4#B|c5^^b#V^~-~6cyKv`^tkOX~mT0lV z%Ll2YTv3}MHp8laqodr02_Q`hq58O#C13LXPO6wf!@CM7yc9S<8j?ut3Wb2}X7~<@WfZ zwCwR)<`-B*7+)#pXpTlhO`6UchJ_&3;PZZYP>%X?zKk`quic1B@c*Ia zZPLCD+PD!F>_KKoUG{-_bBv});kP%5AvP^fsJY_H7S~%JmrgivA^h?y5^liaH4}2e zGES5XuU)lRD#Q8u=QXSPe^{|!P|){qs6JFcLWl;5ac@F4lLNU3Qo5QIrmAj%QT5^> zIYbv|A6R!W!>zuk0w)UnV(=v~C+D{Uxl6V>1+SxZvsW7;ONA}tDmikToIS387D;(}M{ zQX8}z6xM!z+Ov8B>6Cp5uYZm<0fjmb3O6t&7ct({;QV}3D!JF>GSxIR(lTCZlpjVnfn)FM_RC0HlU7&v}{bIN)45h$`ECyQ1D0+x9 z6B^Ya__Lyt$&e@<@Bj8jF~r^+tE06Iqm^AWC1D?U@J2YDR53QDyH}^&89EJQ>lt5VMNux1Dk3d?J^mvgB6IDHkzE<#4 zbwSLDoCMt?%O(OqVe03=J%LU*4k=KB;57X=Sf&gKCjdsDjnx&k6y1^~uvS&Gp6;rQ z%PnS%UHd)bPH9qwiAsE()h0sE-Q}y1uej}WA!e{Z=m<(rY*nN`YBlmjX&~)4vg)s@ zt~HL~SrTl@`Er%Tv$J_|4Fk%>H=)lr|Gz@rh&uYhPkfvla6}6k38RNJ9OBj+8r?YE zb3-c4LJ8{csxbLC<$!U8%|$d+I!GYRD7-Dq0#YC`^6w6Mo%8~&{ss8*Wg6>mxZL+w0C!jX&ssW2c|&Mhws79!-SH?FM(I_rUlFzF_Yg3`cw zv}*ny!-1J(&o^eB+`=JkVA%(2v{K)cD``;BH#Z_BR~GeuUZSvQ8Cp{^<~R7$j()b_ zhndO>N(Neqwu=z6sR$w_JQW5;;6!S8Hk?62=s0imVhVEz4THw@F6QS3UK#>qX0E^* zQhnZ2Y?|hzYCkqw@CP)`OENO`-SIv@`cWT(Xej73P{o55o#^}EXFODbl`bCC7%+35 zJfX5BX~AZ?s$`g@HGeEbzTycitImrs?)T+?gEZ#J`4~U0RZ=#)G_^D|)$s-qPtu{r zaDw!Eb*Ia?aa)3nW7uR_k8oQME!8dcxejGyCvaE_U!|jr>FmHVKC(3M2QsLJ*boS< zeGe6IL0QqR?;z(fgjpX5Y@RRvQPr~2G(y{r zC&krch)eh{9JyaW;{ZgaS$F~N6lOSkz`fIqtk8)3EEqK6=FBZNIqT)7wis%Txsm`P zB>D~Iw8|VOK}}nFkh&?f8gd{ldq4tQ8ObJ4vDBBrPQeL~@(mc7jhRDA)KOBZ4uvS? z3Sd>Y7;ye;CX4oh(18%&3gB}z6G5N$62-;G>~M9QXf1f~dCwL2Sw zW40w-VpF<>kJ~z$qU!rm-LE1A3qr;{iUgFnV%5Mq(2)U{T9s0iLoYI5-Gy*7>wD+!2kPswfH~ zz;9TjiU2AJ)e3AP6G}cZ?e)g<6V+DNzdL zoCA51+L*UL%&Bd41YKRB?W2{+UK0TLlc!Vgbg*f>abhFOti&N7m~0tBX)9+00wGHA z^9#ICjs)YBHwAl{Pg8*(FjvlpNiK>LvM2X3do?1@8-)jr5!>qnNni!b{mp-8y!5-n z{z17wj2S)z#|SL41(D;lBhj5$3u_TMt*Xhn;NPVaE(`Xzmm06Xs=^NA3R^TSG%*+~ zj$~piE<{_J84i#5Lok+5vSSCm(AXt(-UE1|w7y>M$^rAGT+j^jrN@MV<3BVG=h0LK z*j6AY4n%OGYXs>1Wo~=HMOsw;yY3U0Y&1zQNfCipW#TWnnaWLyr~75BiLy~qI03MuC{zH`LEU5VLb<3p=P zjLo>n`$oG?pu<>=lM_nx?{7u^&)gxz9{oIID7q7Q5Bkg)3u0prT0y8nOlu)dKx~T& zwV9`9R8EIi{{BFsswQIvQ^wf2X0c}26b?osK_*aUDK?o0XY4Cx{sY-UzmgxUozj?) zJf|)81)=QpB3#!mj_jaa$U9i&p)!`uM1%K8-)M#p6aCaHOm{f)#8JkyOfOeYLc4@z z{)>r+&TYz(&F!K@q=G*~)G`9tr3a^QKN(jBJ@yq8N~GP-{=J7wDcAL+%8{^ZTAbIz zFeyIc`0sfk)W3(*=cJmxJJ9zj$gWRIb#Q#PE7BI8$f!dNvpy4B3aj@Gy6c2g2qB?XJ)PcQ8tFYQP$q HFTwu@rHlVn literal 0 HcmV?d00001 diff --git a/docs/api.rst b/docs/api.rst index 4f2bcab21e..a3c8bf0a8c 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -1,6 +1,3 @@ -API Reference -============= - mmaction.apis ------------- .. 
automodule:: mmaction.apis
diff --git a/docs/conf.py b/docs/conf.py index 1bf9f44d53..5faa526c6e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -15,6 +15,8 @@ import subprocess import sys +import pytorch_sphinx_theme + sys.path.insert(0, os.path.abspath('..')) # -- Project information ----------------------------------------------------- @@ -41,12 +43,15 @@ def get_version(): # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.napoleon', 'sphinx.ext.viewcode', - 'recommonmark', 'sphinx_markdown_tables' + 'sphinx_markdown_tables', 'sphinx_copybutton', 'myst_parser' ] # numpy and torch are required autodoc_mock_imports = ['mmaction.version', 'PIL'] +copybutton_prompt_text = r'>>> |\.\.\. ' +copybutton_prompt_is_regexp = True + # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] @@ -61,17 +66,118 @@ def get_version(): # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = 'sphinx_rtd_theme' +html_theme = 'pytorch_sphinx_theme' # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = [] -language = 'en' +html_theme_path = [pytorch_sphinx_theme.get_html_theme_path()] +html_theme_options = { + # 'logo_url': 'https://mmaction2.readthedocs.io/en/latest/', + 'menu': [ + { + 'name': + 'Tutorial', + 'url': + 'https://colab.research.google.com/github/' + 'open-mmlab/mmaction2/blob/master/demo/mmaction2_tutorial.ipynb' + }, + { + 'name': 'GitHub', + 'url': 'https://github.com/open-mmlab/mmaction2' + }, + { + 'name': + 'Upstream', + 'children': [ + { + 'name': 'MMCV', + 'url': 'https://github.com/open-mmlab/mmcv', + 'description': 'Foundational library for computer vision' + }, + { + 'name': + 'MMClassification', + 'url': + 'https://github.com/open-mmlab/mmclassification', + 'description': + 'Open source image classification toolbox based on PyTorch' + }, + { + 'name': 'MMDetection', + 'url': 'https://github.com/open-mmlab/mmdetection', + 'description': 'Object detection toolbox and benchmark' + }, + ] + }, + { + 'name': + 'Projects', + 'children': [ + { + 'name': 'MMAction2', + 'url': 'https://github.com/open-mmlab/mmaction2', + }, + { + 'name': 'MMClassification', + 'url': 'https://github.com/open-mmlab/mmclassification', + }, + { + 'name': 'MMSegmentation', + 'url': 'https://github.com/open-mmlab/mmsegmentation', + }, + { + 'name': 'MMDetection3D', + 'url': 'https://github.com/open-mmlab/mmdetection3d', + }, + { + 'name': 'MMEditing', + 'url': 'https://github.com/open-mmlab/mmediting', + }, + { + 'name': 'MMPose', + 'url': 'https://github.com/open-mmlab/mmpose', + }, + { + 'name': 'MMTracking', + 'url': 'https://github.com/open-mmlab/mmtracking', + }, + { + 'name': 'MMGeneration', + 'url': 'https://github.com/open-mmlab/mmgeneration', + }, + ] + }, + { + 'name': + 'OpenMMLab', + 'children': [ + { + 'name': 'Homepage', + 'url': 'https://openmmlab.com/' + }, + { + 'name': 'GitHub', + 'url': 'https://github.com/open-mmlab/' + }, + ] + }, + ] +} +language = 'en' master_doc = 'index' +html_static_path = ['_static'] +html_css_files = ['css/readthedocs.css'] + +myst_enable_extensions = ['colon_fence'] + def builder_inited_handler(app): subprocess.run(['./merge_docs.sh'])
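A quick sanity check (not part of the patch) of the `copybutton_prompt_text` regex configured above; it shows how sphinx_copybutton strips interpreter prompts from copied snippets.

```python
import re

# The same pattern registered for sphinx_copybutton above.
prompt = re.compile(r'>>> |\.\.\. ')
lines = ['>>> x = 1', '... y = x + 1', 'plain output']
# Each copied line loses at most one leading prompt.
print([prompt.sub('', line, count=1) for line in lines])
# -> ['x = 1', 'y = x + 1', 'plain output']
```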
diff --git a/docs/getting_started.md b/docs/getting_started.md index 7fab90948d..990184d39e 100644 --- a/docs/getting_started.md +++ b/docs/getting_started.md @@ -5,21 +5,22 @@ For installation instructions, please see [install.md](install.md). -- [Datasets](#datasets) -- [Inference with Pre-Trained Models](#inference-with-pre-trained-models) - - [Test a dataset](#test-a-dataset) - - [High-level APIs for testing a video and rawframes](#high-level-apis-for-testing-a-video-and-rawframes) -- [Build a Model](#build-a-model) - - [Build a model with basic components](#build-a-model-with-basic-components) - - [Write a new model](#write-a-new-model) -- [Train a Model](#train-a-model) - - [Iteration pipeline](#iteration-pipeline) - - [Training setting](#training-setting) - - [Train with a single GPU](#train-with-a-single-gpu) - - [Train with multiple GPUs](#train-with-multiple-gpus) - - [Train with multiple machines](#train-with-multiple-machines) - - [Launch multiple jobs on a single machine](#launch-multiple-jobs-on-a-single-machine) -- [Tutorials](#tutorials) +- [Getting Started](#getting-started) + - [Datasets](#datasets) + - [Inference with Pre-Trained Models](#inference-with-pre-trained-models) + - [Test a dataset](#test-a-dataset) + - [High-level APIs for testing a video and rawframes](#high-level-apis-for-testing-a-video-and-rawframes) + - [Build a Model](#build-a-model) + - [Build a model with basic components](#build-a-model-with-basic-components) + - [Write a new model](#write-a-new-model) + - [Train a Model](#train-a-model) + - [Iteration pipeline](#iteration-pipeline) + - [Training setting](#training-setting) + - [Train with a single GPU](#train-with-a-single-gpu) + - [Train with multiple GPUs](#train-with-multiple-gpus) + - [Train with multiple machines](#train-with-multiple-machines) + - [Launch multiple jobs on a single machine](#launch-multiple-jobs-on-a-single-machine) + - [Tutorials](#tutorials) @@ -214,7 +215,8 @@ for result in results: print(f'{result[0]}: ', result[1]) ``` -**Note**: We define `data_prefix` in config files and set it None as default for our provided inference configs. +:::{note} +We define `data_prefix` in config files and set it to None by default for our provided inference configs. If the `data_prefix` is not None, the path used to load the video file (or rawframe directory) will be `data_prefix/video`. Here, `video` is the param in the demo scripts above. This detail can be found in `rawframe_dataset.py` and `video_dataset.py`. For example, @@ -226,6 +228,8 @@ This detail can be found in `rawframe_dataset.py` and `video_dataset.py`. For ex - When rawframes path is `VIDEO_NAME/img_xxxxx.jpg`, and `data_prefix` is None in the config file, the param `video` should be `VIDEO_NAME`. - When passing a url instead of a local video file, you need to use OpenCV as the video decoding backend. +::: + A notebook demo can be found in [demo/demo.ipynb](/demo/demo.ipynb) ## Build a Model
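A tiny illustration (not part of the patch) of the `data_prefix` rule in the note above; `resolve` is a hypothetical helper, not an MMAction2 function.

```python
import os.path as osp

def resolve(video, data_prefix=None):
    # Mirrors how the datasets join data_prefix with the `video` param.
    return video if data_prefix is None else osp.join(data_prefix, video)

print(resolve('VIDEO_NAME'))  # -> 'VIDEO_NAME' (data_prefix is None)
print(resolve('VIDEO_NAME', 'data/kinetics400/rawframes_train'))
# -> 'data/kinetics400/rawframes_train/VIDEO_NAME'
```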
diff --git a/docs/install.md b/docs/install.md index aa051821dc..40fd91c4ee 100644 --- a/docs/install.md +++ b/docs/install.md @@ -4,14 +4,15 @@ We provide some tips for MMAction2 installation in this file. -- [Requirements](#requirements) -- [Prepare Environment](#prepare-environment) -- [Install MMAction2](#install-mmaction2) -- [Install with CPU only](#install-with-cpu-only) -- [Another option: Docker Image](#another-option--docker-image) -- [A from-scratch setup script](#a-from-scratch-setup-script) -- [Developing with multiple MMAction2 versions](#developing-with-multiple-mmaction2-versions) -- [Verification](#verification) +- [Installation](#installation) + - [Requirements](#requirements) + - [Prepare environment](#prepare-environment) + - [Install MMAction2](#install-mmaction2) + - [Install with CPU only](#install-with-cpu-only) + - [Another option: Docker Image](#another-option-docker-image) + - [A from-scratch setup script](#a-from-scratch-setup-script) + - [Developing with multiple MMAction2 versions](#developing-with-multiple-mmaction2-versions) + - [Verification](#verification) @@ -29,10 +30,10 @@ We provide some tips for MMAction2 installation in this file. - [PyAV](https://github.com/mikeboers/PyAV) (optional): `conda install av -c conda-forge -y` - [PyTurboJPEG](https://github.com/lilohuang/PyTurboJPEG) (optional): `pip install PyTurboJPEG` - [denseflow](https://github.com/open-mmlab/denseflow) (optional): See [here](https://github.com/innerlee/setup) for simple install scripts. -- [moviepy](https://zulko.github.io/moviepy/) (optional): `pip install moviepy`. See [here](https://zulko.github.io/moviepy/install.html) for official installation. **Note**(according to [this issue](https://github.com/Zulko/moviepy/issues/693)) that: - 1. For Windows users, [ImageMagick](https://www.imagemagick.org/script/index.php) will not be automatically detected by MoviePy, +- [moviepy](https://zulko.github.io/moviepy/) (optional): `pip install moviepy`. See [here](https://zulko.github.io/moviepy/install.html) for official installation. **Note** (according to [this issue](https://github.com/Zulko/moviepy/issues/693)) that: + 1. For Windows users, [ImageMagick](https://www.imagemagick.org/script/index.php) will not be automatically detected by MoviePy, there is a need to modify the `moviepy/config_defaults.py` file by providing the path to the ImageMagick binary called `magick`, like `IMAGEMAGICK_BINARY = "C:\\Program Files\\ImageMagick_VERSION\\magick.exe"` - 2. For Linux users, there is a need to modify the `/etc/ImageMagick-6/policy.xml` file by commenting out + 2. For Linux users, there is a need to modify the `/etc/ImageMagick-6/policy.xml` file by commenting out `<policy domain="path" rights="none" pattern="@*" />` to `<!-- <policy domain="path" rights="none" pattern="@*" /> -->`, if [ImageMagick](https://www.imagemagick.org/script/index.php) is not detected by `moviepy`. - [Pillow-SIMD](https://docs.fast.ai/performance.html#pillow-simd) (optional): Install it by the following scripts. @@ -44,8 +45,10 @@ CFLAGS="${CFLAGS} -mavx2" pip install --upgrade --no-cache-dir --force-reinstall conda install -y jpeg libtiff ``` -**Note**: You need to run `pip uninstall mmcv` first if you have mmcv installed. +:::{note} +You need to run `pip uninstall mmcv` first if you have mmcv installed. If mmcv and mmcv-full are both installed, there will be a `ModuleNotFoundError`. +::: ## Prepare environment @@ -62,7 +65,8 @@ b. Install PyTorch and torchvision following the [official instructions](https:/ conda install pytorch torchvision -c pytorch ``` -Note: Make sure that your compilation CUDA version and runtime CUDA version match. +:::{note} +Make sure that your compilation CUDA version and runtime CUDA version match.
You can check the supported CUDA version for precompiled packages on the [PyTorch website](https://pytorch.org/). `E.g.1` If you have CUDA 10.1 installed under `/usr/local/cuda` and would like to install PyTorch 1.5, you need to install the prebuilt PyTorch with CUDA 10.1. ```shell conda install pytorch cudatoolkit=10.1 torchvision -c pytorch ``` `E.g. 2` If you have CUDA 9.2 installed under `/usr/local/cuda` and would like to install PyTorch 1.3.1, you need to install the prebuilt PyTorch with CUDA 9.2. ```shell conda install pytorch=1.3.1 cudatoolkit=9.2 torchvision=0.4.2 -c pytorch ``` If you build PyTorch from source instead of installing the prebuilt package, you can use more CUDA versions such as 9.0. +::: ## Install MMAction2 @@ -153,7 +158,7 @@ This part is **optional** if you're not going to do spatial temporal detection. See [here](https://github.com/open-mmlab/mmdetection#installation) to install mmdetection. -Note: +:::{note} 1. The git commit id will be written to the version number with step b, e.g. 0.6.0+2e7045c. The version will also be saved in trained models. It is recommended that you run step b each time you pull some updates from github. If C++/CUDA code is modified, then this step is compulsory. @@ -170,6 +175,8 @@ Note: or specify desired extras when calling `pip` (e.g. `pip install -v -e .[optional]`, valid keys for the `[optional]` field are `all`, `tests`, `build`, and `optional`) like `pip install -v -e .[tests,build]`. +::: + ## Install with CPU only The code can be built for a CPU-only environment (where CUDA isn't available).
diff --git a/docs/tutorials/7_customize_runtime.md b/docs/tutorials/7_customize_runtime.md index dffb2e9668..dcd0b4d1ea 100644 --- a/docs/tutorials/7_customize_runtime.md +++ b/docs/tutorials/7_customize_runtime.md @@ -4,26 +4,27 @@ In this tutorial, we will introduce some methods about how to customize optimiza -- [Customize Optimization Methods](#customize-optimization-methods) - - [Customize optimizer supported by PyTorch](#customize-optimizer-supported-by-pytorch) - - [Customize self-implemented optimizer](#customize-self-implemented-optimizer) - - [1. Define a new optimizer](#1-define-a-new-optimizer) - - [2. Add the optimizer to registry](#2-add-the-optimizer-to-registry) - - [3. Specify the optimizer in the config file](#3-specify-the-optimizer-in-the-config-file) - - [Customize optimizer constructor](#customize-optimizer-constructor) - - [Additional settings](#additional-settings) -- [Customize Training Schedules](#customize-training-schedules) -- [Customize Workflow](#customize-workflow) -- [Customize Hooks](#customize-hooks) - - [Customize self-implemented hooks](#customize-self-implemented-hooks) - - [1. Implement a new hook](#1-implement-a-new-hook) - - [2. Register the new hook](#2-register-the-new-hook) - - [3. Modify the config](#3-modify-the-config) - - [Use hooks implemented in MMCV](#use-hooks-implemented-in-mmcv) - - [Modify default runtime hooks](#modify-default-runtime-hooks) - - [Checkpoint config](#checkpoint-config) - - [Log config](#log-config) - - [Evaluation config](#evaluation-config) +- [Tutorial 7: Customize Runtime Settings](#tutorial-7-customize-runtime-settings) + - [Customize Optimization Methods](#customize-optimization-methods) + - [Customize optimizer supported by PyTorch](#customize-optimizer-supported-by-pytorch) + - [Customize self-implemented optimizer](#customize-self-implemented-optimizer) + - [1. Define a new optimizer](#1-define-a-new-optimizer) + - [2. Add the optimizer to registry](#2-add-the-optimizer-to-registry) + - [3. Specify the optimizer in the config file](#3-specify-the-optimizer-in-the-config-file)
+ - [Customize optimizer constructor](#customize-optimizer-constructor) + - [Additional settings](#additional-settings) + - [Customize Training Schedules](#customize-training-schedules) + - [Customize Workflow](#customize-workflow) + - [Customize Hooks](#customize-hooks) + - [Customize self-implemented hooks](#customize-self-implemented-hooks) + - [1. Implement a new hook](#1-implement-a-new-hook) + - [2. Register the new hook](#2-register-the-new-hook) + - [3. Modify the config](#3-modify-the-config) + - [Use hooks implemented in MMCV](#use-hooks-implemented-in-mmcv) + - [Modify default runtime hooks](#modify-default-runtime-hooks) + - [Checkpoint config](#checkpoint-config) + - [Log config](#log-config) + - [Evaluation config](#evaluation-config) @@ -205,13 +206,15 @@ In such case, we can set the workflow as `[('train', 1), ('val', 1)]` so that 1 epoch for training and 1 epoch for validation will be run iteratively. -**Note**: +:::{note} 1. The parameters of the model will not be updated during the val epoch. 2. Keyword `total_epochs` in the config only controls the number of training epochs and will not affect the validation workflow. 3. Workflows `[('train', 1), ('val', 1)]` and `[('train', 1)]` will not change the behavior of `EvalHook` because `EvalHook` is called by `after_train_epoch` and the validation workflow only affects hooks that are called through `after_val_epoch`. Therefore, the only difference between `[('train', 1), ('val', 1)]` and `[('train', 1)]` is that the runner will calculate losses on the validation set after each training epoch. +::: + ## Customize Hooks ### Customize self-implemented hooks
diff --git a/docs/useful_tools.md b/docs/useful_tools.md index d2575df328..b303e63911 100644 --- a/docs/useful_tools.md +++ b/docs/useful_tools.md @@ -85,11 +85,13 @@ Params: 28.04 M ============================== ``` -**Note**: This tool is still experimental and we do not guarantee that the number is absolutely correct. +:::{note} +This tool is still experimental and we do not guarantee that the number is absolutely correct. You may use the result for simple comparisons, but double check it before you adopt it in technical reports or papers. (1) FLOPs are related to the input shape while parameters are not. The default input shape is (1, 3, 340, 256) for the 2D recognizer, (1, 3, 32, 340, 256) for the 3D recognizer. (2) Some operators are not counted into FLOPs, like GN and custom operators. Refer to [`mmcv.cnn.get_model_complexity_info()`](https://github.com/open-mmlab/mmcv/blob/master/mmcv/cnn/utils/flops_counter.py) for details. +::: ## Model Conversion
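For readers unfamiliar with the complexity tool referenced in the note above, here is a minimal sketch (not part of the patch); the toy model is purely illustrative and stands in for a real recognizer backbone.

```python
import torch.nn as nn
from mmcv.cnn import get_model_complexity_info

# Toy 2D model; a real run would pass a recognizer built from a config.
model = nn.Sequential(
    nn.Conv2d(3, 64, 3, padding=1), nn.ReLU(),
    nn.AdaptiveAvgPool2d(1), nn.Flatten(), nn.Linear(64, 400))
# The input shape excludes the batch dimension, e.g. (3, 340, 256) for 2D models.
flops, params = get_model_complexity_info(
    model, (3, 340, 256), as_strings=True, print_per_layer_stat=False)
print(flops, params)
```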
diff --git a/docs_zh_CN/api.rst b/docs_zh_CN/api.rst index 4f2bcab21e..a3c8bf0a8c 100644 --- a/docs_zh_CN/api.rst +++ b/docs_zh_CN/api.rst @@ -1,6 +1,3 @@ -API Reference -============= - mmaction.apis ------------- .. automodule:: mmaction.apis
diff --git a/docs_zh_CN/conf.py b/docs_zh_CN/conf.py index 8338f28df2..853974e6e9 100644 --- a/docs_zh_CN/conf.py +++ b/docs_zh_CN/conf.py @@ -15,6 +15,8 @@ import subprocess import sys +import pytorch_sphinx_theme + sys.path.insert(0, os.path.abspath('..')) # -- Project information ----------------------------------------------------- @@ -41,12 +43,15 @@ def get_version(): # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.napoleon', 'sphinx.ext.viewcode', - 'recommonmark', 'sphinx_markdown_tables' + 'sphinx_markdown_tables', 'sphinx_copybutton', 'myst_parser' ] # numpy and torch are required autodoc_mock_imports = ['mmaction.version', 'PIL'] +copybutton_prompt_text = r'>>> |\.\.\. ' +copybutton_prompt_is_regexp = True + # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] @@ -61,15 +66,111 @@ def get_version(): # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = 'sphinx_rtd_theme' +html_theme = 'pytorch_sphinx_theme' +html_theme_path = [pytorch_sphinx_theme.get_html_theme_path()] +html_theme_options = { + # 'logo_url': 'https://mmaction2.readthedocs.io/zh_CN/latest/', + 'menu': [ + { + 'name': + '教程', + 'url': + 'https://colab.research.google.com/github/' + 'open-mmlab/mmaction2/blob/master/demo/mmaction2_tutorial.ipynb' + }, + { + 'name': 'GitHub', + 'url': 'https://github.com/open-mmlab/mmaction2' + }, + { + 'name': + '上游代码库', + 'children': [ + { + 'name': 'MMCV', + 'url': 'https://github.com/open-mmlab/mmcv', + 'description': '计算机视觉基础库' + }, + { + 'name': 'MMClassification', + 'url': 'https://github.com/open-mmlab/mmclassification', + 'description': '图像分类代码库' + }, + { + 'name': 'MMDetection', + 'url': 'https://github.com/open-mmlab/mmdetection', + 'description': '物体检测代码库' + }, + ] + }, + { + 'name': + 'OpenMMLab 各项目', + 'children': [ + { + 'name': 'MMAction2', + 'url': 'https://github.com/open-mmlab/mmaction2', + }, + { + 'name': 'MMClassification', + 'url': 'https://github.com/open-mmlab/mmclassification', + }, + { + 'name': 'MMSegmentation', + 'url': 'https://github.com/open-mmlab/mmsegmentation', + }, + { + 'name': 'MMDetection3D', + 'url': 'https://github.com/open-mmlab/mmdetection3d', + }, + { + 'name': 'MMEditing', + 'url': 'https://github.com/open-mmlab/mmediting', + }, + { + 'name': 'MMPose', + 'url': 'https://github.com/open-mmlab/mmpose', + }, + { + 'name': 'MMTracking', + 'url': 'https://github.com/open-mmlab/mmtracking', + }, + { + 'name': 'MMGeneration', + 'url': 'https://github.com/open-mmlab/mmgeneration', + }, + ] + }, + { + 'name': + 'OpenMMLab', + 'children': [ + { + 'name': '主页', + 'url': 'https://openmmlab.com/' + }, + { + 'name': 'GitHub', + 'url': 'https://github.com/open-mmlab/' + }, + ] + }, + ] +} # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = [] +html_static_path = ['_static'] +html_css_files = ['css/readthedocs.css'] -language = 'zh_CN' +myst_enable_extensions = ['colon_fence'] +language = 'zh_CN' master_doc = 'index'
diff --git a/requirements/docs.txt b/requirements/docs.txt index 8a342a3717..52af18d4fd 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,7 +1,11 @@ +docutils==0.16.0 einops +myst-parser opencv-python +-e git+https://github.com/gaotongxiao/pytorch_sphinx_theme.git#egg=pytorch_sphinx_theme recommonmark scipy sphinx==4.0.2 +sphinx_copybutton sphinx_markdown_tables sphinx_rtd_theme==0.5.2
diff --git a/setup.cfg b/setup.cfg index 32cd9e1f7c..8625daca73 100644 --- a/setup.cfg +++ b/setup.cfg @@ -19,6 +19,6 @@ line_length = 79 multi_line_output = 0 known_standard_library = pkg_resources,setuptools known_first_party = mmaction -known_third_party = cv2,decord,einops,joblib,matplotlib,mmcv,numpy,pandas,pytest,scipy,seaborn,titlecase,torch,webcolors +known_third_party = cv2,decord,einops,joblib,matplotlib,mmcv,numpy,pandas,pytest,pytorch_sphinx_theme,scipy,seaborn,titlecase,torch,webcolors no_lines_before = STDLIB,LOCALFOLDER default_section = THIRDPARTY
diff --git a/src/pytorch-sphinx-theme b/src/pytorch-sphinx-theme new file mode 160000 index 0000000000..5134e93c22 --- /dev/null +++ b/src/pytorch-sphinx-theme @@ -0,0 +1 @@ +Subproject commit 5134e93c22d704334aec9079175bc214eb533baf
diff --git a/tools/data/jhmdb/README.md b/tools/data/jhmdb/README.md index c026052cb6..2a39061858 100644 --- a/tools/data/jhmdb/README.md +++ b/tools/data/jhmdb/README.md @@ -86,7 +86,8 @@ mmaction2 ``` -**Note**: The `JHMDB-GT.pkl` exists as a cache, it contains 6 items as follows: +:::{note} +The `JHMDB-GT.pkl` exists as a cache; it contains 6 items as follows: 1. `labels` (list): List of the 21 labels. 2. `gttubes` (dict): Dictionary that contains the ground truth tubes for each video. @@ -96,3 +97,5 @@ mmaction2 4. `train_videos` (list): A list with `nsplits=1` elements, each one containing the list of training videos. 5. `test_videos` (list): A list with `nsplits=1` elements, each one containing the list of testing videos. 6. `resolution` (dict): Dictionary that outputs a tuple (h,w) of the resolution for each video, like `'pour/Bartender_School_Students_Practice_pour_u_cm_np1_fr_med_1': (240, 320)`. + +:::
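Since the cache layout above can be hard to visualize, here is a minimal sketch (not part of the patch) of how to inspect it; the file path is an assumption, and the `encoding` argument is only a guard in case the cache was pickled under Python 2.

```python
import pickle

# Assumed location of the cache file described in the note above.
with open('data/jhmdb/JHMDB-GT.pkl', 'rb') as f:
    gt = pickle.load(f, encoding='latin1')

print(sorted(gt.keys()))  # should list the six items described above
video = 'pour/Bartender_School_Students_Practice_pour_u_cm_np1_fr_med_1'
print(gt['resolution'][video])  # -> (240, 320), per the example above
```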
diff --git a/tools/data/kinetics/README.md b/tools/data/kinetics/README.md index 6ece037493..72c64d7bac 100644 --- a/tools/data/kinetics/README.md +++ b/tools/data/kinetics/README.md @@ -18,12 +18,15 @@ For basic dataset information, please refer to the official [website](https://deepmind.com/research/open-source/open-source-datasets/kinetics/). The scripts can be used for preparing kinetics400, kinetics600, kinetics700. To prepare different versions of kinetics, you need to replace `${DATASET}` in the following examples with the specific dataset name. The choices of dataset names are `kinetics400`, `kinetics600` and `kinetics700`. Before we start, please make sure that the directory is located at `$MMACTION2/tools/data/${DATASET}/`. -**Note**: Because of the expirations of some YouTube links, the sizes of kinetics dataset copies may be different. Here are the sizes of our kinetics dataset copies that used to train all checkpoints. +:::{note} +Because of the expiration of some YouTube links, the sizes of kinetics dataset copies may be different. Here are the sizes of our kinetics dataset copies that were used to train all checkpoints. | Dataset | training videos | validation videos | | :---------------:|:---------------:|:---------------:| | kinetics400 | 240436 | 19796 | +::: + ## Step 1. Prepare Annotations First of all, you can run the following script to prepare annotations by downloading from the official [website](https://deepmind.com/research/open-source/open-source-datasets/kinetics/).
diff --git a/tools/data/thumos14/README.md b/tools/data/thumos14/README.md index 7c3c7533c0..eaddb60cbe 100644 --- a/tools/data/thumos14/README.md +++ b/tools/data/thumos14/README.md @@ -75,8 +75,6 @@ bash extract_frames.sh tvl1 ## Step 4. Fetch File List -Note: These files are not available temporarily, we will update the download link soon. - This part is **optional** if you do not use the SSN model. You can run the following script to fetch pre-computed tag proposals.
diff --git a/tools/data/ucf101_24/README.md b/tools/data/ucf101_24/README.md index f93b12c6dc..f7c3579eea 100644 --- a/tools/data/ucf101_24/README.md +++ b/tools/data/ucf101_24/README.md @@ -22,7 +22,9 @@ Before we start, please make sure that the directory is located at `$MMACTION2/t You can download the RGB frames, optical flow and ground truth annotations from [google drive](https://drive.google.com/drive/folders/1BvGywlAGrACEqRyfYbz3wzlVV3cDFkct). The data are provided from [MOC](https://github.com/MCG-NJU/MOC-Detector/blob/master/readme/Dataset.md), which is adapted from [act-detector](https://github.com/vkalogeiton/caffe/tree/act-detector) and [corrected-UCF101-Annots](https://github.com/gurkirt/corrected-UCF101-Annots). -**Note**: The annotation of this UCF101-24 is from [here](https://github.com/gurkirt/corrected-UCF101-Annots), which is more correct. +:::{note} +The annotation of this UCF101-24 is from [here](https://github.com/gurkirt/corrected-UCF101-Annots), which is more accurate. +::: After downloading the `UCF101_v2.tar.gz` file and putting it in `$MMACTION2/tools/data/ucf101_24/`, you can run the following command to uncompress. @@ -73,7 +75,9 @@ mmaction2 ``` -**Note**: The `UCF101v2-GT.pkl` exists as a cache, it contains 6 items as follows: +:::{note} +The `UCF101v2-GT.pkl` exists as a cache; it contains 6 items as follows: +::: 1. `labels` (list): List of the 24 labels. 2. `gttubes` (dict): Dictionary that contains the ground truth tubes for each video.
From e4a9398c3d90177d7b5547a2557e48783ee038ca Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Wed, 22 Sep 2021 18:11:32 +0800 Subject: [PATCH 257/414] [Weight] Add tsm_mobilenetv2 port from TSM (#1163) * add tsm_mobilenetv2 port from TSM * update * add metafile --- configs/recognition/tsm/README.md | 2 ++ configs/recognition/tsm/README_zh-CN.md | 2 ++ configs/recognition/tsm/metafile.yml | 22 ++++++++++++++++++++++ 3 files changed, 26 insertions(+) diff --git a/configs/recognition/tsm/README.md b/configs/recognition/tsm/README.md index e1042b2d74..97a0df1e71 100644 --- a/configs/recognition/tsm/README.md +++ b/configs/recognition/tsm/README.md @@ -45,6 +45,7 @@ |[tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb.py)|short-side 320|8x4| ResNet50| ImageNet |70.70|89.90|x|x|x|10125|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb_20200816-b93fd297.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log.json)| |[tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb.py)|short-side 320|8x4|ResNet50| ImageNet |71.60|90.34|x|x|x|8358|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb_20200724-d8ad84d2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log.json)| |[tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py)|short-side 320|8|MobileNetV2| ImageNet |68.46|88.64|x|x|x|3385|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/tsm_mobilenetv2_dense_320p_1x1x8_100e_kinetics400_rgb_20210202-61135809.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log.json)| +|[tsm_mobilenetv2_dense_1x1x8_kinetics400_rgb_port](/configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py)|short-side 320|8|MobileNetV2| ImageNet |69.89|89.01|x|x|x|3385|[infer_ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_kinetics400_rgb_port_20210922-aa5cadf6.pth)|x|x| ### Diving48 @@ -135,6 +136,7 @@ test_pipeline = [ 5. When applying Mixup and CutMix, we use the hyper parameter `alpha=0.2`. 6. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). 
The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. +7. The **infer_ckpt** means those checkpoints are ported from [TSM](https://github.com/mit-han-lab/temporal-shift-module/blob/master/test_models.py). ::: diff --git a/configs/recognition/tsm/README_zh-CN.md b/configs/recognition/tsm/README_zh-CN.md index 8f65a3bd14..165a5fee40 100644 --- a/configs/recognition/tsm/README_zh-CN.md +++ b/configs/recognition/tsm/README_zh-CN.md @@ -45,6 +45,7 @@ |[tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb.py)|短边 320|8x4| ResNet50| ImageNet |70.70|89.90|x|x|x|10125|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb_20200816-b93fd297.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log.json)| |[tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb.py)|短边 320|8x4|ResNet50| ImageNet |71.60|90.34|x|x|x|8358|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb_20200724-d8ad84d2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log.json)| |[tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py)|短边 320|8|MobileNetV2| ImageNet |68.46|88.64|x|x|x|3385|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/tsm_mobilenetv2_dense_320p_1x1x8_100e_kinetics400_rgb_20210202-61135809.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log.json)| +|[tsm_mobilenetv2_dense_1x1x8_kinetics400_rgb_port](/configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py)|短边 320|8|MobileNetV2| ImageNet |69.89|89.01|x|x|x|3385|[infer_ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_kinetics400_rgb_port_20210922-aa5cadf6.pth)|x|x| ### Diving48 @@ -142,6 +143,7 @@ test_pipeline = [ 5. 当采用 Mixup 和 CutMix 的数据增强时,使用超参 `alpha=0.2`。 6. 我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 +7. 
这里的 **infer_ckpt** 表示该模型权重文件是从 [TSM](https://github.com/mit-han-lab/temporal-shift-module/blob/master/test_models.py) 导入的。 对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400, Something-Something V1 and Something-Something V2 部分。 diff --git a/configs/recognition/tsm/metafile.yml b/configs/recognition/tsm/metafile.yml index d9b23cecd5..2957da5f19 100644 --- a/configs/recognition/tsm/metafile.yml +++ b/configs/recognition/tsm/metafile.yml @@ -806,3 +806,25 @@ Models: Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/20210605_182720.log Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb_20210630-8df9c358.pth gpu_mem(M): '10389' +- Config: configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py + In Collection: TSM + Metadata: + Architecture: MobileNetV2 + Batch Size: 8 + Epochs: 100 + FLOPs: 3337519104 + Parameters: 2736272 + Pretrained: ImageNet + Resolution: short-side 320 + Training Data: Kinetics-400 + Training Resources: 8 GPUs + Modality: RGB + Name: tsm_mobilenetv2_dense_1x1x8_kinetics400_rgb_port + Results: + - Dataset: Kinetics-400 + Metrics: + Top 1 Accuracy: 69.89 + Top 5 Accuracy: 89.01 + Task: Action Recognition + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_kinetics400_rgb_port_20210922-aa5cadf6.pth + gpu_mem(M): '3385' From f355fd6279a15ad5a92a425048cf5a65270c2db1 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Wed, 22 Sep 2021 19:48:03 +0800 Subject: [PATCH 258/414] [Fix] Modifications for Auto-Regression (#1165) --- configs/recognition/c3d/metafile.yml | 2 +- configs/recognition/csn/metafile.yml | 18 +++++++++--------- .../slowfast_r50_16x8x1_22e_sthv1_rgb.py | 3 +++ .../trn/trn_r50_1x1x8_50e_sthv2_rgb.py | 3 --- configs/recognition/tsm/metafile.yml | 2 +- 5 files changed, 14 insertions(+), 14 deletions(-) diff --git a/configs/recognition/c3d/metafile.yml b/configs/recognition/c3d/metafile.yml index 865e8020ac..f3e7ec9a5f 100644 --- a/configs/recognition/c3d/metafile.yml +++ b/configs/recognition/c3d/metafile.yml @@ -18,7 +18,7 @@ Models: Training Data: UCF101 Training Resources: 8 GPUs Modality: RGB - Name: c3d_sports1m_16x1x1_45e_ucf101_rgb.py + Name: c3d_sports1m_16x1x1_45e_ucf101_rgb Results: - Dataset: UCF101 Metrics: diff --git a/configs/recognition/csn/metafile.yml b/configs/recognition/csn/metafile.yml index c598555b0e..ece1f28043 100644 --- a/configs/recognition/csn/metafile.yml +++ b/configs/recognition/csn/metafile.yml @@ -18,7 +18,7 @@ Models: Training Data: Kinetics-400 Training Resources: 32 GPUs Modality: RGB - Name: ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py + Name: ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb Results: - Dataset: Kinetics-400 Metrics: @@ -41,7 +41,7 @@ Models: Training Data: Kinetics-400 Training Resources: 32 GPUs Modality: RGB - Name: ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py + Name: ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb Results: - Dataset: Kinetics-400 Metrics: @@ -62,7 +62,7 @@ Models: Resolution: short-side 320 Training Data: Kinetics-400 Modality: RGB - Name: ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py + Name: ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb Converted From: Weights: https://www.dropbox.com/s/3fihu6ti60047mu/ipCSN_152_kinetics_from_scratch_f129594342.pkl?dl=0 Code: 
https://github.com/facebookresearch/VMZ/tree/b61b08194bc3273bef4c45fdfdd36c56c8579ff3 @@ -84,7 +84,7 @@ Models: Resolution: short-side 320 Training Data: Kinetics-400 Modality: RGB - Name: ipcsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py + Name: ipcsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb Converted From: Weights: https://www.dropbox.com/s/zpp3p0vn2i7bibl/ipCSN_152_ft_kinetics_from_ig65m_f133090949.pkl?dl=0 Code: https://github.com/facebookresearch/VMZ/tree/b61b08194bc3273bef4c45fdfdd36c56c8579ff3 @@ -107,7 +107,7 @@ Models: Resolution: short-side 320 Training Data: Kinetics-400 Modality: RGB - Name: ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py + Name: ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb Converted From: Weights: https://www.dropbox.com/s/ir7cr0hda36knux/ipCSN_152_ft_kinetics_from_sports1m_f111279053.pkl?dl=0 Code: https://github.com/facebookresearch/VMZ/tree/b61b08194bc3273bef4c45fdfdd36c56c8579ff3 @@ -130,7 +130,7 @@ Models: Resolution: short-side 320 Training Data: Kinetics-400 Modality: RGB - Name: ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py + Name: ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb Converted From: Weights: https://www.dropbox.com/s/46gcm7up60ssx5c/irCSN_152_kinetics_from_scratch_f98268019.pkl?dl=0 Code: https://github.com/facebookresearch/VMZ/tree/b61b08194bc3273bef4c45fdfdd36c56c8579ff3 @@ -153,7 +153,7 @@ Models: Resolution: short-side 320 Training Data: Kinetics-400 Modality: RGB - Name: ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py + Name: ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb Converted From: Weights: https://www.dropbox.com/s/gmd8r87l3wmkn3h/irCSN_152_ft_kinetics_from_ig65m_f126851907.pkl?dl=0 Code: https://github.com/facebookresearch/VMZ/tree/b61b08194bc3273bef4c45fdfdd36c56c8579ff3 @@ -176,7 +176,7 @@ Models: Resolution: short-side 320 Training Data: Kinetics-400 Modality: RGB - Name: ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py + Name: ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb Converted From: Weights: https://www.dropbox.com/s/zuoj1aqouh6bo6k/irCSN_152_ft_kinetics_from_sports1m_f101599884.pkl?dl=0 Code: https://github.com/facebookresearch/VMZ/tree/b61b08194bc3273bef4c45fdfdd36c56c8579ff3 @@ -199,7 +199,7 @@ Models: Resolution: short-side 320 Training Data: Kinetics-400 Modality: RGB - Name: ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb.py + Name: ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb Results: - Dataset: Kinetics-400 Metrics: diff --git a/configs/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb.py b/configs/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb.py index f1e692c050..6cc79902db 100644 --- a/configs/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb.py +++ b/configs/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb.py @@ -72,16 +72,19 @@ type=dataset_type, ann_file=ann_file_train, data_prefix=data_root, + filename_tmpl='{:05}.jpg', pipeline=train_pipeline), val=dict( type=dataset_type, ann_file=ann_file_val, data_prefix=data_root_val, + filename_tmpl='{:05}.jpg', pipeline=val_pipeline), test=dict( type=dataset_type, ann_file=ann_file_test, data_prefix=data_root_val, + filename_tmpl='{:05}.jpg', pipeline=test_pipeline)) evaluation = dict( diff --git a/configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py b/configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py index 09e8e2f6d5..ab0ba48bb2 100644 --- 
a/configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py +++ b/configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py @@ -74,19 +74,16 @@ type=dataset_type, ann_file=ann_file_train, data_prefix=data_root, - filename_tmpl='{:05}.jpg', pipeline=train_pipeline), val=dict( type=dataset_type, ann_file=ann_file_val, data_prefix=data_root_val, - filename_tmpl='{:05}.jpg', pipeline=val_pipeline), test=dict( type=dataset_type, ann_file=ann_file_test, data_prefix=data_root_val, - filename_tmpl='{:05}.jpg', pipeline=test_pipeline)) evaluation = dict( interval=1, metrics=['top_k_accuracy', 'mean_class_accuracy']) diff --git a/configs/recognition/tsm/metafile.yml b/configs/recognition/tsm/metafile.yml index 2957da5f19..c438aea968 100644 --- a/configs/recognition/tsm/metafile.yml +++ b/configs/recognition/tsm/metafile.yml @@ -110,7 +110,7 @@ Models: Training Data: Kinetics-400 Training Resources: 8 GPUs Modality: RGB - Name: tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py + Name: tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb Results: - Dataset: Kinetics-400 Metrics: From c088cb419e364e37bd6cb7e9b0b87f840ba817d2 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Wed, 22 Sep 2021 21:43:55 +0800 Subject: [PATCH 259/414] [Doc] add mmocr (#1168) --- docs/conf.py | 4 ++++ docs_zh_CN/conf.py | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/docs/conf.py b/docs/conf.py index 5faa526c6e..5326309703 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -151,6 +151,10 @@ def get_version(): 'name': 'MMGeneration', 'url': 'https://github.com/open-mmlab/mmgeneration', }, + { + 'name': 'MMOCR', + 'url': 'https://github.com/open-mmlab/mmocr', + }, ] }, { diff --git a/docs_zh_CN/conf.py b/docs_zh_CN/conf.py index 853974e6e9..f72e7afc52 100644 --- a/docs_zh_CN/conf.py +++ b/docs_zh_CN/conf.py @@ -143,6 +143,10 @@ def get_version(): 'name': 'MMGeneration', 'url': 'https://github.com/open-mmlab/mmgeneration', }, + { + 'name': 'MMOCR', + 'url': 'https://github.com/open-mmlab/mmocr', + }, ] }, { From a9719428d5830788025b632fcdbdb97ba2dac2a2 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Tue, 28 Sep 2021 14:48:15 +0800 Subject: [PATCH 260/414] [Fix] Fix device bug for class weight (#1188) * fix device error * fix device error --- mmaction/models/losses/cross_entropy_loss.py | 1 + 1 file changed, 1 insertion(+) diff --git a/mmaction/models/losses/cross_entropy_loss.py b/mmaction/models/losses/cross_entropy_loss.py index 40072c9718..5c84c6b43e 100644 --- a/mmaction/models/losses/cross_entropy_loss.py +++ b/mmaction/models/losses/cross_entropy_loss.py @@ -58,6 +58,7 @@ def _forward(self, cls_score, label, **kwargs): lsm = F.log_softmax(cls_score, 1) if self.class_weight is not None: + self.class_weight = self.class_weight.to(cls_score.device) lsm = lsm * self.class_weight.unsqueeze(0) loss_cls = -(label * lsm).sum(1) From 091297ac15d5824963536239c0b7c7052845b656 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Tue, 28 Sep 2021 14:48:51 +0800 Subject: [PATCH 261/414] [Revise] Remove the useless aug MultiGroupCrop (#1180) * remove useless multigroupcrop * update --- .../tsn/tsn_r101_1x1x5_50e_mmit_rgb.py | 2 +- docs/tutorials/4_data_pipeline.md | 16 +- docs_zh_CN/tutorials/4_data_pipeline.md | 16 +- mmaction/datasets/pipelines/__init__.py | 37 +++-- mmaction/datasets/pipelines/augmentations.py | 137 ------------------ mmaction/models/recognizers/recognizer2d.py | 4 +- .../test_augmentations/test_crop.py | 41 +----- .../test_augmentations/test_transform.py | 36 +---- 8 files changed, 37 insertions(+), 252 
deletions(-) diff --git a/configs/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb.py b/configs/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb.py index 85e22a12bf..11da547492 100644 --- a/configs/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb.py +++ b/configs/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb.py @@ -77,7 +77,7 @@ test_mode=True), dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), - dict(type='MultiGroupCrop', crop_size=256, groups=1), + dict(type='CenterCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), diff --git a/docs/tutorials/4_data_pipeline.md b/docs/tutorials/4_data_pipeline.md index f60ab6bbc4..2caaf6442c 100644 --- a/docs/tutorials/4_data_pipeline.md +++ b/docs/tutorials/4_data_pipeline.md @@ -4,11 +4,12 @@ In this tutorial, we will introduce some methods about the design of data pipeli -- [Design of Data Pipelines](#design-of-data-pipelines) - - [Data loading](#data-loading) - - [Pre-processing](#pre-processing) - - [Formatting](#formatting) -- [Extend and Use Custom Pipelines](#extend-and-use-custom-pipelines) +- [Tutorial 4: Customize Data Pipelines](#tutorial-4-customize-data-pipelines) + - [Design of Data Pipelines](#design-of-data-pipelines) + - [Data loading](#data-loading) + - [Pre-processing](#pre-processing) + - [Formatting](#formatting) + - [Extend and Use Custom Pipelines](#extend-and-use-custom-pipelines) @@ -197,11 +198,6 @@ For each operation, we list the related dict fields that are added/updated/remov - add: crop_bbox, img_shape - update: imgs -`MultiGroupCrop` - -- add: crop_bbox, img_shape -- update: imgs - ### Formatting `ToTensor` diff --git a/docs_zh_CN/tutorials/4_data_pipeline.md b/docs_zh_CN/tutorials/4_data_pipeline.md index cfcd03013f..e47c940296 100644 --- a/docs_zh_CN/tutorials/4_data_pipeline.md +++ b/docs_zh_CN/tutorials/4_data_pipeline.md @@ -4,11 +4,12 @@ -- [数据前处理流水线设计](#数据前处理流水线设计) - - [数据加载](#数据加载) - - [数据预处理](#数据预处理) - - [数据格式化](#数据格式化) -- [扩展和使用自定义流水线](#扩展和使用自定义流水线) +- [教程 4:如何设计数据处理流程](#教程-4如何设计数据处理流程) + - [数据前处理流水线设计](#数据前处理流水线设计) + - [数据加载](#数据加载) + - [数据预处理](#数据预处理) + - [数据格式化](#数据格式化) + - [扩展和使用自定义流水线](#扩展和使用自定义流水线) @@ -192,11 +193,6 @@ train_pipeline = [ - 新增: crop_bbox, img_shape - 更新: imgs -`MultiGroupCrop` - -- 新增: crop_bbox, img_shape -- 更新: imgs - ### 数据格式化 `ToTensor` diff --git a/mmaction/datasets/pipelines/__init__.py b/mmaction/datasets/pipelines/__init__.py index 09e3fa7649..d99fd7ce74 100644 --- a/mmaction/datasets/pipelines/__init__.py +++ b/mmaction/datasets/pipelines/__init__.py @@ -1,9 +1,8 @@ # Copyright (c) OpenMMLab. All rights reserved. 
from .augmentations import (AudioAmplify, CenterCrop, ColorJitter, Flip, Fuse, - Imgaug, MelSpectrogram, MultiGroupCrop, - MultiScaleCrop, Normalize, PytorchVideoTrans, - RandomCrop, RandomRescale, RandomResizedCrop, - RandomScale, Resize, TenCrop, ThreeCrop, + Imgaug, MelSpectrogram, MultiScaleCrop, Normalize, + PytorchVideoTrans, RandomCrop, RandomRescale, + RandomResizedCrop, Resize, TenCrop, ThreeCrop, TorchvisionTrans) from .compose import Compose from .formating import (Collect, FormatAudioShape, FormatGCNInput, FormatShape, @@ -24,19 +23,19 @@ __all__ = [ 'SampleFrames', 'PyAVDecode', 'DecordDecode', 'DenseSampleFrames', - 'OpenCVDecode', 'MultiGroupCrop', 'MultiScaleCrop', 'RandomResizedCrop', - 'RandomCrop', 'Resize', 'Flip', 'Fuse', 'Normalize', 'ThreeCrop', - 'CenterCrop', 'TenCrop', 'ImageToTensor', 'Transpose', 'Collect', - 'FormatShape', 'Compose', 'ToTensor', 'ToDataContainer', - 'GenerateLocalizationLabels', 'LoadLocalizationFeature', 'LoadProposals', - 'DecordInit', 'OpenCVInit', 'PyAVInit', 'SampleProposalFrames', - 'UntrimmedSampleFrames', 'RawFrameDecode', 'DecordInit', 'OpenCVInit', - 'PyAVInit', 'SampleProposalFrames', 'ColorJitter', 'LoadHVULabel', - 'SampleAVAFrames', 'AudioAmplify', 'MelSpectrogram', 'AudioDecode', - 'FormatAudioShape', 'LoadAudioFeature', 'AudioFeatureSelector', - 'AudioDecodeInit', 'RandomScale', 'ImageDecode', 'BuildPseudoClip', - 'RandomRescale', 'PyAVDecodeMotionVector', 'Rename', 'Imgaug', - 'UniformSampleFrames', 'PoseDecode', 'LoadKineticsPose', - 'GeneratePoseTarget', 'PIMSInit', 'PIMSDecode', 'TorchvisionTrans', - 'PytorchVideoTrans', 'PoseNormalize', 'FormatGCNInput', 'PaddingWithLoop' + 'OpenCVDecode', 'MultiScaleCrop', 'RandomResizedCrop', 'RandomCrop', + 'Resize', 'Flip', 'Fuse', 'Normalize', 'ThreeCrop', 'CenterCrop', + 'TenCrop', 'ImageToTensor', 'Transpose', 'Collect', 'FormatShape', + 'Compose', 'ToTensor', 'ToDataContainer', 'GenerateLocalizationLabels', + 'LoadLocalizationFeature', 'LoadProposals', 'DecordInit', 'OpenCVInit', + 'PyAVInit', 'SampleProposalFrames', 'UntrimmedSampleFrames', + 'RawFrameDecode', 'DecordInit', 'OpenCVInit', 'PyAVInit', + 'SampleProposalFrames', 'ColorJitter', 'LoadHVULabel', 'SampleAVAFrames', + 'AudioAmplify', 'MelSpectrogram', 'AudioDecode', 'FormatAudioShape', + 'LoadAudioFeature', 'AudioFeatureSelector', 'AudioDecodeInit', + 'ImageDecode', 'BuildPseudoClip', 'RandomRescale', + 'PyAVDecodeMotionVector', 'Rename', 'Imgaug', 'UniformSampleFrames', + 'PoseDecode', 'LoadKineticsPose', 'GeneratePoseTarget', 'PIMSInit', + 'PIMSDecode', 'TorchvisionTrans', 'PytorchVideoTrans', 'PoseNormalize', + 'FormatGCNInput', 'PaddingWithLoop' ] diff --git a/mmaction/datasets/pipelines/augmentations.py b/mmaction/datasets/pipelines/augmentations.py index dc411dd9e8..d157e14442 100644 --- a/mmaction/datasets/pipelines/augmentations.py +++ b/mmaction/datasets/pipelines/augmentations.py @@ -551,74 +551,6 @@ def __call__(self, results): return results -@PIPELINES.register_module() -class RandomScale: - """Resize images by a random scale. - - Required keys are "imgs", "img_shape", "modality", added or modified - keys are "imgs", "img_shape", "keep_ratio", "scale_factor", "lazy", - "scale", "resize_size". Required keys in "lazy" is None, added or - modified key is "interpolation". - - Args: - scales (tuple[int]): Tuple of scales to be chosen for resize. - mode (str): Selection mode for choosing the scale. Options are "range" - and "value". 
If set to "range", The short edge will be randomly - chosen from the range of minimum and maximum on the shorter one - in all tuples. Otherwise, the longer edge will be randomly chosen - from the range of minimum and maximum on the longer one in all - tuples. Default: 'range'. - """ - - def __init__(self, scales, mode='range', **kwargs): - warnings.warn('"RandomScale" is deprecated and will be removed in ' - 'later versions. It is currently not used in MMAction2') - self.mode = mode - if self.mode not in ['range', 'value']: - raise ValueError(f"mode should be 'range' or 'value', " - f'but got {self.mode}') - self.scales = scales - self.kwargs = kwargs - - def select_scale(self, scales): - num_scales = len(scales) - if num_scales == 1: - # specify a fixed scale - scale = scales[0] - elif num_scales == 2: - if self.mode == 'range': - scale_long = [max(s) for s in scales] - scale_short = [min(s) for s in scales] - long_edge = np.random.randint( - min(scale_long), - max(scale_long) + 1) - short_edge = np.random.randint( - min(scale_short), - max(scale_short) + 1) - scale = (long_edge, short_edge) - elif self.mode == 'value': - scale = random.choice(scales) - else: - if self.mode != 'value': - raise ValueError("Only 'value' mode supports more than " - '2 image scales') - scale = random.choice(scales) - - return scale - - def __call__(self, results): - scale = self.select_scale(self.scales) - results['scale'] = scale - resize = Resize(scale, **self.kwargs) - results = resize(results) - return results - - def __repr__(self): - repr_str = (f'{self.__class__.__name__}(' - f'scales={self.scales}, mode={self.mode})') - return repr_str - - @PIPELINES.register_module() class RandomCrop: """Vanilla square random crop that specifics the output size. @@ -1862,75 +1794,6 @@ def __repr__(self): return repr_str -@PIPELINES.register_module() -class MultiGroupCrop: - """Randomly crop the images into several groups. - - Crop the random region with the same given crop_size and bounding box - into several groups. - Required keys are "imgs", added or modified keys are "imgs", "crop_bbox" - and "img_shape". - - Args: - crop_size(int | tuple[int]): (w, h) of crop size. - groups(int): Number of groups. - """ - - def __init__(self, crop_size, groups): - self.crop_size = _pair(crop_size) - self.groups = groups - if not mmcv.is_tuple_of(self.crop_size, int): - raise TypeError('Crop size must be int or tuple of int, ' - f'but got {type(crop_size)}') - - if not isinstance(groups, int): - raise TypeError(f'Groups must be int, but got {type(groups)}.') - - if groups <= 0: - raise ValueError('Groups must be positive.') - - def __call__(self, results): - """Performs the MultiGroupCrop augmentation. - - Args: - results (dict): The resulting dict to be modified and passed - to the next transform in pipeline. 
- """ - if 'gt_bboxes' in results or 'proposals' in results: - warnings.warn('MultiGroupCrop cannot process bounding boxes') - - imgs = results['imgs'] - img_h, img_w = imgs[0].shape[:2] - crop_w, crop_h = self.crop_size - - img_crops = [] - crop_bboxes = [] - for _ in range(self.groups): - x_offset = random.randint(0, img_w - crop_w) - y_offset = random.randint(0, img_h - crop_h) - - bbox = [x_offset, y_offset, x_offset + crop_w, y_offset + crop_h] - crop = [ - img[y_offset:y_offset + crop_h, x_offset:x_offset + crop_w] - for img in imgs - ] - img_crops.extend(crop) - crop_bboxes.extend([bbox for _ in range(len(imgs))]) - - crop_bboxes = np.array(crop_bboxes) - results['imgs'] = img_crops - results['crop_bbox'] = crop_bboxes - results['img_shape'] = results['imgs'][0].shape[:2] - - return results - - def __repr__(self): - repr_str = (f'{self.__class__.__name__}' - f'(crop_size={self.crop_size}, ' - f'groups={self.groups})') - return repr_str - - @PIPELINES.register_module() class AudioAmplify: """Amplify the waveform. diff --git a/mmaction/models/recognizers/recognizer2d.py b/mmaction/models/recognizers/recognizer2d.py index 7626918f51..a1acc09138 100644 --- a/mmaction/models/recognizers/recognizer2d.py +++ b/mmaction/models/recognizers/recognizer2d.py @@ -88,7 +88,7 @@ def _do_test(self, imgs): # `num_crops` is calculated by: # 1) `twice_sample` in `SampleFrames` # 2) `num_sample_positions` in `DenseSampleFrames` - # 3) `ThreeCrop/TenCrop/MultiGroupCrop` in `test_pipeline` + # 3) `ThreeCrop/TenCrop` in `test_pipeline` # 4) `num_clips` in `SampleFrames` or its subclass if `clip_len != 1` # should have cls_head if not extracting features @@ -127,7 +127,7 @@ def _do_fcn_test(self, imgs): # `num_crops` is calculated by: # 1) `twice_sample` in `SampleFrames` # 2) `num_sample_positions` in `DenseSampleFrames` - # 3) `ThreeCrop/TenCrop/MultiGroupCrop` in `test_pipeline` + # 3) `ThreeCrop/TenCrop` in `test_pipeline` # 4) `num_clips` in `SampleFrames` or its subclass if `clip_len != 1` cls_score = self.cls_head(x, fcn_test=True) diff --git a/tests/test_data/test_pipelines/test_augmentations/test_crop.py b/tests/test_data/test_pipelines/test_augmentations/test_crop.py index b8d754505b..400327deac 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_crop.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_crop.py @@ -3,9 +3,9 @@ import pytest from mmcv.utils import assert_dict_has_keys -from mmaction.datasets.pipelines import (CenterCrop, MultiGroupCrop, - MultiScaleCrop, RandomCrop, - RandomResizedCrop, TenCrop, ThreeCrop) +from mmaction.datasets.pipelines import (CenterCrop, MultiScaleCrop, + RandomCrop, RandomResizedCrop, + TenCrop, ThreeCrop) from .base import check_crop @@ -292,38 +292,3 @@ def test_ten_crop(): assert repr(ten_crop) == (f'{ten_crop.__class__.__name__}' f'(crop_size={(224, 224)})') - - @staticmethod - def test_multi_group_crop(): - with pytest.raises(TypeError): - # crop_size must be int or tuple of int - MultiGroupCrop(0.5, 1) - - with pytest.raises(TypeError): - # crop_size must be int or tuple of int - MultiGroupCrop('224', 1) - - with pytest.raises(TypeError): - # groups must be int - MultiGroupCrop(224, '1') - - with pytest.raises(ValueError): - # groups must be positive - MultiGroupCrop(224, 0) - - target_keys = ['imgs', 'crop_bbox', 'img_shape'] - - # multi_group_crop with crop_size 224, groups 3 - imgs = list(np.random.rand(2, 256, 341, 3)) - results = dict(imgs=imgs) - multi_group_crop = MultiGroupCrop(224, 3) - multi_group_crop_result = 
multi_group_crop(results) - assert assert_dict_has_keys(multi_group_crop_result, target_keys) - assert check_crop(imgs, multi_group_crop_result['imgs'], - multi_group_crop_result['crop_bbox'], - multi_group_crop.groups) - assert multi_group_crop_result['img_shape'] == (224, 224) - - assert repr(multi_group_crop) == ( - f'{multi_group_crop.__class__.__name__}' - f'(crop_size={(224, 224)}, groups={3})') diff --git a/tests/test_data/test_pipelines/test_augmentations/test_transform.py b/tests/test_data/test_pipelines/test_augmentations/test_transform.py index 67fe310d66..31abd647f1 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_transform.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_transform.py @@ -6,7 +6,7 @@ from mmcv.utils import assert_dict_has_keys from numpy.testing import assert_array_almost_equal -from mmaction.datasets.pipelines import RandomRescale, RandomScale, Resize +from mmaction.datasets.pipelines import RandomRescale, Resize from mmaction.datasets.pipelines.augmentations import PoseCompact @@ -114,40 +114,6 @@ def test_resize(): f'(scale={(341, 256)}, keep_ratio={False}, ' + f'interpolation=bilinear, lazy={False})') - @staticmethod - def test_random_scale(): - scales = ((200, 64), (250, 80)) - with pytest.raises(ValueError): - RandomScale(scales, 'unsupport') - - with pytest.raises(ValueError): - random_scale = RandomScale([(800, 256), (1000, 320), (800, 320)]) - random_scale({}) - - imgs = list(np.random.rand(2, 340, 256, 3)) - results = dict(imgs=imgs, img_shape=(340, 256)) - - results_ = copy.deepcopy(results) - random_scale_range = RandomScale(scales) - results_ = random_scale_range(results_) - assert 200 <= results_['scale'][0] <= 250 - assert 64 <= results_['scale'][1] <= 80 - - results_ = copy.deepcopy(results) - random_scale_value = RandomScale(scales, 'value') - results_ = random_scale_value(results_) - assert results_['scale'] in scales - - random_scale_single = RandomScale([(200, 64)]) - results_ = copy.deepcopy(results) - results_ = random_scale_single(results_) - assert results_['scale'] == (200, 64) - - assert repr(random_scale_range) == ( - f'{random_scale_range.__class__.__name__}' - f'(scales={((200, 64), (250, 80))}, ' - 'mode=range)') - class TestPoseCompact: From 1ba2f72baee4c3602f199332c1b4d852617bcb87 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Tue, 28 Sep 2021 14:49:13 +0800 Subject: [PATCH 262/414] [Docs] Fix invalid url about getting-started (#1169) * fix docs * fix docs * fix docs * fix cn docs * fix cn docs * try url * try url * try url * fix tutorial 7 * fix cn picture * add switch language pinging * update docs * fix docs --- README.md | 6 ++-- README_zh-CN.md | 8 ++--- configs/detection/ava/README.md | 2 +- configs/detection/ava/README_zh-CN.md | 2 +- configs/recognition/trn/README.md | 2 +- configs/recognition/tsn/README.md | 2 +- docs/index.rst | 2 ++ docs/merge_docs.sh | 12 ++++---- docs/tutorials/4_data_pipeline.md | 2 +- docs/tutorials/5_new_modules.md | 4 +-- docs/tutorials/7_customize_runtime.md | 41 ++++++++++++------------- docs/useful_tools.md | 2 +- docs_zh_CN/index.rst | 2 ++ docs_zh_CN/merge_docs.sh | 10 +++--- docs_zh_CN/tutorials/4_data_pipeline.md | 2 +- docs_zh_CN/tutorials/5_new_modules.md | 4 +-- docs_zh_CN/useful_tools.md | 2 +- 17 files changed, 54 insertions(+), 51 deletions(-) diff --git a/README.md b/README.md index 4ea8484cfd..050bfa1473 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@
    ## Introduction @@ -20,11 +20,11 @@ It is a part of the [OpenMMLab](http://openmmlab.org/) project. The master branch works with **PyTorch 1.3+**.
Action Recognition Results on Kinetics-400
Spatio-Temporal Action Detection Results on AVA-2.1
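An editorial aside on the MultiGroupCrop removal in PATCH 261 (#1180) above: once that transform is gone, the number of test-time crops comes only from `ThreeCrop`/`TenCrop` and the clip sampler, and `Recognizer2D` folds clips and crops into the batch dimension before the head, then averages the scores back per video. The snippet below is a minimal sketch of that reshape-and-average step; the concrete shapes and the `average_clips='score'` behaviour are illustrative assumptions, not the exact MMAction2 code path.

```python
import torch

# Illustrative shapes: 1 video, 10 sampled clips, ThreeCrop -> 30 score rows.
batch, num_clips, num_crops, num_classes = 1, 10, 3, 400
cls_score = torch.randn(batch * num_clips * num_crops, num_classes)

# Unfold the clip/crop dimension and average per video, mimicking
# what average_clips='score' does at test time.
cls_score = cls_score.view(batch, num_clips * num_crops, num_classes)
video_score = cls_score.mean(dim=1)
assert video_score.shape == (batch, num_classes)
```

This is also why swapping `MultiGroupCrop(crop_size=256, groups=1)` for `CenterCrop(crop_size=256)` in the configs is a natural substitution: with a single group, MultiGroupCrop amounted to one random crop, and a deterministic center crop is the usual test-time choice.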
    -## Citation - - - -```BibTeX -@inproceedings{gu2018ava, - title={Ava: A video dataset of spatio-temporally localized atomic visual actions}, - author={Gu, Chunhui and Sun, Chen and Ross, David A and Vondrick, Carl and Pantofaru, Caroline and Li, Yeqing and Vijayanarasimhan, Sudheendra and Toderici, George and Ricco, Susanna and Sukthankar, Rahul and others}, - booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition}, - pages={6047--6056}, - year={2018} -} -``` - - - -```BibTeX -@inproceedings{sun2018actor, - title={Actor-centric relation network}, - author={Sun, Chen and Shrivastava, Abhinav and Vondrick, Carl and Murphy, Kevin and Sukthankar, Rahul and Schmid, Cordelia}, - booktitle={Proceedings of the European Conference on Computer Vision (ECCV)}, - pages={318--334}, - year={2018} -} -``` - -## Model Zoo +## Results and Models ### AVA2.1 @@ -92,3 +70,27 @@ python tools/test.py configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r5 ``` For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset) . + +## Citation + + + +```BibTeX +@inproceedings{gu2018ava, + title={Ava: A video dataset of spatio-temporally localized atomic visual actions}, + author={Gu, Chunhui and Sun, Chen and Ross, David A and Vondrick, Carl and Pantofaru, Caroline and Li, Yeqing and Vijayanarasimhan, Sudheendra and Toderici, George and Ricco, Susanna and Sukthankar, Rahul and others}, + booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition}, + pages={6047--6056}, + year={2018} +} +``` + +```BibTeX +@inproceedings{sun2018actor, + title={Actor-centric relation network}, + author={Sun, Chen and Shrivastava, Abhinav and Vondrick, Carl and Murphy, Kevin and Sukthankar, Rahul and Schmid, Cordelia}, + booktitle={Proceedings of the European Conference on Computer Vision (ECCV)}, + pages={318--334}, + year={2018} +} +``` diff --git a/configs/detection/ava/README.md b/configs/detection/ava/README.md index 5fa66a4c18..befcf88b0f 100644 --- a/configs/detection/ava/README.md +++ b/configs/detection/ava/README.md @@ -1,5 +1,9 @@ # AVA +[Ava: A video dataset of spatio-temporally localized atomic visual actions](https://openaccess.thecvf.com/content_cvpr_2018/html/Gu_AVA_A_Video_CVPR_2018_paper.html) + + +
    @@ -16,31 +20,6 @@ AVA, with its realistic scene and action complexity, exposes the intrinsic diffi
    diff --git a/README_zh-CN.md b/README_zh-CN.md index 7402bd9fe6..fa732f9cfd 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -1,5 +1,5 @@
    ## 简介 @@ -19,11 +19,11 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLa 主分支代码目前支持 **PyTorch 1.3 以上**的版本
Kinetics-400 上的动作识别
AVA-2.1 上的时空动作检测
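One more aside, on the one-line device fix in PATCH 260 (#1188) above: a `class_weight` tensor built on the CPU cannot be multiplied with GPU logits, so the patch moves it onto `cls_score.device` before weighting the log-softmax. Below is a minimal standalone sketch of the weighted soft-label cross entropy that the patched `_forward` computes; the function name and the final mean reduction are assumptions made so the example runs on its own.

```python
import torch
import torch.nn.functional as F

def weighted_soft_ce(cls_score, label, class_weight):
    # Mirror of the fix: keep the per-class weights on the same
    # device as the logits before applying them.
    class_weight = class_weight.to(cls_score.device)
    lsm = F.log_softmax(cls_score, 1) * class_weight.unsqueeze(0)
    return -(label * lsm).sum(1).mean()

scores = torch.randn(4, 5)                                 # logits
labels = F.one_hot(torch.tensor([0, 2, 1, 4]), 5).float()  # soft labels
print(weighted_soft_ce(scores, labels, torch.ones(5)))
```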
    @@ -254,7 +254,7 @@ MMAction2 是一款由不同学校和公司共同贡献的开源项目。我们 扫描下方的二维码可关注 OpenMMLab 团队的 [知乎官方账号](https://www.zhihu.com/people/openmmlab),加入 OpenMMLab 团队的 [官方交流 QQ 群](https://jq.qq.com/?_wv=1027&k=aCvMxdr3)
    我们会在 OpenMMLab 社区为大家 diff --git a/configs/detection/ava/README.md b/configs/detection/ava/README.md index 79a7c4762a..bac296f79a 100644 --- a/configs/detection/ava/README.md +++ b/configs/detection/ava/README.md @@ -1,7 +1,7 @@ # AVA
    ## Introduction diff --git a/configs/detection/ava/README_zh-CN.md b/configs/detection/ava/README_zh-CN.md index 51de6fa290..6cd82f4a3e 100644 --- a/configs/detection/ava/README_zh-CN.md +++ b/configs/detection/ava/README_zh-CN.md @@ -1,7 +1,7 @@ # AVA
    ## 简介 diff --git a/configs/recognition/trn/README.md b/configs/recognition/trn/README.md index fa44f0b06f..582d3f73df 100644 --- a/configs/recognition/trn/README.md +++ b/configs/recognition/trn/README.md @@ -77,4 +77,4 @@ python tools/test.py configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py \ --out result.json ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). diff --git a/configs/recognition/tsn/README.md b/configs/recognition/tsn/README.md index c12b582df8..f5e754b745 100644 --- a/configs/recognition/tsn/README.md +++ b/configs/recognition/tsn/README.md @@ -231,4 +231,4 @@ python tools/test.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb. --out result.json ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). diff --git a/docs/index.rst b/docs/index.rst index 749113fe03..b64cb6ea47 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -3,6 +3,8 @@ Welcome to MMAction2's documentation! You can switch between Chinese and English documents in the lower-left corner of the layout. +您可以在页面左下角切换文档语言。 + .. toctree:: :maxdepth: 2 diff --git a/docs/merge_docs.sh b/docs/merge_docs.sh index 1c988cdba6..b38366f616 100755 --- a/docs/merge_docs.sh +++ b/docs/merge_docs.sh @@ -3,15 +3,15 @@ sed -i '$a\\n' ../demo/README.md # gather models -cat ../configs/localization/*/README.md | sed "s/md###t/html#t/g" | sed "s/#/#&/" | sed '1i\# Action Localization Models' | sed 's/](\/docs\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' > localization_models.md -cat ../configs/recognition/*/README.md | sed "s/md###t/html#t/g" | sed "s/#/#&/" | sed '1i\# Action Recognition Models' | sed 's/](\/docs\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' > recognition_models.md -cat ../configs/recognition_audio/*/README.md | sed "s/md###t/html#t/g" | sed "s/#/#&/" | sed 's/](\/docs\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' >> recognition_models.md -cat ../configs/detection/*/README.md | sed "s/md###t/html#t/g" | sed "s/#/#&/" | sed '1i\# Spatio Temporal Action Detection Models' | sed 's/](\/docs\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' > detection_models.md -cat ../configs/skeleton/*/README.md | sed "s/md###t/html#t/g" | sed "s/#/#&/" | sed '1i\# Skeleton-based Action Recognition Models' | sed 's/](\/docs\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' > skeleton_models.md +cat ../configs/localization/*/README.md | sed "s/md#t/html#t/g" | sed "s/#/#&/" | sed '1i\# Action Localization Models' | sed 's/](\/docs\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' | sed "s/getting_started.html##t/getting_started.html#t/g" > localization_models.md +cat ../configs/recognition/*/README.md | sed "s/md#t/html#t/g" | sed "s/#/#&/" | sed '1i\# Action Recognition Models' | sed 's/](\/docs\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' | sed "s/getting_started.html##t/getting_started.html#t/g" > recognition_models.md +cat ../configs/recognition_audio/*/README.md | sed 
"s/md#t/html#t/g" | sed "s/#/#&/" | sed 's/](\/docs\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' | sed "s/getting_started.html##t/getting_started.html#t/g" >> recognition_models.md +cat ../configs/detection/*/README.md | sed "s/md#t/html#t/g" | sed "s/#/#&/" | sed '1i\# Spatio Temporal Action Detection Models' | sed 's/](\/docs\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' | sed "s/getting_started.html##t/getting_started.html#t/g" > detection_models.md +cat ../configs/skeleton/*/README.md | sed "s/md#t/html#t/g" | sed "s/#/#&/" | sed '1i\# Skeleton-based Action Recognition Models' | sed 's/](\/docs\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' | sed "s/getting_started.html##t/getting_started.html#t/g" > skeleton_models.md # demo -cat ../demo/README.md | sed "s/md###t/html#t/g" | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' > demo.md +cat ../demo/README.md | sed "s/md#t/html#t/g" | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' | sed "s/getting_started.html##t/getting_started.html#t/g" > demo.md # gather datasets cat ../tools/data/*/README.md | sed 's/# Preparing/# /g' | sed 's/#/#&/' > prepare_data.md diff --git a/docs/tutorials/4_data_pipeline.md b/docs/tutorials/4_data_pipeline.md index 2caaf6442c..2ecdedb5ed 100644 --- a/docs/tutorials/4_data_pipeline.md +++ b/docs/tutorials/4_data_pipeline.md @@ -28,7 +28,7 @@ A pipeline consists of a sequence of operations. Each operation takes a dict as We present a typical pipeline in the following figure. The blue blocks are pipeline operations. With the pipeline going on, each operator can add new keys (marked as green) to the result dict or update the existing keys (marked as orange). -![pipeline figure](/resources/data_pipeline.png) +![pipeline figure](https://github.com/open-mmlab/mmaction2/raw/master/resources/data_pipeline.png) The operations are categorized into data loading, pre-processing and formatting. diff --git a/docs/tutorials/5_new_modules.md b/docs/tutorials/5_new_modules.md index 940205b596..366d979049 100644 --- a/docs/tutorials/5_new_modules.md +++ b/docs/tutorials/5_new_modules.md @@ -258,14 +258,14 @@ In the api for [`train.py`](/mmaction/apis/train.py), it will register the learn So far, the supported updaters can be find in [mmcv](https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/lr_updater.py), but if you want to customize a new learning rate updater, you may follow the steps below: -1. First, write your own LrUpdaterHook in `$MMAction2/mmaction/core/lr`. The snippet followed is an example of customized lr updater that uses learning rate based on a specific learning rate ratio: `lrs`, by which the learning rate decreases at each `steps`: +1. First, write your own LrUpdaterHook in `$MMAction2/mmaction/core/scheduler`. 
The snippet followed is an example of customized lr updater that uses learning rate based on a specific learning rate ratio: `lrs`, by which the learning rate decreases at each `steps`: ```python @HOOKS.register_module() # Register it here class RelativeStepLrUpdaterHook(LrUpdaterHook): # You should inheritate it from mmcv.LrUpdaterHook - def __init__(self, runner, steps, lrs, **kwargs): + def __init__(self, steps, lrs, **kwargs): super().__init__(**kwargs) assert len(steps) == (len(lrs)) self.steps = steps diff --git a/docs/tutorials/7_customize_runtime.md b/docs/tutorials/7_customize_runtime.md index dcd0b4d1ea..232057e376 100644 --- a/docs/tutorials/7_customize_runtime.md +++ b/docs/tutorials/7_customize_runtime.md @@ -4,27 +4,26 @@ In this tutorial, we will introduce some methods about how to customize optimiza -- [Tutorial 7: Customize Runtime Settings](#tutorial-7-customize-runtime-settings) - - [Customize Optimization Methods](#customize-optimization-methods) - - [Customize optimizer supported by PyTorch](#customize-optimizer-supported-by-pytorch) - - [Customize self-implemented optimizer](#customize-self-implemented-optimizer) - - [1. Define a new optimizer](#1-define-a-new-optimizer) - - [2. Add the optimizer to registry](#2-add-the-optimizer-to-registry) - - [3. Specify the optimizer in the config file](#3-specify-the-optimizer-in-the-config-file) - - [Customize optimizer constructor](#customize-optimizer-constructor) - - [Additional settings](#additional-settings) - - [Customize Training Schedules](#customize-training-schedules) - - [Customize Workflow](#customize-workflow) - - [Customize Hooks](#customize-hooks) - - [Customize self-implemented hooks](#customize-self-implemented-hooks) - - [1. Implement a new hook](#1-implement-a-new-hook) - - [2. Register the new hook](#2-register-the-new-hook) - - [3. Modify the config](#3-modify-the-config) - - [Use hooks implemented in MMCV](#use-hooks-implemented-in-mmcv) - - [Modify default runtime hooks](#modify-default-runtime-hooks) - - [Checkpoint config](#checkpoint-config) - - [Log config](#log-config) - - [Evaluation config](#evaluation-config) +- [Customize Optimization Methods](#customize-optimization-methods) + - [Customize optimizer supported by PyTorch](#customize-optimizer-supported-by-pytorch) + - [Customize self-implemented optimizer](#customize-self-implemented-optimizer) + - [1. Define a new optimizer](#1-define-a-new-optimizer) + - [2. Add the optimizer to registry](#2-add-the-optimizer-to-registry) + - [3. Specify the optimizer in the config file](#3-specify-the-optimizer-in-the-config-file) + - [Customize optimizer constructor](#customize-optimizer-constructor) + - [Additional settings](#additional-settings) +- [Customize Training Schedules](#customize-training-schedules) +- [Customize Workflow](#customize-workflow) +- [Customize Hooks](#customize-hooks) + - [Customize self-implemented hooks](#customize-self-implemented-hooks) + - [1. Implement a new hook](#1-implement-a-new-hook) + - [2. Register the new hook](#2-register-the-new-hook) + - [3. 
Modify the config](#3-modify-the-config) + - [Use hooks implemented in MMCV](#use-hooks-implemented-in-mmcv) + - [Modify default runtime hooks](#modify-default-runtime-hooks) + - [Checkpoint config](#checkpoint-config) + - [Log config](#log-config) + - [Evaluation config](#evaluation-config) diff --git a/docs/useful_tools.md b/docs/useful_tools.md index b303e63911..77bb8c321c 100644 --- a/docs/useful_tools.md +++ b/docs/useful_tools.md @@ -19,7 +19,7 @@ Apart from training/testing scripts, We provide lots of useful tools under the ` `tools/analysis/analyze_logs.py` plots loss/top-k acc curves given a training log file. Run `pip install seaborn` first to install the dependency. -![acc_curve_image](/resources/acc_curve.png) +![acc_curve_image](https://github.com/open-mmlab/mmaction2/raw/master/resources/acc_curve.png) ```shell python tools/analysis/analyze_logs.py plot_curve ${JSON_LOGS} [--keys ${KEYS}] [--title ${TITLE}] [--legend ${LEGEND}] [--backend ${BACKEND}] [--style ${STYLE}] [--out ${OUT_FILE}] diff --git a/docs_zh_CN/index.rst b/docs_zh_CN/index.rst index 71a9527095..4c4351e59b 100644 --- a/docs_zh_CN/index.rst +++ b/docs_zh_CN/index.rst @@ -3,6 +3,8 @@ 您可以在页面左下角切换中英文文档。 +You can change the documentation language at the lower-left corner of the page. + .. toctree:: :maxdepth: 2 diff --git a/docs_zh_CN/merge_docs.sh b/docs_zh_CN/merge_docs.sh index 2bc75b8f89..1265731a97 100755 --- a/docs_zh_CN/merge_docs.sh +++ b/docs_zh_CN/merge_docs.sh @@ -1,10 +1,10 @@ #!/usr/bin/env bash # gather models -cat ../configs/localization/*/README_zh-CN.md | sed "s/md###t/html#t/g" | sed "s/#/#&/" | sed '1i\# 时序动作检测模型' | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' > localization_models.md -cat ../configs/recognition/*/README_zh-CN.md | sed "s/md###t/html#t/g" | sed "s/#/#&/" | sed '1i\# 动作识别模型' | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' > recognition_models.md -cat ../configs/recognition_audio/*/README_zh-CN.md | sed "s/md###t/html#t/g" | sed "s/#/#&/" | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' >> recognition_models.md -cat ../configs/detection/*/README_zh-CN.md | sed "s/md###t/html#t/g" | sed "s/#/#&/" | sed '1i\# 时空动作检测模型' | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' > detection_models.md -cat ../configs/skeleton/*/README_zh-CN.md | sed "s/md###t/html#t/g" | sed "s/#/#&/" | sed '1i\# 骨骼动作识别模型' | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' > skeleton_models.md +cat ../configs/localization/*/README_zh-CN.md | sed "s/md#测/html#测/g" | sed "s/md#训/html#训/g" | sed "s/#/#&/" | sed '1i\# 时序动作检测模型' | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' | sed "s/getting_started.html##/getting_started.html#/g" > localization_models.md +cat ../configs/recognition/*/README_zh-CN.md | sed "s/md#测/html#t测/g" | sed "s/md#训/html#训/g" | sed "s/#/#&/" | sed '1i\# 动作识别模型' | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g'| sed "s/getting_started.html##/getting_started.html#/g" > recognition_models.md +cat ../configs/recognition_audio/*/README_zh-CN.md | sed "s/md#测/html#测/g" | sed "s/md#训/html#训/g" | sed "s/#/#&/" | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g'| sed 
"s/getting_started.html##/getting_started.html#/g" >> recognition_models.md +cat ../configs/detection/*/README_zh-CN.md | sed "s/md#测/html#测/g" | sed "s/md#训/html#训/g" | sed "s/#/#&/" | sed '1i\# 时空动作检测模型' | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g'| sed "s/getting_started.html##/getting_started.html#/g" > detection_models.md +cat ../configs/skeleton/*/README_zh-CN.md | sed "s/md#测/html#测/g" | sed "s/md#训/html#训/g" | sed "s/#/#&/" | sed '1i\# 骨骼动作识别模型' | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g'| sed "s/getting_started.html##/getting_started.html#/g" > skeleton_models.md # gather datasets cat ../tools/data/*/README_zh-CN.md | sed 's/# 准备/# /g' | sed 's/#/#&/' > prepare_data.md diff --git a/docs_zh_CN/tutorials/4_data_pipeline.md b/docs_zh_CN/tutorials/4_data_pipeline.md index e47c940296..a54bbcbd3d 100644 --- a/docs_zh_CN/tutorials/4_data_pipeline.md +++ b/docs_zh_CN/tutorials/4_data_pipeline.md @@ -25,7 +25,7 @@ 我们在下图中展示了一个典型的流水线。 蓝色块是流水线操作。 随着流水线的深入,每个操作都可以向结果字典添加新键(标记为绿色)或更新现有键(标记为橙色)。 -![流水线](/resources/data_pipeline.png) +![流水线](https://github.com/open-mmlab/mmaction2/raw/master/resources/data_pipeline.png) 这些操作分为数据加载,数据预处理和数据格式化。 diff --git a/docs_zh_CN/tutorials/5_new_modules.md b/docs_zh_CN/tutorials/5_new_modules.md index ead61332bb..870d1886b1 100644 --- a/docs_zh_CN/tutorials/5_new_modules.md +++ b/docs_zh_CN/tutorials/5_new_modules.md @@ -246,14 +246,14 @@ lr_config = dict(policy='step', step=[20, 40]) 到目前位置,所有支持的更新器可参考 [mmcv](https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/lr_updater.py), 但如果用户想自定义学习率更新器,则需要遵循以下步骤: -1. 首先,在 `$MMAction2/mmaction/core/lr` 编写自定义的学习率更新钩子(LrUpdaterHook)。以下片段是自定义学习率更新器的例子,它使用基于特定比率的学习率 `lrs`,并在每个 `steps` 处进行学习率衰减。以下代码段是自定义学习率更新器的例子: +1. 
首先,在 `$MMAction2/mmaction/core/scheduler` 编写自定义的学习率更新钩子(LrUpdaterHook)。以下片段是自定义学习率更新器的例子,它使用基于特定比率的学习率 `lrs`,并在每个 `steps` 处进行学习率衰减。以下代码段是自定义学习率更新器的例子: ```python # 在此注册 @HOOKS.register_module() class RelativeStepLrUpdaterHook(LrUpdaterHook): # 该类应当继承于 mmcv.LrUpdaterHook - def __init__(self, runner, steps, lrs, **kwargs): + def __init__(self, steps, lrs, **kwargs): super().__init__(**kwargs) assert len(steps) == (len(lrs)) self.steps = steps diff --git a/docs_zh_CN/useful_tools.md b/docs_zh_CN/useful_tools.md index 1cb301ba28..710e513460 100644 --- a/docs_zh_CN/useful_tools.md +++ b/docs_zh_CN/useful_tools.md @@ -19,7 +19,7 @@ 输入变量指定一个训练日志文件,可通过 `tools/analysis/analyze_logs.py` 脚本绘制 loss/top-k 曲线。本功能依赖于 `seaborn`,使用前请先通过 `pip install seaborn` 安装依赖包。 -![准确度曲线图](/resources/acc_curve.png) +![准确度曲线图](https://github.com/open-mmlab/mmaction2/raw/master/resources/acc_curve.png) ```shell python tools/analysis/analyze_logs.py plot_curve ${JSON_LOGS} [--keys ${KEYS}] [--title ${TITLE}] [--legend ${LEGEND}] [--backend ${BACKEND}] [--style ${STYLE}] [--out ${OUT_FILE}] From 5c76ee2ad6ecca22ae9da63a75800e46795d4903 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Tue, 28 Sep 2021 15:26:52 +0800 Subject: [PATCH 263/414] [Fix] Fix resume when building rawframes (#1150) * Fix reseume when build rawframes * use resume log as flag * use resume log as flag * add run_success check * add run_success check --- tools/data/build_rawframes.py | 126 ++++++++++++++++++++++------------ 1 file changed, 81 insertions(+), 45 deletions(-) diff --git a/tools/data/build_rawframes.py b/tools/data/build_rawframes.py index 066141a9e3..70054e5b5e 100644 --- a/tools/data/build_rawframes.py +++ b/tools/data/build_rawframes.py @@ -5,7 +5,7 @@ import os.path as osp import sys import warnings -from multiprocessing import Pool +from multiprocessing import Lock, Pool import mmcv import numpy as np @@ -21,48 +21,54 @@ def extract_frame(vid_item): Returns: bool: Whether generate optical flow successfully. 
""" - full_path, vid_path, vid_id, method, task = vid_item + full_path, vid_path, vid_id, method, task, report_file = vid_item if '/' in vid_path: act_name = osp.basename(osp.dirname(vid_path)) out_full_path = osp.join(args.out_dir, act_name) else: out_full_path = args.out_dir + run_success = -1 + if task == 'rgb': if args.use_opencv: # Not like using denseflow, # Use OpenCV will not make a sub directory with the video name - video_name = osp.splitext(osp.basename(vid_path))[0] - out_full_path = osp.join(out_full_path, video_name) - - vr = mmcv.VideoReader(full_path) - # for i in range(len(vr)): - for i, vr_frame in enumerate(vr): - if vr_frame is not None: - w, h, _ = np.shape(vr_frame) - if args.new_short == 0: - if args.new_width == 0 or args.new_height == 0: - # Keep original shape - out_img = vr_frame + try: + video_name = osp.splitext(osp.basename(vid_path))[0] + out_full_path = osp.join(out_full_path, video_name) + + vr = mmcv.VideoReader(full_path) + for i, vr_frame in enumerate(vr): + if vr_frame is not None: + w, h, _ = np.shape(vr_frame) + if args.new_short == 0: + if args.new_width == 0 or args.new_height == 0: + # Keep original shape + out_img = vr_frame + else: + out_img = mmcv.imresize( + vr_frame, + (args.new_width, args.new_height)) else: - out_img = mmcv.imresize(vr_frame, - (args.new_width, - args.new_height)) + if min(h, w) == h: + new_h = args.new_short + new_w = int((new_h / h) * w) + else: + new_w = args.new_short + new_h = int((new_w / w) * h) + out_img = mmcv.imresize(vr_frame, (new_h, new_w)) + mmcv.imwrite(out_img, + f'{out_full_path}/img_{i + 1:05d}.jpg') else: - if min(h, w) == h: - new_h = args.new_short - new_w = int((new_h / h) * w) - else: - new_w = args.new_short - new_h = int((new_w / w) * h) - out_img = mmcv.imresize(vr_frame, (new_h, new_w)) - mmcv.imwrite(out_img, - f'{out_full_path}/img_{i + 1:05d}.jpg') - else: - warnings.warn( - 'Length inconsistent!' - f'Early stop with {i + 1} out of {len(vr)} frames.') - break + warnings.warn( + 'Length inconsistent!' + f'Early stop with {i + 1} out of {len(vr)} frames.' 
+ ) + break + run_success = 0 + except Exception: + run_success = -1 else: if args.new_short == 0: cmd = osp.join( @@ -72,7 +78,7 @@ def extract_frame(vid_item): cmd = osp.join( f"denseflow '{full_path}' -b=20 -s=0 -o='{out_full_path}'" f' -ns={args.new_short} -v') - os.system(cmd) + run_success = os.system(cmd) elif task == 'flow': if args.input_frames: if args.new_short == 0: @@ -92,7 +98,7 @@ def extract_frame(vid_item): cmd = osp.join( f"denseflow '{full_path}' -a={method} -b=20 -s=1 -o='{out_full_path}'" # noqa: E501 f' -ns={args.new_short} -v') - os.system(cmd) + run_success = os.system(cmd) else: if args.new_short == 0: cmd_rgb = osp.join( @@ -108,11 +114,24 @@ def extract_frame(vid_item): cmd_flow = osp.join( f"denseflow '{full_path}' -a={method} -b=20 -s=1 -o='{out_full_path}'" # noqa: E501 f' -ns={args.new_short} -v') - os.system(cmd_rgb) - os.system(cmd_flow) + run_success_rgb = os.system(cmd_rgb) + run_success_flow = os.system(cmd_flow) + if run_success_flow == 0 and run_success_rgb == 0: + run_success = 0 + + if run_success == 0: + print(f'{task} {vid_id} {vid_path} {method} done') + sys.stdout.flush() + + lock.acquire() + with open(report_file, 'a') as f: + line = full_path + '\n' + f.write(line) + lock.release() + else: + print(f'{task} {vid_id} {vid_path} {method} got something wrong') + sys.stdout.flush() - print(f'{task} {vid_id} {vid_path} {method} done') - sys.stdout.flush() return True @@ -182,11 +201,21 @@ def parse_args(): '--input-frames', action='store_true', help='Whether to extract flow frames based on rgb frames') + parser.add_argument( + '--report-file', + type=str, + default='build_report.txt', + help='report to record files which have been successfully processed') args = parser.parse_args() return args +def init(lock_): + global lock + lock = lock_ + + if __name__ == '__main__': args = parse_args() @@ -205,25 +234,28 @@ def parse_args(): if args.input_frames: print('Reading rgb frames from folder: ', args.src_dir) fullpath_list = glob.glob(args.src_dir + '/*' * args.level) - done_fullpath_list = glob.glob(args.out_dir + '/*' * args.level) print('Total number of rgb frame folders found: ', len(fullpath_list)) else: print('Reading videos from folder: ', args.src_dir) if args.mixed_ext: print('Extension of videos is mixed') fullpath_list = glob.glob(args.src_dir + '/*' * args.level) - done_fullpath_list = glob.glob(args.out_dir + '/*' * args.level) else: print('Extension of videos: ', args.ext) fullpath_list = glob.glob(args.src_dir + '/*' * args.level + '.' + args.ext) - done_fullpath_list = glob.glob(args.out_dir + '/*' * args.level) print('Total number of videos found: ', len(fullpath_list)) if args.resume: - fullpath_list = set(fullpath_list).difference(set(done_fullpath_list)) - fullpath_list = list(fullpath_list) - print('Resuming. 
number of videos to be done: ', len(fullpath_list)) + done_fullpath_list = [] + with open(args.report_file) as f: + for line in f: + if line == '\n': + continue + done_full_path = line.strip().split()[0] + done_fullpath_list.append(done_full_path) + done_fullpath_list = set(done_fullpath_list) + fullpath_list = list(set(fullpath_list).difference(done_fullpath_list)) if args.level == 2: vid_list = list( @@ -234,9 +266,13 @@ def parse_args(): elif args.level == 1: vid_list = list(map(osp.basename, fullpath_list)) - pool = Pool(args.num_worker) + lock = Lock() + pool = Pool(args.num_worker, initializer=init, initargs=(lock, )) pool.map( extract_frame, zip(fullpath_list, vid_list, range(len(vid_list)), len(vid_list) * [args.flow_type], - len(vid_list) * [args.task])) + len(vid_list) * [args.task], + len(vid_list) * [args.report_file])) + pool.close() + pool.join() From fed66554b41aef23d698ba594b29f546f2bcd6d7 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Wed, 29 Sep 2021 15:49:34 +0800 Subject: [PATCH 264/414] [Fix] Solve issue #1189 --- mmaction/datasets/pipelines/loading.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mmaction/datasets/pipelines/loading.py b/mmaction/datasets/pipelines/loading.py index e5b08bd812..14a2fbe202 100644 --- a/mmaction/datasets/pipelines/loading.py +++ b/mmaction/datasets/pipelines/loading.py @@ -58,6 +58,9 @@ def __call__(self, results): category_mask = torch.zeros(self.num_categories) for category, tags in results['label'].items(): + # skip if not training on this category + if category not in self.categories: + continue category_mask[self.categories.index(category)] = 1. start_idx = self.category2startidx[category] category_num = self.category2num[category] From e8d1240ae17947a2c495df8a7c85d07751e03036 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Wed, 29 Sep 2021 16:18:20 +0800 Subject: [PATCH 265/414] [Improvement] Add the outputs of NTURGB+D pose extraction pipelines (#1195) --- configs/skeleton/posec3d/README.md | 1 + tools/data/skeleton/README.md | 7 +++++++ 2 files changed, 8 insertions(+) diff --git a/configs/skeleton/posec3d/README.md b/configs/skeleton/posec3d/README.md index 7ce507cdfd..c6a1db643d 100644 --- a/configs/skeleton/posec3d/README.md +++ b/configs/skeleton/posec3d/README.md @@ -81,6 +81,7 @@ According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, e.g., lr=0.01 for 8 GPUs x 8 videos/gpu and lr=0.04 for 16 GPUs x 16 videos/gpu. 2. The values in columns named after "reference" are the results got by testing on our dataset, using the checkpoints provided by the author with same model settings. The checkpoints for reference repo can be downloaded [here](https://drive.google.com/drive/folders/1sFfmP3yrfc7IzRshEELOby7-aEoymIFL?usp=sharing). +3. You can follow the guide in [Preparing Skeleton Dataset](https://github.com/open-mmlab/mmaction2/tree/master/tools/data/skeleton) to obtain skeleton annotations used in the above configs. ::: diff --git a/tools/data/skeleton/README.md b/tools/data/skeleton/README.md index c6bfa92af5..fc53d6b817 100644 --- a/tools/data/skeleton/README.md +++ b/tools/data/skeleton/README.md @@ -27,6 +27,13 @@ bash download_annotations.sh ${DATASET} Due to [Conditions of Use](http://rose1.ntu.edu.sg/Datasets/actionRecognition.asp) of the NTURGB+D dataset, we can not directly release the annotations used in our experiments. 
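Before continuing with the skeleton annotations, a short aside on the resume mechanism of PATCH 263 (#1150) above: each worker appends the path of a successfully processed video to a shared report file, guarded by a `multiprocessing.Lock` that is handed to the workers through the pool initializer (a `Lock` cannot be passed through `map` arguments), and `--resume` simply skips every path already recorded. A condensed, runnable sketch of that pattern follows; the `process`/`pending` helpers and file names are placeholders, not the actual script.

```python
import os
from multiprocessing import Lock, Pool

REPORT = 'build_report.txt'

def init(lock_):
    # Pool initializer: expose the shared lock as a global in each worker.
    global lock
    lock = lock_

def process(path):
    # ... real per-video work would go here; record the path on success.
    with lock:
        with open(REPORT, 'a') as f:
            f.write(path + '\n')

def pending(all_paths):
    # Resume support: drop everything already listed in the report file.
    done = set()
    if os.path.exists(REPORT):
        with open(REPORT) as f:
            done = {line.strip() for line in f if line.strip()}
    return [p for p in all_paths if p not in done]

if __name__ == '__main__':
    lock = Lock()
    with Pool(4, initializer=init, initargs=(lock,)) as pool:
        pool.map(process, pending(['a.mp4', 'b.mp4']))
```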
As an alternative, we provide a script that generates pose annotations for videos in the NTURGB+D datasets; for each video it produces a dictionary and saves it as a single pickle file. You can then collect the annotation dictionaries of the corresponding videos into one list and save it as a pickle file, which gives you the `ntu60_xsub_train.pkl`, `ntu60_xsub_val.pkl`, `ntu120_xsub_train.pkl`, `ntu120_xsub_val.pkl` files that we used in training.
+For those who do not have enough compute for pose extraction, we provide the outputs of the above pipeline here, corresponding to the 4 different splits of the NTURGB+D datasets:
+
+- ntu60_xsub_train: https://download.openmmlab.com/mmaction/posec3d/ntu60_xsub_train.pkl
+- ntu60_xsub_val: https://download.openmmlab.com/mmaction/posec3d/ntu60_xsub_val.pkl
+- ntu120_xsub_train: https://download.openmmlab.com/mmaction/posec3d/ntu120_xsub_train.pkl
+- ntu120_xsub_val: https://download.openmmlab.com/mmaction/posec3d/ntu120_xsub_val.pkl
+
To generate 2D pose annotations for a single video, you first need to install mmdetection and mmpose from source. After that, replace the placeholders `mmdet_root` and `mmpose_root` in `ntu_pose_extraction.py` with your installation paths. Then you can use the following script for NTURGB+D video pose extraction:
```python

From 65a6d1e5bcb472f463ce93342c8596cd3a875721 Mon Sep 17 00:00:00 2001
From: Haodong Duan
Date: Thu, 30 Sep 2021 19:49:28 +0800
Subject: [PATCH 266/414] [Improvement] Refactor Inference API (#1191)

* refactor inference API
* update
* fix bug
* update
* update
* support URL
* update
* refactor demo_posec3d w. mmaction2 api
* add comments
* add deprecated warning
---
 demo/demo.py | 16 ++--
 demo/demo_gradcam.py | 3 +-
 demo/demo_posec3d.py | 21 +----
 docs/getting_started.md | 12 +++
 docs/install.md | 2 +-
 docs_zh_CN/getting_started.md | 14 ++-
 docs_zh_CN/install.md | 2 +-
 mmaction/apis/inference.py | 128 ++++++++++++++++-----------
 tests/test_runtime/test_inference.py | 53 +++--------
 9 files changed, 127 insertions(+), 124 deletions(-)

diff --git a/demo/demo.py b/demo/demo.py
index 1b299d32fe..85565cb5a2 100644
--- a/demo/demo.py
+++ b/demo/demo.py
@@ -160,8 +160,7 @@ def main():
    cfg.merge_from_dict(args.cfg_options)
    # build the recognizer from a config file and checkpoint file/url
-    model = init_recognizer(
-        cfg, args.checkpoint, device=device, use_frames=args.use_frames)
+    model = init_recognizer(cfg, args.checkpoint, device=device)
    # e.g.
use ('backbone', ) to return backbone feature output_layer_names = None @@ -169,14 +168,13 @@ def main(): # test a single video or rawframes of a single video if output_layer_names: results, returned_feature = inference_recognizer( - model, - args.video, - args.label, - use_frames=args.use_frames, - outputs=output_layer_names) + model, args.video, outputs=output_layer_names) else: - results = inference_recognizer( - model, args.video, args.label, use_frames=args.use_frames) + results = inference_recognizer(model, args.video) + + labels = open(args.label).readlines() + labels = [x.strip() for x in labels] + results = [(labels[k[0]], k[1]) for k in results] print('The top-5 labels with corresponding scores are:') for result in results: diff --git a/demo/demo_gradcam.py b/demo/demo_gradcam.py index 6c8631f4a3..4af6851ac4 100644 --- a/demo/demo_gradcam.py +++ b/demo/demo_gradcam.py @@ -174,8 +174,7 @@ def main(): cfg.merge_from_dict(args.cfg_options) # build the recognizer from a config file and checkpoint file/url - model = init_recognizer( - cfg, args.checkpoint, device=device, use_frames=args.use_frames) + model = init_recognizer(cfg, args.checkpoint, device=device) inputs = build_inputs(model, args.video, use_frames=args.use_frames) gradcam = GradCAM(model, args.target_layer_name) diff --git a/demo/demo_posec3d.py b/demo/demo_posec3d.py index fd0d497fb7..33b6fb6dae 100644 --- a/demo/demo_posec3d.py +++ b/demo/demo_posec3d.py @@ -9,10 +9,8 @@ import numpy as np import torch from mmcv import DictAction -from mmcv.runner import load_checkpoint -from mmaction.datasets.pipelines import Compose -from mmaction.models import build_model +from mmaction.apis import inference_recognizer, init_recognizer from mmaction.utils import import_module_error_func try: @@ -204,7 +202,7 @@ def main(): config = mmcv.Config.fromfile(args.config) config.merge_from_dict(args.cfg_options) - test_pipeline = Compose(config.data.test.pipeline) + model = init_recognizer(config, args.checkpoint, args.device) # Load label_map label_map = [x.strip() for x in open(args.label_map).readlines()] @@ -239,19 +237,9 @@ def main(): fake_anno['keypoint'] = keypoint fake_anno['keypoint_score'] = keypoint_score - imgs = test_pipeline(fake_anno)['imgs'][None] - imgs = imgs.to(args.device) + results = inference_recognizer(model, fake_anno) - model = build_model(config.model) - load_checkpoint(model, args.checkpoint, map_location=args.device) - model.to(args.device) - model.eval() - - with torch.no_grad(): - output = model(return_loss=False, imgs=imgs) - - action_idx = np.argmax(output) - action_label = label_map[action_idx] + action_label = label_map[results[0][0]] pose_model = init_pose_model(args.pose_config, args.pose_checkpoint, args.device) @@ -263,7 +251,6 @@ def main(): cv2.putText(frame, action_label, (10, 30), FONTFACE, FONTSCALE, FONTCOLOR, THICKNESS, LINETYPE) - cv2.imwrite('frame.jpg', vis_frames[0]) vid = mpy.ImageSequenceClip([x[:, :, ::-1] for x in vis_frames], fps=24) vid.write_videofile(args.out_filename, remove_temp=True) diff --git a/docs/getting_started.md b/docs/getting_started.md index 990184d39e..bcc6e4c6d3 100644 --- a/docs/getting_started.md +++ b/docs/getting_started.md @@ -152,6 +152,10 @@ labels = 'tools/data/kinetics/label_map_k400.txt' results = inference_recognizer(model, video, labels) # show the results +labels = open('tools/data/kinetics/label_map_k400.txt').readlines() +labels = [x.strip() for x in labels] +results = [(labels[k[0]], k[1]) for k in results] + print(f'The top-5 labels with corresponding 
scores are:') for result in results: print(f'{result[0]}: ', result[1]) @@ -181,6 +185,10 @@ labels = 'tools/data/kinetics/label_map_k400.txt' results = inference_recognizer(model, video, labels, use_frames=True) # show the results +labels = open('tools/data/kinetics/label_map_k400.txt').readlines() +labels = [x.strip() for x in labels] +results = [(labels[k[0]], k[1]) for k in results] + print(f'The top-5 labels with corresponding scores are:') for result in results: print(f'{result[0]}: ', result[1]) @@ -210,6 +218,10 @@ labels = 'tools/data/kinetics/label_map_k400.txt' results = inference_recognizer(model, video, labels) # show the results +labels = open('tools/data/kinetics/label_map_k400.txt').readlines() +labels = [x.strip() for x in labels] +results = [(labels[k[0]], k[1]) for k in results] + print(f'The top-5 labels with corresponding scores are:') for result in results: print(f'{result[0]}: ', result[1]) diff --git a/docs/install.md b/docs/install.md index 40fd91c4ee..fc81324db1 100644 --- a/docs/install.md +++ b/docs/install.md @@ -249,5 +249,5 @@ device = torch.device(device) model = init_recognizer(config_file, device=device) # inference the demo video -inference_recognizer(model, 'demo/demo.mp4', 'tools/data/kinetics/label_map_k400.txt') +inference_recognizer(model, 'demo/demo.mp4') ``` diff --git a/docs_zh_CN/getting_started.md b/docs_zh_CN/getting_started.md index efd306b67b..76275691e4 100644 --- a/docs_zh_CN/getting_started.md +++ b/docs_zh_CN/getting_started.md @@ -150,6 +150,10 @@ labels = 'tools/data/kinetics/label_map_k400.txt' results = inference_recognizer(model, video, labels) # 显示结果 +labels = open('tools/data/kinetics/label_map_k400.txt').readlines() +labels = [x.strip() for x in labels] +results = [(labels[k[0]], k[1]) for k in results] + print(f'The top-5 labels with corresponding scores are:') for result in results: print(f'{result[0]}: ', result[1]) @@ -179,6 +183,10 @@ labels = 'tools/data/kinetics/label_map_k400.txt' results = inference_recognizer(model, video, labels, use_frames=True) # 显示结果 +labels = open('tools/data/kinetics/label_map_k400.txt').readlines() +labels = [x.strip() for x in labels] +results = [(labels[k[0]], k[1]) for k in results] + print(f'The top-5 labels with corresponding scores are:') for result in results: print(f'{result[0]}: ', result[1]) @@ -207,7 +215,11 @@ video = 'https://www.learningcontainer.com/wp-content/uploads/2020/05/sample-mp4 labels = 'tools/data/kinetics/label_map_k400.txt' results = inference_recognizer(model, video, labels) -# 根据配置文件和检查点来建立模型 +# 显示结果 +labels = open('tools/data/kinetics/label_map_k400.txt').readlines() +labels = [x.strip() for x in labels] +results = [(labels[k[0]], k[1]) for k in results] + print(f'The top-5 labels with corresponding scores are:') for result in results: print(f'{result[0]}: ', result[1]) diff --git a/docs_zh_CN/install.md b/docs_zh_CN/install.md index f922536df0..fcd809eb43 100644 --- a/docs_zh_CN/install.md +++ b/docs_zh_CN/install.md @@ -240,5 +240,5 @@ device = torch.device(device) model = init_recognizer(config_file, device=device) # 进行演示视频的推理 -inference_recognizer(model, 'demo/demo.mp4', 'tools/data/kinetics/label_map_k400.txt') +inference_recognizer(model, 'demo/demo.mp4') ``` diff --git a/mmaction/apis/inference.py b/mmaction/apis/inference.py index 1c721d78eb..aad3b5070e 100644 --- a/mmaction/apis/inference.py +++ b/mmaction/apis/inference.py @@ -2,9 +2,11 @@ import os import os.path as osp import re +import warnings from operator import itemgetter import mmcv +import numpy as 
np import torch from mmcv.parallel import collate, scatter from mmcv.runner import load_checkpoint @@ -14,10 +16,7 @@ from mmaction.models import build_recognizer -def init_recognizer(config, - checkpoint=None, - device='cuda:0', - use_frames=False): +def init_recognizer(config, checkpoint=None, device='cuda:0', **kwargs): """Initialize a recognizer from config file. Args: @@ -27,23 +26,20 @@ def init_recognizer(config, the model will not load any weights. Default: None. device (str | :obj:`torch.device`): The desired device of returned tensor. Default: 'cuda:0'. - use_frames (bool): Whether to use rawframes as input. Default:False. Returns: nn.Module: The constructed recognizer. """ + if 'use_frames' in kwargs: + warnings.warn('The argument `use_frames` is deprecated in PR #1191. ' + 'Now you can use models trained with frames or videos ' + 'arbitrarily. ') + if isinstance(config, str): config = mmcv.Config.fromfile(config) elif not isinstance(config, mmcv.Config): raise TypeError('config must be a filename or Config object, ' f'but got {type(config)}') - if ((use_frames and config.dataset_type != 'RawframeDataset') - or (not use_frames and config.dataset_type != 'VideoDataset')): - input_type = 'rawframes' if use_frames else 'video' - raise RuntimeError('input data type should be consist with the ' - f'dataset type in config, but got input type ' - f"'{input_type}' and dataset type " - f"'{config.dataset_type}'") # pretrained model is unnecessary since we directly load checkpoint later config.model.backbone.pretrained = None @@ -57,21 +53,14 @@ def init_recognizer(config, return model -def inference_recognizer(model, - video_path, - label_path, - use_frames=False, - outputs=None, - as_tensor=True): - """Inference a video with the detector. +def inference_recognizer(model, video, outputs=None, as_tensor=True, **kwargs): + """Inference a video with the recognizer. Args: model (nn.Module): The loaded recognizer. - video_path (str): The video file path/url or the rawframes directory - path. If ``use_frames`` is set to True, it should be rawframes - directory path. Otherwise, it should be video file path. - label_path (str): The label file path. - use_frames (bool): Whether to use rawframes as input. Default:False. + video (str | dict | ndarray): The video file path / url or the + rawframes directory path / results dictionary (the input of + pipeline) / a 4D array T x H x W x 3 (The input video). outputs (list(str) | tuple(str) | str | None) : Names of layers whose outputs need to be returned, default: None. as_tensor (bool): Same as that in ``OutputHook``. Default: True. @@ -81,15 +70,34 @@ def inference_recognizer(model, dict[torch.tensor | np.ndarray]: Output feature maps from layers specified in `outputs`. """ - if not (osp.exists(video_path) or video_path.startswith('http')): - raise RuntimeError(f"'{video_path}' is missing") - - if osp.isfile(video_path) and use_frames: - raise RuntimeError( - f"'{video_path}' is a video file, not a rawframe directory") - if osp.isdir(video_path) and not use_frames: - raise RuntimeError( - f"'{video_path}' is a rawframe directory, not a video file") + if 'use_frames' in kwargs: + warnings.warn('The argument `use_frames` is deprecated in PR #1191. ' + 'Now you can use models trained with frames or videos ' + 'arbitrarily. ') + if 'label_path' in kwargs: + warnings.warn('The argument `label_path` is deprecated in PR #1191. ' + 'Now the label file is not needed in ' + 'inference_recognizer. 
') + + input_flag = None + if isinstance(video, dict): + input_flag = 'dict' + elif isinstance(video, np.ndarray): + assert len(video.shape) == 4, 'The shape should be T x H x W x C' + input_flag = 'array' + raise NotImplementedError(f'The input type {input_flag} is not ' + 'supported yet; this is an interface ' + 'reserved for torchserve. ') + elif isinstance(video, str) and video.startswith('http'): + input_flag = 'video' + elif isinstance(video, str) and osp.exists(video): + if osp.isfile(video): + input_flag = 'video' + if osp.isdir(video): + input_flag = 'rawframes' + else: + raise RuntimeError('The type of argument video is not supported: ' + f'{type(video)}') if isinstance(outputs, str): outputs = (outputs, ) @@ -97,14 +105,33 @@ cfg = model.cfg device = next(model.parameters()).device # model device - # construct label map - with open(label_path, 'r') as f: - label = [line.strip() for line in f] # build the data pipeline test_pipeline = cfg.data.test.pipeline - test_pipeline = Compose(test_pipeline) - # prepare data - if use_frames: + # Alter data pipelines & prepare inputs + if input_flag == 'dict': + data = video + if input_flag == 'array': + modality_map = {2: 'Flow', 3: 'RGB'} + modality = modality_map.get(video.shape[-1]) + data = dict( + total_frames=video.shape[0], + label=-1, + start_index=0, + array=video, + modality=modality) + for i in range(len(test_pipeline)): + if 'Decode' in test_pipeline[i]['type']: + test_pipeline[i] = dict(type='ArrayDecode') + if input_flag == 'video': + data = dict(filename=video, label=-1, start_index=0, modality='RGB') + if 'Init' not in test_pipeline[0]['type']: + test_pipeline = [dict(type='OpenCVInit')] + test_pipeline + else: + test_pipeline[0] = dict(type='OpenCVInit') + for i in range(len(test_pipeline)): + if 'Decode' in test_pipeline[i]['type']: + test_pipeline[i] = dict(type='OpenCVDecode') + if input_flag == 'rawframes': filename_tmpl = cfg.data.test.get('filename_tmpl', 'img_{:05}.jpg') modality = cfg.data.test.get('modality', 'RGB') start_index = cfg.data.test.get('start_index', 1) @@ -120,24 +147,24 @@ total_frames = len( list( filter(lambda x: re.match(pattern, x) is not None, - os.listdir(video_path)))) - + os.listdir(video)))) data = dict( - frame_dir=video_path, + frame_dir=video, total_frames=total_frames, label=-1, start_index=start_index, filename_tmpl=filename_tmpl, modality=modality) - else: - start_index = cfg.data.test.get('start_index', 0) - data = dict( - filename=video_path, - label=-1, - start_index=start_index, - modality='RGB') + if 'Init' in test_pipeline[0]['type']: + test_pipeline = test_pipeline[1:] + for i in range(len(test_pipeline)): + if 'Decode' in test_pipeline[i]['type']: + test_pipeline[i] = dict(type='RawFrameDecode') + + test_pipeline = Compose(test_pipeline) data = test_pipeline(data) data = collate([data], samples_per_gpu=1) + if next(model.parameters()).is_cuda: # scatter to specified GPU data = scatter(data, [device])[0] @@ -148,7 +175,8 @@ scores = model(return_loss=False, **data)[0] returned_features = h.layer_outputs if outputs else None - score_tuples = tuple(zip(label, scores)) + num_classes = scores.shape[-1] + score_tuples = tuple(zip(range(num_classes), scores)) score_sorted = sorted(score_tuples, key=itemgetter(1), reverse=True) top5_label = score_sorted[:5]
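As a reading aid for the refactor above, a minimal usage sketch of the new API; the config path is the one exercised by the tests that follow, `checkpoint=None` simply skips weight loading, and the label-map handling mirrors the getting_started.md hunks, so treat this as an illustration rather than canonical usage:

```python
from mmaction.apis import inference_recognizer, init_recognizer

# Any recognition config works; this one appears in the tests below.
config = 'configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py'
model = init_recognizer(config, checkpoint=None, device='cpu')

# The recognizer now accepts a video file, a rawframe directory, or a
# pipeline-input dict, and returns (class_index, score) tuples.
results = inference_recognizer(model, 'demo/demo.mp4')

# Mapping indices to human-readable labels is now done by the caller.
labels = [x.strip() for x in open('tools/data/kinetics/label_map_k400.txt')]
results = [(labels[idx], score) for idx, score in results]
```

diff --git a/tests/test_runtime/test_inference.py b/tests/test_runtime/test_inference.py index 15acf740f8..f1f6a7b5ce 100644 ---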
a/tests/test_runtime/test_inference.py +++ b/tests/test_runtime/test_inference.py @@ -10,7 +10,6 @@ video_config_file = 'configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py' # noqa: E501 frame_config_file = 'configs/recognition/tsn/tsn_r50_inference_1x1x3_100e_kinetics400_rgb.py' # noqa: E501 flow_frame_config_file = 'configs/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow.py' # noqa: E501 -label_path = 'tools/data/kinetics/label_map_k400.txt' video_path = 'demo/demo.mp4' frames_path = 'tests/data/imgs' @@ -20,14 +19,6 @@ def test_init_recognizer(): # config must be a filename or Config object init_recognizer(dict(config_file=None)) - with pytest.raises(RuntimeError): - # input data type should be consist with the dataset type - init_recognizer(frame_config_file) - - with pytest.raises(RuntimeError): - # input data type should be consist with the dataset type - init_recognizer(video_config_file, use_frames=True) - if torch.cuda.is_available(): device = 'cuda:0' else: @@ -55,32 +46,20 @@ def test_video_inference_recognizer(): with pytest.raises(RuntimeError): # video path doesn't exist - inference_recognizer(model, 'missing.mp4', label_path) - - with pytest.raises(RuntimeError): - # ``video_path`` should be consist with the ``use_frames`` - inference_recognizer(model, video_path, label_path, use_frames=True) - - with pytest.raises(RuntimeError): - # ``video_path`` should be consist with the ``use_frames`` - inference_recognizer(model, 'demo/', label_path) + inference_recognizer(model, 'missing.mp4') for ops in model.cfg.data.test.pipeline: if ops['type'] in ('TenCrop', 'ThreeCrop'): # Use CenterCrop to reduce memory in order to pass CI ops['type'] = 'CenterCrop' - top5_label = inference_recognizer(model, video_path, label_path) + top5_label = inference_recognizer(model, video_path) scores = [item[1] for item in top5_label] assert len(top5_label) == 5 assert scores == sorted(scores, reverse=True) _, feat = inference_recognizer( - model, - video_path, - label_path, - outputs=('backbone', 'cls_head'), - as_tensor=False) + model, video_path, outputs=('backbone', 'cls_head'), as_tensor=False) assert isinstance(feat, dict) assert 'backbone' in feat and 'cls_head' in feat assert isinstance(feat['backbone'], np.ndarray) @@ -91,7 +70,6 @@ def test_video_inference_recognizer(): _, feat = inference_recognizer( model, video_path, - label_path, outputs=('backbone.layer3', 'backbone.layer3.1.conv1')) assert 'backbone.layer3.1.conv1' in feat and 'backbone.layer3' in feat assert isinstance(feat['backbone.layer3.1.conv1'], torch.Tensor) @@ -108,7 +86,7 @@ def test_video_inference_recognizer(): if ops['type'] == 'SampleFrames': ops['num_clips'] = 1 _, feat = inference_recognizer( - sf_model, video_path, label_path, outputs=('backbone', 'cls_head')) + sf_model, video_path, outputs=('backbone', 'cls_head')) assert isinstance(feat, dict) and isinstance(feat['backbone'], tuple) assert 'backbone' in feat and 'cls_head' in feat assert len(feat['backbone']) == 2 @@ -124,19 +102,12 @@ def test_frames_inference_recognizer(): device = 'cuda:0' else: device = 'cpu' - rgb_model = init_recognizer( - frame_config_file, None, device, use_frames=True) - flow_model = init_recognizer( - flow_frame_config_file, None, device, use_frames=True) + rgb_model = init_recognizer(frame_config_file, None, device) + flow_model = init_recognizer(flow_frame_config_file, None, device) with pytest.raises(RuntimeError): # video path doesn't exist - inference_recognizer(rgb_model, 'missing_path', label_path) 
- - with pytest.raises(RuntimeError): - # ``video_path`` should be consist with the ``use_frames`` - inference_recognizer( - flow_model, frames_path, label_path, use_frames=False) + inference_recognizer(rgb_model, 'missing_path') for ops in rgb_model.cfg.data.test.pipeline: if ops['type'] in ('TenCrop', 'ThreeCrop'): @@ -149,8 +120,7 @@ def test_frames_inference_recognizer(): ops['type'] = 'CenterCrop' ops['crop_size'] = 224 - top5_label = inference_recognizer( - rgb_model, frames_path, label_path, use_frames=True) + top5_label = inference_recognizer(rgb_model, frames_path) scores = [item[1] for item in top5_label] assert len(top5_label) == 5 assert scores == sorted(scores, reverse=True) @@ -158,10 +128,8 @@ def test_frames_inference_recognizer(): _, feat = inference_recognizer( flow_model, frames_path, - label_path, outputs=('backbone', 'cls_head'), - as_tensor=False, - use_frames=True) + as_tensor=False) assert isinstance(feat, dict) assert 'backbone' in feat and 'cls_head' in feat assert isinstance(feat['backbone'], np.ndarray) @@ -172,9 +140,8 @@ def test_frames_inference_recognizer(): _, feat = inference_recognizer( rgb_model, frames_path, - label_path, - use_frames=True, outputs=('backbone.layer3', 'backbone.layer3.1.conv1')) + assert 'backbone.layer3.1.conv1' in feat and 'backbone.layer3' in feat assert isinstance(feat['backbone.layer3.1.conv1'], torch.Tensor) assert isinstance(feat['backbone.layer3'], torch.Tensor) From 37a06394ed9b7ca38a258d1cf8a6474e1c0da5a6 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Fri, 1 Oct 2021 19:22:10 +0800 Subject: [PATCH 267/414] [Revise] Delete src directory (#1200) --- src/pytorch-sphinx-theme | 1 - 1 file changed, 1 deletion(-) delete mode 160000 src/pytorch-sphinx-theme diff --git a/src/pytorch-sphinx-theme b/src/pytorch-sphinx-theme deleted file mode 160000 index 5134e93c22..0000000000 --- a/src/pytorch-sphinx-theme +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 5134e93c22d704334aec9079175bc214eb533baf From b3a154e6e8cb5209aca9070376f26e1768dd6df3 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Sat, 2 Oct 2021 21:29:05 +0800 Subject: [PATCH 268/414] [Fix] Fix the original wrong range of ``_freeze_stages`` in MobileNetV2 (#1193) * totally _freeze_stages in MobileNetV2 * fix * totally _freeze_stages in MobileNetV2 --- mmaction/models/backbones/mobilenet_v2.py | 13 ++++++++----- .../test_common_modules/test_mobilenet_v2.py | 16 ++++++++++++++-- 2 files changed, 22 insertions(+), 7 deletions(-) diff --git a/mmaction/models/backbones/mobilenet_v2.py b/mmaction/models/backbones/mobilenet_v2.py index d86a80149c..b0047b81e8 100644 --- a/mmaction/models/backbones/mobilenet_v2.py +++ b/mmaction/models/backbones/mobilenet_v2.py @@ -129,8 +129,9 @@ class MobileNetV2(nn.Module): channels in each layer by this amount. Default: 1.0. out_indices (None or Sequence[int]): Output from which stages. Default: (7, ). - frozen_stages (int): Stages to be frozen (all param fixed). - Default: -1, which means not freezing any parameters. + frozen_stages (int): Stages to be frozen (all param fixed). Note that + the last stage in ``MobileNetV2`` is ``conv2``. Default: -1, + which means not freezing any parameters. conv_cfg (dict): Config dict for convolution layer. Default: None, which means using conv2d. norm_cfg (dict): Config dict for normalization layer. @@ -169,8 +170,8 @@ def __init__(self, raise ValueError('the item in out_indices must in ' f'range(0, 8). 
But received {index}') - if frozen_stages not in range(-1, 8): - raise ValueError('frozen_stages must be in range(-1, 8). ' + if frozen_stages not in range(-1, 9): + raise ValueError('frozen_stages must be in range(-1, 9). ' f'But received {frozen_stages}') self.out_indices = out_indices self.frozen_stages = frozen_stages @@ -281,10 +282,12 @@ def forward(self, x): def _freeze_stages(self): if self.frozen_stages >= 0: + self.conv1.eval() for param in self.conv1.parameters(): param.requires_grad = False for i in range(1, self.frozen_stages + 1): - layer = getattr(self, f'layer{i}') + layer_name = self.layers[i - 1] + layer = getattr(self, layer_name) layer.eval() for param in layer.parameters(): param.requires_grad = False diff --git a/tests/test_models/test_common_modules/test_mobilenet_v2.py b/tests/test_models/test_common_modules/test_mobilenet_v2.py index 5589b03b3e..f476ef5ee3 100644 --- a/tests/test_models/test_common_modules/test_mobilenet_v2.py +++ b/tests/test_models/test_common_modules/test_mobilenet_v2.py @@ -33,8 +33,8 @@ def is_block(modules): model.init_weights() with pytest.raises(ValueError): - # frozen_stages must in range(1, 8) - MobileNetV2(frozen_stages=8) + # frozen_stages must in range(1, 9) + MobileNetV2(frozen_stages=9) with pytest.raises(ValueError): # tout_indices in range(-1, 8) @@ -60,6 +60,18 @@ def is_block(modules): for param in layer.parameters(): assert param.requires_grad is False + # Test MobileNetV2 with all stages frozen + frozen_stages = 8 + model = MobileNetV2(frozen_stages=frozen_stages) + model.init_weights() + model.train() + + for mod in model.modules(): + if not isinstance(mod, MobileNetV2): + assert mod.training is False + for param in mod.parameters(): + assert param.requires_grad is False + # Test MobileNetV2 with norm_eval=True model = MobileNetV2(norm_eval=True) model.init_weights() From c8d6307fe29613420f4db71319d0977282da941a Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Mon, 4 Oct 2021 15:07:41 +0800 Subject: [PATCH 269/414] [Fix] switch the default value of persistent_workers to False (#1202) * switch the default value of persistent_workers to False * update * update --- mmaction/apis/train.py | 4 ++++ mmaction/datasets/builder.py | 4 ++-- tools/analysis/bench_processing.py | 1 + tools/analysis/benchmark.py | 1 + 4 files changed, 8 insertions(+), 2 deletions(-) diff --git a/mmaction/apis/train.py b/mmaction/apis/train.py index 0154bf85d8..54b1fb0163 100644 --- a/mmaction/apis/train.py +++ b/mmaction/apis/train.py @@ -48,6 +48,7 @@ def train_model(model, dataloader_setting = dict( videos_per_gpu=cfg.data.get('videos_per_gpu', 1), workers_per_gpu=cfg.data.get('workers_per_gpu', 1), + persistent_workers=cfg.data.get('persistent_workers', False), num_gpus=len(cfg.gpu_ids), dist=distributed, seed=cfg.seed) @@ -129,6 +130,7 @@ def train_model(model, dataloader_setting = dict( videos_per_gpu=cfg.data.get('videos_per_gpu', 1), workers_per_gpu=1, # save memory and time + persistent_workers=cfg.data.get('persistent_workers', False), num_gpus=len(cfg.gpu_ids), dist=distributed, seed=cfg.seed) @@ -144,6 +146,7 @@ def train_model(model, dataloader_setting = dict( videos_per_gpu=cfg.data.get('videos_per_gpu', 1), workers_per_gpu=cfg.data.get('workers_per_gpu', 1), + persistent_workers=cfg.data.get('persistent_workers', False), # cfg.gpus will be ignored if distributed num_gpus=len(cfg.gpu_ids), dist=distributed, @@ -190,6 +193,7 @@ def train_model(model, dataloader_setting = dict( videos_per_gpu=cfg.data.get('videos_per_gpu', 1), 
workers_per_gpu=cfg.data.get('workers_per_gpu', 1), + persistent_workers=cfg.data.get('persistent_workers', False), num_gpus=len(cfg.gpu_ids), dist=distributed, shuffle=False) diff --git a/mmaction/datasets/builder.py b/mmaction/datasets/builder.py index 821123f1cc..fb2e3e9859 100644 --- a/mmaction/datasets/builder.py +++ b/mmaction/datasets/builder.py @@ -49,7 +49,7 @@ def build_dataloader(dataset, seed=None, drop_last=False, pin_memory=True, - persistent_workers=True, + persistent_workers=False, **kwargs): """Build PyTorch DataLoader. @@ -76,7 +76,7 @@ def build_dataloader(dataset, the worker processes after a dataset has been consumed once. This allows to maintain the workers Dataset instances alive. The argument also has effect in PyTorch>=1.8.0. - Default: True + Default: False kwargs (dict, optional): Any keyword argument to be used to initialize DataLoader. diff --git a/tools/analysis/bench_processing.py b/tools/analysis/bench_processing.py index b2f7034091..df90899da6 100644 --- a/tools/analysis/bench_processing.py +++ b/tools/analysis/bench_processing.py @@ -45,6 +45,7 @@ def main(): dataset, videos_per_gpu=cfg.data.videos_per_gpu, workers_per_gpu=0, + persistent_workers=False, num_gpus=1, dist=False) diff --git a/tools/analysis/benchmark.py b/tools/analysis/benchmark.py index 2e546552f9..8e97a3b2e1 100644 --- a/tools/analysis/benchmark.py +++ b/tools/analysis/benchmark.py @@ -43,6 +43,7 @@ def main(): dataset, videos_per_gpu=1, workers_per_gpu=cfg.data.workers_per_gpu, + persistent_workers=cfg.data.get('persistent_workers', False), dist=False, shuffle=False) From 2ff37975b9ea5851a071909c9847b89132c5f99d Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Mon, 4 Oct 2021 15:50:29 +0800 Subject: [PATCH 270/414] [Doc] Improve ERROR message (#1203) * switch the default value of persistent_workers to False * update * update * Improve err msg * add FAQ * Revert "update" This reverts commit de45e6c785ec72420c3fd53553f63f665feff11b. * Revert "switch the default value of persistent_workers to False" This reverts commit a7346915206346cf6dc91170f8fafd0583ac4c46. * update * update --- docs/faq.md | 12 ++++++------ docs_zh_CN/faq.md | 5 +++++ mmaction/utils/decorators.py | 8 ++++++-- 3 files changed, 17 insertions(+), 8 deletions(-) diff --git a/docs/faq.md b/docs/faq.md index 0813b3be0b..0a462b7b8a 100644 --- a/docs/faq.md +++ b/docs/faq.md @@ -23,12 +23,12 @@ If the contents here do not cover your issue, please create an issue using the [ - **"OSError: MoviePy Error: creation of None failed because of the following error"** Refer to [install.md](https://github.com/open-mmlab/mmaction2/blob/master/docs/install.md#requirements) - 1. For Windows users, [ImageMagick](https://www.imagemagick.org/script/index.php) will not be automatically detected by MoviePy, - there is a need to modify `moviepy/config_defaults.py` file by providing the path to the ImageMagick binary called `magick`, - like `IMAGEMAGICK_BINARY = "C:\\Program Files\\ImageMagick_VERSION\\magick.exe"` - 2. For Linux users, there is a need to modify the `/etc/ImageMagick-6/policy.xml` file by commenting out - `<policy domain="path" rights="none" pattern="@*" />` to `<!-- <policy domain="path" rights="none" pattern="@*" /> -->`, - if ImageMagick is not detected by moviepy. + 1. For Windows users, [ImageMagick](https://www.imagemagick.org/script/index.php) will not be automatically detected by MoviePy, there is a need to modify `moviepy/config_defaults.py` file by providing the path to the ImageMagick binary called `magick`, like `IMAGEMAGICK_BINARY = "C:\\Program Files\\ImageMagick_VERSION\\magick.exe"` + 2. For Linux users, there is a need to modify the `/etc/ImageMagick-6/policy.xml` file by commenting out `<policy domain="path" rights="none" pattern="@*" />` to `<!-- <policy domain="path" rights="none" pattern="@*" /> -->`, if ImageMagick is not detected by moviepy. + +- **"Why do I get the error message 'Please install XXCODEBASE to use XXX' even though I have already installed XXCODEBASE?"** + + You got that error message because our project failed to import a function or a class from XXCODEBASE. You can try to run the corresponding line to see what happens. One possible reason is that, for some codebases in OpenMMLAB, you need to install mmcv-full before you install them. ## Data diff --git a/docs_zh_CN/faq.md b/docs_zh_CN/faq.md index b2fc15dd34..2f328792f0 100644 --- a/docs_zh_CN/faq.md +++ b/docs_zh_CN/faq.md @@ -18,6 +18,11 @@ 需要获取名为 `magick` 的 ImageMagick 二进制包的路径,来修改 `moviepy/config_defaults.py` 文件中的 `IMAGEMAGICK_BINARY`,如 `IMAGEMAGICK_BINARY = "C:\\Program Files\\ImageMagick_VERSION\\magick.exe"` 2. 对于 Linux 用户,如果 ImageMagick 没有被 moviepy 检测,需要注释掉 `/etc/ImageMagick-6/policy.xml` 文件中的 `<policy domain="path" rights="none" pattern="@*" />`,即改为 `<!-- <policy domain="path" rights="none" pattern="@*" /> -->`。 +- **"Please install XXCODEBASE to use XXX"** + + 如得到报错消息 "Please install XXCODEBASE to use XXX",代表 MMAction2 无法从 XXCODEBASE 中 import XXX。用户可以执行对应 import 语句定位原因。 + 一个可能的原因是,对于部分 OpenMMLAB 中的代码库,需先安装 mmcv-full 后再进行安装。 + ## 数据 diff --git a/mmaction/utils/decorators.py b/mmaction/utils/decorators.py index ce923620f2..9da6b48719 100644 --- a/mmaction/utils/decorators.py +++ b/mmaction/utils/decorators.py @@ -10,7 +10,9 @@ def decorate(func): def new_func(*args, **kwargs): raise ImportError( - f'Please install {module_name} to use {func.__name__}.') + f'Please install {module_name} to use {func.__name__}. ' + 'For OpenMMLAB codebases, you may need to install mmcv-full ' + 'first before you install the particular codebase. 
') cls.__init__ = MethodType(import_error_init, cls) return cls From d5ab34805fe6a02bb98e4af158626a21790b6974 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Wed, 6 Oct 2021 21:55:00 +0800 Subject: [PATCH 271/414] Add code spell hook to pre-commit and fix typos (#1208) * fix typos * fix typos * fix typos * fix typos --- .pre-commit-config.yaml | 5 +++++ configs/detection/ava/README.md | 2 +- configs/localization/ssn/README.md | 2 +- configs/localization/ssn/metafile.yml | 4 ++-- configs/skeleton/stgcn/metafile.yml | 2 +- demo/README.md | 6 +++--- demo/webcam_demo_spatiotemporal_det.py | 4 ++-- docs/changelog.md | 2 +- docs/data_preparation.md | 4 ++-- docs/getting_started.md | 2 +- docs/tutorials/1_config.md | 2 +- docs/tutorials/7_customize_runtime.md | 2 +- docs_zh_CN/tutorials/7_customize_runtime.md | 2 +- mmaction/apis/test.py | 2 +- mmaction/core/bbox/assigners/max_iou_assigner_ava.py | 4 ++-- mmaction/core/evaluation/accuracy.py | 2 +- .../core/evaluation/ava_evaluation/np_box_list.py | 4 ++-- .../ava_evaluation/object_detection_evaluation.py | 2 +- .../ava_evaluation/per_image_evaluation.py | 2 +- mmaction/core/evaluation/ava_utils.py | 4 ++-- mmaction/datasets/pipelines/augmentations.py | 8 ++++---- mmaction/datasets/pipelines/formating.py | 12 ++++++------ mmaction/datasets/pipelines/loading.py | 2 +- mmaction/datasets/pipelines/pose_loading.py | 4 ++-- mmaction/datasets/ssn_dataset.py | 4 ++-- mmaction/localization/bsn_utils.py | 6 +++--- mmaction/models/backbones/resnet3d.py | 2 +- mmaction/models/backbones/resnet3d_slowfast.py | 4 ++-- mmaction/models/common/lfb.py | 2 +- mmaction/models/heads/fbo_head.py | 4 ++-- mmaction/models/localizers/bmn.py | 2 +- mmaction/models/localizers/bsn.py | 2 +- mmaction/models/losses/bmn_loss.py | 4 ++-- mmaction/models/losses/hvu_loss.py | 2 +- mmaction/models/skeleton_gcn/utils/graph.py | 2 +- tests/test_data/test_datasets/test_ssn_dataset.py | 2 +- tests/test_data/test_formating.py | 2 +- .../test_pipelines/test_loadings/test_decode.py | 4 ++-- tests/test_runtime/test_eval_hook.py | 2 +- tests/test_runtime/test_precise_bn.py | 2 +- tools/analysis/check_videos.py | 2 +- tools/data/activitynet/tsn_feature_extraction.py | 2 +- tools/data/hvu/download.py | 2 +- tools/data/kinetics/README.md | 2 +- tools/data/omnisource/README.md | 8 ++++---- tools/data/omnisource/README_zh-CN.md | 2 +- tools/deployment/pytorch2onnx.py | 2 +- tools/train.py | 2 +- 48 files changed, 79 insertions(+), 74 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 702fec4d1b..da61b68786 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -39,3 +39,8 @@ repos: hooks: - id: docformatter args: ["--in-place", "--wrap-descriptions", "79"] + - repo: https://github.com/codespell-project/codespell + rev: v2.1.0 + hooks: + - id: codespell + args: ["--skip", "*.ipynb,tools/data/hvu/label_map.json", "-L", "formating,te,nd,thre,Gool,gool"] diff --git a/configs/detection/ava/README.md b/configs/detection/ava/README.md index bac296f79a..8eeadf52b1 100644 --- a/configs/detection/ava/README.md +++ b/configs/detection/ava/README.md @@ -99,7 +99,7 @@ You can train custom classes from ava. Ava suffers from class imbalance. There a Three steps to train custom classes: - Step 1: Select custom classes from original classes, named `custom_classes`. Class `0` should not be selected since it is reserved for further usage (to identify whether a proposal is positive or negative, not implemented yet) and will be added automatically. 
-- Step 2: Set `num_classes`. In order to be compatible with current codes, plase make sure `num_classes == len(custom_classes) + 1`. +- Step 2: Set `num_classes`. In order to be compatible with current codes, Please make sure `num_classes == len(custom_classes) + 1`. - The new class `0` corresponds to original class `0`. The new class `i`(i > 0) corresponds to original class `custom_classes[i-1]`. - There are three `num_classes` in ava config, `model -> roi_head -> bbox_head -> num_classes`, `data -> train -> num_classes` and `data -> val -> num_classes`. - If `num_classes <= 5`, input arg `topk` of `BBoxHeadAVA` should be modified. The default value of `topk` is `(3, 5)`, and all elements of `topk` must be smaller than `num_classes`. diff --git a/configs/localization/ssn/README.md b/configs/localization/ssn/README.md index 61faa9108e..8e4c606538 100644 --- a/configs/localization/ssn/README.md +++ b/configs/localization/ssn/README.md @@ -16,7 +16,7 @@ year = {2017} ## Model Zoo -| config | gpus | backbone | pretrain | mAP@0.3 | mAP@0.4 | mAP@0.5 | reference mAP@0.3 | reference mAP@0.4 | reference mAP@0.5 | gpu_mem(M) | ckpt | log | json | refrence ckpt | refrence json +| config | gpus | backbone | pretrain | mAP@0.3 | mAP@0.4 | mAP@0.5 | reference mAP@0.3 | reference mAP@0.4 | reference mAP@0.5 | gpu_mem(M) | ckpt | log | json | reference ckpt | reference json |:-:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:-:|:-:|:-:|:-:|---|:--:|:--:| |[ssn_r50_450e_thumos14_rgb](/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py) |8| ResNet50 | ImageNet |29.37|22.15|15.69|[27.61](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)|[21.28](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)|[14.57](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)|6352|[ckpt](https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/ssn_r50_450e_thumos14_rgb_20201012-1920ab16.pth)| [log](https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/20201005_144656.log)| [json](https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/20201005_144656.log.json)| [ckpt](https://download.openmmlab.com/mmaction/localization/ssn/mmaction_reference/ssn_r50_450e_thumos14_rgb_ref/ssn_r50_450e_thumos14_rgb_ref_20201014-b6f48f68.pth)| [json](https://download.openmmlab.com/mmaction/localization/ssn/mmaction_reference/ssn_r50_450e_thumos14_rgb_ref/20201008_103258.log.json)| diff --git a/configs/localization/ssn/metafile.yml b/configs/localization/ssn/metafile.yml index 7470ea852c..d2b588009d 100644 --- a/configs/localization/ssn/metafile.yml +++ b/configs/localization/ssn/metafile.yml @@ -26,5 +26,5 @@ Models: reference mAP@0.3: '[27.61](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)' reference mAP@0.4: '[21.28](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)' reference mAP@0.5: '[14.57](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)' - refrence ckpt: '[ckpt](https://download.openmmlab.com/mmaction/localization/ssn/mmaction_reference/ssn_r50_450e_thumos14_rgb_ref/ssn_r50_450e_thumos14_rgb_ref_20201014-b6f48f68.pth)' - refrence json: 
'[json](https://download.openmmlab.com/mmaction/localization/ssn/mmaction_reference/ssn_r50_450e_thumos14_rgb_ref/20201008_103258.log.json)' + reference ckpt: '[ckpt](https://download.openmmlab.com/mmaction/localization/ssn/mmaction_reference/ssn_r50_450e_thumos14_rgb_ref/ssn_r50_450e_thumos14_rgb_ref_20201014-b6f48f68.pth)' + reference json: '[json](https://download.openmmlab.com/mmaction/localization/ssn/mmaction_reference/ssn_r50_450e_thumos14_rgb_ref/20201008_103258.log.json)' diff --git a/configs/skeleton/stgcn/metafile.yml b/configs/skeleton/stgcn/metafile.yml index a5e8893a65..0ea65b5708 100644 --- a/configs/skeleton/stgcn/metafile.yml +++ b/configs/skeleton/stgcn/metafile.yml @@ -10,7 +10,7 @@ Models: Epochs: 80 Parameters: 3088704 Training Data: NTU60 - Training Resouces: 2 GPUs + Training Resources: 2 GPUs Name: stgcn_80e_ntu60_xsub_keypoint Results: Dataset: NTU60 diff --git a/demo/README.md b/demo/README.md index 7fc362d1af..baa4466912 100644 --- a/demo/README.md +++ b/demo/README.md @@ -46,8 +46,8 @@ Optional arguments: - `--use-frames`: If specified, the demo will take rawframes as input. Otherwise, it will take a video as input. - `DEVICE_TYPE`: Type of device to run the demo. Allowed values are cuda device like `cuda:0` or `cpu`. If not specified, it will be set to `cuda:0`. -- `FPS`: FPS value of the output video when using rawframes as input. If not specified, it wll be set to 30. -- `FONT_SCALE`: Font scale of the label added in the video. If not specified, it wll be 0.5. +- `FPS`: FPS value of the output video when using rawframes as input. If not specified, it will be set to 30. +- `FONT_SCALE`: Font scale of the label added in the video. If not specified, it will be 0.5. - `FONT_COLOR`: Font color of the label added in the video. If not specified, it will be `white`. - `TARGET_RESOLUTION`: Resolution(desired_width, desired_height) for resizing the frames before output when using a video as input. If not specified, it will be None and the frames are resized by keeping the existing aspect ratio. - `RESIZE_ALGORITHM`: Resize algorithm used for resizing. If not specified, it will be set to `bicubic`. @@ -206,7 +206,7 @@ python demo/demo_gradcam.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${VIDEO_FILE} [--u - `--use-frames`: If specified, the demo will take rawframes as input. Otherwise, it will take a video as input. - `DEVICE_TYPE`: Type of device to run the demo. Allowed values are cuda device like `cuda:0` or `cpu`. If not specified, it will be set to `cuda:0`. -- `FPS`: FPS value of the output video when using rawframes as input. If not specified, it wll be set to 30. +- `FPS`: FPS value of the output video when using rawframes as input. If not specified, it will be set to 30. - `OUT_FILE`: Path to the output file which can be a video format or gif format. If not specified, it will be set to `None` and does not generate the output file. - `TARGET_LAYER_NAME`: Layer name to generate GradCAM localization map. - `TARGET_RESOLUTION`: Resolution(desired_width, desired_height) for resizing the frames before output when using a video as input. If not specified, it will be None and the frames are resized by keeping the existing aspect ratio. 
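Circling back to the AVA custom-classes steps in the README hunk earlier in this patch, here is a hedged configuration sketch of how the pieces fit together; the selected class indices are made up for illustration, and the `custom_classes` keys on the dataset side are an assumption inferred from the steps rather than copied from a shipped config:

```python
# Illustrative only: suppose we keep seven of the original AVA classes.
custom_classes = [11, 12, 17, 72, 74, 79, 80]  # hypothetical selection
num_classes = len(custom_classes) + 1          # +1 for the reserved class 0

model = dict(
    roi_head=dict(bbox_head=dict(
        num_classes=num_classes,
        # Every topk entry must be smaller than num_classes; with
        # num_classes = 8 the default (3, 5) still qualifies.
        topk=(3, 5))))

data = dict(
    train=dict(num_classes=num_classes, custom_classes=custom_classes),
    val=dict(num_classes=num_classes, custom_classes=custom_classes))
```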
diff --git a/demo/webcam_demo_spatiotemporal_det.py b/demo/webcam_demo_spatiotemporal_det.py index ac91e38c26..ae3ab2c59e 100644 --- a/demo/webcam_demo_spatiotemporal_det.py +++ b/demo/webcam_demo_spatiotemporal_det.py @@ -160,7 +160,7 @@ def __init__(self): self.processed_frames = None # model inputs self.frames_inds = None # select frames from processed frames self.img_shape = None # model inputs, processed frame shape - # `action_preds` is `list[list[tuple]]`. The outter brackets indicate + # `action_preds` is `list[list[tuple]]`. The outer brackets indicate # different bboxes and the intter brackets indicate different action # results for the same bbox. tuple contains `class_name` and `score`. self.action_preds = None # stdet results @@ -333,7 +333,7 @@ def predict(self, task): result[class_id][bbox_id, 4])) # update task - # `preds` is `list[list[tuple]]`. The outter brackets indicate + # `preds` is `list[list[tuple]]`. The outer brackets indicate # different bboxes and the intter brackets indicate different action # results for the same bbox. tuple contains `class_name` and `score`. task.add_action_preds(preds) diff --git a/docs/changelog.md b/docs/changelog.md index 7ee0375adf..f37e2c379a 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -37,7 +37,7 @@ - Remove redundant augmentations in config files ([#996](https://github.com/open-mmlab/mmaction2/pull/996)) - Make resource directory to hold common resource pictures ([#1011](https://github.com/open-mmlab/mmaction2/pull/1011)) -- Remove deperecated FrameSelector ([#1010](https://github.com/open-mmlab/mmaction2/pull/1010)) +- Remove deprecated FrameSelector ([#1010](https://github.com/open-mmlab/mmaction2/pull/1010)) - Support Concat Dataset ([#1000](https://github.com/open-mmlab/mmaction2/pull/1000)) - Add `to-mp4` option to resize_videos.py ([#1021](https://github.com/open-mmlab/mmaction2/pull/1021)) - Add option to keep tail frames ([#1050](https://github.com/open-mmlab/mmaction2/pull/1050)) diff --git a/docs/data_preparation.md b/docs/data_preparation.md index e864cd3357..cf1b127d14 100644 --- a/docs/data_preparation.md +++ b/docs/data_preparation.md @@ -134,7 +134,7 @@ python tools/data/extract_audio.py ${ROOT} ${DST_ROOT} [--ext ${EXT}] [--num-wor - `ROOT`: The root directory of the videos. - `DST_ROOT`: The destination root directory of the audios. -- `EXT`: Extention of the video files. e.g., `.mp4`. +- `EXT`: Extension of the video files. e.g., `.mp4`. - `N_WORKERS`: Number of processes to be used. After extracting audios, you are free to decode and generate the spectrogram on-the-fly such as [this](/configs/audio_recognition/tsn_r50_64x1x1_kinetics400_audio.py). As for the annotations, you can directly use those of the rawframes as long as you keep the relative position of audio files same as the rawframes directory. However, extracting spectrogram on-the-fly is slow and bad for prototype iteration. Therefore, we also provide a script (and many useful tools to play with) for you to generation spectrogram off-line. @@ -147,7 +147,7 @@ python tools/data/build_audio_features.py ${AUDIO_HOME_PATH} ${SPECTROGRAM_SAVE_ - `AUDIO_HOME_PATH`: The root directory of the audio files. - `SPECTROGRAM_SAVE_PATH`: The destination root directory of the audio features. -- `EXT`: Extention of the audio files. e.g., `.m4a`. +- `EXT`: Extension of the audio files. e.g., `.m4a`. - `N_WORKERS`: Number of processes to be used. - `PART`: Determines how many parts to be splited and which part to run. 
e.g., `2/5` means splitting all files into 5-fold and executing the 2nd part. This is useful if you have several machines. diff --git a/docs/getting_started.md b/docs/getting_started.md index bcc6e4c6d3..7df61337d6 100644 --- a/docs/getting_started.md +++ b/docs/getting_started.md @@ -88,7 +88,7 @@ Optional arguments: - `JOB_LAUNCHER`: Items for distributed job initialization launcher. Allowed choices are `none`, `pytorch`, `slurm`, `mpi`. Especially, if set to none, it will test in a non-distributed mode. - `LOCAL_RANK`: ID for local rank. If not specified, it will be set to 0. - `--onnx`: If specified, recognition results will be generated by onnx model and `CHECKPOINT_FILE` should be onnx model file path. Onnx model files are generated by `/tools/deployment/pytorch2onnx.py`. For now, multi-gpu mode and dynamic input shape mode are not supported. Please note that the output tensors of dataset and the input tensors of onnx model should share the same shape. And it is recommended to remove all test-time augmentation methods in `test_pipeline`(`ThreeCrop`, `TenCrop`, `twice_sample`, etc.) -- `--tensorrt`: If specified, recognition results will be generated by TensorRT engine and `CHECKPOINT_FILE` should be TensorRT engine file path. TensorRT engines are generated by exported onnx models and TensorRT official convertion tools. For now, multi-gpu mode and dynamic input shape mode are not supported. Please note that the output tensors of dataset and the input tensors of TensorRT engine should share the same shape. And it is recommended to remove all test-time augmentation methods in `test_pipeline`(`ThreeCrop`, `TenCrop`, `twice_sample`, etc.) +- `--tensorrt`: If specified, recognition results will be generated by TensorRT engine and `CHECKPOINT_FILE` should be TensorRT engine file path. TensorRT engines are generated by exported onnx models and TensorRT official conversion tools. For now, multi-gpu mode and dynamic input shape mode are not supported. Please note that the output tensors of dataset and the input tensors of TensorRT engine should share the same shape. And it is recommended to remove all test-time augmentation methods in `test_pipeline`(`ThreeCrop`, `TenCrop`, `twice_sample`, etc.) Examples: diff --git a/docs/tutorials/1_config.md b/docs/tutorials/1_config.md index 20abeeba3d..f31ac47682 100644 --- a/docs/tutorials/1_config.md +++ b/docs/tutorials/1_config.md @@ -217,7 +217,7 @@ which is convenient to conduct various experiments. load_from = None # load models as a pre-trained model from a given path. This will not resume training resume_from = None # Resume checkpoints from a given path, the training will be resumed from the epoch when the checkpoint's is saved workflow = [('train', 1)] # Workflow for runner. [('train', 1)] means there is only one workflow and the workflow named 'train' is executed once - output_config = dict( # Config of localization ouput + output_config = dict( # Config of localization output out=f'{work_dir}/results.json', # Path to output file output_format='json') # File format of output file ``` diff --git a/docs/tutorials/7_customize_runtime.md b/docs/tutorials/7_customize_runtime.md index 232057e376..c2201cab65 100644 --- a/docs/tutorials/7_customize_runtime.md +++ b/docs/tutorials/7_customize_runtime.md @@ -41,7 +41,7 @@ optimizer = dict(type='Adam', lr=0.0003, weight_decay=0.0001) To modify the learning rate of the model, the users only need to modify the `lr` in the config of optimizer. 
The users can directly set arguments following the [API doc](https://pytorch.org/docs/stable/optim.html?highlight=optim#module-torch.optim) of PyTorch. -For example, if you want to use `Adam` with the setting like `torch.optim.Adam(parms, lr=0.001, betas=(0.9, 0.999), eps=1e-08, weight_decay=0, amsgrad=False)` in PyTorch, +For example, if you want to use `Adam` with the setting like `torch.optim.Adam(params, lr=0.001, betas=(0.9, 0.999), eps=1e-08, weight_decay=0, amsgrad=False)` in PyTorch, the modification could be set as the following. ```python diff --git a/docs_zh_CN/tutorials/7_customize_runtime.md b/docs_zh_CN/tutorials/7_customize_runtime.md index bbf451a0db..027f0837f0 100644 --- a/docs_zh_CN/tutorials/7_customize_runtime.md +++ b/docs_zh_CN/tutorials/7_customize_runtime.md @@ -41,7 +41,7 @@ optimizer = dict(type='Adam', lr=0.0003, weight_decay=0.0001) 要修改模型的学习率,用户只需要在优化程序的配置中修改 “lr” 即可。 用户可根据 [PyTorch API 文档](https://pytorch.org/docs/stable/optim.html?highlight=optim#module-torch.optim) 进行参数设置 -例如,如果想使用 `Adam` 并设置参数为 `torch.optim.Adam(parms, lr=0.001, betas=(0.9, 0.999), eps=1e-08, weight_decay=0, amsgrad=False)`, +例如,如果想使用 `Adam` 并设置参数为 `torch.optim.Adam(params, lr=0.001, betas=(0.9, 0.999), eps=1e-08, weight_decay=0, amsgrad=False)`, 则需要进行如下修改 ```python diff --git a/mmaction/apis/test.py b/mmaction/apis/test.py index fa6291cdc0..95bff34a00 100644 --- a/mmaction/apis/test.py +++ b/mmaction/apis/test.py @@ -138,7 +138,7 @@ def collect_results_cpu(result_part, size, tmpdir=None): # noqa: F811 dist.barrier() # dump the part result to the dir mmcv.dump(result_part, osp.join(tmpdir, f'part_{rank}.pkl')) - # synchronizes all processes for loding pickle file + # synchronizes all processes for loading pickle file dist.barrier() # collect all parts if rank != 0: diff --git a/mmaction/core/bbox/assigners/max_iou_assigner_ava.py b/mmaction/core/bbox/assigners/max_iou_assigner_ava.py index d347282369..a7c50e962d 100644 --- a/mmaction/core/bbox/assigners/max_iou_assigner_ava.py +++ b/mmaction/core/bbox/assigners/max_iou_assigner_ava.py @@ -34,7 +34,7 @@ class MaxIoUAssignerAVA(MaxIoUAssigner): same highest overlap with some gt to that gt. Default: True. """ - # The function is overriden, to handle the case that gt_label is not + # The function is overridden, to handle the case that gt_label is not # int def assign_wrt_overlaps(self, overlaps, gt_labels=None): """Assign w.r.t. the overlaps of bboxes with gts. @@ -95,7 +95,7 @@ def assign_wrt_overlaps(self, overlaps, gt_labels=None): assigned_gt_inds[pos_inds] = argmax_overlaps[pos_inds] + 1 if self.match_low_quality: - # Low-quality matching will overwirte the assigned_gt_inds + # Low-quality matching will overwrite the assigned_gt_inds # assigned in Step 3. Thus, the assigned gt might not be the # best one for prediction. # For example, if bbox A has 0.9 and 0.8 iou with GT bbox diff --git a/mmaction/core/evaluation/accuracy.py b/mmaction/core/evaluation/accuracy.py index a125a22c5c..7193a7b8c1 100644 --- a/mmaction/core/evaluation/accuracy.py +++ b/mmaction/core/evaluation/accuracy.py @@ -210,7 +210,7 @@ def binary_precision_recall_curve(y_score, y_true): Returns: precision (np.ndarray): The precision of different thresholds. recall (np.ndarray): The recall of different thresholds. - thresholds (np.ndarray): Different thresholds at which precison and + thresholds (np.ndarray): Different thresholds at which precision and recall are tested. 
""" assert isinstance(y_score, np.ndarray) diff --git a/mmaction/core/evaluation/ava_evaluation/np_box_list.py b/mmaction/core/evaluation/ava_evaluation/np_box_list.py index ddfdd5184d..255bebe399 100644 --- a/mmaction/core/evaluation/ava_evaluation/np_box_list.py +++ b/mmaction/core/evaluation/ava_evaluation/np_box_list.py @@ -66,7 +66,7 @@ def add_field(self, field, field_data): """Add data to a specified field. Args: - field: a string parameter used to speficy a related field to be + field: a string parameter used to specify a related field to be accessed. field_data: a numpy array of [N, ...] representing the data associated with the field. @@ -94,7 +94,7 @@ def get_field(self, field): collection. Args: - field: a string parameter used to speficy a related field to be + field: a string parameter used to specify a related field to be accessed. Returns: diff --git a/mmaction/core/evaluation/ava_evaluation/object_detection_evaluation.py b/mmaction/core/evaluation/ava_evaluation/object_detection_evaluation.py index 508a076def..1886521485 100644 --- a/mmaction/core/evaluation/ava_evaluation/object_detection_evaluation.py +++ b/mmaction/core/evaluation/ava_evaluation/object_detection_evaluation.py @@ -39,7 +39,7 @@ class DetectionEvaluator: - """Interface for object detection evalution classes. + """Interface for object detection evaluation classes. Example usage of the Evaluator: ------------------------------ diff --git a/mmaction/core/evaluation/ava_evaluation/per_image_evaluation.py b/mmaction/core/evaluation/ava_evaluation/per_image_evaluation.py index 2d06672d89..9a6e0d9e40 100644 --- a/mmaction/core/evaluation/ava_evaluation/per_image_evaluation.py +++ b/mmaction/core/evaluation/ava_evaluation/per_image_evaluation.py @@ -15,7 +15,7 @@ """Evaluate Object Detection result on a single image. Annotate each detected result as true positives or false positive according to -a predefined IOU ratio. Non Maximum Supression is used by default. Multi class +a predefined IOU ratio. Non Maximum Suppression is used by default. Multi class detection is supported by default. Based on the settings, per image evaluation is either performed on boxes or on object masks. """ diff --git a/mmaction/core/evaluation/ava_utils.py b/mmaction/core/evaluation/ava_utils.py index e80708d9a5..eac227ce5b 100644 --- a/mmaction/core/evaluation/ava_utils.py +++ b/mmaction/core/evaluation/ava_utils.py @@ -70,9 +70,9 @@ def read_csv(csv_file, class_whitelist=None): boxes: A dictionary mapping each unique image key (string) to a list of boxes, given as coordinates [y1, x1, y2, x2]. labels: A dictionary mapping each unique image key (string) to a list - of integer class lables, matching the corresponding box in `boxes`. + of integer class labels, matching the corresponding box in `boxes`. scores: A dictionary mapping each unique image key (string) to a list - of score values lables, matching the corresponding label in `labels`. + of score values labels, matching the corresponding label in `labels`. If scores are not provided in the csv, then they will default to 1.0. """ start = time.time() diff --git a/mmaction/datasets/pipelines/augmentations.py b/mmaction/datasets/pipelines/augmentations.py index d157e14442..0f88adc769 100644 --- a/mmaction/datasets/pipelines/augmentations.py +++ b/mmaction/datasets/pipelines/augmentations.py @@ -363,7 +363,7 @@ def default_transforms(): """Default transforms for imgaug. Implement RandAugment by imgaug. - Plase visit `https://arxiv.org/abs/1909.13719` for more information. 
+ Please visit `https://arxiv.org/abs/1909.13719` for more information. Augmenters and hyper parameters are borrowed from the following repo: https://github.com/tensorflow/tpu/blob/master/models/official/efficientnet/autoaugment.py # noqa @@ -1812,7 +1812,7 @@ def __init__(self, ratio): raise TypeError('Amplification ratio should be float.') def __call__(self, results): - """Perfrom the audio amplification. + """Perform the audio amplification. Args: results (dict): The resulting dict to be modified and passed @@ -1838,8 +1838,8 @@ class MelSpectrogram: keys are "audios". Args: - window_size (int): The window size in milisecond. Default: 32. - step_size (int): The step size in milisecond. Default: 16. + window_size (int): The window size in millisecond. Default: 32. + step_size (int): The step size in millisecond. Default: 16. n_mels (int): Number of mels. Default: 80. fixed_length (int): The sample length of melspectrogram maybe not exactly as wished due to different fps, fix the length for batch diff --git a/mmaction/datasets/pipelines/formating.py b/mmaction/datasets/pipelines/formating.py index be6fe12e9e..7e03979db8 100644 --- a/mmaction/datasets/pipelines/formating.py +++ b/mmaction/datasets/pipelines/formating.py @@ -41,7 +41,7 @@ def __init__(self, keys): self.keys = keys def __call__(self, results): - """Performs the ToTensor formating. + """Performs the ToTensor formatting. Args: results (dict): The resulting dict to be modified and passed @@ -96,7 +96,7 @@ def __init__(self, fields): self.fields = fields def __call__(self, results): - """Performs the ToDataContainer formating. + """Performs the ToDataContainer formatting. Args: results (dict): The resulting dict to be modified and passed @@ -128,7 +128,7 @@ def __init__(self, keys): self.keys = keys def __call__(self, results): - """Performs the ImageToTensor formating. + """Performs the ImageToTensor formatting. Args: results (dict): The resulting dict to be modified and passed @@ -185,7 +185,7 @@ class Collect: Args: keys (Sequence[str]): Required keys to be collected. - meta_name (str): The name of the key that contains meta infomation. + meta_name (str): The name of the key that contains meta information. This key is always populated. Default: "img_metas". meta_keys (Sequence[str]): Keys that are collected under meta_name. The contents of the ``meta_name`` dictionary depends on @@ -222,7 +222,7 @@ def __init__(self, self.nested = nested def __call__(self, results): - """Performs the Collect formating. + """Performs the Collect formatting. Args: results (dict): The resulting dict to be modified and passed @@ -271,7 +271,7 @@ def __init__(self, input_format, collapse=False): f'The input format {self.input_format} is invalid.') def __call__(self, results): - """Performs the FormatShape formating. + """Performs the FormatShape formatting. Args: results (dict): The resulting dict to be modified and passed diff --git a/mmaction/datasets/pipelines/loading.py b/mmaction/datasets/pipelines/loading.py index 14a2fbe202..a8201bf7f7 100644 --- a/mmaction/datasets/pipelines/loading.py +++ b/mmaction/datasets/pipelines/loading.py @@ -1593,7 +1593,7 @@ class AudioFeatureSelector: Args: fixed_length (int): As the features selected by frames sampled may - not be extactly the same, `fixed_length` will truncate or pad them + not be exactly the same, `fixed_length` will truncate or pad them into the same size. Default: 128. 
""" diff --git a/mmaction/datasets/pipelines/pose_loading.py b/mmaction/datasets/pipelines/pose_loading.py index 1f221f57db..e7c713e100 100644 --- a/mmaction/datasets/pipelines/pose_loading.py +++ b/mmaction/datasets/pipelines/pose_loading.py @@ -280,8 +280,8 @@ def __call__(self, results): def mapinds(inds): uni = np.unique(inds) - mapp = {x: i for i, x in enumerate(uni)} - inds = [mapp[x] for x in inds] + map_ = {x: i for i, x in enumerate(uni)} + inds = [map_[x] for x in inds] return np.array(inds, dtype=np.int16) if self.squeeze: diff --git a/mmaction/datasets/ssn_dataset.py b/mmaction/datasets/ssn_dataset.py index cd0f0b908f..065c7422b2 100644 --- a/mmaction/datasets/ssn_dataset.py +++ b/mmaction/datasets/ssn_dataset.py @@ -148,7 +148,7 @@ class SSNDataset(BaseDataset): aug_segments (list[int]): Number of segments in starting and ending period. Default: (2, 2). aug_ratio (int | float | tuple[int | float]): The ratio of the length - of augmentation to that of the proposal. Defualt: (0.5, 0.5). + of augmentation to that of the proposal. Default: (0.5, 0.5). clip_len (int): Frames of each sampled output clip. Default: 1. frame_interval (int): Temporal interval of adjacent sampled frames. @@ -489,7 +489,7 @@ def evaluate(self, return eval_results def construct_proposal_pools(self): - """Construct positve proposal pool, incomplete proposal pool and + """Construct positive proposal pool, incomplete proposal pool and background proposal pool of the entire dataset.""" for video_info in self.video_infos: positives = self.get_positives( diff --git a/mmaction/localization/bsn_utils.py b/mmaction/localization/bsn_utils.py index 6938fb8eab..27de337215 100644 --- a/mmaction/localization/bsn_utils.py +++ b/mmaction/localization/bsn_utils.py @@ -13,12 +13,12 @@ def generate_candidate_proposals(video_list, peak_threshold, tem_results_ext='.csv', result_dict=None): - """Generate Candidate Proposals with given temporal evalutation results. + """Generate Candidate Proposals with given temporal evaluation results. Each proposal file will contain: 'tmin,tmax,tmin_score,tmax_score,score,match_iou,match_ioa'. Args: - video_list (list[int]): List of video indexs to generate proposals. + video_list (list[int]): List of video indexes to generate proposals. video_infos (list[dict]): List of video_info dict that contains 'video_name', 'duration_frame', 'duration_second', 'feature_frame', and 'annotations'. @@ -140,7 +140,7 @@ def generate_bsp_feature(video_list, """Generate Boundary-Sensitive Proposal Feature with given proposals. Args: - video_list (list[int]): List of video indexs to generate bsp_feature. + video_list (list[int]): List of video indexes to generate bsp_feature. video_infos (list[dict]): List of video_info dict that contains 'video_name'. tem_results_dir (str): Directory to load temporal evaluation diff --git a/mmaction/models/backbones/resnet3d.py b/mmaction/models/backbones/resnet3d.py index 68da5a43ef..8ae00c64fa 100644 --- a/mmaction/models/backbones/resnet3d.py +++ b/mmaction/models/backbones/resnet3d.py @@ -697,7 +697,7 @@ def _inflate_weights(self, logger): Args: logger (logging.Logger): The logger used to print - debugging infomation. + debugging information. 
""" state_dict_r2d = _load_checkpoint(self.pretrained) diff --git a/mmaction/models/backbones/resnet3d_slowfast.py b/mmaction/models/backbones/resnet3d_slowfast.py index aaea09e625..0b70f4ac19 100644 --- a/mmaction/models/backbones/resnet3d_slowfast.py +++ b/mmaction/models/backbones/resnet3d_slowfast.py @@ -215,7 +215,7 @@ def inflate_weights(self, logger): Args: logger (logging.Logger): The logger used to print - debugging infomation. + debugging information. """ state_dict_r2d = _load_checkpoint(self.pretrained) @@ -472,7 +472,7 @@ def init_weights(self, pretrained=None): # Directly load 3D model. load_checkpoint(self, self.pretrained, strict=True, logger=logger) elif self.pretrained is None: - # Init two branch seperately. + # Init two branch separately. self.fast_path.init_weights() self.slow_path.init_weights() else: diff --git a/mmaction/models/common/lfb.py b/mmaction/models/common/lfb.py index a80ff1785e..3fb82cf333 100644 --- a/mmaction/models/common/lfb.py +++ b/mmaction/models/common/lfb.py @@ -100,7 +100,7 @@ def __init__(self, warnings.warn( 'If distributed training is used with multi-GPUs, lfb ' 'will be loaded multiple times on RAM. In this case, ' - "'lmdb' is recomended.", UserWarning) + "'lmdb' is recommended.", UserWarning) self.load_lfb('cpu') elif self.device == 'lmdb': assert lmdb_imported, ( diff --git a/mmaction/models/heads/fbo_head.py b/mmaction/models/heads/fbo_head.py index 95dee4884a..7790e7d539 100644 --- a/mmaction/models/heads/fbo_head.py +++ b/mmaction/models/heads/fbo_head.py @@ -175,7 +175,7 @@ class FBONonLocal(nn.Module): Args: st_feat_channels (int): Channels of short-term features. lt_feat_channels (int): Channels of long-term features. - latent_channels (int): Channles of latent features. + latent_channels (int): Channels of latent features. num_st_feat (int): Number of short-term roi features. num_lt_feat (int): Number of long-term roi features. num_non_local_layers (int): Number of non-local layers, which is @@ -289,7 +289,7 @@ def __init__(self): self.avg_pool = nn.AdaptiveAvgPool3d((1, None, None)) def init_weights(self, pretrained=None): - # FBOAvg has no parameters to be initalized. + # FBOAvg has no parameters to be initialized. pass def forward(self, st_feat, lt_feat): diff --git a/mmaction/models/localizers/bmn.py b/mmaction/models/localizers/bmn.py index 420495940c..df137b3818 100644 --- a/mmaction/models/localizers/bmn.py +++ b/mmaction/models/localizers/bmn.py @@ -158,7 +158,7 @@ def _temporal_anchors(self, tmin_offset=0., tmax_offset=1.): Args: tmin_offset (int): Offset for the minimum value of temporal anchor. Default: 0. - tmax_offset (int): Offset for the maximun value of temporal anchor. + tmax_offset (int): Offset for the maximum value of temporal anchor. Default: 1. Returns: diff --git a/mmaction/models/localizers/bsn.py b/mmaction/models/localizers/bsn.py index 1f8e7983cd..ef595fe783 100644 --- a/mmaction/models/localizers/bsn.py +++ b/mmaction/models/localizers/bsn.py @@ -87,7 +87,7 @@ def _temporal_anchors(self, tmin_offset=0., tmax_offset=1.): Args: tmin_offset (int): Offset for the minimum value of temporal anchor. Default: 0. - tmax_offset (int): Offset for the maximun value of temporal anchor. + tmax_offset (int): Offset for the maximum value of temporal anchor. Default: 1. 
Returns: diff --git a/mmaction/models/losses/bmn_loss.py b/mmaction/models/losses/bmn_loss.py index 5641847626..eb997c9ea2 100644 --- a/mmaction/models/losses/bmn_loss.py +++ b/mmaction/models/losses/bmn_loss.py @@ -62,7 +62,7 @@ def pem_reg_loss(pred_score, temporal_iou. Default: 0.3. Returns: - torch.Tensor: Proposal evalutaion regression loss. + torch.Tensor: Proposal evaluation regression loss. """ u_hmask = (gt_iou_map > high_temporal_iou_threshold).float() u_mmask = ((gt_iou_map <= high_temporal_iou_threshold) & @@ -113,7 +113,7 @@ def pem_cls_loss(pred_score, eps (float): Epsilon for small value. Default: 1e-5 Returns: - torch.Tensor: Proposal evalutaion classification loss. + torch.Tensor: Proposal evaluation classification loss. """ pmask = (gt_iou_map > threshold).float() nmask = (gt_iou_map <= threshold).float() diff --git a/mmaction/models/losses/hvu_loss.py b/mmaction/models/losses/hvu_loss.py index 25388b98d5..9deb862177 100644 --- a/mmaction/models/losses/hvu_loss.py +++ b/mmaction/models/losses/hvu_loss.py @@ -109,7 +109,7 @@ def _forward(self, cls_score, label, mask, category_mask): if self.with_mask: category_mask_i = category_mask[:, idx].reshape(-1) # there should be at least one sample which contains tags - # in thie category + # in this category if torch.sum(category_mask_i) < 0.5: losses[f'{name}_LOSS'] = torch.tensor(.0).cuda() loss_weights[f'{name}_LOSS'] = .0 diff --git a/mmaction/models/skeleton_gcn/utils/graph.py b/mmaction/models/skeleton_gcn/utils/graph.py index 04e8fc19f3..02bbf9effb 100644 --- a/mmaction/models/skeleton_gcn/utils/graph.py +++ b/mmaction/models/skeleton_gcn/utils/graph.py @@ -48,7 +48,7 @@ class Graph: Strategies' in our paper (https://arxiv.org/abs/1801.07455). max_hop (int): the maximal distance between two connected nodes. - Dafault: 1 + Default: 1 dilation (int): controls the spacing between the kernel points. 
Default: 1 """ diff --git a/tests/test_data/test_datasets/test_ssn_dataset.py b/tests/test_data/test_datasets/test_ssn_dataset.py index c3d04f9ada..3b71f3cde9 100644 --- a/tests/test_data/test_datasets/test_ssn_dataset.py +++ b/tests/test_data/test_datasets/test_ssn_dataset.py @@ -80,7 +80,7 @@ def test_ssn_dataset(self): assert ssn_infos[0]['video_id'] == 'imgs' assert ssn_infos[0]['total_frames'] == 5 - # test ssn datatset with normalized proposal file + # test ssn dataset with normalized proposal file with pytest.raises(Exception): ssn_dataset = SSNDataset( self.proposal_norm_ann_file, diff --git a/tests/test_data/test_formating.py b/tests/test_data/test_formating.py index b40be4a57a..c3607e64a3 100644 --- a/tests/test_data/test_formating.py +++ b/tests/test_data/test_formating.py @@ -29,7 +29,7 @@ def test_to_tensor(): results = dict(str='0') to_tensor(results) - # convert tensor, numpy, squence, int, float to tensor + # convert tensor, numpy, sequence, int, float to tensor target_keys = ['tensor', 'numpy', 'sequence', 'int', 'float'] to_tensor = ToTensor(target_keys) original_results = dict( diff --git a/tests/test_data/test_pipelines/test_loadings/test_decode.py b/tests/test_data/test_pipelines/test_loadings/test_decode.py index 2fc927bd80..b3444beed1 100644 --- a/tests/test_data/test_pipelines/test_loadings/test_decode.py +++ b/tests/test_data/test_pipelines/test_loadings/test_decode.py @@ -408,7 +408,7 @@ def test_rawframe_decode(self): 240, 320) assert results['original_shape'] == (240, 320) - # test frame selector in turbojpeg decording backend + # test frame selector in turbojpeg decoding backend # when start_index = 0 inputs = copy.deepcopy(self.frame_results) inputs['frame_inds'] = np.arange(0, self.total_frames, 5) @@ -423,7 +423,7 @@ def test_rawframe_decode(self): 320, 3) assert results['original_shape'] == (240, 320) - # test frame selector in turbojpeg decording backend + # test frame selector in turbojpeg decoding backend inputs = copy.deepcopy(self.frame_results) inputs['frame_inds'] = np.arange(1, self.total_frames, 5) frame_selector = RawFrameDecode( diff --git a/tests/test_runtime/test_eval_hook.py b/tests/test_runtime/test_eval_hook.py index b10798159b..3e2c323c13 100644 --- a/tests/test_runtime/test_eval_hook.py +++ b/tests/test_runtime/test_eval_hook.py @@ -211,7 +211,7 @@ def test_eval_hook(): assert osp.exists(ckpt_path) assert runner.meta['hook_msgs']['best_score'] == -3 - # Test the EvalHook when resume happend + # Test the EvalHook when resume happened data_loader = DataLoader(EvalDataset()) eval_hook = EvalHook(data_loader, save_best='acc') with tempfile.TemporaryDirectory() as tmpdir: diff --git a/tests/test_runtime/test_precise_bn.py b/tests/test_runtime/test_precise_bn.py index c4825bcca3..42d5fed7e6 100644 --- a/tests/test_runtime/test_precise_bn.py +++ b/tests/test_runtime/test_precise_bn.py @@ -123,7 +123,7 @@ def test_precise_bn(): model=model, batch_processor=None, optimizer=optimizer, logger=logger) with pytest.raises(AssertionError): - # num_iters should be no larget than total + # num_iters should be no larger than total # iters precise_bn_hook = PreciseBNHook(precise_bn_loader, num_iters=5) runner.register_hook(precise_bn_hook) diff --git a/tools/analysis/check_videos.py b/tools/analysis/check_videos.py index 323f1c3cb9..d2b4576193 100644 --- a/tools/analysis/check_videos.py +++ b/tools/analysis/check_videos.py @@ -127,7 +127,7 @@ def _do_check_videos(lock, dataset, output_file, idx): # prepare for checking if os.path.exists(args.output_file): - 
# remove exsiting output file + # remove existing output file os.remove(args.output_file) pool = Pool(args.num_processes) lock = Manager().Lock() diff --git a/tools/data/activitynet/tsn_feature_extraction.py b/tools/data/activitynet/tsn_feature_extraction.py index 525f44deee..c3d53f46e5 100644 --- a/tools/data/activitynet/tsn_feature_extraction.py +++ b/tools/data/activitynet/tsn_feature_extraction.py @@ -110,7 +110,7 @@ def main(): assert output_file.endswith('.pkl') length = int(length) - # prepare a psuedo sample + # prepare a pseudo sample tmpl = dict( frame_dir=frame_dir, total_frames=length, diff --git a/tools/data/hvu/download.py b/tools/data/hvu/download.py index fcdfaee86f..2ab18e8434 100644 --- a/tools/data/hvu/download.py +++ b/tools/data/hvu/download.py @@ -51,7 +51,7 @@ def download_clip(video_identifier, output_filename: str File path where the video will be stored. start_time: float - Indicates the begining time in seconds from where the video + Indicates the beginning time in seconds from where the video will be trimmed. end_time: float Indicates the ending time in seconds of the trimmed video. diff --git a/tools/data/kinetics/README.md b/tools/data/kinetics/README.md index 72c64d7bac..725190ee41 100644 --- a/tools/data/kinetics/README.md +++ b/tools/data/kinetics/README.md @@ -66,7 +66,7 @@ For better decoding speed, you can resize the original videos into smaller sized python ../resize_videos.py ../../../data/${DATASET}/videos_train/ ../../../data/${DATASET}/videos_train_256p_dense_cache --dense --level 2 ``` -You can also download from [Academic Torrents](https://academictorrents.com/) ([kinetics400](https://academictorrents.com/details/184d11318372f70018cf9a72ef867e2fb9ce1d26) & [kinetics700](https://academictorrents.com/details/49f203189fb69ae96fb40a6d0e129949e1dfec98) with short edge 256 pixels are avaiable) and [cvdfoundation/kinetics-dataset](https://github.com/cvdfoundation/kinetics-dataset) (Host by Common Visual Data Foundation and Kinetics400/Kinetics600/Kinetics-700-2020 are available) +You can also download from [Academic Torrents](https://academictorrents.com/) ([kinetics400](https://academictorrents.com/details/184d11318372f70018cf9a72ef867e2fb9ce1d26) & [kinetics700](https://academictorrents.com/details/49f203189fb69ae96fb40a6d0e129949e1dfec98) with short edge 256 pixels are available) and [cvdfoundation/kinetics-dataset](https://github.com/cvdfoundation/kinetics-dataset) (Host by Common Visual Data Foundation and Kinetics400/Kinetics600/Kinetics-700-2020 are available) ## Step 3. Extract RGB and Flow diff --git a/tools/data/omnisource/README.md b/tools/data/omnisource/README.md index 08c4c1475e..ef3ea7e442 100644 --- a/tools/data/omnisource/README.md +++ b/tools/data/omnisource/README.md @@ -33,9 +33,9 @@ OmniSource/ ├── annotations │ ├── googleimage_200 │ │ ├── googleimage_200.txt File list of all valid images crawled from Google. -│ │ ├── tsn_8seg_googleimage_200_duplicate.txt Postive file list of images crawled from Google, which is similar to a validation example. -│ │ ├── tsn_8seg_googleimage_200.txt Postive file list of images crawled from Google, filtered by the teacher model. -│ │ └── tsn_8seg_googleimage_200_wodup.txt Postive file list of images crawled from Google, filtered by the teacher model, after de-duplication. +│ │ ├── tsn_8seg_googleimage_200_duplicate.txt Positive file list of images crawled from Google, which is similar to a validation example. 
+│ │ ├── tsn_8seg_googleimage_200.txt Positive file list of images crawled from Google, filtered by the teacher model. +│ │ └── tsn_8seg_googleimage_200_wodup.txt Positive file list of images crawled from Google, filtered by the teacher model, after de-duplication. │ ├── insimage_200 │ │ ├── insimage_200.txt │ │ ├── tsn_8seg_insimage_200_duplicate.txt @@ -89,7 +89,7 @@ The data should be placed in `data/OmniSource/`. When data preparation finished, data/OmniSource/ ├── annotations │ ├── googleimage_200 -│ │ └── tsn_8seg_googleimage_200_wodup.txt Postive file list of images crawled from Google, filtered by the teacher model, after de-duplication. +│ │ └── tsn_8seg_googleimage_200_wodup.txt Positive file list of images crawled from Google, filtered by the teacher model, after de-duplication. │ ├── insimage_200 │ │ └── tsn_8seg_insimage_200_wodup.txt │ ├── insvideo_200 diff --git a/tools/data/omnisource/README_zh-CN.md b/tools/data/omnisource/README_zh-CN.md index ed85b7c557..90aea5f4d1 100644 --- a/tools/data/omnisource/README_zh-CN.md +++ b/tools/data/omnisource/README_zh-CN.md @@ -88,7 +88,7 @@ OmniSource/ data/OmniSource/ ├── annotations │ ├── googleimage_200 -│ │ └── tsn_8seg_googleimage_200_wodup.txt Postive file list of images crawled from Google, filtered by the teacher model, after de-duplication. +│ │ └── tsn_8seg_googleimage_200_wodup.txt Positive file list of images crawled from Google, filtered by the teacher model, after de-duplication. │ ├── insimage_200 │ │ └── tsn_8seg_insimage_200_wodup.txt │ ├── insvideo_200 diff --git a/tools/deployment/pytorch2onnx.py b/tools/deployment/pytorch2onnx.py index 8852c765d4..178d0e63eb 100644 --- a/tools/deployment/pytorch2onnx.py +++ b/tools/deployment/pytorch2onnx.py @@ -160,7 +160,7 @@ def parse_args(): checkpoint = load_checkpoint(model, args.checkpoint, map_location='cpu') - # conver model to onnx file + # convert model to onnx file pytorch2onnx( model, args.shape, diff --git a/tools/train.py b/tools/train.py index dcbda35c6c..705473665e 100644 --- a/tools/train.py +++ b/tools/train.py @@ -168,7 +168,7 @@ def main(): datasets = [build_dataset(cfg.data.train)] if len(cfg.workflow) == 2: - # For simplicity, omnisource is not compatiable with val workflow, + # For simplicity, omnisource is not compatible with val workflow, # we recommend you to use `--validate` assert not cfg.omnisource if args.validate: From 7d24eb30eb87bb8426fe7259159d8ffa609fccc1 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Fri, 8 Oct 2021 00:10:27 +0800 Subject: [PATCH 272/414] [Doc] Update ChangeLog (#1210) * update * update --- docs/changelog.md | 44 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/docs/changelog.md b/docs/changelog.md index f37e2c379a..4f09057489 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -1,5 +1,49 @@ ## Changelog +### 0.19.0 (07/10/2021) + +**Highlights** + +- Support ST-GCN +- Refactor the inference API +- Add code spell check hook + +**New Features** + +- Support ST-GCN ([#1123](https://github.com/open-mmlab/mmaction2/pull/1123)) + +**Improvement** + +- Add label maps for every dataset ([#1127](https://github.com/open-mmlab/mmaction2/pull/1127)) +- Remove useless code MultiGroupCrop ([#1180](https://github.com/open-mmlab/mmaction2/pull/1180)) +- Refactor Inference API ([#1191](https://github.com/open-mmlab/mmaction2/pull/1191)) +- Add code spell check hook ([#1208](https://github.com/open-mmlab/mmaction2/pull/1208)) +- Use docker in CI 
([#1159](https://github.com/open-mmlab/mmaction2/pull/1159))
+
+**Documentation**
+
+- Update metafiles to new OpenMMLab protocols ([#1134](https://github.com/open-mmlab/mmaction2/pull/1134))
+- Switch to new doc style ([#1160](https://github.com/open-mmlab/mmaction2/pull/1160))
+- Improve the ERROR message ([#1203](https://github.com/open-mmlab/mmaction2/pull/1203))
+- Fix invalid URL in getting_started ([#1169](https://github.com/open-mmlab/mmaction2/pull/1169))
+
+**Bug and Typo Fixes**
+
+- Compatible with new MMClassification ([#1139](https://github.com/open-mmlab/mmaction2/pull/1139))
+- Add missing runtime dependencies ([#1144](https://github.com/open-mmlab/mmaction2/pull/1144))
+- Fix THUMOS tag proposals path ([#1156](https://github.com/open-mmlab/mmaction2/pull/1156))
+- Fix LoadHVULabel ([#1194](https://github.com/open-mmlab/mmaction2/pull/1194))
+- Switch the default value of `persistent_workers` to False ([#1202](https://github.com/open-mmlab/mmaction2/pull/1202))
+- Fix `_freeze_stages` for MobileNetV2 ([#1193](https://github.com/open-mmlab/mmaction2/pull/1193))
+- Fix resume when building rawframes ([#1150](https://github.com/open-mmlab/mmaction2/pull/1150))
+- Fix device bug for class weight ([#1188](https://github.com/open-mmlab/mmaction2/pull/1188))
+- Correct Arg names in extract_audio.py ([#1148](https://github.com/open-mmlab/mmaction2/pull/1148))
+
+**ModelZoo**
+
+- Add TSM-MobileNetV2 ported from TSM ([#1163](https://github.com/open-mmlab/mmaction2/pull/1163))
+- Add ST-GCN for NTURGB+D-XSub-60 ([#1123](https://github.com/open-mmlab/mmaction2/pull/1123))
+
 ### 0.18.0 (02/09/2021)

 **Improvement**

From c2725e7a41515ef36d5dd170e337153dac863101 Mon Sep 17 00:00:00 2001
From: Haodong Duan
Date: Fri, 8 Oct 2021 00:12:53 +0800
Subject: [PATCH 273/414] Bump Version to 0.19.0 (#1211)

---
 README.md           | 2 +-
 README_zh-CN.md     | 2 +-
 mmaction/version.py | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/README.md b/README.md
index 050bfa1473..7b1549c7a5 100644
--- a/README.md
+++ b/README.md
@@ -59,7 +59,7 @@ The master branch works with **PyTorch 1.3+**.

 ## Changelog

-v0.18.0 was released in 02/09/2021. Please refer to [changelog.md](docs/changelog.md) for details and release history.
+v0.19.0 was released in 07/10/2021. Please refer to [changelog.md](docs/changelog.md) for details and release history.

 ## Benchmark

diff --git a/README_zh-CN.md b/README_zh-CN.md
index fa732f9cfd..9ec7fa2a86 100644
--- a/README_zh-CN.md
+++ b/README_zh-CN.md
@@ -53,7 +53,7 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLa

 ## 更新记录

-v0.18.0 版本已于 2021 年 9 月 2 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史
+v0.19.0 版本已于 2021 年 10 月 7 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史

 ## 基准测试

diff --git a/mmaction/version.py b/mmaction/version.py
index bd2fd41acf..ad1d1efcfb 100644
--- a/mmaction/version.py
+++ b/mmaction/version.py
@@ -1,6 +1,6 @@
 # Copyright (c) Open-MMLab. All rights reserved.
-__version__ = '0.18.0' +__version__ = '0.19.0' def parse_version_info(version_str): From d8f55cc72cff68fffc828e3638237972892cf641 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Sun, 10 Oct 2021 17:18:45 +0800 Subject: [PATCH 274/414] [Doc] Revise README and add projects.md (#1214) * Add projects.md * remove benchmark table * fix trn conf * fix timesformer position * add projects in README * cn * revise README * minor * adjust seq * CN * title * no CN Co-authored-by: Haodong Duan --- README.md | 343 ++++++++++++++++++++-------------------- README_zh-CN.md | 308 +++++++++++++++++++----------------- docs/benchmark.md | 4 +- docs/projects.md | 23 +++ docs_zh_CN/benchmark.md | 4 +- 5 files changed, 355 insertions(+), 327 deletions(-) create mode 100644 docs/projects.md diff --git a/README.md b/README.md index 7b1549c7a5..4131fcd596 100644 --- a/README.md +++ b/README.md @@ -20,213 +20,208 @@ It is a part of the [OpenMMLab](http://openmmlab.org/) project. The master branch works with **PyTorch 1.3+**.
-[Figure] Action Recognition Results on Kinetics-400
-[Figure] Spatio-Temporal Action Detection Results on AVA-2.1
-[Figure] Skeleton-based Action Recognition Results on NTU-RGB+D-120
+[Figure] Action Recognition Results on Kinetics-400
+[Figure] Skeleton-based Action Recognition Results on NTU-RGB+D-120
+[Figure] Spatio-Temporal Action Detection Results on AVA-2.1
    - -### Major Features - -- **Modular design** - - We decompose the video understanding framework into different components and one can easily construct a customized - video understanding framework by combining different modules. - -- **Support for various datasets** - - The toolbox directly supports multiple datasets, UCF101, Kinetics-[400/600/700], Something-Something V1&V2, Moments in Time, Multi-Moments in Time, THUMOS14, etc. - -- **Support for multiple video understanding frameworks** - - MMAction2 implements popular frameworks for video understanding: - - - For action recognition, various algorithms are implemented, including TSN, TSM, TIN, R(2+1)D, I3D, SlowOnly, SlowFast, CSN, Non-local, etc. - - - For temporal action localization, we implement BSN, BMN, SSN. - - - For spatial temporal detection, we implement SlowOnly, SlowFast. - -- **Well tested and documented** - - We provide detailed documentation and API reference, as well as unittests. - -## Changelog - -v0.19.0 was released in 07/10/2021. Please refer to [changelog.md](docs/changelog.md) for details and release history. - -## Benchmark -| Model |input| io backend | batch size x gpus | MMAction2 (s/iter) | MMAction (s/iter) | Temporal-Shift-Module (s/iter) | PySlowFast (s/iter) | -| :--- | :---------------:|:---------------:| :---------------:| :---------------: | :--------------------: | :----------------------------: | :-----------------: | -| [TSN](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py)| 256p rawframes |Memcached| 32x8|**[0.32](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_rawframes_memcahed_32x8.zip)** | [0.38](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction/tsn_256p_rawframes_memcached_32x8.zip)| [0.42](https://download.openmmlab.com/mmaction/benchmark/recognition/temporal_shift_module/tsn_256p_rawframes_memcached_32x8.zip)| x | -| [TSN](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py)| 256p dense-encoded video |Disk| 32x8|**[0.61](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_fast_videos_disk_32x8.zip)**| x | x | TODO | -|[I3D heavy](/configs/recognition/i3d/i3d_r50_video_heavy_8x8x1_100e_kinetics400_rgb.py)|256p videos|Disk |8x8| **[0.34](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/i3d_heavy_256p_videos_disk_8x8.zip)** | x | x | [0.44](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_i3d_r50_8x8_video.log) | -| [I3D](/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py)|256p rawframes|Memcached|8x8| **[0.43](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/i3d_256p_rawframes_memcahed_8x8.zip)** | [0.56](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction/i3d_256p_rawframes_memcached_8x8.zip) | x | x | -| [TSM](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |256p rawframes|Memcached| 8x8|**[0.31](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsm_256p_rawframes_memcahed_8x8.zip)** | x | [0.41](https://download.openmmlab.com/mmaction/benchmark/recognition/temporal_shift_module/tsm_256p_rawframes_memcached_8x8.zip) | x | -| [Slowonly](/configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py)|256p videos|Disk|8x8 | **[0.32](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/slowonly_256p_videos_disk_8x8.zip)** | TODO | x | 
[0.34](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_slowonly_r50_4x16_video.log) | -| [Slowfast](/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py)|256p videos|Disk|8x8 | **[0.69](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/slowfast_256p_videos_disk_8x8.zip)** | x | x | [1.04](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_slowfast_r50_4x16_video.log) | -| [R(2+1)D](/configs/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb.py)|256p videos |Disk| 8x8|**[0.45](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/r2plus1d_256p_videos_disk_8x8.zip)** | x | x | x | +## Major Features -Details can be found in [benchmark](docs/benchmark.md). +- **Modular design**: We decompose a video understanding framework into different components. One can easily construct a customized video understanding framework by combining different modules. -## ModelZoo +- **Support four major video understanding tasks**: MMAction2 implements various algorithms for multiple video understanding tasks, including action recognition, action localization, spatio-temporal action detection, and skeleton-based action detection. We support **27** different algorithms and **20** different datasets for the four major tasks. -Supported methods for Action Recognition: +- **Well tested and documented**: We provide detailed documentation and API reference, as well as unit tests. -
    -(click to collapse) +## News -- ✅ [TSN](configs/recognition/tsn/README.md) (ECCV'2016) -- ✅ [TSM](configs/recognition/tsm/README.md) (ICCV'2019) -- ✅ [TSM Non-Local](configs/recognition/tsm/README.md) (ICCV'2019) -- ✅ [R(2+1)D](configs/recognition/r2plus1d/README.md) (CVPR'2018) -- ✅ [I3D](configs/recognition/i3d/README.md) (CVPR'2017) -- ✅ [I3D Non-Local](configs/recognition/i3d/README.md) (CVPR'2018) -- ✅ [SlowOnly](configs/recognition/slowonly/README.md) (ICCV'2019) -- ✅ [SlowFast](configs/recognition/slowfast/README.md) (ICCV'2019) -- ✅ [CSN](configs/recognition/csn/README.md) (ICCV'2019) -- ✅ [TIN](configs/recognition/tin/README.md) (AAAI'2020) -- ✅ [TPN](configs/recognition/tpn/README.md) (CVPR'2020) -- ✅ [C3D](configs/recognition/c3d/README.md) (CVPR'2014) -- ✅ [X3D](configs/recognition/x3d/README.md) (CVPR'2020) -- ✅ [OmniSource](configs/recognition/omnisource/README.md) (ECCV'2020) -- ✅ [MultiModality: Audio](configs/recognition_audio/resnet/README.md) (ArXiv'2020) -- ✅ [TANet](configs/recognition/tanet/README.md) (ArXiv'2020) -- ✅ [TRN](configs/recognition/trn/README.md) (CVPR'2015) -- ✅ [Timesformer](configs/recognition/timesformer/README.md) (ICML'2021) +- (2021-09-11) We support ST-GCN, a well-known GCN-based approach for skeleton-based action recognition! -
    +**Release**: v0.19.0 was released in 07/10/2021. Please refer to [changelog.md](docs/changelog.md) for details and release history. -Supported methods for Temporal Action Detection: - -
    -(click to collapse) - -- ✅ [BSN](configs/localization/bsn/README.md) (ECCV'2018) -- ✅ [BMN](configs/localization/bmn/README.md) (ICCV'2019) -- ✅ [SSN](configs/localization/ssn/README.md) (ICCV'2017) - -
    - -Supported methods for Spatial Temporal Action Detection: - -
    -(click to collapse) +## Installation -- ✅ [ACRN](configs/detection/acrn/README.md) (ECCV'2018) -- ✅ [SlowOnly+Fast R-CNN](configs/detection/ava/README.md) (ICCV'2019) -- ✅ [SlowFast+Fast R-CNN](configs/detection/ava/README.md) (ICCV'2019) -- ✅ [Long-Term Feature Bank](configs/detection/lfb/README.md) (CVPR'2019) +Please refer to [install.md](docs/install.md) for installation. -
    +## Get Started -Supported methods for Skeleton-based Action Recognition: +Please see [getting_started.md](docs/getting_started.md) for the basic usage of MMAction2. +There are also tutorials: -
    -(click to collapse) +- [learn about configs](docs/tutorials/1_config.md) +- [finetuning models](docs/tutorials/2_finetune.md) +- [adding new dataset](docs/tutorials/3_new_dataset.md) +- [designing data pipeline](docs/tutorials/4_data_pipeline.md) +- [adding new modules](docs/tutorials/5_new_modules.md) +- [exporting model to onnx](docs/tutorials/6_export_model.md) +- [customizing runtime settings](docs/tutorials/7_customize_runtime.md) -- ✅ [PoseC3D](configs/skeleton/posec3d/README.md) (ArXiv'2021) -- ✅ [STGCN](configs/skeleton/stgcn/README.md) (AAAI'2018) +A Colab tutorial is also provided. You may preview the notebook [here](demo/mmaction2_tutorial.ipynb) or directly [run](https://colab.research.google.com/github/open-mmlab/mmaction2/blob/master/demo/mmaction2_tutorial.ipynb) on Colab. -
    +## Supported Methods + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Action Recognition
C3D (CVPR'2014) | TSN (ECCV'2016) | I3D (CVPR'2017) | I3D Non-Local (CVPR'2018) | R(2+1)D (CVPR'2018)
TRN (ECCV'2018) | TSM (ICCV'2019) | TSM Non-Local (ICCV'2019) | SlowOnly (ICCV'2019) | SlowFast (ICCV'2019)
CSN (ICCV'2019) | TIN (AAAI'2020) | TPN (CVPR'2020) | X3D (CVPR'2020) | OmniSource (ECCV'2020)
MultiModality: Audio (ArXiv'2020) | TANet (ArXiv'2020) | TimeSformer (ICML'2021)
    Action Localization
SSN (ICCV'2017) | BSN (ECCV'2018) | BMN (ICCV'2019)
    Spatio-Temporal Action Detection
ACRN (ECCV'2018) | SlowOnly+Fast R-CNN (ICCV'2019) | SlowFast+Fast R-CNN (ICCV'2019) | LFB (CVPR'2019)
    Skeleton-based Action Recognition
ST-GCN (AAAI'2018) | PoseC3D (ArXiv'2021)
    Results and models are available in the *README.md* of each method's config directory. -A summary can be found in the [**model zoo**](https://mmaction2.readthedocs.io/en/latest/recognition_models.html) page. +A summary can be found on the [**model zoo**](https://mmaction2.readthedocs.io/en/latest/recognition_models.html) page. -We will keep up with the latest progress of the community, and support more popular algorithms and frameworks. +We will keep up with the latest progress of the community and support more popular algorithms and frameworks. If you have any feature requests, please feel free to leave a comment in [Issues](https://github.com/open-mmlab/mmaction2/issues/19). -## Dataset +## Supported Datasets + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Action Recognition
HMDB51 (Homepage) (ICCV'2011) | UCF101 (Homepage) (CRCV-IR-12-01) | ActivityNet (Homepage) (CVPR'2015) | Kinetics-[400/600/700] (Homepage) (CVPR'2017)
SthV1 (Homepage) (ICCV'2017) | SthV2 (Homepage) (ICCV'2017) | Diving48 (Homepage) (ECCV'2018) | Jester (Homepage) (ICCV'2019)
Moments in Time (Homepage) (TPAMI'2019) | Multi-Moments in Time (Homepage) (ArXiv'2019) | HVU (Homepage) (ECCV'2020) | OmniSource (Homepage) (ECCV'2020)
    FineGYM (Homepage) (CVPR'2020)
    Action Localization
THUMOS14 (Homepage) (THUMOS Challenge 2014) | ActivityNet (Homepage) (CVPR'2015)
    Spatio-Temporal Action Detection
UCF101-24* (Homepage) (CRCV-IR-12-01) | JHMDB* (Homepage) (ICCV'2015) | AVA (Homepage) (CVPR'2018)
    Skeleton-based Action Recognition
PoseC3D-FineGYM (Homepage) (ArXiv'2021) | PoseC3D-NTURGB+D (Homepage) (ArXiv'2021)
    + +Datasets marked with * are not fully supported yet, but related dataset preparation steps are provided. A summary can be found on the [**Supported Datasets**](https://mmaction2.readthedocs.io/en/latest/supported_datasets.html) page. -Supported [datasets](https://mmaction2.readthedocs.io/en/latest/supported_datasets.html): - -Supported datasets for Action Recognition: - -
    -(click to collapse) - -- ✅ [UCF101](/tools/data/ucf101/README.md) \[ [Homepage](https://www.crcv.ucf.edu/research/data-sets/ucf101/) \] (CRCV-IR-12-01) -- ✅ [HMDB51](/tools/data/hmdb51/README.md) \[ [Homepage](https://serre-lab.clps.brown.edu/resource/hmdb-a-large-human-motion-database/) \] (ICCV'2011) -- ✅ [Kinetics-[400/600/700]](/tools/data/kinetics/README.md) \[ [Homepage](https://deepmind.com/research/open-source/kinetics) \] (CVPR'2017) -- ✅ [Something-Something V1](/tools/data/sthv1/README.md) \[ [Homepage](https://20bn.com/datasets/something-something/v1) \] (ICCV'2017) -- ✅ [Something-Something V2](/tools/data/sthv2/README.md) \[ [Homepage](https://20bn.com/datasets/something-something) \] (ICCV'2017) -- ✅ [Moments in Time](/tools/data/mit/README.md) \[ [Homepage](http://moments.csail.mit.edu/) \] (TPAMI'2019) -- ✅ [Multi-Moments in Time](/tools/data/mmit/README.md) \[ [Homepage](http://moments.csail.mit.edu/challenge_iccv_2019.html) \] (ArXiv'2019) -- ✅ [HVU](/tools/data/hvu/README.md) \[ [Homepage](https://github.com/holistic-video-understanding/HVU-Dataset) \] (ECCV'2020) -- ✅ [Jester](/tools/data/jester/README.md) \[ [Homepage](https://20bn.com/datasets/jester/v1) \] (ICCV'2019) -- ✅ [GYM](/tools/data/gym/README.md) \[ [Homepage](https://sdolivia.github.io/FineGym/) \] (CVPR'2020) -- ✅ [ActivityNet](/tools/data/activitynet/README.md) \[ [Homepage](http://activity-net.org/) \] (CVPR'2015) -- ✅ [Diving48](/tools/data/diving48/README.md) \[ [Homepage](http://www.svcl.ucsd.edu/projects/resound/dataset.html) \] (ECCV'2018) -- ✅ [OmniSource](/tools/data/omnisource/README.md) \[ [Homepage](https://kennymckormick.github.io/omnisource/) \] (ECCV'2020) - -
    - -Supported datasets for Temporal Action Detection - -
    -(click to collapse) - -- ✅ [ActivityNet](/tools/data/activitynet/README.md) \[ [Homepage](http://activity-net.org/) \] (CVPR'2015) -- ✅ [THUMOS14](/tools/data/thumos14/README.md) \[ [Homepage](https://www.crcv.ucf.edu/THUMOS14/download.html) \] (THUMOS Challenge 2014) - -
    - -Supported datasets for Spatial Temporal Action Detection - -
    -(click to collapse) - -- ✅ [AVA](/tools/data/ava/README.md) \[ [Homepage](https://research.google.com/ava/index.html) \] (CVPR'2018) -- 🔲 [UCF101-24](/tools/data/ucf101_24/README.md) \[ [Homepage](http://www.thumos.info/download.html) \] (CRCV-IR-12-01) -- 🔲 [JHMDB](/tools/data/jhmdb/README.md) \[ [Homepage](http://jhmdb.is.tue.mpg.de/) \] (ICCV'2013) - -
    - -Supported datasets for Skeleton-based Action Detection - -
    -(click to collapse) - -- ✅ [PoseC3D-FineGYM](/tools/data/skeleton/README.md) \[ [Homepage](https://kennymckormick.github.io/posec3d/) \] (arXiv'2021) - -
    - -Datasets marked with 🔲 are not fully supported yet, but related dataset preparation steps are provided. - -## Installation +## Benchmark -Please refer to [install.md](docs/install.md) for installation. +To demonstrate the efficacy and efficiency of our framework, we compare MMAction2 with some other popular frameworks and official releases in terms of speed. Details can be found in [benchmark](docs/benchmark.md). ## Data Preparation Please refer to [data_preparation.md](docs/data_preparation.md) for a general knowledge of data preparation. The supported datasets are listed in [supported_datasets.md](docs/supported_datasets.md) -## Get Started +## FAQ -Please see [getting_started.md](docs/getting_started.md) for the basic usage of MMAction2. -There are also tutorials: +Please refer to [FAQ](docs/faq.md) for frequently asked questions. -- [learn about configs](docs/tutorials/1_config.md) -- [finetuning models](docs/tutorials/2_finetune.md) -- [adding new dataset](docs/tutorials/3_new_dataset.md) -- [designing data pipeline](docs/tutorials/4_data_pipeline.md) -- [adding new modules](docs/tutorials/5_new_modules.md) -- [exporting model to onnx](docs/tutorials/6_export_model.md) -- [customizing runtime settings](docs/tutorials/7_customize_runtime.md) +## Projects built on MMAction2 -A Colab tutorial is also provided. You may preview the notebook [here](demo/mmaction2_tutorial.ipynb) or directly [run](https://colab.research.google.com/github/open-mmlab/mmaction2/blob/master/demo/mmaction2_tutorial.ipynb) on Colab. +Currently, there are many research works and projects built on MMAction2 by users from community, such as: -## FAQ +- Video Swin Transformer. [[paper]](https://arxiv.org/abs/2106.13230)[[github]](https://github.com/SwinTransformer/Video-Swin-Transformer) +- Evidential Deep Learning for Open Set Action Recognition, ICCV 2021 **Oral**. [[paper]](https://arxiv.org/abs/2107.10161)[[github]](https://github.com/Cogito2012/DEAR) +- Rethinking Self-supervised Correspondence Learning: A Video Frame-level Similarity Perspective, ICCV 2021 **Oral**. [[paper]](https://arxiv.org/abs/2103.17263)[[github]](https://github.com/xvjiarui/VFS) -Please refer to [FAQ](docs/faq.md) for frequently asked questions. +etc., check [projects.md](docs/projects.md) to see all related projects. ## License @@ -251,9 +246,9 @@ We appreciate all contributions to improve MMAction2. Please refer to [CONTRIBUT ## Acknowledgement -MMAction2 is an open source project that is contributed by researchers and engineers from various colleges and companies. -We appreciate all the contributors who implement their methods or add new features, as well as users who give valuable feedbacks. -We wish that the toolbox and benchmark could serve the growing research community by providing a flexible toolkit to reimplement existing methods and develop their own new models. +MMAction2 is an open-source project that is contributed by researchers and engineers from various colleges and companies. +We appreciate all the contributors who implement their methods or add new features and users who give valuable feedback. +We wish that the toolbox and benchmark could serve the growing research community by providing a flexible toolkit to reimplement existing methods and develop their new models. 
## Projects in OpenMMLab diff --git a/README_zh-CN.md b/README_zh-CN.md index 9ec7fa2a86..e828780625 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -19,194 +19,204 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLa 主分支代码目前支持 **PyTorch 1.3 以上**的版本
-[图示] Kinetics-400 上的动作识别
-[图示] AVA-2.1 上的时空动作检测
-[图示] NTURGB+D-120 上的基于人体姿态的动作识别
+[图示] Kinetics-400 上的动作识别
+[图示] NTURGB+D-120 上的基于人体姿态的动作识别
+[图示] AVA-2.1 上的时空动作检测

    ## 主要特性 -- **模块化设计** - MMAction2 将统一的视频理解框架解耦成不同的模块组件,通过组合不同的模块组件,用户可以便捷地构建自定义的视频理解模型 - -- **支持多样的数据集** - MMAction2 支持多种数据集的准备和构建,包括 UCF101, Kinetics-[400/600/700], Something-Something V1&V2, Moments in Time, Multi-Moments in Time, THUMOS14 等 - -- **支持多种视频理解任务** - MMAction2 支持多种主流的视频理解任务 +- **模块化设计**:MMAction2 将统一的视频理解框架解耦成不同的模块组件,通过组合不同的模块组件,用户可以便捷地构建自定义的视频理解模型 - - 动作识别:TSN, TSM, TIN, R(2+1)D, I3D, SlowOnly, SlowFast, CSN, Non-local 等 - - 时序动作检测:BSN, BMN, SSN - - 时空动作检测:SlowOnly + Fast-RCNN, SlowFast + Fast-RCNN +- **支持多种任务和数据集**:MMAction2 支持多种视频理解任务,包括动作识别,时序动作检测,时空动作检测以及基于人体姿态的动作识别,总共支持 **27** 种算法和 **20** 种数据集 - 具体可参考 [模型库](#模型库) - -- **详尽的单元测试和文档** - MMAction2 提供了详尽的说明文档,API 接口说明,全面的单元测试,以供社区参考 +- **详尽的单元测试和文档**:MMAction2 提供了详尽的说明文档,API 接口说明,全面的单元测试,以供社区参考 ## 更新记录 -v0.19.0 版本已于 2021 年 10 月 7 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史 - -## 基准测试 - -| Model |input| io backend | batch size x gpus | MMAction2 (s/iter) | MMAction (s/iter) | Temporal-Shift-Module (s/iter) | PySlowFast (s/iter) | -| :--- | :---------------:|:---------------:| :---------------:| :---------------: | :--------------------: | :----------------------------: | :-----------------: | -| [TSN](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py)| 256p rawframes |Memcached| 32x8|**[0.32](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_rawframes_memcahed_32x8.zip)** | [0.38](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction/tsn_256p_rawframes_memcached_32x8.zip)| [0.42](https://download.openmmlab.com/mmaction/benchmark/recognition/temporal_shift_module/tsn_256p_rawframes_memcached_32x8.zip)| x | -| [TSN](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py)| 256p dense-encoded video |Disk| 32x8|**[0.61](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_fast_videos_disk_32x8.zip)**| x | x | TODO | -|[I3D heavy](/configs/recognition/i3d/i3d_r50_video_heavy_8x8x1_100e_kinetics400_rgb.py)|256p videos|Disk |8x8| **[0.34](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/i3d_heavy_256p_videos_disk_8x8.zip)** | x | x | [0.44](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_i3d_r50_8x8_video.log) | -| [I3D](/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py)|256p rawframes|Memcached|8x8| **[0.43](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/i3d_256p_rawframes_memcahed_8x8.zip)** | [0.56](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction/i3d_256p_rawframes_memcached_8x8.zip) | x | x | -| [TSM](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |256p rawframes|Memcached| 8x8|**[0.31](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsm_256p_rawframes_memcahed_8x8.zip)** | x | [0.41](https://download.openmmlab.com/mmaction/benchmark/recognition/temporal_shift_module/tsm_256p_rawframes_memcached_8x8.zip) | x | -| [Slowonly](/configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py)|256p videos|Disk|8x8 | **[0.32](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/slowonly_256p_videos_disk_8x8.zip)** | TODO | x | [0.34](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_slowonly_r50_4x16_video.log) | -| [Slowfast](/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py)|256p videos|Disk|8x8 | 
**[0.69](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/slowfast_256p_videos_disk_8x8.zip)** | x | x | [1.04](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_slowfast_r50_4x16_video.log) | -| [R(2+1)D](/configs/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb.py)|256p videos |Disk| 8x8|**[0.45](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/r2plus1d_256p_videos_disk_8x8.zip)** | x | x | x | - -更多详情可见 [基准测试](/docs_zh_CN/benchmark.md) - -## 模型库 - -支持的行为识别方法: - -
    -(点击收起) - -- ✅ [TSN](/configs/recognition/tsn/README_zh-CN.md) (ECCV'2016) -- ✅ [TSM](/configs/recognition/tsm/README_zh-CN.md) (ICCV'2019) -- ✅ [TSM Non-Local](/configs/recognition/tsm/README_zh-CN.md) (ICCV'2019) -- ✅ [R(2+1)D](/configs/recognition/r2plus1d/README_zh-CN.md) (CVPR'2018) -- ✅ [I3D](/configs/recognition/i3d/README_zh-CN.md) (CVPR'2017) -- ✅ [I3D Non-Local](/configs/recognition/i3d/README_zh-CN.md) (CVPR'2018) -- ✅ [SlowOnly](/configs/recognition/slowonly/README_zh-CN.md) (ICCV'2019) -- ✅ [SlowFast](/configs/recognition/slowfast/README_zh-CN.md) (ICCV'2019) -- ✅ [CSN](/configs/recognition/csn/README_zh-CN.md) (ICCV'2019) -- ✅ [TIN](/configs/recognition/tin/README_zh-CN.md) (AAAI'2020) -- ✅ [TPN](/configs/recognition/tpn/README_zh-CN.md) (CVPR'2020) -- ✅ [C3D](/configs/recognition/c3d/README_zh-CN.md) (CVPR'2014) -- ✅ [X3D](/configs/recognition/x3d/README_zh-CN.md) (CVPR'2020) -- ✅ [OmniSource](/configs/recognition/omnisource/README_zh-CN.md) (ECCV'2020) -- ✅ [MultiModality: Audio](/configs/recognition_audio/resnet/README_zh-CN.md) (ArXiv'2020) -- ✅ [TANet](/configs/recognition/tanet/README_zh-CN.md) (ArXiv'2020) -- ✅ [TRN](/configs/recognition/trn/README_zh-CN.md) (CVPR'2015) -- ✅ [PoseC3D](configs/skeleton/posec3d/README.md) (ArXiv'2021) - -
    +- 2021-09-11 支持 ST-GCN,一种广泛使用的基于人体姿态与 GCN 的动作识别方法! -支持的时序动作检测方法: - -
    -(点击收起) - -- ✅ [BSN](/configs/localization/bsn/README_zh-CN.md) (ECCV'2018) -- ✅ [BMN](/configs/localization/bmn/README_zh-CN.md) (ICCV'2019) -- ✅ [SSN](/configs/localization/ssn/README_zh-CN.md) (ICCV'2017) - -
    +v0.19.0 版本已于 2021 年 10 月 7 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史 -支持的时空检测方法: +## 安装 -
    -(点击收起) +请参考 [安装指南](/docs_zh_CN/install.md) 进行安装 -- ✅ [ACRN](configs/detection/acrn/README_zh-CN.md) (ECCV'2018) -- ✅ [SlowOnly+Fast R-CNN](/configs/detection/ava/README_zh-CN.md) (ICCV'2019) -- ✅ [SlowFast+Fast R-CNN](/configs/detection/ava/README_zh-CN.md) (ICCV'2019) -- ✅ [Long-Term Feature Bank](/configs/detection/lfb/README_zh-CN.md) (CVPR'2019) +## 教程 -
    +请参考 [基础教程](/docs_zh_CN/getting_started.md) 了解 MMAction2 的基本使用。MMAction2也提供了其他更详细的教程: -支持的基于骨骼点的动作识别方法: +- [如何编写配置文件](/docs_zh_CN/tutorials/1_config.md) +- [如何微调模型](/docs_zh_CN/tutorials/2_finetune.md) +- [如何增加新数据集](/docs_zh_CN/tutorials/3_new_dataset.md) +- [如何设计数据处理流程](/docs_zh_CN/tutorials/4_data_pipeline.md) +- [如何增加新模块](/docs_zh_CN/tutorials/5_new_modules.md) +- [如何导出模型为 onnx 格式](/docs_zh_CN/tutorials/6_export_model.md) +- [如何自定义模型运行参数](/docs_zh_CN/tutorials/7_customize_runtime.md) -
    -(点击收起) +MMAction2 也提供了相应的中文 Colab 教程,可以点击 [这里](https://colab.research.google.com/github/open-mmlab/mmaction2/blob/master/demo/mmaction2_tutorial_zh-CN.ipynb) 进行体验! -- ✅ [PoseC3D](configs/skeleton/posec3d/README.md) (ArXiv'2021) -- ✅ [STGCN](configs/skeleton/stgcn/README.md) (AAAI'2018) +## 模型库 -
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    行为识别方法
C3D (CVPR'2014) | TSN (ECCV'2016) | I3D (CVPR'2017) | I3D Non-Local (CVPR'2018) | R(2+1)D (CVPR'2018)
TRN (ECCV'2018) | TSM (ICCV'2019) | TSM Non-Local (ICCV'2019) | SlowOnly (ICCV'2019) | SlowFast (ICCV'2019)
CSN (ICCV'2019) | TIN (AAAI'2020) | TPN (CVPR'2020) | X3D (CVPR'2020) | OmniSource (ECCV'2020)
MultiModality: Audio (ArXiv'2020) | TANet (ArXiv'2020) | TimeSformer (ICML'2021)
    时序动作检测方法
SSN (ICCV'2017) | BSN (ECCV'2018) | BMN (ICCV'2019)
    时空动作检测方法
ACRN (ECCV'2018) | SlowOnly+Fast R-CNN (ICCV'2019) | SlowFast+Fast R-CNN (ICCV'2019) | LFB (CVPR'2019)
    基于骨骼点的动作识别方法
ST-GCN (AAAI'2018) | PoseC3D (ArXiv'2021)
    各个模型的结果和设置都可以在对应的 config 目录下的 *README_zh-CN.md* 中查看。整体的概况也可也在 [**模型库**](https://mmaction2.readthedocs.io/zh_CN/latest/recognition_models.html) 页面中查看 -我们将跟进学界的最新进展,并支持更多算法和框架。如果您对 MMAction2 有任何功能需求,请随时在 [问题](https://github.com/open-mmlab/mmaction2/issues/19) 中留言。 +MMAction2 将跟进学界的最新进展,并支持更多算法和框架。如果您对 MMAction2 有任何功能需求,请随时在 [问题](https://github.com/open-mmlab/mmaction2/issues/19) 中留言。 ## 数据集 -支持的 [数据集](https://mmaction2.readthedocs.io/zh_CN/latest/supported_datasets.html): - -支持的动作识别数据集: - -
    -(点击收起) - -- ✅ [UCF101](/tools/data/ucf101/README_zh-CN.md) \[ [主页](https://www.crcv.ucf.edu/research/data-sets/ucf101/) \] (CRCV-IR-12-01) -- ✅ [HMDB51](/tools/data/hmdb51/README_zh-CN.md) \[ [主页](https://serre-lab.clps.brown.edu/resource/hmdb-a-large-human-motion-database/) \] (ICCV'2011) -- ✅ [Kinetics-[400/600/700]](/tools/data/kinetics/README_zh-CN.md) \[ [主页](https://deepmind.com/research/open-source/kinetics) \] (CVPR'2017) -- ✅ [Something-Something V1](/tools/data/sthv1/README_zh-CN.md) \[ [主页](https://20bn.com/datasets/something-something/v1) \] (ICCV'2017) -- ✅ [Something-Something V2](/tools/data/sthv2/README_zh-CN.md) \[ [主页](https://20bn.com/datasets/something-something) \] (ICCV'2017) -- ✅ [Moments in Time](/tools/data/mit/README_zh-CN.md) \[ [主页](http://moments.csail.mit.edu/) \] (TPAMI'2019) -- ✅ [Multi-Moments in Time](/tools/data/mmit/README_zh-CN.md) \[ [主页](http://moments.csail.mit.edu/challenge_iccv_2019.html) \] (ArXiv'2019) -- ✅ [HVU](/tools/data/hvu/README_zh-CN.md) \[ [主页](https://github.com/holistic-video-understanding/HVU-Dataset) \] (ECCV'2020) -- ✅ [Jester](/tools/data/jester/README_zh-CN.md) \[ [主页](https://20bn.com/datasets/jester/v1) \] (ICCV'2019) -- ✅ [GYM](/tools/data/gym/README_zh-CN.md) \[ [主页](https://sdolivia.github.io/FineGym/) \] (CVPR'2020) -- ✅ [ActivityNet](/tools/data/activitynet/README_zh-CN.md) \[ [主页](http://activity-net.org/) \] (CVPR'2015) -- ✅ [Diving48](/tools/data/diving48/README_zh-CN.md) \[ [主页](http://www.svcl.ucsd.edu/projects/resound/dataset.html) \] (ECCV'2018) - -
    - -支持的时序动作检测数据集: - -
    -(点击收起) - -- ✅ [ActivityNet](/tools/data/activitynet/README_zh-CN.md) \[ [主页](http://activity-net.org/) \] (CVPR'2015) -- ✅ [THUMOS14](/tools/data/thumos14/README_zh-CN.md) \[ [主页](https://www.crcv.ucf.edu/THUMOS14/download.html) \] (THUMOS Challenge 2014) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    动作识别数据集
HMDB51 (主页) (ICCV'2011) | UCF101 (主页) (CRCV-IR-12-01) | ActivityNet (主页) (CVPR'2015) | Kinetics-[400/600/700] (主页) (CVPR'2017)
SthV1 (主页) (ICCV'2017) | SthV2 (主页) (ICCV'2017) | Diving48 (主页) (ECCV'2018) | Jester (主页) (ICCV'2019)
Moments in Time (主页) (TPAMI'2019) | Multi-Moments in Time (主页) (ArXiv'2019) | HVU (主页) (ECCV'2020) | OmniSource (主页) (ECCV'2020)
    FineGYM (主页) (CVPR'2020)
    时序动作检测数据集
THUMOS14 (主页) (THUMOS Challenge 2014) | ActivityNet (主页) (CVPR'2015)
    时空动作检测数据集
UCF101-24* (主页) (CRCV-IR-12-01) | JHMDB* (主页) (ICCV'2015) | AVA (主页) (CVPR'2018)
    基于骨骼点的动作识别数据集
PoseC3D-FineGYM (主页) (ArXiv'2021) | PoseC3D-NTURGB+D (主页) (ArXiv'2021)
    + +标记 * 代表对应数据集并未被完全支持,但提供相应的数据准备步骤。整体的概况也可也在 [**数据集**](https://mmaction2.readthedocs.io/en/latest/supported_datasets.html) 页面中查看 -
    - -支持的时空动作检测数据集: - -
    -(点击收起) - -- ✅ [AVA](/tools/data/ava/README_zh-CN.md) \[ [主页](https://research.google.com/ava/index.html) \] (CVPR'2018) -- 🔲 [UCF101-24](/tools/data/ucf101_24/README_zh-CN.md) \[ [主页](http://www.thumos.info/download.html) \] (CRCV-IR-12-01) -- 🔲 [JHMDB](/tools/data/jhmdb/README_zh-CN.md) \[ [主页](http://jhmdb.is.tue.mpg.de/) \] (ICCV'2013) - -
    - -标记 🔲 代表对应数据集并未被完全支持,但提供相应的数据准备步骤。 +## 基准测试 -## 安装 - -请参考 [安装指南](/docs_zh_CN/install.md) 进行安装 +为了验证 MMAction2 框架的高精度和高效率,开发成员将其与当前其他主流框架进行速度对比。更多详情可见 [基准测试](/docs_zh_CN/benchmark.md) ## 数据集准备 请参考 [数据准备](/docs_zh_CN/data_preparation.md) 了解数据集准备概况。所有支持的数据集都列于 [数据集清单](/docs_zh_CN/supported_datasets.md) 中 -## 教程 +## 常见问题 -请参考 [基础教程](/docs_zh_CN/getting_started.md) 了解 MMAction2 的基本使用。MMAction2也提供了其他更详细的教程: +请参考 [FAQ](/docs_zh_CN/faq.md) 了解其他用户的常见问题 -- [如何编写配置文件](/docs_zh_CN/tutorials/1_config.md) -- [如何微调模型](/docs_zh_CN/tutorials/2_finetune.md) -- [如何增加新数据集](/docs_zh_CN/tutorials/3_new_dataset.md) -- [如何设计数据处理流程](/docs_zh_CN/tutorials/4_data_pipeline.md) -- [如何增加新模块](/docs_zh_CN/tutorials/5_new_modules.md) -- [如何导出模型为 onnx 格式](/docs_zh_CN/tutorials/6_export_model.md) -- [如何自定义模型运行参数](/docs_zh_CN/tutorials/7_customize_runtime.md) +## 相关工作 -MMAction2 也提供了相应的中文 Colab 教程,可以点击 [这里](https://colab.research.google.com/github/open-mmlab/mmaction2/blob/master/demo/mmaction2_tutorial_zh-CN.ipynb) 进行体验! +目前有许多研究工作或工程项目基于 MMAction2 搭建,例如: -## 常见问题 +- Evidential Deep Learning for Open Set Action Recognition, ICCV 2021 **Oral**. [[论文]](https://arxiv.org/abs/2107.10161)[[代码]](https://github.com/Cogito2012/DEAR) +- Rethinking Self-supervised Correspondence Learning: A Video Frame-level Similarity Perspective, ICCV 2021 **Oral**. [[论文]](https://arxiv.org/abs/2103.17263)[[代码]](https://github.com/xvjiarui/VFS) +- Video Swin Transformer. [[论文]](https://arxiv.org/abs/2106.13230)[[代码]](https://github.com/SwinTransformer/Video-Swin-Transformer) -请参考 [FAQ](/docs_zh_CN/faq.md) 了解其他用户的常见问题 +更多详情可见 [相关工作](docs/projects.md) ## 许可 diff --git a/docs/benchmark.md b/docs/benchmark.md index ff40f1852e..68472f95b3 100644 --- a/docs/benchmark.md +++ b/docs/benchmark.md @@ -47,8 +47,8 @@ We provide the training log based on which we calculate the average iter time, w | Model |input| io backend | batch size x gpus | MMAction2 (s/iter) | GPU mem(GB) | MMAction (s/iter)| GPU mem(GB) | Temporal-Shift-Module (s/iter) | GPU mem(GB) | PySlowFast (s/iter)| GPU mem(GB) | | :--- | :---------------:|:---------------:| :---------------:| :---------------: | :--------------------: | :----------------------------: | :-----------------: |:-----------------: |:-----------------: |:-----------------: |:-----------------: | | [TSN](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py)| 256p rawframes |Memcached| 32x8|**[0.32](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_rawframes_memcahed_32x8.zip)** | 8.1 |[0.38](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction/tsn_256p_rawframes_memcached_32x8.zip)|8.1| [0.42](https://download.openmmlab.com/mmaction/benchmark/recognition/temporal_shift_module/tsn_256p_rawframes_memcached_32x8.zip)|10.5 | x |x | -| [TSN](/configs/recognition/tsn/tsn_r50_video_1x1x3_100e_kinetics400_rgb.py)| 256p videos |Disk| 32x8|**[1.42](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_videos_disk_32x8.zip)** | 8.1 | x |x |x| x | TODO |TODO| -| [TSN](/configs/recognition/tsn/tsn_r50_video_1x1x3_100e_kinetics400_rgb.py)| 256p dense-encoded video |Disk| 32x8|**[0.61](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_fast_videos_disk_32x8.zip)**|8.1 | x |x| x |x| TODO |TODO| +| [TSN](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py)| 256p videos |Disk| 32x8|**[1.42](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_videos_disk_32x8.zip)** | 8.1 | x |x 
|x| x | TODO |TODO|
+| [TSN](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py)| 256p dense-encoded video |Disk| 32x8|**[0.61](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_fast_videos_disk_32x8.zip)**|8.1 | x |x| x |x| TODO |TODO|
 |[I3D heavy](/configs/recognition/i3d/i3d_r50_video_heavy_8x8x1_100e_kinetics400_rgb.py)|256p videos|Disk |8x8| **[0.34](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/i3d_heavy_256p_videos_disk_8x8.zip)** |4.6|x |x| x |x| [0.44](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_i3d_r50_8x8_video.log) |4.6|
 |[I3D heavy](/configs/recognition/i3d/i3d_r50_video_heavy_8x8x1_100e_kinetics400_rgb.py)|256p dense-encoded video|Disk |8x8| **[0.35](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/i3d_heavy_256p_fast_videos_disk_8x8.zip)**| 4.6 | x | x | x | x | [0.36](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_i3d_r50_8x8_fast_video.log) |4.6|
 | [I3D](/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py)|256p rawframes|Memcached|8x8| **[0.43](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/i3d_256p_rawframes_memcahed_8x8.zip)**|5.0 | [0.56](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction/i3d_256p_rawframes_memcached_8x8.zip)|5.0| x |x| x |x|
diff --git a/docs/projects.md b/docs/projects.md
new file mode 100644
index 0000000000..031f116fb0
--- /dev/null
+++ b/docs/projects.md
@@ -0,0 +1,23 @@
+# Projects based on MMAction2
+
+There are many research works and projects built on MMAction2.
+We list some of them as examples of how to extend MMAction2 for your own projects.
+As this page may not be complete, please feel free to create a PR to update it.
+
+## Projects as an extension
+
+- [OTEAction2](https://github.com/openvinotoolkit/mmaction2): OpenVINO Training Extensions for Action Recognition.
+
+## Projects of papers
+
+There are also projects released with papers.
+Some of the papers are published in top-tier conferences (CVPR, ICCV, and ECCV), while the others are also highly influential.
+To make this list also serve as a reference for the community to develop and compare new video understanding algorithms, we order the entries by the time of the top-tier conferences.
+Methods already supported and maintained by MMAction2 are not listed.
+
+- Evidential Deep Learning for Open Set Action Recognition, ICCV 2021 Oral. [[paper]](https://arxiv.org/abs/2107.10161)[[github]](https://github.com/Cogito2012/DEAR)
+- Rethinking Self-supervised Correspondence Learning: A Video Frame-level Similarity Perspective, ICCV 2021 Oral. [[paper]](https://arxiv.org/abs/2103.17263)[[github]](https://github.com/xvjiarui/VFS)
+- MGSampler: An Explainable Sampling Strategy for Video Action Recognition, ICCV 2021. [[paper]](https://arxiv.org/abs/2104.09952)[[github]](https://github.com/MCG-NJU/MGSampler)
+- MultiSports: A Multi-Person Video Dataset of Spatio-Temporally Localized Sports Actions, ICCV 2021. [[paper]](https://arxiv.org/abs/2105.07404)
+- Video Swin Transformer. [[paper]](https://arxiv.org/abs/2106.13230)[[github]](https://github.com/SwinTransformer/Video-Swin-Transformer)
+- Long Short-Term Transformer for Online Action Detection.
[[paper]](https://arxiv.org/abs/2107.03377) diff --git a/docs_zh_CN/benchmark.md b/docs_zh_CN/benchmark.md index 7d033d1eb9..7bd65cb169 100644 --- a/docs_zh_CN/benchmark.md +++ b/docs_zh_CN/benchmark.md @@ -44,8 +44,8 @@ | 模型 |输入| IO 后端 | 批大小 x GPU 数量 | MMAction2 (s/iter) | GPU 显存占用 (GB) | MMAction (s/iter)| GPU 显存占用 (GB) | Temporal-Shift-Module (s/iter) | GPU 显存占用 (GB) | PySlowFast (s/iter)| GPU 显存占用 (GB) | | :--- | :---------------:|:---------------:| :---------------:| :---------------: | :--------------------: | :----------------------------: | :-----------------: |:-----------------: |:-----------------: |:-----------------: |:-----------------: | | [TSN](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py)| 256p rawframes |Memcached| 32x8|**[0.32](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_rawframes_memcahed_32x8.zip)** | 8.1 |[0.38](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction/tsn_256p_rawframes_memcached_32x8.zip)|8.1| [0.42](https://download.openmmlab.com/mmaction/benchmark/recognition/temporal_shift_module/tsn_256p_rawframes_memcached_32x8.zip)|10.5 | x |x | -| [TSN](/configs/recognition/tsn/tsn_r50_video_1x1x3_100e_kinetics400_rgb.py)| 256p videos |Disk| 32x8|**[1.42](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_videos_disk_32x8.zip)** | 8.1 | x |x |x| x | TODO |TODO| -| [TSN](/configs/recognition/tsn/tsn_r50_video_1x1x3_100e_kinetics400_rgb.py)| 256p dense-encoded video |Disk| 32x8|**[0.61](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_fast_videos_disk_32x8.zip)**|8.1 | x |x| x |x| TODO |TODO| +| [TSN](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py)| 256p videos |Disk| 32x8|**[1.42](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_videos_disk_32x8.zip)** | 8.1 | x |x |x| x | TODO |TODO| +| [TSN](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py)| 256p dense-encoded video |Disk| 32x8|**[0.61](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_fast_videos_disk_32x8.zip)**|8.1 | x |x| x |x| TODO |TODO| |[I3D heavy](/configs/recognition/i3d/i3d_r50_video_heavy_8x8x1_100e_kinetics400_rgb.py)|256p videos|Disk |8x8| **[0.34](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/i3d_heavy_256p_videos_disk_8x8.zip)** |4.6|x |x| x |x| [0.44](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_i3d_r50_8x8_video.log) |4.6| |[I3D heavy](/configs/recognition/i3d/i3d_r50_video_heavy_8x8x1_100e_kinetics400_rgb.py)|256p dense-encoded video|Disk |8x8| **[0.35](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/i3d_heavy_256p_fast_videos_disk_8x8.zip)**| 4.6 | x | x | x | x | [0.36](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_i3d_r50_8x8_fast_video.log) |4.6| | [I3D](/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py)|256p rawframes|Memcached|8x8| **[0.43](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/i3d_256p_rawframes_memcahed_8x8.zip)**|5.0 | [0.56](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction/i3d_256p_rawframes_memcached_8x8.zip)|5.0| x |x| x |x| From 5561ac71ea74c362faaab6c8ecce8c4d3b5c05db Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Mon, 11 Oct 2021 19:32:14 +0800 Subject: [PATCH 275/414] [Fix] Fix Tutorial link (#1219) * fix tutorial link * fix tutorial link * fix --- 
docs/api.rst | 2 +- docs/conf.py | 6 +++--- docs_zh_CN/api.rst | 2 +- docs_zh_CN/conf.py | 5 +++-- 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/docs/api.rst b/docs/api.rst index a3c8bf0a8c..ecc9b810e9 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -16,7 +16,7 @@ evaluation .. automodule:: mmaction.core.evaluation :members: -lr +scheduler ^^ .. automodule:: mmaction.core.scheduler :members: diff --git a/docs/conf.py b/docs/conf.py index 5326309703..464852f08c 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -74,18 +74,18 @@ def get_version(): html_theme_path = [pytorch_sphinx_theme.get_html_theme_path()] html_theme_options = { - # 'logo_url': 'https://mmocr.readthedocs.io/en/latest/', + # 'logo_url': 'https://mmaction2.readthedocs.io/en/latest/', 'menu': [ { 'name': 'Tutorial', 'url': 'https://colab.research.google.com/github/' - 'open-mmlab/mmocr/blob/main/demo/MMOCR_Tutorial.ipynb' + 'open-mmlab/mmaction2/blob/master/demo/mmaction2_tutorial.ipynb' }, { 'name': 'GitHub', - 'url': 'https://github.com/open-mmlab/mmocr' + 'url': 'https://github.com/open-mmlab/mmaction2' }, { 'name': diff --git a/docs_zh_CN/api.rst b/docs_zh_CN/api.rst index a3c8bf0a8c..ecc9b810e9 100644 --- a/docs_zh_CN/api.rst +++ b/docs_zh_CN/api.rst @@ -16,7 +16,7 @@ evaluation .. automodule:: mmaction.core.evaluation :members: -lr +scheduler ^^ .. automodule:: mmaction.core.scheduler :members: diff --git a/docs_zh_CN/conf.py b/docs_zh_CN/conf.py index f72e7afc52..22acdaabc0 100644 --- a/docs_zh_CN/conf.py +++ b/docs_zh_CN/conf.py @@ -76,11 +76,12 @@ def get_version(): '教程', 'url': 'https://colab.research.google.com/github/' - 'open-mmlab/mmocr/blob/main/demo/MMOCR_Tutorial.ipynb' + 'open-mmlab/mmaction2/blob/master/demo/' + 'mmaction2_tutorial_zh-CN.ipynb' }, { 'name': 'GitHub', - 'url': 'https://github.com/open-mmlab/mmocr' + 'url': 'https://github.com/open-mmlab/mmaction2' }, { 'name': From 6993693f178b1a59e5eb07f1a3db484d5e5de61a Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Tue, 12 Oct 2021 10:54:05 +0800 Subject: [PATCH 276/414] [Deployment] Support TorchServe (#1212) * tmp * support torchserve * update * fix * update dockerfile * update doc * add subtitles * fix docstring * optimize logic * update * update README --- README.md | 3 +- README_zh-CN.md | 3 +- docker/serve/Dockerfile | 51 ++ docker/serve/config.properties | 5 + docker/serve/entrypoint.sh | 12 + docs/useful_tools.md | 66 ++- mmaction/apis/inference.py | 3 - mmaction/datasets/pipelines/__init__.py | 20 +- mmaction/datasets/pipelines/loading.py | 47 ++ tools/data/gym/label_map.txt | 99 ++++ tools/data/gym/label_map_gym99.txt | 99 ---- tools/data/mit/label_map.txt | 678 +++++++++++------------ tools/data/mmit/label_map.txt | 626 ++++++++++----------- tools/data/skeleton/label_map_gym99.txt | 198 +++---- tools/data/skeleton/label_map_ntu120.txt | 240 ++++---- tools/deployment/mmaction2torchserve.py | 108 ++++ tools/deployment/mmaction_handler.py | 78 +++ 17 files changed, 1350 insertions(+), 986 deletions(-) create mode 100644 docker/serve/Dockerfile create mode 100644 docker/serve/config.properties create mode 100644 docker/serve/entrypoint.sh create mode 100644 tools/data/gym/label_map.txt delete mode 100644 tools/data/gym/label_map_gym99.txt create mode 100644 tools/deployment/mmaction2torchserve.py create mode 100644 tools/deployment/mmaction_handler.py diff --git a/README.md b/README.md index 4131fcd596..3b1483167c 100644 --- a/README.md +++ b/README.md @@ -44,7 +44,8 @@ The master branch works with **PyTorch 1.3+**. 
## News -- (2021-09-11) We support ST-GCN, a well-known GCN-based approach for skeleton-based action recognition! +- (2021-10-12) We support **TorchServe**! Now recognition models in MMAction2 can be packed as a `.mar` file and served with TorchServe. +- (2021-09-11) We support **ST-GCN**, a well-known GCN-based approach for skeleton-based action recognition! **Release**: v0.19.0 was released in 07/10/2021. Please refer to [changelog.md](docs/changelog.md) for details and release history. diff --git a/README_zh-CN.md b/README_zh-CN.md index e828780625..2e5b8f0ac2 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -43,7 +43,8 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLa ## 更新记录 -- 2021-09-11 支持 ST-GCN,一种广泛使用的基于人体姿态与 GCN 的动作识别方法! +- (2021-10-12) 支持 **TorchServe**!目前可以使用 TorchServe 部署 MMAction2 中的动作识别模型。 +- (2021-09-11) 支持 **ST-GCN**,一种广泛使用的基于人体姿态与 GCN 的动作识别方法! v0.19.0 版本已于 2021 年 10 月 7 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史 diff --git a/docker/serve/Dockerfile b/docker/serve/Dockerfile new file mode 100644 index 0000000000..e0004cfa58 --- /dev/null +++ b/docker/serve/Dockerfile @@ -0,0 +1,51 @@ +ARG PYTORCH="1.9.0" +ARG CUDA="10.2" +ARG CUDNN="7" +FROM pytorch/pytorch:${PYTORCH}-cuda${CUDA}-cudnn${CUDNN}-devel + +ARG MMCV="1.3.8" +ARG MMACTION="0.19.0" + +ENV PYTHONUNBUFFERED TRUE + +RUN apt-get update && \ + DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y \ + ca-certificates \ + g++ \ + openjdk-11-jre-headless \ + # MMAction2 requirements + ffmpeg libsm6 libxext6 git ninja-build libglib2.0-0 libxrender-dev \ + libsndfile1 libturbojpeg \ + && rm -rf /var/lib/apt/lists/* + +ENV PATH="/opt/conda/bin:$PATH" +ENV FORCE_CUDA=1 + +# TORCHSERVE +RUN pip install torchserve torch-model-archiver + +# MMLAB +ARG PYTORCH +ARG CUDA +RUN ["/bin/bash", "-c", "pip install mmcv-full==${MMCV} -f https://download.openmmlab.com/mmcv/dist/cu${CUDA//./}/torch${PYTORCH}/index.html"] +# RUN pip install mmaction2==${MMACTION} +RUN pip install git+https://github.com/open-mmlab/mmaction2.git + +RUN useradd -m model-server \ + && mkdir -p /home/model-server/tmp + +COPY entrypoint.sh /usr/local/bin/entrypoint.sh + +RUN chmod +x /usr/local/bin/entrypoint.sh \ + && chown -R model-server /home/model-server + +COPY config.properties /home/model-server/config.properties +RUN mkdir /home/model-server/model-store && chown -R model-server /home/model-server/model-store + +EXPOSE 8080 8081 8082 + +USER model-server +WORKDIR /home/model-server +ENV TEMP=/home/model-server/tmp +ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] +CMD ["serve"] diff --git a/docker/serve/config.properties b/docker/serve/config.properties new file mode 100644 index 0000000000..efb9c47e40 --- /dev/null +++ b/docker/serve/config.properties @@ -0,0 +1,5 @@ +inference_address=http://0.0.0.0:8080 +management_address=http://0.0.0.0:8081 +metrics_address=http://0.0.0.0:8082 +model_store=/home/model-server/model-store +load_models=all diff --git a/docker/serve/entrypoint.sh b/docker/serve/entrypoint.sh new file mode 100644 index 0000000000..41ba00b048 --- /dev/null +++ b/docker/serve/entrypoint.sh @@ -0,0 +1,12 @@ +#!/bin/bash +set -e + +if [[ "$1" = "serve" ]]; then + shift 1 + torchserve --start --ts-config /home/model-server/config.properties +else + eval "$@" +fi + +# prevent docker exit +tail -f /dev/null diff --git a/docs/useful_tools.md b/docs/useful_tools.md index 77bb8c321c..2e76a9921f 100644 --- a/docs/useful_tools.md +++ b/docs/useful_tools.md @@ -4,14 +4,21 @@ Apart from training/testing
scripts, we provide lots of useful tools under the ` +- [Useful Tools Link](#useful-tools-link) - [Log Analysis](#log-analysis) - [Model Complexity](#model-complexity) - [Model Conversion](#model-conversion) - - [MMAction2 model to ONNX (experimental)](#mmaction2-model-to-onnx--experimental-) + - [MMAction2 model to ONNX (experimental)](#mmaction2-model-to-onnx-experimental) - [Prepare a model for publishing](#prepare-a-model-for-publishing) +- [Model Serving](#model-serving) + - [1. Convert model from MMAction2 to TorchServe](#1-convert-model-from-mmaction2-to-torchserve) + - [2. Build `mmaction-serve` docker image](#2-build-mmaction-serve-docker-image) + - [3. Launch `mmaction-serve`](#3-launch-mmaction-serve) + - [4. Test deployment](#4-test-deployment) - [Miscellaneous](#miscellaneous) - [Evaluating a metric](#evaluating-a-metric) - [Print the entire config](#print-the-entire-config) + - [Check videos](#check-videos) @@ -136,6 +143,63 @@ python tools/deployment/publish_model.py work_dirs/tsn_r50_1x1x3_100e_kinetics40 The final output filename will be `tsn_r50_1x1x3_100e_kinetics400_rgb-{hash id}.pth`. +## Model Serving + +To serve an `MMAction2` model with [`TorchServe`](https://pytorch.org/serve/), follow the steps below: + +### 1. Convert model from MMAction2 to TorchServe + +```shell +python tools/deployment/mmaction2torchserve.py ${CONFIG_FILE} ${CHECKPOINT_FILE} \ +--output-folder ${MODEL_STORE} \ +--model-name ${MODEL_NAME} \ +--label-file ${LABEL_FILE} + +``` + +### 2. Build `mmaction-serve` docker image + +```shell +DOCKER_BUILDKIT=1 docker build -t mmaction-serve:latest docker/serve/ +``` + +### 3. Launch `mmaction-serve` + +Check the official docs for [running TorchServe with docker](https://github.com/pytorch/serve/blob/master/docker/README.md#running-torchserve-in-a-production-docker-environment). + +Example: + +```shell +docker run --rm \ +--cpus 8 \ +--gpus device=0 \ +-p8080:8080 -p8081:8081 -p8082:8082 \ +--mount type=bind,source=$MODEL_STORE,target=/home/model-server/model-store \ +mmaction-serve:latest +``` + +**Note**: ${MODEL_STORE} needs to be an absolute path. +[Read the docs](https://github.com/pytorch/serve/blob/072f5d088cce9bb64b2a18af065886c9b01b317b/docs/rest_api.md) about the Inference (8080), Management (8081), and Metrics (8082) APIs. + +### 4. Test deployment + +```shell +# Assume you are under the directory `mmaction2` +curl http://127.0.0.1:8080/predictions/${MODEL_NAME} -T demo/demo.mp4 +``` + +You should obtain a response similar to: + +```json +{ + "arm wrestling": 1.0, + "rock scissors paper": 4.962051880497143e-10, + "shaking hands": 3.9761663406245873e-10, + "massaging feet": 1.1924419784925533e-10, + "stretching leg": 1.0601879096849842e-10 +} +``` + ## Miscellaneous ### Evaluating a metric diff --git a/mmaction/apis/inference.py b/mmaction/apis/inference.py index aad3b5070e..c9e82f9a22 100644 --- a/mmaction/apis/inference.py +++ b/mmaction/apis/inference.py @@ -85,9 +85,6 @@ def inference_recognizer(model, video, outputs=None, as_tensor=True, **kwargs): elif isinstance(video, np.ndarray): assert len(video.shape) == 4, 'The shape should be T x H x W x C' input_flag = 'array' - raise NotImplementedError(f'The input type {input_flag} is not ' - 'supported yet, this is an interface ' - 'reserved for torchserve.
') elif isinstance(video, str) and video.startswith('http'): input_flag = 'video' elif isinstance(video, str) and osp.exists(video): diff --git a/mmaction/datasets/pipelines/__init__.py b/mmaction/datasets/pipelines/__init__.py index d99fd7ce74..e3745644ac 100644 --- a/mmaction/datasets/pipelines/__init__.py +++ b/mmaction/datasets/pipelines/__init__.py @@ -8,15 +8,15 @@ from .formating import (Collect, FormatAudioShape, FormatGCNInput, FormatShape, ImageToTensor, Rename, ToDataContainer, ToTensor, Transpose) -from .loading import (AudioDecode, AudioDecodeInit, AudioFeatureSelector, - BuildPseudoClip, DecordDecode, DecordInit, - DenseSampleFrames, GenerateLocalizationLabels, - ImageDecode, LoadAudioFeature, LoadHVULabel, - LoadLocalizationFeature, LoadProposals, OpenCVDecode, - OpenCVInit, PIMSDecode, PIMSInit, PyAVDecode, - PyAVDecodeMotionVector, PyAVInit, RawFrameDecode, - SampleAVAFrames, SampleFrames, SampleProposalFrames, - UntrimmedSampleFrames) +from .loading import (ArrayDecode, AudioDecode, AudioDecodeInit, + AudioFeatureSelector, BuildPseudoClip, DecordDecode, + DecordInit, DenseSampleFrames, + GenerateLocalizationLabels, ImageDecode, + LoadAudioFeature, LoadHVULabel, LoadLocalizationFeature, + LoadProposals, OpenCVDecode, OpenCVInit, PIMSDecode, + PIMSInit, PyAVDecode, PyAVDecodeMotionVector, PyAVInit, + RawFrameDecode, SampleAVAFrames, SampleFrames, + SampleProposalFrames, UntrimmedSampleFrames) from .pose_loading import (GeneratePoseTarget, LoadKineticsPose, PaddingWithLoop, PoseDecode, PoseNormalize, UniformSampleFrames) @@ -37,5 +37,5 @@ 'PyAVDecodeMotionVector', 'Rename', 'Imgaug', 'UniformSampleFrames', 'PoseDecode', 'LoadKineticsPose', 'GeneratePoseTarget', 'PIMSInit', 'PIMSDecode', 'TorchvisionTrans', 'PytorchVideoTrans', 'PoseNormalize', - 'FormatGCNInput', 'PaddingWithLoop' + 'FormatGCNInput', 'PaddingWithLoop', 'ArrayDecode' ] diff --git a/mmaction/datasets/pipelines/loading.py b/mmaction/datasets/pipelines/loading.py index a8201bf7f7..d1da624e00 100644 --- a/mmaction/datasets/pipelines/loading.py +++ b/mmaction/datasets/pipelines/loading.py @@ -1338,6 +1338,53 @@ def __repr__(self): return repr_str + +@PIPELINES.register_module() +class ArrayDecode: + """Load and decode frames with given indices from a 4D array. + + Required keys are "array" and "frame_inds", added or modified keys are + "imgs", "img_shape" and "original_shape". + """ + + def __call__(self, results): + """Perform the ``ArrayDecode`` to pick frames given indices. + + Args: + results (dict): The resulting dict to be modified and passed + to the next transform in pipeline. + """ + + modality = results['modality'] + array = results['array'] + + imgs = list() + + if results['frame_inds'].ndim != 1: + results['frame_inds'] = np.squeeze(results['frame_inds']) + + offset = results.get('offset', 0) + + for frame_idx in results['frame_inds']: + + frame_idx += offset + if modality == 'RGB': + imgs.append(array[frame_idx]) + elif modality == 'Flow': + imgs.extend( + [array[frame_idx, ..., 0], array[frame_idx, ..., 1]]) + else: + raise NotImplementedError + + results['imgs'] = imgs + results['original_shape'] = imgs[0].shape[:2] + results['img_shape'] = imgs[0].shape[:2] + + return results + + def __repr__(self): + return f'{self.__class__.__name__}()' + + @PIPELINES.register_module() class ImageDecode: """Load and decode images.
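The `ArrayDecode` step above is what backs the new `array` input path in `inference_recognizer`: a frame sampler fills `results['frame_inds']`, and `ArrayDecode` slices the in-memory `T x H x W x C` array at those indices instead of decoding a file. A minimal sketch of how the two compose, assuming a dummy random array (the shapes and sampler settings are illustrative, not defaults from this patch):

```python
# Illustrative sketch only: run the new ArrayDecode after SampleFrames on a
# dummy in-memory video. Shapes and sampler settings are assumptions.
import numpy as np

from mmaction.datasets.pipelines import ArrayDecode, SampleFrames

# A fake 30-frame RGB video held in memory as T x H x W x C uint8.
video = np.random.randint(0, 256, size=(30, 64, 64, 3), dtype=np.uint8)
results = dict(
    array=video,
    total_frames=video.shape[0],
    start_index=0,
    modality='RGB')

# SampleFrames writes results['frame_inds']; ArrayDecode then indexes the
# array directly rather than reading frames from disk.
sampler = SampleFrames(
    clip_len=1, frame_interval=1, num_clips=8, test_mode=True)
results = ArrayDecode()(sampler(results))

assert len(results['imgs']) == 8  # one decoded frame per sampled index
assert results['img_shape'] == (64, 64)
```

Because `frame_inds` is squeezed to one dimension before indexing, the transform works whether the sampler emitted a flat or clip-shaped index array, so it can sit wherever `RawFrameDecode` would normally appear in a pipeline config.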
diff --git a/tools/data/gym/label_map.txt b/tools/data/gym/label_map.txt new file mode 100644 index 0000000000..daca3aa7f7 --- /dev/null +++ b/tools/data/gym/label_map.txt @@ -0,0 +1,99 @@ +(VT) round-off, flic-flac with 0.5 turn on, stretched salto forward with 0.5 turn off +(VT) round-off, flic-flac on, stretched salto backward with 2 turn off +(VT) round-off, flic-flac on, stretched salto backward with 1 turn off +(VT) round-off, flic-flac on, stretched salto backward with 1.5 turn off +(VT) round-off, flic-flac on, stretched salto backward with 2.5 turn off +(VT) round-off, flic-flac on, stretched salto backward off +(FX) switch leap with 0.5 turn +(FX) switch leap with 1 turn +(FX) split leap with 1 turn +(FX) split leap with 1.5 turn or more +(FX) switch leap (leap forward with leg change to cross split) +(FX) split jump with 1 turn +(FX) split jump (leg separation 180 degree parallel to the floor) +(FX) johnson with additional 0.5 turn +(FX) straddle pike or side split jump with 1 turn +(FX) switch leap to ring position +(FX) stag jump +(FX) 2 turn with free leg held upward in 180 split position throughout turn +(FX) 2 turn in tuck stand on one leg, free leg straight throughout turn +(FX) 3 turn on one leg, free leg optional below horizontal +(FX) 2 turn on one leg, free leg optional below horizontal +(FX) 1 turn on one leg, free leg optional below horizontal +(FX) 2 turn or more with heel of free leg forward at horizontal throughout turn +(FX) 1 turn with heel of free leg forward at horizontal throughout turn +(FX) arabian double salto tucked +(FX) salto forward tucked +(FX) aerial walkover forward +(FX) salto forward stretched with 2 twist +(FX) salto forward stretched with 1 twist +(FX) salto forward stretched with 1.5 twist +(FX) salto forward stretched, feet land together +(FX) double salto backward stretched +(FX) salto backward stretched with 3 twist +(FX) salto backward stretched with 2 twist +(FX) salto backward stretched with 2.5 twist +(FX) salto backward stretched with 1.5 twist +(FX) double salto backward tucked with 2 twist +(FX) double salto backward tucked with 1 twist +(FX) double salto backward tucked +(FX) double salto backward piked with 1 twist +(FX) double salto backward piked +(BB) sissone (leg separation 180 degree on the diagonal to the floor, take off two feet, land on one foot) +(BB) split jump with 0.5 turn in side position +(BB) split jump +(BB) straddle pike jump or side split jump +(BB) split ring jump (ring jump with front leg horizontal to the floor) +(BB) switch leap with 0.5 turn +(BB) switch leap (leap forward with leg change) +(BB) split leap forward +(BB) johnson (leap forward with leg change and 0.25 turn to side split or straddle pike position) +(BB) switch leap to ring position +(BB) sheep jump (jump with upper back arch and head release with feet to head height/closed Ring) +(BB) wolf hop or jump (hip angle at 45, knees together) +(BB) 1 turn with heel of free leg forward at horizontal throughout turn +(BB) 2 turn on one leg, free leg optional below horizontal +(BB) 1 turn on one leg, free leg optional below horizontal +(BB) 2 turn in tuck stand on one leg, free leg optional +(BB) salto backward tucked with 1 twist +(BB) salto backward tucked +(BB) salto backward stretched-step out (feet land successively) +(BB) salto backward stretched with legs together +(BB) salto sideward tucked, take off from one leg to side stand +(BB) free aerial cartwheel landing in cross position +(BB) salto forward tucked to cross stand +(BB) free aerial walkover 
forward, landing on one or both feet +(BB) jump backward, flic-flac take-off with 0.5 twist through handstand to walkover forward, also with support on one arm +(BB) flic-flac to land on both feet +(BB) flic-flac with step-out, also with support on one arm +(BB) round-off +(BB) double salto backward tucked +(BB) salto backward tucked +(BB) double salto backward piked +(BB) salto backward stretched with 2 twist +(BB) salto backward stretched with 2.5 twist +(UB) pike sole circle backward with 1 turn to handstand +(UB) pike sole circle backward with 0.5 turn to handstand +(UB) pike sole circle backward to handstand +(UB) giant circle backward with 1 turn to handstand +(UB) giant circle backward with 0.5 turn to handstand +(UB) giant circle backward +(UB) giant circle forward with 1 turn on one arm before handstand phase +(UB) giant circle forward with 0.5 turn to handstand +(UB) giant circle forward +(UB) clear hip circle backward to handstand +(UB) clear pike circle backward with 1 turn to handstand +(UB) clear pike circle backward with 0.5 turn to handstand +(UB) clear pike circle backward to handstand +(UB) stalder backward with 1 turn to handstand +(UB) stalder backward to handstand +(UB) counter straddle over high bar to hang +(UB) counter piked over high bar to hang +(UB) (swing backward or front support) salto forward straddled to hang on high bar +(UB) (swing backward) salto forward piked to hang on high bar +(UB) (swing forward or hip circle backward) salto backward with 0.5 turn piked to hang on high bar +(UB) transition flight from high bar to low bar +(UB) transition flight from low bar to high bar +(UB) (swing forward) double salto backward tucked with 1 turn +(UB) (swing backward) double salto forward tucked +(UB) (swing forward) double salto backward stretched diff --git a/tools/data/gym/label_map_gym99.txt b/tools/data/gym/label_map_gym99.txt deleted file mode 100644 index 8bcf084d5f..0000000000 --- a/tools/data/gym/label_map_gym99.txt +++ /dev/null @@ -1,99 +0,0 @@ -Clabel: 0; set: 1; Glabel: 1; (VT) round-off, flic-flac with 0.5 turn on, stretched salto forward with 0.5 turn off -Clabel: 1; set: 1; Glabel: 19; (VT) round-off, flic-flac on, stretched salto backward with 2 turn off -Clabel: 2; set: 1; Glabel: 20; (VT) round-off, flic-flac on, stretched salto backward with 1 turn off -Clabel: 3; set: 1; Glabel: 21; (VT) round-off, flic-flac on, stretched salto backward with 1.5 turn off -Clabel: 4; set: 1; Glabel: 23; (VT) round-off, flic-flac on, stretched salto backward with 2.5 turn off -Clabel: 5; set: 1; Glabel: 24; (VT) round-off, flic-flac on, stretched salto backward off -Clabel: 6; set: 21; Glabel: 67; (FX) switch leap with 0.5 turn -Clabel: 7; set: 21; Glabel: 68; (FX) switch leap with 1 turn -Clabel: 8; set: 21; Glabel: 72; (FX) split leap with 1 turn -Clabel: 9; set: 21; Glabel: 73; (FX) split leap with 1.5 turn or more -Clabel: 10; set: 21; Glabel: 74; (FX) switch leap (leap forward with leg change to cross split) -Clabel: 11; set: 21; Glabel: 77; (FX) split jump with 1 turn -Clabel: 12; set: 21; Glabel: 81; (FX) split jump (leg separation 180 degree parallel to the floor) -Clabel: 13; set: 21; Glabel: 83; (FX) johnson with additional 0.5 turn -Clabel: 14; set: 21; Glabel: 88; (FX) straddle pike or side split jump with 1 turn -Clabel: 15; set: 21; Glabel: 96; (FX) switch leap to ring position -Clabel: 16; set: 21; Glabel: 104; (FX) stag jump -Clabel: 17; set: 22; Glabel: 134; (FX) 2 turn with free leg held upward in 180 split position throughout turn -Clabel: 18; 
set: 22; Glabel: 137; (FX) 2 turn in tuck stand on one leg, free leg straight throughout turn -Clabel: 19; set: 22; Glabel: 146; (FX) 3 turn on one leg, free leg optional below horizontal -Clabel: 20; set: 22; Glabel: 147; (FX) 2 turn on one leg, free leg optional below horizontal -Clabel: 21; set: 22; Glabel: 148; (FX) 1 turn on one leg, free leg optional below horizontal -Clabel: 22; set: 22; Glabel: 149; (FX) 2 turn or more with heel of free leg forward at horizontal throughout turn -Clabel: 23; set: 22; Glabel: 150; (FX) 1 turn with heel of free leg forward at horizontal throughout turn -Clabel: 24; set: 24; Glabel: 156; (FX) arabian double salto tucked -Clabel: 25; set: 24; Glabel: 163; (FX) salto forward tucked -Clabel: 26; set: 24; Glabel: 169; (FX) aerial walkover forward -Clabel: 27; set: 24; Glabel: 171; (FX) salto forward stretched with 2 twist -Clabel: 28; set: 24; Glabel: 172; (FX) salto forward stretched with 1 twist -Clabel: 29; set: 24; Glabel: 174; (FX) salto forward stretched with 1.5 twist -Clabel: 30; set: 24; Glabel: 177; (FX) salto forward stretched, feet land together -Clabel: 31; set: 25; Glabel: 181; (FX) double salto backward stretched -Clabel: 32; set: 25; Glabel: 182; (FX) salto backward stretched with 3 twist -Clabel: 33; set: 25; Glabel: 183; (FX) salto backward stretched with 2 twist -Clabel: 34; set: 25; Glabel: 187; (FX) salto backward stretched with 2.5 twist -Clabel: 35; set: 25; Glabel: 188; (FX) salto backward stretched with 1.5 twist -Clabel: 36; set: 25; Glabel: 191; (FX) double salto backward tucked with 2 twist -Clabel: 37; set: 25; Glabel: 192; (FX) double salto backward tucked with 1 twist -Clabel: 38; set: 25; Glabel: 195; (FX) double salto backward tucked -Clabel: 39; set: 25; Glabel: 198; (FX) double salto backward piked with 1 twist -Clabel: 40; set: 25; Glabel: 199; (FX) double salto backward piked -Clabel: 41; set: 31; Glabel: 207; (BB) sissone (leg separation 180 degree on the diagonal to the floor, take off two feet, land on one foot) -Clabel: 42; set: 31; Glabel: 208; (BB) split jump with 0.5 turn in side position -Clabel: 43; set: 31; Glabel: 213; (BB) split jump -Clabel: 44; set: 31; Glabel: 219; (BB) straddle pike jump or side split jump -Clabel: 45; set: 31; Glabel: 222; (BB) split ring jump (ring jump with front leg horizontal to the floor) -Clabel: 46; set: 31; Glabel: 223; (BB) switch leap with 0.5 turn -Clabel: 47; set: 31; Glabel: 228; (BB) switch leap (leap forward with leg change) -Clabel: 48; set: 31; Glabel: 230; (BB) split leap forward -Clabel: 49; set: 31; Glabel: 232; (BB) johnson (leap forward with leg change and 0.25 turn to side split or straddle pike position) -Clabel: 50; set: 31; Glabel: 234; (BB) switch leap to ring position -Clabel: 51; set: 31; Glabel: 251; (BB) sheep jump (jump with upper back arch and head release with feet to head height/closed Ring) -Clabel: 52; set: 31; Glabel: 256; (BB) wolf hop or jump (hip angle at 45, knees together) -Clabel: 53; set: 32; Glabel: 268; (BB) 1 turn with heel of free leg forward at horizontal throughout turn -Clabel: 54; set: 32; Glabel: 270; (BB) 2 turn on one leg, free leg optional below horizontal -Clabel: 55; set: 32; Glabel: 272; (BB) 1 turn on one leg, free leg optional below horizontal -Clabel: 56; set: 32; Glabel: 279; (BB) 2 turn in tuck stand on one leg, free leg optional -Clabel: 57; set: 33; Glabel: 289; (BB) salto backward tucked with 1 twist -Clabel: 58; set: 33; Glabel: 290; (BB) salto backward tucked -Clabel: 59; set: 33; Glabel: 295; (BB) salto backward 
stretched-step out (feet land successively) -Clabel: 60; set: 33; Glabel: 297; (BB) salto backward stretched with legs together -Clabel: 61; set: 33; Glabel: 300; (BB) salto sideward tucked, take off from one leg to side stand -Clabel: 62; set: 33; Glabel: 306; (BB) free aerial cartwheel landing in cross position -Clabel: 63; set: 33; Glabel: 309; (BB) salto forward tucked to cross stand -Clabel: 64; set: 33; Glabel: 312; (BB) free aerial walkover forward, landing on one or both feet -Clabel: 65; set: 34; Glabel: 331; (BB) jump backward, flic-flac take-off with 0.5 twist through handstand to walkover forward, also with support on one arm -Clabel: 66; set: 34; Glabel: 334; (BB) flic-flac to land on both feet -Clabel: 67; set: 34; Glabel: 335; (BB) flic-flac with step-out, also with support on one arm -Clabel: 68; set: 34; Glabel: 336; (BB) round-off -Clabel: 69; set: 35; Glabel: 357; (BB) double salto backward tucked -Clabel: 70; set: 35; Glabel: 359; (BB) salto backward tucked -Clabel: 71; set: 35; Glabel: 363; (BB) double salto backward piked -Clabel: 72; set: 35; Glabel: 367; (BB) salto backward stretched with 2 twist -Clabel: 73; set: 35; Glabel: 370; (BB) salto backward stretched with 2.5 twist -Clabel: 74; set: 41; Glabel: 398; (UB) pike sole circle backward with 1 turn to handstand -Clabel: 75; set: 41; Glabel: 399; (UB) pike sole circle backward with 0.5 turn to handstand -Clabel: 76; set: 41; Glabel: 400; (UB) pike sole circle backward to handstand -Clabel: 77; set: 41; Glabel: 411; (UB) giant circle backward with 1 turn to handstand -Clabel: 78; set: 41; Glabel: 413; (UB) giant circle backward with 0.5 turn to handstand -Clabel: 79; set: 41; Glabel: 416; (UB) giant circle backward -Clabel: 80; set: 41; Glabel: 417; (UB) giant circle forward with 1 turn on one arm before handstand phase -Clabel: 81; set: 41; Glabel: 420; (UB) giant circle forward with 0.5 turn to handstand -Clabel: 82; set: 41; Glabel: 421; (UB) giant circle forward -Clabel: 83; set: 41; Glabel: 425; (UB) clear hip circle backward to handstand -Clabel: 84; set: 41; Glabel: 431; (UB) clear pike circle backward with 1 turn to handstand -Clabel: 85; set: 41; Glabel: 432; (UB) clear pike circle backward with 0.5 turn to handstand -Clabel: 86; set: 41; Glabel: 433; (UB) clear pike circle backward to handstand -Clabel: 87; set: 41; Glabel: 441; (UB) stalder backward with 1 turn to handstand -Clabel: 88; set: 41; Glabel: 443; (UB) stalder backward to handstand -Clabel: 89; set: 42; Glabel: 453; (UB) counter straddle over high bar to hang -Clabel: 90; set: 42; Glabel: 456; (UB) counter piked over high bar to hang -Clabel: 91; set: 42; Glabel: 462; (UB) (swing backward or front support) salto forward straddled to hang on high bar -Clabel: 92; set: 42; Glabel: 465; (UB) (swing backward) salto forward piked to hang on high bar -Clabel: 93; set: 42; Glabel: 466; (UB) (swing forward or hip circle backward) salto backward with 0.5 turn piked to hang on high bar -Clabel: 94; set: 43; Glabel: 471; (UB) transition flight from high bar to low bar -Clabel: 95; set: 43; Glabel: 472; (UB) transition flight from low bar to high bar -Clabel: 96; set: 44; Glabel: 481; (UB) (swing forward) double salto backward tucked with 1 turn -Clabel: 97; set: 44; Glabel: 484; (UB) (swing backward) double salto forward tucked -Clabel: 98; set: 44; Glabel: 516; (UB) (swing forward) double salto backward stretched diff --git a/tools/data/mit/label_map.txt b/tools/data/mit/label_map.txt index 0f7495ea5c..c1160edf2f 100644 --- 
a/tools/data/mit/label_map.txt +++ b/tools/data/mit/label_map.txt @@ -1,339 +1,339 @@ -clapping,0 -praying,1 -dropping,2 -burying,3 -covering,4 -flooding,5 -leaping,6 -drinking,7 -slapping,8 -cuddling,9 -sleeping,10 -preaching,11 -raining,12 -stitching,13 -spraying,14 -twisting,15 -coaching,16 -submerging,17 -breaking,18 -tuning,19 -boarding,20 -running,21 -destroying,22 -competing,23 -giggling,24 -shoveling,25 -chasing,26 -flicking,27 -pouring,28 -buttoning,29 -hammering,30 -carrying,31 -surfing,32 -pulling,33 -squatting,34 -aiming,35 -crouching,36 -tapping,37 -skipping,38 -washing,39 -winking,40 -queuing,41 -locking,42 -stopping,43 -sneezing,44 -flipping,45 -sewing,46 -clipping,47 -working,48 -rocking,49 -asking,50 -playing+fun,51 -camping,52 -plugging,53 -pedaling,54 -constructing,55 -slipping,56 -sweeping,57 -screwing,58 -shrugging,59 -hitchhiking,60 -cracking,61 -scratching,62 -trimming,63 -selling,64 -marching,65 -stirring,66 -kissing,67 -jumping,68 -starting,69 -clinging,70 -socializing,71 -picking,72 -splashing,73 -licking,74 -kicking,75 -sliding,76 -filming,77 -driving,78 -handwriting,79 -steering,80 -filling,81 -crashing,82 -stealing,83 -pressing,84 -shouting,85 -hiking,86 -vacuuming,87 -pointing,88 -giving,89 -diving,90 -hugging,91 -building,92 -swerving,93 -dining,94 -floating,95 -cheerleading,96 -leaning,97 -sailing,98 -singing,99 -playing,100 -hitting,101 -bubbling,102 -joining,103 -bathing,104 -raising,105 -sitting,106 -drawing,107 -protesting,108 -rinsing,109 -coughing,110 -smashing,111 -slicing,112 -balancing,113 -rafting,114 -kneeling,115 -dunking,116 -brushing,117 -crushing,118 -rubbing,119 -punting,120 -watering,121 -playing+music,122 -removing,123 -tearing,124 -imitating,125 -teaching,126 -cooking,127 -reaching,128 -studying,129 -serving,130 -bulldozing,131 -shaking,132 -discussing,133 -dragging,134 -gardening,135 -performing,136 -officiating,137 -photographing,138 -sowing,139 -dripping,140 -writing,141 -clawing,142 -bending,143 -boxing,144 -mopping,145 -gripping,146 -flowing,147 -digging,148 -tripping,149 -cheering,150 -buying,151 -bicycling,152 -feeding,153 -emptying,154 -unpacking,155 -sketching,156 -standing,157 -weeding,158 -stacking,159 -drying,160 -crying,161 -spinning,162 -frying,163 -cutting,164 -paying,165 -eating,166 -lecturing,167 -dancing,168 -adult+female+speaking,169 -boiling,170 -peeling,171 -wrapping,172 -wetting,173 -attacking,174 -welding,175 -putting,176 -swinging,177 -carving,178 -walking,179 -dressing,180 -inflating,181 -climbing,182 -shredding,183 -reading,184 -sanding,185 -frowning,186 -closing,187 -hunting,188 -clearing,189 -launching,190 -packaging,191 -fishing,192 -spilling,193 -leaking,194 -knitting,195 -boating,196 -sprinkling,197 -baptizing,198 -playing+sports,199 -rolling,200 -spitting,201 -dipping,202 -riding,203 -chopping,204 -extinguishing,205 -applauding,206 -calling,207 -talking,208 -adult+male+speaking,209 -snowing,210 -shaving,211 -marrying,212 -rising,213 -laughing,214 -crawling,215 -flying,216 -assembling,217 -injecting,218 -landing,219 -operating,220 -packing,221 -descending,222 -falling,223 -entering,224 -pushing,225 -sawing,226 -smelling,227 -overflowing,228 -fighting,229 -waking,230 -barbecuing,231 -skating,232 -painting,233 -drilling,234 -punching,235 -tying,236 -manicuring,237 -plunging,238 -grilling,239 -pitching,240 -towing,241 -telephoning,242 -crafting,243 -knocking,244 -playing+videogames,245 -storming,246 -placing,247 -turning,248 -barking,249 -child+singing,250 -opening,251 -waxing,252 -juggling,253 -mowing,254 
-shooting,255 -sniffing,256 -interviewing,257 -stomping,258 -chewing,259 -arresting,260 -grooming,261 -rowing,262 -bowing,263 -gambling,264 -saluting,265 -fueling,266 -autographing,267 -throwing,268 -drenching,269 -waving,270 -signing,271 -repairing,272 -baking,273 -smoking,274 -skiing,275 -drumming,276 -child+speaking,277 -blowing,278 -cleaning,279 -combing,280 -spreading,281 -racing,282 -combusting,283 -adult+female+singing,284 -fencing,285 -swimming,286 -adult+male+singing,287 -snuggling,288 -shopping,289 -bouncing,290 -dusting,291 -stroking,292 -snapping,293 -biting,294 -roaring,295 -guarding,296 -unloading,297 -lifting,298 -instructing,299 -folding,300 -measuring,301 -whistling,302 -exiting,303 -stretching,304 -taping,305 -squinting,306 -catching,307 -draining,308 -massaging,309 -scrubbing,310 -handcuffing,311 -celebrating,312 -jogging,313 -colliding,314 -bowling,315 -resting,316 -blocking,317 -smiling,318 -tattooing,319 -erupting,320 -howling,321 -parading,322 -grinning,323 -sprinting,324 -hanging,325 -planting,326 -speaking,327 -ascending,328 -yawning,329 -cramming,330 -burning,331 -wrestling,332 -poking,333 -tickling,334 -exercising,335 -loading,336 -piloting,337 -typing,338 +clapping +praying +dropping +burying +covering +flooding +leaping +drinking +slapping +cuddling +sleeping +preaching +raining +stitching +spraying +twisting +coaching +submerging +breaking +tuning +boarding +running +destroying +competing +giggling +shoveling +chasing +flicking +pouring +buttoning +hammering +carrying +surfing +pulling +squatting +aiming +crouching +tapping +skipping +washing +winking +queuing +locking +stopping +sneezing +flipping +sewing +clipping +working +rocking +asking +playing+fun +camping +plugging +pedaling +constructing +slipping +sweeping +screwing +shrugging +hitchhiking +cracking +scratching +trimming +selling +marching +stirring +kissing +jumping +starting +clinging +socializing +picking +splashing +licking +kicking +sliding +filming +driving +handwriting +steering +filling +crashing +stealing +pressing +shouting +hiking +vacuuming +pointing +giving +diving +hugging +building +swerving +dining +floating +cheerleading +leaning +sailing +singing +playing +hitting +bubbling +joining +bathing +raising +sitting +drawing +protesting +rinsing +coughing +smashing +slicing +balancing +rafting +kneeling +dunking +brushing +crushing +rubbing +punting +watering +playing+music +removing +tearing +imitating +teaching +cooking +reaching +studying +serving +bulldozing +shaking +discussing +dragging +gardening +performing +officiating +photographing +sowing +dripping +writing +clawing +bending +boxing +mopping +gripping +flowing +digging +tripping +cheering +buying +bicycling +feeding +emptying +unpacking +sketching +standing +weeding +stacking +drying +crying +spinning +frying +cutting +paying +eating +lecturing +dancing +adult+female+speaking +boiling +peeling +wrapping +wetting +attacking +welding +putting +swinging +carving +walking +dressing +inflating +climbing +shredding +reading +sanding +frowning +closing +hunting +clearing +launching +packaging +fishing +spilling +leaking +knitting +boating +sprinkling +baptizing +playing+sports +rolling +spitting +dipping +riding +chopping +extinguishing +applauding +calling +talking +adult+male+speaking +snowing +shaving +marrying +rising +laughing +crawling +flying +assembling +injecting +landing +operating +packing +descending +falling +entering +pushing +sawing +smelling +overflowing +fighting +waking +barbecuing +skating +painting +drilling 
+punching +tying +manicuring +plunging +grilling +pitching +towing +telephoning +crafting +knocking +playing+videogames +storming +placing +turning +barking +child+singing +opening +waxing +juggling +mowing +shooting +sniffing +interviewing +stomping +chewing +arresting +grooming +rowing +bowing +gambling +saluting +fueling +autographing +throwing +drenching +waving +signing +repairing +baking +smoking +skiing +drumming +child+speaking +blowing +cleaning +combing +spreading +racing +combusting +adult+female+singing +fencing +swimming +adult+male+singing +snuggling +shopping +bouncing +dusting +stroking +snapping +biting +roaring +guarding +unloading +lifting +instructing +folding +measuring +whistling +exiting +stretching +taping +squinting +catching +draining +massaging +scrubbing +handcuffing +celebrating +jogging +colliding +bowling +resting +blocking +smiling +tattooing +erupting +howling +parading +grinning +sprinting +hanging +planting +speaking +ascending +yawning +cramming +burning +wrestling +poking +tickling +exercising +loading +piloting +typing diff --git a/tools/data/mmit/label_map.txt b/tools/data/mmit/label_map.txt index efa36158ec..ae89927a8b 100644 --- a/tools/data/mmit/label_map.txt +++ b/tools/data/mmit/label_map.txt @@ -1,313 +1,313 @@ -crafting,0 -paddling,1 -raining,2 -weightlifting,3 -clawing,4 -hitchhiking,5 -autographing,6 -cooking,7 -gripping,8 -swerving,9 -frowning,10 -giving,11 -tattooing,12 -dipping,13 -leaking,14 -plunging,15 -barking,16 -stroking/petting,17 -piloting,18 -camping,19 -towing,20 -loading,21 -parading,22 -submerging,23 -squeezing,24 -sculpting,25 -stomping,26 -punting,27 -kissing,28 -smoking,29 -pouring,30 -texting,31 -adult+male+speaking,32 -adult+female+speaking,33 -crying,34 -unpacking,35 -pointing,36 -boating,37 -landing,38 -ironing,39 -crouching,40 -slapping,41 -typing,42 -ice+skating,43 -boiling,44 -chopping,45 -bowling,46 -fighting/attacking,47 -tapping,48 -applauding,49 -driving,50 -sprinting,51 -slicing,52 -approaching,53 -waving,54 -dusting,55 -wrapping,56 -knocking,57 -snapping,58 -gardening,59 -combing,60 -tickling,61 -carving,62 -smashing,63 -smiling/grinning,64 -dressing,65 -pressing,66 -lecturing,67 -telephoning,68 -exercising,69 -riding,70 -draining,71 -flying,72 -wrestling,73 -boxing,74 -rinsing,75 -overflowing,76 -inflating,77 -picking,78 -sowing,79 -shaving,80 -baking,81 -shaking,82 -running,83 -throwing,84 -stacking/piling,85 -buttoning,86 -leaping,87 -fueling,88 -pitching,89 -child+speaking,90 -breaking/destroying,91 -lifting,92 -filming/photographing,93 -singing,94 -reading,95 -chewing,96 -operating,97 -bubbling,98 -waxing,99 -cleaning/washing,100 -scooping,101 -erasing,102 -steering,103 -playing+videogames,104 -crashing,105 -constructing/assembling,106 -flooding,107 -drinking,108 -praying,109 -shouting,110 -winking,111 -dining,112 -repairing,113 -tying,114 -juggling,115 -rolling,116 -studying,117 -marching,118 -socializing,119 -ascending/rising,120 -arresting,121 -cracking,122 -laying,123 -clinging,124 -frying,125 -vacuuming,126 -combusting/burning,127 -filling,128 -standing,129 -howling,130 -dunking,131 -spraying,132 -bandaging,133 -shivering,134 -slipping,135 -racing,136 -roaring,137 -planting,138 -yawning,139 -grilling,140 -squinting,141 -skiing,142 -taping,143 -trimming,144 -preaching,145 -resting,146 -descending/lowering,147 -clearing,148 -screwing,149 -chasing,150 -speaking,151 -manicuring,152 -tripping,153 -performing,154 -teaching/instructing,155 -blowing,156 -painting,157 -sneezing,158 -packaging,159 -punching,160 
-clapping,161 -rotating/spinning,162 -skating,163 -cheerleading,164 -balancing,165 -child+singing,166 -covering,167 -snuggling/cuddling/hugging,168 -bulldozing,169 -jumping,170 -sliding,171 -barbecuing,172 -weeding,173 -swimming,174 -shooting,175 -dialing,176 -measuring,177 -pulling,178 -celebrating,179 -playing+fun,180 -knitting,181 -spreading,182 -erupting,183 -snowboarding,184 -swinging,185 -protesting,186 -sitting,187 -inserting,188 -bouncing,189 -surfing,190 -extinguishing,191 -unloading,192 -aiming,193 -bathing,194 -hammering,195 -fishing,196 -opening,197 -biting,198 -packing,199 -saluting,200 -rafting,201 -laughing,202 -bicycling,203 -rocking,204 -storming,205 -wetting,206 -shrugging,207 -handwriting,208 -gambling,209 -writing,210 -skipping,211 -dragging,212 -unplugging,213 -kicking,214 -sawing,215 -grooming,216 -whistling,217 -floating,218 -diving,219 -rubbing,220 -bending,221 -shoveling/digging,222 -peeling,223 -catching,224 -closing,225 -eating/feeding,226 -falling,227 -discussing,228 -sweeping,229 -massaging,230 -locking,231 -dancing,232 -mowing,233 -clipping,234 -hanging,235 -burying,236 -reaching,237 -kayaking,238 -snowing,239 -sleeping,240 -climbing,241 -flipping,242 -tearing/ripping,243 -folding,244 -signing,245 -cutting,246 -stretching,247 -stirring,248 -licking,249 -kneeling,250 -sewing,251 -dripping,252 -queuing,253 -pushing,254 -pedaling,255 -flossing,256 -buying/selling/shopping,257 -smelling/sniffing,258 -emptying,259 -sanding,260 -smacking,261 -carrying,262 -adult+male+singing,263 -poking,264 -brushing,265 -adult+female+singing,266 -scratching,267 -welding,268 -crawling,269 -skateboarding,270 -turning,271 -dropping,272 -hunting,273 -cheering,274 -drawing,275 -sprinkling,276 -spitting,277 -competing,278 -bowing,279 -hiking,280 -drying,281 -launching,282 -twisting,283 -crushing,284 -hitting/colliding,285 -shredding,286 -plugging,287 -gasping,288 -rowing,289 -calling,290 -drumming,291 -walking,292 -removing,293 -waking,294 -stitching,295 -coughing,296 -playing+music,297 -playing+sports,298 -interviewing,299 -scrubbing,300 -splashing,301 -officiating,302 -mopping,303 -flowing,304 -sailing,305 -drilling,306 -squatting,307 -handcuffing,308 -spilling,309 -marrying,310 -injecting,311 -jogging,312 +crafting +paddling +raining +weightlifting +clawing +hitchhiking +autographing +cooking +gripping +swerving +frowning +giving +tattooing +dipping +leaking +plunging +barking +stroking/petting +piloting +camping +towing +loading +parading +submerging +squeezing +sculpting +stomping +punting +kissing +smoking +pouring +texting +adult+male+speaking +adult+female+speaking +crying +unpacking +pointing +boating +landing +ironing +crouching +slapping +typing +ice+skating +boiling +chopping +bowling +fighting/attacking +tapping +applauding +driving +sprinting +slicing +approaching +waving +dusting +wrapping +knocking +snapping +gardening +combing +tickling +carving +smashing +smiling/grinning +dressing +pressing +lecturing +telephoning +exercising +riding +draining +flying +wrestling +boxing +rinsing +overflowing +inflating +picking +sowing +shaving +baking +shaking +running +throwing +stacking/piling +buttoning +leaping +fueling +pitching +child+speaking +breaking/destroying +lifting +filming/photographing +singing +reading +chewing +operating +bubbling +waxing +cleaning/washing +scooping +erasing +steering +playing+videogames +crashing +constructing/assembling +flooding +drinking +praying +shouting +winking +dining +repairing +tying +juggling +rolling +studying +marching +socializing 
+ascending/rising +arresting +cracking +laying +clinging +frying +vacuuming +combusting/burning +filling +standing +howling +dunking +spraying +bandaging +shivering +slipping +racing +roaring +planting +yawning +grilling +squinting +skiing +taping +trimming +preaching +resting +descending/lowering +clearing +screwing +chasing +speaking +manicuring +tripping +performing +teaching/instructing +blowing +painting +sneezing +packaging +punching +clapping +rotating/spinning +skating +cheerleading +balancing +child+singing +covering +snuggling/cuddling/hugging +bulldozing +jumping +sliding +barbecuing +weeding +swimming +shooting +dialing +measuring +pulling +celebrating +playing+fun +knitting +spreading +erupting +snowboarding +swinging +protesting +sitting +inserting +bouncing +surfing +extinguishing +unloading +aiming +bathing +hammering +fishing +opening +biting +packing +saluting +rafting +laughing +bicycling +rocking +storming +wetting +shrugging +handwriting +gambling +writing +skipping +dragging +unplugging +kicking +sawing +grooming +whistling +floating +diving +rubbing +bending +shoveling/digging +peeling +catching +closing +eating/feeding +falling +discussing +sweeping +massaging +locking +dancing +mowing +clipping +hanging +burying +reaching +kayaking +snowing +sleeping +climbing +flipping +tearing/ripping +folding +signing +cutting +stretching +stirring +licking +kneeling +sewing +dripping +queuing +pushing +pedaling +flossing +buying/selling/shopping +smelling/sniffing +emptying +sanding +smacking +carrying +adult+male+singing +poking +brushing +adult+female+singing +scratching +welding +crawling +skateboarding +turning +dropping +hunting +cheering +drawing +sprinkling +spitting +competing +bowing +hiking +drying +launching +twisting +crushing +hitting/colliding +shredding +plugging +gasping +rowing +calling +drumming +walking +removing +waking +stitching +coughing +playing+music +playing+sports +interviewing +scrubbing +splashing +officiating +mopping +flowing +sailing +drilling +squatting +handcuffing +spilling +marrying +injecting +jogging diff --git a/tools/data/skeleton/label_map_gym99.txt b/tools/data/skeleton/label_map_gym99.txt index 8bcf084d5f..daca3aa7f7 100644 --- a/tools/data/skeleton/label_map_gym99.txt +++ b/tools/data/skeleton/label_map_gym99.txt @@ -1,99 +1,99 @@ -Clabel: 0; set: 1; Glabel: 1; (VT) round-off, flic-flac with 0.5 turn on, stretched salto forward with 0.5 turn off -Clabel: 1; set: 1; Glabel: 19; (VT) round-off, flic-flac on, stretched salto backward with 2 turn off -Clabel: 2; set: 1; Glabel: 20; (VT) round-off, flic-flac on, stretched salto backward with 1 turn off -Clabel: 3; set: 1; Glabel: 21; (VT) round-off, flic-flac on, stretched salto backward with 1.5 turn off -Clabel: 4; set: 1; Glabel: 23; (VT) round-off, flic-flac on, stretched salto backward with 2.5 turn off -Clabel: 5; set: 1; Glabel: 24; (VT) round-off, flic-flac on, stretched salto backward off -Clabel: 6; set: 21; Glabel: 67; (FX) switch leap with 0.5 turn -Clabel: 7; set: 21; Glabel: 68; (FX) switch leap with 1 turn -Clabel: 8; set: 21; Glabel: 72; (FX) split leap with 1 turn -Clabel: 9; set: 21; Glabel: 73; (FX) split leap with 1.5 turn or more -Clabel: 10; set: 21; Glabel: 74; (FX) switch leap (leap forward with leg change to cross split) -Clabel: 11; set: 21; Glabel: 77; (FX) split jump with 1 turn -Clabel: 12; set: 21; Glabel: 81; (FX) split jump (leg separation 180 degree parallel to the floor) -Clabel: 13; set: 21; Glabel: 83; (FX) johnson with additional 0.5 turn -Clabel: 14; 
set: 21; Glabel: 88; (FX) straddle pike or side split jump with 1 turn -Clabel: 15; set: 21; Glabel: 96; (FX) switch leap to ring position -Clabel: 16; set: 21; Glabel: 104; (FX) stag jump -Clabel: 17; set: 22; Glabel: 134; (FX) 2 turn with free leg held upward in 180 split position throughout turn -Clabel: 18; set: 22; Glabel: 137; (FX) 2 turn in tuck stand on one leg, free leg straight throughout turn -Clabel: 19; set: 22; Glabel: 146; (FX) 3 turn on one leg, free leg optional below horizontal -Clabel: 20; set: 22; Glabel: 147; (FX) 2 turn on one leg, free leg optional below horizontal -Clabel: 21; set: 22; Glabel: 148; (FX) 1 turn on one leg, free leg optional below horizontal -Clabel: 22; set: 22; Glabel: 149; (FX) 2 turn or more with heel of free leg forward at horizontal throughout turn -Clabel: 23; set: 22; Glabel: 150; (FX) 1 turn with heel of free leg forward at horizontal throughout turn -Clabel: 24; set: 24; Glabel: 156; (FX) arabian double salto tucked -Clabel: 25; set: 24; Glabel: 163; (FX) salto forward tucked -Clabel: 26; set: 24; Glabel: 169; (FX) aerial walkover forward -Clabel: 27; set: 24; Glabel: 171; (FX) salto forward stretched with 2 twist -Clabel: 28; set: 24; Glabel: 172; (FX) salto forward stretched with 1 twist -Clabel: 29; set: 24; Glabel: 174; (FX) salto forward stretched with 1.5 twist -Clabel: 30; set: 24; Glabel: 177; (FX) salto forward stretched, feet land together -Clabel: 31; set: 25; Glabel: 181; (FX) double salto backward stretched -Clabel: 32; set: 25; Glabel: 182; (FX) salto backward stretched with 3 twist -Clabel: 33; set: 25; Glabel: 183; (FX) salto backward stretched with 2 twist -Clabel: 34; set: 25; Glabel: 187; (FX) salto backward stretched with 2.5 twist -Clabel: 35; set: 25; Glabel: 188; (FX) salto backward stretched with 1.5 twist -Clabel: 36; set: 25; Glabel: 191; (FX) double salto backward tucked with 2 twist -Clabel: 37; set: 25; Glabel: 192; (FX) double salto backward tucked with 1 twist -Clabel: 38; set: 25; Glabel: 195; (FX) double salto backward tucked -Clabel: 39; set: 25; Glabel: 198; (FX) double salto backward piked with 1 twist -Clabel: 40; set: 25; Glabel: 199; (FX) double salto backward piked -Clabel: 41; set: 31; Glabel: 207; (BB) sissone (leg separation 180 degree on the diagonal to the floor, take off two feet, land on one foot) -Clabel: 42; set: 31; Glabel: 208; (BB) split jump with 0.5 turn in side position -Clabel: 43; set: 31; Glabel: 213; (BB) split jump -Clabel: 44; set: 31; Glabel: 219; (BB) straddle pike jump or side split jump -Clabel: 45; set: 31; Glabel: 222; (BB) split ring jump (ring jump with front leg horizontal to the floor) -Clabel: 46; set: 31; Glabel: 223; (BB) switch leap with 0.5 turn -Clabel: 47; set: 31; Glabel: 228; (BB) switch leap (leap forward with leg change) -Clabel: 48; set: 31; Glabel: 230; (BB) split leap forward -Clabel: 49; set: 31; Glabel: 232; (BB) johnson (leap forward with leg change and 0.25 turn to side split or straddle pike position) -Clabel: 50; set: 31; Glabel: 234; (BB) switch leap to ring position -Clabel: 51; set: 31; Glabel: 251; (BB) sheep jump (jump with upper back arch and head release with feet to head height/closed Ring) -Clabel: 52; set: 31; Glabel: 256; (BB) wolf hop or jump (hip angle at 45, knees together) -Clabel: 53; set: 32; Glabel: 268; (BB) 1 turn with heel of free leg forward at horizontal throughout turn -Clabel: 54; set: 32; Glabel: 270; (BB) 2 turn on one leg, free leg optional below horizontal -Clabel: 55; set: 32; Glabel: 272; (BB) 1 turn on one leg, free leg 
optional below horizontal -Clabel: 56; set: 32; Glabel: 279; (BB) 2 turn in tuck stand on one leg, free leg optional -Clabel: 57; set: 33; Glabel: 289; (BB) salto backward tucked with 1 twist -Clabel: 58; set: 33; Glabel: 290; (BB) salto backward tucked -Clabel: 59; set: 33; Glabel: 295; (BB) salto backward stretched-step out (feet land successively) -Clabel: 60; set: 33; Glabel: 297; (BB) salto backward stretched with legs together -Clabel: 61; set: 33; Glabel: 300; (BB) salto sideward tucked, take off from one leg to side stand -Clabel: 62; set: 33; Glabel: 306; (BB) free aerial cartwheel landing in cross position -Clabel: 63; set: 33; Glabel: 309; (BB) salto forward tucked to cross stand -Clabel: 64; set: 33; Glabel: 312; (BB) free aerial walkover forward, landing on one or both feet -Clabel: 65; set: 34; Glabel: 331; (BB) jump backward, flic-flac take-off with 0.5 twist through handstand to walkover forward, also with support on one arm -Clabel: 66; set: 34; Glabel: 334; (BB) flic-flac to land on both feet -Clabel: 67; set: 34; Glabel: 335; (BB) flic-flac with step-out, also with support on one arm -Clabel: 68; set: 34; Glabel: 336; (BB) round-off -Clabel: 69; set: 35; Glabel: 357; (BB) double salto backward tucked -Clabel: 70; set: 35; Glabel: 359; (BB) salto backward tucked -Clabel: 71; set: 35; Glabel: 363; (BB) double salto backward piked -Clabel: 72; set: 35; Glabel: 367; (BB) salto backward stretched with 2 twist -Clabel: 73; set: 35; Glabel: 370; (BB) salto backward stretched with 2.5 twist -Clabel: 74; set: 41; Glabel: 398; (UB) pike sole circle backward with 1 turn to handstand -Clabel: 75; set: 41; Glabel: 399; (UB) pike sole circle backward with 0.5 turn to handstand -Clabel: 76; set: 41; Glabel: 400; (UB) pike sole circle backward to handstand -Clabel: 77; set: 41; Glabel: 411; (UB) giant circle backward with 1 turn to handstand -Clabel: 78; set: 41; Glabel: 413; (UB) giant circle backward with 0.5 turn to handstand -Clabel: 79; set: 41; Glabel: 416; (UB) giant circle backward -Clabel: 80; set: 41; Glabel: 417; (UB) giant circle forward with 1 turn on one arm before handstand phase -Clabel: 81; set: 41; Glabel: 420; (UB) giant circle forward with 0.5 turn to handstand -Clabel: 82; set: 41; Glabel: 421; (UB) giant circle forward -Clabel: 83; set: 41; Glabel: 425; (UB) clear hip circle backward to handstand -Clabel: 84; set: 41; Glabel: 431; (UB) clear pike circle backward with 1 turn to handstand -Clabel: 85; set: 41; Glabel: 432; (UB) clear pike circle backward with 0.5 turn to handstand -Clabel: 86; set: 41; Glabel: 433; (UB) clear pike circle backward to handstand -Clabel: 87; set: 41; Glabel: 441; (UB) stalder backward with 1 turn to handstand -Clabel: 88; set: 41; Glabel: 443; (UB) stalder backward to handstand -Clabel: 89; set: 42; Glabel: 453; (UB) counter straddle over high bar to hang -Clabel: 90; set: 42; Glabel: 456; (UB) counter piked over high bar to hang -Clabel: 91; set: 42; Glabel: 462; (UB) (swing backward or front support) salto forward straddled to hang on high bar -Clabel: 92; set: 42; Glabel: 465; (UB) (swing backward) salto forward piked to hang on high bar -Clabel: 93; set: 42; Glabel: 466; (UB) (swing forward or hip circle backward) salto backward with 0.5 turn piked to hang on high bar -Clabel: 94; set: 43; Glabel: 471; (UB) transition flight from high bar to low bar -Clabel: 95; set: 43; Glabel: 472; (UB) transition flight from low bar to high bar -Clabel: 96; set: 44; Glabel: 481; (UB) (swing forward) double salto backward tucked with 1 turn 
-Clabel: 97; set: 44; Glabel: 484; (UB) (swing backward) double salto forward tucked -Clabel: 98; set: 44; Glabel: 516; (UB) (swing forward) double salto backward stretched +(VT) round-off, flic-flac with 0.5 turn on, stretched salto forward with 0.5 turn off +(VT) round-off, flic-flac on, stretched salto backward with 2 turn off +(VT) round-off, flic-flac on, stretched salto backward with 1 turn off +(VT) round-off, flic-flac on, stretched salto backward with 1.5 turn off +(VT) round-off, flic-flac on, stretched salto backward with 2.5 turn off +(VT) round-off, flic-flac on, stretched salto backward off +(FX) switch leap with 0.5 turn +(FX) switch leap with 1 turn +(FX) split leap with 1 turn +(FX) split leap with 1.5 turn or more +(FX) switch leap (leap forward with leg change to cross split) +(FX) split jump with 1 turn +(FX) split jump (leg separation 180 degree parallel to the floor) +(FX) johnson with additional 0.5 turn +(FX) straddle pike or side split jump with 1 turn +(FX) switch leap to ring position +(FX) stag jump +(FX) 2 turn with free leg held upward in 180 split position throughout turn +(FX) 2 turn in tuck stand on one leg, free leg straight throughout turn +(FX) 3 turn on one leg, free leg optional below horizontal +(FX) 2 turn on one leg, free leg optional below horizontal +(FX) 1 turn on one leg, free leg optional below horizontal +(FX) 2 turn or more with heel of free leg forward at horizontal throughout turn +(FX) 1 turn with heel of free leg forward at horizontal throughout turn +(FX) arabian double salto tucked +(FX) salto forward tucked +(FX) aerial walkover forward +(FX) salto forward stretched with 2 twist +(FX) salto forward stretched with 1 twist +(FX) salto forward stretched with 1.5 twist +(FX) salto forward stretched, feet land together +(FX) double salto backward stretched +(FX) salto backward stretched with 3 twist +(FX) salto backward stretched with 2 twist +(FX) salto backward stretched with 2.5 twist +(FX) salto backward stretched with 1.5 twist +(FX) double salto backward tucked with 2 twist +(FX) double salto backward tucked with 1 twist +(FX) double salto backward tucked +(FX) double salto backward piked with 1 twist +(FX) double salto backward piked +(BB) sissone (leg separation 180 degree on the diagonal to the floor, take off two feet, land on one foot) +(BB) split jump with 0.5 turn in side position +(BB) split jump +(BB) straddle pike jump or side split jump +(BB) split ring jump (ring jump with front leg horizontal to the floor) +(BB) switch leap with 0.5 turn +(BB) switch leap (leap forward with leg change) +(BB) split leap forward +(BB) johnson (leap forward with leg change and 0.25 turn to side split or straddle pike position) +(BB) switch leap to ring position +(BB) sheep jump (jump with upper back arch and head release with feet to head height/closed Ring) +(BB) wolf hop or jump (hip angle at 45, knees together) +(BB) 1 turn with heel of free leg forward at horizontal throughout turn +(BB) 2 turn on one leg, free leg optional below horizontal +(BB) 1 turn on one leg, free leg optional below horizontal +(BB) 2 turn in tuck stand on one leg, free leg optional +(BB) salto backward tucked with 1 twist +(BB) salto backward tucked +(BB) salto backward stretched-step out (feet land successively) +(BB) salto backward stretched with legs together +(BB) salto sideward tucked, take off from one leg to side stand +(BB) free aerial cartwheel landing in cross position +(BB) salto forward tucked to cross stand +(BB) free aerial walkover forward, landing 
on one or both feet +(BB) jump backward, flic-flac take-off with 0.5 twist through handstand to walkover forward, also with support on one arm +(BB) flic-flac to land on both feet +(BB) flic-flac with step-out, also with support on one arm +(BB) round-off +(BB) double salto backward tucked +(BB) salto backward tucked +(BB) double salto backward piked +(BB) salto backward stretched with 2 twist +(BB) salto backward stretched with 2.5 twist +(UB) pike sole circle backward with 1 turn to handstand +(UB) pike sole circle backward with 0.5 turn to handstand +(UB) pike sole circle backward to handstand +(UB) giant circle backward with 1 turn to handstand +(UB) giant circle backward with 0.5 turn to handstand +(UB) giant circle backward +(UB) giant circle forward with 1 turn on one arm before handstand phase +(UB) giant circle forward with 0.5 turn to handstand +(UB) giant circle forward +(UB) clear hip circle backward to handstand +(UB) clear pike circle backward with 1 turn to handstand +(UB) clear pike circle backward with 0.5 turn to handstand +(UB) clear pike circle backward to handstand +(UB) stalder backward with 1 turn to handstand +(UB) stalder backward to handstand +(UB) counter straddle over high bar to hang +(UB) counter piked over high bar to hang +(UB) (swing backward or front support) salto forward straddled to hang on high bar +(UB) (swing backward) salto forward piked to hang on high bar +(UB) (swing forward or hip circle backward) salto backward with 0.5 turn piked to hang on high bar +(UB) transition flight from high bar to low bar +(UB) transition flight from low bar to high bar +(UB) (swing forward) double salto backward tucked with 1 turn +(UB) (swing backward) double salto forward tucked +(UB) (swing forward) double salto backward stretched diff --git a/tools/data/skeleton/label_map_ntu120.txt b/tools/data/skeleton/label_map_ntu120.txt index 863633776e..69826dfebf 100644 --- a/tools/data/skeleton/label_map_ntu120.txt +++ b/tools/data/skeleton/label_map_ntu120.txt @@ -1,120 +1,120 @@ -drink water. -eat meal/snack. -brushing teeth. -brushing hair. -drop. -pickup. -throw. -sitting down. -standing up (from sitting position). -clapping. -reading. -writing. -tear up paper. -wear jacket. -take off jacket. -wear a shoe. -take off a shoe. -wear on glasses. -take off glasses. -put on a hat/cap. -take off a hat/cap. -cheer up. -hand waving. -kicking something. -reach into pocket. -hopping (one foot jumping). -jump up. -make a phone call/answer phone. -playing with phone/tablet. -typing on a keyboard. -pointing to something with finger. -taking a selfie. -check time (from watch). -rub two hands together. -nod head/bow. -shake head. -wipe face. -salute. -put the palms together. -cross hands in front (say stop). -sneeze/cough. -staggering. -falling. -touch head (headache). -touch chest (stomachache/heart pain). -touch back (backache). -touch neck (neckache). -nausea or vomiting condition. -use a fan (with hand or paper)/feeling warm. -punching/slapping other person. -kicking other person. -pushing other person. -pat on back of other person. -point finger at the other person. -hugging other person. -giving something to other person. -touch other person's pocket. -handshaking. -walking towards each other. -walking apart from each other. -put on headphone. -take off headphone. -shoot at the basket. -bounce ball. -tennis bat swing. -juggling table tennis balls. -hush (quite). -flick hair. -thumb up. -thumb down. -make ok sign. -make victory sign. -staple book. -counting money. 
-cutting nails. -cutting paper (using scissors). -snapping fingers. -open bottle. -sniff (smell). -squat down. -toss a coin. -fold paper. -ball up paper. -play magic cube. -apply cream on face. -apply cream on hand back. -put on bag. -take off bag. -put something into a bag. -take something out of a bag. -open a box. -move heavy objects. -shake fist. -throw up cap/hat. -hands up (both hands). -cross arms. -arm circles. -arm swings. -running on the spot. -butt kicks (kick backward). -cross toe touch. -side kick. -yawn. -stretch oneself. -blow nose. -hit other person with something. -wield knife towards other person. -knock over other person (hit with body). -grab other person’s stuff. -shoot at other person with a gun. -step on foot. -high-five. -cheers and drink. -carry something with other person. -take a photo of other person. -follow other person. -whisper in other person’s ear. -exchange things with other person. -support somebody with hand. -finger-guessing game (playing rock-paper-scissors). +drink water +eat meal/snack +brushing teeth +brushing hair +drop +pickup +throw +sitting down +standing up (from sitting position) +clapping +reading +writing +tear up paper +wear jacket +take off jacket +wear a shoe +take off a shoe +wear on glasses +take off glasses +put on a hat/cap +take off a hat/cap +cheer up +hand waving +kicking something +reach into pocket +hopping (one foot jumping) +jump up +make a phone call/answer phone +playing with phone/tablet +typing on a keyboard +pointing to something with finger +taking a selfie +check time (from watch) +rub two hands together +nod head/bow +shake head +wipe face +salute +put the palms together +cross hands in front (say stop) +sneeze/cough +staggering +falling +touch head (headache) +touch chest (stomachache/heart pain) +touch back (backache) +touch neck (neckache) +nausea or vomiting condition +use a fan (with hand or paper)/feeling warm +punching/slapping other person +kicking other person +pushing other person +pat on back of other person +point finger at the other person +hugging other person +giving something to other person +touch other person's pocket +handshaking +walking towards each other +walking apart from each other +put on headphone +take off headphone +shoot at the basket +bounce ball +tennis bat swing +juggling table tennis balls +hush (quite) +flick hair +thumb up +thumb down +make ok sign +make victory sign +staple book +counting money +cutting nails +cutting paper (using scissors) +snapping fingers +open bottle +sniff (smell) +squat down +toss a coin +fold paper +ball up paper +play magic cube +apply cream on face +apply cream on hand back +put on bag +take off bag +put something into a bag +take something out of a bag +open a box +move heavy objects +shake fist +throw up cap/hat +hands up (both hands) +cross arms +arm circles +arm swings +running on the spot +butt kicks (kick backward) +cross toe touch +side kick +yawn +stretch oneself +blow nose +hit other person with something +wield knife towards other person +knock over other person (hit with body) +grab other person’s stuff +shoot at other person with a gun +step on foot +high-five +cheers and drink +carry something with other person +take a photo of other person +follow other person +whisper in other person’s ear +exchange things with other person +support somebody with hand +finger-guessing game (playing rock-paper-scissors) diff --git a/tools/deployment/mmaction2torchserve.py b/tools/deployment/mmaction2torchserve.py new file mode 100644 index 
0000000000..91a52aa894 --- /dev/null +++ b/tools/deployment/mmaction2torchserve.py @@ -0,0 +1,108 @@ +import shutil +from argparse import ArgumentParser, Namespace +from pathlib import Path +from tempfile import TemporaryDirectory + +import mmcv + +try: + from model_archiver.model_packaging import package_model + from model_archiver.model_packaging_utils import ModelExportUtils +except ImportError: + raise ImportError('`torch-model-archiver` is required.' + 'Try: pip install torch-model-archiver') + + +def mmaction2torchserve( + config_file: str, + checkpoint_file: str, + output_folder: str, + model_name: str, + label_file: str, + model_version: str = '1.0', + force: bool = False, +): + """Converts MMAction2 model (config + checkpoint) to TorchServe `.mar`. + + Args: + config_file (str): In MMAction2 config format. + checkpoint_file (str): In MMAction2 checkpoint format. + output_folder (str): Folder where `{model_name}.mar` will be created. + The file created will be in TorchServe archive format. + label_file (str): A txt file which contains the action category names. + model_name (str | None): If not None, used for naming the + `{model_name}.mar` file that will be created under `output_folder`. + If None, `{Path(checkpoint_file).stem}` will be used. + model_version (str): Model's version. + force (bool): If True, if there is an existing `{model_name}.mar` file + under `output_folder` it will be overwritten. + """ + mmcv.mkdir_or_exist(output_folder) + + config = mmcv.Config.fromfile(config_file) + + with TemporaryDirectory() as tmpdir: + config.dump(f'{tmpdir}/config.py') + shutil.copy(label_file, f'{tmpdir}/label_map.txt') + + args = Namespace( + **{ + 'model_file': f'{tmpdir}/config.py', + 'serialized_file': checkpoint_file, + 'handler': f'{Path(__file__).parent}/mmaction_handler.py', + 'model_name': model_name or Path(checkpoint_file).stem, + 'version': model_version, + 'export_path': output_folder, + 'force': force, + 'requirements_file': None, + 'extra_files': f'{tmpdir}/label_map.txt', + 'runtime': 'python', + 'archive_format': 'default' + }) + manifest = ModelExportUtils.generate_manifest_json(args) + package_model(args, manifest) + + +def parse_args(): + parser = ArgumentParser( + description='Convert MMAction2 models to TorchServe `.mar` format.') + parser.add_argument('config', type=str, help='config file path') + parser.add_argument('checkpoint', type=str, help='checkpoint file path') + parser.add_argument( + '--output-folder', + type=str, + required=True, + help='Folder where `{model_name}.mar` will be created.') + parser.add_argument( + '--model-name', + type=str, + default=None, + help='If not None, used for naming the `{model_name}.mar`' + 'file that will be created under `output_folder`.' + 'If None, `{Path(checkpoint_file).stem}` will be used.') + parser.add_argument( + '--label-file', + type=str, + default=None, + help='A txt file which contains the action category names. 
') + parser.add_argument( + '--model-version', + type=str, + default='1.0', + help='Number used for versioning.') + parser.add_argument( + '-f', + '--force', + action='store_true', + help='overwrite the existing `{model_name}.mar`') + args = parser.parse_args() + + return args + + +if __name__ == '__main__': + args = parse_args() + + mmaction2torchserve(args.config, args.checkpoint, args.output_folder, + args.model_name, args.label_file, args.model_version, + args.force) diff --git a/tools/deployment/mmaction_handler.py b/tools/deployment/mmaction_handler.py new file mode 100644 index 0000000000..f62a270e15 --- /dev/null +++ b/tools/deployment/mmaction_handler.py @@ -0,0 +1,78 @@ +import base64 +import os +import os.path as osp +import warnings + +import decord +import numpy as np +import torch + +from mmaction.apis import inference_recognizer, init_recognizer # noqa: F401 + +try: + from ts.torch_handler.base_handler import BaseHandler +except ImportError: + raise ImportError('`ts` is required. Try: pip install ts.') + + +class MMActionHandler(BaseHandler): + + def initialize(self, context): + properties = context.system_properties + self.map_location = 'cuda' if torch.cuda.is_available() else 'cpu' + self.device = torch.device(self.map_location + ':' + + str(properties.get('gpu_id')) if torch.cuda. + is_available() else self.map_location) + self.manifest = context.manifest + + model_dir = properties.get('model_dir') + serialized_file = self.manifest['model']['serializedFile'] + checkpoint = os.path.join(model_dir, serialized_file) + self.config_file = os.path.join(model_dir, 'config.py') + + mapping_file_path = osp.join(model_dir, 'label_map.txt') + if not os.path.isfile(mapping_file_path): + warnings.warn('Missing the label_map.txt file. ' + 'Inference output will not include class name.') + self.mapping = None + else: + lines = open(mapping_file_path).readlines() + self.mapping = [x.strip() for x in lines] + + self.model = init_recognizer(self.config_file, checkpoint, self.device) + self.initialized = True + + def preprocess(self, data): + videos = [] + + for row in data: + video = row.get('data') or row.get('body') + if isinstance(video, str): + video = base64.b64decode(video) + # First save the bytes as a tmp file + with open('/tmp/tmp.mp4', 'wb') as fout: + fout.write(video) + + video = decord.VideoReader('/tmp/tmp.mp4') + frames = [x.asnumpy() for x in video] + videos.append(np.stack(frames)) + + return videos + + def inference(self, data, *args, **kwargs): + results = [inference_recognizer(self.model, item) for item in data] + return results + + def postprocess(self, data): + # Format output following the example ObjectDetectionHandler format + output = [] + for video_idx, video_result in enumerate(data): + output.append([]) + assert isinstance(video_result, list) + + output[video_idx] = { + self.mapping[x[0]] if self.mapping else x[0]: float(x[1]) + for x in video_result + } + + return output From 3480252ab63dba891d336b536c811ef22752609f Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Sat, 16 Oct 2021 22:34:55 +0800 Subject: [PATCH 277/414] Fix GYM links (#1224) * Fix GYM links * Fix GYM links --- README.md | 2 +- README_zh-CN.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 3b1483167c..c0adf8a5db 100644 --- a/README.md +++ b/README.md @@ -165,7 +165,7 @@ If you have any feature requests, please feel free to leave a comment in [Issues
    OmniSource (Homepage) (ECCV'2020) - FineGYM (Homepage) (CVPR'2020) + FineGYM (Homepage) (CVPR'2020) diff --git a/README_zh-CN.md b/README_zh-CN.md index 2e5b8f0ac2..04b2fd64d5 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -161,7 +161,7 @@ MMAction2 将跟进学界的最新进展,并支持更多算法和框架。如 OmniSource (主页) (ECCV'2020) - FineGYM (主页) (CVPR'2020) + FineGYM (主页) (CVPR'2020) From 21da521cb8320ac918efec576ddd4720e78bcd25 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Mon, 18 Oct 2021 19:01:13 +0800 Subject: [PATCH 278/414] Update README.md (#1226) * Update README.md * Update README_zh-CN.md --- configs/recognition/omnisource/README.md | 2 +- configs/recognition/omnisource/README_zh-CN.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/configs/recognition/omnisource/README.md b/configs/recognition/omnisource/README.md index 1860af906c..3691fdb6c8 100644 --- a/configs/recognition/omnisource/README.md +++ b/configs/recognition/omnisource/README.md @@ -2,7 +2,7 @@ [Haodong Duan](https://github.com/kennymckormick), [Yue Zhao](https://github.com/zhaoyue-zephyrus), [Yuanjun Xiong](https://github.com/yjxiong), Wentao Liu, [Dahua Lin](https://github.com/lindahua) -In ECCV, 2020. [Paper](https://arxiv.org/abs/2003.13042) +In ECCV, 2020. [Paper](https://arxiv.org/abs/2003.13042), [Dataset](https://docs.google.com/forms/d/e/1FAIpQLSd8_GlmHzG8FcDbW-OEu__G7qLgOSYZpH-i5vYVJcu7wcb_TQ/viewform?usp=sf_link) ![pipeline](https://github.com/open-mmlab/mmaction2/blob/master/configs/recognition/omnisource/pipeline.png?raw=true) diff --git a/configs/recognition/omnisource/README_zh-CN.md b/configs/recognition/omnisource/README_zh-CN.md index 7e9f1da5f8..a330faeb38 100644 --- a/configs/recognition/omnisource/README_zh-CN.md +++ b/configs/recognition/omnisource/README_zh-CN.md @@ -2,7 +2,7 @@ [Haodong Duan](https://github.com/kennymckormick), [Yue Zhao](https://github.com/zhaoyue-zephyrus), [Yuanjun Xiong](https://github.com/yjxiong), Wentao Liu, [Dahua Lin](https://github.com/lindahua) -In ECCV, 2020. [Paper](https://arxiv.org/abs/2003.13042) +In ECCV, 2020. [Paper](https://arxiv.org/abs/2003.13042), [Dataset](https://docs.google.com/forms/d/e/1FAIpQLSd8_GlmHzG8FcDbW-OEu__G7qLgOSYZpH-i5vYVJcu7wcb_TQ/viewform?usp=sf_link) ![pipeline](https://github.com/open-mmlab/mmaction2/blob/master/configs/recognition/omnisource/pipeline.png?raw=true) From 6bf225d1a9280c65f20a9087031c1d0cc842ef41 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Tue, 19 Oct 2021 11:44:01 +0800 Subject: [PATCH 279/414] [ModelZoo] Benchmark PoseC3D on UCF and HMDB (#1223) * update * update --- README.md | 5 +- README_zh-CN.md | 5 +- configs/skeleton/posec3d/README.md | 12 ++ configs/skeleton/posec3d/metafile.yml | 40 +++++- ...ned_r50_u48_120e_hmdb51_split1_keypoint.py | 131 ++++++++++++++++++ ...ned_r50_u48_120e_ucf101_split1_keypoint.py | 131 ++++++++++++++++++ mmaction/datasets/pose_dataset.py | 14 ++ tools/data/skeleton/README.md | 6 +- 8 files changed, 338 insertions(+), 6 deletions(-) create mode 100644 configs/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint.py create mode 100644 configs/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint.py diff --git a/README.md b/README.md index c0adf8a5db..e942f75caa 100644 --- a/README.md +++ b/README.md @@ -44,6 +44,7 @@ The master branch works with **PyTorch 1.3+**. ## News +- (2021-10-16) We support **PoseC3D** on UCF101 and HMDB51, achieves 87.0% and 69.3% Top-1 accuracy with 2D skeletons only. 
Pre-extracted 2D skeletons are also available. - (2021-10-12) We support **TorchServe**! Now recognition models in MMAction2 can be packed as a `.mar` file and served with TorchServe. - (2021-09-11) We support **ST-GCN**, a well-known GCN-based approach for skeleton-based action recognition! @@ -194,8 +195,8 @@ If you have any feature requests, please feel free to leave a comment in [Issues PoseC3D-FineGYM (Homepage) (ArXiv'2021) PoseC3D-NTURGB+D (Homepage) (ArXiv'2021) - - + PoseC3D-UCF101 (Homepage) (ArXiv'2021) + PoseC3D-HMDB51 (Homepage) (ArXiv'2021) diff --git a/README_zh-CN.md b/README_zh-CN.md index 04b2fd64d5..4127e0a8be 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -43,6 +43,7 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLa ## 更新记录 +- (2021-10-16) 在 UCF101, HMDB51 上支持 **PoseC3D**,仅用 2D 关键点就可分别达到 87.0% 和 69.3% 的识别准确率。两数据集的预提取骨架特征可以公开下载。 - (2021-10-12) 支持 **TorchServe**!目前可以使用 TorchServe 部署 MMAction2 中的动作识别模型。 - (2021-09-11) 支持 **ST-GCN**,一种广泛使用的基于人体姿态与 GCN 的动作识别方法! @@ -190,8 +191,8 @@ MMAction2 将跟进学界的最新进展,并支持更多算法和框架。如 PoseC3D-FineGYM (主页) (ArXiv'2021) PoseC3D-NTURGB+D (主页) (ArXiv'2021) - - + PoseC3D-UCF101 (主页) (ArXiv'2021) + PoseC3D-HMDB51 (主页) (ArXiv'2021) diff --git a/configs/skeleton/posec3d/README.md b/configs/skeleton/posec3d/README.md index c6a1db643d..9f07f60282 100644 --- a/configs/skeleton/posec3d/README.md +++ b/configs/skeleton/posec3d/README.md @@ -75,6 +75,18 @@ | [slowonly_r50_u48_240e_ntu120_xsub_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb.py) | limb | 8 x 2 | SlowOnly-R50 | 85.7 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb-803c2317.pth?) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.json) | | Fusion | | | | 86.9 | | | | +### UCF101 + +| config | pseudo heatmap | gpus | backbone | Top-1 | ckpt | log | json | +| :----------------------------------------------------------- | :------------: | :---: | :----------: | :---: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint](/configs/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint.py) | keypoint | 8 | SlowOnly-R50 | 87.0 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint-cae8aa4a.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint.json) | + +### HMDB51 + +| config | pseudo heatmap | gpus | backbone | Top-1 | ckpt | log | json | +| :----------------------------------------------------------- | :------------: | :---: | :----------: | :---: | :----------------------------------------------------------: | 
:----------------------------------------------------------: | :----------------------------------------------------------: | +| [slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint](/configs/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint.py) | keypoint | 8 | SlowOnly-R50 | 69.3 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint-76ffdd8b.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint.json) | + :::{note} 1. The **gpus** indicates the number of gpu we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. diff --git a/configs/skeleton/posec3d/metafile.yml b/configs/skeleton/posec3d/metafile.yml index cd3fc3e2f3..b4c29ac730 100644 --- a/configs/skeleton/posec3d/metafile.yml +++ b/configs/skeleton/posec3d/metafile.yml @@ -118,4 +118,42 @@ Models: Task: Skeleton-based Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.json Training Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.log - Weights: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb-803c2317.pth? 
+ Weights: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb-803c2317.pth +- Config: configs/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint.py + In Collection: PoseC3D + Metadata: + Architecture: SlowOnly-R50 + Batch Size: 16 + Epochs: 120 + Parameters: 3029984 + Training Data: HMDB51 + Training Resources: 8 GPUs + pseudo heatmap: keypoint + Name: slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint + Results: + - Dataset: HMDB51 + Metrics: + Top 1 Accuracy: 69.3 + Task: Skeleton-based Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint.json + Training Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint.log + Weights: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint-76ffdd8b.pth +- Config: configs/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint.py + In Collection: PoseC3D + Metadata: + Architecture: SlowOnly-R50 + Batch Size: 16 + Epochs: 120 + Parameters: 3055584 + Training Data: UCF101 + Training Resources: 8 GPUs + pseudo heatmap: keypoint + Name: slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint + Results: + - Dataset: UCF101 + Metrics: + Top 1 Accuracy: 87.0 + Task: Skeleton-based Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint.json + Training Log: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint.log + Weights: https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint-cae8aa4a.pth diff --git a/configs/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint.py b/configs/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint.py new file mode 100644 index 0000000000..158469e107 --- /dev/null +++ b/configs/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint.py @@ -0,0 +1,131 @@ +model = dict( + type='Recognizer3D', + backbone=dict( + type='ResNet3dSlowOnly', + depth=50, + pretrained=None, + in_channels=17, + base_channels=32, + num_stages=3, + out_indices=(2, ), + stage_blocks=(3, 4, 6), + conv1_stride_s=1, + pool1_stride_s=1, + inflate=(0, 1, 1), + spatial_strides=(2, 2, 2), + temporal_strides=(1, 1, 2), + dilations=(1, 1, 1)), + cls_head=dict( + type='I3DHead', + in_channels=512, + num_classes=51, + spatial_type='avg', + dropout_ratio=0.5), + train_cfg=dict(), + test_cfg=dict(average_clips='prob')) + +dataset_type = 'PoseDataset' +ann_file = 'data/posec3d/hmdb51.pkl' +left_kp = [1, 3, 5, 7, 9, 11, 13, 15] +right_kp = [2, 4, 6, 8, 10, 12, 14, 16] +train_pipeline = [ + 
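+    # Training pipeline: uniformly sample 48 pose frames per clip, decode the
+    # stored 2D keypoints, crop tightly around the persons, apply resize /
+    # random-crop / flip augmentation, then render per-keypoint Gaussian
+    # heatmaps (sigma=0.6) that the 3D-CNN consumes in NCTHW layout.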
dict(type='UniformSampleFrames', clip_len=48), + dict(type='PoseDecode'), + dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True), + dict(type='Resize', scale=(-1, 64)), + dict(type='RandomResizedCrop', area_range=(0.56, 1.0)), + dict(type='Resize', scale=(48, 48), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5, left_kp=left_kp, right_kp=right_kp), + dict( + type='GeneratePoseTarget', + sigma=0.6, + use_score=True, + with_kp=True, + with_limb=False), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict(type='UniformSampleFrames', clip_len=48, num_clips=1, test_mode=True), + dict(type='PoseDecode'), + dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True), + dict(type='Resize', scale=(-1, 56)), + dict(type='CenterCrop', crop_size=56), + dict( + type='GeneratePoseTarget', + sigma=0.6, + use_score=True, + with_kp=True, + with_limb=False), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='UniformSampleFrames', clip_len=48, num_clips=10, test_mode=True), + dict(type='PoseDecode'), + dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True), + dict(type='Resize', scale=(-1, 56)), + dict(type='CenterCrop', crop_size=56), + dict( + type='GeneratePoseTarget', + sigma=0.6, + use_score=True, + with_kp=True, + with_limb=False, + double=True, + left_kp=left_kp, + right_kp=right_kp), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=16, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), + train=dict( + type='RepeatDataset', + times=10, + dataset=dict( + type=dataset_type, + ann_file=ann_file, + split='train1', + data_prefix='', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=ann_file, + split='test1', + data_prefix='', + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file, + split='test1', + data_prefix='', + pipeline=test_pipeline)) +# optimizer +optimizer = dict( + type='SGD', lr=0.01, momentum=0.9, + weight_decay=0.0001) # this lr is used for 8 gpus +optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2)) +# learning policy +lr_config = dict(policy='step', step=[9, 11]) +total_epochs = 12 +checkpoint_config = dict(interval=1) +workflow = [('train', 1)] +evaluation = dict( + interval=1, metrics=['top_k_accuracy', 'mean_class_accuracy'], topk=(1, 5)) +log_config = dict( + interval=20, hooks=[ + dict(type='TextLoggerHook'), + ]) +dist_params = dict(backend='nccl') +log_level = 'INFO' +work_dir = './work_dirs/posec3d_iclr/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint' # noqa: E501 +load_from = 'https://download.openmmlab.com/mmaction/skeleton/posec3d/k400_posec3d-041f49c6.pth' # noqa: E501 +resume_from = None +find_unused_parameters = True diff --git a/configs/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint.py b/configs/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint.py new file mode 100644 index 0000000000..6e5f34d3d3 --- /dev/null +++ b/configs/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint.py @@ -0,0 +1,131 @@ +model = dict( + type='Recognizer3D', + backbone=dict( + 
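+        # SlowOnly backbone operating on 17-channel pseudo-heatmap volumes
+        # (one input channel per keypoint, hence in_channels=17 below),
+        # not on RGB frames.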
type='ResNet3dSlowOnly', + depth=50, + pretrained=None, + in_channels=17, + base_channels=32, + num_stages=3, + out_indices=(2, ), + stage_blocks=(3, 4, 6), + conv1_stride_s=1, + pool1_stride_s=1, + inflate=(0, 1, 1), + spatial_strides=(2, 2, 2), + temporal_strides=(1, 1, 2), + dilations=(1, 1, 1)), + cls_head=dict( + type='I3DHead', + in_channels=512, + num_classes=101, + spatial_type='avg', + dropout_ratio=0.5), + train_cfg=dict(), + test_cfg=dict(average_clips='prob')) + +dataset_type = 'PoseDataset' +ann_file = 'data/posec3d/ucf101.pkl' +left_kp = [1, 3, 5, 7, 9, 11, 13, 15] +right_kp = [2, 4, 6, 8, 10, 12, 14, 16] +train_pipeline = [ + dict(type='UniformSampleFrames', clip_len=48), + dict(type='PoseDecode'), + dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True), + dict(type='Resize', scale=(-1, 64)), + dict(type='RandomResizedCrop', area_range=(0.56, 1.0)), + dict(type='Resize', scale=(48, 48), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5, left_kp=left_kp, right_kp=right_kp), + dict( + type='GeneratePoseTarget', + sigma=0.6, + use_score=True, + with_kp=True, + with_limb=False), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict(type='UniformSampleFrames', clip_len=48, num_clips=1, test_mode=True), + dict(type='PoseDecode'), + dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True), + dict(type='Resize', scale=(-1, 56)), + dict(type='CenterCrop', crop_size=56), + dict( + type='GeneratePoseTarget', + sigma=0.6, + use_score=True, + with_kp=True, + with_limb=False), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='UniformSampleFrames', clip_len=48, num_clips=10, test_mode=True), + dict(type='PoseDecode'), + dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True), + dict(type='Resize', scale=(-1, 56)), + dict(type='CenterCrop', crop_size=56), + dict( + type='GeneratePoseTarget', + sigma=0.6, + use_score=True, + with_kp=True, + with_limb=False, + double=True, + left_kp=left_kp, + right_kp=right_kp), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=16, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), + train=dict( + type='RepeatDataset', + times=10, + dataset=dict( + type=dataset_type, + ann_file=ann_file, + split='train1', + data_prefix='', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=ann_file, + split='test1', + data_prefix='', + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file, + split='test1', + data_prefix='', + pipeline=test_pipeline)) +# optimizer +optimizer = dict( + type='SGD', lr=0.01, momentum=0.9, + weight_decay=0.0003) # this lr is used for 8 gpus +optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2)) +# learning policy +lr_config = dict(policy='step', step=[9, 11]) +total_epochs = 12 +checkpoint_config = dict(interval=1) +workflow = [('train', 1)] +evaluation = dict( + interval=1, metrics=['top_k_accuracy', 'mean_class_accuracy'], topk=(1, 5)) +log_config = dict( + interval=20, hooks=[ + dict(type='TextLoggerHook'), + ]) +dist_params = dict(backend='nccl') +log_level = 'INFO' +work_dir = 
'./work_dirs/posec3d_iclr/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint' # noqa: E501
+load_from = 'https://download.openmmlab.com/mmaction/skeleton/posec3d/k400_posec3d-041f49c6.pth' # noqa: E501
+resume_from = None
+find_unused_parameters = True
diff --git a/mmaction/datasets/pose_dataset.py b/mmaction/datasets/pose_dataset.py
index b9fb509090..2bbea4c434 100644
--- a/mmaction/datasets/pose_dataset.py
+++ b/mmaction/datasets/pose_dataset.py
@@ -23,6 +23,9 @@ class PoseDataset(BaseDataset):
     Args:
         ann_file (str): Path to the annotation file.
         pipeline (list[dict | callable]): A sequence of data transforms.
+        split (str | None): The dataset split used. Only applicable to UCF or
+            HMDB. Allowed choices are 'train1', 'test1', 'train2', 'test2',
+            'train3', 'test3'. Default: None.
         valid_ratio (float | None): The valid_ratio for videos in KineticsPose.
             For a video with n frames, it is a valid training sample only if
             n * valid_ratio frames have human pose. None means not applicable
@@ -40,11 +43,14 @@ class PoseDataset(BaseDataset):
     def __init__(self,
                  ann_file,
                  pipeline,
+                 split=None,
                  valid_ratio=None,
                  box_thr=None,
                  class_prob=None,
                  **kwargs):
         modality = 'Pose'
+        # split, applicable to ucf or hmdb
+        self.split = split
         super().__init__(
             ann_file, pipeline, start_index=0, modality=modality, **kwargs)
 
@@ -92,8 +98,16 @@ def load_annotations(self):
 
     def load_pkl_annotations(self):
         data = mmcv.load(self.ann_file)
+        if self.split:
+            split, data = data['split'], data['annotations']
+            identifier = 'filename' if 'filename' in data[0] else 'frame_dir'
+            data = [x for x in data if x[identifier] in split[self.split]]
+
         for item in data:
             # Sometimes we may need to load anno from the file
             if 'filename' in item:
                 item['filename'] = osp.join(self.data_prefix, item['filename'])
+            if 'frame_dir' in item:
+                item['frame_dir'] = osp.join(self.data_prefix,
+                                             item['frame_dir'])
         return data
diff --git a/tools/data/skeleton/README.md b/tools/data/skeleton/README.md
index fc53d6b817..081e942b8d 100644
--- a/tools/data/skeleton/README.md
+++ b/tools/data/skeleton/README.md
@@ -19,7 +19,7 @@ We release the skeleton annotations used in [Revisiting Skeleton-based Action Re
 
 ## Prepare Annotations
 
-Currently, we support FineGYM and NTURGB+D. For FineGYM, you can execute following scripts to prepare the annotations.
+Currently, we support HMDB51, UCF101, FineGYM and NTURGB+D. For FineGYM, you can execute the following scripts to prepare the annotations.
 
 ```shell
 bash download_annotations.sh ${DATASET}
@@ -33,6 +33,8 @@ For those who have not enough computations for pose extraction, we provide the o
 - ntu60_xsub_val: https://download.openmmlab.com/mmaction/posec3d/ntu60_xsub_val.pkl
 - ntu120_xsub_train: https://download.openmmlab.com/mmaction/posec3d/ntu120_xsub_train.pkl
 - ntu120_xsub_val: https://download.openmmlab.com/mmaction/posec3d/ntu120_xsub_val.pkl
+- hmdb51: https://download.openmmlab.com/mmaction/posec3d/hmdb51.pkl
+- ucf101: https://download.openmmlab.com/mmaction/posec3d/ucf101.pkl
 
 To generate 2D pose annotations for a single video, first, you need to install mmdetection and mmpose from src code. After that, you need to replace the placeholder `mmdet_root` and `mmpose_root` in `ntu_pose_extraction.py` with your installation path. Then you can use following scripts for NTURGB+D video pose extraction:
 
@@ -99,4 +101,6 @@ For skeleton data visualization, you need also to prepare the RGB videos.
Please - [x] NTU120_XSub - [x] NTU60_XView - [x] NTU120_XSet +- [x] UCF101 +- [x] HMDB51 - [ ] Kinetics From c7251e98e498da5e3604781176afe6d12abf3178 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Wed, 20 Oct 2021 20:18:52 +0800 Subject: [PATCH 280/414] [Minor] Rename: formating -> formatting (#1227) --- .pre-commit-config.yaml | 2 +- mmaction/datasets/pipelines/__init__.py | 6 +++--- mmaction/datasets/pipelines/augmentations.py | 2 +- mmaction/datasets/pipelines/{formating.py => formatting.py} | 0 4 files changed, 5 insertions(+), 5 deletions(-) rename mmaction/datasets/pipelines/{formating.py => formatting.py} (100%) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index da61b68786..d2e5d004cc 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -43,4 +43,4 @@ repos: rev: v2.1.0 hooks: - id: codespell - args: ["--skip", "*.ipynb,tools/data/hvu/label_map.json", "-L", "formating,te,nd,thre,Gool,gool"] + args: ["--skip", "*.ipynb,tools/data/hvu/label_map.json", "-L", "te,nd,thre,Gool,gool"] diff --git a/mmaction/datasets/pipelines/__init__.py b/mmaction/datasets/pipelines/__init__.py index e3745644ac..0ec9273077 100644 --- a/mmaction/datasets/pipelines/__init__.py +++ b/mmaction/datasets/pipelines/__init__.py @@ -5,9 +5,9 @@ RandomResizedCrop, Resize, TenCrop, ThreeCrop, TorchvisionTrans) from .compose import Compose -from .formating import (Collect, FormatAudioShape, FormatGCNInput, FormatShape, - ImageToTensor, Rename, ToDataContainer, ToTensor, - Transpose) +from .formatting import (Collect, FormatAudioShape, FormatGCNInput, + FormatShape, ImageToTensor, Rename, ToDataContainer, + ToTensor, Transpose) from .loading import (ArrayDecode, AudioDecode, AudioDecodeInit, AudioFeatureSelector, BuildPseudoClip, DecordDecode, DecordInit, DenseSampleFrames, diff --git a/mmaction/datasets/pipelines/augmentations.py b/mmaction/datasets/pipelines/augmentations.py index 0f88adc769..33f3b763fd 100644 --- a/mmaction/datasets/pipelines/augmentations.py +++ b/mmaction/datasets/pipelines/augmentations.py @@ -10,7 +10,7 @@ from torch.nn.modules.utils import _pair from ..builder import PIPELINES -from .formating import to_tensor +from .formatting import to_tensor def _combine_quadruple(a, b): diff --git a/mmaction/datasets/pipelines/formating.py b/mmaction/datasets/pipelines/formatting.py similarity index 100% rename from mmaction/datasets/pipelines/formating.py rename to mmaction/datasets/pipelines/formatting.py From 5fad5ec10275fd9869690bb8a0347c657b6334cd Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Thu, 21 Oct 2021 16:34:36 +0800 Subject: [PATCH 281/414] [Feature] Generate pose annotation from NTURGB-D raw skeleton (#1218) * master * master 0721 * 1011 add nturgbd raw skeleton convert * 1011 raw skeleton convert * 1011 ntu skeleton convert * 1019 convert 3d * 1019 convert 3d * 1019 convert 3d * 1020 add ntu120 * 1020 add ntu120 * 1020 * 1020 add guide * 1020 add guide * 1020 remove config * 1021 cancel fixed size 300 * 1021 cancel fixed size 300 * 1021 cancel fixed size 300 * 1021 change channel order * 1021 change channel order * Update gen_ntu_rgbd_raw.py * Update README.md * 1021 fix keypoint len * 1021 fix keypoint len * 1021 fix keypoint len Co-authored-by: Haodong Duan --- ...RGBD120_samples_with_missing_skeletons.txt | 535 ++++++++++++++++++ ...TU_RGBD_samples_with_missing_skeletons.txt | 302 ++++++++++ tools/data/skeleton/README.md | 17 + tools/data/skeleton/gen_ntu_rgbd_raw.py | 354 ++++++++++++ 
tools/data/skeleton/ntu_pose_extraction.py | 2 +- 5 files changed, 1209 insertions(+), 1 deletion(-) create mode 100644 tools/data/skeleton/NTU_RGBD120_samples_with_missing_skeletons.txt create mode 100644 tools/data/skeleton/NTU_RGBD_samples_with_missing_skeletons.txt create mode 100644 tools/data/skeleton/gen_ntu_rgbd_raw.py diff --git a/tools/data/skeleton/NTU_RGBD120_samples_with_missing_skeletons.txt b/tools/data/skeleton/NTU_RGBD120_samples_with_missing_skeletons.txt new file mode 100644 index 0000000000..e37c94eb42 --- /dev/null +++ b/tools/data/skeleton/NTU_RGBD120_samples_with_missing_skeletons.txt @@ -0,0 +1,535 @@ +S001C002P005R002A008 +S001C002P006R001A008 +S001C003P002R001A055 +S001C003P002R002A012 +S001C003P005R002A004 +S001C003P005R002A005 +S001C003P005R002A006 +S001C003P006R002A008 +S002C002P011R002A030 +S002C003P008R001A020 +S002C003P010R002A010 +S002C003P011R002A007 +S002C003P011R002A011 +S002C003P014R002A007 +S003C001P019R001A055 +S003C002P002R002A055 +S003C002P018R002A055 +S003C003P002R001A055 +S003C003P016R001A055 +S003C003P018R002A024 +S004C002P003R001A013 +S004C002P008R001A009 +S004C002P020R001A003 +S004C002P020R001A004 +S004C002P020R001A012 +S004C002P020R001A020 +S004C002P020R001A021 +S004C002P020R001A036 +S005C002P004R001A001 +S005C002P004R001A003 +S005C002P010R001A016 +S005C002P010R001A017 +S005C002P010R001A048 +S005C002P010R001A049 +S005C002P016R001A009 +S005C002P016R001A010 +S005C002P018R001A003 +S005C002P018R001A028 +S005C002P018R001A029 +S005C003P016R002A009 +S005C003P018R002A013 +S005C003P021R002A057 +S006C001P001R002A055 +S006C002P007R001A005 +S006C002P007R001A006 +S006C002P016R001A043 +S006C002P016R001A051 +S006C002P016R001A052 +S006C002P022R001A012 +S006C002P023R001A020 +S006C002P023R001A021 +S006C002P023R001A022 +S006C002P023R001A023 +S006C002P024R001A018 +S006C002P024R001A019 +S006C003P001R002A013 +S006C003P007R002A009 +S006C003P007R002A010 +S006C003P007R002A025 +S006C003P016R001A060 +S006C003P017R001A055 +S006C003P017R002A013 +S006C003P017R002A014 +S006C003P017R002A015 +S006C003P022R002A013 +S007C001P018R002A050 +S007C001P025R002A051 +S007C001P028R001A050 +S007C001P028R001A051 +S007C001P028R001A052 +S007C002P008R002A008 +S007C002P015R002A055 +S007C002P026R001A008 +S007C002P026R001A009 +S007C002P026R001A010 +S007C002P026R001A011 +S007C002P026R001A012 +S007C002P026R001A050 +S007C002P027R001A011 +S007C002P027R001A013 +S007C002P028R002A055 +S007C003P007R001A002 +S007C003P007R001A004 +S007C003P019R001A060 +S007C003P027R002A001 +S007C003P027R002A002 +S007C003P027R002A003 +S007C003P027R002A004 +S007C003P027R002A005 +S007C003P027R002A006 +S007C003P027R002A007 +S007C003P027R002A008 +S007C003P027R002A009 +S007C003P027R002A010 +S007C003P027R002A011 +S007C003P027R002A012 +S007C003P027R002A013 +S008C002P001R001A009 +S008C002P001R001A010 +S008C002P001R001A014 +S008C002P001R001A015 +S008C002P001R001A016 +S008C002P001R001A018 +S008C002P001R001A019 +S008C002P008R002A059 +S008C002P025R001A060 +S008C002P029R001A004 +S008C002P031R001A005 +S008C002P031R001A006 +S008C002P032R001A018 +S008C002P034R001A018 +S008C002P034R001A019 +S008C002P035R001A059 +S008C002P035R002A002 +S008C002P035R002A005 +S008C003P007R001A009 +S008C003P007R001A016 +S008C003P007R001A017 +S008C003P007R001A018 +S008C003P007R001A019 +S008C003P007R001A020 +S008C003P007R001A021 +S008C003P007R001A022 +S008C003P007R001A023 +S008C003P007R001A025 +S008C003P007R001A026 +S008C003P007R001A028 +S008C003P007R001A029 +S008C003P007R002A003 +S008C003P008R002A050 +S008C003P025R002A002 +S008C003P025R002A011 
+S008C003P025R002A012 +S008C003P025R002A016 +S008C003P025R002A020 +S008C003P025R002A022 +S008C003P025R002A023 +S008C003P025R002A030 +S008C003P025R002A031 +S008C003P025R002A032 +S008C003P025R002A033 +S008C003P025R002A049 +S008C003P025R002A060 +S008C003P031R001A001 +S008C003P031R002A004 +S008C003P031R002A014 +S008C003P031R002A015 +S008C003P031R002A016 +S008C003P031R002A017 +S008C003P032R002A013 +S008C003P033R002A001 +S008C003P033R002A011 +S008C003P033R002A012 +S008C003P034R002A001 +S008C003P034R002A012 +S008C003P034R002A022 +S008C003P034R002A023 +S008C003P034R002A024 +S008C003P034R002A044 +S008C003P034R002A045 +S008C003P035R002A016 +S008C003P035R002A017 +S008C003P035R002A018 +S008C003P035R002A019 +S008C003P035R002A020 +S008C003P035R002A021 +S009C002P007R001A001 +S009C002P007R001A003 +S009C002P007R001A014 +S009C002P008R001A014 +S009C002P015R002A050 +S009C002P016R001A002 +S009C002P017R001A028 +S009C002P017R001A029 +S009C003P017R002A030 +S009C003P025R002A054 +S010C001P007R002A020 +S010C002P016R002A055 +S010C002P017R001A005 +S010C002P017R001A018 +S010C002P017R001A019 +S010C002P019R001A001 +S010C002P025R001A012 +S010C003P007R002A043 +S010C003P008R002A003 +S010C003P016R001A055 +S010C003P017R002A055 +S011C001P002R001A008 +S011C001P018R002A050 +S011C002P008R002A059 +S011C002P016R002A055 +S011C002P017R001A020 +S011C002P017R001A021 +S011C002P018R002A055 +S011C002P027R001A009 +S011C002P027R001A010 +S011C002P027R001A037 +S011C003P001R001A055 +S011C003P002R001A055 +S011C003P008R002A012 +S011C003P015R001A055 +S011C003P016R001A055 +S011C003P019R001A055 +S011C003P025R001A055 +S011C003P028R002A055 +S012C001P019R001A060 +S012C001P019R002A060 +S012C002P015R001A055 +S012C002P017R002A012 +S012C002P025R001A060 +S012C003P008R001A057 +S012C003P015R001A055 +S012C003P015R002A055 +S012C003P016R001A055 +S012C003P017R002A055 +S012C003P018R001A055 +S012C003P018R001A057 +S012C003P019R002A011 +S012C003P019R002A012 +S012C003P025R001A055 +S012C003P027R001A055 +S012C003P027R002A009 +S012C003P028R001A035 +S012C003P028R002A055 +S013C001P015R001A054 +S013C001P017R002A054 +S013C001P018R001A016 +S013C001P028R001A040 +S013C002P015R001A054 +S013C002P017R002A054 +S013C002P028R001A040 +S013C003P008R002A059 +S013C003P015R001A054 +S013C003P017R002A054 +S013C003P025R002A022 +S013C003P027R001A055 +S013C003P028R001A040 +S014C001P027R002A040 +S014C002P015R001A003 +S014C002P019R001A029 +S014C002P025R002A059 +S014C002P027R002A040 +S014C002P039R001A050 +S014C003P007R002A059 +S014C003P015R002A055 +S014C003P019R002A055 +S014C003P025R001A048 +S014C003P027R002A040 +S015C001P008R002A040 +S015C001P016R001A055 +S015C001P017R001A055 +S015C001P017R002A055 +S015C002P007R001A059 +S015C002P008R001A003 +S015C002P008R001A004 +S015C002P008R002A040 +S015C002P015R001A002 +S015C002P016R001A001 +S015C002P016R002A055 +S015C003P008R002A007 +S015C003P008R002A011 +S015C003P008R002A012 +S015C003P008R002A028 +S015C003P008R002A040 +S015C003P025R002A012 +S015C003P025R002A017 +S015C003P025R002A020 +S015C003P025R002A021 +S015C003P025R002A030 +S015C003P025R002A033 +S015C003P025R002A034 +S015C003P025R002A036 +S015C003P025R002A037 +S015C003P025R002A044 +S016C001P019R002A040 +S016C001P025R001A011 +S016C001P025R001A012 +S016C001P025R001A060 +S016C001P040R001A055 +S016C001P040R002A055 +S016C002P008R001A011 +S016C002P019R002A040 +S016C002P025R002A012 +S016C003P008R001A011 +S016C003P008R002A002 +S016C003P008R002A003 +S016C003P008R002A004 +S016C003P008R002A006 +S016C003P008R002A009 +S016C003P019R002A040 +S016C003P039R002A016 +S017C001P016R002A031 +S017C002P007R001A013 
+S017C002P008R001A009 +S017C002P015R001A042 +S017C002P016R002A031 +S017C002P016R002A055 +S017C003P007R002A013 +S017C003P008R001A059 +S017C003P016R002A031 +S017C003P017R001A055 +S017C003P020R001A059 +S019C001P046R001A075 +S019C002P042R001A094 +S019C002P042R001A095 +S019C002P042R001A096 +S019C002P042R001A097 +S019C002P042R001A098 +S019C002P042R001A099 +S019C002P042R001A100 +S019C002P042R001A101 +S019C002P042R001A102 +S019C002P049R002A074 +S019C002P049R002A079 +S019C002P051R001A061 +S019C003P046R001A061 +S019C003P046R002A061 +S019C003P046R002A062 +S020C002P041R001A063 +S020C002P041R001A064 +S020C002P044R001A063 +S020C002P044R001A064 +S020C002P044R001A066 +S020C002P044R001A084 +S020C002P054R001A081 +S021C001P059R001A108 +S021C002P055R001A065 +S021C002P055R001A092 +S021C002P055R001A093 +S021C002P057R001A064 +S021C002P058R001A063 +S021C002P058R001A064 +S021C002P059R001A074 +S021C002P059R001A075 +S021C002P059R001A076 +S021C002P059R001A077 +S021C002P059R001A078 +S021C002P059R001A079 +S021C003P057R002A078 +S021C003P057R002A079 +S021C003P057R002A094 +S022C002P061R001A113 +S022C003P061R002A061 +S022C003P061R002A062 +S022C003P063R002A061 +S022C003P063R002A062 +S022C003P063R002A063 +S022C003P063R002A064 +S022C003P063R002A078 +S022C003P064R002A061 +S022C003P064R002A062 +S022C003P065R002A061 +S022C003P065R002A062 +S022C003P065R002A119 +S022C003P067R002A064 +S023C002P055R001A114 +S023C002P055R002A092 +S023C002P059R001A075 +S023C002P063R001A075 +S023C003P055R002A093 +S023C003P055R002A094 +S023C003P061R002A061 +S023C003P064R001A092 +S024C001P063R001A109 +S024C002P062R002A074 +S024C002P067R001A100 +S024C002P067R001A101 +S024C002P067R001A102 +S024C002P067R001A103 +S024C003P062R002A074 +S024C003P063R002A061 +S024C003P063R002A062 +S025C001P055R002A119 +S025C003P056R002A119 +S025C003P059R002A115 +S026C002P044R001A061 +S026C002P044R001A062 +S026C002P070R001A092 +S026C003P069R002A075 +S026C003P074R002A061 +S026C003P074R002A062 +S026C003P075R001A117 +S026C003P075R001A118 +S027C001P082R001A063 +S027C002P044R002A092 +S027C002P079R001A061 +S027C002P079R001A062 +S027C002P079R001A063 +S027C002P079R001A064 +S027C002P082R001A092 +S027C002P084R001A061 +S027C002P084R001A062 +S027C002P086R001A061 +S027C003P041R002A087 +S027C003P080R002A061 +S027C003P082R002A061 +S027C003P082R002A062 +S027C003P086R002A061 +S027C003P086R002A062 +S028C001P087R001A061 +S028C002P041R001A091 +S028C002P087R001A061 +S028C003P042R002A064 +S028C003P046R002A063 +S028C003P046R002A066 +S028C003P046R002A067 +S028C003P046R002A068 +S028C003P046R002A069 +S028C003P046R002A070 +S028C003P046R002A071 +S028C003P046R002A072 +S028C003P046R002A074 +S028C003P046R002A075 +S028C003P046R002A077 +S028C003P046R002A081 +S028C003P046R002A082 +S028C003P046R002A083 +S028C003P046R002A084 +S028C003P048R002A061 +S028C003P048R002A062 +S028C003P048R002A073 +S028C003P073R002A073 +S028C003P087R001A061 +S028C003P087R002A061 +S028C003P087R002A062 +S029C001P043R002A092 +S029C001P044R002A092 +S029C001P048R001A073 +S029C001P089R001A063 +S029C002P041R001A074 +S029C002P041R001A084 +S029C002P044R001A091 +S029C002P048R001A075 +S029C002P048R001A081 +S029C002P074R001A081 +S029C002P074R001A095 +S029C002P074R001A096 +S029C002P080R001A091 +S029C002P088R001A066 +S029C002P089R001A065 +S029C002P090R001A067 +S029C003P008R002A065 +S029C003P008R002A067 +S029C003P041R001A089 +S029C003P043R001A080 +S029C003P043R001A092 +S029C003P043R001A105 +S029C003P043R002A085 +S029C003P043R002A086 +S029C003P044R002A106 +S029C003P048R001A065 +S029C003P048R002A073 +S029C003P048R002A074 +S029C003P048R002A075 
+S029C003P048R002A076 +S029C003P048R002A092 +S029C003P048R002A094 +S029C003P051R002A073 +S029C003P051R002A074 +S029C003P051R002A075 +S029C003P051R002A076 +S029C003P051R002A077 +S029C003P051R002A078 +S029C003P051R002A079 +S029C003P051R002A080 +S029C003P051R002A081 +S029C003P051R002A082 +S029C003P051R002A083 +S029C003P051R002A084 +S029C003P051R002A085 +S029C003P051R002A086 +S029C003P051R002A110 +S029C003P067R001A098 +S029C003P074R002A110 +S029C003P080R002A066 +S029C003P088R002A078 +S029C003P089R001A075 +S029C003P089R002A061 +S029C003P089R002A062 +S029C003P089R002A063 +S029C003P090R002A092 +S029C003P090R002A095 +S030C002P091R002A091 +S030C002P091R002A092 +S030C002P091R002A093 +S030C002P091R002A094 +S030C002P091R002A095 +S030C002P091R002A096 +S030C002P091R002A097 +S030C002P091R002A098 +S030C002P091R002A099 +S030C002P091R002A100 +S030C002P091R002A101 +S030C002P091R002A102 +S030C002P091R002A103 +S030C002P091R002A104 +S030C002P091R002A105 +S030C003P044R002A065 +S030C003P044R002A081 +S030C003P044R002A084 +S031C002P042R001A111 +S031C002P051R001A061 +S031C002P051R001A062 +S031C002P067R001A067 +S031C002P067R001A068 +S031C002P067R001A069 +S031C002P067R001A070 +S031C002P067R001A071 +S031C002P067R001A072 +S031C002P082R001A075 +S031C002P082R002A117 +S031C002P097R001A061 +S031C002P097R001A062 +S031C003P043R002A074 +S031C003P043R002A075 +S031C003P044R002A094 +S031C003P082R002A067 +S031C003P082R002A068 +S031C003P082R002A069 +S031C003P082R002A070 +S031C003P082R002A071 +S031C003P082R002A072 +S031C003P082R002A073 +S031C003P082R002A075 +S031C003P082R002A076 +S031C003P082R002A077 +S031C003P082R002A084 +S031C003P082R002A085 +S031C003P082R002A086 +S032C002P067R001A092 +S032C003P067R002A066 +S032C003P067R002A067 +S032C003P067R002A075 +S032C003P067R002A076 +S032C003P067R002A077 diff --git a/tools/data/skeleton/NTU_RGBD_samples_with_missing_skeletons.txt b/tools/data/skeleton/NTU_RGBD_samples_with_missing_skeletons.txt new file mode 100644 index 0000000000..5ad472e404 --- /dev/null +++ b/tools/data/skeleton/NTU_RGBD_samples_with_missing_skeletons.txt @@ -0,0 +1,302 @@ +S001C002P005R002A008 +S001C002P006R001A008 +S001C003P002R001A055 +S001C003P002R002A012 +S001C003P005R002A004 +S001C003P005R002A005 +S001C003P005R002A006 +S001C003P006R002A008 +S002C002P011R002A030 +S002C003P008R001A020 +S002C003P010R002A010 +S002C003P011R002A007 +S002C003P011R002A011 +S002C003P014R002A007 +S003C001P019R001A055 +S003C002P002R002A055 +S003C002P018R002A055 +S003C003P002R001A055 +S003C003P016R001A055 +S003C003P018R002A024 +S004C002P003R001A013 +S004C002P008R001A009 +S004C002P020R001A003 +S004C002P020R001A004 +S004C002P020R001A012 +S004C002P020R001A020 +S004C002P020R001A021 +S004C002P020R001A036 +S005C002P004R001A001 +S005C002P004R001A003 +S005C002P010R001A016 +S005C002P010R001A017 +S005C002P010R001A048 +S005C002P010R001A049 +S005C002P016R001A009 +S005C002P016R001A010 +S005C002P018R001A003 +S005C002P018R001A028 +S005C002P018R001A029 +S005C003P016R002A009 +S005C003P018R002A013 +S005C003P021R002A057 +S006C001P001R002A055 +S006C002P007R001A005 +S006C002P007R001A006 +S006C002P016R001A043 +S006C002P016R001A051 +S006C002P016R001A052 +S006C002P022R001A012 +S006C002P023R001A020 +S006C002P023R001A021 +S006C002P023R001A022 +S006C002P023R001A023 +S006C002P024R001A018 +S006C002P024R001A019 +S006C003P001R002A013 +S006C003P007R002A009 +S006C003P007R002A010 +S006C003P007R002A025 +S006C003P016R001A060 +S006C003P017R001A055 +S006C003P017R002A013 +S006C003P017R002A014 +S006C003P017R002A015 +S006C003P022R002A013 +S007C001P018R002A050 +S007C001P025R002A051 
+S007C001P028R001A050 +S007C001P028R001A051 +S007C001P028R001A052 +S007C002P008R002A008 +S007C002P015R002A055 +S007C002P026R001A008 +S007C002P026R001A009 +S007C002P026R001A010 +S007C002P026R001A011 +S007C002P026R001A012 +S007C002P026R001A050 +S007C002P027R001A011 +S007C002P027R001A013 +S007C002P028R002A055 +S007C003P007R001A002 +S007C003P007R001A004 +S007C003P019R001A060 +S007C003P027R002A001 +S007C003P027R002A002 +S007C003P027R002A003 +S007C003P027R002A004 +S007C003P027R002A005 +S007C003P027R002A006 +S007C003P027R002A007 +S007C003P027R002A008 +S007C003P027R002A009 +S007C003P027R002A010 +S007C003P027R002A011 +S007C003P027R002A012 +S007C003P027R002A013 +S008C002P001R001A009 +S008C002P001R001A010 +S008C002P001R001A014 +S008C002P001R001A015 +S008C002P001R001A016 +S008C002P001R001A018 +S008C002P001R001A019 +S008C002P008R002A059 +S008C002P025R001A060 +S008C002P029R001A004 +S008C002P031R001A005 +S008C002P031R001A006 +S008C002P032R001A018 +S008C002P034R001A018 +S008C002P034R001A019 +S008C002P035R001A059 +S008C002P035R002A002 +S008C002P035R002A005 +S008C003P007R001A009 +S008C003P007R001A016 +S008C003P007R001A017 +S008C003P007R001A018 +S008C003P007R001A019 +S008C003P007R001A020 +S008C003P007R001A021 +S008C003P007R001A022 +S008C003P007R001A023 +S008C003P007R001A025 +S008C003P007R001A026 +S008C003P007R001A028 +S008C003P007R001A029 +S008C003P007R002A003 +S008C003P008R002A050 +S008C003P025R002A002 +S008C003P025R002A011 +S008C003P025R002A012 +S008C003P025R002A016 +S008C003P025R002A020 +S008C003P025R002A022 +S008C003P025R002A023 +S008C003P025R002A030 +S008C003P025R002A031 +S008C003P025R002A032 +S008C003P025R002A033 +S008C003P025R002A049 +S008C003P025R002A060 +S008C003P031R001A001 +S008C003P031R002A004 +S008C003P031R002A014 +S008C003P031R002A015 +S008C003P031R002A016 +S008C003P031R002A017 +S008C003P032R002A013 +S008C003P033R002A001 +S008C003P033R002A011 +S008C003P033R002A012 +S008C003P034R002A001 +S008C003P034R002A012 +S008C003P034R002A022 +S008C003P034R002A023 +S008C003P034R002A024 +S008C003P034R002A044 +S008C003P034R002A045 +S008C003P035R002A016 +S008C003P035R002A017 +S008C003P035R002A018 +S008C003P035R002A019 +S008C003P035R002A020 +S008C003P035R002A021 +S009C002P007R001A001 +S009C002P007R001A003 +S009C002P007R001A014 +S009C002P008R001A014 +S009C002P015R002A050 +S009C002P016R001A002 +S009C002P017R001A028 +S009C002P017R001A029 +S009C003P017R002A030 +S009C003P025R002A054 +S010C001P007R002A020 +S010C002P016R002A055 +S010C002P017R001A005 +S010C002P017R001A018 +S010C002P017R001A019 +S010C002P019R001A001 +S010C002P025R001A012 +S010C003P007R002A043 +S010C003P008R002A003 +S010C003P016R001A055 +S010C003P017R002A055 +S011C001P002R001A008 +S011C001P018R002A050 +S011C002P008R002A059 +S011C002P016R002A055 +S011C002P017R001A020 +S011C002P017R001A021 +S011C002P018R002A055 +S011C002P027R001A009 +S011C002P027R001A010 +S011C002P027R001A037 +S011C003P001R001A055 +S011C003P002R001A055 +S011C003P008R002A012 +S011C003P015R001A055 +S011C003P016R001A055 +S011C003P019R001A055 +S011C003P025R001A055 +S011C003P028R002A055 +S012C001P019R001A060 +S012C001P019R002A060 +S012C002P015R001A055 +S012C002P017R002A012 +S012C002P025R001A060 +S012C003P008R001A057 +S012C003P015R001A055 +S012C003P015R002A055 +S012C003P016R001A055 +S012C003P017R002A055 +S012C003P018R001A055 +S012C003P018R001A057 +S012C003P019R002A011 +S012C003P019R002A012 +S012C003P025R001A055 +S012C003P027R001A055 +S012C003P027R002A009 +S012C003P028R001A035 +S012C003P028R002A055 +S013C001P015R001A054 +S013C001P017R002A054 +S013C001P018R001A016 +S013C001P028R001A040 
+S013C002P015R001A054 +S013C002P017R002A054 +S013C002P028R001A040 +S013C003P008R002A059 +S013C003P015R001A054 +S013C003P017R002A054 +S013C003P025R002A022 +S013C003P027R001A055 +S013C003P028R001A040 +S014C001P027R002A040 +S014C002P015R001A003 +S014C002P019R001A029 +S014C002P025R002A059 +S014C002P027R002A040 +S014C002P039R001A050 +S014C003P007R002A059 +S014C003P015R002A055 +S014C003P019R002A055 +S014C003P025R001A048 +S014C003P027R002A040 +S015C001P008R002A040 +S015C001P016R001A055 +S015C001P017R001A055 +S015C001P017R002A055 +S015C002P007R001A059 +S015C002P008R001A003 +S015C002P008R001A004 +S015C002P008R002A040 +S015C002P015R001A002 +S015C002P016R001A001 +S015C002P016R002A055 +S015C003P008R002A007 +S015C003P008R002A011 +S015C003P008R002A012 +S015C003P008R002A028 +S015C003P008R002A040 +S015C003P025R002A012 +S015C003P025R002A017 +S015C003P025R002A020 +S015C003P025R002A021 +S015C003P025R002A030 +S015C003P025R002A033 +S015C003P025R002A034 +S015C003P025R002A036 +S015C003P025R002A037 +S015C003P025R002A044 +S016C001P019R002A040 +S016C001P025R001A011 +S016C001P025R001A012 +S016C001P025R001A060 +S016C001P040R001A055 +S016C001P040R002A055 +S016C002P008R001A011 +S016C002P019R002A040 +S016C002P025R002A012 +S016C003P008R001A011 +S016C003P008R002A002 +S016C003P008R002A003 +S016C003P008R002A004 +S016C003P008R002A006 +S016C003P008R002A009 +S016C003P019R002A040 +S016C003P039R002A016 +S017C001P016R002A031 +S017C002P007R001A013 +S017C002P008R001A009 +S017C002P015R001A042 +S017C002P016R002A031 +S017C002P016R002A055 +S017C003P007R002A013 +S017C003P008R001A059 +S017C003P016R002A031 +S017C003P017R001A055 +S017C003P020R001A059 diff --git a/tools/data/skeleton/README.md b/tools/data/skeleton/README.md index 081e942b8d..a58ed66465 100644 --- a/tools/data/skeleton/README.md +++ b/tools/data/skeleton/README.md @@ -94,6 +94,23 @@ For skeleton data visualization, you need also to prepare the RGB videos. Please +## Convert the NTU RGB+D raw skeleton data to our format + +Here we also provide the script for converting the NTU RGB+D raw skeleton data to our format. +First, download the raw skeleton data of NTU-RGBD 60 and NTU-RGBD 120 from https://github.com/shahroudy/NTURGB-D. 
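+
+Each raw sample is a single `.skeleton` file whose name encodes the recording
+metadata (setup S, camera C, performer P, replication R, action A), e.g.
+`S001C001P001R001A001.skeleton`. The converter parses these fields to assign
+every sample to the train or val split of the chosen benchmark, and the
+`*_samples_with_missing_skeletons.txt` lists passed via `--ignored-sample-path`
+enumerate samples with missing skeleton annotations, which are skipped. After
+running the commands below, a quick sanity check on a generated pickle might
+look like the following sketch (the path is illustrative and depends on the
+`--out-folder` you pass):
+
+```python
+import mmcv
+
+# each entry is a dict with 'frame_dir', 'label', 'total_frames', 'img_shape'
+# and 'keypoint' of shape (M, T, V, C): persons x frames x 25 joints x xyz
+annos = mmcv.load('your_nturgbd60_output_path/train.pkl')
+sample = annos[0]
+print(sample['frame_dir'], sample['label'], sample['keypoint'].shape)
+```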
+ +For NTU-RGBD 60, preprocess data and convert the data format with + +```python +python gen_ntu_rgbd_raw.py --data-path your_raw_nturgbd60_skeleton_path --ignored-sample-path NTU_RGBD_samples_with_missing_skeletons.txt --out-folder your_nturgbd60_output_path --task ntu60 +``` + +For NTU-RGBD 120, preprocess data and convert the data format with + +```python +python gen_ntu_rgbd_raw.py --data-path your_raw_nturgbd120_skeleton_path --ignored-sample-path NTU_RGBD120_samples_with_missing_skeletons.txt --out-folder your_nturgbd120_output_path --task ntu120 +``` + **TODO**: - [x] FineGYM diff --git a/tools/data/skeleton/gen_ntu_rgbd_raw.py b/tools/data/skeleton/gen_ntu_rgbd_raw.py new file mode 100644 index 0000000000..3e484602bf --- /dev/null +++ b/tools/data/skeleton/gen_ntu_rgbd_raw.py @@ -0,0 +1,354 @@ +import argparse +import math +import os +import os.path as osp + +import mmcv +import numpy as np + +training_subjects_60 = [ + 1, 2, 4, 5, 8, 9, 13, 14, 15, 16, 17, 18, 19, 25, 27, 28, 31, 34, 35, 38 +] +training_cameras_60 = [2, 3] +training_subjects_120 = [ + 1, 2, 4, 5, 8, 9, 13, 14, 15, 16, 17, 18, 19, 25, 27, 28, 31, 34, 35, 38, + 45, 46, 47, 49, 50, 52, 53, 54, 55, 56, 57, 58, 59, 70, 74, 78, 80, 81, 82, + 83, 84, 85, 86, 89, 91, 92, 93, 94, 95, 97, 98, 100, 103 +] +training_setups_120 = [ + 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32 +] +max_body_true = 2 +max_body_kinect = 4 +num_joint = 25 +max_frame = 300 + + +def unit_vector(vector): + """Returns the unit vector of the vector.""" + return vector / np.linalg.norm(vector) + + +def angle_between(v1, v2): + """Returns the angle in radians between vectors 'v1' and 'v2':: + + >>> angle_between((1, 0, 0), (0, 1, 0)) + 1.5707963267948966 + >>> angle_between((1, 0, 0), (1, 0, 0)) + 0.0 + >>> angle_between((1, 0, 0), (-1, 0, 0)) + 3.141592653589793 + """ + if np.abs(v1).sum() < 1e-6 or np.abs(v2).sum() < 1e-6: + return 0 + v1_u = unit_vector(v1) + v2_u = unit_vector(v2) + return np.arccos(np.clip(np.dot(v1_u, v2_u), -1.0, 1.0)) + + +def rotation_matrix(axis, theta): + """Return the rotation matrix associated with counterclockwise rotation + about the given axis by theta radians.""" + if np.abs(axis).sum() < 1e-6 or np.abs(theta) < 1e-6: + return np.eye(3) + axis = np.asarray(axis) + axis = axis / math.sqrt(np.dot(axis, axis)) + a = math.cos(theta / 2.0) + b, c, d = -axis * math.sin(theta / 2.0) + aa, bb, cc, dd = a * a, b * b, c * c, d * d + bc, ad, ac, ab, bd, cd = b * c, a * d, a * c, a * b, b * d, c * d + return np.array([[aa + bb - cc - dd, 2 * (bc + ad), 2 * (bd - ac)], + [2 * (bc - ad), aa + cc - bb - dd, 2 * (cd + ab)], + [2 * (bd + ac), 2 * (cd - ab), aa + dd - bb - cc]]) + + +def pre_normalization(data, zaxis=[0, 1], xaxis=[8, 4]): + N, C, T, V, M = data.shape + s = np.transpose(data, [0, 4, 2, 3, 1]) # N C T V M -> N M T V C + + print('pad the null frames with the previous frames') + prog_bar = mmcv.ProgressBar(len(s)) + for i_s, skeleton in enumerate(s): + if skeleton.sum() == 0: + print(i_s, ' has no skeleton') + for i_p, person in enumerate(skeleton): + if person.sum() == 0: + continue + if person[0].sum() == 0: + index = (person.sum(-1).sum(-1) != 0) + tmp = person[index].copy() + person *= 0 + person[:len(tmp)] = tmp + + for i_f, frame in enumerate(person): + if frame.sum() == 0: + if person[i_f:].sum() == 0: + rest = len(person) - i_f + num = int(np.ceil(rest / i_f)) + pad = np.concatenate( + [person[0:i_f] for _ in range(num)], 0)[:rest] + s[i_s, i_p, i_f:] = pad + break + prog_bar.update() + + print('sub 
the center joint #1 (spine joint in ntu and ' + 'neck joint in kinetics)') + prog_bar = mmcv.ProgressBar(len(s)) + for i_s, skeleton in enumerate(s): + if skeleton.sum() == 0: + continue + main_body_center = skeleton[0][:, 1:2, :].copy() + for i_p, person in enumerate(skeleton): + if person.sum() == 0: + continue + mask = (person.sum(-1) != 0).reshape(T, V, 1) + s[i_s, i_p] = (s[i_s, i_p] - main_body_center) * mask + prog_bar.update() + + print('parallel the bone between hip(jpt 0) and ' + 'spine(jpt 1) of the first person to the z axis') + prog_bar = mmcv.ProgressBar(len(s)) + for i_s, skeleton in enumerate(s): + if skeleton.sum() == 0: + continue + joint_bottom = skeleton[0, 0, zaxis[0]] + joint_top = skeleton[0, 0, zaxis[1]] + axis = np.cross(joint_top - joint_bottom, [0, 0, 1]) + angle = angle_between(joint_top - joint_bottom, [0, 0, 1]) + matrix_z = rotation_matrix(axis, angle) + for i_p, person in enumerate(skeleton): + if person.sum() == 0: + continue + for i_f, frame in enumerate(person): + if frame.sum() == 0: + continue + for i_j, joint in enumerate(frame): + s[i_s, i_p, i_f, i_j] = np.dot(matrix_z, joint) + prog_bar.update() + + print('parallel the bone between right shoulder(jpt 8) and ' + 'left shoulder(jpt 4) of the first person to the x axis') + prog_bar = mmcv.ProgressBar(len(s)) + for i_s, skeleton in enumerate(s): + if skeleton.sum() == 0: + continue + joint_rshoulder = skeleton[0, 0, xaxis[0]] + joint_lshoulder = skeleton[0, 0, xaxis[1]] + axis = np.cross(joint_rshoulder - joint_lshoulder, [1, 0, 0]) + angle = angle_between(joint_rshoulder - joint_lshoulder, [1, 0, 0]) + matrix_x = rotation_matrix(axis, angle) + for i_p, person in enumerate(skeleton): + if person.sum() == 0: + continue + for i_f, frame in enumerate(person): + if frame.sum() == 0: + continue + for i_j, joint in enumerate(frame): + s[i_s, i_p, i_f, i_j] = np.dot(matrix_x, joint) + prog_bar.update() + + data = np.transpose(s, [0, 4, 2, 3, 1]) + return data + + +def read_skeleton_filter(file): + with open(file, 'r') as f: + skeleton_sequence = {} + skeleton_sequence['num_frame'] = int(f.readline()) + skeleton_sequence['frameInfo'] = [] + + for t in range(skeleton_sequence['num_frame']): + frame_info = {} + frame_info['numBody'] = int(f.readline()) + frame_info['bodyInfo'] = [] + + for m in range(frame_info['numBody']): + body_info = {} + body_info_key = [ + 'bodyID', 'clipedEdges', 'handLeftConfidence', + 'handLeftState', 'handRightConfidence', 'handRightState', + 'isResticted', 'leanX', 'leanY', 'trackingState' + ] + body_info = { + k: float(v) + for k, v in zip(body_info_key, + f.readline().split()) + } + body_info['numJoint'] = int(f.readline()) + body_info['jointInfo'] = [] + for v in range(body_info['numJoint']): + joint_info_key = [ + 'x', 'y', 'z', 'depthX', 'depthY', 'colorX', 'colorY', + 'orientationW', 'orientationX', 'orientationY', + 'orientationZ', 'trackingState' + ] + joint_info = { + k: float(v) + for k, v in zip(joint_info_key, + f.readline().split()) + } + body_info['jointInfo'].append(joint_info) + frame_info['bodyInfo'].append(body_info) + skeleton_sequence['frameInfo'].append(frame_info) + + return skeleton_sequence + + +def get_nonzero_std(s): # T V C + index = s.sum(-1).sum(-1) != 0 + s = s[index] + if len(s) != 0: + s = s[:, :, 0].std() + s[:, :, 1].std() + s[:, :, + 2].std() # three channels + else: + s = 0 + return s + + +def read_xyz(file, max_body=2, num_joint=25): + seq_info = read_skeleton_filter(file) + # num_frame = seq_info['num_frame'] + data = np.zeros((max_body, 
seq_info['num_frame'], num_joint, 3)) + for n, f in enumerate(seq_info['frameInfo']): + for m, b in enumerate(f['bodyInfo']): + for j, v in enumerate(b['jointInfo']): + if m < max_body and j < num_joint: + data[m, n, j, :] = [v['x'], v['y'], v['z']] + else: + pass + + # select two max energy body + energy = np.array([get_nonzero_std(x) for x in data]) + index = energy.argsort()[::-1][0:max_body_true] + data = data[index] + data = data.transpose(3, 1, 2, 0) + return data + + +def gendata(data_path, + out_path, + ignored_sample_path=None, + task='ntu60', + benchmark='xsub', + part='train', + pre_norm=True): + if ignored_sample_path is not None: + with open(ignored_sample_path, 'r') as f: + ignored_samples = [ + line.strip() + '.skeleton' for line in f.readlines() + ] + else: + ignored_samples = [] + + sample_name = [] + sample_label = [] + total_frames = [] + results = [] + + for filename in os.listdir(data_path): + if filename in ignored_samples: + continue + + setup_number = int(filename[filename.find('S') + 1:filename.find('S') + + 4]) + action_class = int(filename[filename.find('A') + 1:filename.find('A') + + 4]) + subject_id = int(filename[filename.find('P') + 1:filename.find('P') + + 4]) + camera_id = int(filename[filename.find('C') + 1:filename.find('C') + + 4]) + + if benchmark == 'xsub': + if task == 'ntu60': + istraining = (subject_id in training_subjects_60) + else: + istraining = (subject_id in training_subjects_120) + elif benchmark == 'xview': + istraining = (camera_id in training_cameras_60) + elif benchmark == 'xsetup': + istraining = (setup_number in training_setups_120) + else: + raise ValueError() + + if part == 'train': + issample = istraining + elif part == 'val': + issample = not (istraining) + else: + raise ValueError() + + if issample: + sample_name.append(filename) + sample_label.append(action_class - 1) + + fp = np.zeros((len(sample_label), 3, max_frame, num_joint, max_body_true), + dtype=np.float32) + prog_bar = mmcv.ProgressBar(len(sample_name)) + for i, s in enumerate(sample_name): + data = read_xyz( + osp.join(data_path, s), + max_body=max_body_kinect, + num_joint=num_joint).astype(np.float32) + fp[i, :, 0:data.shape[1], :, :] = data + total_frames.append(data.shape[1]) + prog_bar.update() + + if pre_norm: + fp = pre_normalization(fp) + + prog_bar = mmcv.ProgressBar(len(sample_name)) + for i, s in enumerate(sample_name): + anno = dict() + anno['total_frames'] = total_frames[i] + anno['keypoint'] = fp[i, :, 0:total_frames[i], :, :].transpose( + 3, 1, 2, 0) # C T V M -> M T V C + anno['frame_dir'] = osp.splitext(s)[0] + anno['img_shape'] = (1080, 1920) + anno['original_shape'] = (1080, 1920) + anno['label'] = sample_label[i] + + results.append(anno) + prog_bar.update() + + output_path = '{}/{}.pkl'.format(out_path, part) + mmcv.dump(results, output_path) + print(f'{benchmark}-{part} finish~!') + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description='Generate Pose Annotation for NTURGB-D raw skeleton data') + parser.add_argument( + '--data-path', + type=str, + help='raw skeleton data path', + default='data/ntu/nturgb+d_skeletons_60/') + parser.add_argument( + '--ignored-sample-path', + type=str, + default='NTU_RGBD_samples_with_missing_skeletons.txt') + parser.add_argument( + '--out-folder', type=str, default='data/ntu/nturgb+d_skeletons_60_3d') + parser.add_argument('--task', type=str, default='ntu60') + args = parser.parse_args() + + assert args.task in ['ntu60', 'ntu120'] + + if args.task == 'ntu60': + benchmark = ['xsub', 'xview'] + 
else: + benchmark = ['xsub', 'xsetup'] + part = ['train', 'val'] + + for b in benchmark: + for p in part: + out_path = osp.join(args.out_folder, b) + if not osp.exists(out_path): + os.makedirs(out_path) + gendata( + args.data_path, + out_path, + args.ignored_sample_path, + args.task, + benchmark=b, + part=p) diff --git a/tools/data/skeleton/ntu_pose_extraction.py b/tools/data/skeleton/ntu_pose_extraction.py index d53abaa44c..e02f9f0add 100644 --- a/tools/data/skeleton/ntu_pose_extraction.py +++ b/tools/data/skeleton/ntu_pose_extraction.py @@ -313,7 +313,7 @@ def ntu_pose_extraction(vid): anno['img_shape'] = (1080, 1920) anno['original_shape'] = (1080, 1920) anno['total_frames'] = pose_results.shape[1] - anno['label'] = int(osp.basename(vid).split('A')[1][:3]) + anno['label'] = int(osp.basename(vid).split('A')[1][:3]) - 1 shutil.rmtree(osp.dirname(frame_paths[0])) return anno From 90fc8440961987b7fe3ee99109e2c633c4e30158 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Thu, 21 Oct 2021 20:47:53 +0800 Subject: [PATCH 282/414] [Docs] Add skeleton dataset CN docs (#1228) * skeleton translation * fix * append --- tools/data/skeleton/README_zh-CN.md | 127 ++++++++++++++++++++++++++++ 1 file changed, 127 insertions(+) create mode 100644 tools/data/skeleton/README_zh-CN.md diff --git a/tools/data/skeleton/README_zh-CN.md b/tools/data/skeleton/README_zh-CN.md new file mode 100644 index 0000000000..fd774efd49 --- /dev/null +++ b/tools/data/skeleton/README_zh-CN.md @@ -0,0 +1,127 @@ +# 准备骨架数据集 + +```BibTeX +@misc{duan2021revisiting, + title={Revisiting Skeleton-based Action Recognition}, + author={Haodong Duan and Yue Zhao and Kai Chen and Dian Shao and Dahua Lin and Bo Dai}, + year={2021}, + eprint={2104.13586}, + archivePrefix={arXiv}, + primaryClass={cs.CV} +} +``` + +## 简介 + +MMAction2 发布 [Revisiting Skeleton-based Action Recognition](https://arxiv.org/abs/2104.13586) 论文中所使用的骨架标注。 +默认使用 [Faster-RCNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person.py) 作为人体检测器, +使用 [HRNet-w32](https://github.com/open-mmlab/mmpose/blob/master/configs/top_down/hrnet/coco/hrnet_w32_coco_256x192.py) 作为单人姿态估计模型。 +对于 FineGYM 数据集,MMAction2 使用的是运动员的真实框标注,而非检测器所出的框。目前,MMAction2 已发布 FineGYM 和 NTURGB-D Xsub 部分的骨架标注,其他数据集的标注也将很快发布。 + +## 准备标注文件 + +目前,MMAction2 支持 HMDB51, UCF101, FineGYM 和 NTURGB+D 数据集。对于 FineGYM 数据集,用户可以使用以下脚本下载标注文件。 + +```shell +bash download_annotations.sh ${DATASET} +``` + +由于 NTURGB+D 数据集的 [使用条例](http://rose1.ntu.edu.sg/Datasets/actionRecognition.asp),MMAction2 并未直接发布实验中所使用的标注文件。 +因此,这里提供生成 NTURGB+D 数据集中视频的姿态标注文件,这将生成一个 dict 数据并将其保存为一个 pickle 文件。 +用户可以生成一个 list 用以包含对应视频的 dict 数据,并将其保存为一个 pickle 文件。 +之后,用户可以获得 `ntu60_xsub_train.pkl`, `ntu60_xsub_val.pkl`, `ntu120_xsub_train.pkl`, `ntu120_xsub_val.pkl` 文件用于训练。 + +对于无法进行姿态提取的用户,这里提供了上述流程的输出结果,分别对应 NTURGB-D 数据集的 4 个部分: + +- ntu60_xsub_train: https://download.openmmlab.com/mmaction/posec3d/ntu60_xsub_train.pkl +- ntu60_xsub_val: https://download.openmmlab.com/mmaction/posec3d/ntu60_xsub_val.pkl +- ntu120_xsub_train: https://download.openmmlab.com/mmaction/posec3d/ntu120_xsub_train.pkl +- ntu120_xsub_val: https://download.openmmlab.com/mmaction/posec3d/ntu120_xsub_val.pkl +- hmdb51: https://download.openmmlab.com/mmaction/posec3d/hmdb51.pkl +- ucf101: https://download.openmmlab.com/mmaction/posec3d/ucf101.pkl + +若想生成单个视频的 2D 姿态标注文件,首先,用户需要由源码安装 mmdetection 和 mmpose。之后,用户需要在 `ntu_pose_extraction.py` 中指定 `mmdet_root` 和 `mmpose_root` 变量。 +最后,用户可使用以下脚本进行 NTURGB+D 视频的姿态提取: + 
+```python +python ntu_pose_extraction.py S001C001P001R001A001_rgb.avi S001C001P001R001A001.pkl +``` + +在用户获得数据集某部分所有视频的姿态标注文件(如 `ntu60_xsub_val`)后,可以将其集合成一个 list 数据并保存为 `ntu60_xsub_val.pkl`。用户可用这些大型 pickle 文件进行训练和测试。 + +## PoseC3D 的标注文件格式 + +这里简单介绍 PoseC3D 的标注文件格式。以 `gym_train.pkl` 为例:`gym_train.pkl` 存储一个长度为 20484 的 list,list 的每一项为单个视频的骨架标注 dict。每个 dict 的内容如下: + +- keypoint:关键点坐标,大小为 N(#人数)x T(时序长度)x K(#关键点, 这里为17)x 2 (x,y 坐标)的 numpy array 数据类型 +- keypoint_score:关键点的置信分数,大小为 N(#人数)x T(时序长度)x K(#关键点, 这里为17)的 numpy array 数据类型 +- frame_dir: 对应视频名 +- label: 动作类别 +- img_shape: 每一帧图像的大小 +- original_shape: 同 `img_shape` +- total_frames: 视频时序长度 + +## 可视化 + +为了可视化骨架数据,用户需要准备 RGB 的视频。详情可参考 [visualize_heatmap_volume](/demo/visualize_heatmap_volume.ipynb)。这里提供一些 NTU-60 和 FineGYM 上的例子 + + + + + + + + + +
+<!-- 可视化示例表格(原为内嵌演示图像的 HTML 表格):姿态估计结果 | 关键点热力图三维可视化 | 肢体热力图三维可视化 -->
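The preparation and annotation-format sections above describe collecting the per-video dicts into one list and saving it as a single pickle (e.g. `ntu60_xsub_val.pkl`). A minimal sketch of that step plus a field sanity check, assuming per-video pickles produced by `ntu_pose_extraction.py`; the directory and file names are placeholders:

```python
import glob

import mmcv

# Merge the per-video pickles into the single list file the configs load.
annos = [mmcv.load(f) for f in sorted(glob.glob('per_video_pkls/*.pkl'))]
mmcv.dump(annos, 'ntu60_xsub_val.pkl')

# Sanity-check one sample against the format described above:
# keypoint is (num_person, total_frames, num_keypoint, 2) for 2D skeletons.
sample = annos[0]
assert sample['keypoint'].shape[1] == sample['total_frames']
print(sample['frame_dir'], sample['label'], sample['img_shape'])
```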
    + +## 如何将 NTU RGB+D 原始数据转化为 MMAction2 格式 + +这里介绍如何将 NTU RGB+D 原始数据转化为 MMAction2 格式。首先,需要从 https://github.com/shahroudy/NTURGB-D 下载原始 NTU-RGBD 60 和 NTU-RGBD 120 数据集的原始骨架数据。 + +对于 NTU-RGBD 60 数据集,可使用以下脚本 + +```python +python gen_ntu_rgbd_raw.py --data-path your_raw_nturgbd60_skeleton_path --ignored-sample-path NTU_RGBD_samples_with_missing_skeletons.txt --out-folder your_nturgbd60_output_path --task ntu60 +``` + +对于 NTU-RGBD 120 数据集,可使用以下脚本 + +```python +python gen_ntu_rgbd_raw.py --data-path your_raw_nturgbd120_skeleton_path --ignored-sample-path NTU_RGBD120_samples_with_missing_skeletons.txt --out-folder your_nturgbd120_output_path --task ntu120 +``` + +**待办项**: + +- [x] FineGYM +- [x] NTU60_XSub +- [x] NTU120_XSub +- [x] NTU60_XView +- [x] NTU120_XSet +- [x] UCF101 +- [x] HMDB51 +- [ ] Kinetics From 90ad3f30be426d7b7d9c73230e470c0ae573d46b Mon Sep 17 00:00:00 2001 From: bit-scientist Date: Mon, 25 Oct 2021 20:35:28 +0900 Subject: [PATCH 283/414] [Doc] Create custom dataset training tutorial with PoseC3D (#1234) * Create custom dataset training with PoseC3D * revise the doc * update links Co-authored-by: Haodong Duan --- README.md | 3 +- README_zh-CN.md | 1 + configs/skeleton/posec3d/README.md | 2 + .../posec3d/custom_dataset_training.md | 42 +++++++++++++++++++ tools/data/skeleton/README.md | 2 + tools/data/skeleton/README_zh-CN.md | 2 + 6 files changed, 51 insertions(+), 1 deletion(-) create mode 100644 configs/skeleton/posec3d/custom_dataset_training.md diff --git a/README.md b/README.md index e942f75caa..eec10aad25 100644 --- a/README.md +++ b/README.md @@ -42,8 +42,9 @@ The master branch works with **PyTorch 1.3+**. - **Well tested and documented**: We provide detailed documentation and API reference, as well as unit tests. -## News +## Updates +- (2021-10-25) We provide a [guide](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md) on how to train PoseC3D with custom datasets, [bit-scientist](https://github.com/bit-scientist) authored this PR! - (2021-10-16) We support **PoseC3D** on UCF101 and HMDB51, achieves 87.0% and 69.3% Top-1 accuracy with 2D skeletons only. Pre-extracted 2D skeletons are also available. - (2021-10-12) We support **TorchServe**! Now recognition models in MMAction2 can be packed as a `.mar` file and served with TorchServe. - (2021-09-11) We support **ST-GCN**, a well-known GCN-based approach for skeleton-based action recognition! diff --git a/README_zh-CN.md b/README_zh-CN.md index 4127e0a8be..32ea807a45 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -43,6 +43,7 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLa ## 更新记录 +- (2021-10-25) 提供使用自定义数据集训练 PoseC3D 的 [教程](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md),此 PR 由用户 [bit-scientist](https://github.com/bit-scientist) 完成! - (2021-10-16) 在 UCF101, HMDB51 上支持 **PoseC3D**,仅用 2D 关键点就可分别达到 87.0% 和 69.3% 的识别准确率。两数据集的预提取骨架特征可以公开下载。 - (2021-10-12) 支持 **TorchServe**!目前可以使用 TorchServe 部署 MMAction2 中的动作识别模型。 - (2021-09-11) 支持 **ST-GCN**,一种广泛使用的基于人体姿态与 GCN 的动作识别方法! 
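Stepping back to `gen_ntu_rgbd_raw.py` from the first patch above: `pre_normalization` rotates every skeleton so that the hip-to-spine bone is parallel to the z axis (and the shoulder line to the x axis). A quick numeric check of the two rotation helpers it relies on, assuming the script is importable from the working directory:

```python
import numpy as np

# Assumes gen_ntu_rgbd_raw.py is on the Python path.
from gen_ntu_rgbd_raw import angle_between, rotation_matrix, unit_vector

bone = np.array([1.0, 1.0, 1.0])        # e.g. a hip -> spine direction
axis = np.cross(bone, [0, 0, 1])        # rotation axis used by pre_normalization
theta = angle_between(bone, [0, 0, 1])  # rotation angle
R = rotation_matrix(axis, theta)

# After the rotation the bone points along +z, which is what the
# 'parallel ... to the z axis' step in pre_normalization enforces.
assert np.allclose(unit_vector(R @ bone), [0.0, 0.0, 1.0])
```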
diff --git a/configs/skeleton/posec3d/README.md b/configs/skeleton/posec3d/README.md index 9f07f60282..52ac7f384d 100644 --- a/configs/skeleton/posec3d/README.md +++ b/configs/skeleton/posec3d/README.md @@ -113,6 +113,8 @@ python tools/train.py configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoin --validate --seed 0 --deterministic ``` +For training with your custom dataset, you can refer to [Custom Dataset Training](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md). + For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). ## Test diff --git a/configs/skeleton/posec3d/custom_dataset_training.md b/configs/skeleton/posec3d/custom_dataset_training.md new file mode 100644 index 0000000000..c01a183750 --- /dev/null +++ b/configs/skeleton/posec3d/custom_dataset_training.md @@ -0,0 +1,42 @@ + +# Custom Dataset Training with PoseC3D + +We provide a step-by-step tutorial on how to train your custom dataset with PoseC3D. + +1. First, you should know that action recognition with PoseC3D requires skeleton information only and for that you need to prepare your custom annotation files (for training and validation). To start with, you need to replace the placeholder `mmdet_root` and `mmpose_root` in `ntu_pose_extraction.py` with your installation path. Then you need to take advantage of [ntu_pose_extraction.py](https://github.com/open-mmlab/mmaction2/blob/90fc8440961987b7fe3ee99109e2c633c4e30158/tools/data/skeleton/ntu_pose_extraction.py) as shown in [Prepare Annotations](https://github.com/open-mmlab/mmaction2/blob/master/tools/data/skeleton/README.md#prepare-annotations) to extract 2D keypoints for each video in your custom dataset. The command looks like (assuming the name of your video is `some_video_from_my_dataset.mp4`): + + ```shell + # You can use the above command to generate pickle files for all of your training and validation videos. + python ntu_pose_extraction.py some_video_from_my_dataset.mp4 some_video_from_my_dataset.pkl + ``` + + @kennymckormick's [note](https://github.com/open-mmlab/mmaction2/issues/1216#issuecomment-950130079): + + > One only thing you may need to change is that: since ntu_pose_extraction.py is developed specifically for pose extraction of NTU videos, you can skip the [ntu_det_postproc](https://github.com/open-mmlab/mmaction2/blob/90fc8440961987b7fe3ee99109e2c633c4e30158/tools/data/skeleton/ntu_pose_extraction.py#L307) step when using this script for extracting pose from your custom video datasets. + +2. Then, you will collect all the pickle files into one list for training (and, of course, for validation) and save them as a single file (like `custom_dataset_train.pkl` or `custom_dataset_val.pkl`). At that time, you finalize preparing annotation files for your custom dataset. + +3. Next, you may use the following script (with some alterations according to your needs) for training as shown in [PoseC3D/Train](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/README.md#train): `python tools/train.py configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py --work-dir work_dirs/slowonly_r50_u48_240e_ntu120_xsub_keypoint --validate --test-best --gpus 2 --seed 0 --deterministic`: + + - Before running the above script, you need to modify the variables to initialize with your newly made annotation files: + + ```python + model = dict( + ... + cls_head=dict( + ... + num_classes=4, # Your class number + ... 
+ ), + ... + ) + + ann_file_train = 'data/posec3d/custom_dataset_train.pkl' # Your annotation for training + ann_file_val = 'data/posec3d/custom_dataset_val.pkl' # Your annotation for validation + + load_from = 'pretrained_weight.pth' # Your can use released weights for initialization, set to None if training from scratch + + # You can also alter the hyper parameters or training schedule + ``` + +With that, your machine should start its work to let you grab a cup of coffee and watch how the training goes. diff --git a/tools/data/skeleton/README.md b/tools/data/skeleton/README.md index a58ed66465..6769033940 100644 --- a/tools/data/skeleton/README.md +++ b/tools/data/skeleton/README.md @@ -56,6 +56,8 @@ Here we briefly introduce the format of PoseC3D Annotations, we will take `gym_t - original_shape: Same as above. - total_frames: The temporal length of the video. +For training with your custom dataset, you can refer to [Custom Dataset Training](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md). + ## Visualization For skeleton data visualization, you need also to prepare the RGB videos. Please refer to [visualize_heatmap_volume](/demo/visualize_heatmap_volume.ipynb) for detailed process. Here we provide some visualization examples from NTU-60 and FineGYM. diff --git a/tools/data/skeleton/README_zh-CN.md b/tools/data/skeleton/README_zh-CN.md index fd774efd49..456ac496e6 100644 --- a/tools/data/skeleton/README_zh-CN.md +++ b/tools/data/skeleton/README_zh-CN.md @@ -61,6 +61,8 @@ python ntu_pose_extraction.py S001C001P001R001A001_rgb.avi S001C001P001R001A001. - original_shape: 同 `img_shape` - total_frames: 视频时序长度 +如用户想使用自己的数据集训练 PoseC3D,可以参考 [Custom Dataset Training](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md)。 + ## 可视化 为了可视化骨架数据,用户需要准备 RGB 的视频。详情可参考 [visualize_heatmap_volume](/demo/visualize_heatmap_volume.ipynb)。这里提供一些 NTU-60 和 FineGYM 上的例子 From e49f86fb59db6847ed2dfd15ecc9eff00c2adcaa Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Tue, 26 Oct 2021 12:19:36 +0800 Subject: [PATCH 284/414] [Doc] CN README for PoseC3D (#1237) --- configs/skeleton/posec3d/README.md | 3 +- configs/skeleton/posec3d/README_zh-CN.md | 133 +++++++++++++++++++++++ 2 files changed, 134 insertions(+), 2 deletions(-) create mode 100644 configs/skeleton/posec3d/README_zh-CN.md diff --git a/configs/skeleton/posec3d/README.md b/configs/skeleton/posec3d/README.md index 52ac7f384d..7463824b16 100644 --- a/configs/skeleton/posec3d/README.md +++ b/configs/skeleton/posec3d/README.md @@ -92,8 +92,7 @@ 1. The **gpus** indicates the number of gpu we used to get the checkpoint. It is noteworthy that the configs we provide are used for 8 gpus as default. According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, e.g., lr=0.01 for 8 GPUs x 8 videos/gpu and lr=0.04 for 16 GPUs x 16 videos/gpu. -2. The values in columns named after "reference" are the results got by testing on our dataset, using the checkpoints provided by the author with same model settings. The checkpoints for reference repo can be downloaded [here](https://drive.google.com/drive/folders/1sFfmP3yrfc7IzRshEELOby7-aEoymIFL?usp=sharing). -3. You can follow the guide in [Preparing Skeleton Dataset](https://github.com/open-mmlab/mmaction2/tree/master/tools/data/skeleton) to obtain skeleton annotations used in the above configs. +2. 
You can follow the guide in [Preparing Skeleton Dataset](https://github.com/open-mmlab/mmaction2/tree/master/tools/data/skeleton) to obtain skeleton annotations used in the above configs. ::: diff --git a/configs/skeleton/posec3d/README_zh-CN.md b/configs/skeleton/posec3d/README_zh-CN.md new file mode 100644 index 0000000000..4c4cdf8d46 --- /dev/null +++ b/configs/skeleton/posec3d/README_zh-CN.md @@ -0,0 +1,133 @@ +# PoseC3D + +## 简介 + + + +```BibTeX +@misc{duan2021revisiting, + title={Revisiting Skeleton-based Action Recognition}, + author={Haodong Duan and Yue Zhao and Kai Chen and Dian Shao and Dahua Lin and Bo Dai}, + year={2021}, + eprint={2104.13586}, + archivePrefix={arXiv}, + primaryClass={cs.CV} +} +``` + + + + + + + + + +
+<!-- 可视化示例表格(原为内嵌演示图像的 HTML 表格):姿态估计结果 | 关键点热图三维体可视化 | 肢体热图三维体可视化 -->
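The 融合预测结果 (fused prediction) rows in the model-zoo tables below combine the keypoint and limb streams; one common recipe is equal-weight score fusion over the result files dumped by `tools/test.py ... --out`. A minimal sketch of that fusion, with placeholder file names:

```python
import mmcv
import numpy as np

# Placeholder paths: per-clip score lists dumped via `tools/test.py ... --out`.
keypoint_scores = np.array(mmcv.load('keypoint_result.pkl'))
limb_scores = np.array(mmcv.load('limb_result.pkl'))

# Equal-weight score fusion; for the top-1 prediction, summing and
# averaging the two score matrices are equivalent.
fused = keypoint_scores + limb_scores
top1 = fused.argmax(axis=1)
```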
    + +## 模型库 + +### FineGYM + +|配置文件 | 热图类型 | GPU 数量 | 主干网络 | Mean Top-1 | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:-:| +|[slowonly_r50_u48_240e_gym_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint.py) | 关键点 |8 x 2| SlowOnly-R50 |93.7 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint-b07a98a0.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint.json) | +|[slowonly_r50_u48_240e_gym_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb.py) | 肢体 |8 x 2| SlowOnly-R50 |94.0 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb-c0d7b482.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb.json) | +| 融合预测结果 | | | |94.3 | | | | + +### NTU60_XSub + +|配置文件 | 热图类型 | GPU 数量 | 主干网络 | Top-1 | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:-:| +| [slowonly_r50_u48_240e_ntu60_xsub_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint.py) | 关键点 | 8 x 2 | SlowOnly-R50 | 93.7 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint-f3adabf1.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint.json) | +| [slowonly_r50_u48_240e_ntu60_xsub_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb.py) | 肢体 | 8 x 2 | SlowOnly-R50 | 93.4 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb-1d69006a.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb.json) | +| 融合预测结果 | | | | 94.1 | | | | + +### NTU120_XSub + +|配置文件 | 热图类型 | GPU 数量 | 主干网络 | Top-1 | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:-:| +| [slowonly_r50_u48_240e_ntu120_xsub_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py) | 关键点 | 8 x 2 | SlowOnly-R50 | 86.3 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint-6736b03f.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint.json) | +| [slowonly_r50_u48_240e_ntu120_xsub_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb.py) | 肢体 | 8 x 2 | SlowOnly-R50 | 
85.7 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb-803c2317.pth?) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.json) | +| 融合预测结果 | | | | 86.9 | | | | + +### UCF101 + +|配置文件 | 热图类型 | GPU 数量 | 主干网络 | Top-1 | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:-:| +| [slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint](/configs/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint.py) | 关键点 | 8 | SlowOnly-R50 | 87.0 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint-cae8aa4a.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint.json) | + +### HMDB51 + +|配置文件 | 热图类型 | GPU 数量 | 主干网络 | Top-1 | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:-:| +| [slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint](/configs/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint.py) | 关键点 | 8 | SlowOnly-R50 | 69.3 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint-76ffdd8b.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint.json) | + +注: + +1. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 + 依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。 + 如,lr=0.2 对应 8 GPUs x 16 video/gpu,以及 lr=0.4 对应 16 GPUs x 16 video/gpu。 +2. 
用户可以参照 [准备骨骼数据集](https://github.com/open-mmlab/mmaction2/blob/master/tools/data/skeleton/README_zh-CN.md) 来获取以上配置文件使用的骨骼标注。 + +## 如何训练 + +用户可以使用以下指令进行模型训练。 + +```shell +python tools/train.py ${CONFIG_FILE} [optional arguments] +``` + +Example: 以确定性的训练,加以定期的验证过程进行 PoseC3D 模型在 FineGYM 数据集上的训练。 + +```shell +python tools/train.py configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint.py \ + --work-dir work_dirs/slowonly_r50_u48_240e_gym_keypoint \ + --validate --seed 0 --deterministic +``` + +有关自定义数据集上的训练,可以参考 [Custom Dataset Training](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md)。 + +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 + +## 如何测试 + +用户可以使用以下指令进行模型测试。 + +```shell +python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] +``` + +Example: 在 FineGYM 数据集上测试 PoseC3D 模型,并将结果导出为一个 pickle 文件。 + +```shell +python tools/test.py configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint.py \ + checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy mean_class_accuracy \ + --out result.pkl +``` + +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 From ca01f4ea22237dd22be92216ad291b2e7d40478c Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Tue, 26 Oct 2021 15:21:13 +0800 Subject: [PATCH 285/414] [Feature] Support ST-GCN for 3d skeleton (#1236) * master * master 0721 * 1025 3d * 1025 3d stgcn * 1025 add 3d stgcn * 1026 modify formatgcn * 1026 modify formatgcn * 1025 modify readme metafile * 1025 add readme * 1026 modify readme * 1026 modify readme * 1026 modify readme * Update README.md * Update README_zh-CN.md Co-authored-by: Haodong Duan --- README.md | 3 +- README_zh-CN.md | 3 +- configs/skeleton/stgcn/README.md | 5 +- configs/skeleton/stgcn/README_zh-CN.md | 59 ++++++++++++++ configs/skeleton/stgcn/metafile.yml | 20 ++++- .../stgcn/stgcn_80e_ntu60_xsub_keypoint_3d.py | 77 +++++++++++++++++++ mmaction/datasets/pipelines/formatting.py | 16 ++-- 7 files changed, 173 insertions(+), 10 deletions(-) create mode 100644 configs/skeleton/stgcn/README_zh-CN.md create mode 100644 configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d.py diff --git a/README.md b/README.md index eec10aad25..9d0f77c03e 100644 --- a/README.md +++ b/README.md @@ -44,10 +44,11 @@ The master branch works with **PyTorch 1.3+**. ## Updates +- (2021-10-26) We train and test **ST-GCN** on NTU60 with 3D keypoint annotations, achieve 84.61% Top-1 accuracy (higher than 81.5% in the [paper](https://www.aaai.org/ocs/index.php/AAAI/AAAI18/paper/viewPaper/17135)). +- (2021-10-25) We provide a script(tools/data/skeleton/gen_ntu_rgbd_raw.py) to convert the NTU60 and NTU120 3D raw skeleton data to our format. - (2021-10-25) We provide a [guide](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md) on how to train PoseC3D with custom datasets, [bit-scientist](https://github.com/bit-scientist) authored this PR! - (2021-10-16) We support **PoseC3D** on UCF101 and HMDB51, achieves 87.0% and 69.3% Top-1 accuracy with 2D skeletons only. Pre-extracted 2D skeletons are also available. - (2021-10-12) We support **TorchServe**! Now recognition models in MMAction2 can be packed as a `.mar` file and served with TorchServe. -- (2021-09-11) We support **ST-GCN**, a well-known GCN-based approach for skeleton-based action recognition! **Release**: v0.19.0 was released in 07/10/2021. 
Please refer to [changelog.md](docs/changelog.md) for details and release history. diff --git a/README_zh-CN.md b/README_zh-CN.md index 32ea807a45..380d4715c5 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -43,10 +43,11 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLa ## 更新记录 +- (2021-10-26) 在 NTU60 3d 关键点标注数据集上训练测试 **STGCN**, 可达到 84.61% (高于 [paper](https://www.aaai.org/ocs/index.php/AAAI/AAAI18/paper/viewPaper/17135) 中的 81.5%) 的识别准确率。 +- (2021-10-25) 提供将 NTU60 和 NTU120 的 3d 骨骼点数据转换成我们项目的格式的脚本(tools/data/skeleton/gen_ntu_rgbd_raw.py)。 - (2021-10-25) 提供使用自定义数据集训练 PoseC3D 的 [教程](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md),此 PR 由用户 [bit-scientist](https://github.com/bit-scientist) 完成! - (2021-10-16) 在 UCF101, HMDB51 上支持 **PoseC3D**,仅用 2D 关键点就可分别达到 87.0% 和 69.3% 的识别准确率。两数据集的预提取骨架特征可以公开下载。 - (2021-10-12) 支持 **TorchServe**!目前可以使用 TorchServe 部署 MMAction2 中的动作识别模型。 -- (2021-09-11) 支持 **ST-GCN**,一种广泛使用的基于人体姿态与 GCN 的动作识别方法! v0.19.0 版本已于 2021 年 10 月 7 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史 diff --git a/configs/skeleton/stgcn/README.md b/configs/skeleton/stgcn/README.md index b4fca5d1e7..e90c639c6a 100644 --- a/configs/skeleton/stgcn/README.md +++ b/configs/skeleton/stgcn/README.md @@ -17,9 +17,10 @@ ### NTU60_XSub -| config | pseudo heatmap | gpus | backbone | Top-1 | ckpt | log | json | +| config | keypoint | gpus | backbone | Top-1 | ckpt | log | json | | :----------------------------------------------------------- | :------------: | :---: | :----------: | :---: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [stgcn_80e_ntu60_xsub_keypoint](/configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py) | keypoint | 2 | STGCN | 86.91 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint-e7bb9653.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint.json) | +| [stgcn_80e_ntu60_xsub_keypoint](/configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py) | 2d | 2 | STGCN | 86.91 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint-e7bb9653.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint.json) | +| [stgcn_80e_ntu60_xsub_keypoint_3d](/configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d.py) | 3d | 1 | STGCN | 84.61 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d-13e7ccf0.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d.json) | ## Train diff --git a/configs/skeleton/stgcn/README_zh-CN.md b/configs/skeleton/stgcn/README_zh-CN.md new file mode 100644 index 0000000000..4f4f8e53c7 --- /dev/null +++ b/configs/skeleton/stgcn/README_zh-CN.md @@ -0,0 +1,59 @@ 
+# STGCN + +## 简介 + + + +```BibTeX +@inproceedings{yan2018spatial, + title={Spatial temporal graph convolutional networks for skeleton-based action recognition}, + author={Yan, Sijie and Xiong, Yuanjun and Lin, Dahua}, + booktitle={Thirty-second AAAI conference on artificial intelligence}, + year={2018} +} +``` + +## 模型库 + +### NTU60_XSub + +| 配置文件 | 骨骼点 | GPU 数量 | 主干网络 | top1 准确率 | ckpt | log | json | +| :----------------------------------------------------------- | :------------: | :---: | :----------: | :---: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [stgcn_80e_ntu60_xsub_keypoint](/configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py) | 2d | 2 | STGCN | 86.91 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint-e7bb9653.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint.json) | +| [stgcn_80e_ntu60_xsub_keypoint_3d](/configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d.py) | 3d | 1 | STGCN | 84.61 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d-13e7ccf0.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d.json) | + +## 如何训练 + +用户可以使用以下指令进行模型训练。 + +```shell +python tools/train.py ${CONFIG_FILE} [optional arguments] +``` + +例如:以一个确定性的训练方式,辅以定期的验证过程进行 STGCN 模型在 NTU60 数据集上的训练 + +```shell +python tools/train.py configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py \ + --work-dir work_dirs/stgcn_80e_ntu60_xsub_keypoint \ + --validate --seed 0 --deterministic +``` + +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 + +## 如何测试 + +用户可以使用以下指令进行模型测试。 + +```shell +python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] +``` + +例如:在 NTU60 数据集上测试 STGCN 模型,并将结果导出为一个 pickle 文件。 + +```shell +python tools/test.py configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py \ + checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy mean_class_accuracy \ + --out result.pkl +``` + +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/skeleton/stgcn/metafile.yml b/configs/skeleton/stgcn/metafile.yml index 0ea65b5708..7012930b43 100644 --- a/configs/skeleton/stgcn/metafile.yml +++ b/configs/skeleton/stgcn/metafile.yml @@ -2,7 +2,7 @@ Collections: - Name: STGCN README: configs/skeleton/stgcn/README.md Models: - Config: configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py +- Config: configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py In Collection: STGCN Metadata: Architecture: STGCN @@ -20,3 +20,21 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint.json Training Log: https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint.log Weights: 
https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint-e7bb9653.pth +- Config: configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d.py + In Collection: STGCN + Metadata: + Architecture: STGCN + Batch Size: 32 + Epochs: 80 + Parameters: 3088704 + Training Data: NTU60 + Training Resources: 1 GPU + Name: stgcn_80e_ntu60_xsub_keypoint_3d + Results: + Dataset: NTU60 + Metrics: + mean Top 1 Accuracy: 84.61 + Task: Skeleton-based Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d.json + Training Log: https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d.log + Weights: https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d-13e7ccf0.pth diff --git a/configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d.py b/configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d.py new file mode 100644 index 0000000000..4422dd759c --- /dev/null +++ b/configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d.py @@ -0,0 +1,77 @@ +model = dict( + type='SkeletonGCN', + backbone=dict( + type='STGCN', + in_channels=3, + edge_importance_weighting=True, + graph_cfg=dict(layout='ntu-rgb+d', strategy='spatial')), + cls_head=dict( + type='STGCNHead', + num_classes=60, + in_channels=256, + loss_cls=dict(type='CrossEntropyLoss')), + train_cfg=None, + test_cfg=None) + +dataset_type = 'PoseDataset' +ann_file_train = 'data/ntu/nturgb+d_skeletons_60_3d_nmtvc/xsub/train.pkl' +ann_file_val = 'data/ntu/nturgb+d_skeletons_60_3d_nmtvc/xsub/val.pkl' +train_pipeline = [ + dict(type='PaddingWithLoop', clip_len=300), + dict(type='PoseDecode'), + dict(type='FormatGCNInput', input_format='NCTVM'), + dict(type='Collect', keys=['keypoint', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['keypoint']) +] +val_pipeline = [ + dict(type='PaddingWithLoop', clip_len=300), + dict(type='PoseDecode'), + dict(type='FormatGCNInput', input_format='NCTVM'), + dict(type='Collect', keys=['keypoint', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['keypoint']) +] +test_pipeline = [ + dict(type='PaddingWithLoop', clip_len=300), + dict(type='PoseDecode'), + dict(type='FormatGCNInput', input_format='NCTVM'), + dict(type='Collect', keys=['keypoint', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['keypoint']) +] +data = dict( + videos_per_gpu=32, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix='', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix='', + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix='', + pipeline=test_pipeline)) + +# optimizer +optimizer = dict( + type='SGD', lr=0.1, momentum=0.9, weight_decay=0.0001, nesterov=True) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict(policy='step', step=[10, 50]) +total_epochs = 80 +checkpoint_config = dict(interval=3) +evaluation = dict(interval=3, metrics=['top_k_accuracy']) +log_config = dict(interval=100, hooks=[dict(type='TextLoggerHook')]) + +# runtime settings +dist_params = dict(backend='nccl') +log_level = 'INFO' +work_dir = './work_dirs/stgcn_80e_ntu60_xsub_keypoint_3d/' +load_from = None +resume_from = None +workflow = [('train', 1)] diff --git 
a/mmaction/datasets/pipelines/formatting.py b/mmaction/datasets/pipelines/formatting.py index 7e03979db8..e26da9366e 100644 --- a/mmaction/datasets/pipelines/formatting.py +++ b/mmaction/datasets/pipelines/formatting.py @@ -378,8 +378,8 @@ def __repr__(self): class FormatGCNInput: """Format final skeleton shape to the given input_format. - Required keys are "keypoint" and "keypoint_score", added or modified - keys are "keypoint" and "input_shape". + Required keys are "keypoint" and "keypoint_score"(optional), + added or modified keys are "keypoint" and "input_shape". Args: input_format (str): Define the final skeleton format. @@ -400,9 +400,15 @@ def __call__(self, results): to the next transform in pipeline. """ keypoint = results['keypoint'] - keypoint_confidence = results['keypoint_score'] - keypoint_confidence = np.expand_dims(keypoint_confidence, -1) - keypoint_3d = np.concatenate((keypoint, keypoint_confidence), axis=-1) + + if 'keypoint_score' in results: + keypoint_confidence = results['keypoint_score'] + keypoint_confidence = np.expand_dims(keypoint_confidence, -1) + keypoint_3d = np.concatenate((keypoint, keypoint_confidence), + axis=-1) + else: + keypoint_3d = keypoint + keypoint_3d = np.transpose(keypoint_3d, (3, 1, 2, 0)) # M T V C -> C T V M From fd9338867b8cccaedef1d6be726e912d910f8a03 Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Fri, 29 Oct 2021 11:28:33 +0800 Subject: [PATCH 286/414] [Feature] Video Structuralize Demo (#1197) * master * master 0721 * 0927 in progress * 0928 WIP * 0928 WIP * 0929 WIP * 0930 stdet+skeleton recog * 0930 stdet+skeleton recog * 0930 stdet+skeleton recog * 1006 * 1006 * 1008 demo * 1008 demo * 1008 demo * 1008 demo * 1008 demo * 1012 * 1012 add rgb-recognition * 1013 * 1014 add skeleton-based stdet * 1014 add skeleton-based stdet * 1014 add skeleton-based stdet * 1014 add skeleton-based stdet * 1014 add skeleton-based stdet * 1018 format demo * 1018 format demo * 1018 format demo * 1019 fix demo * 1019 fix demo * 1021 * 1022 fix bugs in demo * 1022 fix bugs in demo * 1025 poseinference * 1026 new test_mp4 * 1026 new test_mp4 * 1026 modify det_thr * 1026 add readme news * 1027 modify readme * 1027 modify readme * 1027 * 1027 add readme-zh --- README.md | 1 + README_zh-CN.md | 1 + demo/README.md | 139 +++++- demo/demo_video_structuralize.py | 797 ++++++++++++++++++++++++++++++ demo/test_video_structuralize.mp4 | Bin 0 -> 579876 bytes docs_zh_CN/demo.md | 156 +++++- 6 files changed, 1085 insertions(+), 9 deletions(-) create mode 100644 demo/demo_video_structuralize.py create mode 100644 demo/test_video_structuralize.mp4 diff --git a/README.md b/README.md index 9d0f77c03e..fd54bef9d3 100644 --- a/README.md +++ b/README.md @@ -44,6 +44,7 @@ The master branch works with **PyTorch 1.3+**. ## Updates +- (2021-10-26) We provide a demo for skeleton-based and rgb-based spatio-temporal detection and action recognition (demo/demo_video_structuralize.py). - (2021-10-26) We train and test **ST-GCN** on NTU60 with 3D keypoint annotations, achieve 84.61% Top-1 accuracy (higher than 81.5% in the [paper](https://www.aaai.org/ocs/index.php/AAAI/AAAI18/paper/viewPaper/17135)). - (2021-10-25) We provide a script(tools/data/skeleton/gen_ntu_rgbd_raw.py) to convert the NTU60 and NTU120 3D raw skeleton data to our format. 
- (2021-10-25) We provide a [guide](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md) on how to train PoseC3D with custom datasets, [bit-scientist](https://github.com/bit-scientist) authored this PR!
diff --git a/README_zh-CN.md b/README_zh-CN.md
index 380d4715c5..3064632d47 100644
--- a/README_zh-CN.md
+++ b/README_zh-CN.md
@@ -43,6 +43,7 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLa

 ## 更新记录

+- (2021-10-26) 支持基于 skeleton 模态和 rgb 模态的时空动作检测和行为识别 demo (demo/demo_video_structuralize.py)。
 - (2021-10-26) 在 NTU60 3d 关键点标注数据集上训练测试 **STGCN**, 可达到 84.61% (高于 [paper](https://www.aaai.org/ocs/index.php/AAAI/AAAI18/paper/viewPaper/17135) 中的 81.5%) 的识别准确率。
 - (2021-10-25) 提供将 NTU60 和 NTU120 的 3d 骨骼点数据转换成我们项目的格式的脚本(tools/data/skeleton/gen_ntu_rgbd_raw.py)。
 - (2021-10-25) 提供使用自定义数据集训练 PoseC3D 的 [教程](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md),此 PR 由用户 [bit-scientist](https://github.com/bit-scientist) 完成!
diff --git a/demo/README.md b/demo/README.md
index baa4466912..924453213c 100644
--- a/demo/README.md
+++ b/demo/README.md
@@ -8,8 +8,9 @@
 - [Video GradCAM Demo](#video-gradcam-demo): A demo script to visualize GradCAM results using a single video.
 - [Webcam demo](#webcam-demo): A demo script to implement real-time action recognition from a web camera.
 - [Long Video demo](#long-video-demo): a demo script to predict different labels using a single long video.
-- [SpatioTempoval Action Detection Webcam Demo](#spatiotemporal-action-detection-webcam-demo): A demo script to implement real-time spatio-temporval action detection from a web camera.
+- [SpatioTemporal Action Detection Webcam Demo](#spatiotemporal-action-detection-webcam-demo): A demo script to implement real-time spatio-temporal action detection from a web camera.
 - [Skeleton-based Action Recognition Demo](#skeleton-based-action-recognition-demo): A demo script to predict the skeleton-based action recognition result using a single video.
+- [Video Structuralize Demo](#video-structuralize-demo): A demo script to predict the skeleton-based and rgb-based action recognition and spatio-temporal action detection result using a single video.

 ## Modify configs through script arguments

@@ -492,3 +493,139 @@ python demo/demo_posec3d.py demo/ntu_sample.avi demo/posec3d_demo.mp4 \
     --pose-checkpoint https://download.openmmlab.com/mmpose/top_down/hrnet/hrnet_w32_coco_256x192-c78dce93_20200708.pth \
     --label-map tools/data/skeleton/label_map_ntu120.txt
 ```
+
+## Video Structuralize Demo
+
+We provide a demo script to predict the skeleton-based and rgb-based action recognition and spatio-temporal action detection result using a single video.
+```shell
+python demo/demo_video_structuralize.py \
+    [--rgb-stdet-config ${RGB_BASED_SPATIO_TEMPORAL_ACTION_DETECTION_CONFIG_FILE}] \
+    [--rgb-stdet-checkpoint ${RGB_BASED_SPATIO_TEMPORAL_ACTION_DETECTION_CHECKPOINT}] \
+    [--skeleton-stdet-checkpoint ${SKELETON_BASED_SPATIO_TEMPORAL_ACTION_DETECTION_CHECKPOINT}] \
+    [--det-config ${HUMAN_DETECTION_CONFIG_FILE}] \
+    [--det-checkpoint ${HUMAN_DETECTION_CHECKPOINT}] \
+    [--pose-config ${HUMAN_POSE_ESTIMATION_CONFIG_FILE}] \
+    [--pose-checkpoint ${HUMAN_POSE_ESTIMATION_CHECKPOINT}] \
+    [--skeleton-config ${SKELETON_BASED_ACTION_RECOGNITION_CONFIG_FILE}] \
+    [--skeleton-checkpoint ${SKELETON_BASED_ACTION_RECOGNITION_CHECKPOINT}] \
+    [--rgb-config ${RGB_BASED_ACTION_RECOGNITION_CONFIG_FILE}] \
+    [--rgb-checkpoint ${RGB_BASED_ACTION_RECOGNITION_CHECKPOINT}] \
+    [--use-skeleton-stdet ${USE_SKELETON_BASED_SPATIO_TEMPORAL_DETECTION_METHOD}] \
+    [--use-skeleton-recog ${USE_SKELETON_BASED_ACTION_RECOGNITION_METHOD}] \
+    [--det-score-thr ${HUMAN_DETECTION_SCORE_THRE}] \
+    [--action-score-thr ${ACTION_DETECTION_SCORE_THRE}] \
+    [--video ${VIDEO_FILE}] \
+    [--label-map-stdet ${LABEL_MAP_FOR_SPATIO_TEMPORAL_ACTION_DETECTION}] \
+    [--label-map ${LABEL_MAP}] \
+    [--device ${DEVICE}] \
+    [--out-filename ${OUTPUT_FILENAME}] \
+    [--predict-stepsize ${PREDICT_STEPSIZE}] \
+    [--output-stepsize ${OUTPUT_STEPSIZE}] \
+    [--output-fps ${OUTPUT_FPS}] \
+    [--cfg-options]
+```
+
+Optional arguments:
+
+- `RGB_BASED_SPATIO_TEMPORAL_ACTION_DETECTION_CONFIG_FILE`: The rgb-based spatio temporal action detection config file path.
+- `RGB_BASED_SPATIO_TEMPORAL_ACTION_DETECTION_CHECKPOINT`: The rgb-based spatio temporal action detection checkpoint path or URL.
+- `SKELETON_BASED_SPATIO_TEMPORAL_ACTION_DETECTION_CHECKPOINT`: The skeleton-based spatio temporal action detection checkpoint path or URL.
+- `HUMAN_DETECTION_CONFIG_FILE`: The human detection config file path.
+- `HUMAN_DETECTION_CHECKPOINT`: The human detection checkpoint URL.
+- `HUMAN_POSE_ESTIMATION_CONFIG_FILE`: The human pose estimation config file path (trained on COCO-Keypoint).
+- `HUMAN_POSE_ESTIMATION_CHECKPOINT`: The human pose estimation checkpoint URL (trained on COCO-Keypoint).
+- `SKELETON_BASED_ACTION_RECOGNITION_CONFIG_FILE`: The skeleton-based action recognition config file path.
+- `SKELETON_BASED_ACTION_RECOGNITION_CHECKPOINT`: The skeleton-based action recognition checkpoint path or URL.
+- `RGB_BASED_ACTION_RECOGNITION_CONFIG_FILE`: The rgb-based action recognition config file path.
+- `RGB_BASED_ACTION_RECOGNITION_CHECKPOINT`: The rgb-based action recognition checkpoint path or URL.
+- `USE_SKELETON_BASED_SPATIO_TEMPORAL_DETECTION_METHOD`: Use the skeleton-based spatio temporal action detection method.
+- `USE_SKELETON_BASED_ACTION_RECOGNITION_METHOD`: Use the skeleton-based action recognition method.
+- `HUMAN_DETECTION_SCORE_THRE`: The score threshold for human detection. Default: 0.9.
+- `ACTION_DETECTION_SCORE_THRE`: The score threshold for action detection. Default: 0.4.
+- `LABEL_MAP_FOR_SPATIO_TEMPORAL_ACTION_DETECTION`: The label map used for spatio temporal action detection. Default: `tools/data/ava/label_map.txt`.
+- `LABEL_MAP`: The label map for action recognition. Default: `tools/data/kinetics/label_map_k400.txt`.
+- `DEVICE`: Type of device to run the demo. Allowed values are cuda devices like `cuda:0` or `cpu`. Default: `cuda:0`.
+- `OUTPUT_FILENAME`: Path to the output file, which is in video format. Default: `demo/test_stdet_recognition_output.mp4`.
+- `PREDICT_STEPSIZE`: Make a prediction per N frames. Default: 8.
+- `OUTPUT_STEPSIZE`: Output 1 frame per N frames in the input video. Note that `PREDICT_STEPSIZE % OUTPUT_STEPSIZE == 0`. Default: 1.
+- `OUTPUT_FPS`: The FPS of the demo video output. Default: 24.
+
+Examples:
+
+Assume that you are located at `$MMACTION2`.
+
+1. Use the Faster RCNN as the human detector, HRNetw32 as the pose estimator, and PoseC3D as both the skeleton-based action recognizer and the skeleton-based spatio temporal action detector. The demo makes an action detection prediction every 8 frames and writes every input frame to the output video. The FPS of the output video is 24.
+
+```shell
+python demo/demo_video_structuralize.py \
+    --skeleton-stdet-checkpoint https://download.openmmlab.com/mmaction/skeleton/posec3d/posec3d_ava.pth \
+    --det-config demo/faster_rcnn_r50_fpn_2x_coco.py \
+    --det-checkpoint http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth \
+    --pose-config demo/hrnet_w32_coco_256x192.py \
+    --pose-checkpoint https://download.openmmlab.com/mmpose/top_down/hrnet/hrnet_w32_coco_256x192-c78dce93_20200708.pth \
+    --skeleton-config configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py \
+    --skeleton-checkpoint https://download.openmmlab.com/mmaction/skeleton/posec3d/posec3d_k400.pth \
+    --use-skeleton-stdet \
+    --use-skeleton-recog \
+    --label-map-stdet tools/data/ava/label_map.txt \
+    --label-map tools/data/kinetics/label_map_k400.txt
+```
+
+2. Use the Faster RCNN as the human detector, TSN-R50-1x1x3 as the rgb-based action recognizer, and SlowOnly-8x8-R101 as the rgb-based spatio temporal action detector. The demo makes an action detection prediction every 8 frames and writes every input frame to the output video. The FPS of the output video is 24.
+
+```shell
+python demo/demo_video_structuralize.py \
+    --rgb-stdet-config configs/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb.py \
+    --rgb-stdet-checkpoint https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201217-16378594.pth \
+    --det-config demo/faster_rcnn_r50_fpn_2x_coco.py \
+    --det-checkpoint http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth \
+    --rgb-config configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \
+    --rgb-checkpoint https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \
+    --label-map-stdet tools/data/ava/label_map.txt \
+    --label-map tools/data/kinetics/label_map_k400.txt
+```
+
+3. Use the Faster RCNN as the human detector, HRNetw32 as the pose estimator, PoseC3D as the skeleton-based action recognizer, and SlowOnly-8x8-R101 as the rgb-based spatio temporal action detector. The demo makes an action detection prediction every 8 frames and writes every input frame to the output video. The FPS of the output video is 24.
+```shell
+python demo/demo_video_structuralize.py \
+    --rgb-stdet-config configs/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb.py \
+    --rgb-stdet-checkpoint https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201217-16378594.pth \
+    --det-config demo/faster_rcnn_r50_fpn_2x_coco.py \
+    --det-checkpoint http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth \
+    --pose-config demo/hrnet_w32_coco_256x192.py \
+    --pose-checkpoint https://download.openmmlab.com/mmpose/top_down/hrnet/hrnet_w32_coco_256x192-c78dce93_20200708.pth \
+    --skeleton-config configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py \
+    --skeleton-checkpoint https://download.openmmlab.com/mmaction/skeleton/posec3d/posec3d_k400.pth \
+    --use-skeleton-recog \
+    --label-map-stdet tools/data/ava/label_map.txt \
+    --label-map tools/data/kinetics/label_map_k400.txt
+```
+
+4. Use the Faster RCNN as the human detector, HRNetw32 as the pose estimator, TSN-R50-1x1x3 as the rgb-based action recognizer, and PoseC3D as the skeleton-based spatio temporal action detector. The demo makes an action detection prediction every 8 frames and writes every input frame to the output video. The FPS of the output video is 24.
+
+```shell
+python demo/demo_video_structuralize.py \
+    --skeleton-stdet-checkpoint https://download.openmmlab.com/mmaction/skeleton/posec3d/posec3d_ava.pth \
+    --det-config demo/faster_rcnn_r50_fpn_2x_coco.py \
+    --det-checkpoint http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth \
+    --pose-config demo/hrnet_w32_coco_256x192.py \
+    --pose-checkpoint https://download.openmmlab.com/mmpose/top_down/hrnet/hrnet_w32_coco_256x192-c78dce93_20200708.pth \
+    --skeleton-config configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py \
+    --rgb-config configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \
+    --rgb-checkpoint https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \
+    --use-skeleton-stdet \
+    --label-map-stdet tools/data/ava/label_map.txt \
+    --label-map tools/data/kinetics/label_map_k400.txt
+```
diff --git a/demo/demo_video_structuralize.py b/demo/demo_video_structuralize.py
new file mode 100644
index 0000000000..6b8ec31323
--- /dev/null
+++ b/demo/demo_video_structuralize.py
@@ -0,0 +1,797 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import argparse
+import copy as cp
+import os
+import os.path as osp
+import shutil
+
+import cv2
+import mmcv
+import numpy as np
+import torch
+from mmcv import DictAction
+from mmcv.runner import load_checkpoint
+
+from mmaction.apis import inference_recognizer
+from mmaction.datasets.pipelines import Compose
+from mmaction.models import build_detector, build_model, build_recognizer
+from mmaction.utils import import_module_error_func
+
+try:
+    from mmdet.apis import inference_detector, init_detector
+    from mmpose.apis import (init_pose_model, inference_top_down_pose_model,
+                             vis_pose_result)
+
+except (ImportError, ModuleNotFoundError):
+
+    @import_module_error_func('mmdet')
+    def inference_detector(*args, **kwargs):
+        pass
+
+    @import_module_error_func('mmdet')
+    def init_detector(*args, **kwargs):
+        pass
+
+    @import_module_error_func('mmpose')
+    def init_pose_model(*args, **kwargs):
+        pass
+
+    @import_module_error_func('mmpose')
+    def inference_top_down_pose_model(*args, **kwargs):
+        pass
+
+    @import_module_error_func('mmpose')
+    def vis_pose_result(*args, **kwargs):
+        pass
+
+
+try:
+    import moviepy.editor as mpy
+except ImportError:
+    raise ImportError('Please install moviepy to enable output file')
+
+FONTFACE = cv2.FONT_HERSHEY_DUPLEX
+FONTSCALE = 0.5
+FONTCOLOR = (255, 255, 255)  # BGR, white
+MSGCOLOR = (128, 128, 128)  # BGR, gray
+THICKNESS = 1
+LINETYPE = 1
+
+
+def hex2color(h):
+    """Convert a 6-digit hex string to a tuple of 3 int values (RGB)."""
+    return (int(h[:2], 16), int(h[2:4], 16), int(h[4:], 16))
+
+
+PLATEBLUE = '03045e-023e8a-0077b6-0096c7-00b4d8-48cae4'
+PLATEBLUE = PLATEBLUE.split('-')
+PLATEBLUE = [hex2color(h) for h in PLATEBLUE]
+PLATEGREEN = '004b23-006400-007200-008000-38b000-70e000'
+PLATEGREEN = PLATEGREEN.split('-')
+PLATEGREEN = [hex2color(h) for h in PLATEGREEN]
+
+
+def visualize(frames,
+              annotations,
+              pose_results,
+              action_result,
+              pose_model,
+              plate=PLATEBLUE,
+              max_num=5):
+    """Visualize frames with predicted annotations.
+
+    Args:
+        frames (list[np.ndarray]): Frames for visualization, note that
+            len(frames) % len(annotations) should be 0.
+        annotations (list[list[tuple]]): The predicted spatio-temporal
+            detection results.
+        pose_results (list[list[tuple]]): The pose estimation results.
+        action_result (str): The predicted action recognition result.
+        pose_model (nn.Module): The constructed pose model.
+        plate (list): The color plate used for visualization.
+            Default: PLATEBLUE.
+        max_num (int): Max number of labels to visualize for a person box.
+            Default: 5.
+
+    Returns:
+        list[np.ndarray]: Visualized frames.
+ """ + + assert max_num + 1 <= len(plate) + plate = [x[::-1] for x in plate] + frames_ = cp.deepcopy(frames) + nf, na = len(frames), len(annotations) + assert nf % na == 0 + nfpa = len(frames) // len(annotations) + anno = None + h, w, _ = frames[0].shape + scale_ratio = np.array([w, h, w, h]) + + # add pose results + if pose_results: + for i in range(nf): + frames_[i] = vis_pose_result(pose_model, frames_[i], + pose_results[i]) + + for i in range(na): + anno = annotations[i] + if anno is None: + continue + for j in range(nfpa): + ind = i * nfpa + j + frame = frames_[ind] + + # add action result for whole video + cv2.putText(frame, action_result, (10, 30), FONTFACE, FONTSCALE, + FONTCOLOR, THICKNESS, LINETYPE) + + # add spatio-temporal action detection results + for ann in anno: + box = ann[0] + label = ann[1] + if not len(label): + continue + score = ann[2] + box = (box * scale_ratio).astype(np.int64) + st, ed = tuple(box[:2]), tuple(box[2:]) + if not pose_results: + cv2.rectangle(frame, st, ed, plate[0], 2) + + for k, lb in enumerate(label): + if k >= max_num: + break + text = abbrev(lb) + text = ': '.join([text, str(score[k])]) + location = (0 + st[0], 18 + k * 18 + st[1]) + textsize = cv2.getTextSize(text, FONTFACE, FONTSCALE, + THICKNESS)[0] + textwidth = textsize[0] + diag0 = (location[0] + textwidth, location[1] - 14) + diag1 = (location[0], location[1] + 2) + cv2.rectangle(frame, diag0, diag1, plate[k + 1], -1) + cv2.putText(frame, text, location, FONTFACE, FONTSCALE, + FONTCOLOR, THICKNESS, LINETYPE) + + return frames_ + + +def parse_args(): + parser = argparse.ArgumentParser(description='MMAction2 demo') + parser.add_argument( + '--rgb-stdet-config', + default=('configs/detection/ava/' + 'slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb.py'), + help='rgb-based spatio temporal detection config file path') + parser.add_argument( + '--rgb-stdet-checkpoint', + default=('https://download.openmmlab.com/mmaction/detection/ava/' + 'slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/' + 'slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb' + '_20201217-16378594.pth'), + help='rgb-based spatio temporal detection checkpoint file/url') + parser.add_argument( + '--skeleton-stdet-checkpoint', + default=('https://download.openmmlab.com/mmaction/skeleton/posec3d/' + 'posec3d_ava.pth'), + help='skeleton-based spatio temporal detection checkpoint file/url') + parser.add_argument( + '--det-config', + default='demo/faster_rcnn_r50_fpn_2x_coco.py', + help='human detection config file path (from mmdet)') + parser.add_argument( + '--det-checkpoint', + default=('http://download.openmmlab.com/mmdetection/v2.0/' + 'faster_rcnn/faster_rcnn_r50_fpn_2x_coco/' + 'faster_rcnn_r50_fpn_2x_coco_' + 'bbox_mAP-0.384_20200504_210434-a5d8aa15.pth'), + help='human detection checkpoint file/url') + parser.add_argument( + '--pose-config', + default='demo/hrnet_w32_coco_256x192.py', + help='human pose estimation config file path (from mmpose)') + parser.add_argument( + '--pose-checkpoint', + default=('https://download.openmmlab.com/mmpose/top_down/hrnet/' + 'hrnet_w32_coco_256x192-c78dce93_20200708.pth'), + help='human pose estimation checkpoint file/url') + parser.add_argument( + '--skeleton-config', + default='configs/skeleton/posec3d/' + 'slowonly_r50_u48_240e_ntu120_xsub_keypoint.py', + help='skeleton-based action recognition config file path') + parser.add_argument( + '--skeleton-checkpoint', + default='https://download.openmmlab.com/mmaction/skeleton/posec3d/' + 'posec3d_k400.pth', + help='skeleton-based 
action recognition checkpoint file/url') + parser.add_argument( + '--rgb-config', + default='configs/recognition/tsn/' + 'tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py', + help='rgb-based action recognition config file path') + parser.add_argument( + '--rgb-checkpoint', + default='https://download.openmmlab.com/mmaction/recognition/' + 'tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/' + 'tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth', + help='rgb-based action recognition checkpoint file/url') + parser.add_argument( + '--use-skeleton-stdet', + action='store_true', + help='use skeleton-based spatio temporal detection method') + parser.add_argument( + '--use-skeleton-recog', + action='store_true', + help='use skeleton-based action recognition method') + parser.add_argument( + '--det-score-thr', + type=float, + default=0.9, + help='the threshold of human detection score') + parser.add_argument( + '--action-score-thr', + type=float, + default=0.4, + help='the threshold of action prediction score') + parser.add_argument( + '--video', + default='demo/test_video_structuralize.mp4', + help='video file/url') + parser.add_argument( + '--label-map-stdet', + default='tools/data/ava/label_map.txt', + help='label map file for spatio-temporal action detection') + parser.add_argument( + '--label-map', + default='tools/data/kinetics/label_map_k400.txt', + help='label map file for action recognition') + parser.add_argument( + '--device', type=str, default='cuda:0', help='CPU/CUDA device option') + parser.add_argument( + '--out-filename', + default='demo/test_stdet_recognition_output.mp4', + help='output filename') + parser.add_argument( + '--predict-stepsize', + default=8, + type=int, + help='give out a spatio-temporal detection prediction per n frames') + parser.add_argument( + '--output-stepsize', + default=1, + type=int, + help=('show one frame per n frames in the demo, we should have: ' + 'predict_stepsize % output_stepsize == 0')) + parser.add_argument( + '--output-fps', + default=24, + type=int, + help='the fps of demo video output') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + default={}, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. For example, ' + "'--cfg-options model.backbone.depth=18 model.backbone.with_cp=True'") + args = parser.parse_args() + return args + + +def frame_extraction(video_path): + """Extract frames given video_path. + + Args: + video_path (str): The video_path. + """ + # Load the video, extract frames into ./tmp/video_name + target_dir = osp.join('./tmp', osp.basename(osp.splitext(video_path)[0])) + # target_dir = osp.join('./tmp','spatial_skeleton_dir') + os.makedirs(target_dir, exist_ok=True) + # Should be able to handle videos up to several hours + frame_tmpl = osp.join(target_dir, 'img_{:06d}.jpg') + vid = cv2.VideoCapture(video_path) + frames = [] + frame_paths = [] + flag, frame = vid.read() + cnt = 0 + while flag: + frames.append(frame) + frame_path = frame_tmpl.format(cnt + 1) + frame_paths.append(frame_path) + cv2.imwrite(frame_path, frame) + cnt += 1 + flag, frame = vid.read() + return frame_paths, frames + + +def detection_inference(args, frame_paths): + """Detect human boxes given frame paths. + + Args: + args (argparse.Namespace): The arguments. + frame_paths (list[str]): The paths of frames to do detection inference. + + Returns: + list[np.ndarray]: The human detection results. 
+    """
+    model = init_detector(args.det_config, args.det_checkpoint, args.device)
+    assert model.CLASSES[0] == 'person', ('We require you to use a detector '
+                                          'trained on COCO')
+    results = []
+    print('Performing Human Detection for each frame')
+    prog_bar = mmcv.ProgressBar(len(frame_paths))
+    for frame_path in frame_paths:
+        result = inference_detector(model, frame_path)
+        # We only keep human detections with score larger than det_score_thr
+        result = result[0][result[0][:, 4] >= args.det_score_thr]
+        results.append(result)
+        prog_bar.update()
+
+    return results
+
+
+def pose_inference(args, frame_paths, det_results):
+    model = init_pose_model(args.pose_config, args.pose_checkpoint,
+                            args.device)
+    ret = []
+    print('Performing Human Pose Estimation for each frame')
+    prog_bar = mmcv.ProgressBar(len(frame_paths))
+    for f, d in zip(frame_paths, det_results):
+        # Align input format
+        d = [dict(bbox=x) for x in list(d)]
+
+        pose = inference_top_down_pose_model(model, f, d, format='xyxy')[0]
+        ret.append(pose)
+        prog_bar.update()
+    return ret
+
+
+def load_label_map(file_path):
+    """Load Label Map.
+
+    Args:
+        file_path (str): The file path of label map.
+
+    Returns:
+        dict: The label map (int -> label name).
+    """
+    lines = open(file_path).readlines()
+    lines = [x.strip().split(': ') for x in lines]
+    return {int(x[0]): x[1] for x in lines}
+
+
+def abbrev(name):
+    """Get the abbreviation of label name:
+
+    'take (an object) from (a person)' -> 'take ... from ...'
+    """
+    while name.find('(') != -1:
+        st, ed = name.find('('), name.find(')')
+        name = name[:st] + '...' + name[ed + 1:]
+    return name
+
+
+def pack_result(human_detection, result, img_h, img_w):
+    """Pack the predicted human detections and their action labels for
+    visualization.
+
+    Args:
+        human_detection (np.ndarray): Human detection result.
+        result (list): The predicted label of each human proposal.
+        img_h (int): The image height.
+        img_w (int): The image width.
+
+    Returns:
+        list | None: One entry per human proposal, or None if there is no
+            prediction for this frame.
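+            Each entry is a tuple of (bbox, label names, label scores); the
+            bbox coordinates are normalized to [0, 1] by the image width and
+            height.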
+ """ + human_detection[:, 0::2] /= img_w + human_detection[:, 1::2] /= img_h + results = [] + if result is None: + return None + for prop, res in zip(human_detection, result): + res.sort(key=lambda x: -x[1]) + results.append( + (prop.data.cpu().numpy(), [x[0] for x in res], [x[1] + for x in res])) + return results + + +def expand_bbox(bbox, h, w, ratio=1.25): + x1, y1, x2, y2 = bbox + center_x = (x1 + x2) // 2 + center_y = (y1 + y2) // 2 + width = x2 - x1 + height = y2 - y1 + + square_l = max(width, height) + new_width = new_height = square_l * ratio + + new_x1 = max(0, int(center_x - new_width / 2)) + new_x2 = min(int(center_x + new_width / 2), w) + new_y1 = max(0, int(center_y - new_height / 2)) + new_y2 = min(int(center_y + new_height / 2), h) + return (new_x1, new_y1, new_x2, new_y2) + + +def cal_iou(box1, box2): + xmin1, ymin1, xmax1, ymax1 = box1 + xmin2, ymin2, xmax2, ymax2 = box2 + + s1 = (xmax1 - xmin1) * (ymax1 - ymin1) + s2 = (xmax2 - xmin2) * (ymax2 - ymin2) + + xmin = max(xmin1, xmin2) + ymin = max(ymin1, ymin2) + xmax = min(xmax1, xmax2) + ymax = min(ymax1, ymax2) + + w = max(0, xmax - xmin) + h = max(0, ymax - ymin) + intersect = w * h + union = s1 + s2 - intersect + iou = intersect / union + + return iou + + +def skeleton_based_action_recognition(args, pose_results, num_frame, h, w): + fake_anno = dict( + frame_dict='', + label=-1, + img_shape=(h, w), + origin_shape=(h, w), + start_index=0, + modality='Pose', + total_frames=num_frame) + num_person = max([len(x) for x in pose_results]) + + num_keypoint = 17 + keypoint = np.zeros((num_person, num_frame, num_keypoint, 2), + dtype=np.float16) + keypoint_score = np.zeros((num_person, num_frame, num_keypoint), + dtype=np.float16) + for i, poses in enumerate(pose_results): + for j, pose in enumerate(poses): + pose = pose['keypoints'] + keypoint[j, i] = pose[:, :2] + keypoint_score[j, i] = pose[:, 2] + + fake_anno['keypoint'] = keypoint + fake_anno['keypoint_score'] = keypoint_score + + label_map = [x.strip() for x in open(args.label_map).readlines()] + num_class = len(label_map) + + skeleton_config = mmcv.Config.fromfile(args.skeleton_config) + skeleton_config.model.cls_head.num_classes = num_class # for K400 dataset + skeleton_pipeline = Compose(skeleton_config.test_pipeline) + skeleton_imgs = skeleton_pipeline(fake_anno)['imgs'][None] + skeleton_imgs = skeleton_imgs.to(args.device) + + # Build skeleton-based recognition model + skeleton_model = build_model(skeleton_config.model) + load_checkpoint( + skeleton_model, args.skeleton_checkpoint, map_location=args.device) + skeleton_model.to(args.device) + skeleton_model.eval() + + with torch.no_grad(): + output = skeleton_model(return_loss=False, imgs=skeleton_imgs) + + action_idx = np.argmax(output) + skeleton_action_result = label_map[ + action_idx] # skeleton-based action result for the whole video + return skeleton_action_result + + +def rgb_based_action_recognition(args): + rgb_config = mmcv.Config.fromfile(args.rgb_config) + rgb_config.model.backbone.pretrained = None + rgb_model = build_recognizer( + rgb_config.model, test_cfg=rgb_config.get('test_cfg')) + load_checkpoint(rgb_model, args.rgb_checkpoint, map_location=args.device) + rgb_model.cfg = rgb_config + rgb_model.to(args.device) + rgb_model.eval() + action_results = inference_recognizer(rgb_model, args.video, + args.label_map) + rgb_action_result = action_results[0][0] + return rgb_action_result + + +def skeleton_based_stdet(args, label_map, human_detections, pose_results, + num_frame, clip_len, frame_interval, h, w): + 
window_size = clip_len * frame_interval + assert clip_len % 2 == 0, 'We would like to have an even clip_len' + timestamps = np.arange(window_size // 2, num_frame + 1 - window_size // 2, + args.predict_stepsize) + + skeleton_config = mmcv.Config.fromfile(args.skeleton_config) + num_class = max(label_map.keys()) + 1 # for AVA dataset (81) + skeleton_config.model.cls_head.num_classes = num_class + skeleton_pipeline = Compose(skeleton_config.test_pipeline) + skeleton_stdet_model = build_model(skeleton_config.model) + load_checkpoint( + skeleton_stdet_model, + args.skeleton_stdet_checkpoint, + map_location=args.device) + skeleton_stdet_model.to(args.device) + skeleton_stdet_model.eval() + + skeleton_predictions = [] + + print('Performing SpatioTemporal Action Detection for each clip') + prog_bar = mmcv.ProgressBar(len(timestamps)) + for timestamp in timestamps: + proposal = human_detections[timestamp - 1] + if proposal.shape[0] == 0: # no people detected + skeleton_predictions.append(None) + continue + + start_frame = timestamp - (clip_len // 2 - 1) * frame_interval + frame_inds = start_frame + np.arange(0, window_size, frame_interval) + frame_inds = list(frame_inds - 1) + num_frame = len(frame_inds) # 30 + + pose_result = [pose_results[ind] for ind in frame_inds] + + skeleton_prediction = [] + for i in range(proposal.shape[0]): # num_person + skeleton_prediction.append([]) + + fake_anno = dict( + frame_dict='', + label=-1, + img_shape=(h, w), + origin_shape=(h, w), + start_index=0, + modality='Pose', + total_frames=num_frame) + num_person = 1 + + num_keypoint = 17 + keypoint = np.zeros( + (num_person, num_frame, num_keypoint, 2)) # M T V 2 + keypoint_score = np.zeros( + (num_person, num_frame, num_keypoint)) # M T V + + # pose matching + person_bbox = proposal[i][:4] + area = expand_bbox(person_bbox, h, w) + + for j, poses in enumerate(pose_result): # num_frame + max_iou = float('-inf') + index = -1 + if len(poses) == 0: + continue + for k, per_pose in enumerate(poses): + iou = cal_iou(per_pose['bbox'][:4], area) + if max_iou < iou: + index = k + max_iou = iou + keypoint[0, j] = poses[index]['keypoints'][:, :2] + keypoint_score[0, j] = poses[index]['keypoints'][:, 2] + + fake_anno['keypoint'] = keypoint + fake_anno['keypoint_score'] = keypoint_score + + skeleton_imgs = skeleton_pipeline(fake_anno)['imgs'][None] + skeleton_imgs = skeleton_imgs.to(args.device) + + with torch.no_grad(): + output = skeleton_stdet_model( + return_loss=False, imgs=skeleton_imgs) + output = output[0] + for k in range(len(output)): # 81 + if k not in label_map: + continue + if output[k] > args.action_score_thr: + skeleton_prediction[i].append( + (label_map[k], output[k])) + + skeleton_predictions.append(skeleton_prediction) + prog_bar.update() + + return timestamps, skeleton_predictions + + +def rgb_based_stdet(args, frames, label_map, human_detections, w, h, new_w, + new_h, w_ratio, h_ratio): + + rgb_stdet_config = mmcv.Config.fromfile(args.rgb_stdet_config) + rgb_stdet_config.merge_from_dict(args.cfg_options) + + val_pipeline = rgb_stdet_config.data.val.pipeline + sampler = [x for x in val_pipeline if x['type'] == 'SampleAVAFrames'][0] + clip_len, frame_interval = sampler['clip_len'], sampler['frame_interval'] + assert clip_len % 2 == 0, 'We would like to have an even clip_len' + + window_size = clip_len * frame_interval + num_frame = len(frames) + timestamps = np.arange(window_size // 2, num_frame + 1 - window_size // 2, + args.predict_stepsize) + + # Get img_norm_cfg + img_norm_cfg = 
rgb_stdet_config['img_norm_cfg'] + if 'to_rgb' not in img_norm_cfg and 'to_bgr' in img_norm_cfg: + to_bgr = img_norm_cfg.pop('to_bgr') + img_norm_cfg['to_rgb'] = to_bgr + img_norm_cfg['mean'] = np.array(img_norm_cfg['mean']) + img_norm_cfg['std'] = np.array(img_norm_cfg['std']) + + # Build STDET model + try: + # In our spatiotemporal detection demo, different actions should have + # the same number of bboxes. + rgb_stdet_config['model']['test_cfg']['rcnn']['action_thr'] = .0 + except KeyError: + pass + + rgb_stdet_config.model.backbone.pretrained = None + rgb_stdet_model = build_detector( + rgb_stdet_config.model, test_cfg=rgb_stdet_config.get('test_cfg')) + + load_checkpoint( + rgb_stdet_model, args.rgb_stdet_checkpoint, map_location=args.device) + rgb_stdet_model.to(args.device) + rgb_stdet_model.eval() + + predictions = [] + + print('Performing SpatioTemporal Action Detection for each clip') + prog_bar = mmcv.ProgressBar(len(timestamps)) + for timestamp in timestamps: + proposal = human_detections[timestamp - 1] + + if proposal.shape[0] == 0: + predictions.append(None) + continue + + start_frame = timestamp - (clip_len // 2 - 1) * frame_interval + frame_inds = start_frame + np.arange(0, window_size, frame_interval) + frame_inds = list(frame_inds - 1) + + imgs = [frames[ind].astype(np.float32) for ind in frame_inds] + _ = [mmcv.imnormalize_(img, **img_norm_cfg) for img in imgs] + # THWC -> CTHW -> 1CTHW + input_array = np.stack(imgs).transpose((3, 0, 1, 2))[np.newaxis] + input_tensor = torch.from_numpy(input_array).to(args.device) + + with torch.no_grad(): + result = rgb_stdet_model( + return_loss=False, + img=[input_tensor], + img_metas=[[dict(img_shape=(new_h, new_w))]], + proposals=[[proposal]]) + result = result[0] + prediction = [] + # N proposals + for i in range(proposal.shape[0]): + prediction.append([]) + + # Perform action score thr + for i in range(len(result)): # 80 + if i + 1 not in label_map: + continue + for j in range(proposal.shape[0]): + if result[i][j, 4] > args.action_score_thr: + prediction[j].append((label_map[i + 1], result[i][j, + 4])) + predictions.append(prediction) + prog_bar.update() + + return timestamps, predictions + + +def main(): + args = parse_args() + + frame_paths, original_frames = frame_extraction(args.video) + num_frame = len(frame_paths) + h, w, _ = original_frames[0].shape + + # Get Human detection results and pose results + human_detections = detection_inference(args, frame_paths) + pose_results = None + if args.use_skeleton_recog or args.use_skeleton_stdet: + pose_results = pose_inference(args, frame_paths, human_detections) + + # resize frames to shortside 256 + new_w, new_h = mmcv.rescale_size((w, h), (256, np.Inf)) + frames = [mmcv.imresize(img, (new_w, new_h)) for img in original_frames] + w_ratio, h_ratio = new_w / w, new_h / h + + # Load spatio-temporal detection label_map + stdet_label_map = load_label_map(args.label_map_stdet) + rgb_stdet_config = mmcv.Config.fromfile(args.rgb_stdet_config) + rgb_stdet_config.merge_from_dict(args.cfg_options) + try: + if rgb_stdet_config['data']['train']['custom_classes'] is not None: + stdet_label_map = { + id + 1: stdet_label_map[cls] + for id, cls in enumerate(rgb_stdet_config['data']['train'] + ['custom_classes']) + } + except KeyError: + pass + + action_result = None + if args.use_skeleton_recog: + print('Use skeleton-based recognition') + action_result = skeleton_based_action_recognition( + args, pose_results, num_frame, h, w) + else: + print('Use rgb-based recognition') + action_result = 
rgb_based_action_recognition(args)
+
+    stdet_preds = None
+    if args.use_skeleton_stdet:
+        print('Use skeleton-based SpatioTemporal Action Detection')
+        clip_len, frame_interval = 30, 1
+        timestamps, stdet_preds = skeleton_based_stdet(args, stdet_label_map,
+                                                       human_detections,
+                                                       pose_results, num_frame,
+                                                       clip_len,
+                                                       frame_interval, h, w)
+        for i in range(len(human_detections)):
+            det = human_detections[i]
+            det[:, 0:4:2] *= w_ratio
+            det[:, 1:4:2] *= h_ratio
+            human_detections[i] = torch.from_numpy(det[:, :4]).to(args.device)
+
+    else:
+        print('Use rgb-based SpatioTemporal Action Detection')
+        for i in range(len(human_detections)):
+            det = human_detections[i]
+            det[:, 0:4:2] *= w_ratio
+            det[:, 1:4:2] *= h_ratio
+            human_detections[i] = torch.from_numpy(det[:, :4]).to(args.device)
+        timestamps, stdet_preds = rgb_based_stdet(args, frames,
+                                                  stdet_label_map,
+                                                  human_detections, w, h,
+                                                  new_w, new_h, w_ratio,
+                                                  h_ratio)
+
+    stdet_results = []
+    for timestamp, prediction in zip(timestamps, stdet_preds):
+        human_detection = human_detections[timestamp - 1]
+        stdet_results.append(
+            pack_result(human_detection, prediction, new_h, new_w))
+
+    def dense_timestamps(timestamps, n):
+        """Densify the timestamps to n times the original frequency."""
+        old_frame_interval = (timestamps[1] - timestamps[0])
+        start = timestamps[0] - old_frame_interval / n * (n - 1) / 2
+        new_frame_inds = np.arange(
+            len(timestamps) * n) * old_frame_interval / n + start
+        return new_frame_inds.astype(np.int64)
+
+    dense_n = int(args.predict_stepsize / args.output_stepsize)
+    output_timestamps = dense_timestamps(timestamps, dense_n)
+    frames = [
+        cv2.imread(frame_paths[timestamp - 1])
+        for timestamp in output_timestamps
+    ]
+
+    print('Performing visualization')
+    pose_model = init_pose_model(args.pose_config, args.pose_checkpoint,
+                                 args.device)
+
+    if args.use_skeleton_recog or args.use_skeleton_stdet:
+        pose_results = [
+            pose_results[timestamp - 1] for timestamp in output_timestamps
+        ]
+
+    vis_frames = visualize(frames, stdet_results, pose_results, action_result,
+                           pose_model)
+    vid = mpy.ImageSequenceClip([x[:, :, ::-1] for x in vis_frames],
+                                fps=args.output_fps)
+    vid.write_videofile(args.out_filename)
+
+    tmp_frame_dir = osp.dirname(frame_paths[0])
+    shutil.rmtree(tmp_frame_dir)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/demo/test_video_structuralize.mp4 b/demo/test_video_structuralize.mp4
new file mode 100644
index 0000000000000000000000000000000000000000..1170c88e8856aacf38332a50b448635e1e311a8b
GIT binary patch
literal 579876
z9AY(bk^QLRUQ($I5kSf|>j3XOL%D2gTaH}k&B=v6bCHk>xT$&akN ze9jXQVQUx<+_(DT7atN(%e8=qnG@G6ZUP!{7zN`6um9}CCnn7{>9~Tao49haxFcRJt4x#@;36y?g!ud0 ziec6P%@VEziajb7h((TmhePbV6V1M^e;j({cI-8N0a6g_mkx!$8l@np3$sz00j@qeRU(5+eDa z!yE=>`uiU#skABi(!p!!iTPhUdHtkP8<`zYW0NJ zl<#!||0oLVM3e#JN?8ri6iNfz@R3Zy4U-E}{^Tdp&S=5Ks1IbL`dhq^S+bS<`Xgne z3LrkO^ESGUG~2&^+Xl=gR=>gu!ws+_$5s-`U^cNjtf&^}U_R&6)D$3q&QbAyq}#*O z0sb1A?L{%h=0wI|zzrPqbMglT7!V0(aTQn&PeL|b=PsLN$&LWdjKAp;eV5k^dIneF zjTRc2cFL8xczyXkkc`_5ji4&)^$r|P654FD?iOCP(i}IYL*M+T`XNIPifj98iez$V z_ZuZa^rpXS8jVmnFccob_C3OGKz*@9AuWNV~qcF>WmRhMyTK4N{!+hM;phN}60xGp*YaEp~dH_px= z%Av8Uw6h$rKz(1Lvcc+eARjxViltskL((&Bkl$m)S|g}z{yMkK5^4XajImk^*++5J zw?cfirpaX3K?SbE;b#p}$h@!zH2&H(E{^*6chhA$Z#7G zj*L(+^^`+yV=U@u31JpgVTOaBCCaGM1=o zSOhUU0n|`2dN!~<#N+NQ&`4f$*HGwL#B14)!gD$JAE?RKa;EN03)gTHq#6}mN@y93 zO}z;5vtu#ruK7qYp!&mZsG>(S1Si9SE(@ikUu{FCbJp@YY1qCNl)nHsGjzsoP>Bhz znqByL#p)0)^q3y0O5dIv`6;ta^O^@1chx^R3YJ{&93Mg13Of)Xn`S#rh)5$KW-^Nh zFv9OEDC7`Ehv`M%v@6`3tObD8#SUN38FH6B&kLguuIk#RcmMzbP63~DYC>P@2<-b9 z25n)FJRuY{=)j%#<9Jqvxcebw1ObV3_Wd+SOaeY6oaPsaZW8~HkBdb9KPQ`BG|3u? zeV^g2@vaC?6(;!yEgG|qM$N8_eH9!*Ef~3O|eFe)|2Bswr ztdqbr!E;KlHbPR=705>MVyYM?m#TpFvOm9N>HGaVjqj&A#HFZ;(B8if6a+To$rDlY z1i&gdTOCsPAEw%5{)cNg9ifwtl8M>}NkX8pZJG(#OQ>nj66`4|Kl<$&)Go57-nugPcf3+?f&-}jW z!3V0h>+(Ig-W$(En2ZkQo_+&-%82r|@sbLt1XZsWHEGg@0PW1usKHp3)$MZNQUJ45tFT8;x-m}yVM>9yOguQZmZmW9Q z&Jca{K4W zgZV_BDw7!xss&QM-HO9fx+VlA`4-IuJj6(!dwx!M&&ygHUZpSyNF z;eZ2pN;ic701#+Fn|Mp8LJ(Ua@IO#LFbJ>iB`R(X56(C9cb2tUMjk-W1#&X60bpsk zL8NPvv}wO2`6GdayiUHLp#Nur3HHM9)UtJF0V#sCT(l?&lC`*a7|k`tkcUp=N(kN4 zTRK!5~!EfFed}#G$+(f)nd4>$`r0Y zah+YrZ|AD@L&mT_#~T}{5^aT-Vpy0xH$L$hl>Xh!lNR$E%RmYDd`rVyWW>3pTym&4 z*=?(^Ss*F5q)Fvne`0!`yVhZRh0?)w8~7x}v(SrmyH&Xs$XyvP+pe{^VTa5-2d@&? zS8io^ODgK$ed&?*{p?zO5TG=<&4 z@XKROl!uKlsR&S+r99VHlRNP4u|S%%A|gj6qM{-C4xOh;lX|-fd=r z;~CDeLqtH|pVlhpq3&0wV#No|QisP~kaK1wDAx|ei@wg;RDcH=4=EZWmL8t!q`#^;81K_d zG&zLmvU{twgdCIrd8fUl3#!H4eZ)6%pxk;FyFlXIY;#9HxVD6yIU&#`f?cZzr zUz`WU-~c(cSbhOOA5VyEyDg_5<)`cP;)PVaXjxO@_H1-R?!fMg?r=|t5G0DU;WNr> z)$0obSX+%h08FC%hTnxI1r`v0RK9PAU*q-SZ9y+$!gVtMe#xUK(B{i2uzNantFq5VgYsoOf{(!4t!}TUAV5M`uw~cGs1ud#Nv@>1V(t-623?75k?3K>Y9|PT1H-T= zcc4T!fQeT-zM>H(l_0JzO70zfYUopdw~qioVzmBEJtWLF615T8DHk3(F60(TSLGM3 z?J9XD#SuTm4h-GOYh~^L)ghzmd>wmqtYyQjXtG$)l)5S)Aq&0sgMY<CwGMlv=%gFeRLQs2M*S>Ko_PyU#&3`#DJW3U#g~^xe zv~T8f|67A@vcb^WXXV`=V&M4GZtQIuv8|1Alt3eEQjDlE0=1gTUt}iht3Nfe%l6An zxa%JJdG3WcI`})$o8A=kK>mldCaD;TI1Z!2Bd)eCSpw67%3>_VU0CMd?Z!RBeW3=y zLUADj63R50Gz+t~GooTITuf_`twkqSsfDk&{gr#_Uf?WCJ2X%(KXFrS3^q4pjM~;i zUDPLpIydZ5D&QIGQNMz527G@dJ{f1}~$N+|@;_80(}Z>q8YyLgrS?LXrq>tHu@C zPo$I^z01s#9C zv-fms9=Omk!hPd#3@C;`u76e5kKuF-M!>x_T55!eRjcmh(HX=Qv_SinMfhW7iASbM zHkU?}?&dCCaGSY#GB5xC2jhS_<*oo*EQ}C5u(*OOH35sN-gpp_VdsvJBS(sIetZo}YiOw=>?8 z!zDF>!Z2Ss15JZ8%t~{>ccs-bdw0jxp$IjMoQp%pIT<{XSWo`q&h& zta9EJ2LlXRb&OxFN@j!{N%srNTFOaq!x#~l>7zh?lp-hncE_2hQx*NqNMP%@w6M&cxQ6E`Nt}J|IZS@75R1|ZmW?m z&Kdz3>d!H)^PAlx>QQ+t4-c*i{_CtoZ)pAplW@vic}B60_c`BKf1%vh)an|+#K$5N zwwtpo_{MSPiUZaI!QZa`{gKkxbO`HnO_k_sWxReqIByngB;ki=J2=$eJh8EWz@tVj zaN2fS##AGXyMIfEMNafkjx&G2*{SO;GlqAOra_=ARe+Q_JCbKGi4Hf4T${CD`_!BT zw-Bf*_VGi>NnrLg4>0pTewBw7^Su7veua$@ zya}_WmY?Y}jMvf|1B%jwIiBKbE7)EkeVe`%bY%#-3fI!K_>P}<5RNj1I8J<0Ky2s5 zI-JS-uqXo(gP2!3LH56(}*aXbB2WwyySfI--#G=+%+3X_SS4^-FTj=>e( zvLzJ!x3GN|Do6wN`hi3=$ka*}H?*z_5V{%nBsB65+Cb1IaoiR6R|TGEtGG3@VoE*} z6l<`A39c#Faiu97eLXb2JDdIUjE2X%YcZrVJ*sb^qDl>hRBFq;C3nQ&4qz z#l7jzhRk*ByfSO20VfBY?`bRw_;l5QW4vo^D4~(zI~~!4Jyg`j{dW>HjbjK#Zp?oX zMw1AQEM23g;b@+0nJ(wHmHzcz3o1-vaw#^T?-c05lKS)gqrff~7v4k6OSb;yHf!Js 
z2=m9g*_2kR6R)OvPlJP>@35wa85J4ima;tQ-$yNV4KT%e={}7x?>$)cE zCS{z8nCRO45snRBVl__LH+kpW2Q%OTDt;VpGU9&ph7*O|JV;PtlW*d0+!jte_dphM z`BSdGF@fy3F?DV6SoHog)J3cQ6?G;pe0TqvC}6p>omf2aUble}2cusLI=8j2 z*fE4{`L{K`QgzZE7Tb-hq2#$$+sVM`QnYO;OBo3$iO=MJIcjo#LsGj{WrwBDI+Hmh z$$x5T=KcuTv8(z?)9+V<+UW9!aV?q`wK_&xapf_Jc*4ZZe&Km*NQ29YTSbYa zSYHsrbqgBLp$1!0wG31Zm|gl1>}gnQt<3-No-4vFNnb4z;%lK@!wKeZ|2UijC6&{` z)?{PA7v-=x?n}^1?_5BhliRs!ozmdWgv?KXQ+7!F>av|ev%t02`D$C z1Ky+lcv%HCVv?0lZEh1j)t$4S;u|9f)@-|$w7lSp1$3S4xMZO}V0uJ_`)^L@8m7CM zm`~H7J`A;JL4)C?ly(T*U=DN8E4Aw41rpM2zdItPt(|FOEO)E9;oJEXTXz>#7TNLe zrH0C)i#rs}zIrq0R{}Au8dE1>zg@YN{7Gu=8>qrx*#Jxq_u6SCu#=DH)r9_FbpJeR zie_jH;2UfPgE9-K*EPeVJ!w1KDbjq~)g<@jGAJO8J#p*1NVCte+_PI^_)kS7gHYd& zmf*-?K6=5xEf?p<8^bwuHSF7!G1?-knKU{g{k8n%WeD1Ki27(wv3dtpd{#%|7KaJAf6d%tX#*pWQ((S|8*rYhNFX7LU3=;T}& zRz%>7l@P`17{f)PD2uB4I-|pC$Vd@wqVPwMJoACgF3|{0VvD$tvPZ#eUy8VA!|mr% zwOJ3f*;(e=kDR29=*a#yB<+DV*fQe{`R+NylK^V;N^y-{!8e|ZOfsg1NeEbN+skx_+)#o~mS@EL&eFp>U_)SPulhDfa}N@A!kU1VUZE#-nNf zb<3P3(R+|>aC$g;WIdSI^(1;>9ct3~Y+tCQZMWi|;sOMZ8=73=^=BjESQNrxTn~`| zI=k8sI|}*9FzGC(zObn3Ty{air7gt+eZ04wE|Pj(Q0vvdF9}#g+-x5I%(7VCU68@e zendfCFX(sSU*r;1h_cG8j8e4C_t9UDR`9?RHT&T6&31JzMr(Lo zrlVz$StiwBFmNPX!{0ArzeFit8MU6V@MK}R(2;Zy>kp(eQKT%KTU<1!<)3Gd1QW#_ zNqIK=X}fTwchO3$CVdMTKTB|CiB4X=M}#MHVWGS)KN55j(M+h8wr|6<(DHM#$nUz{ z%TOYy?O)z>1^vx_!*9hXb5Cj7bZj$a_jrUevQ=g+nH}EA(;{>j9FWIEP}Wsh=b5Nz z6Zl{lC~Z}ogTtMQ)?ruhB%9hi*c5~VEA~6URpV3jW}VBnv;G|ESs7^NGo*x18ft$4 z00uolpO!@xEO+aXvlF{fEhyRX^UtZ9{^v~ieESqA{BD(7ABQ&6*Q|dPqT|cl4dM3( z9!;)t_M2JKO-uBH8(o@P{)eb*n-3yc@g7ttt>4(_JTMlfLsSSS&p(6f1k(}*scAHPTo|Ywmg%B*1RTkrL*5=E#NYWS zC3Sk9%42k9u`9f5t^q^$O@6cKXEt!F15Uavj6Fe?8^Tv8+8OBhakjom2x_lHHO5vW zxmHHx8rv~+h{+J7WOmm7X{|KBG3E?xrDHx~1>G6GPTtaeHv-3~b_Bi<(@l zN4c>e_itB43^Lm7Y8jyB9o`MqGHSiB$kqnn6tz^>>)Y0|Yb~P}i4eP4AXo^k)`ck7 zCL_e+*zS5X8)DT;4R)~dTfPi)Qb3bPZ+=hVUX$z+o$dWzCW7y`(w-?|kiB(@H$B@@ zR|~TReY~W|WjF>DnkF&bc}+LwW5#Tzf|j3(_oAyc>Ka75+_W2XIQlx{^`k^(wOa-# z0(IYAK7+9~yNn``y3LTnpQK?xC)Qj zQkrUi^v{I+)_C1UG-I2cnA3dRN-6<#eb$>%5k_bG7w@5}N6X^X;R9T46i(l^tH-qUGa^CeS{aD$)n_-*U6 zmoOK`bN9a(`Oy`ZDTLEYf4u{&kut5pGXZB%NHmQIZN4Ckb_`ny`TRHDBYgIMhT69O zAci;3XK3G#H%vb{U( z>p4s7${-2Q+$pDPkP2x$`Rh&xcf&Cjck3LC&nvmUWP1&CBxNKV0lB)?pQ@~hx1|;+ ztPTH72;k`E3-6hKJpWSd!Nz6L0jyXwkSolP{9@RtzrFNhZaZH~<>s`yhGGZvm&aYL zCdd0Wt)aN{0cRywj|%zr0mV`C6&ywmS)(itTr2i>n!8qQpVVUtu%Mb&Tz=C!#vb<7 zII%dQUN1wgQ2a|B1_5AltOl3*&gaf9Ua909%_& zm*+P9bOxhP2>sD8oRwXOEs-8N44g%f%J5PH{jw7BU#a}&eL%JHyDw%Olc3MfMBy3Hy&a8={Q565he&$Lk;7SG!1JuDWbWa1_~J^sBPyjx zibgHYLc)}#B`qFJ>)ljXDNP9S;D4+xvWVZTr!QP>`XMftN5A2|Z3dkb;}6oHGHgGi zh~TRn9wFpCFv$I3GECAJly)Sj0|UTlS`8vKk5tJWgsvG~68P|8^dPSj9d&VNZeQ^N ztU-0FmDzETvML^HcmO$4Ex4S^d!qyiJWj9|0G%9eQp8jy+tYaG*%Y|Ds!%D--G++vZ>7Wcmd3%Rl075Xv5A05NZ<&9kVm3!oR?4)!I_^8Ma zeuX#XE-wjLY`lUeeI}G&9_sW`N8Vz4R_B9n_}bSl-71`3y|pXg0008g0iU&WLSO1d z*K8HTU5^2@`^AR*MY(1&S@>g97o+kLC1P^nO*o~DwU}831*fMF?<~Lu*FwOQI7OU$ z%Z~;FD|O)$8V6Tn#JRK-OiBiA0sknkei&fI*%Avx{8Mk3C{YCC<(KJBsiiK&>nPSg z&f3)Yw*%uuQ4ZX(Cdy@hVDl4_(aqYI6Uiqy10?oiC&{8FSZgTnaI+6-WSTs@_}9Mu zrR_^H;iPj^mG_|p-9f0zogh?W7e#$vaIRufcE?<S?c%eXQ;F`mb5+!5r*!n=Gs@ zbM&km(NHL?H0xaCj8ikOHGE)8Kp-;iVe%m%WfgSzrZHffQbPyyGV{V=4Cv?brhPW2 zkY;+dW#fZ*(m(qV6Kov*@PHAMYDqpDD~Oh!BEF%0HU0Zki3E!F=3J2y#|^2Mg7?qG zl&Ochw&5Z+wo(*diAR;|p3V$DeTL^XxkE>9w(+2PXVgw7u0SJeraE9pcq18D`kEZ8 z$Yu|)7;};rv5qfLC?#U3=?}q8yNFbr5!RYD8Y+tZ!)d)j`e(9Sgf`enMGQ0cDC>g2 z>2ZlbaQX`mp9vhVF+cg9lqDEzNWMSNpVQ&E#SBJaC`(sVz{3l$=4qP;DjJR48l#JwF55sY0uS zg?CS1wM94Pdc~y20~=nimlh+L8ng?IT4V5|6p*eeuVCe z!_{bOAGR((|8Yqj>>w zZ2rO702G#i9Y2!E0Jw-fvv4&bk6N1aaGPwby8nL`($@D`|5COID1tZPw%k4*#DN{Z 
zJHQVeV8Jh#M#4XbZxXbd2ID_DfVPB-hz6jsE|EfVj<`=J#VPx3{?2^cdft${o??Dm503M4d=DJngz;0WZJ2x5nQrID zg@5S$=Wg8q07tsly95Kik%cJ1i_5|x%WAcs-jeKSUrz&1(%)Yie34W$G2^!pCCzx5 z5VWM+OvmKCSMzg^c4_4{m%Es&a0|t3Hu{cO(bDyjwtoMqs>QHKHa4CBDjP#NX@^U)1`!IDLWhg=3M|TEQ-NI~XU01BWzr>lo0fFmJ^Qaf z>8L9}w=tI9n7`hizwB{g?pfGgN%Hq%TU?j&rl9F(-KB6=qvl>;0pGY{%^G^^je#%` zi=QT06Qwf)HK%C0n^ZWp{>TJwpRjray7!iqKVZ;_YMdL|hr+DLRf&aVAbiROkVkGs zIfEY_=5W_Kt78>LeyJE=3L!0|kwFK#&(U=A~3tJjbeDK^AlL?>uN0f_0q zzWk%-lQie^cDHPtSZO-{eY`u@Uljqv5^B-_)-68y(ZZ&3tVxEj==3(VdOEp_E1udB zw_X=1e1qXJKQBha_#_7E*{^J*N>N7q`ekLQ?sEB2#_O#by(r8#(Bpbh3@QA_0cvF* zv_{rIiCFJy@1m8f4l2L*T>O&G#v%oSUthPKYF~njJ$JsF7~9V;&*hWB?x@_?Y(^ca zYt^`&k;sBthD$7C##n2!ZB9A=9c?R zlXM?ahe;^1Wku2Pwqh@SEZq44um8-RUkjr|s*%if?EecOs6)_7qJAp3(hxxUlmj~l zi01$s=`o0ziemSCEG0aq{4B*Rd1=+vCb1iU9`5ZM?A~(Qi{Fbg0Q@_}`<#jli_Vj@ z1xCHy;i{PK1K-8?vQ1JpzQ61SCZ=CP?d?!r5r*ao=LaJNhdFY7L<6X&gEF+h|BAZ= znNvcO$Jm@ZhiBR>q8c})h*b(C_PQ8L5EUK|gIjTup9WNcP=r8}=C;F5r=6@HsUmr< zVLI?X@uC$!!JNH>+{$t5o+d8R&BB+l&vNGs@nM;NH}skGn!1iUs~{w5Vg^5eBV~$WAa=>D-;^C~I(oU~pPr1ZS*MUQS02;i`qm!F4(Y+}`%iFa4=insA8 zLZCjU9s)Ax$cSRMdJbMs5j|62fsn{2tN={5bU4lm1?T1BK6jxS{fGlC%vTi0J|e5G zCfkOpM5j8WSc|D@ldqR)^F&nUVW2F=FEJ7Y@-5-;XCoPwfvdRlF4jDWci{^WD;|*P^9DbW@cE`n+uB)+F zwpkqPIt62@`Gq395%BAC#Gz_%{_Ey@gJ$- z{Wp%_j1;l1bbYiF|EfWZ^gD-mMtW7H5}5rI^)v&tIqxWF%6Z+3>B?ymDADo17)RoQ z1$c^blb1lPX_ii`KT_sQTc9J|2v^%L+d7NZ3wf(QuH$TSY9*bE$oocT<>Z-K-NIe! za-4E(>7dhfH)s;(!2rzguoC5Vx#;yUIm2}`bs+x-%VKML5?M^VW!9YIo^V zpQd3pYf%-~7t}YNAR4*8Tkq=zhN{i6-91kA z^;Lq^?Sh;>=crOBJ=|ruVTG?~zX^@frX7D#7!khSAObrcht#E$SeP*^Qk{96Ol@|@ zLzq)k>gJXf67V{o5RGXoHQ3e|A$Jk}-7&U6G3ow6`?J6(lf$Jcqu+~@^2=;5FGIJ^ ziz1x{HOk!~x9azR+^R49cv`{+??_}L!=M}9M2D$i%<7tcW*6Q7a45?9RJ{<=0K+Uv zLZsXCzEN;En*nF}iIX+gLdBdwzD$OM%E~Jr-4~m?q*y?8#=@^B+7S>)aw%5|1up=P z!Km461YTzCOYp<#ab#{c>C?;sCI?*W?G{MynRJuvoEtWC*r)Pfr2l++lkyk|Tf6<2 z(>e%Aj2T02*v3o^MTdbu7v<2I&T&9+>$c3z>zgezXy+rff3%xUSrPA`K$B*;%;Bge zqLc88sG&l3`%uV+w=M)1*5%_sOGyTjx=g|q9#QMdccP=;C$$)Hz+&(IZ9IGBWWP0# zfJy~QoKEom17*>c76w$e%$rc;<7CGZtCbREsBcWh=g`29J=1J$B(Bp?+(kUEQS;ChhZDws^l#RLQr)AOu$kjYvN91H70yHyWWEd(MY>w zqqOe85=dU$^u^ClFs1QRAh^WIhWi!&$D%O&{MZ0NKdO%kL)J_{sIZQ_s>O_C+?Z`% zdC|5+g<*y-e_yBxdCX6ua}$~_iqOi`6Za58UCcdud*)FhDB&9C2+TPCGRc^I!(F;K z3Okm=kj%J6ytK4f?th)T@)Ow++^A-I&)aY~<^8w1m&^YYFumq3AmCzO1}Q?iDaqON zd7iQ^0RxSoBz{K5phbjJ1DrUu^po=9kExj())N$KnTKvcWx4aXi*RtQ;FQSIEs9N8 z*@SC!DupQ?Tv|fXeGxmaP9*Ly?1=uAmKu{>fv4D`N*X^L5}j$SG8#rwk_0+SBXXGZ z{r5XM@~f|7Ix1;HAUpkATojBW-4gyEvw?BnyJTIG&dlH5{sl$3t2{R5HC1IN*XQBh z1yI?x%jb0nZUpoBHrq15vOnwJ!e6?0QT_uiF0qT0%+o5LCO>OrQ|TQ^5}ZB-Bzpiq zFPrSx4k(PuAs!xak_E;_6=k+Bl1Fh}&9?UzcYXsJP&YZ;K_cPc8?(?0>ezdaB-4M5 zKb(d~JU{j3gWT0}C!>ZYI|g1kfO8eK#CU4w(ALk-hgy)2xay&fFILR(e(E+!;W zvtrg1Zw`0NCliCM<2mPJ%;Fb zw*K{PuJXr&=D#Em(V1Mb`OyX(@cD7cv_lpDZuvuAL}TbwEq`+vKA+IfZ=jB5l2-!= zU#IqanyoWdBu}uhk#E~Vd%gL9JT8Rs0;+t1JO-pqj}K_HbwKsOUaqD_)r`Ai} z(m*+5Gaysqx0VnKQkLWcgTJ9xr z+5~>NgU;8$n$3rX)fy@w>$SQ2FU245a}wJ6KFUFsDIJ>OE1+H;nx-A;K!%D3{|-29e(v1vJA&+^yCk|;yX?nKWTkv5 zo?Ga62OD+7STeJ4g40oD6<-KO|Bd7A4TH*q>|V>-ndCXr5-mDp_#!}NM=Q<<8$HPIdxVK)xoJk8ipCiNq%toYI5Ua4bue!qOn0ZP{@>(3%yq!vKo0?4X3UK zT^S>3L+G4Slmh!GlG(y|1!Oy7*>l;QJ7=d}j~6gr=R>RtD|-Aw=%lKUvD+j<3X-G! 
zw9?D&AR2N(F}A_v_(TmcOpI^Bp_{B(dn%v4wiPtNRJ7Y0J#G)nR*w#8>p4`jeobv2 zc_?V#f@6;g>T5Ugq77(>e?J)|OHvjIlsXvz2QtafnANOKN3>aRZ;XG9*$bSo=`U+% zsV;vAW!vMuH)dqjZ2(d5Nz;{?w?20m9buEvb;>@@jD<-bm|#Fb`3hC?uZS6@U5+Xg zVz8mPO?&w|KaOB1ukVA}RCKRFAKDstwqg|JU!M1{i;>vZOz;8y0Hsx4es(bcZGQ?j zSkq{0Vq12)oI|cs-SQkty%WMN!j$cx2%KND;Wq@bnAQe;aZ24^q{gJB*D&M#j&iQ; zQDzXhE4(QcK$*pv_1_#wRj@ZWb~uR%(;bZw3e*~Ms@WMIPWKfb)qD~hmKRY5<%y$Y zVSatd6~=Wo3l&)QNsq|3q3X$+DyCAo=80KMzmna7p__t8=&Lp2s`dUdqALat$`%zhA1EtIAD ztUoXB1DFx@nBEin1Ve=R&vZ0PTT^dJ>yCl|+) zQ;Mn@tKf9_+UAS*)Hf&^fGxlX#yPr)KpGrKMo$hXXWPa_Qe>xIy5F+^J~JB&?b2zQ zW((Tw4*E{Lg&^tO5jMa9L!q6&?9UBQ|JQN8555ViQ0iL-T4`D)&!HyIrr5b^UFb`Z z0LA{9jw!kT6>8l1;ko&iznhzd4_eg^yi#Y%*ejJK20Wso#N@`(xbS^1+5_E>f1oL* za=+E7C%1MEj8AO)J$aXSenns&`PE3OGXIvGM-lhn#-nC53LOXF8j}?xuzegw8V7)Q z=AY)BLFqbS;&o$r3z!FNBU6~Cxr1-Qs3`URRAgH8LR!cjxA$#McS5MHq}Y_V#2IChAy+WJRud;!Cr^(A|Nq- zx{0hsV@2za^_WAtt*rw2@d!Xfbs+*1d-g`A+zl7MUFs|=PbcA((PR8hdL zPfQ)(ee+$d{W$%(QuakF0K=bWT4P>c|)n_Pnq^{;9fc+KVG zK6VTYFVp5^XVR&^27_fl000GRL7&=16)YwH_xFQnHBfw-2gFI3KCfDp)4sQDRxwI| zJ-YDn!O&0-$CCb7jf6?`MKUw-2Vyx0@SXC}2FQHaNB{^*H9?`jiR^F~7D<8{77ha+ z<9Oyx-BCShv5)f@#?Qs3u>lrJRu41e+m)x|1E1;`j{^v-rcr>!c!YMxp)ixizmq;+ z4@VNfJ}x**@L`X&%tfFgzVGaeHK`ZM89~-pjYwTaIi7Z?5H@wdS7X*!e^xP)wC}lT z;X-av+IBYBvJhGn+4Puc#?QOdlxPdJpnM_}Mh0AJO9~qr`TAuQPu15zzZlvNA`r-Q9;>;dfb@E@^mZuHMr`siF z)A?GAI%bL)(zPu?+{s#m(guu>3>24m_(M=MFjZGMDrY}7EhqrplR#CIlrI~(53hk| zel=Klf79n{o-)uD{1%$}0YkUd!B`m9@V?Wc_Az4lNK05OUHRS!1Bz2+x8~SNw^WIW zHMs_`GZUFR6JYV*5ghR#f%utR_!Y_9dnEOLEgQ0*?EQ8&=_^iqEzZIBEA8eD?(}_o zrN0)N*o%HQ@0U^dMuB<^+cp^m)O&tpYc&5adeb?>1g9dYwoM|zSrXSPxch=`06IUW zivhk%p@d`9*eb0-cvFF`OkBR~+E6@K)7_av*6`-%_RR(!uzoBhD+_eKF;Q4{)Jj^9 zbFRufRhEZER?Lf&Xk~*AT^Lrza|6^6)wha@?F^OxLM99{k6e{I-QINg^{f%wPOagD z$WFbAtV(kdLF!j1OYggV^?Ect+A$9BuW_Sg@EElgwzW9(m2BQ??HkBT+KR{csT!MvElF10hG1gJ}@p*YR52lv<87c*D zZDhBe1}WAcAmk&jkM;R_Xo56ZS@Sg9U=@UK7rx>kk&O>)GR~^c4Dd6uE)1;vIINus zl(!fE{)_HLax&VXs9*ZLx4(=UtqKJJ9G(=QhK$ppZbY!44|5~Pt?*tyaUwl9{?qmoF{g)DTwDj=Q+F5)cL&vQHUW#_ z%Yg4X#?CZ+WrWaAG&D3TlgFL*>6^n)iIuC_)>Fi3%{Jqf47@aJB~M!IQe4V*AW1q& z1**BfC^|s`(M$d6O1orESl;!vqhPU$ywfV}I_ zg1!KbKyklZh8{AvUC9i)R5|_a!5hD>0FgXs@sSoH4YMfh=xU`qr2S1qIXZpf#8*)$ zWaz^412;0Iug)5elKHr$nV;brN4FWbl02@e@61uAucD0xt%Stbrbm(mI7POCyE+02 zA=!t;0z~rKQ~V$@^S#GD@OI`}sn!CjMKiTrvL;h@&vv(1$(|KNOTd&1ubatg&wXUn zEsA4mFi#c1000AT0iXGFLSOXCCfNr)q9-3PIa(`01*~lB#srxfQmATda7yyC)^R;8 zj?*DOixyJok{R;eQxR+XcM6$W-3Wac{EOh9vKQE&+G<^IN9&*?ZT>~( z&xXm^s!j@gcS#P#`oLKOq5H+&RPd&Tr~f|ALG;{D4Wv6oLCU^@P)P<4ro~O&v}_9% zpe4#*zsctHwh|ODoSp%v00VT57xFE^^d~JUCN(r+CFyHfRH-l^=<&ztqdR}Qn??KU z$zJB9(Q*n^@XN~?wos7=ucRHBJ#uW$u(J-MQi*h@Jq%N-LN0YbGo4s{&|CiW#M`~6 zHg}Bg3!nqj!(Na?1ayZ&qq++cN%)<+-uG;v&DGqak!>wO1XG`bTWWSlV+4NVEhho9 zOmt5K#F{eS#F->VYB|)5zb<{`5P$7g%#$l4<$LEgpK&;9vF2Nrr zf2L_T^3z0Vv-sj}i8WiWqFQ!- zgc3}!7GgD(iH~bdrf4P$ilE-C8BJ+T>mZ0OO8UD3!T%AS{GukqF)|tXpOdkJ!wk9C z?;>CW;~}s#>p5UX9QsppF;~PSDC41e_tMNzIYN2)0%ig zl`7X>2tz)U)9%kZo|t(_m1#?Y40i-Loiq$@AT&svoQqzQs483?0g$NSY7d#Uw#DOx_+%TCe+>7@u+%MRWV3)uU0Vs35;qs| z){0t>OqKCuPyq+se5dj<4OD_A?z$LK8#8<_{p)z5@gZO;)G7_H1v)VoIzBM+5uST_ zL?ISpKe0CE7hLV6pcVogbPkt(rFCEkRH@y-+2u>+8M~2izbfAcJl9vwg2|UxvmbNQ z;U%-(*C$&@Lu$o6i-!fZP8v@kw^WjwDSjG`vD|F#_x_-(XE-c-2d2RRegrEOjggRx zebTva3Mku-C)v_QWJy7I%&ql%BivcpE6)Q-HwD%X^|HDB$$+Jr#29Q_^}`5FE+-n07=G? 
z`tWcyg;-vL6^_6c_bR1U(8;!uJ)1JZ-QA0eud zmBEpEcL6J)^NLGlqT_K<UP_Gz>9exG~Smy4V_J9#N4qQ~+8AoRTh zToOf67tao}BSO2^+UnS3_hxAGsg%y1Qvd0rNK z38S%3Z5{m0iX7)Tl%#NHXSJjlK(RH#PA-op z(D@KPGWhP-A!a`%S#bPUV~9M^eb{eifbqt>o_w4|PpZfhvRYlL6x-M={jlt1{CSB= z)#!t9umN~Wh+@Rpv4~LYD_o{#P>Q9sdk$Eh(0bD^m@PTv0PybuyYN$dNv-f}(Y^*gEljVyeaeQW5Mn_=m$ zD?hAGoHqXDoQU}P>(8JuV$J{LViLr&5^67usLZ-%1(B%U(B8akrs*fdX&HGLDcvew zMdOwo_XFutkGzb?$*R@OdvTZu@5e|TcIWh!{K^T^RtNHHX!Fkjd&pZIUTXQ30=S># zp^$SgLxJ{~xfYwDj{JL*q(NzO9b5iJEk(59JH;5GseS}ORaN0kYeyNniAHww`{zCO zcOilpX6Ne+Rovw7Y!R<(8tUS`^YuQ(M7`RM=_0%l;&m;b9HP0nvcUiV5fVY0;7h1N z5L+OO2lWB`z#_k$nzM00FuCqElLYcPH-l{48m#TkXeyN=oV66NaC${D8hTM{*6rm; z4KNXh?@9qLpr3{lTCVaaQ?60w)v;LLDLVhHX(<<=g;W(pRRsIo1dc!~*S|&$+SI~D z7-sSlrP9nRhqzlu#cmW}^?nB1+3v>R7$~B+ogmKr&#Aax%uC8lfwzayHI4@ey^hz- z;04uX-to&Q*63}U78H`dlZj{bt57;eH4V#dh$BLK29THrA>F}F17k^6CSP<1P&iuE z3zg{MqmO)03{gVh(2~ljqgHTdhU>hIFd0m zn?pa0;jEb|rtLBsuI~l`d(yD&r3o#yj)`tk7Gvkq{{ZH|N8l3;m-3w#T$l;|1CeoA za$mgloLbPj4P)HDv9;<`z>>Khu%dtq7}U=cMuu?pEzjFrlv%0W17&#cext;ipl|3i z)0ykL0PvoS$Ugl!F=hg*fOETNg>kq@y;=3IQA;CmyoU#{K^a&W&I+AQPc4naSKa@1=OE-*z=me}ptU4ZKQmg=dpRUyEHgA0Xz&x-Y}#>@B_ zWtXXb8k>G2)yWRN#;dc@H-)f!jR_u9lGLL$_uD7i6v|8$Al za}-Fs^5!_4iADN}kcnjaiaG`>;u;?y!PoqJIUN^b1m=dd?#41uKOJ!e78^)7vYI5)B_IW+az%Fx3=`k2K^hSNlE zc}`H03`%%I+H!AP;!@n?Y7Jl`uUZ@mp|5ar+<#^&`(;$WpT9~r-)y-BP69NNjL4Ix751m2co{vPi$?txlz3L3am8W< z?Y^X^j&PsK&_QEar>H9>Ha)H(y%~|9;(Uegcq-9N^$26Z&$#G86O%6@#OArQB1oew zfez5aaF)QZu8nXC;)$ChuN!oPi*AO17#Rlsa9ijPra$AM`Wma{e7GeKxWkeW9z@JG zVqbqK0L~97SU3A-z~+-XwhJAr0gN1=4Pj-h5E}zPF~t)8F&v7UzDQ#uE0%)LO#=9T zkHDX9G*ie8ZmF!MFkGxqmk?#W(_ALBP)PGuC+z@EU$P5l4RoDmP}y~ohohZSLY30@ zZn@?g?S%sF)ir|enx^9-|LTsm@oteXQ8;(tUpSI={u3+*=z*33@X~78%t_@ z{k61JNcG^&EHExr7E-83;H^-ocHUz197Jvo*i_K$3LS0>3^Uq=KTV|uD3&RW0gS^Z-RP*f`5kgT`;;p#*-=w8{I*5pU&D^LUNBGINRby#I|rriI$8KQwrDVr%8FC|pZ=33?KHeS zTpcOmB%E1Qa4flmePxI?Pi&3 z+D%d=i{SnCFk2Ip-@);_9rP?RBcEemE^+M7_iBuT>McxIuRb@th0RQ^61P@Z?;~#M8OX*4m<;d0B@N+bPbY#b z=hwL=`q$>#kpst*E)f(N(8qcS3zY>$)xbYx9fxWkmX|ToZ3kB4j%Pb>z zN~fq@FpMY7P4a-!uANq6bcCx*eJ|vE z-Xqi5ui`?%KL?nq~R@&T&PJe4J-zE zh5IhWJQAHmkMq9Ip?mvUj?{$ZHqz}>T|`&s$KQSDRoM+MN<0qbEp4T#N7-6kqf@K0 zljAgAt&@brD*Du?Imeuyiht}Xt>-jw(KK}ppuV>?d$1OxC&EaW_PU!z8}iu^FU@v3 zO6v$yFC@%#h)}VKq0oM*a;+A3Zovb!2wvbWS%w-JK#yK6;FBTh{mBPQdNS^C?Tlx* zerPwKnIgR}V$dy}AM*}?G%cVML`M8A5Q0t{svEC*vi$xOy+%0!U>p1)2}SloRfFY- zSVcjD1cRpnGLAth*nJqXc~2 zROOpk(~@x@&~sO0KuzM3&A(qG0tTzJ)kHv_dShm@U?DjyGD=ZKFuJdusxf}LHOPG5 zkh_}oz@4{Zz;q|PDV5jZHFjY|&aMBilKXHlBYClF40Nq!G4w z3@hf?zv$p>qt0ZLts(>WO;DoJaD=+`X<~RG@08}|DJ@`kG@|J`gzLjQWE^O8YpsTY zz!DuZVMH{1B*jv2zuXW9ZVA9oJ*I7HHwJS6N{sGPgXKsrTWG|99}+ebGAF>kbUjJa zWn${g=PQ>B(PghFM;P4@yn|)3em7eH^*%P11$KQh65O>x06^y1P0A2h&Rs^=qjVtA z_gM-bBK*h%|F)2*wk7`6;i-PgMIU5beCpc>C5il`laye!T*GA{G8XuPtgH;a|0O5pACJXkJO_|MSJytbENh6$+<)$<=0148+!*<;{{IcZ~f2@R7>LgkV z@&%7SiI20EHYBI$8`|Y8gOLu`l|FM3p@IZ{RC_M6e%;7hcN0TdO9}il6-psJWXOSM zylZ#K%5*5?w0(KkJDn3QhnIV|idPaHDI44(y!_R&E@L}z)>0|%Z9Qm?UJm~uSyJ_S>XLdeOgaspfG%*4*I#Sr+J0&j%tlpHun%Yn=o`{mmf#+zF;54Ot%$~4+BveE3_4&-rdGcNI<)Bne z**y2pC5B&r#n^FUP`pwRRKPXbsURIxA)@)I?zrA4Ve0m%y3)$EX9>f=(7I4mg80^% z0obeNTqLLPya4aw1f6i)OI*4!_DLPUtJ5ke|C+N^#(f>(n8uX!tRb#HB>eQKs24!| zL388cniQFDFtrmI{rs#$!=3FV41A^)L#kMq2m>SundT;eZ56TXO&L_7W+2v08g5m6 zU62YVV#6?(gJ)pr+_pNA&-~N_DzC<$6o@46HdR9*TsE`!>8qAoF*cvt=T530Nw_xmWHrN7<=0Kz2EgH#lBGrm(7SPsuo8$=(sHg=@?QC7$y|&XShFsr zh8_wsjgLTy3b(aUGzNx#=XLWe-HjSQFN-zhBCH%2U7*ZVq`IcYh#O?$*77?4B$pP( z$U~&OdsgC5?=;UV0W|7BZ$#N`O+c~qc1?_91j#W9RBjLJF5Oy&fISJnCd(@dU7OJ; zYu^9`!?!q!{G@NYdUJJUo`@R$D-V6sZxh^V1jb4m4`TX 
z&Bp#+6E=(2(+O?w33X^1tikb=|AIlp6vdzB-rd4|7-G1((vXIG&3b@;wv{^!y}<&8 zhMmhHJAuk4{C}FwFXl%i0<4p(RMTzaxevka@hl9Zuq@=$*(PY|N|zjE;i_N>uexQM z`oTHjies6Bt$h$-=W+iEv*YmVdV1WOXWPWYajhKQnqyrTbHT+k^23_Nj1+!mjpM)Q zsoUG$<}~n?!cGi15^@2~-(TulLmo|S4aOeKgm-026M{lU@D0pEItySEBGaQ$!Zw1L z+t#(=>#fXvxB%bw1`!~TNL#dml5Y?)9E5Ceztnt+>VcIaglMvMyrF>YA7zu0!CabP zS$=(bVZO~ix_P+=zS^H}N7*@GnJfu+Uo(;o;+be~R3YLe5jNZi`8Zo9)9B5kbh37r zU+k0E8qE-H2jjzG31beKs92_#x*fwm>s`TOI7^Z9i$d|v-+GKU)$c%hk2uQgXwmEh zZE($xsQlF3i2oanROY?hJ>Iy_9}$K&8YzBf8}nbF3$Q3 z4GhLIFe;I>u=47(!)Vq<9wPUhX8Rt%$gPd6)0XiIJ0335aH6?za+6|JgHUg04Dx@rrMCw#o`9j< zFZ2-w{x^MqLoqw(=HF|MY(Qlv69krOV$e)$yK;L}oT#x-Q{m*9>&=-Qs0SBQ{NMtD z0A@R7jftEe2mCQUaQKG96IVD_jYRVoLNZ_zRCx*nJU=tqa~_2fSDq`4)$$N;qu=Ys z6c%B;9esAth>$S-OQZTPojI2(_mpVpeuQZy%Op%000E0 zL7pB(6)bo1p?#=VQ5|4$c_?HqKY(=!8oQG$#jS1f8Ly?L=>27h{i*KG3^;g7dFval zan=*X2p%Kub)+VS+ORkn?lDqZ0}UY5K*@6ud;GZn*Mlvn&$6#GQo2(fLKpx^I`$=U zHz=tL73Jz6Ob~YaUlt@F7W0s`I0*BpY%XG6ff%)>7Jd&EW)D%IFs6})P?T-$nUsFmA2Ga&qDLg8$&a_xt^Cy52TpCCGVPhJryEY#-cYEzT zt3-!O{T^orle?6{MhYn_Cd8R*RaX?VJO%>jmZ+g1cPvtbjWmgL00!ddXfDF;p8x87 z+)B7Ihb6(dt#g^~`VVhG-uHC+;ZHFc>7&Nx|B6wZ4USII+X*BA9`8>3K80iivLH^m zQ4!AW-Nf_A>lKH7WasfJ2QX7+jfQu@V{Cgwz`@2^lJ$swC;#VHnkH$aMoLrW2PAke z(J}=Z8)*Yr2%8REuAxHaVioP4@Yy9WhdVr{>3fRjMBnkD%4jc)o%=xlRIGib=ipz6 zW6vCJd~z?Il!&mU{P@Ny*hO0o=O4fng{zb%;)7ck5zYD-!;k4K$~+GT$k;w%@nFJ+ zhc_?m7*y64X(b+SJ*v+(?+~|ah)5rC_%cx~G2=yGACcRaZO>-WC%DPtN>V%icY$5_ z=ld{+_+C`B3s@M#!IRp;cD1M5|2-Wuvn9QtiToRze98?j(rwgguIKt67vPvxeYQCf z0U*)r>0CliZ_}lKL269^5t~F09)GH_0x+nT`y~=#4P?aij%4?aw4n*;*$v^<@UqC_ zsH2rhJ0KndHML}!aeW0PDdS~)SzIWMnZ2ujWn>x`UdtfeyqWG%R8sniy{DNe*$Z3KZSYUVc9(t;c%H zztQLWHuy4)*{Lb{;lQso8FBPRQ_tMKgETpc{%H)WVbrQ5)p$BXh!#Qqi76+pV(Qil zp2Mx-+j~f1G5uTKf};@L+{4uJV4bQN$wFVF;0Qb}k2m!dqORg=!i?@JO-niPN&m(3 zeZ}L;b#d%aBtiI@@LRW^Ix173^oOLi{8-^@Qucb5seP9SgxNc92yr;7XjMI+rIc29p-j% zxu|T@8Vi-ERA?kwXt8aB*P3O^6@4;yRun3oD9c^NWb>M<*LB3EL73DxDEJ8C>O|qH zI}D3a;!h|~wj}3S624jZUJx|3N5t8>t~@`` z_}o!*$L% z!yOzXe7P7n>!ZuO0C2mYc`d(`_l2O9MGn6|sueYny2Z_u1HU80gojL; z4&7Fi{sE-Etl@hwd$m|kAOSU-r4umsAA^QB&N$vXgcMhrk4BI2OFu>JD12G!;CsYj zs_#%vTLrh^ysyAb!#Ot~lEJp-i?_ol&=3HV;$NiFhh|6jXS-k&84z!l(}FXJ;u7y$ zu=05I6FNl2aqx-?Bf$EvmX|vNiRMrC!#KN}J@VKvpfSY6zn(O#w~V!`cb})i_&Q7D z>TknL4Wc=GM#`aORZ)BTqIUB_(kf~S?Whg9+@I9y*vMKji*#is6r(dTYxJa_(CExa z3&?{gij|UlHmid?wJugYA;9kzumAu7asi${YC>P@1D#~=?(uBc`s_Y>;awsuDJ#T7 zoqz!?8RWI|LA2heqs=L*HE8OQ#a4{yH&eE!O+@|{TYPTM|0DV5Q}~l~8{a$ei@@L| z#;Y08ZZv_jVrqI&bOF1V1<-*5SX`Ush^bukmv;fHYW*OH@oOZ@jzGmZ3B`ODGnsI*Iq9O2)Ytne^Setd18B9rqt(YA;*vY^7Ms;%L9r%k#Puf|^^sT)sZcSaxo!CElWeKC}Vy9&4sMsT~g!x9tTtrfuBC1H5mk?lf z(Sv}97k2aZc|wL5ce7Hb3<(?d%_fVaU~oM`*OIhGGd;*Y^zq4Iidt^_w---q4u-OB ztG`n&3!)Z^8|4w>ndzl!(Y)DE zPf5jUZ)(>S(okPhPcl)5#|TvYWQ^>0B!oDZOqZ7c01+HPnj}f6L2Q{!2#>?WSx?da zs(qCYlgh*nl$77Y8%QVGvI_Qzm=vFq?+bJ6=eo8(JDZc!@m+}lkTqI50dG7cq%$4V z+8?I@Z2`;NHIrq`Qcz>2EyAv8`te2}9yF?6NBT9)6?f7h&6(FBt(iFLxfuUksoAbd z5HUbHWb@Wdjx6@rDzeDSJ=cJC7lk9$d6m3}ral5Xf^8`Z@;K}5c-i#oBG*ai=thXt zM!!qT6}L-?2jcv6jG(K4Tyj)hJ$``srN@UsW{`2YgDY9k8AiFmj&cv==?nOEPut7J zI3S0C*@WW&Lv?hO*)33b5-Z56)D4-g!=U411F=2l10adA3bVs_t_w*mI6vV!$Q{m- zIh%GJ3m{@HKf~UB?X?O0;920vw7T{gRBr3jla5r!WBAa}n7BPr>(hhu5kKR>?Hzwv zJQbc3YjPq*ck8x46mZoPG+Zk3b^uzV9Dtdtu7u^Bo%_?Rlu-iu#~7;e>5$X8jtK+@g5zgAo?8ARRK?Xw#(N`rr|47$#MC+~zUvV@M%rM4y( zeYzvg&!b`m>GbwA$|Qfx5GK$3V#r|lV1k}$3WFc+f^8|C=Q8qm2bbbsl!KV{9%aP9 zd(XDtOt}~d7RtCrO8{wJ=GFisFzvW0njCbeYa=*t$3lVvtY7jzKO=rNS=^!~Vz#*u zyZOK9)i7Tv^4qeL91y||k^Z)R5=X1^kyfzHL*d?m2EsDs=3-APcDzUI$+pa3Nd|ml z1_IX>{Q-ZSDI8o{-)v04t4T!dz`fS5-_7Z5V;FU4s7zxh!Gi*u$J>7!7L>NtuY?>R 
zh=Y;hfhIiWn72_r^SiRlL!jKQ4};F)IN^l)9vljI=)6=vRI3Wb0no@`d?RM#NMMgz z9LF>`^K1$G(l_Kfbsy~XU7D^+xU#WUD4ZlYUtok`EOBe*r;R zQ9Ai&q6JS#ub#W$r1Ggmo9*}_;cGC1k9ZA?ihcCXU%yX`@CZS~0D2Kl3hAt5lPIJYG{PozxP^cn$Vm<0mu6CRejA-=DCbHVl8-H- zd)Z~8L|j~WtKvmddrpO;V2gWbW3&sGDSRWlH+9+X^ZHQcKg~4R zNS0#vA;xjAHIx>f>a{4q9mvy-$GtZ6V@W&3P>0Zk5J#U zP*UPplbY0cgH@jy;xHt$^Z^_o&kz5P$+s`^>N9&HVgXb*%9}H#_vDBCw!{^}up%G+ z>Qe4O1hMlg1ONM!sMv@~i;DGR&KB=rBt7|CP+hUb_t-yuA8J%9J1GRo{P!XJuIK(Z z^?YPZikMXG`y$4$78Y)cwxfHLS*Rr>*4X9!j2LOh4hVjsjT6K|;B8bPGgFX_bs;KS zYLv2*H_*^>jY0pJYpTUc&_*m>uK5-pZ^x#-jBQ?Tg$RR)!C@nR+WOZ*C6nGZ>RVf| z!A=*eP_63Tn4{tQq_|$ygz8Cup+{@!y)Y=JJ%2^{UzJgXD=P+7uz&g~-0^K9j%WMJ zDb;-KSlO(j^H73W=k;V(M4!%ngO!_#R|h!}Fxz&F8We}c zJ@!#_v)WTRIFiri))MxpTW6NCM#U-x-bdP^S(XTmBV#&bngpguw3SGEW(~d2JQv95 zdcAgEH0-XI&@x<9eB4@*&$HoT*?K;^F+h+L{(_vY82%9` zY@9`^I~Pbpj9V7D+SviKA~??sZzu!!+|gJcQlqYg1+*wij1?ck)X2_N`PI`i|IAvb z02Yr3ufeJ%4+OoGD4QADdBL(FWF-1<1bUL|VUBnhyU1^f=vd+8!s<}w&htrE#Y z6AO9~Wr;rEXqXtJT9n~M=?NgIh}WN>p6>6Dp^gwEraGJ;aKJ^@5_(^>K*rl-86Lcp z?V>;~C*jgSZ#Gvm8j-KSes?CwnK&Q}>z5h-jB%Fkd1j-GFPvLFqb16(pC{k_tC+89^uaDEId0{+gdHcR`{QZzNKtcjV-``JeL}lJhljT;kfmP8j z@>vT%yHZVZu|U4b8Z|e2Yl7~1o?Ip!rF9A^ln$V^%ud0ndL<0$oa+57iPwW>eDAu z0ZK@mq${v>K4|A2un1X%T~kXM%tZ_{1}qsvZ^lYoolN>x#K^2@-lQDpj6STk>(ZgPVVNKya*Xe&T%QeM-0=B}WmhYj9m#ye=F_=&w%@&4S_kWh9flk3% zN#tpvf-v2;8{WT`_1yq>$+&N5!wBqY+{FH14X$cTBVw>`GuPD*$M`GlIM(s8yqAf; zws`ptG9ZTO<&NVDiBNZXv0Z?YFB5B<@kr%4jdi5+Pb*Y{rWmn zCD&bW)Wtp-5Jel7uhf1fP>UESrXUp3k*BGTc4V1;f#WSyoX{~kENe@>OpmtUp7unQ zZ0OJ*+@xrrESVeV_CX@dD3ea?P7%OrgW|a(SIlbu! zNHMtcE?BV;4G`kyb~(!RdGsD_E4y;ln+Udh*4!2|RbRjjq2awr`)9PUCrz-UW%ABn z^r3K|pN31V022_kc;5sl?%N|s78QXphovb`okD7_CPhy`=n5OOsE9aYgml&Ao)XIQ z44>4xTJ-BgPT*OkUzYHcT&iAQbi;v0CT9J(_UqqYyJR|a`fVcu8;tFjH!H+gkaok| zL`K){rF}ZIsA_g?7A43*JUU@jaE}9-+8)YeCk0R3kBUwI7H_;1Rr3G!JeilvJ&Wp+ z&{dKdfMp*}XZ)#N%Z&Bz%SpGVZxue-KZ%+SLVGk`uAEc?4@nQvyu@_ZgN6(C#{KVQ zld)@$H3b;-wl;>SwaS$doaHMU3n?u9re@ed>Hvp1N)3_KgSYSKhk)=_1A<=-z-dzm z$B+ZQGbd*AQ23=Hr5;?((Mc)*K4npkCYBNi`etVF9J{jq1){h#gzfwl%DMUg^ZZw~ z-^PMBIaL=?t$Pl18Z8qP#-95H<_6b6?fuox#g?kUr}>Vbl@bMPIAQ~3seL|oKe^*B z=13ov2d0#U$Do$12FB#XQ4(}|gVR=b7yeJv?Y^wJ?KEJ6AJ}JRg{UpAFO>X@8@Ste6Q~rD8g|0AK@wvR+_W zRV8G89UEAdalxuVDjp1J)c6k)gtIGj?`kC=2fSCqHyk8G+%(sg{l@=8WxV?ln9X;*0ETa{QSdC&96v5`KZPJt zKvmtxm42b2$kiU0-c{n7<|e$=MK$x&QSHy?W&VZDkwx#X-PjGyGWjzR>el>xFO1m} zX!^d%T81Ta2dcRdoD*&SNqo=9%v_R=CiT_KL5cNYK=U!`Af{`d5 zNT1T02ImK{9w8g`6FU-2*%H(r*dt#L8b>ijPxKS2xu&bn zsTn8s_hV}LEJ6L|iv>U;I^2ly;C&d`EJgm{rqNl+~1HvH4`)7#x87)8a>lNnleU$eZS6}%4symf`Ap%zTFKbK7BL!iUw0ygr zqhXUHZBe%FYF-RGmYcGl8s)kN8z}~pIapmQ0`12gu_Dc*aB4f)EdWewfeNbY*!ovx z(_H`hW)uQ)LRfSr*E!WpA+s_eZxU98>+qgSI+$)6-#x-K%9h_9Dl#-iQtN6&3Zso< zt{h}cPV9te=*19ri~kdN?{lmxO`aF4(sVLsg_#RDWVhd7KWGyErGrUKfc|RiuYES< z7J0K-+5C01B zc&w?eX6+XBDQlWKfj(F^{p}OR*)@0DeFkxLV>o$FC~I=%DJWG!gVEMClW2GXNeirpUnbO z2)<4#ir|0!Ti=K~WS)#O7770jLeCzoXDwRS^Ua&C6M(Dj67ephJ+hL(rtay8u<}Lpv+P95Rs&*z2EIS%ds}*4jB@ z7`s-1x0TjF$$vFML9;6UYSI(EkcddKv@NrXJHDi$Xf}o-UtW}c#X>;_v-nLJD z*dA)Ef-w1H+xPr^MIBFz$wCFvJJgz;>gP=79mV_g5MWrg@+SnYVf@ zi&(jroWhxTeP>LhCGTt>5~C{E6>V$!@gJ(dsp=K0>;xUJa1F;6r8LAC*VMQ@k2KvZ zl=tW-%vAE3Ji&jtIB3ur_#o=NDJVwAP;4?q*xoO=&C|xxhTeUh4j`pzq?|$Fpbnfm z1#|7M3_>Wu6SJq&k*zk@+h_~T#t`QE1XH0kCT}iNYW9jrZ?LJxL!USY)uadj00mb; zo9r>Gng zvPEPkvZdIe16l{6mwiyqkuN=dnX5|F&=8(&SZ(oLGPK-9$er@`Y9p9o9-nRm)Bneb zx!S=DhXEZ&6|v~Lz)iVLyf2Ao8JZG1}68*0i+M-wx7Qa{Rj`c zI5TC^0-Y<0ygPw5bT(k|xW{TPd>EU+Q^+AM;lm1RiStp#b*rDZ$3MMZ^~${UwY9DD z$l{29kPd&cJ6-2AE@g<1=BPmq>^MrjXYc$?dac*B1^@&-wZ-9mp{Ho5z9;qCT=6`J8=-fDnDW(l`1 
zXMqZS<7@qbQz(v#`TKWT;!N)oZ-Fz!AF~BM6e9!^cZIl?;I~QmuW4#+$M}>hmr)Tn!>hRgxL$v0MCa{f1MGfh?Z;$Y{}qcbHh9~Hj+VtwGp|J%+qsMPVtK>P*6_jW0Hf1n z6RmXp*xP;j}? zDx^j!(jr4>Zf8pusR;{KMR1igp^u5QbOM5I0noG+#ewkTa`so9!#_?x7iryIN4j0P zQ)l%6_#f@S{WC}f0=NYAoce$%oGxULtGlQgCLP*-<(g-O%nZOlP2k}4Hoe#DezGH1VP~?Y;;!HWH9-7ROGuW?D?p36G7XLLo#;I2Gr}KxNR^)_7FCm zi)gu@fArd-U2VZl=Z}bGjBQfteO!Fpo=a|mQa=)kZ+x`1J3C6!)#C|_OV;1>Ky3rN z{*6uV_koQwC6bKO?iAkFx}HZxF?_8EpI`0+G9<>3la**U+FquS%N>efak*w5Kn@Q> z$*PeFgeNIg{-_}zRyh1kE#AWu!p`+U&ZR6ZzC592-gdOR;-o1*#rzOj#<~auez>mSN5c3m~hC)CO zOUw#VD;g+a@a>k=hR^1t0<_@smLgXjy0>bz4Qp;6+3J1X+5|&X8u$!eB?b^skmI?? z!+ChvpETi5?73(af5GSz*8qd*$zl>G+gC+m~0(AkNV`@TQ>j?UY91Qr< zVQZiN3s?o}D+D_I5%BV@RnjfhnuFO``(e8`KcF4kS@TXx6JG_gkPJtZ8L?ATZ7uEC zpOcY^=1W=Y-=ypxpSoE-&Aw3w#iv>KSIRWV_vMp6W7w#Wuen^qT*ku+$PscOG|r~> z%*qHQBrEu1SLieZ%YLpz25$Va-~i4SKTf-yg8Z~CHOA(wF{&)X?8?9hNAoI7&Y?Ta zv3;g>shA@uAn~Ev#pF#_KN}vI^Yh~ID@tu6o!Dx|wKCw!?k&tZl={wn97OLuqS^e^ zI4FjxK60zQz&ppV7lV|fZ3eSc>%C7mbItWZ?TG+&1{qwLiy#NJ2Y}CbkdLE=`?CqV7IF<2|N^H5}!?fs|=1rXCI> zmM$a4r)aQ_zJJioN6O2BPv0S@-1U}r;E3{-p)L~mJr7f&?bx)@yPySKhz7U?&i&nY zMVhgOEp?arKKfsS)@@uL2wud*NTY}%%S85XP+Q!i4L}sF$X}S?VQPT@020YTnrKU? zLJ(Ua&jAGBAc_J50R&J8uf*?Ee5=Ls>@zBT7mYW(OWmw~3os~QIM{Q4M{jp;$CWe{=p^`D~KNj25E;s)ZEAtrC z*YL+MoVr|TF~r;ZF7DF3|1k)*c4N}Jeejo3J{^cns~-^trABWOX>w4?VFmUVVmnxI z_c*AO^+|$8=7oCROVV2CNdpwy1b>-D6yDf61*_EyMSGCqQT3?_JBi{#o7&gMvn;v$ z=i_>)Z+RY)uC*`Qeknq;jGs}QXoS=dH|{-LrBZL-v{aA#>u!Z2I4!!az~H0Dt2Ry4 z!lpb$mkI!z)UkL|ddf1Pif+qgJ&@Q2xNtUPxc-vg2XA4d7lMA|eq^dJ=%MB3O=uc9 zuY)9^_x~|g)7eU@@<W3Y2K77>rUeC_zAPP;WOFOHpdOrjOEPGMlHJ3)J`E|f z%f1Rxn#hr#ZFL#|PjtFkJE?4%;(~?_8i=z_Kn$7&Ah zT6Vtc7mKeLz>p)aT?tf}ha4lYQw$O_Q_e6IrT1sfbjI$!dUZsEWYoRrAe-K-RC(>u z&Q-deu6^a9-9<7<>~r&@gP3QY|D6K1Huk>i<5K%=tQM^Vw7^p{vAXIop% zn%;-7+igl(0TXel*eHM)zm~ZA^?P8gAEnuc?x`&PP%iD9?wRZLZpGpB z2s=+Yx)_pkrGVkUr=*DAH>HPKf6b=FnzAp%3WS>Bp@0$k2FJm63C6<7zoRP(@g03Zb8y68iY+In-l|Fj@J0&l zylZ`!KEqzy5u?qz`&~kPjxT@Ul86aY0y*N@?yS~)v?ZL`2SMvk${)EfVWs`>N2YJ$ z1&IvTy_x53JR)3hp`j7Pl%yxwLJ2VrmUXsv0mnMqy=o4Q08j;sxes$IZit_7 zhxYO)WxGi>pP>tWnM_|+VP>zb0KtirGtxapBWe3!s$-|lKP1jrmL8D^NatrF5&&{Q zjlZej@TndhkRH!-`lcoH63%KLjVJ-?e(4CnMuR<<>-)V#Can)M#BLa88r&Sn4vs3r zi%f!T?NMR#p7fdRG_jXPq~Z`zXoyuQwy!=3yS&MM8Xs~S)oU;nL=>wGtJ4qV$T)YS zWk$g%D*J~2Ff$+&waAz}f=^~^^4{bL259{8<;VPBacl2-mp+TvnH~MtC?m)`NOOYy z`v~#}+Rh0_CvZ(!bwS@-X8BErH?NJPP8$JY@W{h?VDQu1SF_p+q}HRD|25 zq^gV3v-s~u0a(MiXE_}^;RZ3PPUK{HBwzXrE11!?>1U6gB#&d<^Vt%J-I`g*>|Rt0 zifI-w(f=S3LNR<|3jKw?$`w+u1*NbO8TguK&MBjQx(YzoRE*v#KGvtiabw0Lew?%E>!XJJ0M71q5Z?% zWG+ao)FL6i;dEwAkjMH7VoGhmhg&x)sq0G{E$%*B9ajqrPf^Gz{Njj#(!*)<&1FM~zMKTT) zqR!OgyG&CshZq}+8Dx(WCE*iDok?4(9@4a@ZKM1N1+u^Qz0-JFgcY^l-JTu9BQ%AK zl8-%3HF>ZItONx7hf)|gx}|}v5IOwdd$;X!m8mg(47EtTt;N+?7toj&!^(`H zUpY-31mZfZ*V{;`Zc9fML{Z1-n*{V)IocO|BSxl<(6MTaS(xb=V7NPcnQ~X;Ti_@& zzdDLhL>M}Y8;F-Ulr5i`-dc>O{BMN>AZgl|XGx7hj%QvM4%tijxI5FXZ<5mxxpiHQ z0uoc5(M@BoCHtg9)zVTwP)V62E`q@ zu^|#BoIsB_0;9_8FB~ATozcHh=5Cuep^vKl* zjMf+qS#FX1L)wpCyJ=`Nh3PB=)r1>CPqlcK7(Ry#CCAu{Ub2quLD?3=uYbp5aS=TM zkIOzuT-3H~B>}?Sx9r(L1$xnY)ja}c?^Eb8G*E)EG8{+#$CEp{02<_%h-VWM`-Z0D z4P@*?o354)CluA4lV5}js;mxG2*$sF%VGU_(}SnFDyj37PGbK7N1GCmz&FnT(X(>{ zx)?SI>{r&1xIu#H<=%N$NoNESn|Ue?k!+vY)>fvzrO$055p(T~7X}xa^Ou1!x)Xhe zd5j+$3r54MmQ;~|_TTut52EkWP=SjGGeYMAGbIz!redEhH03B8T3##wpRFNfv?8Pj zYuY!yurrvvY29~POLcLPUsz1#ICgY;2}s>bzkvC?{F&vvXyOrs_BHJe;Ct=$>1qWc zgOf#~Yh*x&b}u8nv82?VO?2drHQVasxRE^JTv_gKElX5=D@tL4&qRM5C2ibR1xD9Q zrwteHvT>bV1f}^n3*~DvQOc9BbG8Pn(1`8XX(*LmBv+W&=}r7n`nuFk@K<-72d_Y$ zK6IgGabL?oAf!rQ`nzHUE2AVr$t(OIAAsqO-Zu@x!LQDV41<5kgB@r7_?aUYAvnkI zhzSMGJBext*Q*;Ylc$sc*A9D68zVC_x&0xhn?5(qeH

    ++^{g5NRC6nll4g#kKy! zi6=QWL5088gg*Qz@TjcNWM?24dUB|o(c`p9w|1A)Yf4Lh^FZ9srp|ow_=4@!y&|JA zOvT~5beA0{Z+x9M3o{eECmxVd$5JcwP1HWOwlso$E5%_WqvxM{)t>FZdl#2gI$^4o zT^^P1^fd8Av^A)!)^=c}=R<_Jr|&kv{UMkZm03=G3|>PYvx z#}lB0d!unwZHd%t(2a5M0`GL1P;6{3C`+2RN4K5e8gq8SYA0K4^8nDC&>>$mb*dR4 z9lh>U#h><-@ssP<++5-JRwvLv?K0BSxTyF&|3i7D9s2bm%|41^O ztfzt8m1O)7kIJQBe`V`O^x$BIsq)(M1!u=GWOVOck~SVLtN{~Myq?;sHMkE>IB7}@ zvp_xbaI?RzqhzJh^92|I%`Fs9qR6OYxA;sE4Dp8mZfM^@!O8XB1n8#D(nEl_5FlX0 zdyNQr7O-taZfd7QM3%9r6~ ziURAI^^plGwgNjgqI1%7vp}_%%U?InX5@U)GG$}v{<%$)BpDMN{x8k%&7-R1U;nB|_OhE-dHA%&3YvxA>c>u8DmagD!e%d=gN;vlVmDON%ii=9J5s7E-6EXlx z4`5ponJPFt7DTtruj7)E90CEBRR-Ise~F$sa4TS1RhJd1M5oCm54wQ<&wjwuO}M;c ztVn6J&*wT$u@;e(F6d+^O)KwJ#om84wx${kG<%0UpBKqHJA54*)k|y*Z@r1LhT-I} z(5cj@+uvBcJhJI9Im3}{EvB_81AiMA4I5FUP^N35ZCy8Kxa>xZAd-}iDkE>QnbzLV znCp`(r{L-D?IoH`6_gdJTrsCiF;Fk;H=JnOV5?~oc!^tI&=7iEsx{-KQAf^PtEJYU zQkrQOH4+P~h^>Sxk&_jmEWQnHzLr46k199BRska1!kKyh z9#pHs%ZP0*S=nVZxLwJ5A_23%1yX=>fMS25S7%E`(iZ!b!=V$U2MIeCSZp>-)OkaiI*# zh$Nv6HA*6O7;uOi>vs&;TYct$k)ttP{b#@#>&z8U=uX`5htb0?H7zonDj8Hx1p|_N z-q~5?j+}o+{%O$0gPjkkaH{Z4qn7%$B7I)~d8xfW6!QJzWtatkZ^9LWw;+nXfkin(NS-YTMg`U0-#%3rTUZiBAv-lD*@ccvH-7+SD!ER422E_|=9 zOMLpiUlyUWXd#ID#1$px+3Mvy4oZ<-+WEJ$U!O?K4|cx=Yvp8ZjgcKj_gbZ+fQiRB zfgYzS2!o^+zTCzYfPmPI0fs*oa!r#Q#l9C1>1=b%2Cl%rGD`ahLRRP)&+nby1gwCi z(zT|$l-@lQpB+~LK`W61I`D|9wa(NT6W5^RcUGHB1oXoI-M*r5W| z$wOS01{dt^Ii{npqKLycjZxgzy>ilfaUZg#=PjmHtrJdTKeu_Sx3_7+2{EHDtLk); zd??u-Tu(HItYQD!+*rX|v{Ut4Kg1LPN>woQSq85ly4Uf`iLKNEqL$jiKK$j&LA?gE zbR<_?%VorH5cN(~R*eMW1l4pgK-9)Q+f=f*CY=w5f+t)pTzb5?7>S~>pp`KXcKF&y z@g}fTt8WM}#uuC&=itZ3f;60R`C7JC{Fy_asROMTi-Vw@+&1=W)0s}6=chDM1ku(Z zMcHI17P;uJ_cZQu?lKGicjUTVv{qr;y52*N$)1Y7;njrDBnN1=Bj^a+O!Z0>!O$0b zns(4jp`kCI6WVo9R=Gb8B{CWEUoD;((?wu#BlBbtJl9Oq%Wa2L=B{b z5#6H1MD3}ea;E|RDf^!&H+CEFb%{<9pxxyhJ_e-H3I(6Hr36$2f&b3CMaNv~3esr< z@ZG+@>VVA#-d}Z3PJY32+}elp@v%?h(2(c-bq+!HW`7Gvhc78B(lN1#pzVt1gVCxo z*apesBr`EmA|OB`N6NZIe@R9MMQQUeB3h0yLm;1pDdcZ2D25*Gma6(gI04BJ$RP1)^H^!bKA0kRbe7%?*xEIS89dlXiJODks<)%%O+=w_OHy4rr*KpN*IoQ~JZ!jTlZ*q&*URNk# zZa23800CqHo~3j`U-Xdx(wh{HhXy~fw9>jhlHsHQ$sxbq6R2l+4YyWHf<-Ts>e812- z7Ql2bwMp+DOi>IqM90psfyDU*9ChH) z<7uw?`iKBnZ(yw0Fb4|H9+tun|HdI#NNhBYVz=F5;^W3heaP45Wqew6s1Yc+*7W6FwxFrWq32vp+O)_t* zEhiQmm9HqCalEAiDmb5#0b#x{OGLq8g*NQN6wq{O=4=?T3t<00jh%%m-79F^Ew{@} zGJVF{H{{H`^9y?vr4HS(2^yb{V3=p+9wGB`cK<(52za-x7?8_h)nrP8M1b~eq3JIG z00Joio~LR;U-k4x>QqNKB!aJK8c7Yp=s1Mte7)GkHV;I&fu{eB(=HkYsD$Yk|C|6p z6eF#+$4j;I371>Tou|NM||WiYbHu5jrDo z_d&3S07hzNEH0+E%G#0>v<`uY4+y3IXcpJ-q30abd7H;uWh{!uG02jO#o?0L`yIS= z+qRxuzjO)?^w?ksuxZ}{h__?;#b-xZMB=nih$ zHHik;*%?B?s@1Lb@Da7sfeY*?z*XmBj@JS-Opo_qr?MoR$GAR##uSpCJ*|_MdvLoT z;$rEmojZTVD)ZmN$9zBCma)u!q8n|fq>X?ZbL)8>h<)MrNifq#NcMvMkB(@b)D+xh z6cf`h3`Y_LYGo>Jr4ds6lL*ObAQ= z+Qc%8llS?##-f^E6HUG^*e3a?6wbGEIHM@yvy};*yvNcQHp70h-Z_=Qv z!s??jl*nKEkhBg=F=IakuoCpRAR~#G_7?~|>gTXoN0%v+Pc~j(wI?*tTaic})PHp3 z!*`0=e<=6_AP7TUtzHfnQSjqm&V{-SAh^S~0ny?rRY%M!gizS7MdNVQ&OkwbC#u?E zRA(oF<-pC=ngmh(RiPTUPW8D~2eBsAHDnHY0I&huXVJ)~NAI}%)t?wzGheKchY_8B zu3WaljPt+Vy!~Q-+kokb9hfA9;lQN^3m7PlHDD;EG??NOfaI<*6yxD)l8+cjma9*% zdICIa3o)qleUe{SZ!!ZaIa`-vcrSwgoQ4MCC9ARa)*&y`B63~DzJPcSu0EK;A)Kke z(pbNN-TUn!&Sngw$e#CiMTE6DcvAcOn5JPm0^)oIkrrn^KNp9`Q2iX8RELAYSflQM z*iJ_oDITp6|CX}`=Z}J1rOQ?narIa<3nYh;V=MH^(fwqYw;@=jh~rQ6gwP)@8{YVn zeUFL!`sSLJEv$GSrgYv7J#l@rI0aClmZavtS%p2C>j8;d+vWii?Y0 zaGDSA`bYpPlwEtF#^DQumlaOV;xPuG2M=G1uQQcQa`?jb4lNu(nD&qyX?D{Sf)D{n z9R}gTu7Tm8wocMy(tSvUrn#D~kAizw9q~z0+=)`Q*%ifmqzZc(7!Z~CHu5C}QBTq~ z2P}q{)WQ6NSeBU#@5lkKd>yG+DaXLbkNNw{ddRc5=5#>*NGQk8U z)aW~6WC2;`aqqwEEU>5NCDl%Z5w3V{_bdGy<_*o?u^G__{*CTF?)Rp}!yTW25JzG4 
z+}S^suCL;3ixCt^Ik?=r%8r%9`5@H}2OErj0gjAa8a0xm?N9k!xid-p0S&^{hz+#j zjlgHt(7wA1GuSDoPKd8%==CfJuD9NAZF+u@zx#~s3gYsgtd%;+p*{RnK+p=MPdz1TEP_HjG79Pe&;Umm3Q8{WA{!cSUb>dn8L z>`{YZQ=(fssq$~YMfTCW$;^&ahr6*{xVTZDgkIF8e9zdf3F=hBqRQU>I>z##H5V7r zVUvoBJWJ>&Gxk?r2S=?W1~I4~Sz2uQZ7m`W>~t)%;Ehh$7&^}lucm97VxsCnLTtN5Bd5*VS zb>o|?ESF0`lRn|q-Kf+8|LOn`MgRcOp12@%<&m6Y5dooL)H>#2$;4Es4!4Y=DkV${ zPzXLY1=+pgbg+!<-+kc*-#EoGx8CP*Y;Ya${q}2;g(+GLs$xYOAhWkCm(m^MH|fxC z7M8v2fg|s`tgK;CQ}f+XjC``TXg(#S!ffygB)*3V=V(Q`j+nFs70dNGzFA66J#g?pMYq#kxrn&4yuACTuc{=_qt!{ z;B1`wo*0c?4E0Lb#f?UZkI^s|j}6|)5?Vq%C4v{#D19R8i=b7Q?JlP?QfWlH_zDUD zbThIxaGjs|O~linE*QK`A=ZiH8?BYEBydcAlUjFCN){Xf1m&bg9F##&le2gwg1tgungtVA0O!rZU0H1S{0oqMqo1e;eN~j33d2o z0z3F~9X7b(%ZoXg?!Q|#89c0;hxAtt*(*njoTm?D410<=`{FrtOLG@97uL93_&wy=!hT7$qpK`ed&+%(#68gaPu6LI zpW2dHh{k{R6nCLx)!Wd4q}ACq@h$TAs;a~4xk<^xFOCR{F1oqS|Yx|_tz0i0I>bJ)& z;uX|25zGlhd!kqW!PVr+7?6H1!Se00D-Bm)fV4m&(8~%$ZU2No2r)>FNy1Hh?uvRg zdVdIG!H z8p+YT9U)ju`>LKW;HH0fVkN{3*5rd)hzjw-B!b!FrwfbLp%T^>KNZwjP2Hyekd^Z2Id?Y5pP1mpf(msK03gl@Lh zmq6LBZ)LN|Dy6^rbX?S?Z5dZgd{aM=F4(8zwyc(3JStj*`0uRSHbn;MGWioVmW?c7WN-uTB$eWc-keh97ufrous#bcA#K=S6!O{Vd13##{8#_1$$$Fg1!u9%G4(%-R(72ucNfmRaijetpgA(H`R&%j3L;O~x5<-uA^zxb z4ySsE5Me_;aUT?Z3;oYo(q`7oWN(keS5959ic;0b@MXcW8<3`qE9_tfXal+?NqOc( zd+ZW8dS)bLIzT;lmB3E&P;p(5FvJ$VA!|UMNv68dGX`|IsDcwvz+MocZiejRa1DcI zg@pkOZf$$sZ=U5w?%yb%J|MenV3QJ$%yPFVJb9Le#o@i*Sp!AYm_#1rhRZyT!)J~mSR~aq4V{i3CikCs zBXIg}Axt@8n>mC(rC_t0dIIH!m}rlvHyXiMCbg8)Ks<;81*lQ@0>2Ux zCd==wYF0i#nmLxp#Cb<;m1?UAh@%fWxlT!^Esf&^n~iv8$o5Yp&}ITfAoyZ6I z)rq=J#)5rz*c{d0kfVpLeh*zg;kq@>UC&yqv(e`W$Yi-(9{`C@C`s%p@EUcdJy=IY zxL+H+PqQArXn01W%CKbuR{LN3#2v;y>A>+zy#B*zIw@{(KSFM2paCjuEY&*)rD#c> znYA}RvSxt7S+aV z2Bd{3*R#lb)l6WMq?!#HydN>CCWU26Id=~O!%Pb`B!_q|d`oQ=W6~yxta1G!jP8Nm zameqd-7M6t+2~oM9-_l%g;rqFI0B=8AR1GG+uSu>q)yyn zUvKdGHAG(+lN~hVsbH_CyI>@*xG5eMx4&^{?g=;t0h-W zz|JN&!n^aB@XG);Rdad~V?;)5yBUcVF++lUy4h^dVMc*SwTQRiP&o|x1>OTkk-M(5 z=1B4&Uq=7*UTZoOkx{`bA5H16G;K6tRwxf1=f<;R;znH?5z7w@o0k~TRunY%=aYGP z@d_kkl)w<`P&krpS_w~=M`zTFEN*)}_a<1DZa7Ovf{!&+PnFxzrl{xbW8X7}r`%Wb z<|C-|K3FFZofm6x-616Z;C)aqSbZjzy&4SAMf|F82?P5Ikk$mXNTdrxyDXK9o|}9q zDuJ&&wVDDG(B0NgwY{`5MVHb+nBxqE~mzJ z!7kYizNGo%{sLSzMkPDF=q}m*fk9V~t*!6uBhW)PKEbdh9v*z<5~Nh$+plGVcCIhp zpQeiCN)V{1X29NVK;vlPrYR*V))v#UR5UBBESKvfQGFmnhm&XhBm2;*#gp z$ED_aA=}!T2$J*`Pmaxi0j?`sNzAxLE7^ek;nn{(We-YZx!4s-y@aA-E*SAm0tX%h z5N29_b+*bf=!DDODqN3ZR=?L!8k_->n2Io;LfHo*DBT3aeOkT^AQLqR>3$NoHD@A4 z7D+BP8w|r>v-KX*B=I*K_UxPTCtro=2O?q4b7e&TwiZeuAL^AxzeoYc0Xz_VHarLt zk$X=f%Km0Hf8*hP%6sw2yM0(zTOBdNNmru8_R<`?fVuAYLYu^I)(yZm*5Ga4mab@#b$T z2A5yQ)~|{^@>1iZ|G=F}l5a-#2J)Q|ijtHztujl@dL|3_KJn*XBPL(u97{tdC%9!p zN5;@+P!hCKjPFyZN?IUq4z&LSH(oP~>I%UeaWw4`h}*smtye}4|7AI1=?;$14{)PQ z)zVMt?SX6(=r5L2Nd4Zri_=*E_I|}!W)e=L;PpoXE0j3AUI|Vm`k1xnTz~ggkrrkR zs{_0#W+4nPe;lA^blSX`2C_1rB|8eRCfD50Y#Tv#Q24Jt?sbi%?doO?W`fUgE|e_5U)JPT?HBf+y0oQq&yC_1y>1p@)*@Ya z-q380TBEKY7gDY)^h3(u7yE}m{+p>B z+lvOCxknl=;9|@K3HGF_freaShd&>?*z(2t72P?QFT&p>AJp<*CZIcK>@%F5H`}bh z0007v0iNq>LSOhxAiqE(6k%j-@mqnut=K8Do`XFx;B|60Z9b_UceTxlf+BvT{GuN2 zCq=;-1}gI0A(Pd?S67J>U9zQ;2ae9k8^r1^WlL%SiAgi;1rT|X?WGY*DIw)?R6s+y zWP5)rjPYi_bA2KP6}s2RQCs{FzCrZx&F3oGnv}yb!&;lq_dVjG?Edp$2nA2o(UNdr zvP;&GiYHqLi?b@wJM@wsO;_S@O8O`~kt-&&!VwRHephJGk}cx@e*b0o_4t_=qvI$zaEdj>C}SPp{#7ctiR~k zja_jGZ2!;R2}I3p#JC(kAK=2L4GCJvoXorZFHa{)wvR7Lap4ALz1n=lnD2h`(v)Fe zb6Q>G<)l6jYKB?1L{`2*lH4KXsuk=XIk+16JZAzFvr^)hf{tq$zsVEP0wt4ew6n-; zb}p(ddFM0vg4{2yOvjJ|7GsAN|KKkKQyA&t_&)LMfI{$WD(7y7Gy=UHpt9_S>v<1d zU?kk@vQ9T?82`137hPAcuj8}zlgMZIZJ@=^_{RdPF-eY;UDjA$16%&L#aiX#ch^W> zC$3NIG57K9qr_JgyK;`QNGn 
zw@s{%9NeYWG?>;T=x&ehpJUHsR`Hi)A9lvDhwr>#000phL7MPMs6lL*ObA2&!s=>j zuG1C>!W7>#fOjTZ$Y>rA6kFtv|0o{JwD1HZ6&b|JR*uSn%u=Ul`{{9;jcnYCQgfzL z{#5ToO~koOqXbVh+FRKR@F3I6Zc#-!M0R=&I%__9P}O@l>m@4-a_A zO?%KIaXy`zZH*)85bzLXSOPRG*;OIjmq@(Gp_-~0yHWF|T2%TLH|^O*<6x7>F_JP3 zL;N}z_cu`8k(nuybkMoW?H%{u4TbCC5rnt@s!|S{eP3kI{fD&GUbPOjd00^(LBLx{ zE6zPyF^h9-eKJ(ramOlYG}AAnXAUt$tdf^j{MD~+-5mNbf8_}8+~7cE%d!OT^+p}TE;(lZZn<}qXd4#+^3dhi(qs=Vo9#Tm-V zJ4mj*5Ur@RM7>+{gB-Ic0ooMXJ}b+?~MYnYv#I>GCJ)!*+4^J+((+5_G%Eb*tM?3ZH0OzMCmu1VcPq~e}iu*#XGn% zs2}P|$YXHPk$VuG-)Oo^_>A^LFg+)N_-EB9aEvvg@SOp5xya_PtGhYOBs)z~&lc)d zvn(eK=((j?PI$7=wNy;P@gQl|GG<@<<~LV9SF~9FuA#u(yuO_83BnaNYT9h=RqDc@ zBcLYrE6h5#(=ft0 zua6^fPKit_g;N9i|8I3j+%f;KZ+B@R77@k4vlejX#S7Q|;yWx)NsvoZa{#nawRL?K zp39u0#k50%Z|;-_)1CMX$^*a=(WV^r?(Q7@h_=5NyztKLd5M3SxT#%S@W>2`0eA5y z4c=K9XlA{4(7{>ZAoq|GO1_OG13CTvZ!8!By2=CiuxwYJxm2S_qaj1%&IUZ7EOIO)y$_r{_pMzH?u5&zQ?-#v|vyN>#I z$y`=qCq&{Hh^uM(2Z`R52dba>J3zat5hy_YBLi1#=_c+h?r0TT{YNwU)~-n9659PG zqQ2GC=DOF(^kMQ%>zRL`t9`ksr|g<%G3=u(OD$r%mYw+?c6_ynT6t)hiYxaSOyQTw zS-xeDq1bRhi9cD}F6AvL1iy=s@~67NjWO$pGX*C~GO{{_h}r zt>o*TXHxoNrx(5Jn%U@a`$NVhb$l3sSR?Bv0l-5&FhdFP?8NIwoBYGYn1kr3tMva^ zHnmu$p1TKov>FzIFM~iEFJEUVxp1;1a;zYyLAMc$mu1l*=Stx`F@%(P%kc%+o8Goe zjOQdG8_idgV!~+aS{daH0GNVy_a7D0>XmL_D`lF2>_}$jmkOb=StSDLT)Nxb%PrM_ z0tA?)Enb1lTpxkITLR-)w1Vn%l&l;$el>Jg8et@=cZMS!#(cahFMO>DwlCO6RyG2K z@KFwa{N!uo*1pNmV_eY4vDE82#PX<8Fv=UU1sY3gImh&9+g3S@2z=E{N*y~&Z$Y&X z3L7_W0}g?k17Bz>FptGNQO{|9#Ylyz=(OX?7W`#G7i8}zj%fd|Wr&7yCsKLaQ04QI zp$dDNBOB6fQ`7~C$m?%Vtg`0qNx^VK0h4-PBHTULGS!lYBzd>V84)YTSepw-M(1Wy z?=VpI5`h)GAT^5}qm)-?UHC}*7e_W>IUJn{A0FAnKl4XXxCF-0Paup|_fcQJBo-t_ zHDw=~ggum!RZw(v&y)>y#bwHobE&HCL`U`!6lKWEJ7{zzbz~UsPE=~R+!PBO0tqGw z+vCDh1c4DI*Re#5?@@GmQ$=hyRJV|N`tgUvy?aD{K^(mDPgpUH6SZvlNHSeVAPjrW zciD*QtZU<|&5S$Dj%F6_9@~2iU)>kXi*)s^M}nt#vkKPwIjMq}8ZlH4R?`HEC|l+5aQNUx?xyL1+mU5JU{Eoui+to zxi-6HyWAn2WJF_2$6YV*R%5hlfmOQ6qCprHuVEeDdqzzlB8`)#iJwdli3UzCH{?cSZ;q>!EbsD0TglYWW-Xe+I?sRFNZ@00vW`gT zC9_P}jIdM1KY4!%ODU0hk%F$$+&+~)wg2~5{T&Y(ymgpZX@z-q$qPBj^Rl+bbp`IK zi)d-W;73;;X6{$-&NMg`3WY1E+iv;Lm&iP}hu>1~fSH&K)K(2d)J9cuFnd6Zu>YS0 zFOtQ61NESD_bA$03#IwPq&oH9CL5uQ5xHXFDnCC3*E0aItSU>&A=u%iL`VEa{Hmm( zb7i%s2*S_m+)d1|8m?(M;nACH_v8mu>_}3QF-_~@F>|>mzqB>h_g=Le3aan?$JD=s zJfblFEu7C)c}GKG&AAhv$m-g)KZQnKVhP1`uAKXs)5IIF|CD$}4XBIUHHss_e^w$| z!E8OA3e~t}bd*L$4p=u7`ry#pwmsc!e=V%c#_G7Pc;eljcb|NFK~Bt*u`rD|y8K8L zBPb|tb@4Xw@83Zmg!$W{Ny+8ekeSD0lBH7wK!sav<*I=Eefo+>`T*PaWV|aNpLZ&? zHku`XKn1K#P>x4ZCce|KJ|D@0wamX?wrv?>z$x7?w_l*2jUD+naUA@;(qOD8cZot5 zzGs_K8OdPX?pDBh;(EO}XtSsPgGru>{B#_U0wA*(=GaS=-Yt;jiU7PvY4uC2B`AZY zhv8)vH!V5Mev=?XCbW^K^Ln4aW+y-#LBfDnR|4goK?D>?)%V{Nu ztr`)*R9^IvU~V%DJZ;rfjnT%`;MiNdnC3B+!8!AqfI#8{aJx3bK9)Mk1M{SPLDbo- z3UPlomOOFBnfYv$ypAst?4)(~XI5MlO?hThxf4AsA-~0QFfR_}te-Lb`ONZk!{&V16!0{4E;Upe%&|$1J%YxZ{`|<`bbb--BU*X-o$- z`NpQ)wMnmUgZ5CZ=ZB06AH?18G&TgTygna)nLX-S&6bWy4pmJFg$Y6*4r}h+nXgjt z4QV5L&wO2IU}oQG(PpS#9tpJA6$mD|5fceBk#Nd#apDR4lij~izwXykr8P+z+dp6g zj~P8dcteMXxYI8?k7RxfzBzA~`v3*QbxV4Czf$+=(dTDy-~ z009bIRrc|fL!H{+`fL^RRW?>5SGyQAltSY@DCYk}u^w#6}!N`w8H1^qaw5^qSx9Z5zMKAQ@yw$dzK6M^8 zkLU_T8%o>^{aCXR1_7Kxygbsl8D+gI4cpP%)#9@ruMW^O?D#&1WpV#kh*x`=?2-~! 
zEGA{{O?`RZvg77o3tOa(U!<3Aveajgidka^X>eE0BDq!P~PzIU)|7#IQ1`pD_^sgYQD{oqYI<*}~lD^)c?-k)Ag=+#PZ9%`J) zkbMr(qejg?CxK9%^g+iaE8Ltj0hr-uq}_l_7LYc}94)GHPi>1A5KM7|ue_->l0xrl z-3y1+BN43bZQlRT>PSZC6_^dUE*w-p(&Df`z0l#s4Mq6}BA4Vuo&axqb8v4?ned0a zzi<|cn_M2h2ni-1mxa(je~Z%s^nf)Pi7bFyj2irDnjHkcye^xTJVqm-uCvC0F$)?Q z?D8$?!wXK%|Fmf8(d-HTyxNDs@V*ZN|4LD3yr+!e-6)w#orVp%sRymMx#!FE#9i%6 zU}XgNe6S@PKEJ!dYpB>|Y4TmvOnH&`bi7^dFif5c(@&nUGp_P5-(v}MpWn5_kOIZO z(8f!7J95OuXBUw2mU^_qfkcARo42Qf{^}^gp^9k=QL!^^e460!9d<+tMI9#HF`UKV zGwb%~Po?g7%~W9||AO;inYr?#GQKJn!!~otyP!Ew(sp23xRn__ zmK2fV7HI1D=oGQZrD=p`WUfk9Km)aQzNp(kZ#aKw-w~4UfoQwo%s4vHwZ>XCwr$Db9--(ry@a)+1`ob-f8J_5Ng< zH-f`PUqU&1*{3ReLn`V-E*_Vm)IB0aQuaN974=lRe0%Pg=`x65Oh8Ve)Vc1wre&;Z zVxf>A)Z6>~boDwpn_Y|1Ab*OPH2IP+xhalY82Rc>2@l+)XM(2^fegMH^JEy9S;8Qm z-%Xqga)V*3mh_pwWxqK6TNQ+`(aMCWN;dee%bt0*R%OMdkQU49^vlYYCR-_^>QNiH zn&=d?0^kj^SjOdbG{fVuIT-R3@{EvstDIv$ORp4#4eS~#sqDR|Q`gwr=3;`JYqzT0 zP5_#D#bEd8*(Wybb!3dqCuK46t!yhDdF`FT*fN>yggM4V(md%c;o$&mPd6V{YD}bY zm`J=l-=x1`Ib})xuiS-cwOh1@ppVm~w6~`m&4tLvF9;W2lOAPGm6zWrLJ{DTQyfq7%KNQB5DZd1INe+)UT z&*jg`?hwruMPCtH7;7XJGa;bk4|=PnZOUXbb7@})m=J4Q;X$w!w}HaY7^amziZ%^k zegu0wBmUl^#A50dvAMP809in$zngEII6;sf4sXZkSn~C&CzI*s1|LL~t z-fj5gp&(W#ckOmWs3eYJE#RIbSy9&K3t7%v!Tv03D3r5IDfiz`6&) zzE@e1SujAlNC~xZ;Iv#J*-vIqyz)gYkPNDkJ4vthR6Dc9e)Ir|<9CQ>tE1vR&aR*? zMJq921O5i6;g%1Y>aJpZxj!-tu{PXl2?Ow|TmUR?BryKN25ASF@ZVBli;mntcAG_w z|8S9W@C{JK4?)PO*d>}zo6NUuPw4o|nNJL%NG0<(xJX0%H((FU!I2Mp;Kn+{%Q0Jw z1CG?mMe8!LpF>q$>h4(yxz@ca%2D*sBmTR*9sp)RK!&GJra1-@-q|j*c8`E@j*W+U`aIwK~Rk6vI zo0=?E{)UK<#i}$f`RPW&P0=v?!35+!POJN0ZeHNEeGYd03C7vZ)BcxL?%fi-3adBS zjCghYA|2mAg%qRQ`un=i$DpSjV}V5V%7?g(jI#fVxMk(LbDcK649g-mK|Ry^8r^+- z8!}?agGr_^sct1+Kz5991tpDcBjA)X2G4Q;)u5$Y#A);TcLd+!Fviu$K+N-oJ6Mg_ zZy6nDoykkur^ras?UrMcZ-Tws(kz&7V|`SyRm#Lkn9@U}=}>5Y_oKpyEovsR2Fcq< zCdf{Gj-zCAC*9VJ!a5)h2JQ{1fyEdVqUW@{4wdH*kV|6v`YMm@>8u#nvMiHXQm zlqPmvL$Oc~UtTO#(YSi{`ot{H4ICqFOk&S!nJ6R+RiuO&zY`+MS zdIy5mE{KIZh}f28!GjO^Sh+4^Ui;g&UTxBn#hX^kL!mB%iVHrSlOA23$e!>eaTb&W zw8#^yU^eudQ#rrwq3L9u@@Ae<0xgAb^~@;9C@dvecHy-v#xjo?V4o=wlbOjE(>WP>$fH|JWhuESHETW7J|Eyh*;uQ zUvs`NdI)@2kmH@ML~9(BpNZ2PF&1dC(!TbR*}7e|ArDtM4=RPi&yL~2H=e6a2}6}h zEtgjqOA!YX-n8PWC%o2AIgGk4R)EK|cN@n8TDm&NUjosG`HmAY@PkUby%lBnd*=P= z8XA~WH(NpxOqaIWoNhvHD}pJ1P$`j62wpfMH)sho{1GE!I&;p+9Z-PV4Mc6KFv{&( zZjOOr$zi=iunaJ@N44TdPbVGaZ%g^4QYG)FVP_ zKPbrkdkE(NoyGtI_4`m8mcUXGkVR5AF)OY})~Um;0@oWOc|AD#!XFF}riTDsED~t7 z=PQBxs0nNwJP^i));2o1UAf*7CC8LD16P(Y z4Myb%{6As|Pg~WmBrsY<(PqcDOHe@J++A{fx{>{AyZTkRUMw9%nGtfvvr(RKO>k1t z^T+JQR~-_!fJI$o?EdPjQUtnd_Ad3Ao&FMK(Xe(x6|Z}ylsl3Yxnz_xm=9^ZZgY;{ z?mXWhA*{TOr7ij_!wqiQN0PdD4fIJ9aQs+OOYbyt4eVLb?Kp%#&)Eg-Dsw#BC}upM zu+n_kukrVX2RiiI|Cqj#wHGFg40ig63+E*o{|N+J0y6;<#KMj~Q6%%aGNKdf={QFQ zWh@s+n@ql~6B6!6`Zx7{4>LHGNW=6P&^0$LO@Xnso^v_S?)bVi!2z~)jJK3wPwFJJ zqU6batbY*D(Bc32&q;5uH&!LCfosrZ@av~$G3OA(0vD!SQLFtI`o~hnWB+9hv- zRSf{(2l(6TT`h<&OQ3n8E#uQgp~7}z;Ia3utHVJkL-?22(8}a%ykI?2iG%CfWZ=m7 zr0L%Yi>ZLzuyf^{IkABT4qgofIo&t?emYhQ>iD|%LBZy_G+lYI<1L&b$!^w8?ou)W zxcON^VeoWdKvGNoum>fmz@IA@6+T5M3aE)@kDh@h!rHtxg1uM6K~jILebGSE6%s>3yL)TXy&}AlCvjr-md1V zdb9W2PMMT#B(;*&%{#uPgMMN_iEH!+#rHN&Cj1CQgXBu4+ZEU$LQ`=FyGyp)IDYh) zE{DjBzCqQkZ#b7g#0F87dTyeoUwwPYZt@}ecwx@sbKWFvw}=E*c2<0`Lx#Ws?Xi*% zu~uHf=X!b}&bk7-L?;5O&}Yp$2yz$K2<79a-ugC)z9U6i`6)m+mIx%t5b%w@S?=>? 
z>;vd2l8?n(EDH8{zIS)~6+Y>2JP1CO3?kPQTeT*v=Y6n6fc_7)s}8Nr;(14>YwdSw zF7nmH7N}>hPUD3=48wQP?0dI~7Zh%s&En9*S9^CwlI6u66 zj^qO&&sZ&e8##&k22FWNqVMm-sqV?_lvwXRZg1ajvvz7N;v-?R z)@v2#UrIQAegsG8M?QU`ug4QyBWq{l1zDxVM`ZrMf_*sZnMnh$dP`aEH8G>y$sGp1SH2>R47#52# z&3)Eh9&Z6=y*of1n639?1Y5{9q|qIlYEgyg%#Sm&ES|jR-$>A_rne@n*ou47L=Si^ z$XGEvY{!M}7zms$BHbTM1&)h^7Vm`rdMH2;Ygcva z$!naTD%U}QW;GCz1-PoXcT$~@x{d{OD}0E(spu_sM-T8d>Rx>85Dg1+z2+S-2%0Te zWN0SR6_ZKEy~QgfewuP!x~1MoTn`%P1FsKa5DFM0 zh#CZAZ}%0v|LAyGt7q-f6)9o*j%#k=K_fl`Rc(EjZvmdp6k44M!697wl@AS=Mq6vx zCR(1nT+48XSMnO!1T7z^$1NjJ>4jfnBJ|9YVg1HV7`B5pQ{PpgN}Jq_FZ|j@mf^Uo z#NL&X>x+H4d~9KagL?Y#DXta)Dpg4eEnU>v^T7&D3K%)o^}L;wLq4QY-`n1D^B)%X z0+MaH3qi-@P>N+va`TMRVf?|7fVIaqe#rP3cr6!P7{A$?iPK2?%>s?B?D9Khnd?v- zAUhKWZyQ5tBoXWmHS*>PGM`5^f+e%?dy*Tv7$KYDD75)BYN)KzsYOEAEsf!5OyfAz ze^%SPrO!dDkA@bIsFGJi zwbj%QH{(eSKR%S<8tr8U5`w0cAK0bztoc0_k`O^EiI*sW%SMz~+&Q8DE@JYdT#^_sPH1GJBXt2f z#Vj^Ro;q5(SN1N?o0V5E12B)q(%P2z4ePE=BM*#JFpiIy`$SZ_s!?fEQ^b@2X8FL1 zZ`yiIZTHOzog6bK@E+5GAeLB^xrufpX@a*WN6f(ws#x?PDG4qbSk|KN-DT><$0c8_}jZ(`HD;5IZPfT`HI% zGah=z(jOEg*D@!XDNGlRfopxGH#uaLE-jswEAqXMyeIDo;y%YllugmMFs(tE+P4*?OJg<1?#5rE1qT#5;<-G=2G%2(ka(%lZlKB%D zUkR1Tf>o}fI=2g<^)X@#>dymU!>@)sCZI&+YWskh(>mv3(`TqStz({O^ypk<`>OWd z>ax=A&TX|4(z>jA{x^2}ef9moUYdjr`x~9Y0`nuSmLw8kZmpyH-tC1v=B{l-NNdqs%4=Q1@(QQlegh35Iy%L)NVn;>^)R7TMqgyt3<7)$r4*V?ryaC zt{Q3;6`5W((*enq?~1~Knum)g3f4=kF2d)h9hp79=p&j!ywkj0%Kr=X%64G{YNAO( zdL@5Tk6jPvbl&O1=-oGL^P=`R>YHSAaDC1lT&m zx5U?!)R4cpi-e+xO8%N2v@d|o`tL6 zCGOKx4pEyb%)t=Gq7>-GVfyYUw1A2qHc(Jfg-ASZ!Z zJz$iANqnhwcn*qHIF~(DFU?tyYMkXbM7riTg+TWDDjfgSmnC?Rlvd~tIN@hQP;g%x zpJm2bu}_CBm-GOh*}$lllZy;-;@bE-c&f@aW@&$k@}+IH&Jpd~eN3Hgc$OItt}B~x z=ZD!B6pg65>b2GemY3@T!eyHq8>SWM4Jc`tB{M`^>M|?pGi| z$D|SY-u_^(xT)4!w1y_sdUW~t3L{1*a9lR}syoi#4`_xk3l8buV9VL;xd+AJ7f}`r z^9fPyR_ZHr(aSjLMaQv^ESf@{C!0wKRa*9k8*YfS`2*P+1Xo)*8Cp&7@!rWq$ol=w zKCqf?{-5!&y03Nkhrh$n79Hv2(EFxY&^PYlh*nCK#_9=}H?*gvWopb)POuy3?K~xM1nE~(|0;=y3J1M|febBer6djxf)xbR&86@; zTdwTfkb>}l@27356%uL^kGRR`j(BD~AHj%Q{I?O+-~a30x@zymn*82qEoPU4+gO-6 zd&O&Xdex2Pk9XhN8!O!IC?_7^VijOdK#8O=g8jk$XOfiVy?h4blxQky^a6 zc%hJ}tKX2~p z!dHA6@T*CZ3o)VTRfT>gIIs-cu_-auF~A5N*p@jdgM|;-QVN3MMBJig#!T`fFvszYH*^6a$Xv07t9 z7*nH{MmQ&Y)k^3Rs;fCrRO_CzY`1}K)J)#OxKudH1q-GvC+0$cXPLxNOR4bTtA7P?4iSyP zfzGWR@#;XUH*ZCD=3kBtX~9-VYbc#n9nayL_r+iU00osnpHf8?EZ^+>dREtt$R%g> z6nN_+mKEibJ*=oMBfIj3xE3{Gbq6a~Xq!Uy|6WsA=I)IQssdWTOf#{H<9zJAdvPy1 zc16#;;~LOczKwhlHbm#v;)?*UZD0LgEakMSo7Y;=j`T2)=gT6&r@+LAX`eiyQH`E2h%?*F&cufhX`&X2i5mn;>Ljo|FB3V*9H; zyjzI#Kl&1a2_{;m%UsD~2)2JKN=HO>uwn~-K16DlA;Es(s^2Y>-wU;RUCa-c4{G{R zgqYX@nv`(9MQjz2Kex+csz5axf}&mlr`r-`HvU&9PKF>bE87(;s$8u|x<(yaTA_Ff z^}!_Fziga?Vw0mUaMt_40#$Ky`Q>;xjmclaWYm*?EEZ`o$NyoEPSZ+4L=j1Ls++D> zS<6Z-&RGWEc+z>(J<0`WT&$OuqpYZRm2+@Vy!Pw)^S9@btW@fXc@dt;MVlma_J9U# zh1%&@T1p;s@=y$h4>Z;iFx~5M{D#&Bqph1}Xx#j;eZPSSD9#F(^rO6{IG7GEHnk7* zv_%h7{_7=B6Fk90{+^RiA>dXDT)m4pcY$2K=%kpU_zr9n`fq%9<1Y4Em7zI^Fzg`t z{#DnArDSkzZMNEO7qcB_pmTT(f@ClkTq+u?c;(|BYkWyEDv$R4t z?L!r?WuY*y?XP8RGpzV^JvoV^8*s671}`%|-L zAfk09nlQ-20?c+JvntAPBk0`ZB{4hO2ZVvnmPhQxzSr!&ks0U#_ImVf4EQh`5(Ed^ zAx`noN})+qRye81S<>bePkaQg4;9Rd>*O+N4hJ#pFOvRp`%yO|)=qFIR`V_*;Y3&_ zPCX4Xov)a(H3kS|hr(3EFFjcSsE`&N**GyNzKdE_;u8>a6?v$!W~`;Saov)eyVujAE5dHvZ*wUWT$ zKiyxi(oMCcdG-5$(qgcJEQOZ9G?B!Wcz2tqdkV(jg~&@Mx7qF-DD5=5q!?+%_)$I&!m2A zwFN4k`)2e-b$=R7eP|U^470O7o=0^#YVD{`U}(!x;_6k+jRa0Fp!iGd#;~zH!pmnw z?bo_f%;()y0t`eLg||BM$>)guWbtH`&BF8JJDYIcah}Qbj3~D3ts7KH8SF4yGvEYd z%a@$ExjPlG!z?Z7dmGw)Qn;NW^EoQZoT~o;ZnMPUKhs)`a!_eO!N`KK59O9)5sFvn zWHrhrdH#4dgVrF(!ucL{u0e*r>h8SY4mWscX~xI-1~G&|+@o;=8Yo>{2|1ZHs$!XE 
z)h*LXv(=8vh5{vV3-$d{M@~X@sB?t`YM}%;$}<98Kr*LM%J)<#`v(A~7Ed77=IkEY zAR>o{e4PJa({5B4Tz;<+?&mQ*LW1DSC+JGthtodEF~x-i^9iHw;vuSI;U2#-JPqsR z2^Ep+N6F&t_G6B|g?;_>S^@Drs_*YBhpi%D2}0a?Q@a=T71EIEYQ!eqPmnjXS-#-S zoxf7a*-HK6Foc$x==_I}t`7HY#N_@y9HINEA`v?YoMMD9M7mS7nb@n6eCS#@;v)b+ z6_>W`gd0DHPAK!9^8Z6g*YWmCGU^hZm^}3a)xtm9Pu$)?2?gA2}N5 zwDO!1iZ0~yJiq%KH|LXc%J@+b!jdp!vQ2TW-g=r`NXmwdAL`L&WY6P$=A8Go z8=(O%Em52{bGHjoH*5z+w4G_`XyEYDfD`+12;wS1^!|d0#gqe;QYKa8g??Ux_;J#o zdeJE|kx~J8M`a0@S?haumB}GBG(Nu^RKXJ*Y=5sICj!%FXeHdA-zYB;o08#Ua7OY; z87JFAM-O%6ki6(k>Xkc?JY64ts2tNN0007X0iI!WSLw}OwL&YwR$7-8&vvEap1wk|%E(RpJ5|S@)&z8dxmcLE%&SEVT zfWkD1a23LI{^usA=OqVf_F#i}3&svzLs`Q!pd6}v-7j49=V59})HJZ-t-qXf9>4#O zY7scerMMw-4?s|L`T$rnd0jJUtp&2-k@__OI^%RYhL9gUEr0_|Q^u&9gd0?NLvQQ>=6g;_ zXb!f*Wf1{9vfB%JH&KAd$L_yn`=E{%owO#e@$8vjD|e*xg;2pun`((4STb7H~q*Q{P5T>8^4$2M+rxxA!Ebwc2AuGi)B?x6m{s5Ee#1cM1T7#d25vmuT?tG(T*A}SzfZiaR$`jg|IX8o7h4wui$bOmjm?9Q`Y&=+uKBX0F&(QN_p6Y@rNZvVf z2g}Eu(&^uz4NV07rsl%9nif000AA0iI)OLLcl#`@@|IBv&w6 zwO|3tq|_K*T^4qKW;VUiWXf-0(juK4&KGc>(M$=fe><06%7O$Wj1Qx%z2W!Ee;{1x z6X6Gy1ghvrs=5W&^-SHtzV{FvNKhtqs&OCO-_?biY?gZ;-^$se8Ti<-D5oe z8t%cc42>5goV;2oC-~OG473@ec|$A(^?Kmv{ZjU}ojY1@B+M9@;V2-zB;ictnW59U2} z+e<{_ixnjt($Lg78-h$io}P-SG?wTd=#dDZ*Crc2=1T)i550m2*mn75V3?tH4am=m z|BwIf`BEm4eV)80Glufzu|^Ym|L83k)v)sOXjVvngrieick^%_i{CxWjJ*t`{9~I$ zJ8D@oVe6Q2ISqTEfYy*tZ5n?2i~Kks%VP!)P*BD(+(~A6GbYjV^gy`O_8RvhhN&PA z(@@NPWxSG2N8u8JLZG!$Q+{ekuYMj8X9PJW(~zBVVL@Net(%O5%(^&_{tGK_LFi}9 z`oFt}I@vVF@S%}8Qh(eC?sc+8C}dcQ@G1%G5$|6shebSS+pj(f(a?Y+9h$UxKo-xoTZkqHtxxu?n$0pjmq{Jmm zj7XX(=v+v-YN7gaqi@duqv!+5NyGzTX|Y*7Y)-KSN;d&DXEbghXr}@(X<;kh@910{ zq>b+Rtsc}@N1)o6j&Y_|x@qjtP_?GOkn!CB#)2zX=E%v<>BuSJbQ4NRr|Zeox-XeX zTeNElVW%}lPtk_- z`c6*2v)`TRG-i2(C#X-pl=&n+Nx~9RJ!U#AdNU@tJww-s+eR&A^|SiSyvQa#jN&Al z@*Xk}B{slu)5o|=w|COD@wg@f@w*A5@&d3vZyzBO~uH;(h$4bZHJ&sx&LH z_Z(<;OiS_n>h0m%vVs`mgTre%Lq`T@MDPjcCnosia~phxdWee;PPi4@;ua)lO`vA+ z$Yfga;+}KR5OgLdWMA9Mm!N2VPV{SQiR*;lY7slN{nf8;bIy=hT%jVrU>*70-ipsN z8Ago~BxN(Ihet&sWM>AyBlA|~q~Q_o>11~Z(w_31RMyx{WKkE&;U(6v_m$S}b(?Rs$;z-E}S>V!Uop%S3aqSXNqmo z*#6mE>O@#%GU)CVCPKXbpTZJTa$N(5Sfa=Rw=`x4n5jta4WefBTz8WbOJ{CSR%MM+(LRZMYDmXk|D>OiZ z1qN7~IVaNBc?IY-r z<-0)_gu6Hm0)GmC)?HZxBO0*m0^S}Diiab;-b%%pi>?{evTtPLKEpOQR6JP}ZqVy+ z|Eq4NK_@{t+FT)kKE7gHdMIK*xQsI&aP~+hzVri(pkF-V z+Gn=CpNX7(d|h!_sGoamJ5>-b==hoqztCK^TiV>XLEbNYjLjhWR81}tlXJ9SU$ zfoWmCE3c83Qy@tOlPFVGmI1Cq9fev}ki`xcF3h6^w%m1@Sn<6GKdIQe|L_B3F&8JO z`(ph3R$e~GYbHsKqI(>OgE}|Y55E}Wtd&M;8PB1$y;VSbGgFu0;h9vWxgKNFOOn*Z zDA@zrssV|hf>r1SybxC$Iovt`%IM!azF}%n@;ekFTTc#leBEaZ1Z{B&B}()B2$3KU znT=%MFMmO37L|L~C@k=2ax>MU4y}*6bz|j5EH9I2%szbtbgt6wSUG1zPDWs&&nwtq zi$|fG|AlNN$RXMq|ICB@P+$A~_@Tj;c0YF+1T*@gf0F zZI6l(_)3f~UlY{v<~>+jlAdTOzE^`}u5q;86U0p{W2wsIB>V5_4!N}a%7m6l5b4U? 
zTduALdhg?h?~2+ZtfQdI_49}1Bgz^>Tlf==txMb>c2j$Gg8h`^x?#^jjmJS>iP=D- zDuY6nyFS@?kIKKo#Kbkr^gV^c^l5)R1!ge$LaoYAQ4gE3>R18`f&^)k;|0B9enjm@ zW2mT;sf4dRJ5N`qW%o>PI5DZCjxc_Mg&9gDBz~6&7j?J7NBO5rQ+bWXPfK4^y_D|i z%*7cq#j@EF?IHaj0b!{Bdd@@ndHe2%@5fY_sIQJINPrOd?NP>Qf+vw?vj96;Ous5} z-B}(zu80JMbmD8`Tg@bp2RPVI8cbtj0!^;fMMbX#BN=IbF$|s~A%xTzv0=FiTU!U_ zotj|(V_hYz(^mz|xbFvV=Yk6})rFp9wf1>8dG#c0bP8im-E!RegOs)b* zIi6kQCRds}6zHzE*qA0B_eam$i}zLaSf=UV-UaOFi-aeWepNtfaQ+6rUY8l`IjHJ{ z79eE0;nCSx`v+h{w?cGk`;&Gu$)4w*LDP3|9N$Kvnre|5k`*b|zZA6pGGI?$FyK}q z9WRm$W))Lu_(t6O0f&8sOamTI^ILq(`2lF}tlL0W*c%_7qiNR1t_s!G8uV#TiQvCY z#YdXi);Y@W4?aLYktcXPN0kC3DC#X;KKClyqCb&7Zkhr5f4dPrn|M37-BaFxp8=Ra zGtajw_SY@pE!cE3JrF@@c;n6v_u{Cq1t1S; zG&N5|^J=>aa~-$0UqWvt#Y9^>@X@(N7y#1!!ZWuMVy}@(i=C=k+7Pp)g1k(Se-9nKiE5H^0Du`?vVPbW zx??W!DbGlY2(`MUZi`Qm34Y2X&;@{^K(+H8y5&OYUlxdouq41|onL$#Gdudr{x%k) z2x!2*Y&?9qf6+<1)A1k-3w^6?W|t04#WQtCb~%RvpdYO*jFIa)&3=zr)AQ@Do+Mr6 zrlndzIn>c5$#Pd0G{JLkD1=!5&&Wtiko&5`%;XQ)LF%68?uR{1?VKcP-oz+(x4UC?$HUZ8c;O@c4I z`*Enj(^%e*sfL%V*9K?wduMNbc~{D_-czK^#uJBUk=mZJ6vI|r9u>v9akh>?+JiNy z^eyNJ%rU`i)j9mk=2(jF@uElm6nA<{4At#Cv9iOHUY-v)=&<{mm+gGHlc_BV;LAPJ z1?-HdivMPqvh!tENVPk60t99PYZthBPu1bOgBTYz|J%@^d0RJLh6n>#7V<@fPqyi( zz>H-rS znrQx2F)X*BnNTs0J6M(Jn}Znq688w1s*tTbYy&#$=%uG&w&gM+PZME^j=$O{9my($ zGETYgUNr!2g#gW!NTiFu2aN!%WG6qcju63EV>^bWjd4Cg-1x_v=;x2d$(y1@Gyw-0 zDz3857r6ya`zZuN=+LwRsyzpqz#hQw)pp54TXe4U5#gZXr(cE@)JMTlD&iS*DYpbs zBT=Al_O2l(Rk$g=NeEFD+fH8*n14r4=AU-d;Y7s|$0h@n!if3qdb8JOS)-P*c-d{>)c_WjLJ4d)0dxbDa{eg#M9=a5Rw zehy5xI`~Zf=-lYR{GpZPom~=Gyo-R*^RsMa&h)tOKvzsbz`!v9ewL3UY9CEjJ#Ki~ z)aI7JesQ|0YN*@b4jjX8OOFC@21)b>o?%pB(;QD4uM ztSu!m1=0la#f1ui%8aGAVR}=p|p40@)4TZ`7hJ!@VFX@e_DN9ci6XK4eNuc zp3VI+?+Vmcyro0>R;^1}2Eu2y9zATeBs9f{T{Il!5ocANuFZc7BkeEZgJAK!KS#gd zk%K62{!yyxEUa!D+{+_AvqNt=AV>Mkrk(P^^uKxs*87BBf&i@d2BaL79GVyW^eB^{ z&9LmJttjlAYrl6Bk6$C7_{@N46YbF3bqbe*4VW}|F4dLmiln`DeO5{5=TkY;S~h!< z2?H}7?$R@40x`7(&xM-u=Qg|5foap~3kK09S{y&=L7_DJJTJN6Ge^AhlF}HR8!*>=$3)jsc?S zGj{@u60_@G+0H?`ZqHji* zU8fdt-|AddblPEllWuF%zU&(hGrN+`*T|I>JF0%yJ7X61`v_`8Q3$K~>H$`bbU5@S zT&5R-+UAPkrG7saG{2tx&dGR(O_{*@L1RvBRY+l!wG=fQDA~A(GxQ%=8=Vvo7ht#9HU03&naj;SXS&YK15vMFxLDLPePOjf}_-e z*pvz;q6gf5O(&eK%D`LDhvDz-PpZ0u^xlOQQ@z4j2*!SQUL^>NCt5QID2DtN@0rdr zS9j+Dy-~Kdgpxz}Knbg7Rsjh*oA%Q2?uH^Oi zqcmSpOojyTA|$zHSWpg`cuA&Fgr)7N(npSpun3h^F+^CPwEdI-Oa^qqbma=5sWC~w z|9D$gPbjs{e*d|94tag%dHJ4o`6B@Xy&=b5m>d;va_dBMAM%W-Ue~9bgx26hlTkey{|jiR)l__JPWo0NBi(dsA~+o zCl|E&qFJ*rny`GD&ElF7OZLqSkk?%zW|+$e-D5at_A-%CFJ@$k{3}Y(><6$3V3{Vf ztM{TK`B6c>@6Xp`a)b#3Msm8CY#=W%8H^vqM~G{=zq#%T!;!n1HxlrJqb_f#!A+hp z=s2cjY~jUo;1SiQlI(exH>g`Jlsa05c7~G6c~POT_c8r?8rNHJ&i1{8cynA4Dq>XR z4bQ_;Ew4drk`J;uDZUE%PT{4EU(*cZYjq#>h7u)|vSUNwFxz0qB17q}<6Pc5lheX- zsE0SpkpH=6!`eh?*>T54m>=*}Yv1AuUBlinBb-@+0lsP!Z@X2!!x~yqbmcpTI52Rq zWdS=?#0#X?dab$?xux@W*TY6KY+Rx18Z2gzf8=a2_P@`C0;XUg&CvFk1>_;&hUX5X z8omMJ8-Uw1LweOarR3>gpDF7*4yx8>uB4;R;0dPQs#alNqaH$Jh~NCbt< z%krkbPaE0w$6wU+BPet-V4 z_xr&?{DMIXY_hO4Sh;cmnnb?L){H$8_?jYjEp9Wi9f?^OrGif+QcK)AAw&}j$3YKm z6SEb4L`JqS$65_A4IvZCJT;Gx{cY*F9HZJe=vq;Q_W1FK7(nh<+?Em8q$q-fjm8?s*zc9g5MuW*cEAgLC_fEcE!zWrNO1$E7fwew3P zQUWFbpA5fh$5#{YuD-|PidMWQL-#KSRi6abkadR|SNP@#>2VMOAfFZIM4Nth~{b|g|k zS;qhX0+<1wrF24H>PWDN4BfDB9JM>FkrU}j%-`U8&Tg&Utbu6b#HggQ1e}!g$_O|L zg8Fnb7;>D1axm>ry&~Y@RSF3iDe88!uX+Ok>?$?`98gvv>o_Sr+gx=(!j9%{#NXCx zstRQL@fNHUHYaT}l!wh`NrU41DpYkto2A+4vhO8{6y>&BZJ9ZL8rjJ+0rDV&MD`x; z_5@`1ds!?$zJl6=@0~NhWM9dfYR2~@?=P5sxO)}p>+O^*8sWNhL&`_dvEjd@&ir)@> z(80A@BH_-x5+Opt81AeLfo$+!%}*?9KxeQ?34#9o!E?RxRXhwclbj;5Z~Tl}iz+PK z+09@IU}IQa?Xq%1zySKg)H*m8tfz)m?`LBw8)Am|S_T|N;2wa|6zerk0fls3MR{mE 
zQUO>ep+t0Pm(iMob>pYricM20l^Yg*Y76nC%NfDIjTSc z5o+2FnuPH`AaY&Da-0f{UB0HJEkLePZ|9K$SmSxDNK64Q!L zBYl|13j&(w;MFQEv{Jv)O=kV=3x)$L5#ESm+8yu6Wat!k9(wsPe!y3ts>L0}272 zr)ol9jbVzX@9%4Skuxp+^x#tKXXrg8wKca`FpWW(j!RmDKiV;Uu>FezoP|Wx5aS(i zKCu>wUm&hbbV3=%{`^kmS8{R~wgrk<&y#Q+Q@=K6CPSdR?a7PT z90UigKrkl?FKvx#=Gj#QmB8djMcXcXOKt=&0~|}K>{6D&$#Fw0@3}63dc%i^FP1RV zQ0~ox`)t}2{fTFszc-qz_X~4>+odYOEz}Bp8XpQ7A3^g288-P73$%j?7)HxJI5?^J zHA=PuX4p*Odw3v*9p^vtmymPv^g8f6@2){y~@Fc#R>ZLKAg+JnaxCS&swbUu#<_h|H4%I`oLi0Cx~vqEJ8$J_hp+{Qli0 z5KH=x06|0v3d}I?EYv#TP$UWxD?P!O4Gp}DOlxQ^1`G&Mh48WtQ}X~}VI0o1pd$f` zMZ;`kK%D?w0W;iG_$heyBd}`t_$2K72J&WIsq;q#>c!ZsyZ;_NM*tozXk;b0Uid{a z#fv;OPY-J`-sY)W+F*BOSiA?o;V`%)H?0hRWjG`@7QfI-Qhf12pgrF9hgbrT(W1fm zU?b1<6ATVQ3l7TJS5^~Lpt>ccDC_xObZLC-D3*rmqBf{sSIh`f6zs@Y5{K_86s2LQ z53*>ePE1#oynhDH9I1|e$Zx{DD=~_>oA@{SWAmr#0QShkI=i^k=Vp;ykQC!(e?9F^ zmdsc6zEeR=9^OPB*7_1f)k_-&v2^gB6xsDm_zoKxwM7k8!K%HnTG-#r(u`ipl;y1mrn=cw73b{N=P%anxd96h;nU@?Rmdf+UdiS26zvUV4M000uc zL7J>ds6lL*Obh>Q55=#ZrWv+d^vo~n6Uu6r9EJhmTeg*~;D)4s^US$l=8I=8QLMpR z5972pgl|5Oqh;#(E5ZE7ug4D)TjjFYS8pxaB4TPZ{i1B~sH(V|QasHfx?d7l7_fi{ z`yyoOk&%4l&Z3pquRL~v;4`aR)Z=!~)PKE>kq2}g3q^HT*FT_L^8mfrv&^qJBX{(G`)bNca!YWAc%9


    8#Y6lw4X2Dt zWlG6@wS*k~2%_ktC;$IiC!TEdCdUU2z8i$Pl}^7IV(~8e**q}z1h+NHHM3fUbq_I| z*|DTFH^ZGZ#oi;@w0BqDYl!|_aVu}8>>q_I38MSbxmKj&;Fh4CP@vRbc+da*j`vSkKOM-6VBHbzmkyqB)q>FlZA9pu1AOxIS)dh#fbwfBCRb$vm5DT?njOU-5 z6RnBzIGvnI4l5h4=hY)Q>ld5i6S;WO7G@7v9-GkJPJ)!ap4$R5lTz688@CPQ{PgJ1 zqMxYyz~1Cb)|OgnXJ{miA7RKooIm^HZ%cr} z;JSI96+TW^Fry9=2u=gY+m-SMFm^78=4wobM88Y>m_$5Et$^f$9rS3MNaz@PpXX@Q>0dcTn+G$>~MWlzf4PBFkQ8-sbkyh zB-ZLuMW)rfJQl4AwVa$2eW?6LZ#9HD9*KVLZY6#&u`S;N7;|Z+oD9d?vMI|r6+vZf zALXMCzuxGTV+>Z2w)`IP#3uOxCf#3Taw^%3NW?4$reb4^9%VnPdXw9wXyw&9uDSja zVE9yqO58&rt``qp%k2ef)H!q-d>-EKvVOhX6Gk4XkiwP+2XN)Of#D9`HkLo4DJZ90RfFXkD)VkAL{m$$}t<~(f~UhMD$b3PYT;xSxVnjS$~F&YC=}LB&r(x z#+tB0?2>2CwIDSIts*mF)szml3WxW^zTt(*@4p-?wPG&{GwIMhEx5-}w?C?SF4ap( zCUA|bimBc3(s=cU6S=QIZm}5RXa2Fps8-MKJuyHeT|qu>N72hhvK-q zJb5DJVfG*pQw5g1sY6-4LB$6U==+gc=@3~~B6jvuk+5V}l=zSb+x97uBivpK_jRSlKMT6wMN`qv~Uk;EW+vNX2I6z7HJkQ@N?Ah z@FDk4>Q9)BoM(_E+gjbm!7bwC%5fH|S%`aSlqC3|WQX zav(JOOPa?6Xr2nb`SGc?wbeahs<5{mEqg4#^33G24GfJ;a8+j(z~YFwmrQbv$?|mz zaP(=YuuG=X+bTWtRWiFbBdWz^)qgr#8bky(@rTUrL5HQniC{@vk1Tkj|Jt^oKB>sM zn&ki#+>pmS*v(i)OCnRb^1<-FMKvlm=m+KwVLJzlmOmH78y)7I6Qt_t7aaAuLO|as z;ov_wY@2JI+qXB7A&Ybxlj-N_paYCDSvOpF{vX4OXu~xZTdx|GDi8#(KeuS1N(n{_01%po*NmH-ty)j%r!$<7#dhPzx|F_@9wlN5vJK^ zi_61gIZQi06)&zv{>8U}^pA@*)!?kNk2_P-8(@KV99LKJD~ zsIF8XDV2K*I|7PZy+Bk;B~GJ>ehM1+ar*O$jz)2Aa1FHQR1OmNqcUb9G*}EYUs`w0 z#@jN8mz4F~7&C?}8-glffU#HW)``2l5;c~jJp_bKvS7}((kyb=#CQLrCF;3nCf3R_c8)&N+%}0;L$x4#1*NMtkPtzdD zY+YS+t3Ga|lhT_9{zU7O zBr&?7RCe-+idah)^eJ8Hm1JP%X`iRI+BZ4lVj3H6?;}g(P*;HQV8AybssvD0Csb%( z`?NH87`>AM@Tasf4}17E4S>mSLGBLv>vA;{(r|xX!=GS%5%=KU&O|r!(paDsu$(8j z&J1hh#i|}eQ`6X7S6Jj;%Sc8;Y1|-9b5?LP|HAenM#~dYgWNyGivZAs;zX7*as>Z| zbc_Y|IByS4dLx_AnvA>wT)eJDYjDAZDU#EU`x~mnFMa<}e`VxTsUy}~d=Y-9sAO=? zl}h-ErSPLhbDuAY&@rL^IC3VbY;0c=I$6dtSC$}kS{!XHN=W*}(O5R5^VE9B>WeP- zmg?h(eGV&|GOFwg?hu~tavlGuhiXU!9tzmc_W7-K^1X1!GY(mIQsV59w=vm%L;Et3 zR@|Tho$dpZAQ%Bfe5v6;UuRm*wOBS169Wp*l*Z<+jr3E_UV*gaVBDJ;Oa)Kt-fTKY zYh47$lj~9?=lqoTaK4?fhZQ&||C3&6jvNRF2@e@q)ni@%+`ZoqQ*1l1_~_73IzcPU zNCu5y>wtVk<~e+;=D#fH59`p1!~ThYVSY>)D*fQI^#5HoA9g8ny`IDOZ9t<&jDKN4 zCd@z(E2dtexrwJlpeJtn`DlI}-S@ZcdiVsT{uFeJ9$z|4n9>!WNVIyYaMzwEpz9^zk(UG@ww~I|-9wW}!ja)5$|&pO_O~?IWd{GWGu-Z5X-577PdUJ2;3FYbR;p zgmG&0@z|ya5-Mjm=*R_f)zO+|C{f-&^pG-1K!W3+AM7@ZYB1kpS$gc7UX1$DLu;#Ic9OozZC20Ae9WuptlgM#PAb@bjAj8-)a+ri@jyA1lOUuJGf!BKo& zN}zYs@7x;h-X&&?r4?^f-1a1ca^&8e@+g4zq%{}#E^W~OrjaW^9I>nhx&j%@(aV{@ z-ZU1mi2S?kDnE^^F1I--h`Cvrj`Yy8ugiI+FDJTyD3c}sq`*@=l^G#<#Mmcc%&yYP zR->t`^&#JSiXyUo=Oy#>I$1HsP(dPgB~41jcJ=PQlF?N}n{j9qy0!v9^Q(}MX)cyu z8c4eKuvK>F)_j&Ne1JPApM6#Uc#g_$`?FnBZ+{Z7DQ2`>hcAJ1v!%MbU<~xv4OT*V z?isf0w<0&vg-xEq2Z{rzf9&(TU`Pq=MhG3_$x4n`Yaoh$^CW(mHmuRA`sUXsI^$2K z+?=rdZ)q&8LnO33bMVCuPU%+2CZJ;&nHQ!2L|NH7iY3=$qGz z_WH?vxtq2Ytjfi$IAW_+XraQZT0l{8{2rG}3^}OfjDhnF-TnQ}i`98ovn*co$;RPI z^q)~q z?7L~_%T*%b=Q^;v>Xz=CGEZE4BJeyGM7bmPPoZ3DDYG`?U@p1{Gs2z0)A8zPT@1@C zaWZoGxCfS;ke5CF)o2PRq4o{eVhhmg)nb&T>l0*p&qq)6{+UbxZXD1$2anh7LWES{e?WY256mIwI++FDRJ$V?bv^oR?uZCui6p}xy)Kd2}gRgCDktSVk~R(9xIzVU5qN2F=v`x zsdwuhevvFXo$JZReo`#A#(N*Hiq10PZf2T*g%~NJvWT|w{8fbMu?Z`=BH)BCs&d|D z?NpgiFjA z+S3Ep-SNNrm>JHk+Ium$^ITlPeCW zqtpt1D!N6XxclV;xx^H_7E7cHpUW|azQDj^Xyo5u?abR7Q53SZHZj-3QlgBg^Oj(@%^ybM*O=)5Sw*!FWsTqkc}p5R;l62f%7o` zN}NnfeT2jPXzpK)9K4O}2A*^p=OG(uqX8u4uQ!!lNIKC08IWMM+MZ`dg{F<2|9H9s|H}T`A#}0We=k$=sd17FaEh zyITtJ?kIckk4B_;U%%IgPzWb06ws^%0A|Bn?ip?EXJb*TD8R31*E7!~R%OnHF&?ps z&Uj#AHqv#_xSEke6~KhD3k9Ss{BOlB(IZs*bj|32h8tDN6*};qRdw<#7J;ElJM>hV=qQ%ldgX=4%|2D*9 z9eIr@W+<&BBhC+{L2U9hh7AuX6v3wwreXgL>?_Icd$Ud}7)T02nErV?eN4Zm4M^tH z>uE7(TK4513O`HLuoFV&{6mO$w{t5tJ#2?+lq@FhcQ{?6nwZ 
z)S=5)iY+_pqhw}x%lk~zwZHO~bG`Xe4eJ7eq+Xi$7{S+`J1}xHl~|>j)@m1)JMiVW z@oP7pyw^IN-tILc!dyC@Wmy@_Sdkb^6Ee5kc9PBz@s0f<}yU}7KtXVjm(73NfJ zOuL(!%1$7+w(kc$nQEylL*G+tdi(ltS`O@EWkVaCcBzq^No1Jml^<`?wa(`Yd8P?VYJ};gkem+r;_(&2q>B+!mibw4#!t^|qbmxHbe(w*2 zP6K8>Yx;=Q%F?df8hLn~nhrS62N8L={&x9g$d+QVYWaZmyyt>^M^?NYlFJ>}o37rL zNtZ@Ki`_)Y>;N0NsL#6+;APsH8dG84Yj*1AU7NxuTEcgD^^UD*U&~oPzM_ zaWJ|3%fhx;B0nqEKmyZmQRuMr0vmtJ)Bezg8~*z(=+n{zM#PV~*L+QCvS+XZDt9_j zu?lHRq&ZQlvVO;>?e3fSO=u=7gDj~rqCV8i1#+zqnV=C2;jXMWjO6IgNV>JXC}2DVQi3-EIge^TaY z5xZqcDrTJmk+Gx1yW888W?^Ae{6~(7pi~^vbcqL@s6waY7b*UF(oUt=T?Ox`b4Hn!y#F_dETuN$IX-Ctp5R*~Pz{9}5v44^KPwa>gULw|(T|oh~l_d{RM)7q% z#oDO5#IIi0_jYQ%7xl9*!yHq1$@sp8)4}5h+KFKZ*fwu*%Mj30nM5!re-OrVM53$# zZnEx|kSUZEi)TfVB98VNcV^7P*`Y(>)woIB91x?LtAbQy`H~&=|LS&7gGNM})CrWB z$+p5Yu+?Ah0qXGh#0DY2k_UIZEm2yLL^;ioj&m1-U zfMj=uCBK|f!J-}_+;~T9sc;q`(@mo_sV$ZWV+M4GH#xE=58H_beh=~h($lmiI`t?CG`gI7r`r-C1iQt~ z_LIS|n)Wjm!BYEcZF+#4Jbw*V#N%MIx;^}Dh`E(`YK}iH3}AhD$rhV88m;F?lvMsV zMeC8UX72)*7cFlb0K}npdm}Q@&{5f#|J=gY0w-2cDchWKHon$pxxdYQZh5Ha+LGG3tdsn`P1QH**NhER(yvW#6J1J+&C_DMC}J9pOK~!M z+08SD!*>DXCMI4hu_zMX$yFZ9GJ%JN1bVy%GN8~fAAc&hE))dLMp302`VW+;P3H18 z$eViwCiyn3C-iE4-U+22pt&{bV3=<9ka3|EO^YRHO7=!imRNobI%bW=PIu#Ma3kE` zG!-AEhEdzPJs}*3MR7ba;!59W{;F+U;c750?@G3@yyE2(Mgh5;ZSh1lEg$W_**Du_ zKA%X4SCA^TMxnKmKEGOry2gby+~W9Fl>AT6BvRN8!;lCU@za9LrH;5E#cRt|W$ApK zZ`3jU@*NP)VRF3{7zlU1Wuck^}tx& z>)baLW8Z1@z8p6wKaPq!p0n2U>2u4N&G&bfIUokqZIC%5+D+DqA!Rf57FMYt@ zfJH=I(f^ceqK&4Hu+IOZJra;N0U-nDPqFEKPf}-76cO_UZ*C|v)HdddqinPlB{A5i zBp8`OxFInXIQ+FeTjGmbH@F}{Ug0z^~F808ig|+@ek6?>_DN+9II3Z@qq4J zHzs0LO2m(RSIe`!l}(xazHz zoE3M=`+AC-+MK>63c56=rCi8Kxfh9~L;wH-paGuiYD9nTriU{Au%)>vSK1sZYHI4W zSgx&*u$LTK5sA@RzEP$VJeuq5uPZTSFaQcL{_;j_(V4-v5GZO@5-qkR?3F@M4-tea-n@0{UV$Bmo`a!&6d-fO3VTL3272PaN1=xHnRw#aU5)>if};I*Xf*h*6wv=Z)X7>d)<_elGIVhuAJ)v~A^q{iL|cAoBI7$!hW2NRCL zpF+P)+0anv$gzq4#h&5FaOfpM^yc#RW&HRBul-9@p%x2)3Okcetwgl7X6saN-bDjG z^SZYt?Rnp0W-~(!!#`41z>$88<2jeBMEbtWhrin-tEs8y4dr~Z<<%B{L z{XbqLHGk7nJFJV+IusfI5Y1ga+9zk>@cn!4sdH)C<>#V-Tpv}o7Vy*dJ9&D`R~<8-cLJCcl0*M1{_d4 zfftwJab-OeKxmARr-wf?Br_TI;o+ewAhsxUxP`138*K3*XT7T+>)4_C3-LNkG&fJ! 
z0=nJ`%lXN+m^VRpy#3veczw#p_FjGSD@@x*mAjrKHuXwPbo-uDp%dS+w#p{4=C5uW zXfDYscYd!%|NEEFR+{fQm?>?a6ci0!TZRJa0SkpJM#Ea-`v~~!#E#}zRRpqz(CH_R z<3C7PlloP%L2J6@Yp7y z4M@3FzkF#g?zr(I;D1OgbZ@us$M2Sw6)Gk;l*4M@5cZG~U!%)|%wsMd~2f?$xB!Gd&qO zW1{3>APEI)L`RFxZQL3c#}Ab;q)Wj7?}@!|9~2Mrfe)DWnXa5E*e-FHCOKc5M9Zev zq$qPd_V!!^Kw&NDyZ}sDzWYF~@A{9mJ^B72_d8JZ7sHkuehVt%JmlZ8l#D|jeRw?2 z#k+wghq$;~$&%fMkL~ZitPY+6&ob&^duw1~Xi!YbrLIz*nkJr~whmClKaLoA2j+&L zz+|{bg4Zkr>u3OFXJO?|YAKM%S-aF1%2l_-*f(FaSv;|;5Z#}*_AK5~&ZH!}L;fdv zPQieGsupO)I50NROxVuIoPF?5bK31;@6pc|M<6<$0{mK@YIPjq8(}8~Fd$4* zVE%9NLVzfE8C#&;@woZ}$J;}CjQ`|1cc(fj)3tzZTj3H)*sM2%NQ0&X%EnzZhb66W z6VDU*Af?}{z-tjQ~2JVI<(kc3(BnL5*%QV4mCYay81QESJ0}ne3mVB zt88#a9|d^F;Dj5#JFj5?##81M1H{sWMLct#A-r`~jP5<3zab?0+<;uSZ34n$Uacln zbl^nd4mODrXwmzEJYMRcB9HL_RE9wrevEm$b2BjgFJf4oL>037Qnvf|qJb7hYkOfB zmj_FE_DntnF(*NwIc!L!77%h5%J78n-%P0I6k=dYP5E42Q?i^W;&l-7E1PEXk6F?= z(#@OlDq^!~4jVW-=_%7>4^qlkX!ton+gr+nI;XrZ)^;ck<>^6|y|YuderE{LOq+upEfCnN$4M_+g9 zNB>d^+}vKoItdRf@`n_n4k9U2l7^wu#YS7@3uf7tdO9fG$8@51G~u6}^c5$ar$3j}r+goj_CA^sn_ zhf14?Tav|*$v2p)>lMDQ4;S}8b0OLrlfNymk*q2=*w1DSmIK^kCH?d;v=YEk8=W}kOKPcGtOs^XPRFw*87c$X#>DLm+9d(DNKIHUx3P<|EJ@*P zY_f}P2-KZPnx#h*mpU@s>}n6D8qOHh(u{u+MdzG7Zz=}FM{G)phB1U&$-%`QKcjgq zHWfNjh?=K5OlbFGw6<*SBpGGD;|5IRctPA2RF5d5>kig_`4yLb6uN`ndSKi=Nw6DE znb8ur=mT-(%rI9j+H2MTtIEa(YL<8>7_7{JA{9sYtT1HW_t*CUTMX(D$K<%=jS-UN zBXUh{#68NP?`(%3P3`hNhc){o@2sLS+$FsX@3uzPu$to6>U z?5emxZ+g)Eb9!?7GIiK3gePNhuzzyGI;9QDNcPeKWP!(3WnQm$da2<>FYc+vIA$1X zy`*7O=9OL~^XlxVD`wcAq+2{|5?~NAC2&$(sG9qh3EiuA0BU4(d4-GcimgU+dP@EklZfd9N9&l|8S@rs*wB3x^OwU5%qub zlufMYfwplmfCAK%!HxZgse`0PkS_eDBi}N*s%vpEHZ8c81u{is+-ZLLTFvT_^O)n0 z5zo({4yV<()Z&}o`Zu}^%pISS!%J!G7=ZDGFAhCKj(%(cJow4FAyE}FtBq_$YfVD> zaH4%n==!-HAvMyLfZBtslyg^zOVqBFrU_4Vb1QSLSz`L-*X zBHy+${rhEYLAEW?YxAOCw{ZMCc^cdC4~t|j`FNYP%plJOgZz4>VJM-x?)6euAsHwg zAZ7c`nZm#J5h#yUZ4QWx=7-H;iE+Bb-nGRl)Yb8Hs&T}q!@bKuW2^LEY6;^@j;076 zotL(}5v+rT`gKbVd{Lntg4@bGFL%{YH6I8F@WfXX{v497FWgceahs^5#b^9`WmLIwyKkM zsjsKB28~||hSRA$hx;!_kgrdmgxS~)m~r_uCb0!*29xb;7O9dYt5A>&!V;+T)Sf>3d-d?2kFvf66}H@#2)!5bj6uEabxB7L@lp^U?2yOb3qn@^dyt}WhV5>@Jv4~d<*L# zNLiJd3lcU8!z{fmegX`7j_S1`IV1#u6U>%G+cI|SL_#|sP)%l;NdGOP$G$F_@C&QB zcYj6ymyRrM!k|w?QhTA5o%8*;!+(zm0fDM3EhVH`lnW5ly*+49=0QN5B&YbcPNA{( zAx;cgb4t7es9$0+I^`u|tE1f7ssJ{mp+?~#?8z7+!C1pAKgi~OozHC;1|?LMApAeF z%=^*pY5QmUkFPWqbhOwxY}|!qWtJ~E?t)&nJPpx$Gn=`j>*YDoa7QmRn&2;j7vur< zyVBoJCEb(hs6lLfI{at>{J1D81W%H7ERiI3W#3I4+Ty4t3*b5%oG6k(bhSpMiUx_O zHIhw@s90|h+@qFIbQPmjd~yG?$&K-E9V!HPxk@yoQ#<^0-h|4)&rY zF}nyE3>N2+_h`q~UxMtsPQ+UCvw8Pj92ZCP8%&2WW}(G(_pTFp$}Ah-j&hsJD@d)u z3yPIg-=7FxHutUDOjJS!`kkvsqRAvH?akaTZqi4sL#GS^-1|xRQR-c+CTLlSR7w?z z9YI=8>Ok%w6N5(8CKyX8Ez!5rgkh8aQD$TAEmw5eGaT1{lPR#nP(}Bcu!4>`yTIp2 z!+#KIIa&T6Wpcv2>Z3a7>JpFVR(G^O1I#W^L;4Tt>v$qa1gq3wHp}n8F_W3QTbLV4 z1IcK8$W3c1_{Wh^BArfo5kH@SmRdVRa&!jK3$o5+)Eh{t(TgMv(RJGWI^SJvViK&fB}PO%(Xg|?$6#rS*8?Kuwd|R#p=kDcfExdL z>p-oYc(njpK&8LL;L;U9zB!p)tl5}alAK2wx3(H2V|hSs`bc(-!Tmplb)scYU;k~# zxSRQ5MfS9oioKW^taL85^Aq!@J(0WlSY;kP6PQb`yrW86Bx*w!hapX<>oN{;Ly&2l z(B2Uy3)ju*V6PD{%^D5l#hECPYPO)m>im#_NWY<0epDr9pAm?8E6??xn5_-tdr!_* zLSrQHUYCttsih8m?BjJ`n$M;Kpo;b3CrEV~=>b==9t*khbvY_=pDCp(hH+$x^G$uGc~q-a@4pyj z`_$TRg055XkQu)|Oq@1A)|FZC% zZE#E4{Ifj?e*xly;l;R}t|@nJz)bfxA$m^e3W(m+qaN_Edb0`q)JZT&Ag9AH!s^nhKd zuk=~T4KS*M!HAdRtGlopq|t$sCz&WCk-OZde^e~&FmrAq5hc@p9sb5kHrU-1&IelF z%M(@L5^}tN?3=nH}nUP%N*F;1b{3 z{;z5obbIefd+f~X%nKn&Y~k2|AT4IWOoXPL1;aTVWeMuePbk!nc^lT9GLDE#Gw#{d8Zzd@f4MHMWc=4|l7qoH%b*jEI^ zK&1)`>EyY}lPhgiCtm~uYkEEcu5)S5Vb2=B;>MMdm^ZL>DI^Gh0c=w&55oe(x&cbX8Qy~AeiEYPkY96|q2Nr^kH 
z8Ncpm*Dy&MSUKo$j--p5h6dP7gg^Z&DRxCEvMTe|5uA^zD#kX$$m7_yu=QEf*wRT) zcJmQPM_&NQekj_3&3&jjsiu^w<%S-p1rG0sH%c{bU1$z9PkMcmZljd1Uq-%Re{YBm z71c05mWu_O7?q9Hsl!(|cW_u_INzYA4e?YLgzIEZY3V}%=>CWy$LrcPYFjv_IDKFd zi5qtS4-|n*jFn()LR`7uJAZxCHb!DkIdfCPuHdt2MSxAq@kox9@SN6CS$VEe{2fDv zlFgPhiQm$?cy}HK9Q@OJb)KsuUr+3`+3NRhEqQGWo=+Ao&AeTX@eb+M>Ubh6c$7Z! zpv2_wnYD`NI_jrYWrbjsDUCdYA9bJ{NR>IhM=?((B0EqK(_cpyXI<;R`M{P2Hv z|GY{f6Y>{A?yhqC6FtbOHgexbPaW4G`O=};jSs|>0tZ})r`Yg_s4svfV~>eWPw@R| z=3e3rw=S0!tm*SLE93xf2qN z4>^xM3k}xRaM9CSQL{f1aUA!2SQgc`ez8I!ZxpF&AQACZt*DevkaGGzn=54sH$QFF z-pUgGj=^4iAs$J(W1)oc!Wnc&8d?rlmB(^LSrqU0%45`6G0CtNmRBacxCz@z0ZtxB z$OUHXRGURq<0nV@&HqEG#2BI}wtY#x$r7|p>QPnbO9UQ|w9JhVG9wWOSm1& zT|eI4DTG^!_M3>TOGvN}NRqHTi|_#r|I0D2NRi3kY3mbv+J}b>%cCsZ_Vd6A{wBRH zQayoQ#7-qqI3rj6Yl=ZV)BP%uD@)X3wdTI=>}Q}pr99o1T$WN=+#?_4{7c@8DHISGra@9~$ zb96VQei42-pDc08NnQvRBJ~^E3r^?0som^#^XMNyxR|J*_)z9O3>y>7BGx)UiUV}< zy`}}*_=VZo%x)iujGOAxs^Zqrp=$2KPchKH7t8uzb|4n?hFsh+IAa@4@d24icL}6; z>d(?V+zojpq}Pa#eIhuHpZSrs=%*MKJ$nDIvN#kzrts{H|ME-lC zIKdS5(lb|wg!F$=g7eE^za>Oxd+Qeir)U)s^#-x0)oPptky%#}WWofe6CL&dzDHZi*VzH5^aUQzmva>n)0nKzpFg5d0I8b;xaewAUpE!<_jMd6`VHhn@9tsEXM+|dKzm| zQv$}TXeW++L%ZsB=%%=~ov5_o3`ItLU*-t!y#F&Gi%v|P(kfcLij8qZOxH+yiRsIV zLBU#1uD--hEUL(6ce11G!}gXly-PN@~^F29M0Dte(RcH1K+6M5=_K zwQAxj1PTZ9ob6!Sz~o8gEd2X$_F_Ol44A^g{T_6;@VKe&`1aMsf3^!KRK7Cw%H31T zN!hCAMAlrBTUt(=;WPRhIpbxrh_mOm5XsA8f#DS19@}Lyl^Xgr1a{v5j*X!A@<`_` zXwhpm&lqndf;si`;K|L?f6NFIt$k!2@N>vnp;6$jqTi#x{dbzVKMpY5GG+E#4lCB% z5DJ(sKVNz1yfhUQ9%^J&NgK*g#*!M7xL5hYbloBgQD>jY*m9IPBD96gWT@Vb^h`9h z!(c5$tT4Rkcw#<|D!@tA0&=VtgChSLEo%oVeTDMhe4jxVC zS7Sh=+d%J^m!~UGu-1H-o9y->d(d+sBG z*kYc)LZ0_>?n)l(+ji8`h-tSiAK=*$LG|}~Mmm{za2_a<*QjDR`;gW+3HY=(h3D$d zt*EtC2g1y}<(gP7ZGo)inB78`-wFJwZty&y6lt;6>(2&v%B#u{)fd@o_9z?c>8!qq^S>HYG?Q94nT@}3`qN)& z571k|-pFjy&x3o+VeOzL4Oc!MmY6IRm(lED)xaB|*KMqwZ?FaLD!{Kq6I;dU-1%8O zrzn&5N;V7@v2m}81~k}JC18>GfMW|}R2^Uay;9;7-b;Myi**`?PanMt#QX;u^4b{? 
zEMLA^ZFEUZhg8pl5JqgR723WoyI+A!6GYYQ`@L&#%htvo!rcf($=JBh=n4BO_uPk4r zR5aBkpSRAc5_Pdhnt^+aH&LRp!hF`vk@fZrl-zxPy>@_OmPU~DXXI9qpvUvFkUyyt zT%8P#HAuGWaM)r2qMYD;*T~(%9^-4cQqQ1k9#BDl_O}cm-TAZ&2it)kk_E$h?T=V= z?h(zcAR&b9I0Jgn7j4o91Om_+C1mH%THyS^t6Ydr!XYZOsXu!f{_-hDTS_>+igb<3 zi0zN zL2Q{!3IAX3(}w2fDD3)JK1(k)BdLZCQ34eA-1rR4JCH{uzO-5Yn-QX#<4nLE3q`vD z8Vac+*-kNh(H_D@SQwD>b8!f?U_X(D2NSMhVe;dJ8O7j`9u0D-l)s#Dx|c0L0;Bag z%tL-UyoAr#ckxQOrPg1DWWoFyC|VpD4m@i7M4VSu$R3ILJ(`Z=95Sl8*scqRxJ$Z5%R3@2$?mZ9tTCt*ra^D0XN|?P2KdnkFGX8Yvy= z{(bu~=Kfmcs0g;FG~`3+s+#3+x-@_1i)VL0x2;CUfcRuHJmkeLwu%qr*$mH0C+RjH z84(lAcpD4c%I3V4E}uy5&)s4vH}TKDW}0`X5e1SZ)+-By73x@)^?6&4^*WZ><+I4Z zm5U1SBD=JRDJ}O)%X5+QIw3HCjv|Z|vvv?|K+|3j&rZ%vbgE@_?VSZC`-Yf%*79L*MDyy871BjyP-VPU-RS7WwsfN9U2RDd76<2`?1q5TFE8)0^U z2C=SJo0lgpfgI77cAGV|Oba1m(t}`W!@^!9rd(G1F7q)nU{B%p0BhrRWE0Td!@}i` zOp@FEcomwx=9SmkP|H}96QBbrD7wB{Bn$Q^6ExwImKv>6!|~XsADc~M$Pa)L%%)L$ zKq95OE%x!D5(&2@`FAk!k*vjmQ>^Cs!J-(S(w`S6)9bbUuKL&j(8s*V+#y6Wz!Kbv zO7ItWwazxQUFVQM2eVw#F!UIt8IgsQDd-SkSyl#Bk$mPn;p_OlM>#q;;C(y>47f@{ zyxem!Uq;xq4Puk4xuokM&6@=2)9uiPj>cf`6$23ZHL22-AA7Er?m^XfHi^UlU-QKb z)p*!A-a{7YXs^$m#^0D+j+y+BXt&&xe2(+ZA8e`D|Oo{7J8Y{=& zUURpL!!OW)1qhl&Wtwf=;ea5ya`^kY4ZDcE6)-J>eflgU>{wVL0d_w_o1=d=ZCvDs>75wwrtmu9#ZOM3#?IKlg9tYEK>qW@{2 zW?r|1Tnv5R%n5{nF}y9($LR)BbGciC9zJ6CW^^IUr-HY|1$ko|vj<=)Wg zO7-?l>rWDh%5`f)`6b@pe;gN?;sWuXkSINXd9U1(rXgf~X*1VRiSxK}but_C-9M=V zVIU#7oc6E@PTR4gAu1=Rotc~0hNe0mLFvJtpA`ob&Ot+VbZbm5O^zA%qTZOgnE@|W zm5CKVFnKh#iHD(4J$uUg2P`w}1rrOpQ9H#hFZIp5Dg0K*iPfMKiSE~dAgQ{7BIY#; zD;wG(OG9FoWOj3%Fh#v6ng(yz$r~_g*7bDhOVAhx4|{FBSH?HADjcApR^qk(}bCUJ4;h&>K{3Hx_*UTJ8_i7XVRjBq@PMCly*O6SoUzjmfzeJ=B8~2 z|Baj}pneg}+0o6rWTRUtgrCb;7+Rin^%Y{-%cOcbPuPi&L=_GidUe*%dghLb#+o{( zR2+gxM#6tSYCJeRRxA~z6nRE01o+XHb%WnC<2ChU++j6T$TdO?m8Chsbs4oTVoOcV z;Bm-_Xqd<5Zve*mxxt%pU>5Tsh>$FnPfp5~IzwStEZ+&W5ud^BzdyC=$&ApUprbJ| zT}y;HLCd2PekYo!)TeiVy>QlV7>PWkdT`_xNQaHX;F4 zZqR_*ysK#(0qMolP6{^14$o~#x<$HSRA4*>sctjt%AEafWeJ2hfwJ&;1~Z_X6sX@? 
z6cm@AU}Ai6BYwz6J8=KD1Isk|MG*QLw&F)t4o zLSBsCsl=P-OGw8)R%jy;_Fl-;KAtMXI`o9%-4;yMTv&$o6P-iXu)El;=|f9kIEx$o z@>bZGXzh4u5KMA;k}X?eRYcZGPBN8_BLFqLkzBfGW)Idc5JN3HiR}f-N!*`M&c^(- zyGm$0Rxx+;DT|^SD=5}IKaEBIfaIgW^gLFlS9d-l+P1J=xVOOKfpsOjhyCj9v~Z@; z`17IFMKSq4)(hp@6V+l9Eu>4R^mts?qV}nqgQq!(EW-p4|GO%SPyB#VUw(*~0m9qE zwwTl4q3XoW#;B$Cp~miaZyIct_HqZ~AOD*IhI(S-l!&k;gCwP4@WGSN>>P>`$d0Gv ziAkWSo~ummda{yW-rF82$Cf0am361>`tNl&79T^bu7-l=V4Y8;Fo7a|1iXT^F)u0Y5N6vG})h*9cJ-SQFAkOs~S zS5VO9s6}MduplK9Z=NiTeOmH;N&<5~@;I626NYe+w*w_7AFM+zT&-YK`9_lbf>1W{}@GuB2^CKOP%5^?Vo9h8)zG?JY>s`{bL zwO19W3r*@73Q6#!ku`$E38D5mq_3ig-q%Du`n>0=hnlDCH*q2xHryFF#9bSL#wERG zyI(bZL1dUPtL3rUzNf8ze|NCvs0Zsz!c?*W6)VL*Xd*gX;$}bWd4o|@WxapjHExAw zIIJY~g-kuDNW*s@|H`7OgM%ch%FMUR%*^U+)rPgdlck-nM}K2&2Sp8rs8$d5#tXdg zIY8KIp$F~>mG4crMe+uI{I6@G2lA=m>AMsLnrDGG^&nteAkJvIn0eeFtI1uOMQ4Hn zfJA#!e$5dX=0q{l;+TZv(8gC@kb|O;m1j%DThsNM`0Y3Dz07$>KPDYMhB=DOI!f}m z4gwdd!l|pYR{KtV$vKko@))Ue-QKgyJ?0vZXW!-XoPj>Xs`r2oDf&(AY)%60SQ4iX zN7SYWE_YX~E}-^m94luQrFm6r?!i0cP5``xHbip1k6Qz@=nXqLUix4xseG)DN!_sa z7F&O^C-Z+wL}~pV_%O_=Y~%3daJ76NH-34pIFryN z<@sx?rWun5V6n@vfmm02_BwUghn5`}3o7L(GX~CRLc-9vcMLF(_+>d8#vJa#@XWuN z1RE?a9O~$UyY#$y=V32{H3X*X*~RT1yISO+jMDWt4@2?VTmLI04e|c>qL6^dQbSKg z-&iqT2CBSwKGtswgnZXi6@=t~Gh+;!<=i?)%Ru^*6vflNFs~3|q&~ZuQj@}Z`fWv; z?1@Y1SBQV-$ZzNa`j2w&=A3AUU-tZZ0%t6-zIJ5>JY<{T!tdAYux;!FfNv37Qc`Wl z*=!v{>_(SCb6&ok_kq-TMzfXmqkgS7oT_CoF99XxMLGif${Z%Da3#tsvHslIT+*Tz z_9>SZd&C#GBahzqNmuM53}72zKR1!foK$ld%uvevP|FUfHvHG5C|(#@vg4^pn*(## z+*n;a%LUAB=*lPyd^ z5k}uR!GDM|t(J*{7-zEh5qA*&yWhE^toEg4T>~F#uQoSNDxtgLtx|i;=AJPB>*?@q zrJnDBE>fP~6gDbJi;dg4Ni-k7QS)*`5pGNC8Az;(b_~XaiV~pil;r$j<68&cT7Nr7 z&*~rSyS$7!?9MOoe0R4tqyCCN!YEa7|8|VLSSBRHEdYtKp_$P!TXulV>y1vgXV*Li ziDq>qHl#hJ5V*6CVfJr0VwY8)22%_#8)9{HJJxvi#XmOpdo>NLbB1a(=ITzh&s!2S zTOckGq?I-;WjBAa6nTlFa;g|3@uaR&mRO0@AprzIc7&Z#zU7OXD-^u8R)(*EhrdI&j0 z>=;Rt(%UjfsitGpomm@~A}3+oQbiHHJ=}FWdYG^f+8G2Dww=4k-U!O_dc^40IOwKl zpV4P%L#MzAUned=xFRf;!uB4H*i$g-tAGFi1|mV9QbiRk-|7GU@Nvk*u^@!bv&IBh zzX>zkpn|LfaTwwUw{(s%cQq$%@hKSdJ}+$>sF-X5S}D3i!U_E@XV>^JIE3#;*$_HN2TLK`QJ)|C`*#!NEvJKQdzK) z3Rvc^So7y(i+2m`WBBSU3bhz^Vr=tbtsH;G<}S6?F5I%9y1}yvx*^d&xE1sT%2oO6=jEu{e z^4iD#HnQSy{FYQgvBMXw(rqg>LVBmh5}nZVN4vzZBP8H6ecFjBUuZ?8+jOZFVf^~1^pWZ^kc(urrKP%o! 
zEt~!KwEc_uMnbN+wE{(vV`|up$ZBY1VncV!7=w{W0@*2o%83+U;kv zuHQ&blw;|bRUi!d{1o!^V5{p0bzc^JU)s5{H=}UBc5{jdpj>)Ag@^A#@CrwFWVjK>p-SauPCD~vr3E6ig6{4Im1VGg)$oMJJq&WnF{A!%D6 zcuzZH&b22NB7~lh9}%^sKWt#C9gf{icI ziz>^zig}e(?!_LZQR_`9C)tu-MfNsA>Y9nQ=^@qa96tU@%t>d{RzE&5{U%lqPDW|k zqf;itoqZ5v%TS+^Zy1!tNKiCuHC-6c5Qmk!lz(LV@CNUXK)F2*Kg(rLTPEaDN(oR1 z(;7Jsm*op8kwL8`vV^r`6* z&)y}dN6Ktg>a)7vbs0fvCrxl{PhfQJJ$jxnb4~U@Wfmoi&SCZ4Ut*ziR=Hx1Jo*>N~vy$caJ+CIDlV)j9 zCokx#Yldf(O6F~4?}&DLT)F`IY^4VP?ICRJGTfD9F88^-*Sm&{&E1!rdb&gKrvk9m zj=jiqNi>{7W|c$Z2n7&BgyGmAzo>uoGv4v+9q9s+FUHyVzvc~1-2((ps4$)ST7H>h zmIDZy*vm2GE|F2B3y8{B^y9?y4!HtO^2^L>q+pMy-!<-XQ%E;g0xyRV)uttT4mNLBa#a>IqOZ`qw*9sW;-KNF-df1@Q7q(|JGIgV~ zo2G%*$AvkCxcX`xoN8T$6%$&?6au(C+Xm5hQa(?E;vuQ3a+1{4x4lPz_Xq-gvYcsv zU%cD6JXnx^+s^fNu)Sa-4}#17u_(k=(9?szMJmhn&cBQ)b}$+o54!mO--(;ZW*=Lz z6s;CR`#HFA!oN#=OWh91*MZyEu-1XO`J&zZ(k59SwkfLk&j?$he!2Eho1h(H!2&I* zQ}~6~yQU0I_R4x>;8h#;^3)i!HSV*mSe_%7zSD}(>yaST-hy-b#5~Fxs5kCDdeC4h z4Dj8gq$|!GTG6nkATmMrHImTXAXfJ=uaWbbDo_==FCR|D!``rA@Sq!ExTn3rEww(6#R>+tC$!|J*r;rT_e=otE% z!)_sm8H!6*w}DLRWa+>d%b;WaQP|@bSMF3y7({Y$=ZNh0O2iMFpoUq9g9uSS*nr&YW9|&e}{ZBE?ePM z7VprttP-X#J`oeRzrpKVSD)P%q|`~2=Q~|DcEvu4=lzdNN1`RNeE;mY3{fIOl4d4W zrpeal?5IPEXg|rt2=E4uw|hC4(wMy#xiP?&69}CowwmdL-5j(=W+Lcm_-F=jsQ z8YX@IpfZr!>KhB^W!rB36y{ojOxI4tZ4L5}n|=d-mYaA|(ZM^ETa?&Wn7EZDBB8UV zGEk2=Dh-69cYbNr*nX%ZcT(6T*02)PLnq;Q1Gp-~xM~$Qc39DAGvNp_(L^M7lhIKa z66B!HE-_@T7hB+1{c*Dt$gKvV9^}Ce9^bZaU=GU2A;te0QaJ`cA#1BBCHoPp-x=)~ z)@qaEsb+X_WGC?+9&6=KgAFeY*e-U;HO0)kiVPeUoD0odY+*!R1u>x3yH(yUv?}#C zlmH>32A{4!GwNgG&2wuuhiH#Md*tHZUu!ZM1AtTs%{68+AzewQo^f7vLwjkr@Eq`2T`2ZF&W&7hnVnd#`J>#R)HNr=C93vav`R2hO=Hk+b%c zL-}=Q%Ek^ItGJqP=xuZ-;K$#E7uyF^%9H)y7R{U#lGeZN+VMcDQ*dY<#Rm1Tnms~@ zlUyx8{LvzK(vNB>q0I?I8u#L+Sq9rW*2$OxB^_dpP0Z`LDUS&fgj{f2eq0k5BvPTC3}}K+h9}VfB5l5uKRBuV+dM|@=}!4Gq~Upw zFKoP;@E89FyAD3I!lVvVrAD>jkG%zn1wIyk)n0^nF!~n75O)Exmp!9Y+&6{M!h$U; zc8AVLu(^z$0u?ZTjT;@PE%*3hZ%E{TlZ7;%8n;4v;8DCR`()}WZI zUvbS1n7eF})_3$@w-X~BZciH~L?P18@#xN}xxq*;*Ih<|WHsD}UEJ%~Jr?0V#;$hxHBvaja4Y*{h*A z4t$laL43sQLgoF8^&KyplU}AA=b?>vIer7Mfx!$0Dd=!=jh(%qOEeKPvHQtuaZ)6( zGveChd#Au8Y=Ip10&TKk!O2!h?|jfWKGJ5}L=)=V3sZ@u+vSKRUd@4)!jI#wtGW;} z9+7=BSmWM}*WzDw-W4bya0yqnX!jniK0<8!KLiKh3W3#U0MKKVUNct?+;B*jpYDXY zU-bQr%mZxceJA*vt*I);mL=J8Xu*Ae1wl^Qn!`y?C8ur$kEaiiwZ)~&2q4YN#bfuW zZx+eRaA?xvdc&t-1D2uL|al;Ho!f(pF6tNjearU(P2Clk`lOJ7tjCNU!s z&JGf!vzWcL2Pra~2TsXN4;w3nS;*w#rzV#7fJpU}Cz2Z(92$0<7E^n00!mpx&lU`- zs#^nj_AtSPEfeO30`6zh4YG{A7!%Y_m=1|X55>Y5|F}OdA;mrgeEjDpb1c7%CIf>n zq?^7;SZ$5EVV}N%53!Iny+K6L`))c6PBWApYa@9Q% zlrnx2<%gTQf#N`)TTuR(g1|R7<^WK9MVgY`clY7@T?$)tC~}1oe5RLOU?B9^6xR6( zW<;t$Q7gRFJpA?&xc}9U*Hg>_w!1Ax1_3Lnll3?);=s$<-kQKcYMCj&wRX&s7_G`F z6o1N5JUnx}%1#|1No>qC$sct$_-OB-gQ6CpM>9xX59L>7v0#C=9`dSOrq%MT|n=i`p0ZN zH{06_BwF=>My}S}^n8hUa!vO6S>Au)VLyQ~9flpNf^Z~=TE%fwwP7d#Bau)eMw+!} zjk1QT{0zK$j~T~arYI#(pT+n&S{jv8iw&ADeSESFZpual?ddNcgFhm_o)R7R`KL9C zlnm@-oH1_=|_sMK}M#w)P-}*6!`$-8x z2j84rwMVYcuv3Q9b-W$BU`g;(!^4*R^LCyX@F0L!tA6kc)>gj{z_dYdX96N93Qgxq zz@l(?8V{SSe2$u86_cG1EAi|%9eJZ0QfiUtR?=C8aPkgVHByr)P)e%h%I87Si$WuF z|EfOrjJucofP0rU5r(b0{hb16q*b`t@hn9PsIoN+s4|&<7`v{DeY!*Hz#5R2gxVfd zxcBj>&w{=%ix>`&|7CfEjkS>~*OyH*xWRs6B%Q!PALvC90Uj5mx*p0{}X%o{@o&54fGp9JFwpx=S3t?o@?XA*9VGe>7!H!B3MQJyUznSbNzh21( z_uErj+OwB`nyVb2%)`!h9vBf z3;k?wK-$8ruK#OWT_eQ)V^y94v0l~OC?UN*p58-klf~9SmKC;hldmvlg)P=2e2M){ z+M3;i>hzN&+dR0)Pi+@HB08Yze*(n1P5+PZ=C~N*cQOg@NuPih3V5?0X1Dv7MSX~} zx=Z^ZajVYW0c0@F39=8P!K%D(%0ncXPE9eW;|(53%CY15aF+3TE1h239iGXU;^RYy z$u-Quq7zPqO29JX5oHOo18z90>;$yReDJ7OGF0cJ&#X`=$57C?5$SMGK8^}9{~Fu+ 
z7>r0o@@K*~!YJuBOBa%{e9b9e8BSfQ1AS$R9(cpyHs_ah_$B!FK&M)P-FWAKdhP47T?3_UN-)oP|DqpT;Z>JUD+#DYe9Ql`05%s}h>jEq*(2wDQ zz!ki`jrZzMZTdWfW&jQEUag%y4Cu3DH&RpA(vO6#Ns2A-RYJcyw;6`C#2{gYc$m{W zPgYpbH=KAjTTGWvgmigq*#S79DGp%il`hmKeG~MF55N5;13IW=(5ByEOLq715dwO( z%!#bjlC(tBiDl4Lr#><{7z6uAg9guDyV5jEGFtTrj}Nb&fQKvqMUPqZ&j@}yx6LTo zE~`u9#xbqA)JO~gW)*azvx(56lCBH%!61Si_&uCh28+<7rtdwGCqtQe4%wO)5-U@C zLsOzmnAdY68eE!qAB7272jibH_Z^UJlOiN^W#O%c9zNyjtH%J}D6EYVWr`e}3Wi7i z^Vc(kns?#3L$0#8o?4x~LxQ}~Kgv?PfXEgBv&)Z6ttL9D^^jz6QW8t;5nya$F9 z6dMk>eQ)uaau}#i1vdPiNow(^*DB zW3VOcsd{S)QEe6dV8w* z?qLFAO5tiDXKT8g&uD<#ZGZSK=hOr#jR>TW$+s z@$~Q{?=(z7UtkCg6)a>!+@%!b98sKxkVe~ZZO6dS$yEz!(@EI;y8XoPyeWSIBpr02 z&fI#gPU}oJhOCXeP_Uu^v^V#gQ~Sbr5l><$tyQsI$6yPc?{N}X87Jg%5YzAZnt74sVE#pw+x<#=AHqO2ISRi~D^h9l z7%Lm*TxgQ7F$+4YBGzVD)zv*>6o_nPohVI9vHpa$Y>Gy&4gv&(rS4^@0u*ae$hp!T zjxg}gR05hZ)EWXA%lwYk&h*~W8b;nn2nMorVisfiI& zU6^w_n>8e}xr$;GF(7aW*kwZBL;!J0q(EQt$U(T_!#VcVIR?yl4gz?Az$D=J?Xf1009%vC) zDMMJ20Y9U=R*zCBVtd6bkPn?ToB5308T0vi!|$#abBYiKNBRlRepUY@85N@4HIu;o zNcd`FVb~*-z{BRnPYr!dsrO2EIO1~=>*T0|Yb))|8g57Trt8phVrR-il3F0Yh#bKO zJvOb8WXvfMX!=@SYDWLlfBm9b!0l3h+jUs!jXc6zm?d`6rwa2+-h|buD_^&6<~q?F zNi)}Y2P*FOuPX>(C9k?d^PlKHC2<$+S$G1`H1PNlZOrLA(_4Tb-_)yGDrZFs-A32R zipLW!gx-ALJlhK0uJU|I=)Wv8!grb3pA{gj;JgzK?|Iu*?FQ~;a4uSW2-;puv&3<0 zF`}g5sfIbP4eT2To$aPi8?$4K+Q>$bQgR#_p8q}iWaki`SOI4HUbJ2Q<-^cXT{pEb zh&f7%J>$Z6TjCEX@Ia}}N&Dal6&6)x_XWHlx?tr^Hb4LJpV^&OsWhmI#f)XtNoS4b zWClJ0XU4bO!{nco3Y6@w^4h}ciZcDet#Gnr_(P_-+5t!TFs zCjW^VsS&g78wAtu@g6(zaWR_=#M`3TD-w9}XK&5GS11`{20b-rVdvN2Ni!w6gLrS? z=TR;-P|r6RlO-oGAmHH;0^=N0cOOfO9j)E!I8APqkh|Le85|*yaTsyelIEF zd0V_;y88Sp=wkYnd&Z|W9ZU^YHoKiiZhBEqB}gcOhG}jdfv`p>oab9KvS!hHP-!36 zCkhTnmN#h$C*R%K{PMfB67XKR;@aP$Y-8{bLw&TWM}n7%ZV)y%p;b`)%<67!(BxqJ zUoW7)gB4$wJ-R6#N;0((Fppin?@A~(@Qy|kdL!pNT}1X;pf7P9z(@)jO}UCrtiyUu zY)N+H-X;9p8UG|c%}-nGAs}|mKHuf*top_1>GZMo?55&A%$*{t%5_a&+7>_a`~i1w{g-6_A!gVU=caOqo? 
z9^-Sy_P)z0+RL~r3#&6L%%b?U(l!s!Aum*edj+M?S;GlT$YY*HoKfZL0psMR4#__?n)l*hygyt^^=rXJ1<1jr386X3of^^Wrtnt+T#8O-+->HnNK!tEIa7fE00xdhpO!@xFaOP-N?p_GHq(F@ z0vAV85!la^iJe$rCp1qT3p6=r?iViB(eK=sgztdk6~7xfF-#&SX=YAo;WASaYCJ|b zXcO-RCK-bI^G{S8x*cUVan5?--sk;Kn26p7WDYR*!H5-_y_7z&^d~Af_{Eb1FVH&H zFI^H}tYNh}W^WSxxCqzFY{Q&DIWKlR;$q;S@(I_0avN~e?Jc=$VJ5V*6dxP0LDN9f zYBMi}aW6R3o&oC2>v=NJ{CirhO2j1y*d;qljHGA?a`-~q{7TnAvA41*r(E_1A@&OB z#td}@s*d_<`1Q%<%Di*k<+DUvJ>CA55{h%dkuP2W#d!AH-;RWqlYJFqAtc?(8XXry z>9lW&fK`ffQG-!QG_BLcTL9=p)dzaUz0ysdS9YfECi8*RIk4Nys-dqoTOvOh-SonU z3|{fFk9)cU<-5~Ptz{*na^d9vi3bT-A$x(JDyB{rXrUEVw%ILl>9y1!7i=-kJnwE1)cZcK6U@ybZ3vExLh1c z_WLgkG|dyn`X{1|aMjre*Z#^t`;;F(> zGNdShxv~9ET(JAhnmdY=-FbqNL$T#=WG(9pl^&dv1cc_~xCi14AV!*2$~4GRlO;0p zOngI2`8{|QBv}p?t#Zl-44>G?O$yugnpjg^xRKh1lQMTBwK2L$m1iEy(3D_M&9 zDES7eu8!jro-VRC$t*J`+SyMPbtba9vn%{lr`f7Jf5(cPumk-Yv71dF6Ohe8Vj3}h zhhy!m?cSl4gdLd}Ym34?m2Mf99!R6CGH+g4Nl%@-6ZwS}*MQsZ&nj0B(rNvXu_ZEu z4rfV#E9$WAu<{?r_?$~v*@|@6bbcMR1bXi7#*kv% zK9j+kWDs4EK@U>&1lJ^?XR9NSNelN+RV61DDvFz!3-c`*Jqcuh|C(mcHGXLO;#!uM z*6oA?V>p51C8;Xo_pfvlSZgd9^Hy(r7#!V6OnTMZeq0`zVMJoG(59RA!64(r0IHGT397Q zoDL?LZIS}h5~C<@yeutKAAm1d{TjA&{hhlp#@}~McH6Kl^ZG!+0ZiICb$z}uaXkl- zJ-Zo3f${fE7gkr8&K|_B-0nMvy9R0rN$QA~DnlbKu3Xd}To*IxXh$)}`D62KPPr(z zNu*R|zg!dcbVZ_HcHGg4AJ2sa1To<9r56-~ak4R@9Uh0)i%gXA%4&!t*smo(+nRZ4INC@|FIW z<%+lF@jQ2N>%krbZthUmrIBp@C&QJOs1pIZO4Es``e0;Dh>ye2*uMmQp1mp=t0XNA>sAgmHAZP%{#=-85#H}z%TaY35|3bkGB1fU!woP8$(uVe?eb3vlc4jz zYIxgyX|`vm7N1Y5EPhGI*asEIn((y`h5S>W+k&Hx=l}o% zcmbcabV>i};dC+y41ztwA_z(b9FAKlA^^$o{{;Rwql4?lK{KD~r<4d{l|}OqV;b$2 z7kPH{)y2I2KI9PC1h$;+vl>yv08BRWNjTw%ic_HADh=Wvj6dZ&Y_a6uXqe2KTRbgF zr3>#O859vo$Iv*BI4cW>S*M^=vHo1>jL!1Tdlm_(KCb3?N@;dcsYYyi=9|ofU#FBn zE<+(QQ;#QE3pV>bw^RERU~5Ya%r}g3L}NIWL%kUP%%GR@(yHw2qmK@)Zo4lY96Y-L zp_~t6RSu_;Ve>;-lR7;SOZZFsaI|qF$pOdHWkGZ~6H(!6h#)tn7|riQvX-E!2VIsZ zly~yPnhPhbH=^+9G4sJm&w~t8ig5{-{8A7?iF+Z!FWTj*xvbDQWaapM^7*uiV<>@f z*FlwL{@Py#-B)bLLznTRGGI9C^LrI1qZ3Le{Df!MiE>RZFdY+{K<%tKU{D&z`zCO8 zkI?2?loQJ*zfYF(61KOAk?OT!b|oSUoo?Gi45CnI06j@T`J?g+X|0Pwq|pklS~nNw zA%5S@WXFOhoTsMkdq>Sh^El-D`dCt;epfEhN_>nKUMad?%302SHg}sJzV~Y^lAGI{MuT5{|pXxl%XeP6bgR41L~@ zsK+${!!j!hU#HC4Tn0`kQnJk!*x+IMTF*5oq5|(cvIaC>(seop7!IggX?E`};Z(?y zxf^k_hrn1km?)DZ_8)!J>Vq5s0FgPL$kp5K4 zb>J-rfrCoWkVIuBch4o2>;`Q+75hDp2yGa^<`ysGPCMA_k2bN)&UpUuHY4>qxDyt( zf#3Uar@g)TiT!Ulp?`gGnb5_~>h(x}qWnzx0w*1E+Ys|mplIPbi*&!MvDVt#`gq*9 z5GWQ1UhsS`vV*8m?O)Rfh@VbZV?&Sci~0z;YS?>Zd|jHV{b6x$?P^)KwR{40!v~A7 ziZIr$ZGSaT_2X?P@<+7z>)paGw^YFA25@xwZ~M`=aCAV>$JMXNGTD=sXl=CvsVknaISHqm`COH~~r zZB177E}*5t5pj1__i-SYy%PdyQ*@+2P*w@5hg1hO%z|lo@u)Mw9#fd@n&JyB71(Vu zsy*e})r#sAAzKceUddLD!?z$ArZIX&Us#lE=YLh*?}tM}G7a5xL}D1fUV20a)FWi`RtL>v$t}9Q~c71MzqC+hB^1S)Xx1J9*AuePg`+(ki@9D0DIWPu-r|uf6W>-nq zGsvD5T~vbG(Bw=$5!>n7vpLEXAftTHAb?v)t@V9|OuZh?Op4&bfzXpXo;2V5ryI_C zlp6#ULtHk#yTJnnJ+_#pUl;{S-#pFQP=ptR+EpW=T2<79~&6@2^9gNVy6~HWr z7i~j)XdQpJu=6W2`%z8BFzf8!RaDlsa!tczcx~e*cTHG>2E`GVuqtu#qR5eP6r5Dz zW;gRD5Db_xVvVrZ!KMmWu{t9FPzsQ4F{N@dR4Iw7UyAeTk)n?61n~mc|wMWVnzV zBP)Y!RuiX#2-1TGghy5f_mFBdwbjz)A{e7ZSdM%Mj-2b+tB7r@=Y!aQO=ZCw3ILge z*t%!Z5|hmk8Vi;ljLcJ%cdTl_h)ActVuCy@1Oqvc{?;o$Ksu~f;w=IMt?eW+R;3+<1!{0Q7>+cjP<&&+iO!v>o!4*=dG5Fa(`APg~$1g4JDVhOqT1;Yk zPbP4q*HhF8VM)t;gb_oAJasYt zXeMDC#3=blmh5|JZS+D;eYNicJQB$g_^JoCF57gkbWH!cBfRTW!&7+B<9#{#H1!HL zu~)mN^dk5rm#r{Um`!hR#Rn?_wFYH6sAL*o8u zbJsTT|2cRcS7Q5lok@40`#u>_)S?i|YMID9BV|!=RRRzRr0+>JsFq>lk!}HIEmSCx zhln}HhOoTrv8=NAyQ@yAlOxIkl@UWFD9rf#5U5vr(sYjZT2ZPdBVWA&iHF2{_tj!& zZU)Eksx*_aCCoaXO?tJA8=UTJZy0>$z;B!lx(+vauw;A;OHo9lW3U*};rt~Q*vXWs z8xK{-?oTkL9bHE7#Aq`gw) 
zdS6I_=6&z4135)m5W+cb11+>mK;wV_^hmar0O^^~i!03C{#JWNHTSBOxgK3VrFyj) zffu#7hCcfu%Ie;l?ZTnu(mh)TE3207LkUQt23U1C*!(=HlBY}kq7#QN;^L;l^Ed_S z$6qzCxK+?tRA`<^V8`!lGEuwSR)wQCS~o$(9CT+;<3u7j&hHTH@frI30N!*ET)1fo z?%nO^%phnqHGDADKeM~WQT-OaMcO-r5=omZnZt6T#`1bjwm|ZushL5I>ZuQCv|G6; zKHf;a5+42AQ-r}8sP%ZQa8x()6c>giVa>FiFp8v6B8rFjK`i<4RI&4R3(E1Y z+Fkln5sUyyyR!gnElHLqzIP_qdbzx+ShZ%qZKFS!t7d5d2926^PcnXoyXvA<)h(xh zRJ$9o3;5UfL<7BkZE*+R0ek;BnL~b`JfoGhz_1YLth^%{4mEJ)JeC6qr<<41C8Xhk z`1=)WNZB*B)A2ZVgCk{uTX!sxK{eDg>ZY=PHEsDxNyXxVwL*(SH4qY@a>vf9)pJDU zR3jjKd{LweWiS>N1hY$mGz7tcLvttXcpnrg2pXWVZyb~a3e*repacmAYxEMTyUXNt z{QLs-e&*xnM79pe8G@R1E7g3I-`l-<| z&r;QrV$O1|OH<-q+27U!9*$bMkN`kvKwdRAg>j-yv#d2-CiHFABQe)7r>%5G@6PaC zt1nMKXARR!a0sC>mZknr57MxUp%3v->`Lx6r`lEKy@cK0%b9AkxlUwE4BNLyX2049 za;!Zg)8~xT)6J_i^!G$bCy_*{ose4FG!Xv+mhI0 zWB@omd#^*k?dgZ|U+^<$c64j|ZhycDs-u={IWZ*}Ae%U9jfkzoH8VNssiH9~Rdncf zE!oNTuy%&|PVK3Eh*)47HjeK{nD2VQ$4qUaLB4Z0hSgoFPx2wC!z=Y?MUEi5%_Qp9 z-PYv$-sojf!$1{^-ew{ECRsC2O`2NKg4s1z>pr--Ycj3ke|W)hh1?uF{>Z?;MAeg{a3uRh`@qYD{L5&JgpbGNoG~`euzW#qed(}>kpg5d zvwuG=9w^qS02hzmONZJw%p$iV!u5SR4jKAr*&la{WVPRg_W0B|V(3NWhOg}Xvpi&= zV|e?ub4ua;6?;dukcQ9jw%`gQo?*Qgg}D{i8_uRD8SSqM*^`GJY?E``vX!aU4aBNj zm%K8RI^zzdDDTgh2{u-}GM^n{RJSY`-FLIbId-j)c3=jtiS^$?7phm#FsQgslCp4fMpG?4%>6LXD+t zGki2-x??Yvc&2JJC&5&I<|<8*?|C39Xn0nD#lEb(T)*ri%Xr;q1BB}M(~U0ZKp0V< zRP+=$8!X{e?#-Hj7-N;afd_$Xwr;Gwa)=lu^$@3fBm=_t zZk!le#RKP_L=dt=@EL_H$x!pQdvEK7mF#>1n&Hw#sY+Ed+WwTQh0%x^kn962Niu=x z2Tfd*_BE`iQ0MYCca;lBhk|%}klpH6z6T!8{_4O%i=~W=_((Hm_Fis$QRJuAZj`C4 zdZ7U6LZS3RXP-{S;dJ-}6JWZL=Sct5Ze29egVv|pX`)W5AI)N6eI0>F_%30KpVgZO zFZKS5`-gZ>;Lj1rG{*oZm-I6A0gkNe>=942fZfr*^<92ojgg%HCStla1E|rTz*1~4 z!+@%FNwFF;0Mn@K^+^w=o@sm#h1J1X zNBox8oq5IfE=p?}59tlR1IOdE8nlqz?2I6y3|@dw{P05-2f$MRHYCUkt6-pZEs^k( z$lW^g$@7v9M}`F7_|>n_?;{n=umjtjCc}D8OzoEBxcv-e?Lzmxef+vBOdHH?WlqoPP7nQZEMAl&Qx_D~Huk}Y(b!&t zk)B7o_b#j6>_^Lmju0UZ*1Pr95dFogBI`}BQ-(yYxEt+J=e*_%i$V_`pI|4a8tf?I zp#sba7a;8?0EWKJsw1)tJd%g!#dmJEZBzpVO4)!Ak9qW%-ycEfJt~AYQv7`$>Xt)s ze>GSgS=2qy6<6n~B}j7&%#HdFK||oV8sQujdZ|H_xog3jR4Fzp*S|@)%8V^oiysYz z$Gj`)qP!*YA*hY6G^@Y{FTPj(o!*k!GMYnV4h9fBxc}5CG|wG@y)aJf@?^SM4YCLd zE4sq>agjmhkJvAS;M*o-vokI_`QHSwWVMgVvi)n(762k@MwrT2@Oa!;p%m5Y26#cB zdZEvBQ@=3WH;H6~1;ZCn3AMOJNF+;nc?uB8k|c14f|^V3pgJQnre3juNR8QF9>za| z2-(4EjpGxqQbn&mVAiCbo7H67Qy77oHQ@1c_T#Dqdn`hlbAQP7oxt9qYkU^h2GX+Z zYmi4%-Md``vTa^36k~711J(c*A+?L#t=b?L8zeTrx}&Sv5m_5hQXaL!Eu!YD^Lbua zNF&NHG+jB+@2gX{A(3%LEO8I^8((V8u<^F_P|-GaJ2zv$B0#`pI{`@9lKn@hKcTW% zcX!M~LGaEPjF40QD$iofMEWobn1xCRGm(s2e0AL!;zg^=o~abmid{3P545+S(%@DQMkw5Slq_X2H^MY*KnF`j38tzhoDm#*;RnOQ z*?sr`00p-}pV~zgEr0cjOpR>wmNlyBn=Lgrc_7{%r&JRQD_ zx?i^>QUMGLma`urX7SuLy6?4;PC`wP=IDT&qMptQddd*X5(V$Ili~uLb9lGp7H4KW zT!ZMBs=G37g^ZA-e#ny=F|a+__ovr@NxLnQ?)D4!cEYMwYF-3jrV{gz{$t8nL23V0;g z(|5N*L^;gD!OHrfJv-;<&a27Y)H@>u(J0&aO=5s6sG6t+2VC=NWIP8hV8Ci4B z++wPS;SIEBjz%X#=E2TbN$YUq?qj2JSn-6uoEC9UF4J!G-RI|61Ci19Lu!+yT*^Xj zO1A84O~i6sLLiPFzr!3ytPl>+XF^p;lL6((;jv$T+Bj@VLtniCN4eZAZweB4aF^R$ z1h+eEY}HZVI(n5n*+mww>22x9h`bP-yqAzvN`S&f5!4)L)Z=071(Ml{hI94naH7pQF)bsN^1Wnq0SM*C+NoSh;}LvWhvki9UHJgNn7oejX2 zk|}T1NEzW>^mzjvPlBr*{a6_21_!)SHZK#n46-ye(V5=+X&Wpd62%U6w42(eMfG8x zAJE7i5EU+AjJkkewNxR662()ql(8_~K)dP`R}d5NRydcI{`$nzl>W+lEMeUW=&2hP zcPH1OwA89(ZO_w%$hiFNAMy*Tqrb^lxBMu~SU$IFY1(yJH%hc9aRW3lpe9!gEI#Mp z4E^YNCw@8PXFv?FAH@xJysJc%$m+v`aToNurm_q0_th@}{_N5lL%P&A{>+=+*(!i* z%B8(=AK3C;3`Jw#X22sgQrIrL^(J<%%P9Q)N7PEnZu2*Xu-~Rm2epDU=&Q8cCv;1?UMAQTW|Y%j`UspO=U7|hT$Q1B zK{nq}U1>E1BT!H&65W{Oj&&U41lL*qDH&pN<@Klp+rk?6UvCb$AxpUWU@BK2yN*%o 
z^-t-KW#Gt6^F!+R?h%-*sHQRBj2sSG46O#!1!jX3Ml!^*a$vWjb$5cH{D#(7+067WIxaH=+^WG3g!4Thfo^5t9;Ke=w#Pq(rRY|i^Ks@EXk2x9f* zW5siJM5u`R5Xc1?3J$`^HK$GLUcKa#}Xx3~t&AOXlEF=8H}h*C$xs2z#O zu3-$lAp)p#yrHA`&n!A{#}45+WZd(mONqwHP&dPuqPb5R(ihQi;sWNlw3R+8ym!bw z&umctXPI+06zTxBqS%;h_Vo7ED+;QCD*>>t!mggQowO~gUm)?x%bDh7P*Y~54d66n zylW~6>tvdfVbt+gEB8vXjLKYxL<_}*YaZ@hbtgp71n`u4v(f(poaXx0Ex95$aLi|r zC8SbQEQ~6@W=KcqALpe;>(z#!haE}&l7s;d*_!u}0%#w1a^kdX67b35mfq&K5I2dw zX#xt4i*~T1Pr;oN<3891^gmJVDnsxplBR|KDvw(cC~VdjxRv($0J>SE1AEBsX(L*^){hYoYcoHi zld<*r>&;q0(c1M4TF}%HDe@_|Lwu4whqnm-S{DyAlO5%PcE9dww}k=+O95rN!^dE2 znn==<&6`xqi1;=Pw$HgM^taPHKW2tQ47Jj$axTlzKmG--W~46aqBsS2NGN)CjMuO3 zy`;Z1RjCbudgK90J#~f~o7akd5ceVQVC;`j9XOI46~^>nG=P-k28);9kDoE)a(2FQ zmk9{LNB0ff$&jAsj2{7s{BAvwJ%fngLzzJ zpnw>i^}z+q0WVc-qf31AuukRUbp@fk)B1m>Op?6%<;Lv=Z(X+rPG3b*dTmNI6j};` zCcjOpJu{n|4P>6u)U`pgAp&4_-S$L@{D>;SX|cE4&opNkR`v#4h!28a z7;L5rIaI>gdCp^20009f0iXM7N&oqdz27^VSJ(zda~}dpV~h#VsPZih(?H)Bs8|Q1 zyKuV#S6}ev292jea zn#pQ#4N%{s|08c)YO|#7Cj>WAgBRIJ{OI*czSF&Iq#?G2dM+j!Z9~Dm$?6TsQ_+cg1^>#b++_&#a%^r8yOt7(Csa*RkLOhlvAKH z&`@HOq?`(9xs)nJ_{jG*!lx!J>H-*Zi?=h11Zon+>Y3#U+Pgi5qKAV_U^nAPQ%JBL zF+=~aoX5|7b@E9b1qOZw$CFyP{^xMYkQ6%#qMp8h9#Ft|^$n)}iX-)t8~j_S-M2MC znaLpJ8==k7x4Kmu_$U*^8&P(Mezg?&%)7p>q)BSL=zaF|;vrRE3ke%Y!d#JW2!7%@RHe~sgkYZ;cs4`_RE0&5QHWltpd!;R+qsfKlpTT#dCk-|K0ak=dw z;h9KFy5YwX4;H9-5`+mSiqfmH*0tdMSTQ(yQ;kZvtrtT-vt)dl!R2(nkat9>wC{)4 zFUeELHMfq#-S`tTjkmRTl`$q?1BjlI7Xrn0Oo7tETD zQRSEY-n1&F^hR-T*cqBKY#nEdKMvOm+*SyAPD}bC==)~g$!BSZbJH9}sS6i8A_uS# zJIo^g*|fpXf_VzYP`bITfls!wX+<^O`7E;U9Iga>;&*hnhk1fH0`aGGPI{&_GcYnh zr&V~oO638=o8CZOSS90m(zV>+G6=*2ncB)*%P&BQg_4Gdb|u`pVI8`gVJi*D#jHGkv7)ozGx?lTkzt zqD6B9?C7a+f~;sDr~HKt@K*Y1)>x1$w%X3V1ZfOP-Xx!mg?fG(c6(UC23xM(9a;)H z&G8f{npSf}+uI<3DBpKn&QU2H{u;{$16iau_V5-JWKprU?p8Ozq$t-%k}iT&v)H(} zoLJj}1c{f35HqQ?>{FMt9>OY&iY-4QOqzS+Fk&M1q;L0(l9)cIYq#5Ip`-x_-yvl= zRTcJ6x%OZ(4xeBFMpf%E1d2))Xx&wn3jlGB?F#VhlL=U}yH_TjC7dm7^xzh~&Q0Zl zzvEjW2E$r0@6E9L3>@)rw`jwWRm@S(Z3t-j)W#FwQs+HED& zmD9&pLmbtzep9i`Ni<`P_2>~ccmcF`wZfw!qvP9faOlwA?sF+5)Px@Q0s707cv@O7 zfjal6Ppf8jCxk7+%$QmEpbR@&=N0z(UYU9l2o8)a;IY-fk_00TH3uZJ0{v|Fxj zn6iMNJR9Z_-92UB$ZlHzwm-h5-sHxW6;d6F+wc@u#L0jDftRNtc(!F-t$4Uj-26@A zLx4-xOhHk)fno5hIw9@<9Fq-ZPVJ&@mG4q0zPt(#<1c5JXhDmWf(fv7=bm51)Xapa z-C>f>M^4FzlfJe^_WM?5rJW_Rer>xBbl^4+0Uwgh-+4P{6#>ZA_F&2om!J`Ya#W}d zL_cKITbyxVTYp0sGXrcXeQMJ`zz7=|1Cmkey5vmGLJ!VXS%I1#?ec1iYe9jSeW!@i z0IoN4x<2_o#(pNFO%@%NbSufyeI)|ZMiX)0+`|U=(_fdu2u%Ozb zub*j8-B)VWWZ=5VBWhNIOBXU^jxFS7W2EEGyvG0V*SfS)OfZ}Llv5S2_@i%RhtkfKmJv@+*|w$KhD2`_cjEn_hLa6Z%j zz~K9pn$KY@HEtvk0f)89>P|gpY#BxFL1VYoxBNL*R2RblU2O*Qo``B;@E?nN24=jd zysGCU-g-R8SW)il9Jd5YeWUj@k~keqYL#buGf@9d%r}KpV*qEMxI9mr*~g6KP|6A zE_#fOGKE0%>rkEU(6{x%JaqNpCArw-^7{KuIe@I8@1!9%m{J{HmgI{}D=sezOuiyO zo||lvS5TVC$Tt>BsdBG$)5Z&1I5iFsTOCm(2SCEP4%AtBk*ErCl=s&R1Y7%i!-R6_ zsV`o@(SMZx3_C@Pue9zqPz{_JQg^v9lyB7EE->mKrI<%LuMAxkc7K}P#-XOt+z}bO z1$C5rzE<7yt3mVO_PhR<3TqM}WP9z*mCButcPz_=@hS!M$vj60%VW^yQTiHOR_MuEH$1~GXDcyvzV_xe_1{W zp#08iY|$=E4dYy@2K^y&vM-~W>o>WL^dcJynia~?rY1a?ZQhB|YN#9^CXqW56JADv z-39qyIYyE*)+9rR+h z;J@1aNDFMY1&G~Y*1aihp>zb%uI?5(u8;UpJ^ao{}4+6d2a6^2)8FU5xr~Tl@wY3V1rR53BGNBjgOOb@6y^oa?iJ zE%)`iZzboJH&ZFt+=}xVH~v@k>pikWg?bpA8%=2ZG^tyyiO&GZxk8+TE%dXzdpyu( zj9wAJyxg3MT8G3CjVMkxzp{EUcQ2bKE>9_j>AB@xNPxotl`RZHE08ieKJkSq5Y4yt z*De>{Ha4xxiav7fysDjomyYlo0KQY}+m3hg!A!I8Q;_oM4Y=v|6l&Muzc5xEOkr(? 
zhcszEj7Lq}cN4a#*DwrB#wd6?okzd{Eyoxq9sW}8IGTC8Q&0-e%Qof)$xZ57JU@Tk zs1;XU1mt^0BE6OR_7+S-OgNFMQc8G|X+OB>Xx_rs6j0McDg_2)0%m4_tBNljClEq` zy-%cg6rGcOh>mn?+U2EZjn~o}ga;qU8OEMCG3+#{i0*P4;WiSb=wW*L*;{g-iv`S) z$T4a+y!d*v=(D|P&uFl7_C_;#avom{!yLnfw`L^0pix9wWv_mkYxj}DaO~`>G;4u> zupPz0uhp7`1J$~W)dcx3by$1d<8`C5iImlpS(n`~zUYgIf2xV4n%5kX}Se37_ z@ko%4u@ne~y3Gmw+JywqPxp=`+IFnE;yWFNp7Umiho^Awk=Mve!UJd%yrE6M(}zk@ zNigUyV3sSNG`9YH|4@mHIpFVdpP<)Va)&d!4JHVeD5ZQ^r{^YE*A88;sOqZv5H>}j}9VkVet zYd}nLu;(w7?Kr}XgO`&zZq3zAP-;~y8`)uaoWtG|OuJRj5X#?x9_Fne1p+1s zSd2h4+1aFpL)70%b#q`gFt4TGaA{A)O2Eq$m1OfSc@PFUUMlCXX+=N{g6ZLLgfofS zPmbm2iao8ZEa;iyPZN**5gz+ee++W>^h;}hh}l`MxTx7m(af)E9xGDu2I>1DAr9Z! z?KbKtE4@p>GzQ|Wa04*?j14V{CwtyJ->r-dOQaeEl`NIOF_IKC*QKO;v$l4ePUnF+ zM}8hZhy9t9O<*@`2SL&Vi(7>cjz~(=PW8o9mz%@1(!=I01T(GkNFn*;JMD6CLGY=E z;8QXAzfiqF0`p0RwDk}q<^uk1`!s6TD1fp1>KYjKnhR$TDlIq!Pl~1wAW>xUgG<00 zOIKWCaCNN`#bX%NF9)wcM<=KJvI)7LxiILbm7QKMV!@i6v0xC#??d{{0s@N*5=fHJ z-uvgy`3e)KtXTq=eh&ZzGaeml<5&oqjXH3hAHD;%}>XJ-p zzKNs;6W8|F9K1@=kN{V%YjR4LaDK)oU?|miYhGyBqwGU`_5%Q>AOTeObOZD{;Z2Y?%1JDGQew9ak!Z%MYsmFZIA_T;2eCgCV&}0?v#f9Jpcd+5J8?EMHMgq`few1uLt%Mu-HYY)Qj*4eWl}P>Yp4(j-e1(t~m=Yu=&pG zG8TPw3Q^{F2ne3%zfhfL1QBcQul_p*t&|m{;av zQHNb83OXX^OzrbP)L$fp-4gt-I(d~aC~9{W#PazBS(*3%e7ShNziOBm!gY2_rbvXJ zBB?{Bqj@h9IgUOzYXq}5lzjYoem>wgCw?7Z{ftF;O8cRo>*hLJQ^r5*M2ngR{c08( z&#Ie>YYim!uv}Pi=20ubA`2GET@$^PlvclE+wppfpI$s)utX(BaR>U8ozgMt_slQ} zLTcmmv$VTWJ8SDNfFvj@W~ssKY@QGH{4x21Q3GW8p&Or2Suxx}46qZsp;1dziD6T+ zGjv^-WKAI!p#QC&+g+sSf_xESL4S-j#k~6wo3h;8pAP7-!w3$%_yhSwe; zm{e^v-?z1%cLGW{+qCtk%4q~eJdl))#F@D$&6X|O4l0KomJ7q zjEZRrj@k3k^k(gdM+fJxpR@Xve0|_*FO6L**(mN#z17eu57i?@VI1X@M!wlbD(;L> z5!ETguC<%;S~QbaV#q-63@a4El@!$g-hICMRWa0vDo?&+Z9LHDWW(+y;;`_a*GQ|@xM{;dDd%kSF zF(mN&e=iI|q}Ekf7W{I|i_LxqO3d^Df&epr(BNKS0}k1a`Zq(ydC$WAUn4uVcI>T3 zD_Ge#t8o*)bcsWJkGXMs;nj+VEViX*kdVD@6^&EH?3)vzbMh>!(C4RIx4wh&+7eBJ^L$1*0is2j zikX2Gh2)3WGP%XF_Fn7>794i2)DIv%xy#F-+uxF`Y_gu`{^`9nZ<{^DzCYTncl=Ks06 zZUr$x#o$jOwJXJJ;iyFG8_HXka(JH3YL#Y*w2o(-2rgwT3S79$US|*K|4=U1pZtYc z7Tn;@4e9e6b(bmiV!vDW-rpe2)OQYblu?goF@$#u*v(5PZIx)6-)Zq_+oEkElOGDd zz#8_w4mqK@dvzDp0L~wN{!s?^SJh6DPrNbeEBD4L-W)SnVPrC)`|M-qhQd`tD6&^4>bAXtncmAP8py*T8RNZ+OgG!$d&)sbLt_1k}Gcc{k(b_|Fx7<1xYm-6ZTFXEJY2Av(CVvS@C$ITI6eM*}+*(LDkJhtp z_eOD%*TSM>B|Wr8%&T$0{CyzRq+xbB!yZ8=#?l^^u7ylP{tt-gb8|tdT^yGTkkeCt z875AiphgROt@s@S=2y4IAaVCG^CG0OkjdU+P}vMvEx1JY3ZR5x@%8`?@6xxKsYId-R_p1#7XLQf54jw&HFCV-qGMx+t; z!*XPGwOW9#Fu6a?h*&;vFvn5bc9B=ta{^NYIx)W0->W<_s1;6>M!fCXEQEj_N{+^F{5PB za%6MbeKbKCo;kbvDsm_}!>+;>txQ&a@TA1%%ZFES23^laJM0F9E+B$Peq@RnUo^^r zF`6NS%E$oi?IKvO06eB?Y3fH$k$@zqhrd1RD)klNMOUY)9K?iDOs$2NnMP0h z3n&N6cLKA<+#-7{chu;@oxpZ*{m%-r|1z7~Q{`lx^ERw$IzvX}I5Ja&94e3qk{*(g z;(S8%>wz6ZJK9XEI4_RHA6oM6{uMiz*2-HtYDtcGS$*}9;YM_jq|uAeXqxb~k3RrQ zK(xQ_$n4C9*`@YGHeRBzdHc4Oz{??d$@Vd~oo@(XB-VZcZ?gYNj*DWy+O8fSHX`Fv zyPX?DpDAo)-?+r$mI&07f>?WLoNoBL$l<<%L4oAjo<>4Hslso+Q4(vDo(57{j?-*? 
zbmRZ6A%1!=+l$8xa4(!maSUTmeh1;&#Bt3fU_D8x7)dkK53<7Yn}&odq@|AuM{|5t zW{1?ar!L?Lu0aUg(LjYe)Tq*PH~8WhYcmOzk`v^1*?zW-Gs&N@f8I03y-v4shYdX4 zoaR9q+f`ShZw}t96!tUlm^*jnP;Q+eM{0rrp4XRuMujo}bo-tt67Nm+{&IfbWavBS z>u|&xs%5UEz){e%&Epy#`&YdETYT7jH2jj!xKD{Nrp^yj@b18?ha<}JuttDOhd5*dx+`m!vwcC~MivzK zTP`wtMJk--R;?h^-%DLx#vQ8CxrN0?ohTmJyg6BV@olye*?sxh0_}my_TMvEhaB5I zo`6~(jBACKV2GN zcS55PZ2bM`>U@KIvQ5F(VDrxRU`yHnu=YbFLQ<@)-rIU^hipUimcHzTH^24|1^HI& zXPUFpQ9)F1)r{|mD7UkpS=dP|9V3jwpkZod^tn27TJNAOz}usJ>UzbUs$*R z)ZmhyMQ}WYbsrT7HB&x&Hy(9uX}@5eQ=jk^4LZ=2ar+Xq>W2f9KXNu=w@BxpJv_Wn zlv^HN<0$hCrysFO10SG5+r(#I=T>oknCLMFfpUN%REZGaHG{I#mVy58D~*sj;$2qj zOxE|%u}Fe50teoZ*ITMjd*9;*5M$Q84UsILLa(*=9xqy!i_&7!hdCD(*)* zMp%37?~T1Bkrgb#h5!_b^V)X}mx)Gw@gix1HU?JrmJc|8MW*;Ejc%eR@@*r#gZB^` zD6fgwASdb2r@O>A{r^Bf-nyJ7cR(jP@>NX8hZnmgh4Erd$fDAcovS8mUOu)=K~;cJ zbw1b51j4p^CXsER(F+-pNzj4BzQMkh8U&6GmHB;?;g7z^A%@Lx*#r=yg+xw4-#QD? zl)Lskh~U>2UED+B_-SFYwbLSr(PRj`@Mc7?*u6O;1ibIOcBX;1dpS#_9QCM-fZ5QY z3w`HU0K)->5aUjAuT8z{E8?yozm{$zX)n91SY3{a+n6OsGYTceFbr75SO zF9Uz^E!09Mz?s&p4f1ZX!v-fHszOtocCw3qd7RYQD`lWDtG6XVn>eRij9m%)fZ%yM zlBW3khDj6fMJHXW=rm4&{{4bybBvC!Zr}*DoWa1vIm1q2Z5cR>skh1?)uMwftsm2X zGwA!<)_5f8ywNu4xwhQ_M%tQ1)mjuE)i8D#0`Y~*V~GNvUL|aJI^BL={d0W&lXU1$ z%psVejhq;jUzZW{qTG!GoKZg`N;i9?VUSx3;U0j&ze}A%`Y=Giob@@H8C;vgw`&EEbvGic4w?v0j@<0Ab>ifE9DO8A>IB#+1z0r@vQ1oe zGHx#lg6`ulhKoIsO=Hn=tp-CKuso3!Nt%(qfTvXP&%>X^T&bh`H|MptqYUHmh7jZm zFvvC6M9h~fM000C50iHi?PKf=ab5))~k(F$759Er-ST4PX&fN+Gvt4617<` zbV`Boa&Vx$>wW@C)Xj%4h&~2mYHCF7rUp+08VGrw(Vaf& zYNfHc2^473^PO48xlBgQ0?m-F7xPsO$IGLpLtd z!{K^#d=(cEh+MsyXVKqoXwR(U+9LMyB>KsZl%)>?`R)vTS^=r&_Y*j54iL4J=Lyt2 zwb=ShM_oSk4f&%7y&a2X#1pTUcd9Dae=~ z;~>~MD1X4Pr}eK9FjilqF<~xQHnqrkM2F4UE_gkxtvUO&O+EGGYPsf}{b;+C9LBln zQ29JPmLAKmaj!WML-|v_WT*unJuUCan4#_Fqad=wUzIw*CQj#PYsV>y#Z0ou=X0ei z0);Qwfmp|F(3~+l!-=M*)K1?|dU>yT*>gmov%df)YK_*C?swOD(49335eG0wPgk(> z>D%;5rO}DfC*JzzZt*L?ds3RjXw_zQA$4s~Z|jP~>dC!TliT!c%F%9QwNJ>v7# zE)_>V@97>??ikgqk~vc`!y|>BGaaBsGepj z`BX1d=o0>fO=1Rs{_=~k6_?0SHt`XFzqm51os*~YG8!odrZyJCITxx*GjO8Ov7{-N zRg3G7jSrH&W%izS2g)=BhhjMUElkRZdM)+)HX9tBdc&+z`g1DF1mstOc^HjRGX%!a z)8RAjas|Y2{ZbaJupnC?DpT;HHrVYe^NcMWwmn6UR{pooHoCN|NUW)sIvlcGqOE!BT2wLcAx3HDw zV4yTPa(Q*lr!--qF*ss|ZvRo@Bao|1?q9ELaBHo`$G0xx;rLBZ%y;q+h4o;Oi<=TmyH)3$L?RJmwDru=#sL#kQ|npd`m?FRU>`IC9? zizpi1w>*u=hf`3+zf+?`VdUNVDEZcOBw04~Gti(lopt90baSchZ^QJe@X>mtyK9e0 zU}QunsLG201jM2GzW8~7wlbS|IpzvPpOX~SxEr}Wof01;?ELoyOMMp}fzMfwS|!!@ zYVUIKP#YGG|4Ti6MFK7^W8FXMxyH)XgW(Za+a9>u#)!r_!N2A= zbXhhSgtLj!7wTIm<+NAaXKGu&`UXwtPVSZoR);Azrp%dn!o%z#gEDgtqb}U(H}8d3 z>7aiPhTpO+g8F`_tB`jo&ZI}NrjNdBjMf6_fU++**R#O1t2h~^!>3C5DS|^kUZVDG4gyO2D?~X#O9o(N#o)pS;mP`q*zYtX zt73$leZ1!d7re~R#xOLAzV6pmMGvXP^klsYr0c0OIk9_=N`-Hy0K;<2=;70nMowN2 ziKdv@lN@Lb5hVeh5dgi3Wqa-21$d#J%rq~Z}3)EIMZB@yIqg~ zJA8;Cpbhkw8nvw`FtrDXVI@7wP4$acwRFV}KSu3KUoe`}c=kX^TZhpGg~wL~#tY#W zhJ5?kyKh%mSEd*PIjvyy4YgB6hp6&GoR^WsR*=Z~E9F#h6a3e9%WqQrASZ@V#nYHLXh2)V)_2;dhKCprIYy~4O&`+E&j@Qzi!7WU<2h6cq%4b-|jQ~JFqNIVgINj-5<^1EhqIQ~&y+tglwF{X3xrshE% z4%?b14M98di8k7*NA|P>vZ_l5O0#ZZJgBxn3xZCxDJ^`3qVya!CS3`GR+3i=g89J^ zZo_*ygi($eT(j8j2w~3Tg4ZIh`ygZr@x?Qbpx9mbm!duF4Ichq(5Y`4RSk|!uX*&v zKIcz&47MUx$G%$;0Dm-i;xoh`BiECn(Cb!-Ruaul3U zkeR~Rq*SW1)L28?EGKCM{`)yxgTq?~Q4X@XJ3sd3v;^AEj(h19NHrZCE<7Oj-91~v z+LemAuu6%$EqP%0On?ZJ`V3QMh{+FTjoow&u?+K>6hH&OlD1s$+YUF0xK-R|W1cYC ztK#;M>?!X>gA?tp>+52aK&AKGDA6^IimH)^E=? 
zP7y!XH885T!+5My=YIJTNKIf@AwWC%#8(fMmWQQ%;Z3umbWnP}X4N`Q)AtcNb$v&_{r;N%>4Q zeVl8yD%K6@J-W2?n^Sy`ZoV?d$XDoF$IGM0Zhz%2T-7zHdYQ}w+@%(D5ST|_>%6#s zlkf)fj4*`re?C2n)3HTf;Vrnb7w*KZBFw22e>J8M#|e106{DeOHSV<;0{>!;gTV9^ z*i&alvDsy>*7R4$fXrU=u+TG~4eCQcNASYaCyQq`@D=7cDj{w6nvufcoJG`>Go;w7 zU$NstKUE4RE*6hgK}vhI7EX_z1oF1R*_LTziV&W1@c?cr?Kth#p9saF|8Q!1#1Pxr zyUjYQ;2Cn%I%Hr>P}E$?s`+Wf^)g|W>9I9KNQB9xzS*dMOOe@)R{^NVhN+pyzq+DnA=*k6Wvsfa9HtdREbdMyng-5J1!haFFEaXBg@h2z3>c=v&ZZ2Exhd7 zrQ26w34UA6J3$~a*@mptfIr9MONiZNjBQf%k{TNlkd#ttSF}{mYS0iKN!(+RVj;*8 zjcU>BB8bJV;Im({1LaSbf6U$gWRmg^<%9xec(Jl29|Iz~u2})u%g6sOmMjaT_9;dP z&4B(#=hmzd*naByOtWCce%FI{$BEWr3L_yDMb}O*tZvP+(3}~TZi@;Is5f%{&~4rk zkdfOAvq)a<{zsmg;MSj8NzmaNyA4Cy+uG7XMe_~5xYovw_l7(9kmdP>9e9u4g0=C- zt280ls~V*r4!K(==TkhOQ5S_YUvjQB!+H$YlPK|HCfi>}>k`nY2FLhArhmsGX8kVB zw%u*hOB|T>xM{bFhap9kv=)lwas}JM=G$ujZVwuu`c!h(ddPFsr8VaCsc1q5J$X zc9iXYktxoEz{0fPi5hL)Z&noSl@y69Lmo&~89Jk9g;v zxSMUz0MdqvN|fA4+Q@zfkkW0|c|^0{{K6@OzP?p6tP0cFP-fN^)UAwiO4&viKVc&ku36Y01%AFelfCd-2qtMiGwSc#L8 zXCD`^_6Xy6Fradhmlz~9Z7hRrvUrIdcx+d3GE(-TM5vOMAyy4Vhnf&PH zd64WHbbblbKM#lEs;hx&SF0AIpu^xX77`yZ9mDE8d4oi4KeZ|acDBPLCR3)*P0pkGzzQOn9EGCD8GP|PpxOrta(LbqceJYjOnF5Q zYbu@0qriKIsL7kSMY3jVTBt9)hVFTk-DhC!!MXi*NrhC)4cHvWnau{`%u|E(UQ!9A zrb{O36^J;Kc;8q%`JIYW%|)hM+G0$5({7_gAK?%9(=2^1*oebrICfixtv2T6F>>@+n7L%$te~Fk4*x!5sEQD0VSJMJFy0Y?`jk|L?HUy}M|hb#G&nkX@_2jk*4W&=MjT4xU#;;|DIiz1Fn}`@#h6;4>-V#4 zEpe5O@I?7dwDVpGk-t*t_G2k}x?Kn9i^0jO{e6dw4H348{HH}de)q>CeO`gNpA8a0 zZycO3c#n{8uh%W9>sQi|`ERu-QO+cyc=1Ph(v?<-Bj>A*eXv~8toYuXJu;M25UJK8 zkWevqnKargrWh#;zEc3>D1Gi|DE1#g<^+C^jiKs^L}v#&9uewFmOvzslBsiuF^Q#Z zS)ts)Qhm|AUp7ixVM1TGm=&1q2!_4+k*XZ|uKOrtE8hpUUtBUevWD`mT7daJT&C!RHT+Q zTacpVACmoZ+zEHPKV6x{`e?P=<&Jx1r;^Eo!|nfg(0WI#9D3r?&jG zu%ndjc=(wT4>u+}$j!uGKi!Pc@N2SCx06FqWuByDSM)jKl*AoZB-X9-9!$l;mL+x*hd@VquBVrft^%5P4h)IGSO5SAK>?mYYFE~Nw=Kt+Q^Z2H&*7~@ z4H2VZarg9pcBE*4l(u6mGNr4fc;gBB)p}74NNJp1vPG}M5dM6n(xMeh;MV(;Y-iRd zaJtDyxiY+TAiIK8=*s*_ykOTSoOeZZfcHF_&B9s`4dZ4MVz%vwt8SZ`TC2U<%Yywb z(t!7HpJjuz(N~(7`+J6nLe&?a5Y0@@r$wIZ?xhMi!hn6YnZ*up9$3w?Dic@UGGmW% z_?obuL|W^yxAh)FpAjXlOQ%IAVL~W0q66LJ8@esO4q+)5B=N#9hqFVna7mS_{*-F? 
[GIT binary patch: base85-encoded binary blob omitted — no recoverable text content]
zEF(ptLoW*)yJ4iQ60b;V$Bz$&!c?S8@!Akoo6M^Z(!KeRmg*M}uF1N^59ZpD-gMvl zj!i`2rie?#J>U^~E%ufTO<}Z)#-a*gVb{Qa`xnrQrvUsE7?q0Nkq zI!RXis!T*FwaE1d4UTj!I~XA5M?i7zA_Z*naAyiRZ7f1-NUOZ{1diXcmPctb?IK1# zU`iY_x!AHR^hZi}?9Cdp2%T?>^PY_pq7|b5N*ZNgFegQ6Gdc7x4Y&sL;-FLZ!Rj4&ov~IMsO9DV)VhvrB@Az|U($ z6_k|eYCQWn4R2wTAj$DtohK_lJPnsyi{x~$RP$?a<<0B}*gDq{>Pboo$j7+PY@sr9 z*>Pf@XHmIhHMVyOXSEUjvZ`cYoP!P#bkX{g4Ezt5QqRaoaqUOyiCHC=3_Pbt?mOVJ zM>K6(c5l^sz!|wscVL$cy`Svk1M{@)%6FmLIx0l$JjLjyG)0DYu>PLQx&_f4b}4NjatCw+vX98Hq#rAjc5w}$ z$z2b)TsL;3QWqrUKH_Rx?@W?l9#*X`lrBsDn6F6PeCMoqeapYCqyCm7DzM~CK>z>} z-9eiYNvJ_=nM??uZo-U527iq{N~T)1OHMo$pL!-iASHL6gZ&Lvy`wSOJ~Quf{yon!Q*e~ zMnJGR{jw%%Dw_pJoVI<<0pyw>lI8@<6ErSPs-qx27*gmM4EI1=obF|@lNG2^U3!}g zC5aVBoO)aTZXR?mC5`^omv5Cp25quAxc;rV^?k#=fa_3&P|2g!E2|TB5oczmVio^6a+K2mxDz z*9bh67_yy9le7o=-#-pd-$AKqY!B>1EW9P^%;`q)#ma; zP|lm#Nv#%$g!i-rIxZS!e)DaIaVoO1U7p~U@^`Mj9n$rt^FUT)Q@<*gU3(QEa{6!v1<_!6yiGhlKSihL%2 zm3@~q7^w@S8rCAX&*sz~lB8E4R04aQKG#;!;K${c+~j}6Iwr%fFF`e_+Y&t}3ltxM zum7N$WZ9L*`g*LrLyQD9bgeh!im1vU>s^qM?7*RMDwu>dkj6PXmZ6}x94VLSvNAGLx+5f*e;$6L8gUTuzl zIL#BO`0D*ikdBR}Q|OV8BJncOh-8o&QB2F+a)Q1kTGA}CiW z$`>n|N|8#J5Ca=iWYvRPl*#I!@cZdm zYxfp2)A1k2wq^rF-<=4dHZ-)GbC-8fRNjE5lj4fNVbNcrK!V)cuMr6fA=sPmrWAE0 zY_}kZh;nt3^n9Cc`#=jt&Gon$im-Px!_Lj`4W`5M;D$$E^#-6X#df$QPXjmD^=^Yu zD(k2rdyJyE6CoUD)j71#%x|kzcP#Yl{u{Y(u~HLaaKNu#>E)vX6G#{H@J(r!+je1jW zL?W3vbH9D``KXsJXoJd_E-u# zDRG4j1h7AciS{6duJEZ^8E@Xew?KtCGreeOVBGT~hFOnak|Vn9pD?u|EV*Il{7Nlx zWmd={_$7GEsRHIa!?LTFF`jhW1n+JOm%_%D&YUEgULXxw-%Vx^hQw#CzBg2xPV{zL zxo5!GLJ9U}7d^wR4i?fzy!9BcN#GjCj#=&qTMJ zy>}!@A$bekuHgj4%h`3u)y}sqjFH;9N;|^75cOHv9N(=Baf9}BtH+2>u;o*zG3d@t z79PO1SKOj5O97Y6!l;;x{}a5|+{P~VVe|@3=wMLx00tKQ>A^DuH(ll*GH~L z0%n`#HFtXmPN;6y*tS{AYupJ+pD|f_Jwp>K-xf-QqQBeZf^i9#9z5f@Jsk=KFyoqZ zQ7Mcw+mz6}o=S9d-qK;*l_5-HJ|VF^LX1hy!5#?`Y*iap(G|T=&`U(*QX1G$hoU#~ z0+owrFUt=yodvse1fDTf%pNPhB&ygCTp6dEDvVUGGg9pUs*$Ms>CZ&cwvGW-CBVPo z9h-}M%O!{gHq#H?#~(yp{*0^&*dt63(& z$Rc_BtgjyjyfNkLjm1$cf9OEQ^D$NR0z!s_f9fdNEUv0t5AZM=H1?1mv=b%lzoz=R z3QiQLf|%!X3=wTTC(?)aFO`ozsRu=c>}s-cvZbTmH7zIb+69aVk> zH4_?>?^ zCPMW^+AFsuN*f}wDpSm2(-jO5(zL~R5sRz~c-4;v4|E#NT?QPPlznPUB6Pn((N0o)lEh`)>L5IE;Lm$xqWW0{eBs-jSz^o!l>tO1#<}~ zCqAc7&BUT2_&bnG<*&XDk_#mDD3JfATIIZuxt&&Z&6a7A?(*_WH~6Pu$ut?MvUI9N zb_w%r?yvGqZ3+2qn;$1@r(iiJseC#b9SGuH^7liA$JWlo4fw^&&7CKNCGq?g02ns5 zAJmLP1NqgVdTy6%eG-QZBf`BSFJOv*XcSDIiDNHk4c-hNgp8*m~q z@0dGJ+OKI^HnS>Rrz4qaJP=C!6SJvgv^c(GqB1nUk)C5NT z>uYcr6(@VhGCikI$M0ckykBI&PoX;(-B$UXtvV$)st$Sn@#1#X>7A&3$hp zvy_4K_^CJ6%w6+1q9JeS`mllBszl^}#uB1_wb@<&wzcI3%GIY;M@Bx7!$*ZH@{^s1 zaGo$75nCBW|C2VO+-wEDdO|prOp(KZ{P*aQ(XPEk<$<l}oXPZdKA`{+$@%EG@)t)ud+|sa{T&#D6S?zhojUc%L|(dHy0Hk3 zsr#eKlq98n_ibf0`HhJbA#9??{)xel)C`Yc4z8TUDkTJHu6jeMTCn4Xp%PFl=q;C7TA`8LI|vGjUzGm9+9uqTU9~raJPZB&xw6v(sL7#KuMc+e?zs z3%dwVIxLw&0u`C1>cMBT9t~oo>P>3MjJ2!qK-Gk?zkNNKiu$xBy9Q zL@zKldKGp;OpeZ8vQ#dCS7;dz=oTkNB(zt`a4I{^DzcGE%fT~G5p4&_aQ=Ckp%*6< z)6kSUbfRQ&1NbY|lB>=30p_TaK65j@xuP`F$AOeH?vt^)HQwfwPE!^c_~1d;N77rs z95LobLkQte1f@;T3={0qZQ_IQyLV1(GJ_I4RHd$Dp7^9z7eJQ)7NC3ldpOkk?OWKK z=lWHfqr@zpvI*7@RH@=_xG+FQI)Utac`H zb!)ntB(bUCQm-}yErh|84}KDX(QHx7w70$@eMVKRw$UBWO@@z%Dl2KuhX1}iezOT( zox*cS2*m3FT?ZL)s(>N21DY+*pukh9z!ZLWO(#V+sOy7sU-V7vv0G3EmrH*nk8Y4? 
z_l3pz6zsHRM8U9>cU+~5-o|w-*C8BOrcJ%cw5(d&t!tRs9ZQY39Z(Cacz77Rj}zHt z)(}!D`O$tLHIh<~vedR%zBH_X;zH%x^pb@K5t=}pzPd(>os!UP0t2~$thdHt80Do( zxei!reLSjV)DA<`VT#mKInXAq!ig$+TM)d+9z?nA3fwSU|6C<=E$7I;M4lQ2wAmP9&&EqO5P2-M`Ypc-S8Ida5 zJ8_B!*y)xWYf2ls0YZU--tFweZ z`divp#v4Q&R~K*bpC|o*s4C-KA6&jmn8~XGq$kydj!pRXhU}Dh=-L;G|Jkya4Stig zbMtPR49coq*!=@AQpE|eJ7BPC7l>82?5qB(Id?VmlCiH5xwIUDJ`2uzo2lsRNo3s7 zi7vIOh*S;_by{7iF*AR-xjL*uk6G1}Sv4#5#ELR$G|M{(Y49;Qw#N|ua5>b=i~?Sr z-Gyfq@+<6!P?>1Ve>rXJgc9hrcostx~Z(*d*6ukFT`3bfSHgZQhX$DJ&K! ziuUX?ylZ^5LRGm%X~JNY=#pm_adTJyAAFmi$g3e`CFs!F4`b0gz-R-{+|;B)UdY-$ zbhdQ+olgT#89Y^;1g)K_&16^&diggkM@TrF4(!#(GRq?(G`3Sr^F1;j%=SO^8EI$JdTzNxn56#UBAaVe=Y_FG?){&YTLIoS<`sdZm|4f zK?Ppo7<%D++s}LaV~HfVD8X(bMNg+V{R)@TQaDnNt(Kq*{|4&B7+t!224eTMu{h3) z!?6?Oc({o%3=NVbK?SKB4zb1?#}yBi9)4RNqDIdQykaGhLn-Ui>a#m{0{!s#;_S4Z z9fg8BZ9P9`C=xr<+HE%6=Rk#J)*@GP$dh^;(FQYK@)FfW^G^sUnnc2{{+>4lJDLV! zE<726U$mt6jmJMp8vww(XVip-)7c_m*!;*GvNkQ0tDadki z+EPAy!wUJ%>YXQzV)PTc@>ZZZfiX?SyR8o2LJ~u)(oo6Yi@UO`m0Tanc-G{g^EM7t zB@A1Jm1f`X0A+`7s5P2PLZlxiK0J1tqez|_r$*Q@BU3fEO}{!2GOMgSa3a1HT+1_y z6Y3mmh{bD+YkP-jkauz+2`h{eOf1_LBXh2d*wDR)L5oIN-rtO4no?u;C5iDolbUH! z`$VnxcJyJ9TX-YtO^2=iFp;vnhPwW#ufRl(HEkn)ir6T7-Y4{?CCZh=$t;Q57EoA! zHgXfbzs!_&)@6tFou|JGTiCqHFb@`Kq(ksx9p+%?sd5dvg=K^FPu=JV)jpRM045acTQ2VuMtB zTSSUBeaCunH(o{2hAF?(f@Bl~_5p+_0R=bZlVPjeZ+WZz$TpmSzzyZU8esqc25Lc{ zFhvzCzx80noY}6L;ZXEmkz^@6%QK_NUi`C|y@4w`PyYRZni=h|~=q#2(YJ&m) zBW2sQnEe$bZ=`UkTb8Hd1TQUf>TxsAgbyin0%>oxTzOc|PMYVd;1E45jG2Mr{^Jhl z*(r{Hn>pRwdcrn0)0lhtnNu}@p$IV|$*(|Gdvx(X6u(!~vhgEmGGv9ah4q8fj-Fbb zhq=)RWw$wCqvzOy5*0PJrSDgn0U}o)+hfjie9*)dHn@zl*}^f0c=~UCwq+DqqZoEA z^-xkiC+=-XQc-(lcIW5h=CjNcmP9<~Abu_xZNIU9NVJF9Kf8Y(ja+Cy&uE0-g6^|h zR@5_(+qSI?kCtaszbwRN2tT37OA=9J9q0?c01_`0)9XQ@-bFxrb-OmGL4H$S%yuHgoOC$=OL_GR$fNbzn| zWRjF@Yr1kwU(=KYW}g!!hbxFk^1Hge^g!4n15?prkGie6l3+9gW|7@&9#<&}8 zyz?3}tcY*8D}ZwrTAn3C_LGI7+^`>@ZMDEYt%3$K%5*aHic_xIVq1dt*Jcvoo{Q{p zw$L+Ac`7$BXN1=bJIEhwrDc)@mpTbMdT{sN_#s7yWoIIFBZJ&y@8`)Ze70*?2By2w{&gr>Q-))|5T4~pVTLdcS5=zQVUNKX0sJq7rI0;-v?!`y`)(>@UGRAsQM=Yo+cvo5jpRHoe%LEu>zzC3247qc zj*P})6*#rsl4Z*W^^NQY;b-+YUBA>e$|Z$U^~csP^WUqTbZ9h!J_Drrc4Pcri&^4h_o%BvE_B#1a_k;XCc4rOwDkI!p2i5*5h;Z2~!w?EYdY znbtPgr4Hw^+?QygWrE@E=A)uIf)#q`lW`KuJl}?NAZ2l1r5S{WCSDo45W9jsa7}!a z9u2jNhTrhq>3RTY>xG);ND}U3jf@Ffy?crVi!ni6?Ir{KxfC0>{fbAptA;c7?f<4O zRi0C2foKVJQTRk#J)~VGZR=kstvgr=azD4rws9h54~{JdcK?rEe6Cllv(PDJL86>y8oe_p&o7 zBL^*_Sd<$R_7TOdFFei3Xf3m|9g8a*CJANon%(MQ-^_|klU@^yzd};f1YA5ne^Oc zs_+!*{-J%b7n#+i%=LhAM;7J_`^xE;90{@6U#~MO(?7dEK6;Vc1%5qN`5?ylNMBy* zY~e4TTw|K^;TRLJHd(g}1 z1=iL#wT*YPwmZ-cLBD;_phrGpg@e{1T#N0{;~dU6CkJCa;NvC`o4Q%G0PV420L)0Q zQ6YVS9gOK?RO=TRW1@2uNL=ecyEZl(PaFiC0006@0iREFf12pm9rqN0<7fkL-B9!f z*ccDrhu>`_FUCubY=bS8UIffFk#6k@oYnWi;3RFbITQc+OeG?krKpPkDUP4ej|3|8 zkt#|wWyPmH@}UT=tE6FkfM-Ma=~7DfNmkPAIXv0p!Tp+DpgGM(I+IJ_3?tCII+rBV zYzd?W0Z|c$P^f6quWSG#_;5>+x*QHndV=iwd6oWwcU==PwSAL9dy^@;^^<^xVnHPK zGwFtb6^`osvtlI{>)CoaU33&z$G-Ps z13aw6rP*5m4=&w^gL0p|%6d$nxQnuGE}nFf#rYDvEH3uHBZ%LuaKh4C2f80je86QF zHAGfd5R!Cp=pM~K(9BI-LcrtXD=4Fh6`56CU~oX<2-^}5rO4S{y<6rLcN^%B+hYsH z{TybqMGUf(I*NrqVoiJp``Zbc0ODlobE4_w^9#)~$@r<{Rr6Tw-Tc0qb{KX*tfzp1 z-Q_nX+X>A((Z|vAvkJEjf`GPCBYu0&tyrZwM5ZI+)>&K-yMp4xTY}=*cM#-FCz1<$ z)4S|r6bypoEf!w93*|II+k;?@rEC&C`)_x1vq>j{y@?n+h50C3p5hShPJ-&rPXohp z_(=zNGL@-8uxqsc%3`jRG;E7icq+I600Er= zpHXUm{?S$5qpcD^ySdqJJ7Ca+t%InBy-@YyD6_nT^})H~)6X@ZzDr#wz+KpLvVn?( zDBV_A!Lc$ejbcJq`HC=<@yd->vS~l2z^aGpsRr}Yg^J%izbcR4UN&{Tgh4*mH^T`! 
z1RS{tqXyB<93Yj%#1UFrK!4vd5m48Tyu|x#;Bgft2YCJS2#n>;ot|P6w$*nk$0Mk% zx?|ck)IH03HB3y|l}!xPzGPd%&*8E=oBXpz6UZPX2YBcEok=RKGyB@Fw~(3aV~@!g zr)Zxo2%_Qzi@d2Ke(x-M2gx|LEv^St6=017ZErnGM|oW1Z4;l%`!3DW-7sYi;qS`{ zDgv5d)(jWoHcZVwMlyF5tpOA?fd~!sI(1}`3=(czdAt7*!XE3OHOPqqXY4nCO9&cA z?RW%*tO)o%WK`5qF78u2%XT2TWHSs8AtpVe$}y408W5=sEMx+7H2;sS4@@=%-fl;#(H zZ!8-iPz~`p_CwtfmR3;J+#Qq0@HHkK5fTo=51lENX8jgSwZL~=q|V}G*^PKlN8f~& zN0HaTqboxH3;X2m^|$F+h3>^9A0^LRuStK+71E%=V(49?38d2x1t1F*YF@l*uk)4Htb>%@30B^R|FgDKzf;G)W$_Y^3QG0Ne>tYWJ!{c4J)4 zt@Y>rsnuXEIPdL$(q3jBYv4W}vG_~k6c>beo=FL|f(@0!ix-GhB^eO480e8^3rj`I>2a-rBn|M>lT_&lSl<^O?oo;Xzi!>}{ zG{gKUBx#0g@*XDdm-8g4a7CNPMFA*6JrFk~QiZe0zC~P}*=9DO<=QYk=IC&2DwJ}Y zy(vYO)6~3n5ibYCVD$yYaMsYx$`09(C1|PQN2NH|+EB6!MxX?70eWuBA0hGddm`C6 z7D)}#DaN}zt8*vTPj(ewtV+`{zc6tn{?E@}*$U6IT@BL*Ep)8}u)-@jfA^Qb@G`2e z9Cv@}g>p$hxM#8|-fcrH-L_m)2_BxKKLTH%0s`&DG2dRK_DI^32MhX)dB51%zck1IH&ZM4|qRijW3f=SwwoY8|1V#*e;WLGu)*+4q*D&J*22K4K_wgbQYa{ zitU~fsVf~bD3pjX%1kAg8qykBWr_fV8q$KjzkyW%Kk|8djcAk^!FG;A9_sFKldiaZ zksu_q?sVOOjdI2P?*NAN)vK*j!L}QT#DBXHl>QrRyQVSiy`0pby5#(HXuAIQd%j1V zGRR4|P(mE2WK@w}z4yGJPJ4V}UvFPU)!*zqEVE475IjD4*a=7{#{s#19;y)!a%O$R0heILC-oRkCwO*vr~$b?VmM|^vn6s* z`_s3z3W@XN!BEr7JDI3kWz1n^VZlR8r8FqHJ^)0xskH`{sYT$p>(K@F7)d%aiarRF z95Tyj37fMf^6G%;VQ9=%0O!lqcH;yv0+6NWiD}BO+(Cc}nP&1wS$or8;3uv03GAA~yBk9}2MOXA?f^W_e&@DDDWdflZp1V0`D&OKgY73hJk|M4!HF zdD?}*m{x)HuidnuulAzDM~uf~;*X(9u8XSBvGE{w)vdnkjq)LBtE&3ugFnVgTg|`K z&8Sy&F~VGXrVLdG$ej!Un_KKEClSxz%g3(43jbFE=_esuIabD~SpdY-C;bZ&TsAFS z<}A(OZsxx`NL;Y%V0QJL<+gS%>n&wZ<+9fj&>Gvlj1A%XMpRzc1)y;V2`6I9V6z3ZT(81VK!w~k1f zfL8u?IPm5sESfb)0sOg3q5z6-?OlO3C%`I0RVmzFv&e)Xew$+KH&V-*BlrpA-Zie5 zQ$(6mQ)Vr9Gvj}si~Hr%d&ubwpStKSot^~M%y9J^*&}Ej)u9t2tg7WKeLF`)Nyrqi zi|?JPy9+d1z$PQ;YQ4jRfE?G=*HWi+|7<9i=FEVYO9Rh1R=HmrrnJdNdAX-s46-j& zm82xka;FmYGPjOE`TB8Ta}uo($Q0DHn^s-P;8ULxw6 z_nV;6{d{R6#I$`;j* z1%a;nadRR8+c$l;yTX)BH&uO zE00{%TkkrDgZjx7O0EeEI8RQqEV&YSby+z4BadtTHKL4(EGK*di0z1#NMq4}(*|`*jZ8i^rR-3N6YQiZWq0lnt@HU_M$*7J^oX0l@^e z(KGhhBDO9PgoFztb`DOKiuPk5IUvlgGRmbSFu@z@DFxUPdpItI#rY6%Sms-R+;nf% z^08SOwIf@pdX^!mAnl4@p{;s5LDlI??b5U~`mpr;rNEru*$4*9T25b$WoT^W@;cS{ zYDb_KJdI}6lc!k@LU&clH^R?ObI?dkFLQpK_=f%SP%hbc^&szzCZ49OW;gYzTvXH9 zzd-%2x;45t-cysm&${$=pe!Fl=c5jQ1Snk>{{!{ER~3E~VI9^UhKS?{DWla)z zO$88nU#7l(>X1U%*Wun@3AUp6>M&WC!xe_uw`$d4?kX;74jyGx_fLW!UYEWauw(J8 zeO_#=Fm?Z)`zrPM?*0VlzLchs!>m0D^1q73f9W2@H7L(wNeSA^h*85KL*mQWtAq_8 zf)UTqFMR3h3=4RY%!fnE${wX`PPHcQgCHzLr`Lx8FAd;SxINf8s6iQ!x`26uIQ zqPw@Z_(`v%Fm;9=mJ%&Q1ERD$lv#dS@Xg*=Nv|qw^pI;aIhipmF;gw8(lr-D0?(^S zeGLwA!r=C#)RP0>w$1A$S;qPJ=`pgbxhGsjQ-!QC)aKF&2?q(K%nMrD?zgnXzf)lO z`3Z=T^rs<+I--Ei=Nq^h`sg7tAX2D>pyE37H!%w)Pyq9gu_GJmwbj&NeHl)*CsAwU4z zIz|3JZZ4GGsz+q9dNoseQI?XWOsCnk?c4^T8RdGw7##~SEjp-U%_pF<(4UEn*RocHVfo9%ieol! 
z8KOx+k!ztj7I-?^Wq!zFX=i}*gW!Yzk3TW3#1Q^-MF;zg)lkT{7R|{ zx1;rO4UC>&w(tV_Iu6`(1BM1e=z@=eRh5f6)9$v`UDOeXlid#t2EGvweMkb3C1|F2 zBC!KM7^@*&!w{fYOXInCt1NK5_zj=buOxLtUA=~WK|MRq7D6M}6dz;p+qKun2)hHT zBLVlKbo>QL;6k68B57Zl1kl?NystF6KA$o5QcEI_88st*v)>)#PxG&lNBt2&vUk## zDCvnU8)dE@`Zwm`%ICttr;05_q(hK$xUBYl`Nt_VDZ0wiP}%rzW2u7hgADGFH?R{C z0#T%nc_CTdD3OsE;X(?!dSFOX=2#ogC#=mg)Ijx_X58QfyQElK4f^!r^fL@2cnWe_ zO-f(fNfC7%D;;^iIpRrF-fmT|L$HSFw;$)WyNW!XofD`7Pe2P?oq?dXRyd?Z!?NbM znlcq;P~$RJ9Z8O?fC9nfuad@W$2k@7-h*-IiQeEFd&54O=Vcdl>rO8?e`ry|91$l> zsu(nh6TiN^m9&4d6p+hxBhCZ+{-bCUj%lFG{oyc|N6i~3x$NaGj!+6tOgLZJRLtM~ z^3dy-GPQGWQU0kw($iVax48IA7;fQft|Er^mzyQrkJ%oj6eo)O!gEP*hoVC{qJtOn z>_M*{Ye#fjqv(rq0WR#H1)eN3h+eo?3)IXk-;o0OWY~W29BN$nc2Z#XjbXQvf-@%M zH|wgf)dR%Oa}7-7n>|RWgXdEyc4Z@S)0?pV53;Cqx?yAS%X;w4%#bCU2r&O3i%~Ck znx(~u8Xm~5gv8L$6W3QmDkwA6`(5Eo=4Ogl?dxF*q&vsM{*q)5+%p~zp7%_T+FZZR z?0}FVj?&i2!mR2`B^fJy1okVW*|KD+5U#Yyk0)bV?4XiXzgx{~zFAeeqZ?X+poK|! z~qM|3jdEu8z=E4!8`>fG%*=uuj%Dp2BMCx zo|KyPjD%M7(~~_S($&+QE8hzs7REueqP{v7tTh_w4#R!Ay=51Mw#CeFfPo-HdZWR? zZlgL&W8VOTtLLmjlpSnf;98D?X>8}wIiq6MR?)sVGI-46C$kA)cS@k3ez3-(Ck$_V z6wX|{ZHRB`W)DC7~54jSw4W~Vqq1@ zKvTeo{-|k+>#;f1PZ5RUEF~yAb(fWq)(d%pvXn(~0GGLdN5yqu8}`71kN;Owpm=*~ zAd>mGuRlkYo?PE=x}K;u`$7c<|8@844&8=p0mPu*v^i|a3jf`*EV=s7`{uNF!Yx1b zCH3f}Ai&>|5v@uV)O$4MJ9Ppxeo|lAiPr>uW;!GQRNKJyTeWD`y5|6u)vgd}I__ct=i=PeptM;qEQV<%LgEIAF9$F)`!Uc`P`x z99mZpZ=(kkVn%~W>m@y}FgvXlk7Ohtx?oa#;4yqT^*|9XUAY1dkzb1`?s@4fiDcVQ zjZZnneaNk(?(1fvvYTeB4m&ZkhG`lc2A02UcoVdp*)Ey`3K4!2gwvKJlsXay9a9#S zb9SvemsRs?A%KM}QYlr?w<4kIhNSxxFJ|8L*G4-0*w8J+b``Y^h)ETB0009H0iTs> zN&o23!1BahTh&-)j(qU!*Uo_m4biqd^kDXetj@?UK8r>o%W6K};voG>(7sstoUV8V zUEB0P{Sf_-x;T1AQTmPi(ZN#U#q92RhZ*QlX!j_0dihrL5_8C9nRpKo6a(|06xAEj zk{@Yb`j!mmM@(BRkvJ)6$dr}yk8=mLfVa&-axt`z))aevEtQsEBx#I7wCG+i4kCek zq}r`bI0t1gTdA}{3wq7ot7NOgZubR{AG6iY;ZF7Tl3|`c&1s=<#tm<*_DTAW@!T zLtNx|ITDQzq|X1W*U}CJZj4~k2H+2CiK8R@(ClNk*8~p(eK{?M7!ulysdw6> zL~Rw{U(&09+0<+S3{2VZ`~?%BDkd;N8A@6-TpwE&5*WD@TaaN95Cc2Xjr&MxTk#?0I>byWh;$kuP~V~6KU(c zjsJd;RLnLIW++b#k(AEQr)kq2=Im@vCM6x-b|rT;y98oZXW5*y3E#lRfX4^~C?ZsLt|F zD16})jdcOTcmfZ{e)%}!$YLWUsoN9r_Wn+wPedrBS^Ev8zPYp^2wH=9${Q5)6H30# z05OI467K zgumz9Dta}Mr5SEswg_n>RsyeY(a#WOe_pX&e0^7s#t8$KICN2P9JeB#K_F4w$$$U= z5jR1bnn|cZY?(|5pKij7!3eQHEs@KX;fQLX!bHM2Io=nt7ztA0(Pp5voWP}n5<&+E z@n_^(s~^_OOwir^3vC(}Jtgvmt-kbm@&H5O;ggNI7YP2nx0D^6ppddzWE- zPJb%7oVjcKV$f=`4NlC#r*DJ=WK|3EWWl`sGKQnh%$3Hbd4dJTdicv~;1%C*Nx`Y@Q;WC1vS$>subxIgGC zzr|`E#^p93F&D2m$84JRV$8%YgA1tBv4F^oy9_7DB2%C-dwJcQ=Imzg79=_9k*rm1 zS=5Mg5Hc0ICy?0KfmsS-sa^W-2<+|vlq5yKZtmWGj9U~3j)`DSco7Y<>3K&Ey%cU-YaFL6vkr4f4=u3pDfuF z7s6b7sysqJ%a4a@rd*ShKc3y-u`h=UN0`H?YRQ!YT`Df(i<)Ipv>5mWI6kug*7$uU zMv7)%`d0W=3GPYlPrUcYR4GYVa+aA05J<>|$KHkrAep+4`aDQNZapt}Qa_s@UXGr zx$W^Jy6Wml6_ItiCr*mCXCrz9*tdAj0!%;paUnFSzBQXElfYQ=LJ*bOz#-E^W z%j;@+W-zL;!j9_ixx>GHUu7veeBny_*SsTFTf08;i?%~e20g6oWBOJlPzqqMMF|wi zGIkSraaVVs7pyn7nR~ZO14{`$asV=^?x$@1yY~O5sNyN{BmQXOUBD%0Q2|>D>^uDg z#+Yut=5gly_|l#KN_LkdDJD0tX02sS5Ug=^GW1YmlvzErp}y@Dnt)GLI3NMAZ$2$m zQWBSAfZ5w=#(HC2QJ7m$2t$i8=*;bBe)%YhH!emB$=oHDJWtF8if{k>rTO#fhp1WV z))z&jiWV%m%ja#;0C_Ogoj-a`W0iBGGTaFBWkZK%F-QHg25ahzIVP4Yh+@td?~5+n zS4n)3T8~QD4suAae9zt2-(GxFU0f;&1*7hfxF_r=5=r}b&ixu|YS*x1={340fyh}8 z5+yRd&BnJXa|h5q@HQUvy#&-}dJT z2kLPpwS_fr=-z9|DWF!j@kuzz#>0JMj_oMjc4u;f-tMW*B_CbjBE14K9&B40;j`_Z zEp0|q`p@n$?5l9l5TEm<2Q15n;P~N47XFDcUPyD1mml&W2TVx%7=*6iK^^}Q_UBn3 z(@-X+*m)BI`EwxIzwlaasE4DjVqn?ViGUXG=5piX*d-W)@LB1HdfQfhWu|?k*}0v+ zYwlQr{wS3RKUJZH&>G~PKx$!iO5kD;n>%g{EVBR>44>^YT{%5`>1^`qH$jS&W|{;x zghOHR4@c@$nD`NJw; z`3=!pyx)1f)EdXkM-u-_Mf-vDGf^JELTgKS9pguU-ze0<->{ODCSsRENJ0VQ@+J#_ 
z1s-`5&>3JS?>QmMh%Dz6teAb^)EI8OFkW{>bx@O0cr1(RcOyO;p06Thb#Hh`zDjW8 z|JjPsgw^%Nm|wH6E_7~!_a0R-t2^H7v4tk)SMTJ5fX}H&G0M4aJ$BdB18SZtD9?;h zib`zkK>GrsuV@4YvEnEm#H%$*y)<69`Upx0FI^e9{=87vmeCS&wc&8TqQb4g3Hz~yMtZ>jnE3aa?>zt063Y6PK;zY! zIaAr68_x*b5L3A4Wqs#A)Z5MBSJJx|i}I5t}&gy);c{3<~qERECi;G&`TiN`A-m5#LCXr*Wk zALHX*Z!l{3;>T%!NWJiRf5|wse4j3IkU$uKf7uiOU_hV0(0K2DFS3CRj4F*i{OwSxDtn?KfQMU3!&R^JdoU;I#X+=S!s!`l57ruQ|Bm!GbJR1xOQo4Bw5hxUxE zIf*HGM;>ZPFouYPaa`%#v54h&{&fWW$zw2bxXIQV1UqdUf8tgX3}yhWPEq1P%Ul~a z*%EyQc!}^3C&}Pn9ZOlMuVrf*PT_3OWVVsYGD@+8#Qz?otW z@9j^z&`?$jJ6s^6Q%0d{Rk-W5W~mStN{QIdC4@uAQJEd#*_7?WlN8yeeT?7QT7Yu? zlpp%sPeRl2I^)ljMT=**BQPT${&dX~zhWGwYdhVlur*eJL5VRpaQ&2nAqp- zP!5FGyB22nJ}4x2*E+akCd_h|^z0YAR1wVNMl7@(Iud78r8RXICzZk}cRx7UNzo zfu}*3e?WEV434Wlejw0jq|Ul8gm5Uty8xyF7Q(?Qado$(u=uKAcCX6BX^+U^T)n$c zDc+ck#u}SYX>hn+CaxM~5$r%OZ)Pz{+Iit~AVHix^6~k_ZYPQoJ!bbioiqa&6~8wsm+hGjQ$RMx!fP;Ya7C3( zf(cG?6r<0F^LuCLs!!iO;t@KE*1Oj{_DmhUCZIdz2A$kbEZLVmQ4NQk6u5E=X8yg} z1vvWwn>ai;U^^=9%&_2Fv4o24m{o4P`dfqn=)yn&gqr;k#zX@1BRI4x*QML7cvu5GgEo#v`Zk@%wf!SyA`&sqw`!(Qv6*+lJC`w3oI!wC3zETJ)Pd99VzEdN_58(( zn!rczZ@)Xq6f|X%?8=~JgHSi^vgUaxU`_2|ry(2=rYOjSY6A-x-u8u@D0k_6=6HV} z4s%=Zwia_x$Z7MTGo9g(gK+5=6r87rfwHOByC)KfmYd{bJnn=0?9Q=Gq*p=OPErhy5gW#J_&tS{9Lgv6i}>Q#y7ggHS5d>ACW0X;75JcI zM?rW|C!xl^B?C0<5K}Gu361sV=7{a=faOekEBt=_lI8;rsF~1Jew9kERM^!=aO6GJ zX=aj$qyI=|7M4-DN3OP&3kHL8P2Wsv2bh^A_&iOSoCq_Tb0uY5pjp6i8Z^4^5lMpJ4T<5t0mTkSn`q zXd;qG8Z)Z>9=FKcfl%yn*R5u>1Zr6+x<33B-#U$OHpQ+*8}2bwzXm)V1-_%J1GyiB zQG>f1OL;Z*4jRXASCDQt`m3+PS(knG;Sl(rl>b*Lc@jyLi-^lG=E=Mx{wgvPYF*8k zUAJ53Cy~d9wuJbNlG@wCOES$SvQKR7HN33?1AA}H9Xx2H7X96#ZcE0|$DlNozR=vi z!|u1E7iKp%(Gz%K)YQ;BQJ%&t=G-#N=27Rw-JRTPOQP7{_Aq5RaituRW+vaUre2vW z6h&CF`I)WKaeTaPbfTMu3e?`rzP=;W&V__iovCFp%I>IU!$|5z0}==v2!A_6w+#EV zgv^jse)|bW`i5j^jtzs*5%|seyLmF6yH&GI>q@N&EDQ?315IjGX~;*av^E31psu5f711$ z{$T;-Aj=CJMB1o!sK>7UUFFr~XyBdyqn~2ZB@jx3#;w)nGOIxbY8A$z>u6RK|6lv> zgQhdo(WPZUzWlav!OL;GzIuks8UBTpf>^%icNER%pV6=@Zhm{Ob3 zF6$@w9NOfiGKOrGX+8GQ5ex3jx^L|kck8Z$;Gge~;J#{uQW6wtOzTRcIA>Old1cS~ zQi4%>B5WQ#N=bHgfN9!bGz#Z(h_4+9vAx-<3`m)3%kTTbLl&s!sPfn4ar3m*nZ`#HUGAJ~E`|A0td3Z>QN~ra znl$MDK!U{Nzg7gm0g%re=#pV#ZP<9RFK2LZd83?zPGgA+J*3%K_!%AuEN)`H!g;v4TX1=yrBqPQE_&Hal=f0<#?_Rr1GgrDk-$2{sqTkRwZQ@$|B-yoRL z`N-4_TUf5UK|HO%x;>AWeai(yG_$Iv`+R=Jh}H{lBD%BhO0EiM#TZq^%x0k<9hzs( zoMyQBu;iU@8<uj6-t}d$T>iCgq?7hI+r zYL(qSPZZ3*y-{uhGR}h-M7U%`cnUsD&bVoGkyFdZSg1VjhvYps6@$ z`i>L4^_L+e&*Mxcb3RC14uYwan6F!TNztrPzY^5TyR*zSq|Pq^MND}V9^jF0)-Bv+SGzS-Za$WkSimE`>N zZ_cIgJj7F2ntpy7NW#PRj_#w9<;uopzf-wQ28&siCg5wAu`}K(ios+Blf3V4Dx)Aj z2X$mE!imF=xDWzpcb~78jZTZN2)VY*g4ZI9@_=HZH8N_J3g4^tVh64SOB?U4%2(z? zWC#K{zX)d+-_HWCRN${WEe~}MRafWRRgtN)=9>x-iZob%ULKfO7aJqmY9s%EoA~4i zGd$`7z9@<4gNou$VO~2vYmT@U+cKSwP;bhS4p#b?JDAHn5p)1wqF@cl&{>B;gD<#8 zp*I(JJ&3Bz&!;Gd`{gr$HI3s-3FBO`AGYD9iz|X<2J3hI-Eov&K3x4*z~8~y5ham? 
zmt>N4bD@4kv;R5*1?iBDWk&BS*J~K3cPL>;~F+ z@A21pSQxFr83@#|;Z?d3YLRWbG(F{=!D3&Vh`!@g9u5FtJEtOi5yhISs7=HV!NiC~cj@yhBEFjwQip`-PLUu+!g`bom}QK_~A)R4&e#vt!0anm`bm44P76aKGk|f2oOt#B-6a*ON4JAc5{}+L8Nr- zaTGm(@5zM_)uYb;_bRx{je;QGi0fxsjym80Y~G!qAP=&tgMx z_9|KiHI9beXe88?`wh4e=GOX}w&wxP3f*VFRwV{dwO@@E^YRR_Frhc^0H>%Qc_6_(20G055EbtMQTRoiac}{fuI20>&kL z(S=VOxW`Zqg(`?=z))!z4}SEu4Rr#h9&fp+$qZ>+>Ryb#aOuAZrS@^ot?NyH+#Zb3 z24?-KfvL7?)T^Rg_^R9H5^;qO^%Cfj2i72HljJur-&^y&P&}AnYSpbrku4Vb!-<|M z0+DKy!5VoyIQc7UQc*Qq^(loKqx5kwfJM zz2;`HkHT;WPMxV0POOj6i z{ikFavd!TI52=Jw)rTJTyFJa;KF0kI##%50MBG)v@`@qUq2|Q8Ci6-E-%J40JoW{l zvQ9Hz_COkVC+Ks+Z)ubeYB}-TBIY8yVOhy2(h6-_sB=__0hOz2Ag9nfQ2Ecmke{p* z8D4=O3hgfZa7gmI*ezCDRCq1X+4{im*=)ReIx2l7t>8E9vJeA?2a^<|VVT5t3z6m7 zNNtZM=$*zZncy`gm|R9@oZQ=0Icjvz_Wo)(7hLez-G}{jsKg`PGiK^N8%r3b7;uD zn1l`7$D!pLM+7DXNALgu0!#s)*=kAuf(WjTZc^7+6jG>^s8~NyaBh&_TI%-r!6D^{ z)VzEz`mOMv_%_arDJ83j8%G5T&` zlDaxg4L9}bCQ+_%QIiC2$-hHdwB4j<&CnAOdp_T+&NIdkijjKkG3)UY3C=im@cFmr^9i8~s!RsU>-sTR) zq`@MD1#>CZ7XWZgjN-!HOKNN9CsPq@=;EyNkW)X7HUOw zRPwyXEv-R?qzRbv{iuP(Pst#}Rji?YdRfb~`?>2Ut zQ${DIdcQX4KvJ(IAWP=mcHsUibCUEsec7rH`o#`%M7(%Y&vd89g?D4G`PgPOdr-y3 z6P>i^Mpz3|)$Jc&)klD^(At5h_RO`q?ps@ggFbi}XY&X%fdeJ6q%^296`7&J(KqD4 z3cy-Xr5zJg0U9i(pN4x!&s>*ywgfPhEp*()G$}v@EfEU+-i+L$y$ED{Pj z;Ds9;1g{~m-5!>0i-vskWdO^?FeV=N4(Sma2s+W8A!`>t1goJf`V$T)tgm_;X}pS> z>=xT(h zM@wn2n(lG=(HW9hJV?E`U-cY!!>{L+NG;3WI~AlKz~T%$e@<#R zSPH$D$mk&TO@EtoGy8H66~RN?088}cDx4KMZ(5fg<6)|w%(ecwZrhcX(_JkNA}1PL zl%GYolgWt{!OD>ugO2V;*wz~Ff<+-91MKWL?KZ-!!#waMgw>A^ey^p*{)yM+!CuI`18(xYTI9WK6_%HTGR4Cf=M+=}44T=qJNqI= zM1^D3!c^rDr;k{$&wrA)8T%Dg#7Bj{{}pMFCVslqSYh zjPLFY1}M$U^`?C7Dz*EQFxyFfSM8r+K*`7svy{=j-g&RzmuAar>*^Pl$W87E%Q&bG z0j=Bq$55pJm|fUY7I0h|S{B6#d*;%+H!Zy4spm|cnkvnT4rkOdRIgxJrqhQz6 zWbF@QKWJujI7h&HU3?#2eEVYZ3dp>B0*^{lHQ^cW!!Qxy4F~qb*queJ3Yce2v(p@V zefyu28>`T9G{KnQT}j!TGP_*ZRHlcQWl9tu!T+ITIj$1pztKBi=y0k#S$2ToSc7Gs zJW%MG*cww|c_OykR1WJgEfAq6@%%e@k0dkygsRgmVs@kZTlTya@&6|t%Qv5@cs-gh zP%_!H&oo}L;`#n?tCYI^zzkAOD`6NPz&c(u7}IIloQdD{Q>Re1VN5trqsGpBw}E%* zs_$>Pzxe~Iux3UM0e|g^qEr?brNT$983InynYY5dw%DL+re8PY=S=ex2)r?NDSqi@ z#eo1MhGubo0q*P%HmnO35;FFF)KrFa3-z52g%uq!?<12c)vpwekp7EVv^qR5qXbVk z7rF!^S<^>m%G-7A?~%ON5rGyvr1aafgi05++I1@Vky;SOhV#OgHP5wBI0m~YadmM8 z;STNWz6Ya5oZ1q^>7y*k0jkPN1fFh|E`Nvjx&*{VtzdZ=jqj7xf0VJ7uW)~6hv+dr z@&PwvKB4rv{&FnVaL%w10PhI&?q??v!I`D=mdsQnk=mLkuiH`r>y4R=lP7xc9U*hW zZ4hht)?(ai%=-Y~a}De+;!+U^@1RVQ%{lTQxhN1hV)~A0X<)J2PD=FLL_Pf1^jraM zfRN6=4(VAKJ2d2@xk#dt7lS*8%lB1#eth~kL@C`#7rX**$`M59vO)7tgM80G;lJfA zOvFM5|6T{dN}CKNcoBUW6l>>O&3jJYTcd(Ye8tWlw)yqIiz7jFqN7LtW4|w>4%Wj8 zQ2AUBgb21o!_&0)N04dUTRX}sWGr}%TV$PFp&L?6g%?w!em*MT^VG7!FrDGe72HV zxhQ?v$ur1iiyMzyke`>P)Ml{Uic`D@PlC2L7?`WXUsW0k&3pmXb231zYy%=#GG!%x z$dg?*m!2_SaLY|qbmB%z0Pk^~rZ!>W*Gub4$xI6_~eD)11~$$BY^8Fr7<}fc{B`+2?v6B-y4`4jABSLg+aMQ zZZMqWXk<&4Cy|g(d*SQcr`WrhLEGeG(R?>TH}KE{tQ7DY>r>`p{zvF-m5K6(oeB%T zkZb0r6rc~c#4|^bUW8G%i7{27X6GWZVO#g>RGZc$g!^GpYg8>Na0yt^$jQ+YHsRtJ zd5@;|lA1vM^r2|cw!qsl!=lj(?sWc0?aB^;;gbk!9MdP;J4&L7i;b}JZ`Gs^qjWWe z%Dryt;P8oP4(usZ-X#RZbz29$mDKn&;pqzV=ylvtEI^)Z{NfO5{ zY#jnDzWQYHz0Y#n>3caI$WMqr1&(#0bwC}kPRtB-s3a;b@u%;SO^6^VT>k~k7gT88 z!Hp3jq)=A^{F;!8bij6djIJTE_q;-VGlTJCuQWJBQ78&~Yakr&n1S zU=_dezr^+UET_hg;CMhnYQRewb-_D;Vu#42=KVgzs56FUsCQ6%$pMrBv_jy&l+%t)~puWyRKmGJr_s3606r{dVEYHT?A+%Q>y#ms_oNKf6KH$}8H)PX;N256qdqq8a zNJgNAZi+gD&Pe#n(AtDgoXrJMK$&m$@P$ktkNub)GmXy~SLiP_@TE30~zjMZcj zi$S#6pF$v3uxXQ(Xp>*LZ|{EVCED72H*Dm%v-NpDI+g^Z$3D*o0ujn!5j#qDH``Gl zYc4l1tp~z49aS==KC(=XGjq#{+r@}dR4budFqx}sB2R(jO-Lv}O0j5P-eC{{F8N%U ze<(b$3>IP$iVh#itV1ob)ZH-@ijFi-Yk7&&TeFwr#$@6`Qq_mws|Yy3y~)y!3wvbo 
zQoo23KE6x>aAh9r*(V1kHu60vsu7E`-qKj_sJ9aX?9r@^Ej}VNi_G(zUoA{qXT^d4 zctj@DjUAf5Cp(goO*Zm@6kpXlQmbYRy(fiIS049^bCj@|(iP*2D%h=cdHZjEdJX?5 zTdpfDyd-;71){?NHu9JwvRGBFtT4b!Du6%?G0cupUM6oiTN~vW!i=;zoB#j=nn9oZ zMHMgq(xH)s&Ks+tGB)B^;FZJ_=x9g8nhr3{UeAJ2`}O@Hn`np(V5{RY}VMq z0a9tR24)k>`I1at6FluD2vg0lGRBz<_%B-Y?j~!06RrI<9CA=;yp2e9C~2THE=*?+iWB!YO;K(m5*(%xNZT*Zm~| z<9%#C#uV`~9KXej_tTTf$&}}I53f>hYf9^u?ryS`n-3w^6@EcxXbYJqIsJm$a^iz! z*^jg=odQEPqdWh-=YqbeLDXO9a<4obKyJ$5A90@w%V7CgpGax_H(C+sWrst$R7|L6 zppa*bz+O^WLNDADp+EzFzi%8C-ZQM{S`km**>1a1d2t>!A7I+OID2O?N960@%kl!b zGZvPo%*g4f^&(&cDfIZSS|(>lWBU;Iia=-X9wiY8gD zyy_pUzciT4b^@%Mnjmr<{a`u4(M{iO8XFF?-Kn@`z#4DFuSk zyQ~i6*#^U6t#Cvt^XaV1m4JoYzsb6ZH+iDF(CHMPA17&l4N*{&qP0MaDNfr;vCG}J z+n3j>eW)W`LhZzA8w&n3a8aXEfYZzm%8N!V5MPHqZpFRkDKEFv-TF#=L1l#>=BHZXh~wgT z#Uo@?;EUitsQ`1$cby(%loG4vg9iO-OyAK<$|DyxhBdzq4DpDigCw9mv)a|v4pPZv z<~wwygU1#k-NwQ-a2)B%Io7Ynj$P#>4vJ$2wNj4*x&EUk+ z4_pQBskNA@WlAtiJpNGblOlPLc;`^8%-XcYm?ndQhg8o100CnGo*Q&Y|6gb}-M$Hk zn@dKu0C*A(hrk=(9bzK!#kU&)FZ)~4&p{i+mzCGq0EV)Fe~7;e^8lPcgOR<{AH)q3 z9zx~rflhmGVT~@+b+p@aY-W>a%y*oFSN4900~^}ksTf1LrsF7cI1jb-YSXn+Q_0$d z?YWmGt~y2GGa&9HS&OwlD|4(X$YhRok#eQY_BjjC_Dd{qpiOHos#>9p6t?>Dbvoq9 z&TJB$jgNTD_2-RoTS~;9DPT+Y~CIMG87JI z{h^ZK+3GmnvH0{n_sr=aV^1ai2l&K!usPjtFc6(fjXhHwPCBBd$+&Td&6=Jx zu(tu%da1Omas3Uw0?(1fC(+oG9|S@Is%BlfDYtC9p;)myG8G$he0X7jMrg|pHv(#RlsoY+292yi*yyC6vl zT%bpP?1#=QbX4?`Esbi<;`~%!62~zHnr=r#*2#>xr+GzUJ&wzXz}sL9qn#^ zFD^Z3QJ<&KXBYqV+(xHi7O~~^^5&F5-FhYU4-G+Wtr?(bw8q2d@y%HJ&DtC=)MX}1 z4t-Wy{u;R6nf2BSMmqf;ymV0|AhguMf~JdScqRBC-R^eZWI$%s%tADBaEt0NT0|01 z^Um@xn8YA8(^}xqA2Yqto>hsfC^ldN-F%DR?20s7W64P@d6EUBRM9=nD#8(G#I0#x zB0M^Zf+lHCyDxjzD9gnJ{|0)x=ascv1J1{k_~ol&`#?ZSR2tl-V!B2rs@e-Dk6rm+ zdjG`ib1|C6!|h6C-JPY8DZdz6I8Jrdi9>OBK8Rtd%gY%}-_(cnJ*@j-6cqdLw=n+U zw{_&)?+AEFo(Xe3B7>+XG1s0jtYQnxkOJ8PGO3Z`jlF;q39U;!L3~aa4oB5nl0iAn zj4&+(K#WZ~wRtG0hnEHNtf&d0qeQfNEmeK_I@z-vjH00rZB0{-RWgP^JN3KO7?f!F zyyUPpO2in%4-U2LFk%Fz`&O(Hn%ig@8Pzy3o}@-3D;`ilWwv{k#+yYY{gs>0JIruv zFmGuysu8xvQE=NE!r5eyJN;bKCmnrz@Htlg;~2fFj<>kymD$lX8cv$rCkPRhe>J$l zpUjQxu^bBXsqN;I=a^IyCb%xgSH?PN_VYK(Ri9m{!bxGQ1e*0wkDMT%CMQn12Y>(o z5phA90!gSrY?(|8|AHRHN7gAb;ehb#Z~J|QyNP((`#_PFx(rRQMFt5&#_cdQ7yqDPJ|qQ= zc2v%Nezn0pBO>@wz7?7B3}XX~JWZ)f`_4F@h%(icCjs8#2drLKK&uth8f1$YlZ$D5P=pScv=#QhUiR-fh-O7v?g zR)#O3!$mt3H?h-72vmao5lr;kOSF!NcR9kLZN?s+-%;Ax_B1w2tyQ9Q1q)_q@e(*x zTKYj@`1ubZpHJI$U5kb>ssgV1K9^}jZu^BgX*bXNA!Q#xs&ytVueuBa%(swuq4IgS zBhW3+FHwS5U^;U~XhsAWc3?C`l=F`)Z%XbjZ`&t0Qe82fhn}CI-mwWzaLWOOUKMMR z_u#r+vn3V@3%OySZVopXSsJ=IG`BEOW^||ev@3?mNA7$Qqy}_vGeC)3Ha$92m~IB? 
zUfQK6tP0RH;~L8QjxT(^zLrC5d-i=RUuq0 zI1%SxVfF2hIE8rm!Sb8V|8h~V5z01y7V(L6TWhZNci|KS*1YSTCT zF*3ZRGsu-WtiqROKT}zipLL`>+v|_b%Ho=_g6aqAo;DsX;qjoWFX+Zz@MdrzyK+1u zvczkl6>q$Fj`*#wHg(d7WZO{B{l2{eVrIP!^aGF-yCye1)g2JC&>H2|^%xQGX@AJ( z?Z)Sp=RkMH7||i&J)ra9r2s@TV*}Q!p-VnCe#uL zTY$$V8j9={D(PS%7js2=0e0SalsDy7wmC7gj!sVL9V3u>#?c%dnMJp^Xsad~ojx*r z#OxGi3h)4PUNj;$q6-k6=jWO3nY@UtRW%LB|HxEscUX*4JK8V*7j)^5p2L}{*n;*Y01zbQzmxl<@mRMZG`7b~22+gk5 z6xAE;C?zJy<)tLV5S978L3okAro29DQ7+evpPN_o^Q7mIDG7IZoTCSMD+j>{-pJ=m zteVy1 zloUXJfDZt@@uq9TJMLWVj(mQi++Nvsx|Avc{~p+HkIk0@w^Irm$~dKh;M_%Ym`xOX zK0XMkzx@p>17Ol6oej&=!6cO*D0Ya3efp41#%3hw|QmFKdk3uy;)<<2?>#-gWU4WGYF_CoIp|HH2C4V4g5^?$cW$)_`b#P9 z zR7aAcq1U1X6K!z8tv$RfVEiCjm}81a>|1i`!yr=1Ab-41AL3<@V?(-o0qG){vst}Q z0dUfM&jtcxr0wghE(()=@36zkWxSO+B=!;=)_Np|?2Msw_tGrtdo)qeXsj+&G@!c&SQJYkz~~e~!8GVlOF@0U{!KLXVw=vl zzXD#L07Bep$DJOWhk~e+_1`jI$eGsE<&RdE&qkcjK=(B=W?gZk_iiBYX|JQ-Il1TD zr}h2h*eqwPr1LxaEI#LXm&@?dRyNQoBEjIT1m_m>goi6a#LWW*gfu-Ew~-K8(LM)X zCWP}$%JHtVNLpGAv}TEa6$a5zNHUzZwULbaIHq$O`eJDBCUOV8$KGb4VVsYkWhsy) zMrq>TWA%ld&T;0TbU);mSD~aygug7zci|_KAi{S!O1cOarmV}HUTP$(JFi)1_w5Jn zgpO>g3hVJrdmWw74Erb+>$73H@6|gMo~qXhkaJ>m*5F0|{(Rilc6d+6O+Os< zv2@CaJ41uK@N(3&90jF^u^_lPEzB7JETep2!97;4C~eB$2L+w<5qO5Zn?8YG1+*o1 z*=yx=O?vL}CiY#kLs#OSCaprKGW%)qb`Ix( zr78)4gPZ6_sd6)C<}wQB#I;5X4F6%(A(TOyG-+|=eN#5Ovs#e#jrz_uoqz@v$q;Q~ zMUcDILsbgKy1YK%T7@F<oh%$ zLcno4qIEAdo^0ww)$jO0F@6E#Uzt_btPOp;RZOWm0|{YPz1on-Pi!pPj!9egX^Qx_ zA8I^I*eFV0KQ3)3G@nsC`ki|Cv}r*0BjD1u?UrQQjKOZo6WM+h3NmEviW9lOMLYq8 z=kiGkfXZ0t4*Y84bTl2jTz+$04Eds|$6T4b8_rtYoNUj_q`BzU;uE!^UKVQQ(Knny zrBpEZ496l`0put=(E*e1jOShJ_?1hw9_tu0hL>`>JBylGi1@t`$MsLF*3vJK5OS{e zE6rl&X-?jfCMjdQ3nX=yB8sxmBY-hbb19Q))7lIXU;*AOcrAw_8s>^;QvfT7vIO8c z9=nKSp1PPGUShE-Ed3!X3B@i-m-{s!bXkFBr8YdOXS$T}f!Mwb zRpILBY7+CEvQeHZo_s8+u7PFemEWoW^+a6UlZj1Xlefqfj?g7L+CB+(YD{T_ljQNX z)bS7^IJ^qv+Lsmv#Prs(3(3 z?fJfV_8^udR-}{~qpimrKVEC>W;qX9Uj zUrngX7oYJTz&M`A*`ZlZynH^Iw02)>OL?^IkVOE=xCGl2Kj~Emi0~@ZklxUC3gz%o z@x#;$JzD}eYAVl+1C-0su$qbsdx?^(09_VDZvj2W-Igl6WuPyCG3LRJDw9R&T2j`; zGG9dPwm=}@7~l*6|2TJ9A4*}193q)GZV`-WxW&BAY3O_Jg>R3!57wrc0#1ozaNyH{ z2XH>`DS>)$+C^tEXS-!P72Y6&F>T&00>_dvtEACIN{l$d9ZFyx?z=!JQ+?W%;(BOq z7XcuJxw1}7m^-1VF(IcLJo{GlFXI%5JrV{^&#Nhw6g>_obb zguU}qh1`ml(YuIS6p@JRAAtqaz+hAzZn4gR(WtBu*D_e`tvL(2X;J*J^`&T*z=kr8 zV=59Dyl2Av?nnO4o`nn0zwR>phNp^Kgbvt<;r2~Y=iF|8KK()RB|?NjY?9WD5Mr{6 zPs~GEsbzaaXzoH($9vPaDRL_Y-<}W`dY1^q#;0fjDHM|;&l)`leeqgE+#Q4xT(AAm z7>A1kEjwfI*eGbzXe1pA-e@fA$ION`L2 z)x)xWGSj$&dV5b)Z?B2I7k$!+3}Gt(wCUJ*K?kt2*=r7U^@MZwfU%@tG-m-^Tgbck z)1^w(;kuTKyVk0+!#dL=dz#0<`PNkB_-2o>690j z_pT&i(Ipb%&%p~)fe5VObSU=e=z}#`-=f*PkZbjx! 
zp1dpAOc!st!w ztny>hU!c-RfbFnYfoE040Vv@q#&1NDe9)DMo2JSNnR^_dSqa2kZt&)O;jnouar6C- zqrC1Y)^FB$OKxmoZQKiE51~5XKxFX9*FTZ%+w$fP@$7EhId?+h2qlzHvVZBn4`xJX zDSB(HP6Tp}4_$UUoT<3%C^d!-wFnOE1vA74R@lf2*Wi(K7fW#Q*!tKU@ttx200t64 zo*+dPFaPm$9JM)4V(|9QYJ({~40|~X`PkAv*sbYgdaj6g%f^_MaMA%IUgEq215tnx zdd{9RIk3qouZk?QM5P?w$e7}@>6mM2Uy`_V<0qO1dm1N~0RNXXkF)-_fFVG`JBAzC z?py5IoJz{Q9wxmSH+6^wIx&EUvWj)D zTqat(mJl;$#k*@UH36dsU{?|3*e_}$hWa7mp~#k&cimqnmezDS;59aN)K?Urov9LE zDlZBV> zmSGtUXU6m5K4lk}z!67@R$z*At^GUaz&RQ~Y4gTZVp}wo5KtvNE`3DeQed|h)BBX@ z(Iw6k%5C8aAAr>%hvB%v8YuXd9fs!ah^7H7R7q*!tP>oOXy^vTYD+7p9K2k1hjIvG zV@kw7aHDPK1LUruT*PYXtSEvfO?~I0B+N5gO(gf0#ncatx(_9q2_#}n{2iinRSgoB zN@{Zb_F=F_@!~VwURulQCfJ89|c zK}@QYPgKD5Fh>~z5Tn&Z$vKj8Ox}$_a8B!?>*|fF%7e2J_n+7uCwZn>{HZSWAs<4p zh3Lo=Yu66lqy0BlK60W`@*G-2@xs;0>-vMI?&C`(TXcC*M=D=@-w2(G&x2*H7oo~5 zW~EZFnkVNcOZo4&Iz1WSe%et7;SAYdl&~uw4^}40onHM7iu!aRxmaRBzu+ArC^it^q-J6xmW@t{eCE+7~*S`6B|ouuu^E%_XQ>MmGxI zDI%MT&?uuqdn!0;xN$oxewZH2^_`66#&{;94lAJ^;acS;`)L#Bi(~%fGA})EQ>$7x zzCqg}UHS1pla9_((Ba-Wujif{d7^|i3_pc*0R`hfyrRmF5 z>HZyU{zwm>CYwt*f;(O(EB?1FYYoLgK;b8Cepc_1(tuUKtSvv(Kj4dd67!HAGtoN;bfcQ4dAK^JH*)Axae4%h}3wk}szl+2b32cxzq%i~-WD>XV zL>8Q?iEWOa&iz=k$1RSDvCz>K87s)F93S4#O7t_M&HUV1Ijl@RcXeE{9BO#3|JFQV z@|tC6V~_i?V+4o|KuzMaeAyJd%<;vGGLL%H%oL+vkR zCaRgFPM4?vqbrvx&(NVOO3L^%frA}PI#36Q$3R`xU5s!#-$ECOwU*t!x0>=NdjDg{ zDAVW^VQqxz!-G;_=ou&-nN}A$LUvMQs(LpeY_^~8oAz!T>h7h7$hpm?((m%je|nb z=5 z>rR&7fdBl`53g;zod?QN-=(NS35_fl49^$K#Z|tjQiaSt23HXT_iPNJjtsMwn*^X& z6XXOC*_hrDa6LUmAbq`>ELB0WO?cLJhw}1x-{gtd&HfPU0OSQWHN4-6u&+Sf#MSlE zHqG@fq4c|9cUz@1NTM&cu_zC;rRacV=Z_6R2eBU!zJwIQ96YSh@V>#?X}lDa;!$6>2J_&qiIC>d?TDLbDJI`8z*3zTT& z>HMI3tP`^NVmRq83}!{9YYbaJk^q#w6Rksm^5ut2bth% zg_srOk}B<(O8!6SMl7njNGURUFW~iFC=}iQ>4Qz^Q9yb%~I08^0*yD{|%&JUag7UH1>ldIwBLB}(wJC^p$e0UdI zwC3{3i905%PfYJ<3o){12mk;AD*>KCYDxdED}~Ui`yL@b!=Gc@MfD=Bc{ON2PB7wL zLcu`8#ytnvH#jrVF!>z-ad*y?6^MH&C(OaQ$ypBlrvoQX)zY=%{3!;BBk#J!}EDHBdC{K9KEBE)OYezn#DU1ngN*y!;!9 z+5|&p*xgZ(GkyIX2kp@y5yp_tglgeW;uW|x z;?-c^UG%;aXe3l(vXR{8@^Oj*sF2x4+ph!m7RzB^-Spt8Zt8}u%q56>jOAqpRp46e zxpc&>qe&qI#N%~!9uGQ7zZD0yp%86mPw-#gif*@EDh&vBx%E9fn0o`4?-ANPLbCc~BS* zscyrLryTm@?82A$Uz&#fPT|$kq7d^q!xV4I6d-U3TeAzQGYsAuJw_>x5l=s@03JtyN~*MojgNlrQ3&DHi?eoan)5j>ve-2 zjPSON000k#Li6s=|suF*VPnD`JiYNcXhW0fG1wm#mts83xnTzI`QmR zeL5-X2=x=Y{dYtn^%F6pui5+syN>fo8#ZZ37;_>Bw*d$^l{UKjKh+0JrGljM_v#cHH4_78c9x~hvRB~@3ozQTY@tq4+0B|l z3ke=gIa57B)=zsR?A8FLItl`i_nQ!EwCVzc$JjZog*QrBueh8=WYAuh`Fz(!TEdL7 zsW~0jH+GDnkh|FTo_JMYDA;-w6tBU^mQMP&)MXIQJb2)KE1n~v%0)6cvBDtfyatXE zwxAP66*B83tD@yMnIthIC5)?(95914TKm?xjKEEVg0(;WOQB|QFX#6q;$TMaKm}g~ z!BF6dh3uF*u&S+=O_Us1dbuZSyQNM`oq*fz#iQc3-MnCAN$BPczT74eo~YaCkT1ls z8I1Ra3!C3tIBc6IAx+$Rlrd{1+9nZ|;6C&2=ubi*78=$2YSBnm?#6REk>!8;+Zf<$ zcuxMgse`ULpz?fdKf?^ItxkDKj4&5dBJxTJCBLm1CQREi$-PEi3vuV9>}%kH%k&-c z^L+JP1pRc!2+@GYueQYmk|;IfiuyXb$cJejTeqYJVOW{^ubGP03L}-I1?-}tQpM*j zgT(c>OhW0h*bHo4e0Sp4Gr4^5@XWz_EVD7`xG+=puH&X9b%GL%O6}|Q_qnxfCDNgG zaGBoyHb2I8LaF!?7hh7)7(`7-j*-R0v{PQDCbuCS)v3~KbPEHa|KD#3agw5gF%PRY zoDbzt?7a4T$fwO){uc+%%{#NlC^*ZcHoe;FAQ@os7Y2#eH){A*6d{{#7=#S_xH9 z8rSU<%1UL`@W+C)9}Aww0V0ssD4lSek{}OYUY;jp2)%$rTvsN>aP%;7gmTAi;Q+_^ zHdGhec6&sTd)A14(>5^fO|zx9k+Y9Dhni;V3TXWB-PN&zmCt!oWs5fSryHD8lkr@! 
zd|Eid?4|8t_6tN)dXMzNw(GN{pu?QL_C=+4OdS-N;sW_nM7a};R;=enOjP^#bmcRt zpA4#J*;S5*7~OLG1|6Q^B;1)iYEsmpXJG zR9;!p125D<0u@1qypJvh7zp^Y9w|{@*PFb>m$91@&xDlbe(g&^>h*Iy*6o#E&UNl% zVBL94l9UO%%9hT7`KTzsB#-+}3;Li$yUoe`$i#pd!nQ6$E7PA@^8ssrYKFA$b6RGH z7uih6<+F>MIPN3vpj$FtNxFKd0)pkt-O{P#-O(P(#t_dLcUw1zB-@u-R^g7)ViEyO z=|#a+M6}MlFvpR*j{EJHFOy$GPn;S6V~Ie;p98Y^7Hs81^H`ksL|)l=Y-*=<_U9rVR3_V5$qZE;>^@(r#3RKT38uzTbUZn#|o=f2Gla(VvJDuzUS zugnj7g>*F`sR{q{QOI=hcXKTiGhJO|={}GkVtclc$u7T-Uwf4t%BB$GXRsqaA5bWE z-KK`NIzcRAPT;3RN%kV#pdX!R7RaXHzwiYRpS6DZp3?ef<4Gy*FfD{N!e@u`!M#is zDy~$h>3gq=F?ZpS*wtTH6`HL}$2R9LytS$8Qd}nU?_MGm-6_O2U6f#q@YU1;3NzCv zn_LSpt!p1^yM` z2OyV`f`k$L+g_{+!UOS<4S<2W@<#!I^cjTU)1w`fUz4!T7bb+zACD0^To>!F%F1wt zU+E|9zZ}ktX=g}XB#Y$1pfOlMCDmfmPZ#<^-|k?$jcL{{J=9b=jN2@xBOc#2Q2hG+M+} z!_qV1q`3g_GBsad5sJKLOkVAZs8pjKupBe|XM#G>fR^%h>3eI{aB!-wh+eAmMbrfl zN!b}+ih*RK+i6FSTP@8+11q=xIkuCvbjMfv>xmB`%q(;%e>T1lBIda!M# zKr-yu?FpLqC^cyfAXDAbm0~;VhERm}ASXIY3>XvFkHMeV$4X<1-aheeb5Lv;NkhwI z_7uHTj3S+`O!q}@Sj@!W;idEnO&Ffff3*PRFjcUa$jWqCn`A1``22rzdOSYU-ZPRw5^0--w_-_wS`jZjg&_w(67-<% zYt?BHetu==LIkwLYfZ)3VtWBvC07^@%kL@d)K_z*xT zr9}-m7+y!R)E%RCaIo*hq#&zh8#QGpGl1s^3%S&WfXt^U<#X)KHrW0*|CuJijoM7W z7GU7#au*bO0je>k%mijyH*DPLu-m5z{DbHj!TV{mi#JdQQVNRN7L54N1ByslTdq&* zVfGT^NZq&+G}^u3o^Cb;Z)mb%5D|my803$<5nAKmLr5Y&^q|N53Fr@={#8pev>+j^ zHEUPGQ=z#Jl-;HQeA%J^Y553yPG0IAu8vF*c5Fr3GBMl8 zhjO9H-~n88xgwyl6Z<(N|E=ynxz~cInqI>s^MS&gN{As14iK?Lg@5lZC-svCczBgt zZPLH){PEA-!zl@p9zfgmGo+n{QE)VoN=F_4?_MrcenrBgE&vKQctOvPXoJwKMf1^L zr#K@Ae=stgC*fTzE>cM24Ey@;lAa`4uQHiwp?i# zbHxpX{Se#VfN?{BFta^Lz=dy1QVbCD0N$HbrcnhX@z1c{vPhH)FcpF5B;p#t6ZD4P!7gJZ|vZpiQOabk6RS*&xmHC*_ zpLjpinI}IP7x+0s;~o|Ty`B^=F^SFyjOJ6htD&7Ef7Bv*f}p<*Z;h$(qpWL9lphmJ z=_Rpb>{1WLR_lE-ki%?6Zk6TkHwaOmR`rw&v2vd9T0CXEgS(gJJM;F7vkGs%h*zcM z#k8H8!kZNRcHvX{)&GxJf2Y7mWj`z)497uy+?%;q*)iY^oYk_L33QdwNIpM?6NM=x zs7LMLaUheK>AqeQ6(oIx1)!yzIhV+S_oTqY?% zw`&G#LE$lM9Sl-o5{#LTLXra#_3Z5Urb)>gf6Y2d&^d0&_5M`gecy-FH|Dp$9Lt0W z+#IqbqA-mM`PQ755Lp9#5+AL3+8bMOH?W^Zf@{GplfsYz+A9e1Hk&We?2qn_h=w1L zg3(LQ*7r1;kwM4ZL5phfW)PL((?8WI@!^z*fzc>`8 z0F)xsTZbx{>~OmNfx(xya?D=6YA{H_CPDO9-gV754|Y#*#VYdAXZYyg6iTDQ#BarIiw!?_b+mPw}lc<+l*!d_dFTPQ__;s|2^xAnx} zLtI79j9M20+~O!RVEuWQp3@##XzvKC1eE3<*ggYoZ9+}KBvVtpQSy)26m^G}2M`BA zeY@Xy=TE)3vXL^9>5tLlFQr~X6scN4qP8ftdtR+kG4Otm43Ohf3!zwcHQz`5@{7zp z6LhBB7o0xt^k8{F7Op+1&OTEmZztVd_pogJs zfzrTJnoge~PS$r{hw~KVJ5u4Rt4vVb5kq_l!}Yfq+J0TaECFB#yqE~LUXmp~utdM@ zS)M5!e#Q#)!Cq{oe^MUOEK4b!g>vdjcxPkXMO3Qy&4*MLJ%U~U00jv_o@7N8FaP;d zI|bm<bn=eOn|bdV_*f(_J@kXsEW$o(RP1GtuP5U}L~m4L z#o4O0aR}LU*uJRFdtJ?SO~$?xcTn%5FRavk7UiKUYe%;raGMU)P=H`Okk4RL9srRW z-rtH)pE*S2iYcVR2$e!nG(+G{?Tu+*dkh^%H$DsAYUEGoW8=9!?@yL`2G8^0TUt%Q zyu=HdFpmoJLjnCMe%V`53^XR@y^%jlVd<X!8&dz6txM6)(O@kuw9V{ovEl^v*2m!7-%}jA3)>H?1Xo%EgdK zVis!v;HldF|L^5hb@K)KzyX?>7G{XP2jH&*Y7N6J+59Cl^93)CcZkn@IMx)2_SZUd zgSb8yZf@=bzh-{}VwytlRjdJ>uLYZ&!5@0v7MkvWOYwk}-=n=G*+))FP^N^n{N1kb zbX+88>6gL3_E)#lsUVdywH>rrgsK;7ZK?{*O7kGe)q)Z+-d zwYpnZLhWMU5CO&LeoT`aZR%~lNc%~oY2IP>A?#5wU}4~VbN*hT!}&dU8Y<*eq!y?D zKVPUd6J$p=)Ol=j=1D}0#E+Orrth2TbgPQUdPytBYN7ms1EvE)Qi-%v|&>QJty=j#f!lvU3-2eXtnER0{xtm)G{`Rk41qkjX1@VazuYqMs zxrHuHW^re0+i>U9td|4G(07Ctz@Ou-rd@)Dj*XO9<&AVX^q6<3V(2ezob>yjBz=vM zvaj*;xwN*oQ0Zl_*OfvTZjDeV&Qvpj%C39pJsUV6-s940t%cH)xw>yiJuH?2%E3N^ z+z_!^_-hD=%Dv811pG*IJJW4Q^dYVYb$Vk^5*2LwgJ(9Y7nZ}TVt)O2_V{Mq#%I(} zE>VFkV?gKvsz;?<{aODqMB9mnRBIb+`4OxuL#R8vGVtbTc-Uhv+!jnKwX5Ch5?Hns zK4E69pl}qFloLO(!J#{+kXy>ap|21i`vqekq;Ufu9#C+1QS+8?i9rrYEum+|&)5WB&oZKv%I@OO z4gNya2r%U>T`a}B=anhg_<3vz1?tuIjUg1x#`-okpnT`_hl5tsKKK-;1Gtuh33Y0@ z{8#t)Y=s!n 
zBLxs-3=zsH$-u1%#LqOC>;;d-b1WvzBB2P0)Xryl0007i0iJ_&N&l(!Uo4wpp6^85 z_@ zI)rwr<7eld^4#++n*!*Y)_=LZakPYH$H!hXzQmzF~~?*WfYuaduPhre!qj4*op z?I0e1z+!CCkNJZ^uqu7)PmpKd}wN0!*-m29D;5Pj+j>kx1@So=^ z5H-LdmZ3rWV!`)?tTG&N`S5JC5p05>m8qxi&26p<4TVjP?NHfI($A+BsMx=QUDlE= z=4SKnn}J3kc^$9QxUt=mkIF9U`yhc8%~cqo&|u zc-IQojci7pqBU5MrNt-wJrIvJs6($~eKmv5!T{|Du#un|e!BmnKTw=RJ^Vps4`dc2 z*G1(hi(E=X5r}`=d^Oqb%vt~n&2NSDJ&QWsDKo%p!PU`#N!1$9)j(n&;}1@`XQxp@ zyISDYN_36ZM6(&{^ilLlnS@6^PwNj6V9k^vrdwdWcWCaIt=h8xWSfa1Hv47fyGlsl z*03An+(>zodM8=Yd5{(D{Y2e>(Fov#GB!m5g2GYK_t9O=Y>zN8u_k0i=8RjCjk4_k z00RX9o`q^gfA0VzT!m0GU^K;Jm?Ru+a0XP;8y7@1mi2D;wE27n;Wd=%8kB@czZs7J z4Rgf4GSWB{@q4OG)Cz|ugXU6=8<4d?TlUQDMd?Y1e$pI)8#8ak7p?g)V;7{KR<*f6 z=_qqr9%p*jE6$*p@4sKW(dfac(96xfPET3^m(;>Gp6`a6P4LSzO04C;v^&(gm8qjV z4L(x)i|3VQqAHI2wcNC%JewRIK5niWq*rHAY60FlRNu z>+Ro?Wn4JsiBPb^>Z_PSr%S4{*i&UWk?mc?8c$Iiy!4~39&NK312aLPwFeKVUEjOU zjo|qBI$i<9Q2Q^gx`DCf3t^|TYGGhtoa@sS&E#mQL)&p*sXS`vn=w2W{_u(da_qu>q)PWR5`>H!`$9HNnja4LV+#E z5d}hbTE)6-OBsE_&CpJ;dONVUKN*npb?GKFyt33^U*ArALp+nhJ}kTv4O|f}J+Nib zmXKB3@ELSD@OxG=lkQ;OK)J(!q?!zC6NozY|DJf)NQgGH4GQF1EJ zK};}PVG{fO=G-z;hcKb>*!S5Xh6K%!#m`YspvfQN1gdFj3iYjm6znQg%KrZvPHzOQ zh;oSy^FoauH?mWy*br{?$Mi4wnqU(aVPi5G53ShLX?f0PN?4! zM8ULV{<%fl1xg!$XI%Mjj^0Llr7|YUED#Uz@pW`H{PK})ha}{ziJjWrvw|InoKu_W zz}bE^+zV_s|Mf27#2nyq>2NUp)9{P1Y@1&DftuYD8e_a(w-bvQ9|+~-$IGJ;000dY zL7Iq3s6lL*OcYFK!20-W`@sjc>QsC3EMxO`mM?n_m{akIjH;uc;BBRDpZTnZqGuw; zl7IqR{bdOp*bEmcfhSt9COQfO$H!!23%_`WqxNeG3G_=vCW}vWI9YjPFWAWIYT!{YxY9~YR`D}j#w za6VONnwgRf?n+MY(DVTO&ZGm%6fcYu*jhAiF&Jxe}vYIOZbL zO4B$*VfZC`G<+fNt;N)yraIu7=P@d0-xX@wGx8e%c+*(f%1z%^D;1~E_@1-VZ-HrX z6$Jwo-p#M~8;Dr3@rl&}?Sn{TXg9c6>*s*;22#ocOO9CCVNBHHiYPT#7`HNroPl)> z$#qKCgQ#q0iN~csAgeioe57rnd=*w zW$$$1_(Ey@M#HwZ#8QiH1Yd7YE80myyFSvTbb#EJSo_XAb zrOh-4M73JCrIi)%X})cwRdXc3fp;CRW1&O*)1_@ovjd4O!68-rH=l?4E5r*Q35)tR z;-WD8=5kzbi%XUiL`j9)X!Iw^wQV~s?*Q3n3m@Yd?|0sHlK* z$U=?ko@Hb#`#@^3ZZht9Nfr{brjzF&VWYzF;U-J0pjdWw!Up{5GvX&y(zU(Jl) z3vSU2oITK}>q^$=ncNf^8O=ZO(Ud0y=S2d_zX)TyB0+8cw@2v0L!4aL=K&aB5rbj& zTtI*=0#8(PO0j(7R6|zM%@EUKywg9-H2peU zLo7w=`9FwWC0czZm_CBb1< zVEB60*UR3TB`-wJb}F((Jc|?e(9HJu18699&=-LUJK?fIS-Ne}+J6FJfqy&EqA>tT zXH{F)TA-%(@R>8ai!1hV?FF$$X<4DloQwfnopQ7XC< zJ@`{=XMOu4daUoTzo-sm&dcw7Mx+1EWXh46JWAFtBCgRCrVq)W&HX)%lKGLEr^EaW zB}I^PT)A=%EN@mrrUlqih7p`KeA;tNQg0UO%l$uD0$kP5mlS(hpX$5R4QL7FZeO}i zsRV7o@H`9zu70^9_4zC8D3zJgQgH;yY5Yjp3LnfOkg}4P3gPM=I_G1Yei(vpb9)PX zZau=F?%5V>?2iD!YP%C1xQIauxT>~3XVtp#{v(XBwsO+_gEZMqT$8QR2&C*9L(6vH z-Y!xviSL}|2ai`D--$>cE=SY@kaRG}xabx)HL=^Zba=Sh8K+px%JgB8e$fB}?V|5^ zhE54|{W%8vd|A_}S%DO+{fX{+$Y&3I4w@ihc|VD?3N|%01l6gdTf#uvZls+!AAp|} z4L@LsokldX-_6N``j&LNf5#Tg@9kn5rRZ58FLo#)=5jC6efgVW)-U9kJY-!B+*OxP z4fsAQNjxDS{75}%Etf29gL6Z3%9ku|XE4nbV-&bk_8ZL$4Lt_bXUdx!-!JP4W!@R; z_`0_r5%Ww!Av=`KgWZ1Sw<-_El5E74*}1S}g!S@W2&e@D^F-Hdnjh+tEpCb;tpF zZRbS2lM{J6`<1BX7?lh=8hBze+7_Pp97Cw6aMk{>7%iJCy3Z*2n#AtN5DPJ=Z19_1 zLGLrGHfV9)q@5GX&%{+kq9iF^R8m(-b&>F2)$yeXX( zUx4fhhHm;Lc+{z?nTqzt{I+H_2pyg3%z)b-LO~kmYgBVP)y5i{>0KfQ0y%^gQ3G~} zv`Nx;U#0xx}nV>n2UXc{~M;^w#GLwP~1heK- zHzYvL`!Ei;C1aGsRabO3urKq!lqk;35Ob@2yDDUw#R5>aQGwVSPh3=?vGpg=e6+Cp zHANyag5@YS-0~Bi5gxXC(NrRwYT|;`UCXi;M7h)$zVx}Q9$}Hz8I24gD$p@GCll)4 z?8i?~b}c$d>(6Q*RP#RLt}R)bsn&2jev0gz?I)dDM_>L_OHl!lGh(Mfi&a!V7< zqpRpHqygpxrLS(*b%QxDg#{Z(>-*!VbC4y1o`9KRU->icz=O&XM1lmIU$B*)hwgy6 z7{jaAddRP- zOJ?YBxY1X`L%P?G1e&(_7%6Jl?)2-q&Y?>7_=25K)+9?$Oa(%nm^B-aX@YDl9TtwK zt=oMf($04qn{Pn~2mOCi>6eut`B%e286BrB3)Lfbduh@7-6p9~uj8%tyRRJ+5fD_U z_;w;#Deijc;-kU`rio`_JJ=AFow-nua{{j~@E|qT+#HhNKyh;1CT5p(dSF=K9l(!p z6x+sD*CD9Sx#eRoz1oEv3M^0}F?kZ@ z{vhYjX}BO^SjX5h8cXp>)V=ATic6!;fJvTDlFXAMNAHM 
z>gyG#7}`9VZ=zU#f)}elSAKJIVB0=24DF{3VgsIW{*i1QH>^93t>h$jDiKNCnos>H zd}K2ZnEk$r#z@EL-p8F;ih{Y%!2kdQMFF0tYC<3Egk;`M(3iOwJpNCIZUPXv@Ajc4 zv0w%;q1t~Jwb5FA&Y|e5=xF$aaNpGzVGu<P0?CLkydN6!obB&n&OpcU&%D@gG#gOcJo|(9CDherj7%8@ zI8(vTVn!+|m%-PhU$271_cA}o25Xy6Pv7=NW*Y`XI#FFW;L~jb+OI~?M4sQp9b^Mk z5sqAkM6!;O#M%PaGNEC>@e0BG!A~|idwITI+jpFYmr$-9eN@w?C5mue@hkatvOi(7 zYy&w)N5sOZPkbL(;2XaRbCn?me=l%_>|Oyd1Vp~wcB@tSi!`iq@gYj{Vb=#qPT{eN zXy%+g1&6>C2?_Ql%r^8^Hj=7;T8?b$ADH~0EtC%S)$ytsjn<+EgZB)zH*8l7(2vnt z>D8lo5Lnf(m%vzK{~JGWx$9A-tPxCJ zQX<>%h))v6RE{Zxzyd-cT*NgV9xV50DIF!fR)pA32ru^V z7&lbGK~$_83mOji{Mw|pBVJ({-UQ8|Jj~P`R57I%JtV!VtQe`k955SW3^IC6r zOXBgXq95-Ja~Twj*O2UFtkoulxGt1bdkK?XXBQf!-d#gW3|T+|;|(G`ewfCv-6$7> zt4sxV&pmzUSY=|?!9;aMa=yL*ViF1KQ`}?QA76G6aOFMv5i<Ypz=l|wk%tu!Us;Dq#exAIOBRd2mCz2-z7 zY|BW$5IJWF=0ftj`-gPe030w$D@|+2*v_1AqG<;gKTvUu9E>{5rBl~Iw~;RA*_0Zl z@3>~forEOfIS_+FLFv}RGY}k;d^X*GJ>ewI*|zvjJ^7SQXEfwC_|eRu9Jo;z9Gu$ri! zDBb)p77_(JNb9sIJvk)r%Fn5|Ai0ta(3~oLeD_M`3kNkQy5|jg=t?G&ki@(j7S`lt!;(>D)m$@_i*z6$tw5U8 zBE4n8&_z5cj7a<8UM;xB(fJ-wjB&bJf^AK9pJrbpbiQ6W#hA#U?kq;K?DF=TwYwad zU%AUP1RCr?wx=%8^}BYChZy*_1n7+ap{ft__MP_)H$dfNdo`3JC!cyj)~FG z>H4x1z3Ga87384w@dRu0oMH(q%{(d(N>s+gdOR0@BuCn|pcTc{CsN@r&u_Ll+4SI(frxdOYZZgYp@jk!s zBS{2JA?91&%V~pH!5&~W(nXVAOuJIv{|hKHHg=>@Q4hpl;IpVRlb^d_s?}^Rs3BD1iWR@>vFOdput<^7`DWG`0m%5ADexrMjI(LlJ~v8lKuG;3 z_<)1>yRhpI51=h|u1*`>QxAit8(r0D)Q$zbXT^Q65e0ffk`=Sqkm`qM0s$c)1ofuT z0kr3tL)Q*xge0}%Y`+&9!!*$vMk91MXi>9=nLqB#djrT>#^O{j(1z-Bv-JiB_2L#BKRW$lF zKlMc^FOrTqVwLLg}ShWVya{8~lg1~M1*5?JI_AXd5 zI`%gG0`{%Ua(hblpI5G*U||NtwptyP;v^aD1^tayIEt5qcIXr~j8SJ;tD9^1Ni0u4MC}H-?;fU4T+O zVT(q4#nb=>!2Ragu}}olt}n)t5bD{&b;ol#PN%*Hv1qe^fV}h^?)&M@L9E$j zxG*pvNoBq3__cB^<#7)q<;`V@hgnY~;@rv5f0%7wA7Ns3-EXod({^QXf+U)3z6m}) z4mG+T`C3TB;~iJ>V|j4WijQLfzV>Qdgx0+Dp88XSiV}*<`bq~4AW)OG1u}jP1v8gB z!kS_*cpL(s9G9rP4%AvDstxU4i5V*`W4~OJ{;cSXn2$L^KBHdf?(H|H;0mlm*BYUS za#}6P)fHGf0_53xU%QRR`F0g%MkG)4uoxUKMz&jQYD5i?wzf9fmP{=mldP^c>s*g* zVfgkoKTj;~%VPk$`tP(nfWvrastLIk`MJIj^KqdA-fZDxGEzARwcm9pYV5(SZ1r28 zjrDlPCWR4uuLLC;4d#HSR@ai*!zguT*~-z%6gY8!wg){9!EWcZ zOOHfS>;EmEw~P!}VM7NCny~-P!yps}ZJ}HH_Pk!Z#M
+
+
+
+## Citation
diff --git a/configs/detection/ava/README.md b/configs/detection/ava/README.md
index 8eeadf52b1..5fa66a4c18 100644
--- a/configs/detection/ava/README.md
+++ b/configs/detection/ava/README.md
@@ -4,7 +4,19 @@

zGvaWXOg;eDa371t;5=(rOy)N4r9;pTZ<>vq5Jw5vJg3ibCqPU=AL^~J-Mx(xSS44>y0i013?HlU^zK`t5)^VhmK(};t zA;ptqj4C8tGdL3zR0(M zGCC^HF)8u$JLPxmv{HsYJAup)!xcnNd|klrf8(KlxEM89UM?Hb?!!KVL%2R+YGHe< zZ9D&uTdKrO(pujniX<2{iuURN`nRk&;MffH7Aaxnc(Dmb6I1AZo~=+Bg(q`XW9Nx7 z>ydm_b2{*NS!-QhabmwtY1>hyjEm?>(O3 z)MVaEm9`~YpWoL~8&TIKO>Vl&Zg*=MJ&l)E5v#DxX!_Kc)95)qyZQ#O5{$WZU|yj) zxB7K!cTvdDpk7B|L+NY*o*P)-WWA4c; zfl)1;$&u%z*YYuBclnHHlPYOVQ&iqx*wd+klG33Ys_B&#i5*0V-DcWE^jVHn<3`}# zesJ#{WIKIX^OGHQR~4Cs~K#l$i4A^@gD5CrhYd@mtp-4W)4+wvW*Lo9AS-_85NygUNCITi9y|i~m ztpwkmdX-a@l_wuJ9u}ar zK(nK8v|0yT9cnXN=K6a>_CL*;=Upm=t-KcO99s%p2|}{Lh(>r32A=j-@#k(7Oa|FP zjpMOiVWnJshu#yPw5JHmmNiPjyqe~@AK}wBQNcJ7(SFKjuapkBOjPbY8~AvR z9yK7^G3W|FlrN9o)oYA+RQkl{xOv!l@jd z#{rW&bFfXdBi6y9R3v0(v;rUkMJ%M+G!ONLr>|tl9Q{Ko6 zPZ>yHhGxHdR=Gv{?qWu1%&L@-UtC_g6k8b!i^%gPh)=O5xk{gWq5O zB=^DryjZu>CR0}~LGPKCL{VrB=*p1fuoQZ_5%f{$$1UUgL$LM1_e)X5v!O8z#dkB; zHRhv0VeBQdl|54RuE6P(aHek``!5p?k|FdeGs4AR>Pri8C8$~AMqP3a_KBS(H8+5VIUrEv>F0^^UW1&5B(qW|D z?2u#T=n0XuKqdkpNR}5#8^L8FQIA3`*aqy#aKt^S_ZTeauiG{Q0r3?1pDUpYl>Wvx zP>WFq_U2tFQJAL(jc;8?fNsg9Kii4$+<_Eo#;zZ&ic27I+~&R}+nCkR2ZDtMO0sfe z@VplGsBqiv#u?_GG(?jRzi!*ByMRXJSu%D_O=xcL{%5JMM*ZV~CM&5T{eUe9VJ~+) z@wm6vm}`b^-=C?TH_{MnW2XQ#ijksFtU^l%3Pi;kkW*~9r9ydFT%Y`Z-DS)aSf-y4 zKX5L`q)f7uR+)0?v1AOztfBss>HH$fZvT>*5Tf(arxW>Z&E+=>4kTTIh1Piw-})HgR3wv=e!6;+uPa zZ?+GKS!HJlN=vAGM5>W(U){CM`%ud+?klwK#Ms82*xPY%s0OtfHg3`y#2m#wp7=NV zwyK^{9%6z|{Qzu0%Z0`}w+5DutdYJJVdH4+1Pa=xQuFLJX|A9E4N39Vp+Nqc!2)6S zhGYLeg!8scT(L!6Qnn&IDcizoKsmJ3bn6V+i=EUVzQ_Xm1364Y;4}~eE>~(4wm8Cb z-)tM}!=#qkKIGX>Fmig8CQk@o5|_#ZuB8q=%-mU09u1_yzImKFKL)i`Tq^quDjH7^ zIcugLzH)-3Q#Oepk(lNRCs0q#b!S*Y_pF0$jy++U7BFl;>V@Mo*Qj z2TRG7R4d20GYvbP%~I54$w&f)AtO*onmK7VfXGs@2!z{C`k+N~J% zv1A|W-Fse{Hw(DUN2so&39Q;a-R^IahYemzG1sV~4@i6pybE;ECTY+zaxb$PJKW-TjB1}OT&G?0(o;3|@sElT zZWR4ZWTkz%ypn!%5`YP8f+YaT_{>TXrzch#ioZ99o)H#jr|>u2Vq=Hj4%m-qLhPvE zU|8;j&mV6>-)yki_B4ztDtW)*wVSCLts8jTQ(12^qIu8noz$us)*mR+M`=)t`cr%P ztW!G>oA9irN6|3NXnS&7MIUxckXw0ugOlP(i-l#lB(EHfH+lxK_6nf*?m9%}o972= z)>*OdP5>GmkZ+`IWT@CihEFn4`Vq61ZCd z@NsBC_E?;haS%VGm!=NwjrMwyG}!z(U$?%O0QD9jnZ;3-J>>4P@8V$%xAaieQB*8T z?rIM%g3D2hZFs8iU`YRp5+-hM5tf8uK=vlc<`T~WgM%f*^ zV|7lZ2sNQaaGBgoa*7uwy6&R6WkBzfI}n9&rZdm1cmcQ+Z|JSa6`s78>lmGBAQ)c{4xDPIhxZZQ z=4C-FQ=2*5SP;eJa!%t~q#hKZw9^P~-3eL4?BTeqY`o7 zOKYN83DiL<n66udz{0Qs<^lS_h|EoNa0^JH35Sp`Ms`}1ZLRv#Tn;5WgB~U*Z zRDA#giCH@{P<9SVF*V{Z<`#HdfKWNNz~I4qfLlB29qoU;#YuGqr4^`6CQH9GA$@0U zOf`@61shFa--%lM|HsZ`P(J}c%Z0U^kt!ol+g9-^mbU}Zr;oBpvS|95pR|-QdKMf5Cp#wN1v{bZHA!PtFIZ^J}a;<|z|F^do%6%a_~j2lG1XM;p~o$gBOP$^HWx`@HNa2GE!mg-4%7TJRLqFB%v8v3>s zjmHD7mJ`roWVXrv0`Yq$0}%(mKcE7dS~X?(3u~vO{n_p?_R6^^>4z7cg#Z8nn*pEu zYDxc?D#&g)ZAJ%j_K6xw;7VHR5`puy%Rf4UjYV?p}WwNq`Vxo zWd4~)&GmLUXyOmwecGX(Ooq}U&8g&`sJlI&L(aiar8uIP#k=Bpzv<{f>^@V*8R~r? 
z6Fm4tSbgPek){Nv7D2~`fym4h=rWuht>fYTSZy$jtax?*djj@9=%S@jnYEWgAGvCg zaTcr144H$c^})p@0D(=X28Eu`Tpl&Z?&a8AoUs`o0z!V=6%1D)CnUxndiW$%0RN|| zh*94x5jpy!7scfW`1X+443ZHn?|f1#bGq^8o#M$JoEG|e^t-1J@bf4M{VbD2W;&TK zv7E|9_pyUl9vPeV1C%VmYWbCu$c~3F<2p&meA8034^y}0S6N7vwi000lJL7U)7 zs6lL*ObGA$z$WgKb~L=+N4#t=r8*J_>r^7@urTxtvFFFA+Ja&jU?B!w*5d)2WH)Jr zSF|bu5W9W(b{T7}Wu}<j2i$R5oFV%@&b6u`2b;72U)Zo$ju@S-wlNF=I zVr+$pG zWhOT)YSrQypcTpocNl?Nx`jQ;9ccMj7lDaAdiw)gxZMk3hxkmg!v6daioPS0$bRKd zsu{B;5ED!(WpU_8=Tz4dDw5pv=g{IvE@d;sB|NW}ajZVXPW-6NzNuo@%Hd1emkhS} zXns`Y#0~~`cNd!UZA&*bXNuz~JzwHh)ALB{?!01!-<8cEY^UG#3y?=U?%yT)<=pGg}k;A(br##lA#VB$4 zR<$m=fWXth;GPx$fF$EJmwf5`e&{FNeX`(qe2VOZf(_ zodFxGcxR}MOG?3prl?_&;p3;KT;`H-v5ljPq-KDEv&AJYvoC-Z%cUoQ8L_GIec)zc zhCbSqi!Ohy#U3@$}j^hKQyJ6)EedgQ{NH`lIHl;>E}z7pALr0#x(Vf)2jARAUi z-3FQ(8IIQef~jZ6yP9xJ3L|dJ3QeeGHzDdGDdcN&{?^*}eAPk#N)a$cm1H3ywoe1b z%KkxTbAg^CE*G(_uvCn*S$DB8pSQ#)GH>0+E@PP4KD0GQ#?w>mOe_8ZZ*T-~u|fqN z`p-Sz-pdFxa6?*dxdSQqIc-fbL8oSn@R$D@NRdY&ddCZ`(|o}nT8!|iL6jcpC0nKE z$JPPdJ-2K)vFA0R!0?zbkP7WdLx(=rTCQt%%P|;l zgy|sgC~8X$WiGPD;Ya?Q6-ZkK?axHA`{0>WFz8Tm5~AYco+y6%*4DQgRak-`W1Ule zO?}f$VCeG^jkzykO*n7mN%=Nj5+`fp0l5X03q2e9Xa2b)5&2537bxbb$G`{VusGy| zN`FCdF{A(eRl(-~X8A!)OBynhm^W(WoVbhIy6uiT0qkM9`|;s;-})-;shWAq97!a; zhE|z)*)wb<=2$=rLjZq+a3ewt#Vzd|ba-Kjg60Q&y=VnN@wMQE>#jbIBeY^a?cH@e z6O)(r%Pz+ObmvDs%6wlXVLoUlF&>Ob6Dhcg)dR)%ve+IUB{#d@TG)!^B| zKhwXuQXTM*j})3p4AW!Ss-b1@^@^c*bMIQmGGIUoh&ahYYIabh;^?4T?4Q1)cvAuO@J_S$iq!syxb#!PsU4lWtW(j9NXV( zAPy2wZ1V~^+o&yS7(07F7kKssH2XGg=1i^8U#?Rb)FPFCY4h~~kr`Ncvpd6^tMj1a zcx&QGTq8Z}g`xXY_=I7u01y>OO{m0R2<8XCz4h0!-WAl?qh~a#1aeD_gBDy+XeQMQ zxh~&8b$aPLz%Wi|$6xm( zn`xW&@1s73qq^-)$JBRUy+6f*k0p^VGnmOy{W(6UEX*a4&IUNX1w@8+1deh~H0X4yQYmcR z5Qvf@=ZOM%@RSQhna^VbG@~HDhZ-8y?^5$csg&?_wdegLvN-^%Voq>L->{R+DDPh} zgZGpT))xU20nSwdmyYLt)3TPddCf>SESH_FNgvR zW@7$D5XoJeA2gELm&I1+_G&UUB9eLeKJojEg&ci1_eanodbP1lvgb)Uzh=Q+p%-7C zsh?@iYkN<Mr%!4VV@>kK?=c%ar#kd08AiwPv|+X|H5pmDdm zgx2_J^PNG8E&K4(J(B1fJbDOZPtz5(Gv?<#Ie0oh!ZbKrRz|6Mu5|iR0%4M`?g!&Y zxt>(J;K8zNdCyPG_rUlBkOo2hVBgYXpVZE#4c$jq#t<&{482H4EXMse= zSRhklj9*S`Ed_q(#TFalMYZnG0ra>XY8N&v%bY7%WKQn;W1nMYHW54+!2A^-<}!29 zLEoL$T_qGrMyCPZMs;TibPZ^zF25ilx}nFe9h&!a&1N0Tu{Be!d6=Iy;Aw=~A)N|h zh+zfI&lH9PtKX`Wx0{wigcHkiv_~T4sAxMMF6S6UN3GfouYllTKiakni0dWdd?l}o zgn(@T$1jDmmGDrX15?L>H7Vcgl)w%RMR!cKFlnozrw9XpuqfEiRx<|a+2n#OZ}u2{ z3qK8XcLQ>wN{DVKN;8daZ!W{34r!sW^ee}4;Cg{p0fG7JD(b>T_&7V!{84adga*?x z7L8uau*qiFz{Fv28Nue2c)m$Uf|+M*!>RTG=y?2E^ml$cahwr7reCWd#v}c@+10YQRB+^n}l+#X1j+-(f(L};@{peAPX1P>vpG}FG3(m7)G#c+(KA= zuXkGv*L$OqYxwqaK-8EbGE)gwxy_@KBZ$1EQg?<9U%n3(t4N-I6&Xh+s5|g&tSc^_ zk=$jtfd5pLmBwstd=bAOeszy#-|wS=JCD_nm47qd7jSo68{B)FFlc=o%1(+a0m8UO z%foG6nx506>40GEt5}eqDj4iS`_+<@k6t>0S&<;==FYMM3Q$VS(Q4H4`~~~NirYP$ z)BMB;SMFNcdPiCY+M1H;Ct83Ww<##^Af$cJl1C{Ud#Qhp{NPxPIwGFJbdH}he*!#* zftQs?_e=GCF*`R%vF}F}QLH##+s~3bLs}0Nivc85rS9>Js222q>m0$8}&*Mzy8X!Mv7+4t;ap6vn3&x)laQ3 zc{)am)O_)wDBFQ$6>XN+hj~>i7!N3pngjvbe63UT7P)T zO#E6HPUNW!!!qWSZ$3Mly2*0on-V-I*An`*K?1V*{9N?fQVg6F8mt|lwp`hKy;0AA z|3(TU$E^>8YBBn$DP>+*y+nm&#`>6_wHY|>V+JEh&6j}!h#)JPKpAdpZ{;cjPQLa^ z<$B5+i|FP%EChO4PvR*(c#BpX#+u;1W>#xS~+#6_ya zUV}lQ^7X!cG!^`iCikoZ@{Q6gQw^SW8Nz>BEW0dk2~xbnB8g@yxkyrL z8k*XL^_`MW1;K&OtI1@p@A)2I2M-BPN*TXOhGwDS4}r2Xqx{f82J=x=3+8ftVo}q2 z(n@pHCjw*Ph%el~vyK0;a!c?^?E=AUrU-%IE3Hv~W)Qa!f`tf><;nv7CGi#7BNGB7 zojf>7D3JgF1iwL^9z_)_ckjg&P98!)f}P6DE=X^L(pa;jYLc~*9r~akg79As(Y@{g z(f6jL$jVKL!Oz6+2y+pD$toc2IXoW_%`_);us@}ephFX1L6lL{=fC%P zSjt2a>V?1KqtFw>P>l^kEme2@@hl9eBoU%A|1o+{vp~MVALipJ@dE4 zV-!phw-WIJ&ABaKu^?3X1%iAPAZI9_rO)!T2@EM6HdOFZnnE;{>dx$5uvx!H{^xjy 
z1tO6TwI-na1FQFa5HAYa+W^mDFGYn}d23d^tbFLJQ&-P(;9gioaz4}>{W6`X68vrK zS;M|jRNZ+B=m$9^G%kY`L6%ug0cNei0Ztc%FGW6VHH0e*d7U`bQ%zRk%P3tvj(sr= zheF#$5FH5GtvF2*`Ue}TvhIYi`)RjH2qZx0B|cm?`YU;Fh|G%Z<&b4cn$Z2CZ4Ykt zi3tfg!S6pL!cTMl@e;`&5rCid$%_oUeMuT8vw+nbB-R_0C%fz96-RK7fJPBz z@Xbg1R)_1T9R37sbZ&PCJ}*{aEvXj5#05a@O zhy2@5=}bCoG;7t8HXZ2;>Ce4B%!8&5pnTbmZB|Ye{2e2wxW?OE9Exqa!;rm6tu+?{ z`wGEv$0tZl^$G2E*OjMW+O2~1ah+A@Q*TA4l1P>)ninYlgLItir&11 z?TyZ&n+tDbuOJrv(DV3Qxrpd9-~R;XB05 zGB4h?q=)pUjg|ZUHEeDS>@~MfS9VUbID*U{tBLFCH1Kz0*Tw2$tI|YEYIsxMQfIYfRVdW{Tu1#|>rHO9e(Dzg zu0_GC;1Ay1o8NjX5cBP)5ZtG^z7B=Sgsm(CUntXr;M#0!%(Xt|-*h50g$4!_irSW$ zv9mkZv^)tIUZI|+00pAHS_QbUN(3QXpPPtfy{y^f?YhY{z;GmzT`Zrh5~G5fxg znQ0t^K(#$Fu;jDl-5_3a5iKRe=XK*F_wGZ!=$SRqjf4ddEFQn|`=ETDvz$HoUuAxq zF};(e^bVq;we(%aliada_L}q=X zjE(Wx!Wv&=y3_I6a=!wyBH?{i%F}fLec+=}bN8C#HkXcaGnUN+*_33EBtFjf1{+DF zK*bD48E@VjCH$V-W2hvIhX5aKUd*6&+UTNbN2-jfDGli3DKT8dVMYzjjS{cts%v_n z5n^b#JwU|sUKk-Z3!qOdE%oZFn+l+EW5eAtlDn3RhaJr%(_yl1o%~u^w5uwVba=ZI zr}NK3gBy!1iTcgY8~uu}%6M6m!=h#-UN!K>k(S15vhe_5LRe$|e1wX`X$^DK-}~7q zQF6m^C?iPK704`dwvJ67(v7dKQUMd6`*o-TC=AlXmtKlZi<^*!C}3kW)%!@R6EM(_^ zmfP2)moQ4~fN1~g%BO~I!XM-9f-=xThf5>}z@O{C5Kw{&J_Uo;S(*{Z2IY1{OkH>& z2l5@9yv4P;*=mqfHJUkB+l4;X;V#HXhXmHrPfr z6Bp2h+pl%v(`q!=#QMjcsWmyHJv}2W%sFCgMqBw~EA^CD2b@>!DmYik96OzrL7Po2 zJ-4*R)*<<-p|hWA%IQ$JP*fYx^O$?~Q0PiBdAMRW)2ACYG zJ=9MrOHYX%4@>l!g0JIB%1?zjNhy^TM|2rV*+(}1s(zy524J6l z<1&gaU$9Vobf!VrL+dH}j)Dh$Xb#?0VTwS%B`qyXM*a6=(0?OINUFjX}q~T-fG?zsZ{kkS}<*8nmhB2%Ku!J%C&c*&1eVkLnWM5kgmO$qn z=*xzL_Qgawl$30I$Zr?$#@s7fBA)yYN@COFDESvOQd|oAKB050mO>U*)HLDf2rW}e`RbMbb_Uwu6JlOIb-rn;uvmE4ipROFW33ikGsb$URS_JmZ)g-G_;xM3~Lkk zRp%#p`+-QMVvbuHQi*oIurgPByPkdTO@!=la>?|RZGq4OG)B{KaMI?WcgSj{U2kcr zWrTQtouMVcom4^9#+6wdTjBm8D^|*DW8ttyI3W-nu=AfU=%2&;b4sfvXBX8uO-e#x znz(6M28!6QQAb|LBz38ard10O>VG@3^3W~nKi3Cp!8~`JWx6>jgJAtY){4tHHJKsmG^U6U2?(k}g;MM|y@OMPiE?P{=~NpqY_TN~UT_JL zKtq`PP=(av+iQp*<@lL9;s-=G1r@DvJptFap7>i4&-4aX8Y0oj2=ko`V0Wm75Lq~^ zM1s17gWVul@kxV}fZz0Ji{J+|84KYtRSn}%jg)qwKp3*LF@XQUS+e%ZfY%v%nmiG` z6eQr$BV>^)Z@DcSMII=aYF_KH0u|M^5teD(zTLfYpTPhC5ez|^BuS`2Y?(|5L;u)G z-fH#aqEx&^w=7^Ji1i*%D|P`jIKA^M0RH(gF+6?IbPsZ)-XSejG!417Bt_U1@J5o- zlaGf8rn_VR?TE$8c8P-=(;$M$>hPQes57xV#2nm=8$*VpPsbrkOVhS6at%(nl_D|l zzePW4xAkeg&aILab^yAV*HD1@4QGJqwXBz(x=!3FnxNZZw}CwngOIJ82L z^^Gl5aU^3X+;jxyK9SAu8gdUcYVfisUnDr~vr3deL@1Ygn0jn{I~M+W{y#I<0Fq5B z{PCd#UG*P!X-zp2~HYronw@a!N6xIQi@JV*#wiO9t;j4W_1Vp0wGDLUCZ zU2fG?T~L>pdcee{%~UHws0`mwqFjDZPSfAp z6(R9J0NjIBU%b%xBu-#5iOeQ^tANG4Is*)r58@$>w))4g`>Q7;FPkhngmYW+kk|aX ztF!Ya9@occX_ah&$TroSjpVr7^oU47SVC3+pzxf(L`itbC_aPYV z6Tjcb*uismAEX3*9l_7$vosW(U!9M!M1O$e0l&A~U zVb#5-R%%|9KnnzX!UctZ)kYkvPe|(4R4RuveJM-htP-e3vxR@-c?3H~c=(ig5oogsd6X4aDeo8_d-_5veTfE3{b6y=lEi8n)xaJ=fv3BBB#cj{ zMgP*}7t;WRm#W}GBR7j1`V3+b0QRZrZ@m4~Gg|x$NiTNP!I#&0vlCF;aC)|OKBD%o z9V99eEYjk_$_w+{#G$*I%p{{{I)7k}dv<_f{BNN7ICpU1vlM!muFO>)S@!O=dJ0Hw z%4n*)nx9FOSTAcI;Q8&=3P5vx()uj_h90AQT!d)BBjCjAAk@A|kQp3WQX`MHo8Fb! z_>1;w0jS9hZ)TAIyN8P`;N3rtw_yIeThK8{l_A`SQ%DYXVvOoAM9zbcHbsIGg#7T< z^(t`XY2!_)j>qaS?>*-xfT}C$Yh;a{u+ssH(Z=I6dEC17;U@eVU=cZE?9_1s|JM&U zr3k!E2yh!WZ0qkGa^Oi_;6=RCS1Fm*=P#M}#nQ5}i1R)` z5$GjL+;NcD2gevW%XuaV!%eOdl{oWNRV>0?af#N3xqt^1F!j)|R&$xWq&ULPxV{pe zBJjX{dy=LmeuA={Y{=bRWq1a_N?%Gfb&{s>XjY;-1k(JMqPnPy0Ioq6EiE@pI-!?) 
zto1chPoS5*ABQJr+RuPw?9w#Mr)9VUkE)9_9AV<%8}*Nq3H{Il{O5 z5K;({TifIit?RNg!KVcQm;SlTLJ={8 zoTgO+0HJm1q#{J$`(zE)xKU)Gq0M>4Wpg4j?@TqjhlJ=aB$a0D846B$e_H=;UkV3u zHONUC_6LMqM3QAtTxV!dg5?wqDiAs^s+%g12~!xyeQ;bGf^kUt&=ziMDq3V*FrTug zg$>PV*>o1UZ2I%ZG7aXU*E}&pw9*d*+}1=bVJWxx;0t0O45Xf!e;o(f*SR{^*K?7n zKIvCP=Ccc<-tC(JUNlFKNoI>7kA*zcV(-nk;b7K%7zGQ6Q79D&WwkGCHVTEFh~PlX z!PP|)Jal8Z7Kj=pf4Kc7Bry*ObpG4qgjo^N0H-YY#dxU&l#XD?Z<@d`A<~8$O#V0S za_CSmO{B&4Z`8wTXf*o^syhe_)BWja|4@NQlYq7j34zHWl4-b`{RC-_Ey}OfU;o%3 zKktw$V}OzYbvC}aFhqqfhjs23yEU7M(22ycz>VhBBtL*pP?OM6vM4-Ir?xoK0O-`= zgD$SOPn}+L^%0{y98!L{)F$WG@oUmdCC5<~7Tx7(X3ls}-?-jqxl`SDN zUZeKeukc>zeonW%*1sv^fCuTX(>q)NA7BJ%2oj2mn-85iwAz>`1!^`}B5l@q^2DK+ zG|0Zuo8_g3gB7_drn7R+J*Qq;VC9#*ZRb4G+q7z_G=gVzpJv=anA<26*epvd7!?y^ z@Jt0^WWrA?)^oANr&YL1Ql^}IFL_Y`l5{-?z{Q?ky^`pg4c?*IUIjjNZ}kr-gq!D1 zn_gs0nXo1uoM@T(Uco0dE}TftwXU6!CoD;`%&%3%Nc`Z{5wkhsx(tP`uvaskA#9ZB zfvDl{qp9p}wL?eYX0PGr>>O6~Hl(8PQ5Rrkk8s!(YAIQY#vyPz9@Kzh;VdB->SOgy zhD*E6Di%GR{(Qe9K)$fw=Y+?(H`+1VoNp*zGw)Zs zbzl9+C{d0Q{H1tHE|fuWW$fL~mgK&$`e=BfhJ^z*3yuS)EArGCC~kc@u8IPT69_3v zzT4dC%Y)Ol49oLY@s#c^_pa=M+N#@x86|MPMfK5vTn`u&?b!{vPFrgocDZqfG+K2< zk8Nm^1(op&rEXKn*y)jJ(?Ev=sI@Fl-e&{_PzfA%Ycp{KE#Uk4EFFLyQd}2hL10Q;aB}SL z?wV&>LSD%B5_;g6C0)go<0%S22h6$grcmR*ULGmVlX^6-hYs2N)~x;bKZ^n_^>n4g5&Jr0eJ)6ZlRo1)?~`oLx%|2p#MyPlcktR z@(4lowbu&bxE9`Yz&DW>@+_L|$isW_7^D>F^&$ar6DBUU;ahk2z;l6=fXi}hyg<97 zVUq)xZqdB!>X?&Z9aG!Rv>z>D3bw(#M*z83T$m*3kj?S+;^w|Rs-THh_KoV&ma0p}2_aVQpM^|k405@S_N%-&vsQmqFhQ!QV6R*??5iogT8fCE2`wHZ(C#iJiEVcqv`AQc>~2U?o6*l-X>_i-Ob(JV$>jaq z=`k6}C*T)AFi7yxR<)$nFBs9bw^Sd0zwaC$p@-oIoNDD$Y%rJFLOHc$=zVTx0~q>i zSbi-`vj7%0YM+<3nogkcN1sBK0Abgfb`+dZqa+ZJ=c9pno;+zuN!`Yz z{OL!w5I-IQC+&z3h2*{`se6&jny^zzkmJ`X{&s|X=&h{(NlbQR+uvHR8Kq`WM9AM! zv%*Dv;JUUyz}XUxw{+A-)PS7AgqX}p=BIv2)TfH(Q@gH2Q#b|-)Wl(2b`ZqKydfP` z?gt)4cZZhy5JhjCqKWlU2Nj6FUt~rfhOOOVA+##;Pt4x!yt7fUI$}BF zryjcUE}U=!6wmlx)$3icT$E1QS@r#>6Y~8fJHS)1@&tf;Pi=RB9~?&qwtVGaswYEV2pOrwc|cgPiz&b znkMNFU64I$=DjBSr_FLcS{+P^ z=&o!k^EANlwXSBgouO}ByUeMeq6nz|YUY_=&-^33`+xud1xi7lLPZrUCI9v%$^iZf zK)G}q{GRX?jO;bEMmHtm$puP~I@#C>z84}Rgp;DDNtL1LE#_2C3w)jFMFZOE@{MC< z4?3S$+J;#gI*>HCeetLrrWSAH79sy*#YaQM$*Av7E(Q$2k^QxB0rUj;RiMdGhv(;y zn{g%#DDzkP;AM9>+VM+F*c@B4EYPCn?E*@aQE8QhD85g~Gzq&7>7i;sK5B*>D8k!p zFJvVt?y3_t`+nGY#6`ThT3L#z9{*}cRMyzph@~q+>~dkqu>6%i>GhthV_t1UGcL2! 
z^yCLm+x5nJTxJb9M6GrafAHb3=HeNd-tGJsVM)KkVF;wgrpysshV6VYUqR?GiWS|~ z$CBHRb_mdNP_a&L)zUm!*DSa`(CgL^p#CNo-eF7Fc5K+6r~wrZV~SiK+sn|s5*geT z6}G3UIKlE9VUQgU)u^)9pnTah2_gfcJy2 z19m}6B1vULPsu`yDQ;-xKb0LLl;!BbBrI2T$I{4$NPfq9oMRd}jv)QiwV@9o_&zA9 zB~XOS^_3#}@DAis1t0uKG!-lN?497CLK(yha>CkK%h0;{BdNGj(#|zrPKMOqZ^dn| z2B9BD-IP--;j>~1vF1OJ_Q1~iBIA<&$}Hc32@X{0cF(CnB@)~1hiJKn8s_x z_^DuJWT&Gn25lTTKFK~N6xC+c12jo92&CHp^^K0^SqqY5hQ59$qg)YW2YtY2wU3%O zO$@@?Br02|gAymAV~Pvep0un6=^;!0z@0|vOF;kz@eA4xOQ9+=QkTC|Co9<8-}?d$*&b zy}Sk)ts&EWx=sw0{>s)Ag^p`;SC zUpMslHj>LRP2Ljjgeqb?T&!xKO>D0{;Vo!%qImH6zr9)_GZe#AxWt7AVek zehtCo-3rhj_ZTh8O5p7R?YI7aKCm+fJ*jbM!p+oJ?6URzd1>iwkIbT{8*O-x9qiw+ zvKZ*Giwyc*eP|Z7AZ1{6zN_O$U3e>jK%SgaUa&a!=(vApvchF%g#j1BiRK~CQ1Hmw zTCHz*;h!$7_7u_^JEMRIS=AdvSiJ2|rhPg#U&%5G&|nqtR4~EhmU^m|xiQF* z?QMW-^tP4KR~dR4@u5m|R`K#?l^(%u`SANGx4vWYAGI2E>UM-D1dz~EkjkcCt^_(Z zEo6=HF`8jb&4RoiTmzM!6bm!5i08-GT&{T^Ve&U-6?vo;E2EZ}NWZaZ>4}`5(3~1+ z41ub>z%3vtm$|!ljaZ;70-4ZtclyUkLaNMi!?LtZsP>d?S=tvrq{Ui@V_7b08rL_e z!-pP5ge}KMA{IS#WuDES2ItO)0z z)5Z3F%`>ty@?636V&qM1`_>R=$ly2h6tgI4fXFkih#J;e!W*{_0008&0iI!WLSObb z8O!H|O#Kug!-zDTR~;ov`WkK9Swqw@!)+e4w9&G5m4UTKLiTa3trLL{ulVP>cDZwq z7h&|{8eUBUU?E<=2k$qn{0%@96IWojPEX~h4OG9&#{+DwwJPsN?yMIkkxzvxH(8gA z@YcZ+_)SAsm^jzHOTg3UCMSys;EanRh{HoaXDzwc0e%4SJas zR0wAehm2*q|A#6IWk5*nlzdFfgAIbIo}sy(6ll7XXeU5UE?MWJtn;mzpatXYYgbqv zmN5FHu^krE7s?NTZ!9Ehf57h#X0oK|L^(1~SLu$9)WgJO1Snq_92CClTvumNqk%3t zT5c;lKXEFRvDIcYFrpA!m4t>Pz8X-Ug;=c1+>%-X;V_x9HyE)OmJFAjzm|t})snI^ zD<`ol-mgHQbXv-8#?}9jOvoz2_L}1zf0d_uyWaG-T4;S7)_)FsuhbpaI8|GlZUObL z(E*C9&H?ZtUr!i!_lI|sfm%i3i z8HQ#epa8r(39r7sO5!#4nlet)lz#!aqL@wt4Vb7(GZp~z+oHHI=yqA!U+TU~JCK>l zbYg?o*QtGeVA@MAR#I|g?}0P)vrcr1Ka1RZgRLAYqm6mwdJK_scdJ-eK*6=@6KrBL z^%G2L_AMNNXj1le+d>F1Snp}Cz^jvFmGGuaSE+>+x*7{2lEc{T*B;FKy>Uj zYHcMTb+HQ@qZ{?@s)Z+UsH7p>e+#~+`&1#(gnV73nVA}Z5M(Nh0zx??SQSB1+%H6X zgv;)8=Zl+o@=yQ(0*V2iV`@TQ{+eytiLk*Z^Dx4ggFQ#}F=%@xg{3p3aKrV%$iO+T zg=t+w?>nODmLgDZKBTv=lt;5ikV5IsPy|oeBD_88xe4wQ)AYi(EqWsMp1-^Chj32m zp>CAsh;g3G-HRdiB-tFIS|Zv_RE(v4g(8yB<&8gQ{w=CWYJH*0Zs@uN0D&lN6groc z^qoE-4tyPj^YNRU8dUK#KP^s$;{}777Hol(%e|}8E@(qm}_kXh?tR| zrg^Cw3DWDquv)s2)GI@vsNQ5DtSsJ3c2qR~YPHTM#|;iQFh%qNvq8NSKeRjuc_s|} z4iU8ogk(t{-+dL;Y=lTM;tZnv&aGV`M*fsSmsIpZI#%D(2jupT;^upN3E;NUx&R?N znl`1r^ZB1CVG3+9`*nW%u{M<73DBSJ37)(u2V+hC#QcyR9I%1<3>WaBZj_Qdg|)&< zQ*L95E$5goAyuIAxTZ!FHe~#AsCM+bkfnDk>jPZQQ&|t(=-e)j1XjuY^ng}0q9B4n ziV)s>?~IFU(*|KpdO8);+KlpLDUfGDyyn|BtoGx zRo2;gFm}7Oj=|<53|?e?PE}z#fKypOp=sNCvs*-`LSTGQ(ycJWm`Fjqb_Fg&eSb{) z+53=Sa;bivHIVNdraEmKTpkA=tr_)my7J2;!l~ z-o=);b<)CfC1fC#iRMwCYTQVkeFM%afkJjBQ#zPOuf?7PD%2=(000gLL7Heus6lL* zObD-^`9M@U(QhZ#jfwwah3kbJVCSP2yJ~3NaeF&brj(XSNi#w*F-}vkW7~Hft*YCb zRWWXfnR(HFh_kbXw1VyYz4D=X7Qx6E_S97ZMV}EmdMv6sMR~-sn%F2=Km9`V(bK0< z4_s2^-*P7kG@%thsOHPL8F?`U#?mLma@lq!gV0%JfiHQImyI}Quv(^(MVUG0EQuga znVOF%l3Tytzwfp*i3!v$3198xD3ttxK(3^u*j=~yfW$P`$oF~{LDvqXtyt_$c}g1s z_LR;v6qJzmkWNJ*Vtz~3n=ThlcE7U2%Hpm%$Y6~Kz2^KGvGL_@%dVvFtl@n1pf@?J zJZ~tMj|$tLUnu1|UQzV^f+A%L;?~%Ed%09LY`{s)5`TJ;ofQqpbw>fzG+W)Jc?A6= zL+iiK^UHVmPcBf}%P+iG22r*0dVev#np2Cbi{Jp6_4%oH+!x5@s#RsWR&S8p?9LI2 zj#XS06T7FKC^S#ze+WCF*gC|8*T5^y#jF;AXSazE`zJ0w^v|GPTAlji=rMru)BmJwRvENB*DHCQXr10xu!7PV)GyU z0g`vZpeg<{FNa$=7#g`!Z2XqgIx66&|6qaNK+K)6jFIAy67$SXIb{i?$0ho(9ck+bd`|>i`Q)B$i zZMB-q4ojHmc9KB)VH*POyAWEn_VxmrM!|HJ^dhGW>_5V$1Sp%|ZUm*nsi$pZM9%uF zJDAB$HYnC~xFqF<09U>)&G6Hv9wKk7XeH_)iK2Xon1h1RkisIn$3J+5f!?H_JZr@4 z=z(=gH@i&LXDU?()I!=AJI*mxm|80`sv z3t-Q{!~{>C)UV)o4^vj}kta7CjgdP=HMa>+qNB`!v|mS@mg>~*7dc`Rg#FS;Vy?Zd zFx=wqewk*w?Z8=b2?uz1J;;IRIVzNxzr?lG6EPE+qbXJ|6stbWKtXjTw;_7Gai(nP 
z5jYa{AMNOLcc}71#bwvnINf}&u4VnstlP*Nb(bK-CR}x?p;-9IeCip2FoZh-k-=?h z6xQt_(+wQkb!C8EMX}S4b)>?)l-yfMJxGi|9w{X|rFaKQMdRXJDC=q)SakjBsx%2M z^Ykzx8|9YbnshT3XwtK9OV?xHx+!)>V~&y7~J(#-2oTldrG&*-U(lf~%%cco=Ye}?CV z{`?!9)Ahj5?T-#PW|#YxJ1{WejhkgpxGhezmVUA3!(e@+KDN`!G>ZaL`c@7McLV_7 z?_>mcBtm%$NPPlEK>mGr9rDL?)v3xT_NW#2^;bcq?|u@#^3b#de1(Cx3Je5-0f2v> zlu=`4#8^p8Vc0Pkg>_laCU>cMZaL+g1|IPDCUeYC-h?Mnb?0a2#qrS~!IiqnD7wm*!?;~WfvSmS& zRU~#v_S?@y-~c1sOS9U-f=MTjVwUZ#O6r(5>IW!8U@g6L}y~MA_;Z zfcMKh7!vO!vT5j(iLFI*V{sZ7Z997##>SmnMW(*|fKuOq8Q7q1g+Pym4QQM-^gA5= z5lUVx_8kBp+4VVBHxXTXFL8LUwWhFjcvWgyWcMrTZg6BfUV%ZnZ;cs2E-y9}b*|SU zjL+XV{X%;ct`e}B!`p2!41;yq8 z7Zqvdp+=VXE8VlTMA%*~l|oM%Sh^RWG2owZf;R(2U!4n_%LvG<&!pRqHoJkK`coO_ zUSF|N#X&V?Qqd>0_GI;oY1e(oGuUv;PnV4xdbAro?~#}^S_LTHMc{nJM@}>4r?Gby ze3-G?VV*`5l3p7N=M%1G-`u{M_K7C<{AGKJc!86zUX+0(C*?SYx{Nz3-S2e?UWECR zoCYC;PGOQ5Vp_CWu4B({gvhKLbLIWLM0gp%in4BB!OIV+U_~Jc@ilkSJ^NLAQ1XiwE+AdhUj%#aM@jKBe2?mmK<1@Dm%kFg)1HMs*{E@B{NU8iHVu^bJpEoDB}TfY1%=tp)?3t8844ANGrB zu+Z3SC|U~g{Ejj| zk2`zMMhMxi553kwa8)NY?OjqnCQ1Qdxwow4;SIPp@g*c(-5$^d^(K&j$&ZGX-7Qz! zK652$sD^m9)~0b8TKc>P5;hfdl-on8T4pe29CU0)!yQ3gohwk?AEZ9tiK{(~x~e|` z#jjxYGTk0g_SMC>B_a^c_`I3W0F8ZWd~)X~oC~MuUg$ZRUI?DUb96}*zy_nbkhU?C zroV+b)V6Dl++CgU`+N#f;h1p6BKYOGLkVh%Vfw{fY|j~F;~Y+Xaf8-tW)=7SKs`V+ z63FDXVH*l%FLQEdy`lHsbe3kDw$>^ixiOmsGpfEnWHREPt-H{}7eT+RQ4a1PO(UK9 z-uqQcw2iP2(6N}jvc8poU zebSB-%KO%Wm1cMEA=ZcRZHOV5NONLYcS5-uRRa*046WKzQHvG}xC+V!kvuRJ4C#9$y)WbPZK*!UR-eg|Xq1T7T+x=EurGz_5n?7Y>|wj4*?j1TJd~>IJ3e1IpI= zZ~&LZkFc0%v?sns%V4q-LTnQ71}r}$a(>40c` zd0QKl)(qo!-oX21^LNMX$KV*}#k-scBmSl;rUO_@E`kNxMKYpC8{C;tr@f9Oq zr}7{HH310`PaW&kh_!9FOybo2UxVRdN{34}*p~#47X=4YTjy$(Uy$52AT|i1Dg=}9 zb!X7j+G^m6>!ZJmsDQj!VjwNMfNrf+Rmx`TC28}<2pRwwUlYQ5e(yH(ide+H+Y_R! zTiy2)=cBKZIalr2Y4ycCj(_hsUKhs8f)Qktod?EN+K@4A#p~wue{kl^X1rw4GZ++72k|eX z4nnt8l~nez{L1Q}(0mdt@Wu^it&>R_>tZe~qWD8wbWuN0r>Q!Ke7E!5Lw+Wrk?m zgSNa|XbWd{M|!lqI;e9%*UR-(k)@;-u|7qj9NyCHgoll}VQ%0jFOO3o7q9?0^Bd8D z000CzL7s+16)YkD?(Y(XN(xzX@jH%+5sI*{Qr68D8~^IkRN(jDuSpA;fppd|_a0Y6 zKC2D~@Xc@5SJ>EI@^F&}P52yq=k{&2*g0x^Kf?70XCg zw_k#7n}I;8^TPLrWn{M6i&@rOkKnV-fQ~*T|2AIkdiuMJrcUj>{2{I$o_K}1QGb1L zu2lt~cNxuku@_FW01LzaNBH-6qM;7`LLb26qrXQqFHJFT;eDKW;#6I`dr~}k=6`U{ zL7SS2OnE;YiTGhLkRisLVfk?>@P5cLz8mAnn2S@qaE?FHoI_IzSX{&^ZqHJX{p4^1 zv&Ahg>+=`hSYy$2l9QVJRBxw-D=6?GG}NO=cuQC~a2WMzbv()#pAp~WFMS>_|NZoM zO8KE|`w?Sq3k!B(BH2j?{VAR0mPIdo&*pmvB@T19vRE0702Ko_D8ys8bIRN_teTGq z%cNHm$2WV#wOMT3(<>c9i{!#_4nxcl(l|&6oT?xDib(wYN-4V7*2PUYqtN#M zW@1_m`;vN~{3OaN)WrX7E3h4#J+j)Mr6~`{)uK<^7gc(H{^V2NaC$(?O35i{SS~&< z`*mSEdWcMhvg%#Zd>olhwk0anP3XB~!nt;8n6HNsB%g0_mg)LIXJ=&F z5byzgzd^9$_&w|=InKq;N}=Q6fsPw&e{avY5oVPgY&WP}UR?25=tH?6)+1^j=`k3`WOC|+%xMhFF7|S@dS=J$Xz&EFtl<<2{FJ)C)Nt2R&d4T(J@qsPkVOZ4f zQWw3DNggB{Fu@KN@X_8kTSWwKo|T}_1u z38*%ELSwj&Y)82(l`k!QEGkSJQ>Gw)?DeC0bLEKqCX+!ti6we5FE+zMmhcN&of^ zw|wn)rjwuNzI(HiHl5OkAX+hL(c~|wegFgHY|hFR=6ZDU*hf0-hAJppjQ{`wKmnem zbV6V6j#a|5xfV^sfZ#SdE{UEK4mf%@DETgfOal2#%w3t-=X2I;%ff9Cjm=deq6&3Z}5QuDiiDZCD$SZEIqy_jmsYat~624wT; z0K0RLh`(G0)5|<|wtC(8a+UJPn@JY1>(*l{zh8>HRi0zI^|8Wd*4jvjIbWFsIWS^a zU*nm&6#656RhN>T`nMyAGZ#zIeCH)d$d0ok-l|Eh8?uETy;ZdswxN+p)oUt!UiW5L ziSl-rvJapL)&;BltWM2vbUnNjTGGVSIMWiH6GY|tDbic4t>v9QBXT@GGi^m-!nQot zvS+?i-FK~N%6te$Pl^hlzm}TuA zpX0He?64}b?a}6Grc$tjYzb9gXr}tw>agPuihB!I2Fg^e4a{iOLrylW^!sT&CuxAsu5ymc{N0-eW5B`;zQT-kfgRs(3@Sr}ETt+JW z)c^nj=K-FlYC>P{z9%ZqJre%v*{dpcM^OG1qWFt7{z0)yd%az^_gQJk0@n$W$_N8V zXJqzXP5sQFK@X3FXQ+h?OxE~oTfB;W(tPka-nM(GG11_wE>(xbqDSnZP%_OKqQ!dZ zT-CbG&uY!gBApR{yF|5y3=NR#CEGoYScr3q>>to%FA(zcdcwO9d%n^)UqZlu<8y{v zNDj(wQAx0@K?YUf;}emNvWR|67{Q~!ihGxy#OZJZ01RG0bo-;9=F%l~HKG7NQ+T6P 
zODY-th3upd^M#86SA&HJV+jPgL)PmP?yhB^6HUxZtGd%$44KO1_O5NPWAo5x_cd`Z zqcmP;in~)kQz9u>Qt%!$nXq7n-mm)C8r4mtW(TceOUN#3K-z&80IV&;bKh{Xu)j!y zL?tPwIfHjT-nRJ_^yy(H7ws60QQC7Xb-LnNIz&)RIHylYE*mg0GkU$N3s$ozbGGq5 za8lgzCMI(*@yrB;)C*0Pz-XITmXvODd;Ztl9IStsm~ZvGTxPIb>?2Hzy3l=eLqChykeD_ z5qkB^NuTUj&IUy$+zhDk+va<|Wy64+124>!)Z$0@?bYus194S^qJtA;B}=4w`r6dT z)0&S?Z@58~_TJ*2!Y<7f4Ow@-sIg{iG-S{^z8|f@FfO$A4t~~%W3q_~QM?qG=lLQt zAl59mT?oKxu^n_rWp{cT^OhGu;Yf>++f z`1}qh3y=#W-oQ{KU%kP1eR^crxDv^QY(%u`Voq&72Z$N9j;CHM^}A(;gtgN32)D}m z#6vUROszxXTxx{EQktlq#56J(000d-L7J>ds6lL*ObCyrQaBFXv5m``)#HP!VruUW zKBPqq9XHZ;e6gHaHe=2$cj!Mn6b=%)Pn$rhDj?$u2;3FaYsgxcHS^fjF0UiH5vg8s z!0Sb+2rcpJ8?iMvcC|UMh?jD^1GkEU36v7)j?lN6M)5%&0DeZxs8sG7NfcW=_XetY zh`n7uC?kTiwJo^g800c;UsRDCk5BA1^psK($k0*2_KArmSa-j4ww~+z{f#LBLo8%! z-u;%}aU>X#2b{_QBAx^0*xTY2oqzxbS!Jbm`?-JcOJCdk^LkB`YHE%HczA_pty>&M z=1rqyPt~7%{!Q1#K8~y8s8hJBueuAT?B!X^DfVsp8@e6Z#4V7%>TPg^~ zN<;xj#lR^B?e`+uf+W&;oO^~6t8k)(=#M6j+EQWVal}tsswKkzhPqmow@c{xnfWo|MA?OGWLZ2vWsvzP?9wYdh^Gso;v&X zEQJzve&RE8l7<5piu!a)Bul^(HQ%Tmi9FfB`=lvy%YS|vwV-GmQeUdR_NmL_d)B-f zb*VY82K^y3;H&9Q#jQ!pBf7ITazxH7z1mF81HN4h^Ud)4D5?Sk*z`K;0z@F9K4As~ zK}t3tBFsT(!Me|sLGK^HGKRK*1~*6ly*E72j6U0VtJqO=Q7`hZPmhC7T#H{kGFZ9VUp*an@5|2;JSwpcMX$-e~WKH319vZ6pE2v(F!_r=v+bDII)0E zu}MG)4q`rtyyk@%5uzt&UW<1Jflgnl<#W>oeDgf&ZAo+2W-zbqOg(p;p#TTr+q}7k z)Zb$l=TPFoL)31X7H}VY&guN~M$T{^4$IxReoml+1~=r+wP~Q;<6;G5+sbv@1)p4A zX<5&eu)2%GQ&6u|7HSsZroaNBWq*|hCd$_N8Tdx6z3=4F8gc7`$Cp}`Gi`ErmLD!u zWg-D(h8A%d7R=7&uc+uy2%R`e`g)-Yj2zrDv3H$HF?-rxPW!jc*d}9JV3rX2OvRN} z6%X6F*s(c%+$K!<2I-7l;mc`l3Y{0g|C4=Y61VBN(eqjV8e>u*1qZ}0txaB2+7E9R zo-0Z+gzY6vRI1wdhd5*sZHyu9aIinWPiY;39JbH8{HL zUQX+hc>E7Dn>;huGpZebT?QN;6W4MVeMdxfq-jFq%Tqv&$BtsOAdlG}>t*rt06AuXL6M0mLDF%TKS{I8Lus=e zXI)^7Fiv;xisZAcuT-?)Fl4E1LWS%lu+%KgKK;vR==L^ar~S{*n!7akb)J_@dutum zY$!7A6x5Gkm^1YAGMC(#FimP#NHp7$3#g+uLAmu<1GsaGd!I${a)Lkl)T}^N{c90x zKu0y)F!LXA#z@z8;^ZGMJrl(aU=|CzN6%H}6Y<<&K)(uugnRWuOF2_o!q6Kf*T=p! 
zFaaW|KYzPht7Hjr;{%$7jWL{$Me*=avO`<=W=ZcWipTEs1y?`1-q31PQ?rK{K|{@5 zJ*8y?!|Q49e=lj(Y+9U?&?Hk>r{9MmD^YU+EFFUzymPZW(<9f!7I~SSbr$k37*wB5J>5+5)d{j z;+;3&qqU+m?F3A7V+^M#aMr?o zhNqRhsDQCyq52(wtX_9?G0y&Gij>q1ONX?@64;-UST;c5Tzu))8tcy69dJ%ZW8L>d z<>g0e^)nEFS0R5dEI-B0sjZRh8cCVt3=2cR2^z%#iqAM^v%spA)zL*Nt~<-ztAL+y ztCC20K@}aY?aikh&lYz?qM)uFEwrT+pH?z^n#x<}fmYrgouIjX8p1k<*;lY5pqrRs zQ;{IAEu+o!qvFVh6Ti4r*GU;;@rTC`4b(kE%l4L#*v{TZ2_;R{b7?pZ6rM9m>vLV7 z7znpQuzI~5Z5IEpXYBQBYP6@fJ$Ih=tRb=#@*fs*G#NbMYz9)4AE+Y0nxkk>@I}w>I?e&jxYvN~d1%yTMLF)pl9oyxRufhJ zf>yVhl`vweEDRMan@T|B=;UgFuAi(Lx!V$zWdKS*@LxH{qR?n?tBkoPnUM1W-ZZOK z!D$cSxCA*i$XA;QJt$XXsS8xz%xfZig0G9k*z7O*+%aY}ftmmW~qre*gDt-zYo|noYTfqN!IZ1eR z#@^QbrOIIN5jk&Wiv6}vefB5TKD^w9E}T2mbuc450yjUD!EV5x!G48-5T3eaD92cE zONPy%j}Fyc>C4P<^Z7XkzWWtY0neQEaFFzYn4U!XTFsg3JI@6zOv>}guG4t&d8}3p zIu1_Z{tbOCttzMso9zL0fGuzit896Q%WXEEb@hBSt}FdnS9$G(jMT%3h6xc1Ohjvz zB$)?^6K+#h(oKzgxAw$c7sAR)c3eS^2u*}lS0dK96c+A#mAH4&RLo5mt23_A`1BCG zo5nj`80*{@fCHyfYPndU)+n<)P=yecdvUiPU1m$!7xZ^>ooO3^Vm+X_*)*lietdYK zy)!d8Wy`d7X3P{mpLFbe^?F(aeB`0Er%8qvjRo zqO;`yNN~_#IFK!^MkGs}UeOD1=BUGu3;qBryDa=LDeQ&>ZOJQh13AF2d1psDWkI_# zXsxEDROr42RMo?xIRu|=8YtDJz)9r5EY~D!I3)K?Cj3s8WdQ_1pUK;3lYCDpIQ)gA z&Id`}_mhu?^A`+iTIQWecDX@ zmNh0Q2h-1yQko3dKWX0_gq&XfVz1voq6qjvm!+PfzPJJ+!%|W>Pk0kAHKDT2q_a z+Hyin=JG}(4W`bxg`#HPp+I-tEja+o@;p9Cp_>$12d8`)j02|)9UU%4qy-o??@HNIKGNNezpAQX=xk%#HYPHS57!l$jDm~~Q3ukbDsD&s&8R{xSN=5nD1tColPR~bue*nc1 zT25?X`o59={CXE-MIu7f>}db`lXJtiY-60I7~z{4b}E$f&uI>4vT275fmf6;xhK)y zU*QPOV^mdF)1f7~A7te~b{J6ik}*rZ7*E}H%TeOIFBvxidBU;cT6B=`^#lM(Fi||t1p!v0bMPn?|U(cKjQ*4i#z8zvMIQ;ZS;ljTcke<%{r zFf$M4P+AeImmsK+0t<>EgXDhMjKR8VZVO5d^H4L+t0c9|1L51@s5Z57tfPF8?-+Cj z+3_E(0*fM4WjusGldWpAxNP!p5bb#K{Xq%661QL!Yg_y=pS}xU7U)+#tUG&7f=e^* zJ(H=kXDS>h&a^2h+pk1O6db{SMg9QJ_d*Jr``}!`&zB9DX!UwYu(@tKbv?h%b|Bp> zjr+X(fYUt+Us~L-d1|nZywFGL0E1sF5_2g9SO^cPej}pc)qEibQ%hYj4r8@tFk0R( zJKp2YocN89>D6bnk2N^Ix!&bR5t*ql5x-&j;`(0LosWly3h?DL?lJ>OYPTX`oXHDL zkd-n>2{HM5zsis4cPOop`kd!44t8TG+5nqk^t-J`jHr_nbu0-?>f*VtmZv;_e zL3f?047P8Fj))6&e7lz9W+EosInSD^Z_zaH4M$t$zk?NLj~ga4whC0$E(N+6v6Nm& z8;6cKya1Emx$8+Wh3lj%7N@i!%;kmv=wr~=MIv{=zFZ$NVsySAESyj4A^lXQTGnBe zMSqQqn0w+A7_zO*^t7Wo41Fm6fICkQH@!UI8$x5-60bsx{xWt^1fgo5$5yh?8(777 z8Wj{cZz9FWnMA<;V(tlRbp|JltU{>x~lM&$X# z?UvLRCyc2Ko@6>wwWRfoF)7OaN^?#!bT;jzThddk2TPYJ-K-VhWCc?cd74Y`f`Ffx zpSZvaw|yLC!JmJ)^zB@u0xd?}@$!hZZ+4GSTx3`kBptXZwf3XA0007W0iNk}LSO5{ z(hXNO%mO<1@$5k?eb<03$52!DSVfv_e!C7{4eJNkj}80O#@_Vxj!nV>i*L&m5z8s~MPg#Xw9Q@vPToNiC-+KrPSzy@%~}mD#w?XDw$o zi}Yaq-MSLYv+SZzCfg#DY|tq9uiH)0EWe8tc#M)H0&Lt&@93OPPe1|Zr|Q5KE+(fW zt^f8h+1F?I!8I;^uQQ;DV`(3o1?P z$RCee**VkZ?INOldCyg?jH*u}Q_FQZQgXmwg7ZHaT@BuCy_EZie#}Qz+w>~siKQh5 zg}WljGpf%H0n=Lg9y9DF1YHP$U+vmq?9koof}K%}Cp6sUDmgo5J@OtDhY^y5;mo_3 z^wQ^uqLk?Tc>HH_?dKp5LS}|ht#Is2;{5gdwON|i^1-itp6N*m98@?)vXr9AP~(xj z;jTR))|pEdLmyXoeKx&~i!M#`)U=adb-M6$={J6KnbJ&b1b5$lS=CC#M)@eR~`kgREoS0-GHeg0$wPndIwl zoQl?_d7=2uU;X35>#rH##lC#vkGc`8BnUVpyb$k4eTp=kKK2J^P25W@>69k`00C(M zp6hBtU-X^=Vjav(iU2)xnY8Bi7+Bxz)YY%O>&s2D>13AkE!^WOdSaP6 z?=3&fj4U+h?o{-w@hz>|4=u5t0R6XS_N5)@lKd-F6wR#b`{mNOlC9rK1=A|vd6Hpj zK^&Mb4_qS8pxDzP*}}kwYr>yv0sZEctXPnSZ3+)23ao9=7>*k{x{?7$+ugS1;=E@f z>kWeYKK~tc#Ld7TDs|Le4KOg?o{znA!&Z&}l=&*Ef8eHFy6v<27bv>ijphhRZ*@E_ zasO){xa}}Jj8+@rNnA=OaP>x*n4LU=Be$dVxtb2NU?a+;XIt9kpZIBRvAX=QFsF=5<1aS)FB(yAnJ z+ESBV;<_oJM##*_xGaOMObFA_H~-|4kDK^53#d6VS~7SsS)M6NL2CfaL~b^GY5aVv zzWl`>+C5QPss!L`IUnmm8yB97JinLxe43|rWgbA$RIK2#rrNYn1K{C1bmnotj#tvY zL^5UMYF@wgHSc7uEa}y++G|foEP%;Jh67PNJM4gv<;B}9k&1&+1^jJ~U_aWZN;X*N zW^z;f#a8-Ujy%9#zn(d_0wB8gpPp7R(g@W%cP5 
z8nk4ESXKO8)J?*_u`R%1_7_P6ga%3Ur-Sb$wGwB9mP1wkV$rqHU?@C<9;AdHMa(fOtJ2o=lT43Xv*iwLgS$h^|7Sz|Spn0;qJHl%juzx7gFsnN z@n2Pw#B)@<#}O`$)0TC~7H_wn0y%XHM9gEQV^p_tMt&ik6k96ehZ^;JbL@LS5BKwp zB{DyvKO^iuRiYb3ywz;f>Mweib zlen?(VGp%KOYSqt8-6nMC?u-9v)fg4=MQv2A4N1@H3v2Z*+F}|>_Z1WZO)TF;yO@* z=b;Z^vK=@-0PRP~W0Z(J=DJbo_`~1Gg2pelc~sEmz)cYXCamtp7V2=5889AJ;XwB2 zIcWQR!+}=je-W!5jdd_RF`6+H9E5WIErYC}-L7in_RzNTZG=4G8jI_=GL0|u##g7J z*JsBB#ylEWbWw?{3JlP7aD=7cj+XNj@N>A8QTVV?+dirDk?%>iCpgV>3xYH4kfFW zZ|y|DUvj?oVeU1+^;J=s@6zjK8B1aQV487-P#0hTlQ&MT67~d)8f(SGpd>aO>`Nvf zNg5x{SHr`Ai69GzZ@puH|7ak_z_!Ct{+zu|f7Qtg3ye-(PM8z!B-UeDzVjqEtGfzR zaFP>@(ivhqt$?@}Zm5E(8T#=%<4hbKh{QYuE%Q^Qg>00TU|LJ*IhQcL$P(K}H<(vx;d~9vWT_+09dxKdSFqKwp%so;@dNrvOSj>ug~ z{yaJm0~{9BF_J?@9va2cqd#~?94f$Lv{@!j{O(3&Lt7!c1thFGNRkhXYgQnU`fEDGQ-<^wEewAZ@ZII zAzIIDi9ymu7R`Cp0JkAS8uKhg69^iM8=)I7I_|+@*Vvs9AQBk*XyPFe+Vr9YRKCh* zN!cDTv!REcV5Sm=ptJF#cP~%RvoUp{6mCC~X6`tGZq1cg={{lAU}Tzp^1!-bd4GP?=kesP|VMcWJSEul|P-Men&Qhn=OinzHU zDI($7S(~jHn<^MSp9YeNXaC^hL8N~xK!^US(|z|LX@G~c(Zg$0EQDDpQR7SD8Bj|w zR9JBKGK+MOy}aSUq?!NebsFYcPSj%<2sZBQVLi%7Bv6m3_Z}kN%8uMJh+i1YJ1uwf zkjIqujs~JD>*dzxq=>3;l)%+kcpm(QDdxt2%F>Avtlw80j;gV{qtc^=M#vaQm|M|V z-voF}&1gQUhT_veGCqur1i50uHx5vgx+KQpM*lk5O^lr~0;3`RNQ|`B<*Fo0I#RMk zuC1+eh0h^KwOBA$GGC+P-DlXbxqr6;X_AF`XkPctW?j#?*NMb@Z$e>5#8)r&U@(x2S)SKUBC27}M8 zgy~aBCX$mVC)675jF=?B%3=EW4DqKNtAI{|=sw*S@R%g&tk-spm5N&GGl)_fhDv+1 z?*CvjJfh`V=q)hV4D6WQdX~r?e)&<{S-gb))k5#@Ss5Oji1;x$Lq1MvmDugibJ(n> zhf4rcQ9VP38%GXM5wut{5X-+YUvBm$Q+#VT(O$7sn`gS&v%uvAu zqgBnedbx;@QbHvjrbXjGs=G@F70yXZFPF@9(vklAUW>WK*>(mGJcP}rzM)>}V9;+m~4iU|(Kcm)I`q|wkyODYec(~kQ= z(L3r}>5(igm-F3C8EgwKor|R+>F;libTZ(?(kMU>eh6OD%>j)_S(WiVEh#qVbuJ4E zoMM7z&4<&N5908rRvvZ51~uVXo81(7sJXr@D+=%LAc2$~i1i`i|K(>7e<++MXpy@-d0RkiO zx2RaF682;9%m!4`d95*!0?Y}aN`JpL$KD}YKKU)|kbaoI0$JdHv~tEoknBTTpo);C zDGlDk4v)=JyX-0)Rn9yqAi4x$Q3N@MJOp*=KK&3}A?}#jbD%T}$hlT%q<+>^Q|aJe z4i6$F{-|P%0zKWTv|xUKfGX|=*z3~h`UgK~g2TTSMsa`hh+pfGD}oR61AaYwqkJGE zk|1mRrMFZ@fzQmtscjLDD!!#pFrd#Ldi+0#6kii)(C}+7Pa?TGyNcRtmiFF~`VfgR zX^_n=l)VIcU2P9_!u}LK1ldwdn!2&Z1lwcBK{tys0jP5nby{U60p&^l+-RlaROVYR zEb-9;RUHj(5Z?<2mHj;8Ome@_K+s?_&$vf-N1nXD#Ak4$4oF05qXWqgzc?VQWf*H= zVr}m3x<@+b&6`;)hPzgCep)-{hZO9#c-!uQ7_RWnD1s$IbVNs&XTok6V^aeMm9ZHct?Vj?z1HoW55fvwr3nnJ!i*c8tf`dTntg4C|xO&c))}sudgt zwXVw$9?dcLKy&5n2@(=Q509qD##SVc{Gt5POE@aubY$IO%@eUPIFjMFpEF0aBY0BzVVARDW1h%d$)0lj{H%uU-3h`!*=FDrDn~^(w+I0F1@vc6* z_d63JG@3CRYlrDUJ0T>O`nWW9a&KtfMr5V&mRhH&-*IC6rj`hc0TIHr&EdDa358}_ z@fi9`9OHT;stw(1ms|WCnX;Zmbh-!)w%!zgzbvGGK({TSRehydImIr){T@W2u%{?J zJC6umN8OmXDQeHutl|B>b3Yf)W1njgOoxVUK1GqvqRy1hP_{16@{x2%61Lumf?5$#V^xeuQ-wlL_?tzu9r_F7S>tXIozUuNy&#*Hu*L z1hx<7-V3z;)^Wi3dlF@IOdepAA*7kf@z^88VlSHo3xnEHiyCNVPFQReZA8RSI z9#$t4H^r@%IULxembKb{02G-9K25G!1T9gX9=Nc8v5RxUJW2JVKKD;`)>@II-_8q^ zf9i=OTKr5Kym}~!mX^~em~sTw=Nx-nr+1%)PPLcn?AZ;fdHpX8)a&@51h7LQBgPjj z+08jHTn6fKf1V=|rX29A>VIBBQ)8Y&a5!5Lp+=gA275RocPUB5=M0?u7oXl++u-Bt zJJd-!PlvHkvZDk7y7e0Ydn}^6qLy)`+&%3#0005G0iP{&LSK}Ihb;Q>FoIBZO82%C z-M)tY0012wNi=;;4xso?w#d5cHcEkt$LU;HSQBRxl>hu^NzI^E5!g|p4dNpf=~8n48d7SEDjU;z{d>lxcr|y&HZV=$HEn8FrIKBY{2un`j+@O*h=3as3)6 zpUuGJVc+kRXqDy8P*>e&;sPRZ zeNUP%?fXKG_n!Zkx!?x$ z2s&pRH=8FT#xnW)Dqczuf-3U9oQms#e>89F zBx0uO7-xVXQ$ggZQ*z_hm$jJki${{SIlQb$tcB|b5Rc!S)%_{33N@B70or5V=34T$ zj3)_O)~gMMETIJ%O{BgWH73M-)})gUuRc(*+V=K(J1L0u<_jF5^G87S_kx}DQ1nv{8L3H|W{5GL>d47wuRI|n<6)Q@;;ZL!?D{&#_#4tA(e zE@LLN`;^+RD)quwob(2wGZK(;H^jn|{zqQGZ%!w1Vaq+t_PnlA&)bF`EMPMGD(nv( ztrs@3p_(uGt3{c2SCgKI!ZJ`v?ZX2nAL8Q-*-JXeEm|#Kp~?E!3@;>rcU7~H~QU=);JNbwO5tC8{dUCQJ~n17FA0 zzY^(2<;-bCv$7J<*6w*~7(HEj542~wxjt2y%w={ck&npeIRD8;U?h_Sg}WWAQh=%O 
z+iWwYr=!pEU)UviIe;UMZliq~4&gH>IZJe@yGy4p9CfsL+E=TMD|n_~*xFqAV*tTs zDGGZHX{~;{WDiwWU>8OKI6d7=Xav{I5K%x6RVe?@r?ertQG8c$TRq*}oM>qN?IX?( z6#eXn|5gzRXU$M@7AgTMwddLLBpX@h!8UFDU-PPyvcvihTl3;}`kJK@&ebF@m>HKcOm` z`HP)!OgU5s7U+O15ex~<`I5l7@Jr@j#$4LPy^t#N_{f|=WkgZH9CwYA`;9 zc0?;fMZkqA?&!;(5k`AI$_|d7`;G>c=J`7CQT1Z=)_raQygS==<^GrDNW73S`y4p- zwBAFA_&%7KF{Q3hnN?S4umO~+yj9uP9{>Om(?OdwNvJ_=nM??ev3gL0S1DS-;}LCL zMdyv+`{gxAwS5=2zo>p??uJK<+)nD4eDdy!&hApet`Tif?hr~OYzT`;sC=z{=BN~y zCT%wL&ra5|I1hz8ZgiDgdx(hva}A>2;LXdIXkrw%Ti66xmP8ZTn(oi#z<{lL2y7}u zJ8b>^>n|uS0&KJzODo2ob@wt*|I7*`?Qglo5Su|FrdvvxZ_x}`%N9U_6*(_xOPjf{ zrRLNdXXop<&UXP(nvc?Ni+-MZ{!oS$+J&xtx(3sLi>VD>{AU*SO;$IhG?C&RgS3!4GT= z9=H-Nu#(|sKzz9qAF|rItS&M&GsI*mgOU$xIh(t>|B=-0>!CbflD2^1Fpx+WwH^oG zW2MyHQ<7`tzna4bjeh5IxQ-%FN!fttRw0WkpUY85L5LBJ^@5(=@_FsqZ>3@$eW5Pb zrkj><;h-iKs3r%7)<&DHZFZdC5bHpH`4R9e_cI5sJup6Gpbs(YwT-kyq@ks46<}Y2 z<~9#3Fg8NcHuD{A%M@~HOi;-j&>%u-cNp^(Os5Gytu?D^IqIfb@xN!y5Y%!VEO3+Y zDs85tMH23&ZVNg89nodS02BH2dM}TZ#siRk@7Ffa-W$~lF_?=g-S7BHfEA^5Hw%nm zA|Fs)D1KfkLAy)bogCV3u`;^bt$nu`T{))WY&{WBo)X{zjAw59pyratYFUMJCu)OG(4n#SwK1Sq!JV4lKe3sfU7;w55MzY zi;@CrvRZ{&GUzs=n9F%0{S>GyIQDIugk&!PZJJR1A4VcDE~46E4|aF}sjql)Sh)8o zX9$|^jAYZHy4N~)vMt*5I6YsI*2}~yR~eM^B0Rq+!vA7!TCu%1 zABmeDp=+FetOD*Gu;4kpGT$;2%NelZp4*Ywr?@GJQH@?19y)Klt>6#yI{!{vAPMa@ zz*|bBOBel_3@(m|$5h*T@mo}?oNGKs{A1eCd4|)ilEZ2GIHyo$`q^iVr>6gnj5hDV8 z;m?PONBN>y!hXnT7^}v)RAtC*`M2vaMH?7aI=6lbTGD@FRZ+uo$y<9qz~%(@#M15? zl7?#YrIreY{JVLe=2si1oZH_T0mivg-r=PE;z7q_dRqV0S!0WHcq;4I?CsBLm#`%{ zW(PS$pMiPuj^Jateuw5X`;k^@*voiU)ALaj1}|)2iYFQ>4NO^cx#&riu%3C3{{qqi zCn-S>HPjl=MUo%aR>|e5o~LnJfFJvO2lwhrJ1DiuwiM{j_Y7m@)r3ejyhZ@ zWE0h^-W6#SRzfA@A?=HuK;8#LGd!XIjr=W4!X$g{D->*2kF& z#?nonW?T@7oh0U1u_@8wugYc9)&xkjEYa_P3ff?Wg<#Sh?Gh~O#%hNy@t8&6kMsojOJ~u(+$no z4rFer>zWNBuRFfZ8ps=}X6q6AAZQi68fIXHqvUL%aJOBdvXi;76!z`Vgcj%19BuSu zH2MlhP;0K-fJ@qzayxHIB+h(z$&KALR6eoi?ue9qu5Q8cmhI^DI_@VI6&(_R@I?+lstzQ0D2X=eyET`DX4IOuhW>kgHk8FE;fyCC0DEF#%uqb|uJ2uy zBc6vvBZ6skmnypYq^fc3eVB&|W=7_f_MLIH(a@d=6qykdmM=|x_+TEnqxyaIPM~f| z)R!hCA$SuCk$J-V!okuPPk?v%diGpIC>F3j4~0g^c;Q;0Nn4i>ZowxTU|+mR6FiXm zX&(YHwYuUZz|%HKmsRh9w=^kB>V}F3_=bj^^F_B zA)Nyc(skhemQ{jm1*bi4jUnCaW+z%40Pvwpd@jK4v-F+wY(r0=#hm-Y`iD@$p=7Vuc!%Ty8{H{@)kJD1apHLr zI%eMtIAqN4pK}vj@Y$iBD^C5Ebt3di?tkUCRn0~4!VvXTkE$(^qPV}T@eL#_%-9!G zl_KvWd(28vUe+`ox1TJfy<~ApmAi3S#v1V*I|BPw$bvA z_cQ(O&kAS%QFUK#u z=Qu8fnlM-C1Uq!pv*&9*wqakOFVSUfG6BiT%j(a%xG{1gb|ZkH}tf zPQCoK&2w%@KPyQ@06n;sbbHPzP;El1m6nr-9HcD|beooF^lY>Quh;Za&bnd?qeQ!A)N zbbvr{=*8nhLJ{Gjd5*vE#(!!rnqZ$!NXZ{19#xTnOf5U@hI=tOYA?k?jjc!m;idPP zuBOva)WS(rnzt^}ZNvq|vbjui>EkjG#MdX8>v_xbhweY^1LSETO7 zm7^oUMqP}Br-w>R`dr5rsYGLl3;*6$la42R2X{2L075>SY2BYlAgV^}8L)rL6(^r$ zk1hJYZcZBI+7@~rmZ3S6uftu8td9e8%`%X%X9e{V0|yJNZ8*{!_5uBd<-trOmq-jX zSfn?n2W*o-FEwA;6O|DJQc7EENd{xy)TZF6H1)pQ(=q;v>*#ye>!c6^IUZLC)hY& z^%ahZY-%IPaeC(RWe7ZWajIYm9Q4-6xNRB(b#-x{uGY156osjADzKwQT-G4RwgQKf zhRWPUBSBi++II<+!)rHwk8)fet2x{^I{_VmCmWinn&JE2^8Hy0^G8=;Qg(L2S&fJ? 
zsH1NF!&>1o<$Ck!c5Z0qq8gmKSzaLUA4x$A`+CA}0lsuaJIRqcD2k6j52Ue@6Zg}lHUGJfU@ez>?j@LkrK)nMf zb#mAwf`*`Xk50gg6Z?X0n=>>byjeR;A~pzMj+Lgn`haA-%ne0QE(5(`g+@1HQY>2T z#`_l1?qfIDx(~}2aPF*2p+J95)0j{M9O65oC`_*nZX81+ zM=H{W9p?XMQvHBZTHJlCY7-lH^El#XUKeO1(0c2|W`%`h4w@7-{fI`aRoJh8b&F06 zIw6cYP~qr`F4U!LAxq#3FBXWMT4{1Gt$>J112`OABJGmM$+cr{QxvFKOc9_0FpIGJ z{CH=$NdyV5wtZqO$Pd#o(Z$8=H?2KT+3ZY{oSaHxVeT6nD7M`ym8Z`Nh9ECS6&YWz z@UFD*B{ylx|MBI+{7lH2+fNFI+Z0jCc_|@lq}n4@kI=$T-^lq^NzRIJ#*`w~FySzK z6c;~4vV}V8)I1juQD4QoTtqQqwl;^6HlFoI0j#1U5vI?v3nI7|u%HJGiG&yUE>RB_ z*L0A+O4c1*;60;8*Ro7w0xu+LIB}h!2$w^CL#H2SHlinVbV?HK?*eEQMAY-Zl%CQcn}IxNk^UgfapZt)JvHbSGtLwlI@HX$gJb66ep()tr2|r zsMwxR=jrPKhRdyEZ6ET*l_7}GwG#KH>Wl*EsgZ6!9A;ekXB&X5O5VKMi_5hdUpqBu zFtJ2U4OA;8<3i-?3G&1OUA!*s$`co1!aVWX6<5@pW`z5LIBK788?)iprV>w!pc#3D z7W~wVo-KlKf!_D62nh{WltlZx;f|R|nrHWrbiCE(8qMTK_}cI_YosCVUfN`{Uw9mW zO(|Ivo7@J?wXM@1t_@1*J9rx?E!?4WD=b6QsROaNU+jCg0ak7?5sRS<|D;$i$AK6y zo8SJ_($KoeCbQ{DYD+V7%O~PXu{^~w8AB-)SeQHlx?fDPQPkzS5k{q#&80N(q;rO| zfD__jff^_PcQI8#MiFci7YDYMR7Kl4HT*B}A^Md8%DOniT2G82x0n5x?{kf*bPoZq2VpDFb=4vNMzXA9&1&v2zqQm>r8i z&x-Q@7T?UM={G-{s)x?waAI?%gV&^7?NDl_&;T6nwOKGlmJ?8ff{ugckE}Z*>3FY$ zC--%aJ02C7sGIb%JRz*yVyN@WbjbM%VFVkG7)^4$x8S^DS9#7`3nKh0j)dd5*gzUIj0+u1oS3&6zCvAOTI~TDu?bl$C=+h3Zv%(w#oS@Hn%K zn=A2Gr22ic(hi7is$aO4!eDxA8@vV;GDOW;u+2(MObZA%L+4ojiB1I@#Tb|B4a#Tt zZm^ku@)InAl42Sc#202md@r6pH}qdS&%p0;kbFIqvD?^>wRXt24)DNWiug6&z z+jlYyq4HT{=DOhTtpg>1000PqL7!4Z6)YwH_uijce>m%^zs|+m^rOr&>yA07?^Kkr zW^<1%>oqtg;pLHf$=mMb+2X#a(W{k!P1rU4DF1vh)cT*a7hs*MTv=(4$rHODodGo8 z6Xse2(ex(bcs33KsKGztgW-6^!gh@vnuK)Azll!OX6r>#g5G|I_Uf2OjvM(#B#Vgu zUn0<`QXAP2163nQaH9jiiuAVWwu!an{ymWY(%)PFc_4ORa-n(Uq~YGXPJ;*Kq)0J; z(nM?A2<(peS#SXG&x1U}HwqK0#suSt^bWfuxqz5D(BAK4tPH%z7z?RvS2``5{vu(# z^GhIS``f&JV{FvcI9vnj>~Bb9@C=nb)y4Y=pr8+{6OI#4e}{jyNJ)qYIwC?fG;uC7 z$BEK!>ib0g?=;<+l2c%>TrTv5j)FzrN-tePTC;s0x;f|3+BTyt6Rak{Nmfp;aITAx z^Q(A>@JC56s+*?l$2>DeK~#k#7WO?>%31abUbgV&pMVd4@BuKiinJfxWTJV5_Fy(h z95~FRNnKZu-<<|0Fic)8^$34ZlI)qQ9X?3J3zbr%pRx`qGNs&Iu}d}xxxuADVgumD zp#LrOkGJe>W-2FqC@Ft7uVpz@YaBsipgt}yZb^ea^opI>*SPo%ewflHww*Xn&+K`;gz6Ad=qvNB*)tT z%jHA4|EXBhS!Ix?*d5L4Q?qpShYLn)&6!W-A^}eYyY~x)YvT4tR^D+5aU?{9B$`0( z>AW;}MGLEYOtpk}68fg5d5lD@dVVmDjD#zR)_fE{G=6chRJ|f09k_(_4>bz@dRBhTngxgh9sqDgN8Q(sp-tdz z!=Kxu`R9Y|%@q_;Nau=dum9_Qzs>tlN8p?EY_J5Iv^5sq`uN+d&3Kbc2CoJjPObvh z$G6n4horb_U%w;Vf!hsviFdFc#82HqLlsAT1EP?UP_5|Y^iYF8xN;27mHf+=F5D>- z`qusi(HQ8_=xsO*uiY-hNsgPU>)`M!@PEFa8(By ze~nVguLOh1ctZ_XNYT%G>TTYF@N+^O=mmvMh!>slj3Tn;B85`?GWS#f*dXm$-4!LA z%p?}v$osb4G!fk9v0XOia)nTZUVhj+M}W4sE=@;Q6F~F(3H~%m1i*sCE*|R=qB|zu zHY!IK*B;uQD~GM03g6)xQB9zA;z6LAN;JQteygguZSrSi8;U1D)T1Pq%Hf?y_FY}I zV?T0s9gC|&6Jni7Ae<}+FA75^@MM*0-I9@*mmUFi+!GQ>tC);0JH1>EK6)Dladb*GY&HAHhvY*hH1HkmH4?rZ zHQn1a7=p@`k(4k8|G($~2MSspgQsUm*EnM>htCvE_kyt*gZ~q*I_|X?ca^)b^osdw zhJb62iG%^em-?@@cr~(rJYG#4r9mk(mm{I=`fW>wXm$1*oSoe4%D<=R4hP}#Ay|xZ zDIs?t8#vj*UM^XR*k(^-JrP2Mq}6`o&j|6F?hkSqBYGpge?d6lu#UwNhDKIfAnQPZ zB7X-JG@7a~Lb~%WKO26~2zQkVSzp}_)SQ!kL&@jC4b1l#}O(r2;>88 zvYe{Pycfom2}z0;?MF(ar-;NRau?|mFGe+lQDGz#0lL)w+Zc}JD=mC-x# zrfanaw;fk^Gatuo=j(;}+v1ckWYMQ2ktSp^ib^E1fo)&~6W5RHZe)Fg^m~eBj&an| zrI`Nm-p$+!$0gvH1t)sBNcb$D7ZLkmpc5Y5WzSL(SvoUL&(o6KOhaeGUKWLKlhDQN zj?9nAuNN7jwfRmA{dZPL(2RvweKP21_<^&<^amWA6S_;5nx=jGPKmyjo`?vJ3;pf5FFxAubc% zXcq0F+b_F;czE?-C(II%_Bk~?2v<>28`Dt$N`>b$mX2k`A#BUK#m&Yc-?N_&aAc?O zwnQ8Vm4SbT9X02Q>IzZ!kV$clI`7I&s;nWf>^_M=Cy&eg@g5+6vKZl@|6lQVPP6=c zk!j$J$wdcWFdV2U@+1G+$s6DK`M7(M*^T+o3Ej+chUfU2kmEu#gh>||>P=Fvb$%hF z3Vva<>m!TpP=~nbe!O#z@peAy)UbQI)bXLjCW{od2WS)tWyDM|kH1{%xOfo{)*0^N z`+|R93p!egAIYa^#}cv@v*EPER2-MyW#{F}W~7uLBS6?z(mz_{q&{mT>L)q>m3yN8 
zoX1I@pCTvtv<3)fu|k=$S?WVG9cUSYnS-OA=i(5w+FK=BmPn(gp0|<1s6s;AA8}#r z#h|72Jx;tM6^oZ=X1hBO0l|A7`Dgp(E((kZ;>vJwf_Z*TbgEB1m0XWw^fK*L+J^Es%NO#(V*ns5mTp3Yg;DQLO|kXb0v`~Nb3`sg*2sK1!nP=J=w^g z#~w0f%pTk!c-5-aiJqX!P?|U*UMI!=(D_1q&B4y_-9b%x zV^_Io5AkEBs5V*NIC<>Bnfe5bo6Lq7<)dsqp&Oe1^~<%RkI2dvaf`cucrgwt zAYu54qb4$7DhSAvFq%KFmE%SXfwcC-1e`kDeF{wc!AV=*ESWJK+#jtO9%g3zz0zSh z=0h(-3y_tg^!e56A;&F)30vXUAvE0fb_CVm4edvvpPQQ2c-Y;zKmZ023$2E87l=UML+WZ~|_t4?gisi{~ z#l>|e)8~^Qv7&030hD**Dc&-TrU1R48~g|f)uIrydYmT<(u9_Mc@W_4AAWih-~yH0 zA#HR(A2H?#-(8gKS%`2NwwaWBP$^h>PL!4juoemvFifz0)cXcjfR$TKj)oY-a?ail zbc3}0%g4waFR)sg0N<^Sco2_NHP-_i=I6NdU`H|4KloY_+~}bO`LgxcWD2SGvj3c) zpYs3?OTTD2Kyipl;V%0pXv=mJXC3)PLMfXats(0=UV#T%YYp@}7e+H!(L`AyWC!H1RKZ(7q>rK)m!FCMMQ1jx_&^TKV3#AhIP*x#&G0Bnn>AV81(?9 z&Bmnm633!Y`E_=1nSIVIFs}p^p4~M@(Y@S(HS7x3ZVOqVL9CqhAp9q^19tCE_h46h z*^c??dLDeOFdM#xa_bXrl04AB(p_X&ROJv2r%807yRFou^}myh+n!?Q9=s^=umYMQ zBUwfWrrfLbtVb7@Vn#)*^&6eOJ#=p5@Y}Ybl#IDwLXr+PaT;N6?B0Ca)g>f3Hz5@Z%ao@t=Lt4V=CYTHRj{a(O4uWCSMh)kM{P4hJb(ZI7i~eC zcuA;1Y?(|6e@Y8$4JkIR<|`~aUB}#hcw^#)T^jy_63v(|ZZo;zb$ceQl;YMs zIH1$gIIvNyE86FHpeg1c913D@_@AalWG$M2D7LY|=Zh}3q1T6S{v#xa#mYR{a4d^h z7^;IqiGP0WkihJQRy?stU5BwssUcqpd`XVfj4rzp)IALAC*8Vt4as79Ti86yWl|pd z|D@qb1~bxG76I8RA@bh@q*J@Tr<-}q{ik7fTw-2anhcg?^o1M=3HnoR{NA1&(x8_R zWv9Vv|3(w#HL=77;o2QtF0k`7diF6i=JUM?2S2ZV3%^yMr0rKeC;JB@k^0zV^;p@> zzupgLUc8!T7X<=->HGcwP63_tHGv67-P7`(?pEK&k)rwTpTsdcijTv6I)+x2vJF&CtEtCpG&fLT6ZQ%`Q^i$C6RzGSs!E%jq?QcF??&J6KR8|Q4zX5W$ zP|mk&`n&JK)6XH@el351UeT~sCL-OzH$l@7)Hj!$F{zxcAic!YWKA#-3F!fWf_TNy|r48 zI~!CvsZD$PftPv6s4#vN19}01*DgIQ8W*YC41-&^|OMId!=yN=PtFMYMKsQ3dwtHj)`>D+fwv(2y zVa2KEu4Et^6&#|ICYTDECK8g*Gqs~*wr)C%E)iD|nhDBr#>}b7tn=hLbeqtA70uzglbwv}BpSdP`k~V#Z<+e@-Tr<6 zpDP}+t?2?bV;?9=KIxRB5BC;NBlB0AN?!@ap&ed^Icu z7hDlVHk0}MXF}J)jLBJx+0eaq3e^mZ*1Bb%mZ@awC4tb;ueZx zfi;jlfJ~-;+TQq-ezy@b?I9Y~)C8FZrI5}O#)*Q;nO9ZiPh3XE^+uey2VLU`Q3?*E zFMVNMOEax}X%YRq3Y(FjS0S;|PXA78NdF|pQ?pG}Q1mzQhz@W%z%ix(qLr#=w!(42jJ&v<39`sCy{b9*mt~{n zc_*DtaS=I8T;GKXr|3M>0Hys2ZXOG{@m_`o8!HrFHV@{ecYr~6OJW_QM8mLgkEa6r zW^j+EiMC$@{V|WK6gx$<&shD1Wy%d1$(ID=X zcpjbhollj59~RodR0kt!KLQTMq4#qN$)R97+v`N@i}W#_b{zQ)g6A>F$+yw36sGRq z8ao9j?H_rSndSzADwI#08;}wLoToVZ4skO3vvnoi;~`tSJYJzH1y{2Foi^ zV%s&gp+yI11MMXe-)3)L5g&WFYvscWwWq?eOd?W~1^n{=_*->#F)c4WUgx&7N}jL< zAf=|hU|{ot^4qFEaqFVVge=lO9M0!)a*XQvOmT;USV~sQ7P6RK{C_edGjR4>hJOa8 z11yIy@+4Ew5<8or!_geVb8L=$U7I$AY9N$lViji92mW3b_I5x=5&pBE7yfdiB+e_K7>Djo%nQn`S zF==OcQMn+2Ah41gyoMKUZ!2M&dY?X}cJ{eQzx0OQ2v1zN@);wIAhjKlKR(ETjh@Xt zE_9xJ+HQ!`FgT4TCONhP(zSRP1ZP@fi>k+n)gr-Ga}3y10s!Lfbku1w6$ zx%my`r+-3(u`ftDpTf%$N^gNMN0jCyw<@o?%pfgy5EQGN5GmGgLEwWZMyvJ>bL9iV|06jX#;uT7@;YHfYuWIIQ?q?osyV-R5fN44GNxKqPxjZqGfgO^ji?eJS28MlXNys+B*d0evpX65bLm{{SyrbYQ-Y-LNn%BF$##g&-v+CvDq1p>PTh!ov z()Kg;GSJpl>Kir%b@8z3nF2Tug|!UGU0!sN@y=dgTou*7u2jdZaOxn%T;1TeHSlYH zoA%v%y^uD>v+o7Ta&TJgyw?am-5d=$+jUxL;xBP0cDr=5Zi?ws(0)uWrg>-H=HVQX zc2acN7qKcCsZf8fE?it(CJlwUv+?5UTD+b!(iyaFzWNz00@Ryj5!=CsnX3ENTd3@O zu9+q9agaB9lYF|_ed*g-9StOiw<(!mN`LKxMrR&3o_#l34FDYkpTUG7dN36eV-_85 z=Rz#em3?DJK2@EGp=|HN8$DH{GfF*yHz7YIsMx?RNjX?V7g44$$Z+wj9OM#_A8O!j z{&h15-3`ThX#ygvp81Q(9eA~YMVlYim!MU-tefFOFGnZQ$E%z3HDFxru!h0mG;E9U^oeaMte-%rRdQlKRCzD8eY*^!F$6eIcH3A8GRqp$GoPjDj%C22qFU z=8dZjRZ(&;r`b;N0MO2n{hWOmmT*EvWkBVNQ_*bz+;fqY1c88W2nuNfmxp7&_^^l1 zE_j*Z|0~iO3+lkM!bj;stL3i}=YICrq_31iW%+6&ACc|zrvEiOja_%7w=!|s?rp7x zrI8~aKbzLaiB*zPN%0B0&xul!-E^tjTA=D~MW#DjkmF)dvWchGL;5%}(*sW(TWX5k z=cB6YxFx+eAu=61JE&f>Iv5oF!k8MBcnyq%N+Oc}Yiq?&e#a9mat3#ZD0|L@=XiL13G)*5=zdg{9O5$S7X%noA}m=o#RC z_w*hS3O;Nt2Q-Y?${!-Mkj7N^dAl;mduhlxwYA_*)oRk-YYc*rDc$(Z;=qkFoBeF2 
zCN9sOqT-_?LoIFw7xEylx|5YVB#ihpsCdNl)6Op5mc$focUXbQW*^F2T1}*8a96|l zL@|7MeCpydVSE5Pqf2y)E*|V&eFnCM1GaKsQAdLjy!G4^%Zn?nEfw$>M-=y}74p;U zmEK%txEh6nUddHk+;M|F8n?&rV__(pXQ`AZY8`~XXOevl!1JSs-70_Xvzgr}y%5Z# z^2JduOs~U{;Kfue3}W>dtW&rm7{?W?q`4oN66tD<)JL>UstNR#xx@bgMs1xy^myop zKazKV!}xjm9tCD@9ziiQyvs*qxyKD!O*Z41wT5=v6`H@HNZ-7@&TBM9tshIod;aW^ zGElb%SVrkkF)GlmfA%OCNci(bqh3HZIjby|Qwfh8kyG3pbk(0h!IZy4(a%1O-<$Ci zp*+-Wy@)OV&HK)0@2#;zd_jNbmIvPK7M@5&@nhMJtFndmTT9%xSG>U(#BjWxzZqp@ z*-C<(pv?AuYV(G@U1NLWABhrFZZ+@L5$^=+qBrhWSknf_3e|^aL$Ot$P-2iZ!_|i9 zpJ7IpGLs7~0MuQc*-3^;c&kBe5hPEipMax2<1=$<8ndV0vtqB-_=@0U+ZP&87PJut z@iX2DPFUQCdMpb9lMJ)z99Z_!YLy~@##8L!Il=YJa3Y(!R2_BjkBiPgsaEaueVf6V z7tu7U{k4&LH|M$Lm8vNdApM{&juy)RF@A*X{Nyq!AB&p(5{7;cswh(?M-r@m&9zOrXgSQV1u z!miZ1aeiOJ6TX|j*ZE&+kF!;01*2;GaOIfq4YlI}w(qk?xFugC{DX-JP^r^QZ4;MX zRY-LbA}P@612Z4vA2g|KrUorL_u=uZSuJz&l{12~MP5MN%!}r(j=ZUWO1pS{xya}H z=kuZ~wR$Er#CXlVu;PNvtB^1{@-wH~kmWGMq$0+PlG;f!XF!KIMg80DZD|O;<7LuuiuPZ~>FQ!j_h`sa=Ro zt=K2ZE1G=jakQxpx`-9FfbUGql~HZ++r6N3@2rRw8*@EFe0&MORoo+cb`&Y(U z13+auX;DVGOS@bkS5JQR576%UdS=S|zh=s(E$c6Ki+%4H)$yTc&GhDAQ2IAlkMqOA zlcb}u;E-~`II&Pqkc1wnoYqx`MV*T7hDn93MImgHRqSQtB_VGYsEt z984IDXlY2mN7&Yp`uxo%G!MJitM*4ENdS6h(Eo*!QqPzoLZo%^d|bir+?k%hx6v%j32A? zj&OPuyvZ&>@@(u507|AonFLp`M<5t<& zXfl0qY$8Z;?)drT&tWCLvdsVyCpDBg6>+pIs`1@@xjBvCNwDh4z&5xBRr#_Kk#W=t z<#+;jN4oqb*~TixBz+wb8*-nor?s^P1vRiB99izj%FYgzEf1n`9~u5g=I{WKEqJ^dQqqlz1Ew~+VXw*96ZSOZ`GNH+FI}s))0Zw6 ztk8|oY2ZZo*y-X1YdvpWpN0CWKsF@B0N-=2%w=@jj|=50a{kN}d`x_I2x7)CG?xRP z3`n|TgG?hCN(F#2p4*EX4Fws(0HAP}VBq!&Gt1a1L*NuhE4n^~rbysE)X3@%%~oG! zXz=5cmYLxyil)K)c%Zc47;SfFbTb1p#Kzv99rCdtQm(IHbS>NG)Y(0pj?%U16a9x- zsQ=5&h9FA0P}Ak?Vb_eX<>LcD)q28(_{GGH=~~DH(kk()y8I1@(|^JKMn+%wmM;I| zAo4D)V{-v(XDV!P4_1{NlH6`cXCAVIo6S>sNu`ApG6(`!o1<7S-Np-I>u6%w&A7o3 zKp(*-h56&Q9_c-Gq8I6=DKrYMvYJe&JJg>NGOpEPc#Gpva~!2UN0%IKwbv;Xn?NVi zWiCWM^2N&BFME(EBEYGkkVBQF<4P+%ANb9j0`Xv%T@h!M`2hpMdKcqpOs+@m=%;^d_ue(9XB2`Qvs_SLCib4;lYlf+dE@?Zq5o&xB!6W^SIlFtcCfOWZ-?8%QGPHVQY8)-aF$as zW^`@U`yMYOMK>21W7W{lyBBQBY5j9@tB$3&A0#Z;=jNMoM%#0(R7(E1TXgZ1%GwZr}|fvnWI=G*$&Bezx?@D^wm) zlpOkOfxT2q6K+W>6zG^|=~-P_6Q%G|Yf3x=-l)&;>!%eZ{4{vA1*K`Nij8_kT+aTv zpQKoSw#d8`JqN=3qicSvDZ6_&l)HVeh-_^(&9}}y9s~ovbA&i%LK%N2$9Ro4bB98? 
z#38ZUSPy!@i#fQ=x8)PH_r&TY4vuH5PlQsl>y|vHAf(zH!VX(w@6XtZTZ@T<1&alZZ-Kj>y@Hd86)eDFP#mi!Ss@|`QIBz#@gC&xsIrz`BVuVFYW2; zIyCl!;7Qq(BT?;md*ulErU)i#IvM|X zP-k)jQ^s&E*f5RO4}ie{00&e-pO!@xFaPxR^E^22T^bu_MhBU091BmYGCFslMM2|; z3gJf041$RPr?$bSK>qXEF{n9%3H*wGHn4@+WoUL>vk zjjfWE9{S*^hn6c8o56w-Zk6po3FxVVfOK&;iIrcjjZaJ5l`pznMI*6 zimBtWk@I-r=5^pNAU68?705vn9E$gPnZ8l`x-OPqCQMqwQmVM!xZ|7?(O zk#fdx22)m4Lh{H`jHEXPC_a}Bq2)M~Hj@*T^h_5iHiz~`L|iP@o^+%e%gVDy_l@<< ze79eb_v++7Q%(R_6!kd;LW*eK>tS5);us(TH3(CbCW=io11KYtLFE0SUbFFxmmp;a zKx)sFL~$TV1c@oXRQQ2jWmxYpZxZIY2rR}-*TcMAotxNav(Wy*}cE{mJl#6gvndJOdq{3yyzv`hxyr=_y9i=|Kd$rqxp8F+NF3WZsM)=#ZNnXRQ<)Jn2DmKzM`IxJlqsKdDm3&LB`UCl z_k_D3?blbjRbCfXZkGm3B>n4>Q5V9Y$9K}&_Ik|w6=_NrP@B=U#g%M@=W-}8T)^>} zX<^Ky<@V0Osd?D7_V(wj*ZxXCmY~utum0~&D~sT3@F_Ce&woSsuhn*y{}Bu7dXxEN z8eay6Ucdcr)2#%=5m7HTW#g7yek4@sNx@Psv}n zqSf|w+Sg&3|Bcygt||3Nso(5`A5XyM>6LS&_Sx6N37+ugo|=T!G)ti->;Qgh?T_!- zDESnq0J7M?tmPMsxLL!&?2pChEIsy%fI=#(L!O(}-r8*YwuE}KAh;a}l6Ur6y|iu* z)kgb?Qd#Liw^OwK5ZaLRx1oEt6)eqBRc1+Kt^4r!Vb%RZ0d|i=>U_Iy1hI{0>{;1I z)f!gVfB5t!lwEMLUC&JteDp9d|Ml8FbP`Je7ljyWUfyyN2r2O8<#T|EWf!5UhyLQ1 zj>-z6-B~i=5t-~yFm&kr;2B_)?)*3Qh>av>5;s-?*&EEfkjj`t0$vF2iV8n(TXXHT zZ@J`W_vHiU8)@e=CT8GxP%wG?Kn78IN$7PTC?qHgN2I(_8vZ?p57Ennll=#5bkOZjVEv0aR^ zrs^E_fWV?zbzeoWzD&6ni;#w6pO7)g-YBu+(j~pgNzUR1-$ACSTyd4NRi6#~mmVct z*v(KkPLK>By~CiRc@dl?1|!!7{2n*F!~E8t=VkX(LYMCi79h}KY#%l(;2Dc)gi{pj z==+@jNIM98_6Jtpe$UI#U z^-vA}9{rvIeoPF17JOmkKm}4YUo?58u|vu1pv=pL@dOmeI-NLKj_BHmNx1Y^TMo0S zgh#>BI1KOn4{(5!(LZkfj*P|yrURx;g^!;eu^PKWVmS9~!mB2eyuhP)I$LvfRd~UC zFy08PPX?&o8XcqE!HcxIaMGXyHO3HvUhn=h#^PxD^m-%UpYfL_ z)P-SMK|LCr;1UvSW{>oJ%FTn=KIwJ;+lXl(000Bo0iU&WMt}A8nSO-N1$!#ENN?7v zHUD4dQD%*4Wo373|BVVxLO30BX>_@kWmO6$(EIoxB-X9uDx)x&N~TVD%(U4wreap( zVTO(W$F$S14>?q8-qHuGez8I+pkKd0K|-RW+F6rg5Qy~hYM=dK0R-J}WnY_8*Zme% z0^DwKYzCmul_^~gMb3*7|!$;L>$w zM(<$>O_Sz#hl(2dnRB~U{rG=u`+zQ~ckJ^i)6Lpm^;0akC8iS;^=xNnF4}&831a50 z^mlJ^gC(+Wqb(Gq33sP#;B0|v<3PKdsAk2P(`gC9utxzA4X-`qDv)LMoi*=4Wa{nt zNvTdnJIo!WrJJ}>q?fO30uRhdB|n*Qxt6hK;@qe2RYoKQis^g4GC{afKu~kA)#hk3 zm0(w}^y@4Yy=hqR4R?u2jX~;6o}~a;l}s{1 zadXJ1sey=`vYM)>a$y+p8qBl39|0{q>V7~4f_*g!)YhgAuiJ*krUHU{o$?6~$OieB z=@eF6P&ngfJc3P}gO;^-8ETX~fmnd%m64vu3DJuFOV^SlGo+`yWh*|R62WApb~9|mWmMdrYYw-WNjmj zj8pepIM%FKxTUF|prwRR;8)LyfF6oKFOvP1ru>QjsEFzRWIpMRBagd?4~F zmB0@I-YaLFZ`BgUn#U<2J=vV4E&+J4<_iZS(y&r>J-}c!V@ZPv+-?Yn(Dz5{Y)m&g zj(nOyxa9UTc+4;&1=cRIgyR(T;u8DNYSq5n-2eat6ak;NYFD&RG zI39L+y4570oHVjqd1XB|-{klQp?+D`4u0mr5fnm%;eqs3ynyn*XW;1xbS3R5p&WNZ zaBW=Fe-bWeeJa7wE_Zx%_Qt||RD1=22apn|VTvVJuXY6mq5S549AL`_u$TGXfnX;S zA?2TJsQ9j7r)DR^zT*EfV2KT5)yMf9FAF-lWI)KN#_Zr^mP@#fQ}YxbeMYJK#&sL% z1_y79wy8Cw#ePya{8831tf?i4NVd*{_gg;Zgb%g<0LRrG9Wt5f4&>^-O0F;sh-?bDXi$;t6a733b!~MspKWk_ zon;<@-n*ojdZJf~CZsTq(RaVt1BK-U&_tkK=~f#eBbKq5zISNol~mD&#ySv z1%}Gedvv!=NXUObGWBrwj5j!x0QSA&Vh}LNsR01)=bY?O*}GWLVL*1)S=0V(w$C$7 z$VDFkzK+tU1`IxU!fHdsMvH-gY*9+>`02<72hXJNLv_s40Jzm;JL3m;)*sRDofHMY zD!8obCB(1{;FjnHHjPgj++=6PDax_F=JA9(|NKiA51O;`-Q19i&SaUTL0w2bjsFmi z-c>3}LH7<|czI@BB!JEO%KnYguAhbhf$)^#A9wtHK~EUrslF1y@f_YD<6NQ&AAc{Ix$s&y^#Xjg>!!4;wVTD;#_ z*0p~e*T+g0hg7~3pfIsw%4B@Me}f}b-hT{Oke=yC#B#Ri=6j!Sl?*tXVVB&P325fI zKl3isB7i|4!E%B1KHr3c;hvW@l!$T?VmLoIMmn+3cgNtp&CI|3(|31w7HoJplljw) z)x3W%G+C86+`~MI&3)~OH}dy0@k2>$%GuIwLYT*5O-eM1#{L&zn@gWt=?(x%1%IVj zE5KL1V_O6Hu$QYuC?2K7GXzEclm-o@2rt$_ZK@K|7a0Knly@~@v~{UaWY>R7R3`(h zH0mN|rWrG_kMAk1*ve(qdB|0o$a@w2f+(M>f zz5VE7)_r|g0>mx3voj4)$EP)@%|_~OyhiyM!2Sd`i4Xop`tzljlZ5x7ZzXv!s|VY6 z6WL%YV_ByjH)t~&DzuoA+TRcb?g^0$a-?kimrvNdrNVCt8<23rQ23W9Mw(`t*Mtor`ZE&EVI-`%#!q?8^RMvaoYXPLk#lU$DdUzgBYT~+G 
[base85-encoded git binary patch data omitted — not human-readable]
Date: Sat, 30 Oct 2021 11:54:44 +0800
Subject: [PATCH 287/414] [Doc] add changelog (#1246)

---
 README.md         |  3 +--
 README_zh-CN.md   |  3 +--
 docs/changelog.md | 31 +++++++++++++++++++++++++++++++
 3 files changed, 33 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index fd54bef9d3..b03d833336 100644
--- a/README.md
+++ b/README.md
@@ -44,12 +44,11 @@ The master branch works with **PyTorch 1.3+**.

## Updates

-- (2021-10-26) We provide a demo for skeleton-based and rgb-based spatio-temporal detection and action recognition (demo/demo_video_structuralize.py).
+- (2021-10-29) We provide a demo for skeleton-based and rgb-based spatio-temporal detection and action recognition (demo/demo_video_structuralize.py).
- (2021-10-26) We train and test **ST-GCN** on NTU60 with 3D keypoint annotations, achieve 84.61% Top-1 accuracy (higher than 81.5% in the [paper](https://www.aaai.org/ocs/index.php/AAAI/AAAI18/paper/viewPaper/17135)). - (2021-10-25) We provide a script(tools/data/skeleton/gen_ntu_rgbd_raw.py) to convert the NTU60 and NTU120 3D raw skeleton data to our format. - (2021-10-25) We provide a [guide](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md) on how to train PoseC3D with custom datasets, [bit-scientist](https://github.com/bit-scientist) authored this PR! - (2021-10-16) We support **PoseC3D** on UCF101 and HMDB51, achieves 87.0% and 69.3% Top-1 accuracy with 2D skeletons only. Pre-extracted 2D skeletons are also available. -- (2021-10-12) We support **TorchServe**! Now recognition models in MMAction2 can be packed as a `.mar` file and served with TorchServe. **Release**: v0.19.0 was released in 07/10/2021. Please refer to [changelog.md](docs/changelog.md) for details and release history. diff --git a/README_zh-CN.md b/README_zh-CN.md index 3064632d47..8999e52fd9 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -43,12 +43,11 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLa ## 更新记录 -- (2021-10-26) 支持基于 skeleton 模态和 rgb 模态的时空动作检测和行为识别 demo (demo/demo_video_structuralize.py)。 +- (2021-10-29) 支持基于 skeleton 模态和 rgb 模态的时空动作检测和行为识别 demo (demo/demo_video_structuralize.py)。 - (2021-10-26) 在 NTU60 3d 关键点标注数据集上训练测试 **STGCN**, 可达到 84.61% (高于 [paper](https://www.aaai.org/ocs/index.php/AAAI/AAAI18/paper/viewPaper/17135) 中的 81.5%) 的识别准确率。 - (2021-10-25) 提供将 NTU60 和 NTU120 的 3d 骨骼点数据转换成我们项目的格式的脚本(tools/data/skeleton/gen_ntu_rgbd_raw.py)。 - (2021-10-25) 提供使用自定义数据集训练 PoseC3D 的 [教程](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md),此 PR 由用户 [bit-scientist](https://github.com/bit-scientist) 完成! 
- (2021-10-16) 在 UCF101, HMDB51 上支持 **PoseC3D**，仅用 2D 关键点就可分别达到 87.0% 和 69.3% 的识别准确率。两数据集的预提取骨架特征可以公开下载。
-- (2021-10-12) 支持 **TorchServe**！目前可以使用 TorchServe 部署 MMAction2 中的动作识别模型。

v0.19.0 版本已于 2021 年 10 月 7 日发布，可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史

diff --git a/docs/changelog.md b/docs/changelog.md
index 4f09057489..9fdd697e32 100644
--- a/docs/changelog.md
+++ b/docs/changelog.md
@@ -1,5 +1,36 @@
## Changelog

+### 0.20.0 (30/10/2021)
+
+**Highlights**
+
+- Support TorchServe
+- Add video structuralize demo
+- Support using 3D skeletons for skeleton-based action recognition
+- Benchmark PoseC3D on UCF and HMDB
+
+**New Features**
+
+- Support TorchServe ([#1212](https://github.com/open-mmlab/mmaction2/pull/1212))
+- Support 3D skeletons pre-processing ([#1218](https://github.com/open-mmlab/mmaction2/pull/1218))
+- Support video structuralize demo ([#1197](https://github.com/open-mmlab/mmaction2/pull/1197))
+
+**Documentations**
+
+- Revise README.md and add projects.md ([#1214](https://github.com/open-mmlab/mmaction2/pull/1214))
+- Add CN docs for Skeleton dataset, PoseC3D and ST-GCN ([#1228](https://github.com/open-mmlab/mmaction2/pull/1228), [#1237](https://github.com/open-mmlab/mmaction2/pull/1237), [#1236](https://github.com/open-mmlab/mmaction2/pull/1236))
+- Add tutorial for custom dataset training for skeleton-based action recognition ([#1234](https://github.com/open-mmlab/mmaction2/pull/1234))
+
+**Bug and Typo Fixes**
+
+- Fix tutorial link ([#1219](https://github.com/open-mmlab/mmaction2/pull/1219))
+- Fix GYM links ([#1224](https://github.com/open-mmlab/mmaction2/pull/1224))
+
+**ModelZoo**
+
+- Benchmark PoseC3D on UCF and HMDB ([#1223](https://github.com/open-mmlab/mmaction2/pull/1223))
+- Add ST-GCN + 3D skeleton model for NTU60-XSub ([#1236](https://github.com/open-mmlab/mmaction2/pull/1236))
+
### 0.19.0 (07/10/2021)

**Highlights**

From 48ba0fb6dbceb0084f95f4ec288b23b244d2c360 Mon Sep 17 00:00:00 2001
From: Haodong Duan
Date: Sat, 30 Oct 2021 11:57:25 +0800
Subject: [PATCH 288/414] Bump Version to 0.20.0 (#1247)

---
 README.md               | 2 +-
 README_zh-CN.md         | 2 +-
 docker/serve/Dockerfile | 2 +-
 mmaction/version.py     | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index b03d833336..fc3aa0ffbf 100644
--- a/README.md
+++ b/README.md
@@ -50,7 +50,7 @@ The master branch works with **PyTorch 1.3+**.
- (2021-10-25) We provide a [guide](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md) on how to train PoseC3D with custom datasets, [bit-scientist](https://github.com/bit-scientist) authored this PR!
- (2021-10-16) We support **PoseC3D** on UCF101 and HMDB51, achieves 87.0% and 69.3% Top-1 accuracy with 2D skeletons only. Pre-extracted 2D skeletons are also available.

-**Release**: v0.19.0 was released in 07/10/2021. Please refer to [changelog.md](docs/changelog.md) for details and release history.
+**Release**: v0.20.0 was released in 30/10/2021. Please refer to [changelog.md](docs/changelog.md) for details and release history.

## Installation

diff --git a/README_zh-CN.md b/README_zh-CN.md
index 8999e52fd9..3584bc51f2 100644
--- a/README_zh-CN.md
+++ b/README_zh-CN.md
@@ -49,7 +49,7 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱，是 [OpenMMLa
- (2021-10-25) 提供使用自定义数据集训练 PoseC3D 的 [教程](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md)，此 PR 由用户 [bit-scientist](https://github.com/bit-scientist) 完成！
- (2021-10-16) 在 UCF101, HMDB51 上支持 **PoseC3D**,仅用 2D 关键点就可分别达到 87.0% 和 69.3% 的识别准确率。两数据集的预提取骨架特征可以公开下载。 -v0.19.0 版本已于 2021 年 10 月 7 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史 +v0.20.0 版本已于 2021 年 10 月 30 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史 ## 安装 diff --git a/docker/serve/Dockerfile b/docker/serve/Dockerfile index e0004cfa58..69b8ce8ea0 100644 --- a/docker/serve/Dockerfile +++ b/docker/serve/Dockerfile @@ -4,7 +4,7 @@ ARG CUDNN="7" FROM pytorch/pytorch:${PYTORCH}-cuda${CUDA}-cudnn${CUDNN}-devel ARG MMCV="1.3.8" -ARG MMACTION="0.19.0" +ARG MMACTION="0.20.0" ENV PYTHONUNBUFFERED TRUE diff --git a/mmaction/version.py b/mmaction/version.py index ad1d1efcfb..ffa55d38ae 100644 --- a/mmaction/version.py +++ b/mmaction/version.py @@ -1,6 +1,6 @@ # Copyright (c) Open-MMLab. All rights reserved. -__version__ = '0.19.0' +__version__ = '0.20.0' def parse_version_info(version_str): From aa6d14d7ebbb21cc37d54d5eff49ce7a79422658 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Wed, 3 Nov 2021 11:44:13 +0800 Subject: [PATCH 289/414] [Doc] Add MMCV links to doc (#1251) --- docs/conf.py | 39 ++++++++++++++++++++++----------------- docs_zh_CN/conf.py | 41 ++++++++++++++++++++++++----------------- 2 files changed, 46 insertions(+), 34 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 464852f08c..f8275be40f 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -113,47 +113,52 @@ def get_version(): }, { 'name': - 'Projects', + 'Docs', 'children': [ + { + 'name': 'MMCV', + 'url': 'https://mmcv.readthedocs.io/en/latest/', + }, { 'name': 'MMAction2', - 'url': 'https://github.com/open-mmlab/mmaction2', + 'url': 'https://mmaction2.readthedocs.io/en/latest/', }, { 'name': 'MMClassification', - 'url': 'https://github.com/open-mmlab/mmclassification', + 'url': + 'https://mmclassification.readthedocs.io/en/latest/', }, { - 'name': 'MMSegmentation', - 'url': 'https://github.com/open-mmlab/mmsegmentation', + 'name': 'MMDetection', + 'url': 'https://mmdetection.readthedocs.io/en/latest/', }, { 'name': 'MMDetection3D', - 'url': 'https://github.com/open-mmlab/mmdetection3d', + 'url': 'https://mmdetection3d.readthedocs.io/en/latest/', }, { 'name': 'MMEditing', - 'url': 'https://github.com/open-mmlab/mmediting', + 'url': 'https://mmediting.readthedocs.io/en/latest/', }, { - 'name': 'MMDetection3D', - 'url': 'https://github.com/open-mmlab/mmdetection3d', + 'name': 'MMGeneration', + 'url': 'https://mmgeneration.readthedocs.io/en/latest/', }, { - 'name': 'MMPose', - 'url': 'https://github.com/open-mmlab/mmpose', + 'name': 'MMOCR', + 'url': 'https://mmocr.readthedocs.io/en/latest/', }, { - 'name': 'MMTracking', - 'url': 'https://github.com/open-mmlab/mmtracking', + 'name': 'MMPose', + 'url': 'https://mmpose.readthedocs.io/en/latest/', }, { - 'name': 'MMGeneration', - 'url': 'https://github.com/open-mmlab/mmgeneration', + 'name': 'MMSegmentation', + 'url': 'https://mmsegmentation.readthedocs.io/en/latest/', }, { - 'name': 'MMOCR', - 'url': 'https://github.com/open-mmlab/mmocr', + 'name': 'MMTracking', + 'url': 'https://mmtracking.readthedocs.io/en/latest/', }, ] }, diff --git a/docs_zh_CN/conf.py b/docs_zh_CN/conf.py index 22acdaabc0..a04e175cd8 100644 --- a/docs_zh_CN/conf.py +++ b/docs_zh_CN/conf.py @@ -106,47 +106,54 @@ def get_version(): }, { 'name': - 'OpenMMLab 各项目', + '文档', 'children': [ + { + 'name': 'MMCV', + 'url': 'https://mmcv.readthedocs.io/zh_CN/latest/', + }, { 'name': 'MMAction2', - 'url': 'https://github.com/open-mmlab/mmaction2', + 'url': 'https://mmaction2.readthedocs.io/zh_CN/latest/', 
}, { 'name': 'MMClassification', - 'url': 'https://github.com/open-mmlab/mmclassification', + 'url': + 'https://mmclassification.readthedocs.io/zh_CN/latest/', }, { - 'name': 'MMSegmentation', - 'url': 'https://github.com/open-mmlab/mmsegmentation', + 'name': 'MMDetection', + 'url': 'https://mmdetection.readthedocs.io/zh_CN/latest/', }, { 'name': 'MMDetection3D', - 'url': 'https://github.com/open-mmlab/mmdetection3d', + 'url': + 'https://mmdetection3d.readthedocs.io/zh_CN/latest/', }, { 'name': 'MMEditing', - 'url': 'https://github.com/open-mmlab/mmediting', + 'url': 'https://mmediting.readthedocs.io/zh_CN/latest/', }, { - 'name': 'MMDetection3D', - 'url': 'https://github.com/open-mmlab/mmdetection3d', + 'name': 'MMGeneration', + 'url': 'https://mmgeneration.readthedocs.io/zh_CN/latest/', }, { - 'name': 'MMPose', - 'url': 'https://github.com/open-mmlab/mmpose', + 'name': 'MMOCR', + 'url': 'https://mmocr.readthedocs.io/zh_CN/latest/', }, { - 'name': 'MMTracking', - 'url': 'https://github.com/open-mmlab/mmtracking', + 'name': 'MMPose', + 'url': 'https://mmpose.readthedocs.io/zh_CN/latest/', }, { - 'name': 'MMGeneration', - 'url': 'https://github.com/open-mmlab/mmgeneration', + 'name': 'MMSegmentation', + 'url': + 'https://mmsegmentation.readthedocs.io/zh_CN/latest/', }, { - 'name': 'MMOCR', - 'url': 'https://github.com/open-mmlab/mmocr', + 'name': 'MMTracking', + 'url': 'https://mmtracking.readthedocs.io/zh_CN/latest/', }, ] }, From 491fe53b70ade539eea5647e6976c43959d37dd3 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Sat, 6 Nov 2021 12:45:51 +0800 Subject: [PATCH 290/414] [Fix] update the inference part in notebooks (#1256) --- demo/demo.ipynb | 26 +- demo/mmaction2_tutorial.ipynb | 2389 ++++++++++++++------------- demo/mmaction2_tutorial_zh-CN.ipynb | 8 +- 3 files changed, 1225 insertions(+), 1198 deletions(-) diff --git a/demo/demo.ipynb b/demo/demo.ipynb index 0f7ff116ef..fd9e42487b 100644 --- a/demo/demo.ipynb +++ b/demo/demo.ipynb @@ -55,15 +55,21 @@ "# test a single video and show the result:\n", "video = 'demo.mp4'\n", "label = '../tools/data/kinetics/label_map_k400.txt'\n", - "results = inference_recognizer(model, video, label)" + "results = inference_recognizer(model, video)\n", + "\n", + "labels = open(label).readlines()\n", + "labels = [x.strip() for x in labels]\n", + "results = [(labels[k[0]], k[1]) for k in results]" ] }, { "cell_type": "code", "execution_count": 5, "metadata": { - "scrolled": false, "collapsed": false, + "jupyter": { + "outputs_hidden": false + }, "pycharm": { "is_executing": false, "name": "#%%\n" @@ -72,14 +78,14 @@ "outputs": [ { "name": "stdout", + "output_type": "stream", "text": [ "arm wrestling: 29.61644\n", "rock scissors paper: 10.754839\n", "shaking hands: 9.9084\n", "clapping: 9.189912\n", "massaging feet: 8.305307\n" - ], - "output_type": "stream" + ] } ], "source": [ @@ -91,9 +97,9 @@ ], "metadata": { "kernelspec": { - "name": "pycharm-d922ce35", + "display_name": "Python 3", "language": "python", - "display_name": "PyCharm (mmaction-lite)" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -105,18 +111,18 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.3" + "version": "3.7.4" }, "pycharm": { "stem_cell": { "cell_type": "raw", - "source": [], "metadata": { "collapsed": false - } + }, + "source": [] } } }, "nbformat": 4, - "nbformat_minor": 2 + "nbformat_minor": 4 } diff --git a/demo/mmaction2_tutorial.ipynb b/demo/mmaction2_tutorial.ipynb index 0b8f3ed146..14441ab79b 100644 
--- a/demo/mmaction2_tutorial.ipynb +++ b/demo/mmaction2_tutorial.ipynb @@ -1,1230 +1,1247 @@ { - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "colab": { - "name": "MMAction2 Tutorial.ipynb", - "provenance": [], - "collapsed_sections": [], - "toc_visible": true, - "include_colab_link": true - }, - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "accelerator": "GPU" + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "view-in-github" + }, + "source": [ + "\"Open" + ] }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "view-in-github", - "colab_type": "text" - }, - "source": [ - "\"Open" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "VcjSRFELVbNk" - }, - "source": [ - "# MMAction2 Tutorial\n", - "\n", - "Welcome to MMAction2! This is the official colab tutorial for using MMAction2. In this tutorial, you will learn\n", - "- Perform inference with a MMAction2 recognizer.\n", - "- Train a new recognizer with a new dataset.\n", - "\n", - "Let's start!" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "7LqHGkGEVqpm" - }, - "source": [ - "## Install MMAction2" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "Bf8PpPXtVvmg", - "colab": { - "base_uri": "https://localhost:8080/" - }, - "outputId": "f262f3c6-a9dd-48c7-8f7e-081fd3e12ba8" - }, - "source": [ - "# Check nvcc version\n", - "!nvcc -V\n", - "# Check GCC version\n", - "!gcc --version" - ], - "execution_count": 1, - "outputs": [ - { - "output_type": "stream", - "text": [ - "nvcc: NVIDIA (R) Cuda compiler driver\n", - "Copyright (c) 2005-2020 NVIDIA Corporation\n", - "Built on Wed_Jul_22_19:09:09_PDT_2020\n", - "Cuda compilation tools, release 11.0, V11.0.221\n", - "Build cuda_11.0_bu.TC445_37.28845127_0\n", - "gcc (Ubuntu 7.5.0-3ubuntu1~18.04) 7.5.0\n", - "Copyright (C) 2017 Free Software Foundation, Inc.\n", - "This is free software; see the source for copying conditions. 
There is NO\n", - "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n", - "\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "5PAJ4ArzV5Ry", - "colab": { - "base_uri": "https://localhost:8080/" - }, - "outputId": "b68c4528-1a83-469f-8920-040ae373fc7c" - }, - "source": [ - "# install dependencies: (use cu101 because colab has CUDA 10.1)\n", - "!pip install -U torch==1.8.0+cu101 torchvision==0.9.0+cu101 torchtext==0.9.0 -f https://download.pytorch.org/whl/torch_stable.html\n", - "\n", - "# install mmcv-full thus we could use CUDA operators\n", - "!pip install mmcv-full==1.3.9 -f https://download.openmmlab.com/mmcv/dist/cu101/torch1.8.0/index.html\n", - "\n", - "# Install mmaction2\n", - "!rm -rf mmaction2\n", - "!git clone https://github.com/open-mmlab/mmaction2.git\n", - "%cd mmaction2\n", - "\n", - "!pip install -e .\n", - "\n", - "# Install some optional requirements\n", - "!pip install -r requirements/optional.txt" - ], - "execution_count": 2, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Looking in links: https://download.pytorch.org/whl/torch_stable.html\n", - "Collecting torch==1.8.0+cu101\n", - "\u001b[?25l Downloading https://download.pytorch.org/whl/cu101/torch-1.8.0%2Bcu101-cp37-cp37m-linux_x86_64.whl (763.5MB)\n", - "\u001b[K |████████████████████████████████| 763.5MB 23kB/s \n", - "\u001b[?25hCollecting torchvision==0.9.0+cu101\n", - "\u001b[?25l Downloading https://download.pytorch.org/whl/cu101/torchvision-0.9.0%2Bcu101-cp37-cp37m-linux_x86_64.whl (17.3MB)\n", - "\u001b[K |████████████████████████████████| 17.3MB 188kB/s \n", - "\u001b[?25hCollecting torchtext==0.9.0\n", - "\u001b[?25l Downloading https://files.pythonhosted.org/packages/36/50/84184d6230686e230c464f0dd4ff32eada2756b4a0b9cefec68b88d1d580/torchtext-0.9.0-cp37-cp37m-manylinux1_x86_64.whl (7.1MB)\n", - "\u001b[K |████████████████████████████████| 7.1MB 8.0MB/s \n", - "\u001b[?25hRequirement already satisfied, skipping upgrade: numpy in /usr/local/lib/python3.7/dist-packages (from torch==1.8.0+cu101) (1.19.5)\n", - "Requirement already satisfied, skipping upgrade: typing-extensions in /usr/local/lib/python3.7/dist-packages (from torch==1.8.0+cu101) (3.7.4.3)\n", - "Requirement already satisfied, skipping upgrade: pillow>=4.1.1 in /usr/local/lib/python3.7/dist-packages (from torchvision==0.9.0+cu101) (7.1.2)\n", - "Requirement already satisfied, skipping upgrade: tqdm in /usr/local/lib/python3.7/dist-packages (from torchtext==0.9.0) (4.41.1)\n", - "Requirement already satisfied, skipping upgrade: requests in /usr/local/lib/python3.7/dist-packages (from torchtext==0.9.0) (2.23.0)\n", - "Requirement already satisfied, skipping upgrade: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (2.10)\n", - "Requirement already satisfied, skipping upgrade: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (1.24.3)\n", - "Requirement already satisfied, skipping upgrade: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (3.0.4)\n", - "Requirement already satisfied, skipping upgrade: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (2021.5.30)\n", - "Installing collected packages: torch, torchvision, torchtext\n", - " Found existing installation: torch 1.9.0+cu102\n", - " Uninstalling torch-1.9.0+cu102:\n", - " Successfully uninstalled torch-1.9.0+cu102\n", 
- " Found existing installation: torchvision 0.10.0+cu102\n", - " Uninstalling torchvision-0.10.0+cu102:\n", - " Successfully uninstalled torchvision-0.10.0+cu102\n", - " Found existing installation: torchtext 0.10.0\n", - " Uninstalling torchtext-0.10.0:\n", - " Successfully uninstalled torchtext-0.10.0\n", - "Successfully installed torch-1.8.0+cu101 torchtext-0.9.0 torchvision-0.9.0+cu101\n", - "Looking in links: https://download.openmmlab.com/mmcv/dist/cu101/torch1.8.0/index.html\n", - "Collecting mmcv-full==1.3.9\n", - "\u001b[?25l Downloading https://download.openmmlab.com/mmcv/dist/cu101/torch1.8.0/mmcv_full-1.3.9-cp37-cp37m-manylinux1_x86_64.whl (31.4MB)\n", - "\u001b[K |████████████████████████████████| 31.4MB 94kB/s \n", - "\u001b[?25hRequirement already satisfied: pyyaml in /usr/local/lib/python3.7/dist-packages (from mmcv-full==1.3.9) (3.13)\n", - "Requirement already satisfied: opencv-python>=3 in /usr/local/lib/python3.7/dist-packages (from mmcv-full==1.3.9) (4.1.2.30)\n", - "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from mmcv-full==1.3.9) (1.19.5)\n", - "Requirement already satisfied: Pillow in /usr/local/lib/python3.7/dist-packages (from mmcv-full==1.3.9) (7.1.2)\n", - "Collecting addict\n", - " Downloading https://files.pythonhosted.org/packages/6a/00/b08f23b7d7e1e14ce01419a467b583edbb93c6cdb8654e54a9cc579cd61f/addict-2.4.0-py3-none-any.whl\n", - "Collecting yapf\n", - "\u001b[?25l Downloading https://files.pythonhosted.org/packages/5f/0d/8814e79eb865eab42d95023b58b650d01dec6f8ea87fc9260978b1bf2167/yapf-0.31.0-py2.py3-none-any.whl (185kB)\n", - "\u001b[K |████████████████████████████████| 194kB 8.8MB/s \n", - "\u001b[?25hInstalling collected packages: addict, yapf, mmcv-full\n", - "Successfully installed addict-2.4.0 mmcv-full-1.3.9 yapf-0.31.0\n", - "Cloning into 'mmaction2'...\n", - "remote: Enumerating objects: 12544, done.\u001b[K\n", - "remote: Counting objects: 100% (677/677), done.\u001b[K\n", - "remote: Compressing objects: 100% (330/330), done.\u001b[K\n", - "remote: Total 12544 (delta 432), reused 510 (delta 344), pack-reused 11867\u001b[K\n", - "Receiving objects: 100% (12544/12544), 42.42 MiB | 30.27 MiB/s, done.\n", - "Resolving deltas: 100% (8980/8980), done.\n", - "/content/mmaction2\n", - "Obtaining file:///content/mmaction2\n", - "Requirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.16.0) (3.2.2)\n", - "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.16.0) (1.19.5)\n", - "Requirement already satisfied: opencv-contrib-python in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.16.0) (4.1.2.30)\n", - "Requirement already satisfied: Pillow in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.16.0) (7.1.2)\n", - "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.16.0) (2.8.1)\n", - "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.16.0) (0.10.0)\n", - "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.16.0) (1.3.1)\n", - "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.16.0) (2.4.7)\n", - "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.7/dist-packages (from 
python-dateutil>=2.1->matplotlib->mmaction2==0.16.0) (1.15.0)\n", - "Installing collected packages: mmaction2\n", - " Running setup.py develop for mmaction2\n", - "Successfully installed mmaction2\n", - "Collecting av\n", - "\u001b[?25l Downloading https://files.pythonhosted.org/packages/66/ff/bacde7314c646a2bd2f240034809a10cc3f8b096751284d0828640fff3dd/av-8.0.3-cp37-cp37m-manylinux2010_x86_64.whl (37.2MB)\n", - "\u001b[K |████████████████████████████████| 37.2MB 76kB/s \n", - "\u001b[?25hCollecting decord>=0.4.1\n", - "\u001b[?25l Downloading https://files.pythonhosted.org/packages/11/79/936af42edf90a7bd4e41a6cac89c913d4b47fa48a26b042d5129a9242ee3/decord-0.6.0-py3-none-manylinux2010_x86_64.whl (13.6MB)\n", - "\u001b[K |████████████████████████████████| 13.6MB 231kB/s \n", - "\u001b[?25hCollecting einops\n", - " Downloading https://files.pythonhosted.org/packages/5d/a0/9935e030634bf60ecd572c775f64ace82ceddf2f504a5fd3902438f07090/einops-0.3.0-py2.py3-none-any.whl\n", - "Requirement already satisfied: imgaug in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 4)) (0.2.9)\n", - "Requirement already satisfied: librosa in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 5)) (0.8.1)\n", - "Requirement already satisfied: lmdb in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 6)) (0.99)\n", - "Requirement already satisfied: moviepy in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 7)) (0.2.3.5)\n", - "Collecting onnx\n", - "\u001b[?25l Downloading https://files.pythonhosted.org/packages/3f/9b/54c950d3256e27f970a83cd0504efb183a24312702deed0179453316dbd0/onnx-1.9.0-cp37-cp37m-manylinux2010_x86_64.whl (12.2MB)\n", - "\u001b[K |████████████████████████████████| 12.2MB 36.2MB/s \n", - "\u001b[?25hCollecting onnxruntime\n", - "\u001b[?25l Downloading https://files.pythonhosted.org/packages/c9/35/80ab6f444a83c708817e011e9cd4708c816591cc85aff830dff525a34992/onnxruntime-1.8.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (4.5MB)\n", - "\u001b[K |████████████████████████████████| 4.5MB 29.5MB/s \n", - "\u001b[?25hCollecting pims\n", - "\u001b[?25l Downloading https://files.pythonhosted.org/packages/d5/47/82e0ac31e01a271e5a06362fbf03769e9081956f6772f91d98b32899d743/PIMS-0.5.tar.gz (85kB)\n", - "\u001b[K |████████████████████████████████| 92kB 13.1MB/s \n", - "\u001b[?25hCollecting PyTurboJPEG\n", - " Downloading https://files.pythonhosted.org/packages/f9/7b/7621780391ed7a33acec8e803068d7291d940fbbad1ffc8909e94e844477/PyTurboJPEG-1.5.1.tar.gz\n", - "Collecting timm\n", - "\u001b[?25l Downloading https://files.pythonhosted.org/packages/90/fc/606bc5cf46acac3aa9bd179b3954433c026aaf88ea98d6b19f5d14c336da/timm-0.4.12-py3-none-any.whl (376kB)\n", - "\u001b[K |████████████████████████████████| 378kB 43.1MB/s \n", - "\u001b[?25hRequirement already satisfied: numpy>=1.14.0 in /usr/local/lib/python3.7/dist-packages (from decord>=0.4.1->-r requirements/optional.txt (line 2)) (1.19.5)\n", - "Requirement already satisfied: Pillow in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (7.1.2)\n", - "Requirement already satisfied: scikit-image>=0.11.0 in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (0.16.2)\n", - "Requirement already satisfied: six in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (1.15.0)\n", - "Requirement already satisfied: imageio in 
/usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (2.4.1)\n", - "Requirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (3.2.2)\n", - "Requirement already satisfied: opencv-python in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (4.1.2.30)\n", - "Requirement already satisfied: Shapely in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (1.7.1)\n", - "Requirement already satisfied: scipy in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (1.4.1)\n", - "Requirement already satisfied: resampy>=0.2.2 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (0.2.2)\n", - "Requirement already satisfied: pooch>=1.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (1.4.0)\n", - "Requirement already satisfied: numba>=0.43.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (0.51.2)\n", - "Requirement already satisfied: audioread>=2.0.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (2.1.9)\n", - "Requirement already satisfied: soundfile>=0.10.2 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (0.10.3.post1)\n", - "Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (20.9)\n", - "Requirement already satisfied: joblib>=0.14 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (1.0.1)\n", - "Requirement already satisfied: decorator>=3.0.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (4.4.2)\n", - "Requirement already satisfied: scikit-learn!=0.19.0,>=0.14.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (0.22.2.post1)\n", - "Requirement already satisfied: tqdm<5.0,>=4.11.2 in /usr/local/lib/python3.7/dist-packages (from moviepy->-r requirements/optional.txt (line 7)) (4.41.1)\n", - "Requirement already satisfied: protobuf in /usr/local/lib/python3.7/dist-packages (from onnx->-r requirements/optional.txt (line 8)) (3.17.3)\n", - "Requirement already satisfied: typing-extensions>=3.6.2.1 in /usr/local/lib/python3.7/dist-packages (from onnx->-r requirements/optional.txt (line 8)) (3.7.4.3)\n", - "Requirement already satisfied: flatbuffers in /usr/local/lib/python3.7/dist-packages (from onnxruntime->-r requirements/optional.txt (line 9)) (1.12)\n", - "Collecting slicerator>=0.9.8\n", - " Downloading https://files.pythonhosted.org/packages/75/ae/fe46f5371105508a209fe6162e7e7b11db531a79d2eabcd24566b8b1f534/slicerator-1.0.0-py3-none-any.whl\n", - "Requirement already satisfied: torchvision in /usr/local/lib/python3.7/dist-packages (from timm->-r requirements/optional.txt (line 12)) (0.9.0+cu101)\n", - "Requirement already satisfied: torch>=1.4 in /usr/local/lib/python3.7/dist-packages (from timm->-r requirements/optional.txt (line 12)) (1.8.0+cu101)\n", - "Requirement already satisfied: networkx>=2.0 in /usr/local/lib/python3.7/dist-packages (from scikit-image>=0.11.0->imgaug->-r requirements/optional.txt (line 4)) (2.5.1)\n", - "Requirement already satisfied: PyWavelets>=0.4.0 in 
/usr/local/lib/python3.7/dist-packages (from scikit-image>=0.11.0->imgaug->-r requirements/optional.txt (line 4)) (1.1.1)\n", - "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 4)) (0.10.0)\n", - "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 4)) (2.8.1)\n", - "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 4)) (2.4.7)\n", - "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 4)) (1.3.1)\n", - "Requirement already satisfied: requests in /usr/local/lib/python3.7/dist-packages (from pooch>=1.0->librosa->-r requirements/optional.txt (line 5)) (2.23.0)\n", - "Requirement already satisfied: appdirs in /usr/local/lib/python3.7/dist-packages (from pooch>=1.0->librosa->-r requirements/optional.txt (line 5)) (1.4.4)\n", - "Requirement already satisfied: llvmlite<0.35,>=0.34.0.dev0 in /usr/local/lib/python3.7/dist-packages (from numba>=0.43.0->librosa->-r requirements/optional.txt (line 5)) (0.34.0)\n", - "Requirement already satisfied: setuptools in /usr/local/lib/python3.7/dist-packages (from numba>=0.43.0->librosa->-r requirements/optional.txt (line 5)) (57.0.0)\n", - "Requirement already satisfied: cffi>=1.0 in /usr/local/lib/python3.7/dist-packages (from soundfile>=0.10.2->librosa->-r requirements/optional.txt (line 5)) (1.14.5)\n", - "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests->pooch>=1.0->librosa->-r requirements/optional.txt (line 5)) (2.10)\n", - "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests->pooch>=1.0->librosa->-r requirements/optional.txt (line 5)) (2021.5.30)\n", - "Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests->pooch>=1.0->librosa->-r requirements/optional.txt (line 5)) (3.0.4)\n", - "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests->pooch>=1.0->librosa->-r requirements/optional.txt (line 5)) (1.24.3)\n", - "Requirement already satisfied: pycparser in /usr/local/lib/python3.7/dist-packages (from cffi>=1.0->soundfile>=0.10.2->librosa->-r requirements/optional.txt (line 5)) (2.20)\n", - "Building wheels for collected packages: pims, PyTurboJPEG\n", - " Building wheel for pims (setup.py) ... \u001b[?25l\u001b[?25hdone\n", - " Created wheel for pims: filename=PIMS-0.5-cp37-none-any.whl size=84328 sha256=436632b7a982144fd933f01d12e38a419eb8a636f2d6dd4bd4a43680734979e2\n", - " Stored in directory: /root/.cache/pip/wheels/0e/0a/14/4c33a4cc1b9158e57329a38e8e3e03901ed24060eb322d5462\n", - " Building wheel for PyTurboJPEG (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n", - " Created wheel for PyTurboJPEG: filename=PyTurboJPEG-1.5.1-cp37-none-any.whl size=7979 sha256=755337aaa622b48be036eca6d743e99bf4528fc6c64e810da11a71236a78bcca\n", - " Stored in directory: /root/.cache/pip/wheels/19/cb/78/5725c881ee618936d956bf0ecd4272cb0f701cb898f44575ca\n", - "Successfully built pims PyTurboJPEG\n", - "Installing collected packages: av, decord, einops, onnx, onnxruntime, slicerator, pims, PyTurboJPEG, timm\n", - "Successfully installed PyTurboJPEG-1.5.1 av-8.0.3 decord-0.6.0 einops-0.3.0 onnx-1.9.0 onnxruntime-1.8.1 pims-0.5 slicerator-1.0.0 timm-0.4.12\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "No_zZAFpWC-a", - "colab": { - "base_uri": "https://localhost:8080/" - }, - "outputId": "7e95038a-6f79-410b-adf6-0148bf8cc2fc" - }, - "source": [ - "# Check Pytorch installation\n", - "import torch, torchvision\n", - "print(torch.__version__, torch.cuda.is_available())\n", - "\n", - "# Check MMAction2 installation\n", - "import mmaction\n", - "print(mmaction.__version__)\n", - "\n", - "# Check MMCV installation\n", - "from mmcv.ops import get_compiling_cuda_version, get_compiler_version\n", - "print(get_compiling_cuda_version())\n", - "print(get_compiler_version())" - ], - "execution_count": 3, - "outputs": [ - { - "output_type": "stream", - "text": [ - "1.8.0+cu101 True\n", - "0.16.0\n", - "10.1\n", - "GCC 7.3\n" - ], - "name": "stdout" - } - ] + { + "cell_type": "markdown", + "metadata": { + "id": "VcjSRFELVbNk" + }, + "source": [ + "# MMAction2 Tutorial\n", + "\n", + "Welcome to MMAction2! This is the official colab tutorial for using MMAction2. In this tutorial, you will learn\n", + "- Perform inference with a MMAction2 recognizer.\n", + "- Train a new recognizer with a new dataset.\n", + "\n", + "Let's start!" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "7LqHGkGEVqpm" + }, + "source": [ + "## Install MMAction2" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "Bf8PpPXtVvmg", + "outputId": "f262f3c6-a9dd-48c7-8f7e-081fd3e12ba8" + }, + "outputs": [ { - "cell_type": "markdown", - "metadata": { - "id": "pXf7oV5DWdab" - }, - "source": [ - "## Perform inference with a MMAction2 recognizer\n", - "MMAction2 already provides high level APIs to do inference and training." - ] + "name": "stdout", + "output_type": "stream", + "text": [ + "nvcc: NVIDIA (R) Cuda compiler driver\n", + "Copyright (c) 2005-2020 NVIDIA Corporation\n", + "Built on Wed_Jul_22_19:09:09_PDT_2020\n", + "Cuda compilation tools, release 11.0, V11.0.221\n", + "Build cuda_11.0_bu.TC445_37.28845127_0\n", + "gcc (Ubuntu 7.5.0-3ubuntu1~18.04) 7.5.0\n", + "Copyright (C) 2017 Free Software Foundation, Inc.\n", + "This is free software; see the source for copying conditions. 
There is NO\n", + "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n", + "\n" + ] + } + ], + "source": [ + "# Check nvcc version\n", + "!nvcc -V\n", + "# Check GCC version\n", + "!gcc --version" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "5PAJ4ArzV5Ry", + "outputId": "b68c4528-1a83-469f-8920-040ae373fc7c" + }, + "outputs": [ { - "cell_type": "code", - "metadata": { - "id": "64CW6d_AaT-Q", - "colab": { - "base_uri": "https://localhost:8080/" - }, - "outputId": "d08bfb9b-ab1e-451b-d3b2-89023a59766b" - }, - "source": [ - "!mkdir checkpoints\n", - "!wget -c https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \\\n", - " -O checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth" - ], - "execution_count": 4, - "outputs": [ - { - "output_type": "stream", - "text": [ - "--2021-07-11 12:44:00-- https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth\n", - "Resolving download.openmmlab.com (download.openmmlab.com)... 47.88.36.78\n", - "Connecting to download.openmmlab.com (download.openmmlab.com)|47.88.36.78|:443... connected.\n", - "HTTP request sent, awaiting response... 200 OK\n", - "Length: 97579339 (93M) [application/octet-stream]\n", - "Saving to: ‘checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth’\n", - "\n", - "checkpoints/tsn_r50 100%[===================>] 93.06M 11.4MB/s in 8.1s \n", - "\n", - "2021-07-11 12:44:09 (11.4 MB/s) - ‘checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth’ saved [97579339/97579339]\n", - "\n" - ], - "name": "stdout" - } - ] + "name": "stdout", + "output_type": "stream", + "text": [ + "Looking in links: https://download.pytorch.org/whl/torch_stable.html\n", + "Collecting torch==1.8.0+cu101\n", + "\u001b[?25l Downloading https://download.pytorch.org/whl/cu101/torch-1.8.0%2Bcu101-cp37-cp37m-linux_x86_64.whl (763.5MB)\n", + "\u001b[K |████████████████████████████████| 763.5MB 23kB/s \n", + "\u001b[?25hCollecting torchvision==0.9.0+cu101\n", + "\u001b[?25l Downloading https://download.pytorch.org/whl/cu101/torchvision-0.9.0%2Bcu101-cp37-cp37m-linux_x86_64.whl (17.3MB)\n", + "\u001b[K |████████████████████████████████| 17.3MB 188kB/s \n", + "\u001b[?25hCollecting torchtext==0.9.0\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/36/50/84184d6230686e230c464f0dd4ff32eada2756b4a0b9cefec68b88d1d580/torchtext-0.9.0-cp37-cp37m-manylinux1_x86_64.whl (7.1MB)\n", + "\u001b[K |████████████████████████████████| 7.1MB 8.0MB/s \n", + "\u001b[?25hRequirement already satisfied, skipping upgrade: numpy in /usr/local/lib/python3.7/dist-packages (from torch==1.8.0+cu101) (1.19.5)\n", + "Requirement already satisfied, skipping upgrade: typing-extensions in /usr/local/lib/python3.7/dist-packages (from torch==1.8.0+cu101) (3.7.4.3)\n", + "Requirement already satisfied, skipping upgrade: pillow>=4.1.1 in /usr/local/lib/python3.7/dist-packages (from torchvision==0.9.0+cu101) (7.1.2)\n", + "Requirement already satisfied, skipping upgrade: tqdm in /usr/local/lib/python3.7/dist-packages (from torchtext==0.9.0) (4.41.1)\n", + "Requirement already satisfied, skipping upgrade: requests in /usr/local/lib/python3.7/dist-packages (from torchtext==0.9.0) (2.23.0)\n", + "Requirement already satisfied, skipping upgrade: 
idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (2.10)\n", + "Requirement already satisfied, skipping upgrade: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (1.24.3)\n", + "Requirement already satisfied, skipping upgrade: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (3.0.4)\n", + "Requirement already satisfied, skipping upgrade: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (2021.5.30)\n", + "Installing collected packages: torch, torchvision, torchtext\n", + " Found existing installation: torch 1.9.0+cu102\n", + " Uninstalling torch-1.9.0+cu102:\n", + " Successfully uninstalled torch-1.9.0+cu102\n", + " Found existing installation: torchvision 0.10.0+cu102\n", + " Uninstalling torchvision-0.10.0+cu102:\n", + " Successfully uninstalled torchvision-0.10.0+cu102\n", + " Found existing installation: torchtext 0.10.0\n", + " Uninstalling torchtext-0.10.0:\n", + " Successfully uninstalled torchtext-0.10.0\n", + "Successfully installed torch-1.8.0+cu101 torchtext-0.9.0 torchvision-0.9.0+cu101\n", + "Looking in links: https://download.openmmlab.com/mmcv/dist/cu101/torch1.8.0/index.html\n", + "Collecting mmcv-full==1.3.9\n", + "\u001b[?25l Downloading https://download.openmmlab.com/mmcv/dist/cu101/torch1.8.0/mmcv_full-1.3.9-cp37-cp37m-manylinux1_x86_64.whl (31.4MB)\n", + "\u001b[K |████████████████████████████████| 31.4MB 94kB/s \n", + "\u001b[?25hRequirement already satisfied: pyyaml in /usr/local/lib/python3.7/dist-packages (from mmcv-full==1.3.9) (3.13)\n", + "Requirement already satisfied: opencv-python>=3 in /usr/local/lib/python3.7/dist-packages (from mmcv-full==1.3.9) (4.1.2.30)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from mmcv-full==1.3.9) (1.19.5)\n", + "Requirement already satisfied: Pillow in /usr/local/lib/python3.7/dist-packages (from mmcv-full==1.3.9) (7.1.2)\n", + "Collecting addict\n", + " Downloading https://files.pythonhosted.org/packages/6a/00/b08f23b7d7e1e14ce01419a467b583edbb93c6cdb8654e54a9cc579cd61f/addict-2.4.0-py3-none-any.whl\n", + "Collecting yapf\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/5f/0d/8814e79eb865eab42d95023b58b650d01dec6f8ea87fc9260978b1bf2167/yapf-0.31.0-py2.py3-none-any.whl (185kB)\n", + "\u001b[K |████████████████████████████████| 194kB 8.8MB/s \n", + "\u001b[?25hInstalling collected packages: addict, yapf, mmcv-full\n", + "Successfully installed addict-2.4.0 mmcv-full-1.3.9 yapf-0.31.0\n", + "Cloning into 'mmaction2'...\n", + "remote: Enumerating objects: 12544, done.\u001b[K\n", + "remote: Counting objects: 100% (677/677), done.\u001b[K\n", + "remote: Compressing objects: 100% (330/330), done.\u001b[K\n", + "remote: Total 12544 (delta 432), reused 510 (delta 344), pack-reused 11867\u001b[K\n", + "Receiving objects: 100% (12544/12544), 42.42 MiB | 30.27 MiB/s, done.\n", + "Resolving deltas: 100% (8980/8980), done.\n", + "/content/mmaction2\n", + "Obtaining file:///content/mmaction2\n", + "Requirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.16.0) (3.2.2)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.16.0) (1.19.5)\n", + "Requirement already satisfied: opencv-contrib-python in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.16.0) (4.1.2.30)\n", + "Requirement 
already satisfied: Pillow in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.16.0) (7.1.2)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.16.0) (2.8.1)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.16.0) (0.10.0)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.16.0) (1.3.1)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.16.0) (2.4.7)\n", + "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.7/dist-packages (from python-dateutil>=2.1->matplotlib->mmaction2==0.16.0) (1.15.0)\n", + "Installing collected packages: mmaction2\n", + " Running setup.py develop for mmaction2\n", + "Successfully installed mmaction2\n", + "Collecting av\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/66/ff/bacde7314c646a2bd2f240034809a10cc3f8b096751284d0828640fff3dd/av-8.0.3-cp37-cp37m-manylinux2010_x86_64.whl (37.2MB)\n", + "\u001b[K |████████████████████████████████| 37.2MB 76kB/s \n", + "\u001b[?25hCollecting decord>=0.4.1\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/11/79/936af42edf90a7bd4e41a6cac89c913d4b47fa48a26b042d5129a9242ee3/decord-0.6.0-py3-none-manylinux2010_x86_64.whl (13.6MB)\n", + "\u001b[K |████████████████████████████████| 13.6MB 231kB/s \n", + "\u001b[?25hCollecting einops\n", + " Downloading https://files.pythonhosted.org/packages/5d/a0/9935e030634bf60ecd572c775f64ace82ceddf2f504a5fd3902438f07090/einops-0.3.0-py2.py3-none-any.whl\n", + "Requirement already satisfied: imgaug in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 4)) (0.2.9)\n", + "Requirement already satisfied: librosa in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 5)) (0.8.1)\n", + "Requirement already satisfied: lmdb in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 6)) (0.99)\n", + "Requirement already satisfied: moviepy in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 7)) (0.2.3.5)\n", + "Collecting onnx\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/3f/9b/54c950d3256e27f970a83cd0504efb183a24312702deed0179453316dbd0/onnx-1.9.0-cp37-cp37m-manylinux2010_x86_64.whl (12.2MB)\n", + "\u001b[K |████████████████████████████████| 12.2MB 36.2MB/s \n", + "\u001b[?25hCollecting onnxruntime\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/c9/35/80ab6f444a83c708817e011e9cd4708c816591cc85aff830dff525a34992/onnxruntime-1.8.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (4.5MB)\n", + "\u001b[K |████████████████████████████████| 4.5MB 29.5MB/s \n", + "\u001b[?25hCollecting pims\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/d5/47/82e0ac31e01a271e5a06362fbf03769e9081956f6772f91d98b32899d743/PIMS-0.5.tar.gz (85kB)\n", + "\u001b[K |████████████████████████████████| 92kB 13.1MB/s \n", + "\u001b[?25hCollecting PyTurboJPEG\n", + " Downloading https://files.pythonhosted.org/packages/f9/7b/7621780391ed7a33acec8e803068d7291d940fbbad1ffc8909e94e844477/PyTurboJPEG-1.5.1.tar.gz\n", + "Collecting timm\n", + "\u001b[?25l Downloading 
https://files.pythonhosted.org/packages/90/fc/606bc5cf46acac3aa9bd179b3954433c026aaf88ea98d6b19f5d14c336da/timm-0.4.12-py3-none-any.whl (376kB)\n", + "\u001b[K |████████████████████████████████| 378kB 43.1MB/s \n", + "\u001b[?25hRequirement already satisfied: numpy>=1.14.0 in /usr/local/lib/python3.7/dist-packages (from decord>=0.4.1->-r requirements/optional.txt (line 2)) (1.19.5)\n", + "Requirement already satisfied: Pillow in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (7.1.2)\n", + "Requirement already satisfied: scikit-image>=0.11.0 in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (0.16.2)\n", + "Requirement already satisfied: six in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (1.15.0)\n", + "Requirement already satisfied: imageio in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (2.4.1)\n", + "Requirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (3.2.2)\n", + "Requirement already satisfied: opencv-python in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (4.1.2.30)\n", + "Requirement already satisfied: Shapely in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (1.7.1)\n", + "Requirement already satisfied: scipy in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (1.4.1)\n", + "Requirement already satisfied: resampy>=0.2.2 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (0.2.2)\n", + "Requirement already satisfied: pooch>=1.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (1.4.0)\n", + "Requirement already satisfied: numba>=0.43.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (0.51.2)\n", + "Requirement already satisfied: audioread>=2.0.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (2.1.9)\n", + "Requirement already satisfied: soundfile>=0.10.2 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (0.10.3.post1)\n", + "Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (20.9)\n", + "Requirement already satisfied: joblib>=0.14 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (1.0.1)\n", + "Requirement already satisfied: decorator>=3.0.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (4.4.2)\n", + "Requirement already satisfied: scikit-learn!=0.19.0,>=0.14.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (0.22.2.post1)\n", + "Requirement already satisfied: tqdm<5.0,>=4.11.2 in /usr/local/lib/python3.7/dist-packages (from moviepy->-r requirements/optional.txt (line 7)) (4.41.1)\n", + "Requirement already satisfied: protobuf in /usr/local/lib/python3.7/dist-packages (from onnx->-r requirements/optional.txt (line 8)) (3.17.3)\n", + "Requirement already satisfied: typing-extensions>=3.6.2.1 in /usr/local/lib/python3.7/dist-packages (from onnx->-r requirements/optional.txt (line 8)) (3.7.4.3)\n", + "Requirement already 
satisfied: flatbuffers in /usr/local/lib/python3.7/dist-packages (from onnxruntime->-r requirements/optional.txt (line 9)) (1.12)\n", + "Collecting slicerator>=0.9.8\n", + " Downloading https://files.pythonhosted.org/packages/75/ae/fe46f5371105508a209fe6162e7e7b11db531a79d2eabcd24566b8b1f534/slicerator-1.0.0-py3-none-any.whl\n", + "Requirement already satisfied: torchvision in /usr/local/lib/python3.7/dist-packages (from timm->-r requirements/optional.txt (line 12)) (0.9.0+cu101)\n", + "Requirement already satisfied: torch>=1.4 in /usr/local/lib/python3.7/dist-packages (from timm->-r requirements/optional.txt (line 12)) (1.8.0+cu101)\n", + "Requirement already satisfied: networkx>=2.0 in /usr/local/lib/python3.7/dist-packages (from scikit-image>=0.11.0->imgaug->-r requirements/optional.txt (line 4)) (2.5.1)\n", + "Requirement already satisfied: PyWavelets>=0.4.0 in /usr/local/lib/python3.7/dist-packages (from scikit-image>=0.11.0->imgaug->-r requirements/optional.txt (line 4)) (1.1.1)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 4)) (0.10.0)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 4)) (2.8.1)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 4)) (2.4.7)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 4)) (1.3.1)\n", + "Requirement already satisfied: requests in /usr/local/lib/python3.7/dist-packages (from pooch>=1.0->librosa->-r requirements/optional.txt (line 5)) (2.23.0)\n", + "Requirement already satisfied: appdirs in /usr/local/lib/python3.7/dist-packages (from pooch>=1.0->librosa->-r requirements/optional.txt (line 5)) (1.4.4)\n", + "Requirement already satisfied: llvmlite<0.35,>=0.34.0.dev0 in /usr/local/lib/python3.7/dist-packages (from numba>=0.43.0->librosa->-r requirements/optional.txt (line 5)) (0.34.0)\n", + "Requirement already satisfied: setuptools in /usr/local/lib/python3.7/dist-packages (from numba>=0.43.0->librosa->-r requirements/optional.txt (line 5)) (57.0.0)\n", + "Requirement already satisfied: cffi>=1.0 in /usr/local/lib/python3.7/dist-packages (from soundfile>=0.10.2->librosa->-r requirements/optional.txt (line 5)) (1.14.5)\n", + "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests->pooch>=1.0->librosa->-r requirements/optional.txt (line 5)) (2.10)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests->pooch>=1.0->librosa->-r requirements/optional.txt (line 5)) (2021.5.30)\n", + "Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests->pooch>=1.0->librosa->-r requirements/optional.txt (line 5)) (3.0.4)\n", + "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests->pooch>=1.0->librosa->-r requirements/optional.txt (line 5)) (1.24.3)\n", + "Requirement already satisfied: pycparser in /usr/local/lib/python3.7/dist-packages (from cffi>=1.0->soundfile>=0.10.2->librosa->-r requirements/optional.txt (line 5)) (2.20)\n", + "Building wheels for collected packages: pims, 
PyTurboJPEG\n", + " Building wheel for pims (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for pims: filename=PIMS-0.5-cp37-none-any.whl size=84328 sha256=436632b7a982144fd933f01d12e38a419eb8a636f2d6dd4bd4a43680734979e2\n", + " Stored in directory: /root/.cache/pip/wheels/0e/0a/14/4c33a4cc1b9158e57329a38e8e3e03901ed24060eb322d5462\n", + " Building wheel for PyTurboJPEG (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for PyTurboJPEG: filename=PyTurboJPEG-1.5.1-cp37-none-any.whl size=7979 sha256=755337aaa622b48be036eca6d743e99bf4528fc6c64e810da11a71236a78bcca\n", + " Stored in directory: /root/.cache/pip/wheels/19/cb/78/5725c881ee618936d956bf0ecd4272cb0f701cb898f44575ca\n", + "Successfully built pims PyTurboJPEG\n", + "Installing collected packages: av, decord, einops, onnx, onnxruntime, slicerator, pims, PyTurboJPEG, timm\n", + "Successfully installed PyTurboJPEG-1.5.1 av-8.0.3 decord-0.6.0 einops-0.3.0 onnx-1.9.0 onnxruntime-1.8.1 pims-0.5 slicerator-1.0.0 timm-0.4.12\n" + ] + } + ], + "source": [ + "# install dependencies: (use cu101 because colab has CUDA 10.1)\n", + "!pip install -U torch==1.8.0+cu101 torchvision==0.9.0+cu101 torchtext==0.9.0 -f https://download.pytorch.org/whl/torch_stable.html\n", + "\n", + "# install mmcv-full thus we could use CUDA operators\n", + "!pip install mmcv-full==1.3.9 -f https://download.openmmlab.com/mmcv/dist/cu101/torch1.8.0/index.html\n", + "\n", + "# Install mmaction2\n", + "!rm -rf mmaction2\n", + "!git clone https://github.com/open-mmlab/mmaction2.git\n", + "%cd mmaction2\n", + "\n", + "!pip install -e .\n", + "\n", + "# Install some optional requirements\n", + "!pip install -r requirements/optional.txt" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "No_zZAFpWC-a", + "outputId": "7e95038a-6f79-410b-adf6-0148bf8cc2fc" + }, + "outputs": [ { - "cell_type": "code", - "metadata": { - "id": "HNZB7NoSabzj", - "outputId": "b2f9bd71-1490-44d3-81c6-5037d804f0b1", - "colab": { - "base_uri": "https://localhost:8080/" - } - }, - "source": [ - "from mmaction.apis import inference_recognizer, init_recognizer\n", - "\n", - "# Choose to use a config and initialize the recognizer\n", - "config = 'configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py'\n", - "# Setup a checkpoint file to load\n", - "checkpoint = 'checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n", - "# Initialize the recognizer\n", - "model = init_recognizer(config, checkpoint, device='cuda:0')" - ], - "execution_count": 5, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Use load_from_local loader\n" - ], - "name": "stdout" - } - ] + "name": "stdout", + "output_type": "stream", + "text": [ + "1.8.0+cu101 True\n", + "0.16.0\n", + "10.1\n", + "GCC 7.3\n" + ] + } + ], + "source": [ + "# Check Pytorch installation\n", + "import torch, torchvision\n", + "print(torch.__version__, torch.cuda.is_available())\n", + "\n", + "# Check MMAction2 installation\n", + "import mmaction\n", + "print(mmaction.__version__)\n", + "\n", + "# Check MMCV installation\n", + "from mmcv.ops import get_compiling_cuda_version, get_compiler_version\n", + "print(get_compiling_cuda_version())\n", + "print(get_compiler_version())" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "pXf7oV5DWdab" + }, + "source": [ + "## Perform inference with a MMAction2 recognizer\n", + "MMAction2 already provides high level APIs to do inference and 
training." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "64CW6d_AaT-Q", + "outputId": "d08bfb9b-ab1e-451b-d3b2-89023a59766b" + }, + "outputs": [ { - "cell_type": "code", - "metadata": { - "id": "rEMsBnpHapAn" - }, - "source": [ - "# Use the recognizer to do inference\n", - "video = 'demo/demo.mp4'\n", - "label = 'tools/data/kinetics/label_map_k400.txt'\n", - "results = inference_recognizer(model, video, label)" - ], - "execution_count": 6, - "outputs": [] + "name": "stdout", + "output_type": "stream", + "text": [ + "--2021-07-11 12:44:00-- https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth\n", + "Resolving download.openmmlab.com (download.openmmlab.com)... 47.88.36.78\n", + "Connecting to download.openmmlab.com (download.openmmlab.com)|47.88.36.78|:443... connected.\n", + "HTTP request sent, awaiting response... 200 OK\n", + "Length: 97579339 (93M) [application/octet-stream]\n", + "Saving to: ‘checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth’\n", + "\n", + "checkpoints/tsn_r50 100%[===================>] 93.06M 11.4MB/s in 8.1s \n", + "\n", + "2021-07-11 12:44:09 (11.4 MB/s) - ‘checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth’ saved [97579339/97579339]\n", + "\n" + ] + } + ], + "source": [ + "!mkdir checkpoints\n", + "!wget -c https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \\\n", + " -O checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "HNZB7NoSabzj", + "outputId": "b2f9bd71-1490-44d3-81c6-5037d804f0b1" + }, + "outputs": [ { - "cell_type": "code", - "metadata": { - "id": "NIyJXqfWathq", - "colab": { - "base_uri": "https://localhost:8080/" - }, - "outputId": "ca24528b-f99d-414a-fa50-456f6068b463" - }, - "source": [ - "# Let's show the results\n", - "for result in results:\n", - " print(f'{result[0]}: ', result[1])" - ], - "execution_count": 7, - "outputs": [ - { - "output_type": "stream", - "text": [ - "arm wrestling: 29.616438\n", - "rock scissors paper: 10.754841\n", - "shaking hands: 9.908401\n", - "clapping: 9.189913\n", - "massaging feet: 8.305307\n" - ], - "name": "stdout" - } - ] + "name": "stdout", + "output_type": "stream", + "text": [ + "Use load_from_local loader\n" + ] + } + ], + "source": [ + "from mmaction.apis import inference_recognizer, init_recognizer\n", + "\n", + "# Choose to use a config and initialize the recognizer\n", + "config = 'configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py'\n", + "# Setup a checkpoint file to load\n", + "checkpoint = 'checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n", + "# Initialize the recognizer\n", + "model = init_recognizer(config, checkpoint, device='cuda:0')" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "id": "rEMsBnpHapAn" + }, + "outputs": [], + "source": [ + "# Use the recognizer to do inference\n", + "video = 'demo/demo.mp4'\n", + "label = 'tools/data/kinetics/label_map_k400.txt'\n", + "results = inference_recognizer(model, video)\n", + "\n", + "labels = open(label).readlines()\n", + "labels = [x.strip() for x in labels]\n", + "results = [(labels[k[0]], k[1]) for k in 
results]" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "NIyJXqfWathq", + "outputId": "ca24528b-f99d-414a-fa50-456f6068b463" + }, + "outputs": [ { - "cell_type": "markdown", - "metadata": { - "id": "QuZG8kZ2fJ5d" - }, - "source": [ - "## Train a recognizer on customized dataset\n", - "\n", - "To train a new recognizer, there are usually three things to do:\n", - "1. Support a new dataset\n", - "2. Modify the config\n", - "3. Train a new recognizer" - ] + "name": "stdout", + "output_type": "stream", + "text": [ + "arm wrestling: 29.616438\n", + "rock scissors paper: 10.754841\n", + "shaking hands: 9.908401\n", + "clapping: 9.189913\n", + "massaging feet: 8.305307\n" + ] + } + ], + "source": [ + "# Let's show the results\n", + "for result in results:\n", + " print(f'{result[0]}: ', result[1])" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "QuZG8kZ2fJ5d" + }, + "source": [ + "## Train a recognizer on customized dataset\n", + "\n", + "To train a new recognizer, there are usually three things to do:\n", + "1. Support a new dataset\n", + "2. Modify the config\n", + "3. Train a new recognizer" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "neEFyxChfgiJ" + }, + "source": [ + "### Support a new dataset\n", + "\n", + "In this tutorial, we gives an example to convert the data into the format of existing datasets. Other methods and more advanced usages can be found in the [doc](/docs/tutorials/new_dataset.md)\n", + "\n", + "Firstly, let's download a tiny dataset obtained from [Kinetics-400](https://deepmind.com/research/open-source/open-source-datasets/kinetics/). We select 30 videos with their labels as train dataset and 10 videos with their labels as test dataset." + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "gjsUj9JzgUlJ", + "outputId": "61c4704d-db81-4ca5-ed16-e2454dbdfe8e" + }, + "outputs": [ { - "cell_type": "markdown", - "metadata": { - "id": "neEFyxChfgiJ" - }, - "source": [ - "### Support a new dataset\n", - "\n", - "In this tutorial, we gives an example to convert the data into the format of existing datasets. Other methods and more advanced usages can be found in the [doc](/docs/tutorials/new_dataset.md)\n", - "\n", - "Firstly, let's download a tiny dataset obtained from [Kinetics-400](https://deepmind.com/research/open-source/open-source-datasets/kinetics/). We select 30 videos with their labels as train dataset and 10 videos with their labels as test dataset." - ] + "name": "stdout", + "output_type": "stream", + "text": [ + "rm: cannot remove 'kinetics400_tiny.zip*': No such file or directory\n", + "--2021-07-11 12:44:29-- https://download.openmmlab.com/mmaction/kinetics400_tiny.zip\n", + "Resolving download.openmmlab.com (download.openmmlab.com)... 47.88.36.78\n", + "Connecting to download.openmmlab.com (download.openmmlab.com)|47.88.36.78|:443... connected.\n", + "HTTP request sent, awaiting response... 
200 OK\n", + "Length: 18308682 (17M) [application/zip]\n", + "Saving to: ‘kinetics400_tiny.zip’\n", + "\n", + "kinetics400_tiny.zi 100%[===================>] 17.46M 10.7MB/s in 1.6s \n", + "\n", + "2021-07-11 12:44:31 (10.7 MB/s) - ‘kinetics400_tiny.zip’ saved [18308682/18308682]\n", + "\n" + ] + } + ], + "source": [ + "# download, decompress the data\n", + "!rm kinetics400_tiny.zip*\n", + "!rm -rf kinetics400_tiny\n", + "!wget https://download.openmmlab.com/mmaction/kinetics400_tiny.zip\n", + "!unzip kinetics400_tiny.zip > /dev/null" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "AbZ-o7V6hNw4", + "outputId": "b091909c-def2-49b5-88c2-01b00802b162" + }, + "outputs": [ { - "cell_type": "code", - "metadata": { - "id": "gjsUj9JzgUlJ", - "colab": { - "base_uri": "https://localhost:8080/" - }, - "outputId": "61c4704d-db81-4ca5-ed16-e2454dbdfe8e" - }, - "source": [ - "# download, decompress the data\n", - "!rm kinetics400_tiny.zip*\n", - "!rm -rf kinetics400_tiny\n", - "!wget https://download.openmmlab.com/mmaction/kinetics400_tiny.zip\n", - "!unzip kinetics400_tiny.zip > /dev/null" - ], - "execution_count": 8, - "outputs": [ - { - "output_type": "stream", - "text": [ - "rm: cannot remove 'kinetics400_tiny.zip*': No such file or directory\n", - "--2021-07-11 12:44:29-- https://download.openmmlab.com/mmaction/kinetics400_tiny.zip\n", - "Resolving download.openmmlab.com (download.openmmlab.com)... 47.88.36.78\n", - "Connecting to download.openmmlab.com (download.openmmlab.com)|47.88.36.78|:443... connected.\n", - "HTTP request sent, awaiting response... 200 OK\n", - "Length: 18308682 (17M) [application/zip]\n", - "Saving to: ‘kinetics400_tiny.zip’\n", - "\n", - "kinetics400_tiny.zi 100%[===================>] 17.46M 10.7MB/s in 1.6s \n", - "\n", - "2021-07-11 12:44:31 (10.7 MB/s) - ‘kinetics400_tiny.zip’ saved [18308682/18308682]\n", - "\n" - ], - "name": "stdout" - } - ] + "name": "stdout", + "output_type": "stream", + "text": [ + "Reading package lists...\n", + "Building dependency tree...\n", + "Reading state information...\n", + "The following NEW packages will be installed:\n", + " tree\n", + "0 upgraded, 1 newly installed, 0 to remove and 39 not upgraded.\n", + "Need to get 40.7 kB of archives.\n", + "After this operation, 105 kB of additional disk space will be used.\n", + "Get:1 http://archive.ubuntu.com/ubuntu bionic/universe amd64 tree amd64 1.7.0-5 [40.7 kB]\n", + "Fetched 40.7 kB in 0s (88.7 kB/s)\n", + "Selecting previously unselected package tree.\n", + "(Reading database ... 
160815 files and directories currently installed.)\n", + "Preparing to unpack .../tree_1.7.0-5_amd64.deb ...\n", + "Unpacking tree (1.7.0-5) ...\n", + "Setting up tree (1.7.0-5) ...\n", + "Processing triggers for man-db (2.8.3-2ubuntu0.1) ...\n", + "kinetics400_tiny\n", + "├── kinetics_tiny_train_video.txt\n", + "├── kinetics_tiny_val_video.txt\n", + "├── train\n", + "│   ├── 27_CSXByd3s.mp4\n", + "│   ├── 34XczvTaRiI.mp4\n", + "│   ├── A-wiliK50Zw.mp4\n", + "│   ├── D32_1gwq35E.mp4\n", + "│   ├── D92m0HsHjcQ.mp4\n", + "│   ├── DbX8mPslRXg.mp4\n", + "│   ├── FMlSTTpN3VY.mp4\n", + "│   ├── h10B9SVE-nk.mp4\n", + "│   ├── h2YqqUhnR34.mp4\n", + "│   ├── iRuyZSKhHRg.mp4\n", + "│   ├── IyfILH9lBRo.mp4\n", + "│   ├── kFC3KY2bOP8.mp4\n", + "│   ├── LvcFDgCAXQs.mp4\n", + "│   ├── O46YA8tI530.mp4\n", + "│   ├── oMrZaozOvdQ.mp4\n", + "│   ├── oXy-e_P_cAI.mp4\n", + "│   ├── P5M-hAts7MQ.mp4\n", + "│   ├── phDqGd0NKoo.mp4\n", + "│   ├── PnOe3GZRVX8.mp4\n", + "│   ├── R8HXQkdgKWA.mp4\n", + "│   ├── RqnKtCEoEcA.mp4\n", + "│   ├── soEcZZsBmDs.mp4\n", + "│   ├── TkkZPZHbAKA.mp4\n", + "│   ├── T_TMNGzVrDk.mp4\n", + "│   ├── WaS0qwP46Us.mp4\n", + "│   ├── Wh_YPQdH1Zg.mp4\n", + "│   ├── WWP5HZJsg-o.mp4\n", + "│   ├── xGY2dP0YUjA.mp4\n", + "│   ├── yLC9CtWU5ws.mp4\n", + "│   └── ZQV4U2KQ370.mp4\n", + "└── val\n", + " ├── 0pVGiAU6XEA.mp4\n", + " ├── AQrbRSnRt8M.mp4\n", + " ├── b6Q_b7vgc7Q.mp4\n", + " ├── ddvJ6-faICE.mp4\n", + " ├── IcLztCtvhb8.mp4\n", + " ├── ik4BW3-SCts.mp4\n", + " ├── jqRrH30V0k4.mp4\n", + " ├── SU_x2LQqSLs.mp4\n", + " ├── u4Rm6srmIS8.mp4\n", + " └── y5Iu7XkTqV0.mp4\n", + "\n", + "2 directories, 42 files\n" + ] + } + ], + "source": [ + "# Check the directory structure of the tiny data\n", + "\n", + "# Install tree first\n", + "!apt-get -q install tree\n", + "!tree kinetics400_tiny" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "fTdi6dI0hY3g", + "outputId": "ffda0997-8d77-431a-d66e-2f273e80c756" + }, + "outputs": [ { - "cell_type": "code", - "metadata": { - "id": "AbZ-o7V6hNw4", - "colab": { - "base_uri": "https://localhost:8080/" - }, - "outputId": "b091909c-def2-49b5-88c2-01b00802b162" - }, - "source": [ - "# Check the directory structure of the tiny data\n", - "\n", - "# Install tree first\n", - "!apt-get -q install tree\n", - "!tree kinetics400_tiny" - ], - "execution_count": 9, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Reading package lists...\n", - "Building dependency tree...\n", - "Reading state information...\n", - "The following NEW packages will be installed:\n", - " tree\n", - "0 upgraded, 1 newly installed, 0 to remove and 39 not upgraded.\n", - "Need to get 40.7 kB of archives.\n", - "After this operation, 105 kB of additional disk space will be used.\n", - "Get:1 http://archive.ubuntu.com/ubuntu bionic/universe amd64 tree amd64 1.7.0-5 [40.7 kB]\n", - "Fetched 40.7 kB in 0s (88.7 kB/s)\n", - "Selecting previously unselected package tree.\n", - "(Reading database ... 
160815 files and directories currently installed.)\n", - "Preparing to unpack .../tree_1.7.0-5_amd64.deb ...\n", - "Unpacking tree (1.7.0-5) ...\n", - "Setting up tree (1.7.0-5) ...\n", - "Processing triggers for man-db (2.8.3-2ubuntu0.1) ...\n", - "kinetics400_tiny\n", - "├── kinetics_tiny_train_video.txt\n", - "├── kinetics_tiny_val_video.txt\n", - "├── train\n", - "│   ├── 27_CSXByd3s.mp4\n", - "│   ├── 34XczvTaRiI.mp4\n", - "│   ├── A-wiliK50Zw.mp4\n", - "│   ├── D32_1gwq35E.mp4\n", - "│   ├── D92m0HsHjcQ.mp4\n", - "│   ├── DbX8mPslRXg.mp4\n", - "│   ├── FMlSTTpN3VY.mp4\n", - "│   ├── h10B9SVE-nk.mp4\n", - "│   ├── h2YqqUhnR34.mp4\n", - "│   ├── iRuyZSKhHRg.mp4\n", - "│   ├── IyfILH9lBRo.mp4\n", - "│   ├── kFC3KY2bOP8.mp4\n", - "│   ├── LvcFDgCAXQs.mp4\n", - "│   ├── O46YA8tI530.mp4\n", - "│   ├── oMrZaozOvdQ.mp4\n", - "│   ├── oXy-e_P_cAI.mp4\n", - "│   ├── P5M-hAts7MQ.mp4\n", - "│   ├── phDqGd0NKoo.mp4\n", - "│   ├── PnOe3GZRVX8.mp4\n", - "│   ├── R8HXQkdgKWA.mp4\n", - "│   ├── RqnKtCEoEcA.mp4\n", - "│   ├── soEcZZsBmDs.mp4\n", - "│   ├── TkkZPZHbAKA.mp4\n", - "│   ├── T_TMNGzVrDk.mp4\n", - "│   ├── WaS0qwP46Us.mp4\n", - "│   ├── Wh_YPQdH1Zg.mp4\n", - "│   ├── WWP5HZJsg-o.mp4\n", - "│   ├── xGY2dP0YUjA.mp4\n", - "│   ├── yLC9CtWU5ws.mp4\n", - "│   └── ZQV4U2KQ370.mp4\n", - "└── val\n", - " ├── 0pVGiAU6XEA.mp4\n", - " ├── AQrbRSnRt8M.mp4\n", - " ├── b6Q_b7vgc7Q.mp4\n", - " ├── ddvJ6-faICE.mp4\n", - " ├── IcLztCtvhb8.mp4\n", - " ├── ik4BW3-SCts.mp4\n", - " ├── jqRrH30V0k4.mp4\n", - " ├── SU_x2LQqSLs.mp4\n", - " ├── u4Rm6srmIS8.mp4\n", - " └── y5Iu7XkTqV0.mp4\n", - "\n", - "2 directories, 42 files\n" - ], - "name": "stdout" - } - ] + "name": "stdout", + "output_type": "stream", + "text": [ + "D32_1gwq35E.mp4 0\n", + "iRuyZSKhHRg.mp4 1\n", + "oXy-e_P_cAI.mp4 0\n", + "34XczvTaRiI.mp4 1\n", + "h2YqqUhnR34.mp4 0\n", + "O46YA8tI530.mp4 0\n", + "kFC3KY2bOP8.mp4 1\n", + "WWP5HZJsg-o.mp4 1\n", + "phDqGd0NKoo.mp4 1\n", + "yLC9CtWU5ws.mp4 0\n", + "27_CSXByd3s.mp4 1\n", + "IyfILH9lBRo.mp4 1\n", + "T_TMNGzVrDk.mp4 1\n", + "TkkZPZHbAKA.mp4 0\n", + "PnOe3GZRVX8.mp4 1\n", + "soEcZZsBmDs.mp4 1\n", + "FMlSTTpN3VY.mp4 1\n", + "WaS0qwP46Us.mp4 0\n", + "A-wiliK50Zw.mp4 1\n", + "oMrZaozOvdQ.mp4 1\n", + "ZQV4U2KQ370.mp4 0\n", + "DbX8mPslRXg.mp4 1\n", + "h10B9SVE-nk.mp4 1\n", + "P5M-hAts7MQ.mp4 0\n", + "R8HXQkdgKWA.mp4 0\n", + "D92m0HsHjcQ.mp4 0\n", + "RqnKtCEoEcA.mp4 0\n", + "LvcFDgCAXQs.mp4 0\n", + "xGY2dP0YUjA.mp4 0\n", + "Wh_YPQdH1Zg.mp4 0\n" + ] + } + ], + "source": [ + "# After downloading the data, we need to check the annotation format\n", + "!cat kinetics400_tiny/kinetics_tiny_train_video.txt" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "0bq0mxmEi29H" + }, + "source": [ + "According to the format defined in [`VideoDataset`](./datasets/video_dataset.py), each line indicates a sample video with the filepath and label, which are split with a whitespace." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "Ht_DGJA9jQar" + }, + "source": [ + "### Modify the config\n", + "\n", + "In the next step, we need to modify the config for the training.\n", + "To accelerate the process, we finetune a recognizer using a pre-trained recognizer." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": { + "id": "LjCcmCKOjktc" + }, + "outputs": [], + "source": [ + "from mmcv import Config\n", + "cfg = Config.fromfile('./configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py')" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "tc8YhFFGjp3e" + }, + "source": [ + "Given a config that trains a TSN model on kinetics400-full dataset, we need to modify some values to use it for training TSN on Kinetics400-tiny dataset.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "tlhu9byjjt-K", + "outputId": "3b9a3c49-ace0-41d3-dd15-d6c8579755f8" + }, + "outputs": [ { - "cell_type": "code", - "metadata": { - "id": "fTdi6dI0hY3g", - "colab": { - "base_uri": "https://localhost:8080/" - }, - "outputId": "ffda0997-8d77-431a-d66e-2f273e80c756" - }, - "source": [ - "# After downloading the data, we need to check the annotation format\n", - "!cat kinetics400_tiny/kinetics_tiny_train_video.txt" - ], - "execution_count": 10, - "outputs": [ - { - "output_type": "stream", - "text": [ - "D32_1gwq35E.mp4 0\n", - "iRuyZSKhHRg.mp4 1\n", - "oXy-e_P_cAI.mp4 0\n", - "34XczvTaRiI.mp4 1\n", - "h2YqqUhnR34.mp4 0\n", - "O46YA8tI530.mp4 0\n", - "kFC3KY2bOP8.mp4 1\n", - "WWP5HZJsg-o.mp4 1\n", - "phDqGd0NKoo.mp4 1\n", - "yLC9CtWU5ws.mp4 0\n", - "27_CSXByd3s.mp4 1\n", - "IyfILH9lBRo.mp4 1\n", - "T_TMNGzVrDk.mp4 1\n", - "TkkZPZHbAKA.mp4 0\n", - "PnOe3GZRVX8.mp4 1\n", - "soEcZZsBmDs.mp4 1\n", - "FMlSTTpN3VY.mp4 1\n", - "WaS0qwP46Us.mp4 0\n", - "A-wiliK50Zw.mp4 1\n", - "oMrZaozOvdQ.mp4 1\n", - "ZQV4U2KQ370.mp4 0\n", - "DbX8mPslRXg.mp4 1\n", - "h10B9SVE-nk.mp4 1\n", - "P5M-hAts7MQ.mp4 0\n", - "R8HXQkdgKWA.mp4 0\n", - "D92m0HsHjcQ.mp4 0\n", - "RqnKtCEoEcA.mp4 0\n", - "LvcFDgCAXQs.mp4 0\n", - "xGY2dP0YUjA.mp4 0\n", - "Wh_YPQdH1Zg.mp4 0\n" - ], - "name": "stdout" - } - ] + "name": "stdout", + "output_type": "stream", + "text": [ + "Config:\n", + "model = dict(\n", + " type='Recognizer2D',\n", + " backbone=dict(\n", + " type='ResNet',\n", + " pretrained='torchvision://resnet50',\n", + " depth=50,\n", + " norm_eval=False),\n", + " cls_head=dict(\n", + " type='TSNHead',\n", + " num_classes=2,\n", + " in_channels=2048,\n", + " spatial_type='avg',\n", + " consensus=dict(type='AvgConsensus', dim=1),\n", + " dropout_ratio=0.4,\n", + " init_std=0.01),\n", + " train_cfg=None,\n", + " test_cfg=dict(average_clips=None))\n", + "optimizer = dict(type='SGD', lr=7.8125e-05, momentum=0.9, weight_decay=0.0001)\n", + "optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2))\n", + "lr_config = dict(policy='step', step=[40, 80])\n", + "total_epochs = 10\n", + "checkpoint_config = dict(interval=5)\n", + "log_config = dict(interval=5, hooks=[dict(type='TextLoggerHook')])\n", + "dist_params = dict(backend='nccl')\n", + "log_level = 'INFO'\n", + "load_from = './checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n", + "resume_from = None\n", + "workflow = [('train', 1)]\n", + "dataset_type = 'VideoDataset'\n", + "data_root = 'kinetics400_tiny/train/'\n", + "data_root_val = 'kinetics400_tiny/val/'\n", + "ann_file_train = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n", + "ann_file_val = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", + "ann_file_test = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", + "img_norm_cfg = dict(\n", + " mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False)\n", + "train_pipeline = 
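Besides assigning attributes one by one, as the modification cell below does, `mmcv.Config` also supports batch overrides via dotted keys (the same mechanism the CLI tools expose as `--cfg-options`). A short sketch; the override values are illustrative:

```python
from mmcv import Config

cfg = Config.fromfile('./configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py')
# merge_from_dict applies several dotted-key overrides in one call.
cfg.merge_from_dict({
    'model.cls_head.num_classes': 2,
    'data.videos_per_gpu': 2,
})
print(cfg.model.cls_head.num_classes)  # 2
```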
[\n", + " dict(type='DecordInit'),\n", + " dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8),\n", + " dict(type='DecordDecode'),\n", + " dict(\n", + " type='MultiScaleCrop',\n", + " input_size=224,\n", + " scales=(1, 0.875, 0.75, 0.66),\n", + " random_crop=False,\n", + " max_wh_scale_gap=1),\n", + " dict(type='Resize', scale=(224, 224), keep_ratio=False),\n", + " dict(type='Flip', flip_ratio=0.5),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[123.675, 116.28, 103.53],\n", + " std=[58.395, 57.12, 57.375],\n", + " to_bgr=False),\n", + " dict(type='FormatShape', input_format='NCHW'),\n", + " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", + " dict(type='ToTensor', keys=['imgs', 'label'])\n", + "]\n", + "val_pipeline = [\n", + " dict(type='DecordInit'),\n", + " dict(\n", + " type='SampleFrames',\n", + " clip_len=1,\n", + " frame_interval=1,\n", + " num_clips=8,\n", + " test_mode=True),\n", + " dict(type='DecordDecode'),\n", + " dict(type='Resize', scale=(-1, 256)),\n", + " dict(type='CenterCrop', crop_size=224),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[123.675, 116.28, 103.53],\n", + " std=[58.395, 57.12, 57.375],\n", + " to_bgr=False),\n", + " dict(type='FormatShape', input_format='NCHW'),\n", + " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", + " dict(type='ToTensor', keys=['imgs'])\n", + "]\n", + "test_pipeline = [\n", + " dict(type='DecordInit'),\n", + " dict(\n", + " type='SampleFrames',\n", + " clip_len=1,\n", + " frame_interval=1,\n", + " num_clips=25,\n", + " test_mode=True),\n", + " dict(type='DecordDecode'),\n", + " dict(type='Resize', scale=(-1, 256)),\n", + " dict(type='ThreeCrop', crop_size=256),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[123.675, 116.28, 103.53],\n", + " std=[58.395, 57.12, 57.375],\n", + " to_bgr=False),\n", + " dict(type='FormatShape', input_format='NCHW'),\n", + " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", + " dict(type='ToTensor', keys=['imgs'])\n", + "]\n", + "data = dict(\n", + " videos_per_gpu=2,\n", + " workers_per_gpu=2,\n", + " train=dict(\n", + " type='VideoDataset',\n", + " ann_file='kinetics400_tiny/kinetics_tiny_train_video.txt',\n", + " data_prefix='kinetics400_tiny/train/',\n", + " pipeline=[\n", + " dict(type='DecordInit'),\n", + " dict(\n", + " type='SampleFrames', clip_len=1, frame_interval=1,\n", + " num_clips=8),\n", + " dict(type='DecordDecode'),\n", + " dict(\n", + " type='MultiScaleCrop',\n", + " input_size=224,\n", + " scales=(1, 0.875, 0.75, 0.66),\n", + " random_crop=False,\n", + " max_wh_scale_gap=1),\n", + " dict(type='Resize', scale=(224, 224), keep_ratio=False),\n", + " dict(type='Flip', flip_ratio=0.5),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[123.675, 116.28, 103.53],\n", + " std=[58.395, 57.12, 57.375],\n", + " to_bgr=False),\n", + " dict(type='FormatShape', input_format='NCHW'),\n", + " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", + " dict(type='ToTensor', keys=['imgs', 'label'])\n", + " ]),\n", + " val=dict(\n", + " type='VideoDataset',\n", + " ann_file='kinetics400_tiny/kinetics_tiny_val_video.txt',\n", + " data_prefix='kinetics400_tiny/val/',\n", + " pipeline=[\n", + " dict(type='DecordInit'),\n", + " dict(\n", + " type='SampleFrames',\n", + " clip_len=1,\n", + " frame_interval=1,\n", + " num_clips=8,\n", + " test_mode=True),\n", + " dict(type='DecordDecode'),\n", + " dict(type='Resize', scale=(-1, 256)),\n", + " dict(type='CenterCrop', crop_size=224),\n", + " dict(\n", + " 
type='Normalize',\n", + " mean=[123.675, 116.28, 103.53],\n", + " std=[58.395, 57.12, 57.375],\n", + " to_bgr=False),\n", + " dict(type='FormatShape', input_format='NCHW'),\n", + " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", + " dict(type='ToTensor', keys=['imgs'])\n", + " ]),\n", + " test=dict(\n", + " type='VideoDataset',\n", + " ann_file='kinetics400_tiny/kinetics_tiny_val_video.txt',\n", + " data_prefix='kinetics400_tiny/val/',\n", + " pipeline=[\n", + " dict(type='DecordInit'),\n", + " dict(\n", + " type='SampleFrames',\n", + " clip_len=1,\n", + " frame_interval=1,\n", + " num_clips=25,\n", + " test_mode=True),\n", + " dict(type='DecordDecode'),\n", + " dict(type='Resize', scale=(-1, 256)),\n", + " dict(type='ThreeCrop', crop_size=256),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[123.675, 116.28, 103.53],\n", + " std=[58.395, 57.12, 57.375],\n", + " to_bgr=False),\n", + " dict(type='FormatShape', input_format='NCHW'),\n", + " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", + " dict(type='ToTensor', keys=['imgs'])\n", + " ]))\n", + "evaluation = dict(\n", + " interval=5,\n", + " metrics=['top_k_accuracy', 'mean_class_accuracy'],\n", + " save_best='auto')\n", + "work_dir = './tutorial_exps'\n", + "omnisource = False\n", + "seed = 0\n", + "gpu_ids = range(0, 1)\n", + "\n" + ] + } + ], + "source": [ + "from mmcv.runner import set_random_seed\n", + "\n", + "# Modify dataset type and path\n", + "cfg.dataset_type = 'VideoDataset'\n", + "cfg.data_root = 'kinetics400_tiny/train/'\n", + "cfg.data_root_val = 'kinetics400_tiny/val/'\n", + "cfg.ann_file_train = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n", + "cfg.ann_file_val = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", + "cfg.ann_file_test = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", + "\n", + "cfg.data.test.type = 'VideoDataset'\n", + "cfg.data.test.ann_file = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", + "cfg.data.test.data_prefix = 'kinetics400_tiny/val/'\n", + "\n", + "cfg.data.train.type = 'VideoDataset'\n", + "cfg.data.train.ann_file = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n", + "cfg.data.train.data_prefix = 'kinetics400_tiny/train/'\n", + "\n", + "cfg.data.val.type = 'VideoDataset'\n", + "cfg.data.val.ann_file = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", + "cfg.data.val.data_prefix = 'kinetics400_tiny/val/'\n", + "\n", + "# The flag is used to determine whether it is omnisource training\n", + "cfg.setdefault('omnisource', False)\n", + "# Modify num classes of the model in cls_head\n", + "cfg.model.cls_head.num_classes = 2\n", + "# We can use the pre-trained TSN model\n", + "cfg.load_from = './checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n", + "\n", + "# Set up working dir to save files and logs.\n", + "cfg.work_dir = './tutorial_exps'\n", + "\n", + "# The original learning rate (LR) is set for 8-GPU training.\n", + "# We divide it by 8 since we only use one GPU.\n", + "cfg.data.videos_per_gpu = cfg.data.videos_per_gpu // 16\n", + "cfg.optimizer.lr = cfg.optimizer.lr / 8 / 16\n", + "cfg.total_epochs = 10\n", + "\n", + "# We can set the checkpoint saving interval to reduce the storage cost\n", + "cfg.checkpoint_config.interval = 5\n", + "# We can set the log print interval to reduce the the times of printing log\n", + "cfg.log_config.interval = 5\n", + "\n", + "# Set seed thus the results are more reproducible\n", + "cfg.seed = 0\n", + "set_random_seed(0, deterministic=False)\n", + "cfg.gpu_ids = range(1)\n", + "\n", + "# 
Save the best\n", + "cfg.evaluation.save_best='auto'\n", + "\n", + "\n", + "# We can initialize the logger for training and have a look\n", + "# at the final config used for training\n", + "print(f'Config:\\n{cfg.pretty_text}')\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "tES-qnZ3k38Z" + }, + "source": [ + "### Train a new recognizer\n", + "\n", + "Finally, lets initialize the dataset and recognizer, then train a new recognizer!" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "dDBWkdDRk6oz", + "outputId": "a85d80d7-b3c4-43f1-d49a-057e8036807f" + }, + "outputs": [ { - "cell_type": "markdown", - "metadata": { - "id": "0bq0mxmEi29H" - }, - "source": [ - "According to the format defined in [`VideoDataset`](./datasets/video_dataset.py), each line indicates a sample video with the filepath and label, which are split with a whitespace." - ] + "name": "stdout", + "output_type": "stream", + "text": [ + "Use load_from_torchvision loader\n" + ] }, { - "cell_type": "markdown", - "metadata": { - "id": "Ht_DGJA9jQar" - }, - "source": [ - "### Modify the config\n", - "\n", - "In the next step, we need to modify the config for the training.\n", - "To accelerate the process, we finetune a recognizer using a pre-trained recognizer." - ] + "name": "stderr", + "output_type": "stream", + "text": [ + "2021-07-11 13:00:46,931 - mmaction - INFO - These parameters in pretrained checkpoint are not loaded: {'fc.bias', 'fc.weight'}\n", + "/usr/local/lib/python3.7/dist-packages/torch/utils/data/dataloader.py:477: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. 
Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n", + " cpuset_checked))\n", + "2021-07-11 13:00:46,980 - mmaction - INFO - load checkpoint from ./checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth\n", + "2021-07-11 13:00:46,981 - mmaction - INFO - Use load_from_local loader\n", + "2021-07-11 13:00:47,071 - mmaction - WARNING - The model and loaded state dict do not match exactly\n", + "\n", + "size mismatch for cls_head.fc_cls.weight: copying a param with shape torch.Size([400, 2048]) from checkpoint, the shape in current model is torch.Size([2, 2048]).\n", + "size mismatch for cls_head.fc_cls.bias: copying a param with shape torch.Size([400]) from checkpoint, the shape in current model is torch.Size([2]).\n", + "2021-07-11 13:00:47,074 - mmaction - INFO - Start running, host: root@b465112b4add, work_dir: /content/mmaction2/tutorial_exps\n", + "2021-07-11 13:00:47,078 - mmaction - INFO - Hooks will be executed in the following order:\n", + "before_run:\n", + "(VERY_HIGH ) StepLrUpdaterHook \n", + "(NORMAL ) CheckpointHook \n", + "(NORMAL ) EvalHook \n", + "(VERY_LOW ) TextLoggerHook \n", + " -------------------- \n", + "before_train_epoch:\n", + "(VERY_HIGH ) StepLrUpdaterHook \n", + "(NORMAL ) EvalHook \n", + "(LOW ) IterTimerHook \n", + "(VERY_LOW ) TextLoggerHook \n", + " -------------------- \n", + "before_train_iter:\n", + "(VERY_HIGH ) StepLrUpdaterHook \n", + "(NORMAL ) EvalHook \n", + "(LOW ) IterTimerHook \n", + " -------------------- \n", + "after_train_iter:\n", + "(ABOVE_NORMAL) OptimizerHook \n", + "(NORMAL ) CheckpointHook \n", + "(NORMAL ) EvalHook \n", + "(LOW ) IterTimerHook \n", + "(VERY_LOW ) TextLoggerHook \n", + " -------------------- \n", + "after_train_epoch:\n", + "(NORMAL ) CheckpointHook \n", + "(NORMAL ) EvalHook \n", + "(VERY_LOW ) TextLoggerHook \n", + " -------------------- \n", + "before_val_epoch:\n", + "(LOW ) IterTimerHook \n", + "(VERY_LOW ) TextLoggerHook \n", + " -------------------- \n", + "before_val_iter:\n", + "(LOW ) IterTimerHook \n", + " -------------------- \n", + "after_val_iter:\n", + "(LOW ) IterTimerHook \n", + " -------------------- \n", + "after_val_epoch:\n", + "(VERY_LOW ) TextLoggerHook \n", + " -------------------- \n", + "2021-07-11 13:00:47,081 - mmaction - INFO - workflow: [('train', 1)], max: 10 epochs\n", + "/usr/local/lib/python3.7/dist-packages/mmcv/runner/hooks/evaluation.py:190: UserWarning: runner.meta is None. Creating an empty one.\n", + " warnings.warn('runner.meta is None. 
Creating an empty one.')\n", + "2021-07-11 13:00:51,802 - mmaction - INFO - Epoch [1][5/15]\tlr: 7.813e-05, eta: 0:02:16, time: 0.942, data_time: 0.730, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7604, loss: 0.7604, grad_norm: 14.8813\n", + "2021-07-11 13:00:52,884 - mmaction - INFO - Epoch [1][10/15]\tlr: 7.813e-05, eta: 0:01:21, time: 0.217, data_time: 0.028, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6282, loss: 0.6282, grad_norm: 10.1834\n", + "2021-07-11 13:00:53,706 - mmaction - INFO - Epoch [1][15/15]\tlr: 7.813e-05, eta: 0:00:59, time: 0.164, data_time: 0.001, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7165, loss: 0.7165, grad_norm: 10.8534\n", + "2021-07-11 13:00:57,724 - mmaction - INFO - Epoch [2][5/15]\tlr: 7.813e-05, eta: 0:01:09, time: 0.802, data_time: 0.596, memory: 2918, top1_acc: 0.3000, top5_acc: 1.0000, loss_cls: 0.7001, loss: 0.7001, grad_norm: 11.4311\n", + "2021-07-11 13:00:59,219 - mmaction - INFO - Epoch [2][10/15]\tlr: 7.813e-05, eta: 0:01:00, time: 0.296, data_time: 0.108, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6916, loss: 0.6916, grad_norm: 12.7101\n", + "2021-07-11 13:01:00,040 - mmaction - INFO - Epoch [2][15/15]\tlr: 7.813e-05, eta: 0:00:51, time: 0.167, data_time: 0.004, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6567, loss: 0.6567, grad_norm: 8.8837\n", + "2021-07-11 13:01:04,152 - mmaction - INFO - Epoch [3][5/15]\tlr: 7.813e-05, eta: 0:00:56, time: 0.820, data_time: 0.618, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6320, loss: 0.6320, grad_norm: 11.4025\n", + "2021-07-11 13:01:05,526 - mmaction - INFO - Epoch [3][10/15]\tlr: 7.813e-05, eta: 0:00:50, time: 0.276, data_time: 0.075, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6542, loss: 0.6542, grad_norm: 10.6429\n", + "2021-07-11 13:01:06,350 - mmaction - INFO - Epoch [3][15/15]\tlr: 7.813e-05, eta: 0:00:44, time: 0.165, data_time: 0.001, memory: 2918, top1_acc: 0.2000, top5_acc: 1.0000, loss_cls: 0.7661, loss: 0.7661, grad_norm: 12.8421\n", + "2021-07-11 13:01:10,771 - mmaction - INFO - Epoch [4][5/15]\tlr: 7.813e-05, eta: 0:00:47, time: 0.883, data_time: 0.676, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6410, loss: 0.6410, grad_norm: 10.6697\n", + "2021-07-11 13:01:11,776 - mmaction - INFO - Epoch [4][10/15]\tlr: 7.813e-05, eta: 0:00:42, time: 0.201, data_time: 0.011, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6949, loss: 0.6949, grad_norm: 10.5467\n", + "2021-07-11 13:01:12,729 - mmaction - INFO - Epoch [4][15/15]\tlr: 7.813e-05, eta: 0:00:38, time: 0.190, data_time: 0.026, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6290, loss: 0.6290, grad_norm: 11.2779\n", + "2021-07-11 13:01:16,816 - mmaction - INFO - Epoch [5][5/15]\tlr: 7.813e-05, eta: 0:00:38, time: 0.817, data_time: 0.608, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6011, loss: 0.6011, grad_norm: 9.1335\n", + "2021-07-11 13:01:18,176 - mmaction - INFO - Epoch [5][10/15]\tlr: 7.813e-05, eta: 0:00:35, time: 0.272, data_time: 0.080, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6652, loss: 0.6652, grad_norm: 11.0616\n", + "2021-07-11 13:01:19,119 - mmaction - INFO - Epoch [5][15/15]\tlr: 7.813e-05, eta: 0:00:32, time: 0.188, data_time: 0.017, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6440, loss: 0.6440, grad_norm: 11.6473\n", + "2021-07-11 13:01:19,120 - mmaction - INFO - Saving checkpoint at 5 epochs\n" + 
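The `size mismatch for cls_head.fc_cls.*` warning earlier in this log is expected: MMCV loads checkpoints non-strictly during finetuning, so parameters whose shapes do not match are skipped and keep their fresh initialization. A sketch of invoking the same behaviour directly, reusing the notebook's `model` and `cfg`:

```python
from mmcv.runner import load_checkpoint

# strict=False (the default) turns mismatched keys/shapes into warnings instead
# of errors: the 400-way fc_cls weights are skipped, so the new 2-way head keeps
# its random initialization while the backbone weights are restored.
checkpoint = load_checkpoint(model, cfg.load_from, map_location='cpu', strict=False)
```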
] }, { - "cell_type": "code", - "metadata": { - "id": "LjCcmCKOjktc" - }, - "source": [ - "from mmcv import Config\n", - "cfg = Config.fromfile('./configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py')" - ], - "execution_count": 27, - "outputs": [] + "name": "stdout", + "output_type": "stream", + "text": [ + "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 4.9 task/s, elapsed: 2s, ETA: 0s" + ] }, { - "cell_type": "markdown", - "metadata": { - "id": "tc8YhFFGjp3e" - }, - "source": [ - "Given a config that trains a TSN model on kinetics400-full dataset, we need to modify some values to use it for training TSN on Kinetics400-tiny dataset.\n" - ] + "name": "stderr", + "output_type": "stream", + "text": [ + "2021-07-11 13:01:21,673 - mmaction - INFO - Evaluating top_k_accuracy ...\n", + "2021-07-11 13:01:21,677 - mmaction - INFO - \n", + "top1_acc\t0.7000\n", + "top5_acc\t1.0000\n", + "2021-07-11 13:01:21,679 - mmaction - INFO - Evaluating mean_class_accuracy ...\n", + "2021-07-11 13:01:21,682 - mmaction - INFO - \n", + "mean_acc\t0.7000\n", + "2021-07-11 13:01:22,264 - mmaction - INFO - Now best checkpoint is saved as best_top1_acc_epoch_5.pth.\n", + "2021-07-11 13:01:22,267 - mmaction - INFO - Best top1_acc is 0.7000 at 5 epoch.\n", + "2021-07-11 13:01:22,271 - mmaction - INFO - Epoch(val) [5][5]\ttop1_acc: 0.7000, top5_acc: 1.0000, mean_class_accuracy: 0.7000\n", + "2021-07-11 13:01:26,623 - mmaction - INFO - Epoch [6][5/15]\tlr: 7.813e-05, eta: 0:00:31, time: 0.868, data_time: 0.656, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6753, loss: 0.6753, grad_norm: 11.8640\n", + "2021-07-11 13:01:27,597 - mmaction - INFO - Epoch [6][10/15]\tlr: 7.813e-05, eta: 0:00:28, time: 0.195, data_time: 0.003, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6715, loss: 0.6715, grad_norm: 11.3347\n", + "2021-07-11 13:01:28,736 - mmaction - INFO - Epoch [6][15/15]\tlr: 7.813e-05, eta: 0:00:25, time: 0.228, data_time: 0.063, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5769, loss: 0.5769, grad_norm: 9.2541\n", + "2021-07-11 13:01:32,860 - mmaction - INFO - Epoch [7][5/15]\tlr: 7.813e-05, eta: 0:00:24, time: 0.822, data_time: 0.620, memory: 2918, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.5379, loss: 0.5379, grad_norm: 8.0147\n", + "2021-07-11 13:01:34,340 - mmaction - INFO - Epoch [7][10/15]\tlr: 7.813e-05, eta: 0:00:22, time: 0.298, data_time: 0.109, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6187, loss: 0.6187, grad_norm: 11.5244\n", + "2021-07-11 13:01:35,165 - mmaction - INFO - Epoch [7][15/15]\tlr: 7.813e-05, eta: 0:00:19, time: 0.165, data_time: 0.002, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7063, loss: 0.7063, grad_norm: 12.4979\n", + "2021-07-11 13:01:39,435 - mmaction - INFO - Epoch [8][5/15]\tlr: 7.813e-05, eta: 0:00:17, time: 0.853, data_time: 0.641, memory: 2918, top1_acc: 1.0000, top5_acc: 1.0000, loss_cls: 0.5369, loss: 0.5369, grad_norm: 8.6545\n", + "2021-07-11 13:01:40,808 - mmaction - INFO - Epoch [8][10/15]\tlr: 7.813e-05, eta: 0:00:15, time: 0.275, data_time: 0.086, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6407, loss: 0.6407, grad_norm: 12.5537\n", + "2021-07-11 13:01:41,627 - mmaction - INFO - Epoch [8][15/15]\tlr: 7.813e-05, eta: 0:00:12, time: 0.164, data_time: 0.001, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6073, loss: 0.6073, grad_norm: 11.4028\n", + "2021-07-11 13:01:45,651 - mmaction - INFO - Epoch [9][5/15]\tlr: 7.813e-05, eta: 0:00:11, 
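The `mean_class_accuracy` reported by the evaluation hook above is the average of per-class accuracies, which is more informative than plain top-1 accuracy when the validation set is imbalanced. A sketch of the definition (not MMAction2's exact implementation):

```python
import numpy as np

def mean_class_accuracy(scores, labels):
    """Average of per-class accuracies for an (N, num_classes) score array."""
    preds = np.argmax(scores, axis=1)
    labels = np.asarray(labels)
    per_class = [np.mean(preds[labels == c] == c) for c in np.unique(labels)]
    return float(np.mean(per_class))

# Tiny demo: one mistake in class 0 -> (0.5 + 1.0) / 2 = 0.75
scores = np.array([[0.9, 0.1], [0.2, 0.8], [0.3, 0.7], [0.1, 0.9]])
print(mean_class_accuracy(scores, [0, 0, 1, 1]))  # 0.75
```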
time: 0.803, data_time: 0.591, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5596, loss: 0.5596, grad_norm: 10.0821\n", + "2021-07-11 13:01:46,891 - mmaction - INFO - Epoch [9][10/15]\tlr: 7.813e-05, eta: 0:00:08, time: 0.248, data_time: 0.044, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6470, loss: 0.6470, grad_norm: 11.8979\n", + "2021-07-11 13:01:47,944 - mmaction - INFO - Epoch [9][15/15]\tlr: 7.813e-05, eta: 0:00:06, time: 0.211, data_time: 0.041, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6657, loss: 0.6657, grad_norm: 12.0643\n", + "2021-07-11 13:01:52,200 - mmaction - INFO - Epoch [10][5/15]\tlr: 7.813e-05, eta: 0:00:04, time: 0.849, data_time: 0.648, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6310, loss: 0.6310, grad_norm: 11.5690\n", + "2021-07-11 13:01:53,707 - mmaction - INFO - Epoch [10][10/15]\tlr: 7.813e-05, eta: 0:00:02, time: 0.303, data_time: 0.119, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5178, loss: 0.5178, grad_norm: 9.3324\n", + "2021-07-11 13:01:54,520 - mmaction - INFO - Epoch [10][15/15]\tlr: 7.813e-05, eta: 0:00:00, time: 0.162, data_time: 0.001, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6919, loss: 0.6919, grad_norm: 12.6688\n", + "2021-07-11 13:01:54,522 - mmaction - INFO - Saving checkpoint at 10 epochs\n" + ] }, { - "cell_type": "code", - "metadata": { - "id": "tlhu9byjjt-K", - "colab": { - "base_uri": "https://localhost:8080/" - }, - "outputId": "3b9a3c49-ace0-41d3-dd15-d6c8579755f8" - }, - "source": [ - "from mmcv.runner import set_random_seed\n", - "\n", - "# Modify dataset type and path\n", - "cfg.dataset_type = 'VideoDataset'\n", - "cfg.data_root = 'kinetics400_tiny/train/'\n", - "cfg.data_root_val = 'kinetics400_tiny/val/'\n", - "cfg.ann_file_train = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n", - "cfg.ann_file_val = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", - "cfg.ann_file_test = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", - "\n", - "cfg.data.test.type = 'VideoDataset'\n", - "cfg.data.test.ann_file = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", - "cfg.data.test.data_prefix = 'kinetics400_tiny/val/'\n", - "\n", - "cfg.data.train.type = 'VideoDataset'\n", - "cfg.data.train.ann_file = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n", - "cfg.data.train.data_prefix = 'kinetics400_tiny/train/'\n", - "\n", - "cfg.data.val.type = 'VideoDataset'\n", - "cfg.data.val.ann_file = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", - "cfg.data.val.data_prefix = 'kinetics400_tiny/val/'\n", - "\n", - "# The flag is used to determine whether it is omnisource training\n", - "cfg.setdefault('omnisource', False)\n", - "# Modify num classes of the model in cls_head\n", - "cfg.model.cls_head.num_classes = 2\n", - "# We can use the pre-trained TSN model\n", - "cfg.load_from = './checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n", - "\n", - "# Set up working dir to save files and logs.\n", - "cfg.work_dir = './tutorial_exps'\n", - "\n", - "# The original learning rate (LR) is set for 8-GPU training.\n", - "# We divide it by 8 since we only use one GPU.\n", - "cfg.data.videos_per_gpu = cfg.data.videos_per_gpu // 16\n", - "cfg.optimizer.lr = cfg.optimizer.lr / 8 / 16\n", - "cfg.total_epochs = 10\n", - "\n", - "# We can set the checkpoint saving interval to reduce the storage cost\n", - "cfg.checkpoint_config.interval = 5\n", - "# We can set the log print interval to reduce the the times of printing log\n", - 
"cfg.log_config.interval = 5\n", - "\n", - "# Set seed thus the results are more reproducible\n", - "cfg.seed = 0\n", - "set_random_seed(0, deterministic=False)\n", - "cfg.gpu_ids = range(1)\n", - "\n", - "# Save the best\n", - "cfg.evaluation.save_best='auto'\n", - "\n", - "\n", - "# We can initialize the logger for training and have a look\n", - "# at the final config used for training\n", - "print(f'Config:\\n{cfg.pretty_text}')\n" - ], - "execution_count": 28, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Config:\n", - "model = dict(\n", - " type='Recognizer2D',\n", - " backbone=dict(\n", - " type='ResNet',\n", - " pretrained='torchvision://resnet50',\n", - " depth=50,\n", - " norm_eval=False),\n", - " cls_head=dict(\n", - " type='TSNHead',\n", - " num_classes=2,\n", - " in_channels=2048,\n", - " spatial_type='avg',\n", - " consensus=dict(type='AvgConsensus', dim=1),\n", - " dropout_ratio=0.4,\n", - " init_std=0.01),\n", - " train_cfg=None,\n", - " test_cfg=dict(average_clips=None))\n", - "optimizer = dict(type='SGD', lr=7.8125e-05, momentum=0.9, weight_decay=0.0001)\n", - "optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2))\n", - "lr_config = dict(policy='step', step=[40, 80])\n", - "total_epochs = 10\n", - "checkpoint_config = dict(interval=5)\n", - "log_config = dict(interval=5, hooks=[dict(type='TextLoggerHook')])\n", - "dist_params = dict(backend='nccl')\n", - "log_level = 'INFO'\n", - "load_from = './checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n", - "resume_from = None\n", - "workflow = [('train', 1)]\n", - "dataset_type = 'VideoDataset'\n", - "data_root = 'kinetics400_tiny/train/'\n", - "data_root_val = 'kinetics400_tiny/val/'\n", - "ann_file_train = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n", - "ann_file_val = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", - "ann_file_test = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", - "img_norm_cfg = dict(\n", - " mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False)\n", - "train_pipeline = [\n", - " dict(type='DecordInit'),\n", - " dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8),\n", - " dict(type='DecordDecode'),\n", - " dict(\n", - " type='MultiScaleCrop',\n", - " input_size=224,\n", - " scales=(1, 0.875, 0.75, 0.66),\n", - " random_crop=False,\n", - " max_wh_scale_gap=1),\n", - " dict(type='Resize', scale=(224, 224), keep_ratio=False),\n", - " dict(type='Flip', flip_ratio=0.5),\n", - " dict(\n", - " type='Normalize',\n", - " mean=[123.675, 116.28, 103.53],\n", - " std=[58.395, 57.12, 57.375],\n", - " to_bgr=False),\n", - " dict(type='FormatShape', input_format='NCHW'),\n", - " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", - " dict(type='ToTensor', keys=['imgs', 'label'])\n", - "]\n", - "val_pipeline = [\n", - " dict(type='DecordInit'),\n", - " dict(\n", - " type='SampleFrames',\n", - " clip_len=1,\n", - " frame_interval=1,\n", - " num_clips=8,\n", - " test_mode=True),\n", - " dict(type='DecordDecode'),\n", - " dict(type='Resize', scale=(-1, 256)),\n", - " dict(type='CenterCrop', crop_size=224),\n", - " dict(\n", - " type='Normalize',\n", - " mean=[123.675, 116.28, 103.53],\n", - " std=[58.395, 57.12, 57.375],\n", - " to_bgr=False),\n", - " dict(type='FormatShape', input_format='NCHW'),\n", - " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", - " dict(type='ToTensor', keys=['imgs'])\n", - "]\n", - "test_pipeline = [\n", - " dict(type='DecordInit'),\n", - " dict(\n", - " 
type='SampleFrames',\n", - " clip_len=1,\n", - " frame_interval=1,\n", - " num_clips=25,\n", - " test_mode=True),\n", - " dict(type='DecordDecode'),\n", - " dict(type='Resize', scale=(-1, 256)),\n", - " dict(type='ThreeCrop', crop_size=256),\n", - " dict(\n", - " type='Normalize',\n", - " mean=[123.675, 116.28, 103.53],\n", - " std=[58.395, 57.12, 57.375],\n", - " to_bgr=False),\n", - " dict(type='FormatShape', input_format='NCHW'),\n", - " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", - " dict(type='ToTensor', keys=['imgs'])\n", - "]\n", - "data = dict(\n", - " videos_per_gpu=2,\n", - " workers_per_gpu=2,\n", - " train=dict(\n", - " type='VideoDataset',\n", - " ann_file='kinetics400_tiny/kinetics_tiny_train_video.txt',\n", - " data_prefix='kinetics400_tiny/train/',\n", - " pipeline=[\n", - " dict(type='DecordInit'),\n", - " dict(\n", - " type='SampleFrames', clip_len=1, frame_interval=1,\n", - " num_clips=8),\n", - " dict(type='DecordDecode'),\n", - " dict(\n", - " type='MultiScaleCrop',\n", - " input_size=224,\n", - " scales=(1, 0.875, 0.75, 0.66),\n", - " random_crop=False,\n", - " max_wh_scale_gap=1),\n", - " dict(type='Resize', scale=(224, 224), keep_ratio=False),\n", - " dict(type='Flip', flip_ratio=0.5),\n", - " dict(\n", - " type='Normalize',\n", - " mean=[123.675, 116.28, 103.53],\n", - " std=[58.395, 57.12, 57.375],\n", - " to_bgr=False),\n", - " dict(type='FormatShape', input_format='NCHW'),\n", - " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", - " dict(type='ToTensor', keys=['imgs', 'label'])\n", - " ]),\n", - " val=dict(\n", - " type='VideoDataset',\n", - " ann_file='kinetics400_tiny/kinetics_tiny_val_video.txt',\n", - " data_prefix='kinetics400_tiny/val/',\n", - " pipeline=[\n", - " dict(type='DecordInit'),\n", - " dict(\n", - " type='SampleFrames',\n", - " clip_len=1,\n", - " frame_interval=1,\n", - " num_clips=8,\n", - " test_mode=True),\n", - " dict(type='DecordDecode'),\n", - " dict(type='Resize', scale=(-1, 256)),\n", - " dict(type='CenterCrop', crop_size=224),\n", - " dict(\n", - " type='Normalize',\n", - " mean=[123.675, 116.28, 103.53],\n", - " std=[58.395, 57.12, 57.375],\n", - " to_bgr=False),\n", - " dict(type='FormatShape', input_format='NCHW'),\n", - " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", - " dict(type='ToTensor', keys=['imgs'])\n", - " ]),\n", - " test=dict(\n", - " type='VideoDataset',\n", - " ann_file='kinetics400_tiny/kinetics_tiny_val_video.txt',\n", - " data_prefix='kinetics400_tiny/val/',\n", - " pipeline=[\n", - " dict(type='DecordInit'),\n", - " dict(\n", - " type='SampleFrames',\n", - " clip_len=1,\n", - " frame_interval=1,\n", - " num_clips=25,\n", - " test_mode=True),\n", - " dict(type='DecordDecode'),\n", - " dict(type='Resize', scale=(-1, 256)),\n", - " dict(type='ThreeCrop', crop_size=256),\n", - " dict(\n", - " type='Normalize',\n", - " mean=[123.675, 116.28, 103.53],\n", - " std=[58.395, 57.12, 57.375],\n", - " to_bgr=False),\n", - " dict(type='FormatShape', input_format='NCHW'),\n", - " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", - " dict(type='ToTensor', keys=['imgs'])\n", - " ]))\n", - "evaluation = dict(\n", - " interval=5,\n", - " metrics=['top_k_accuracy', 'mean_class_accuracy'],\n", - " save_best='auto')\n", - "work_dir = './tutorial_exps'\n", - "omnisource = False\n", - "seed = 0\n", - "gpu_ids = range(0, 1)\n", - "\n" - ], - "name": "stdout" - } - ] + "name": "stdout", + "output_type": "stream", + "text": [ + "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 5.9 
task/s, elapsed: 2s, ETA: 0s"
+      ]
+     },
+     {
+      "name": "stderr",
+      "output_type": "stream",
+      "text": [
+       "2021-07-11 13:01:56,741 - mmaction - INFO - Evaluating top_k_accuracy ...\n",
+       "2021-07-11 13:01:56,743 - mmaction - INFO - \n",
+       "top1_acc\t1.0000\n",
+       "top5_acc\t1.0000\n",
+       "2021-07-11 13:01:56,749 - mmaction - INFO - Evaluating mean_class_accuracy ...\n",
+       "2021-07-11 13:01:56,750 - mmaction - INFO - \n",
+       "mean_acc\t1.0000\n",
+       "2021-07-11 13:01:57,267 - mmaction - INFO - Now best checkpoint is saved as best_top1_acc_epoch_10.pth.\n",
+       "2021-07-11 13:01:57,269 - mmaction - INFO - Best top1_acc is 1.0000 at 10 epoch.\n",
+       "2021-07-11 13:01:57,270 - mmaction - INFO - Epoch(val) [10][5]\ttop1_acc: 1.0000, top5_acc: 1.0000, mean_class_accuracy: 1.0000\n"
+      ]
+     }
+    ],
+    "source": [
+     "import os.path as osp\n",
+     "\n",
+     "from mmaction.datasets import build_dataset\n",
+     "from mmaction.models import build_model\n",
+     "from mmaction.apis import train_model\n",
+     "\n",
+     "import mmcv\n",
+     "\n",
+     "# Build the dataset\n",
+     "datasets = [build_dataset(cfg.data.train)]\n",
+     "\n",
+     "# Build the recognizer\n",
+     "model = build_model(cfg.model, train_cfg=cfg.get('train_cfg'), test_cfg=cfg.get('test_cfg'))\n",
+     "\n",
+     "# Create work_dir\n",
+     "mmcv.mkdir_or_exist(osp.abspath(cfg.work_dir))\n",
+     "train_model(model, datasets, cfg, distributed=False, validate=True)"
+    ]
+   },
+   {
+    "cell_type": "markdown",
+    "metadata": {
+     "id": "zdSd7oTLlxIf"
+    },
+    "source": [
+     "### Understand the log\n",
+     "From the log, we can get a basic understanding of the training process and know how well the recognizer is trained.\n",
+     "\n",
+     "Firstly, the ResNet-50 backbone pre-trained on ImageNet is loaded; this is a common practice, since training from scratch is much more costly. The log shows that all the weights of the ResNet-50 backbone are loaded except the `fc.bias` and `fc.weight`.\n",
+     "\n",
+     "Second, since the dataset we are using is small, we load a pre-trained TSN model and finetune it for action recognition.\n",
+     "The original TSN is trained on the full Kinetics-400 dataset, which contains 400 classes, but the Kinetics-400 Tiny dataset only has 2 classes. Therefore, the last FC layer of the pre-trained TSN used for classification has a different weight shape and is not loaded.\n",
+     "\n",
+     "Third, after training, the recognizer is evaluated by the default evaluation. The results show that the recognizer achieves 100% top1 accuracy and 100% top5 accuracy on the val dataset.\n",
+     " \n",
+     "Not bad!"
+    ]
+   },
+   {
+    "cell_type": "markdown",
+    "metadata": {
+     "id": "ryVoSfZVmogw"
+    },
+    "source": [
+     "## Test the trained recognizer\n",
+     "\n",
+     "After finetuning the recognizer, let's check the prediction results!"
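The next cell (truncated in this hunk) evaluates the finetuned recognizer on the test split. A sketch of what single-GPU testing looks like with this API; that `single_gpu_test` is exposed by `mmaction.apis` in this version is an assumption, so the call is left commented:

```python
from mmcv.parallel import MMDataParallel
from mmaction.datasets import build_dataset, build_dataloader

# Build the test dataset and a non-shuffled, non-distributed loader.
dataset = build_dataset(cfg.data.test, dict(test_mode=True))
data_loader = build_dataloader(
    dataset,
    videos_per_gpu=1,
    workers_per_gpu=cfg.data.workers_per_gpu,
    dist=False,
    shuffle=False)
model_dp = MMDataParallel(model, device_ids=[0])
# Assumed API: single_gpu_test collects per-video scores, and the dataset's
# evaluate() computes the metrics from them.
# from mmaction.apis import single_gpu_test
# outputs = single_gpu_test(model_dp, data_loader)
# print(dataset.evaluate(outputs, metrics=['top_k_accuracy']))
```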
+ ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" }, + "id": "eyY3hCMwyTct", + "outputId": "ea54ff0a-4299-4e93-c1ca-4fe597e7516b" + }, + "outputs": [ { - "cell_type": "code", - "metadata": { - "id": "dDBWkdDRk6oz", - "colab": { - "base_uri": "https://localhost:8080/" - }, - "outputId": "a85d80d7-b3c4-43f1-d49a-057e8036807f" - }, - "source": [ - "import os.path as osp\n", - "\n", - "from mmaction.datasets import build_dataset\n", - "from mmaction.models import build_model\n", - "from mmaction.apis import train_model\n", - "\n", - "import mmcv\n", - "\n", - "# Build the dataset\n", - "datasets = [build_dataset(cfg.data.train)]\n", - "\n", - "# Build the recognizer\n", - "model = build_model(cfg.model, train_cfg=cfg.get('train_cfg'), test_cfg=cfg.get('test_cfg'))\n", - "\n", - "# Create work_dir\n", - "mmcv.mkdir_or_exist(osp.abspath(cfg.work_dir))\n", - "train_model(model, datasets, cfg, distributed=False, validate=True)" - ], - "execution_count": 29, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Use load_from_torchvision loader\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "2021-07-11 13:00:46,931 - mmaction - INFO - These parameters in pretrained checkpoint are not loaded: {'fc.bias', 'fc.weight'}\n", - "/usr/local/lib/python3.7/dist-packages/torch/utils/data/dataloader.py:477: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n", - " cpuset_checked))\n", - "2021-07-11 13:00:46,980 - mmaction - INFO - load checkpoint from ./checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth\n", - "2021-07-11 13:00:46,981 - mmaction - INFO - Use load_from_local loader\n", - "2021-07-11 13:00:47,071 - mmaction - WARNING - The model and loaded state dict do not match exactly\n", - "\n", - "size mismatch for cls_head.fc_cls.weight: copying a param with shape torch.Size([400, 2048]) from checkpoint, the shape in current model is torch.Size([2, 2048]).\n", - "size mismatch for cls_head.fc_cls.bias: copying a param with shape torch.Size([400]) from checkpoint, the shape in current model is torch.Size([2]).\n", - "2021-07-11 13:00:47,074 - mmaction - INFO - Start running, host: root@b465112b4add, work_dir: /content/mmaction2/tutorial_exps\n", - "2021-07-11 13:00:47,078 - mmaction - INFO - Hooks will be executed in the following order:\n", - "before_run:\n", - "(VERY_HIGH ) StepLrUpdaterHook \n", - "(NORMAL ) CheckpointHook \n", - "(NORMAL ) EvalHook \n", - "(VERY_LOW ) TextLoggerHook \n", - " -------------------- \n", - "before_train_epoch:\n", - "(VERY_HIGH ) StepLrUpdaterHook \n", - "(NORMAL ) EvalHook \n", - "(LOW ) IterTimerHook \n", - "(VERY_LOW ) TextLoggerHook \n", - " -------------------- \n", - "before_train_iter:\n", - "(VERY_HIGH ) StepLrUpdaterHook \n", - "(NORMAL ) EvalHook \n", - "(LOW ) IterTimerHook \n", - " -------------------- \n", - "after_train_iter:\n", - "(ABOVE_NORMAL) OptimizerHook \n", - "(NORMAL ) CheckpointHook \n", - "(NORMAL ) EvalHook \n", - "(LOW ) IterTimerHook \n", - "(VERY_LOW ) TextLoggerHook \n", - " -------------------- \n", - "after_train_epoch:\n", - "(NORMAL ) CheckpointHook \n", - "(NORMAL ) EvalHook \n", - "(VERY_LOW ) 
TextLoggerHook \n", - " -------------------- \n", - "before_val_epoch:\n", - "(LOW ) IterTimerHook \n", - "(VERY_LOW ) TextLoggerHook \n", - " -------------------- \n", - "before_val_iter:\n", - "(LOW ) IterTimerHook \n", - " -------------------- \n", - "after_val_iter:\n", - "(LOW ) IterTimerHook \n", - " -------------------- \n", - "after_val_epoch:\n", - "(VERY_LOW ) TextLoggerHook \n", - " -------------------- \n", - "2021-07-11 13:00:47,081 - mmaction - INFO - workflow: [('train', 1)], max: 10 epochs\n", - "/usr/local/lib/python3.7/dist-packages/mmcv/runner/hooks/evaluation.py:190: UserWarning: runner.meta is None. Creating an empty one.\n", - " warnings.warn('runner.meta is None. Creating an empty one.')\n", - "2021-07-11 13:00:51,802 - mmaction - INFO - Epoch [1][5/15]\tlr: 7.813e-05, eta: 0:02:16, time: 0.942, data_time: 0.730, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7604, loss: 0.7604, grad_norm: 14.8813\n", - "2021-07-11 13:00:52,884 - mmaction - INFO - Epoch [1][10/15]\tlr: 7.813e-05, eta: 0:01:21, time: 0.217, data_time: 0.028, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6282, loss: 0.6282, grad_norm: 10.1834\n", - "2021-07-11 13:00:53,706 - mmaction - INFO - Epoch [1][15/15]\tlr: 7.813e-05, eta: 0:00:59, time: 0.164, data_time: 0.001, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7165, loss: 0.7165, grad_norm: 10.8534\n", - "2021-07-11 13:00:57,724 - mmaction - INFO - Epoch [2][5/15]\tlr: 7.813e-05, eta: 0:01:09, time: 0.802, data_time: 0.596, memory: 2918, top1_acc: 0.3000, top5_acc: 1.0000, loss_cls: 0.7001, loss: 0.7001, grad_norm: 11.4311\n", - "2021-07-11 13:00:59,219 - mmaction - INFO - Epoch [2][10/15]\tlr: 7.813e-05, eta: 0:01:00, time: 0.296, data_time: 0.108, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6916, loss: 0.6916, grad_norm: 12.7101\n", - "2021-07-11 13:01:00,040 - mmaction - INFO - Epoch [2][15/15]\tlr: 7.813e-05, eta: 0:00:51, time: 0.167, data_time: 0.004, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6567, loss: 0.6567, grad_norm: 8.8837\n", - "2021-07-11 13:01:04,152 - mmaction - INFO - Epoch [3][5/15]\tlr: 7.813e-05, eta: 0:00:56, time: 0.820, data_time: 0.618, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6320, loss: 0.6320, grad_norm: 11.4025\n", - "2021-07-11 13:01:05,526 - mmaction - INFO - Epoch [3][10/15]\tlr: 7.813e-05, eta: 0:00:50, time: 0.276, data_time: 0.075, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6542, loss: 0.6542, grad_norm: 10.6429\n", - "2021-07-11 13:01:06,350 - mmaction - INFO - Epoch [3][15/15]\tlr: 7.813e-05, eta: 0:00:44, time: 0.165, data_time: 0.001, memory: 2918, top1_acc: 0.2000, top5_acc: 1.0000, loss_cls: 0.7661, loss: 0.7661, grad_norm: 12.8421\n", - "2021-07-11 13:01:10,771 - mmaction - INFO - Epoch [4][5/15]\tlr: 7.813e-05, eta: 0:00:47, time: 0.883, data_time: 0.676, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6410, loss: 0.6410, grad_norm: 10.6697\n", - "2021-07-11 13:01:11,776 - mmaction - INFO - Epoch [4][10/15]\tlr: 7.813e-05, eta: 0:00:42, time: 0.201, data_time: 0.011, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6949, loss: 0.6949, grad_norm: 10.5467\n", - "2021-07-11 13:01:12,729 - mmaction - INFO - Epoch [4][15/15]\tlr: 7.813e-05, eta: 0:00:38, time: 0.190, data_time: 0.026, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6290, loss: 0.6290, grad_norm: 11.2779\n", - "2021-07-11 13:01:16,816 - mmaction - INFO - Epoch [5][5/15]\tlr: 
7.813e-05, eta: 0:00:38, time: 0.817, data_time: 0.608, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6011, loss: 0.6011, grad_norm: 9.1335\n", - "2021-07-11 13:01:18,176 - mmaction - INFO - Epoch [5][10/15]\tlr: 7.813e-05, eta: 0:00:35, time: 0.272, data_time: 0.080, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6652, loss: 0.6652, grad_norm: 11.0616\n", - "2021-07-11 13:01:19,119 - mmaction - INFO - Epoch [5][15/15]\tlr: 7.813e-05, eta: 0:00:32, time: 0.188, data_time: 0.017, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6440, loss: 0.6440, grad_norm: 11.6473\n", - "2021-07-11 13:01:19,120 - mmaction - INFO - Saving checkpoint at 5 epochs\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 4.9 task/s, elapsed: 2s, ETA: 0s" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "2021-07-11 13:01:21,673 - mmaction - INFO - Evaluating top_k_accuracy ...\n", - "2021-07-11 13:01:21,677 - mmaction - INFO - \n", - "top1_acc\t0.7000\n", - "top5_acc\t1.0000\n", - "2021-07-11 13:01:21,679 - mmaction - INFO - Evaluating mean_class_accuracy ...\n", - "2021-07-11 13:01:21,682 - mmaction - INFO - \n", - "mean_acc\t0.7000\n", - "2021-07-11 13:01:22,264 - mmaction - INFO - Now best checkpoint is saved as best_top1_acc_epoch_5.pth.\n", - "2021-07-11 13:01:22,267 - mmaction - INFO - Best top1_acc is 0.7000 at 5 epoch.\n", - "2021-07-11 13:01:22,271 - mmaction - INFO - Epoch(val) [5][5]\ttop1_acc: 0.7000, top5_acc: 1.0000, mean_class_accuracy: 0.7000\n", - "2021-07-11 13:01:26,623 - mmaction - INFO - Epoch [6][5/15]\tlr: 7.813e-05, eta: 0:00:31, time: 0.868, data_time: 0.656, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6753, loss: 0.6753, grad_norm: 11.8640\n", - "2021-07-11 13:01:27,597 - mmaction - INFO - Epoch [6][10/15]\tlr: 7.813e-05, eta: 0:00:28, time: 0.195, data_time: 0.003, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6715, loss: 0.6715, grad_norm: 11.3347\n", - "2021-07-11 13:01:28,736 - mmaction - INFO - Epoch [6][15/15]\tlr: 7.813e-05, eta: 0:00:25, time: 0.228, data_time: 0.063, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5769, loss: 0.5769, grad_norm: 9.2541\n", - "2021-07-11 13:01:32,860 - mmaction - INFO - Epoch [7][5/15]\tlr: 7.813e-05, eta: 0:00:24, time: 0.822, data_time: 0.620, memory: 2918, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.5379, loss: 0.5379, grad_norm: 8.0147\n", - "2021-07-11 13:01:34,340 - mmaction - INFO - Epoch [7][10/15]\tlr: 7.813e-05, eta: 0:00:22, time: 0.298, data_time: 0.109, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6187, loss: 0.6187, grad_norm: 11.5244\n", - "2021-07-11 13:01:35,165 - mmaction - INFO - Epoch [7][15/15]\tlr: 7.813e-05, eta: 0:00:19, time: 0.165, data_time: 0.002, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7063, loss: 0.7063, grad_norm: 12.4979\n", - "2021-07-11 13:01:39,435 - mmaction - INFO - Epoch [8][5/15]\tlr: 7.813e-05, eta: 0:00:17, time: 0.853, data_time: 0.641, memory: 2918, top1_acc: 1.0000, top5_acc: 1.0000, loss_cls: 0.5369, loss: 0.5369, grad_norm: 8.6545\n", - "2021-07-11 13:01:40,808 - mmaction - INFO - Epoch [8][10/15]\tlr: 7.813e-05, eta: 0:00:15, time: 0.275, data_time: 0.086, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6407, loss: 0.6407, grad_norm: 12.5537\n", - "2021-07-11 13:01:41,627 - mmaction - INFO - Epoch [8][15/15]\tlr: 7.813e-05, eta: 0:00:12, time: 0.164, data_time: 
0.001, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6073, loss: 0.6073, grad_norm: 11.4028\n", - "2021-07-11 13:01:45,651 - mmaction - INFO - Epoch [9][5/15]\tlr: 7.813e-05, eta: 0:00:11, time: 0.803, data_time: 0.591, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5596, loss: 0.5596, grad_norm: 10.0821\n", - "2021-07-11 13:01:46,891 - mmaction - INFO - Epoch [9][10/15]\tlr: 7.813e-05, eta: 0:00:08, time: 0.248, data_time: 0.044, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6470, loss: 0.6470, grad_norm: 11.8979\n", - "2021-07-11 13:01:47,944 - mmaction - INFO - Epoch [9][15/15]\tlr: 7.813e-05, eta: 0:00:06, time: 0.211, data_time: 0.041, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6657, loss: 0.6657, grad_norm: 12.0643\n", - "2021-07-11 13:01:52,200 - mmaction - INFO - Epoch [10][5/15]\tlr: 7.813e-05, eta: 0:00:04, time: 0.849, data_time: 0.648, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6310, loss: 0.6310, grad_norm: 11.5690\n", - "2021-07-11 13:01:53,707 - mmaction - INFO - Epoch [10][10/15]\tlr: 7.813e-05, eta: 0:00:02, time: 0.303, data_time: 0.119, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5178, loss: 0.5178, grad_norm: 9.3324\n", - "2021-07-11 13:01:54,520 - mmaction - INFO - Epoch [10][15/15]\tlr: 7.813e-05, eta: 0:00:00, time: 0.162, data_time: 0.001, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6919, loss: 0.6919, grad_norm: 12.6688\n", - "2021-07-11 13:01:54,522 - mmaction - INFO - Saving checkpoint at 10 epochs\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 5.9 task/s, elapsed: 2s, ETA: 0s" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "2021-07-11 13:01:56,741 - mmaction - INFO - Evaluating top_k_accuracy ...\n", - "2021-07-11 13:01:56,743 - mmaction - INFO - \n", - "top1_acc\t1.0000\n", - "top5_acc\t1.0000\n", - "2021-07-11 13:01:56,749 - mmaction - INFO - Evaluating mean_class_accuracy ...\n", - "2021-07-11 13:01:56,750 - mmaction - INFO - \n", - "mean_acc\t1.0000\n", - "2021-07-11 13:01:57,267 - mmaction - INFO - Now best checkpoint is saved as best_top1_acc_epoch_10.pth.\n", - "2021-07-11 13:01:57,269 - mmaction - INFO - Best top1_acc is 1.0000 at 10 epoch.\n", - "2021-07-11 13:01:57,270 - mmaction - INFO - Epoch(val) [10][5]\ttop1_acc: 1.0000, top5_acc: 1.0000, mean_class_accuracy: 1.0000\n" - ], - "name": "stderr" - } - ] + "name": "stdout", + "output_type": "stream", + "text": [ + "[ ] 0/10, elapsed: 0s, ETA:" + ] }, { - "cell_type": "markdown", - "metadata": { - "id": "zdSd7oTLlxIf" - }, - "source": [ - "### Understand the log\n", - "From the log, we can have a basic understanding the training process and know how well the recognizer is trained.\n", - "\n", - "Firstly, the ResNet-50 backbone pre-trained on ImageNet is loaded, this is a common practice since training from scratch is more cost. The log shows that all the weights of the ResNet-50 backbone are loaded except the `fc.bias` and `fc.weight`.\n", - "\n", - "Second, since the dataset we are using is small, we loaded a TSN model and finetune it for action recognition.\n", - "The original TSN is trained on original Kinetics-400 dataset which contains 400 classes but Kinetics-400 Tiny dataset only have 2 classes. 
Therefore, the last FC layer of the pre-trained TSN for classification has different weight shape and is not used.\n", - "\n", - "Third, after training, the recognizer is evaluated by the default evaluation. The results show that the recognizer achieves 100% top1 accuracy and 100% top5 accuracy on the val dataset,\n", - " \n", - "Not bad!" - ] + "name": "stderr", + "output_type": "stream", + "text": [ + "/usr/local/lib/python3.7/dist-packages/torch/utils/data/dataloader.py:477: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n", + " cpuset_checked))\n" + ] }, { - "cell_type": "markdown", - "metadata": { - "id": "ryVoSfZVmogw" - }, - "source": [ - "## Test the trained recognizer\n", - "\n", - "After finetuning the recognizer, let's check the prediction results!" - ] + "name": "stdout", + "output_type": "stream", + "text": [ + "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 2.2 task/s, elapsed: 5s, ETA: 0s\n", + "Evaluating top_k_accuracy ...\n", + "\n", + "top1_acc\t1.0000\n", + "top5_acc\t1.0000\n", + "\n", + "Evaluating mean_class_accuracy ...\n", + "\n", + "mean_acc\t1.0000\n", + "top1_acc: 1.0000\n", + "top5_acc: 1.0000\n", + "mean_class_accuracy: 1.0000\n" + ] }, { - "cell_type": "code", - "metadata": { - "id": "eyY3hCMwyTct", - "colab": { - "base_uri": "https://localhost:8080/" - }, - "outputId": "ea54ff0a-4299-4e93-c1ca-4fe597e7516b" - }, - "source": [ - "from mmaction.apis import single_gpu_test\n", - "from mmaction.datasets import build_dataloader\n", - "from mmcv.parallel import MMDataParallel\n", - "\n", - "# Build a test dataloader\n", - "dataset = build_dataset(cfg.data.test, dict(test_mode=True))\n", - "data_loader = build_dataloader(\n", - " dataset,\n", - " videos_per_gpu=1,\n", - " workers_per_gpu=cfg.data.workers_per_gpu,\n", - " dist=False,\n", - " shuffle=False)\n", - "model = MMDataParallel(model, device_ids=[0])\n", - "outputs = single_gpu_test(model, data_loader)\n", - "\n", - "eval_config = cfg.evaluation\n", - "eval_config.pop('interval')\n", - "eval_res = dataset.evaluate(outputs, **eval_config)\n", - "for name, val in eval_res.items():\n", - " print(f'{name}: {val:.04f}')" - ], - "execution_count": 30, - "outputs": [ - { - "output_type": "stream", - "text": [ - "[ ] 0/10, elapsed: 0s, ETA:" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "/usr/local/lib/python3.7/dist-packages/torch/utils/data/dataloader.py:477: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. 
Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n", - " cpuset_checked))\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 2.2 task/s, elapsed: 5s, ETA: 0s\n", - "Evaluating top_k_accuracy ...\n", - "\n", - "top1_acc\t1.0000\n", - "top5_acc\t1.0000\n", - "\n", - "Evaluating mean_class_accuracy ...\n", - "\n", - "mean_acc\t1.0000\n", - "top1_acc: 1.0000\n", - "top5_acc: 1.0000\n", - "mean_class_accuracy: 1.0000\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "/content/mmaction2/mmaction/datasets/base.py:166: UserWarning: Option arguments for metrics has been changed to `metric_options`, See 'https://github.com/open-mmlab/mmaction2/pull/286' for more details\n", - " 'Option arguments for metrics has been changed to '\n" - ], - "name": "stderr" - } - ] + "name": "stderr", + "output_type": "stream", + "text": [ + "/content/mmaction2/mmaction/datasets/base.py:166: UserWarning: Option arguments for metrics has been changed to `metric_options`, See 'https://github.com/open-mmlab/mmaction2/pull/286' for more details\n", + " 'Option arguments for metrics has been changed to '\n" + ] } - ] + ], + "source": [ + "from mmaction.apis import single_gpu_test\n", + "from mmaction.datasets import build_dataloader\n", + "from mmcv.parallel import MMDataParallel\n", + "\n", + "# Build a test dataloader\n", + "dataset = build_dataset(cfg.data.test, dict(test_mode=True))\n", + "data_loader = build_dataloader(\n", + " dataset,\n", + " videos_per_gpu=1,\n", + " workers_per_gpu=cfg.data.workers_per_gpu,\n", + " dist=False,\n", + " shuffle=False)\n", + "model = MMDataParallel(model, device_ids=[0])\n", + "outputs = single_gpu_test(model, data_loader)\n", + "\n", + "eval_config = cfg.evaluation\n", + "eval_config.pop('interval')\n", + "eval_res = dataset.evaluate(outputs, **eval_config)\n", + "for name, val in eval_res.items():\n", + " print(f'{name}: {val:.04f}')" + ] + } + ], + "metadata": { + "accelerator": "GPU", + "colab": { + "collapsed_sections": [], + "include_colab_link": true, + "name": "MMAction2 Tutorial.ipynb", + "provenance": [], + "toc_visible": true + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 } diff --git a/demo/mmaction2_tutorial_zh-CN.ipynb b/demo/mmaction2_tutorial_zh-CN.ipynb index 7c03cadc54..28940ce931 100644 --- a/demo/mmaction2_tutorial_zh-CN.ipynb +++ b/demo/mmaction2_tutorial_zh-CN.ipynb @@ -344,7 +344,11 @@ "# 选择视频进行推理\n", "video = 'demo/demo.mp4'\n", "label = 'tools/data/kinetics/label_map_k400.txt'\n", - "results = inference_recognizer(model, video, label)" + "results = inference_recognizer(model, video)\n", + "\n", + "labels = open(label).readlines()\n", + "labels = [x.strip() for x in labels]\n", + "results = [(labels[k[0]], k[1]) for k in results]" ] }, { @@ -1390,7 +1394,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.10" + "version": "3.7.4" }, "widgets": { "application/vnd.jupyter.widget-state+json": { From 4370f294b0707d8f62bd49545649edb7ed08d4a7 Mon Sep 17 
00:00:00 2001 From: Haodong Duan Date: Tue, 9 Nov 2021 19:35:41 +0800 Subject: [PATCH 291/414] [Fix] Update map location --- demo/demo_spatiotemporal_det.py | 2 +- demo/demo_video_structuralize.py | 8 ++++---- demo/webcam_demo_spatiotemporal_det.py | 2 +- mmaction/apis/inference.py | 2 +- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/demo/demo_spatiotemporal_det.py b/demo/demo_spatiotemporal_det.py index bbaa06e0fb..38bb533237 100644 --- a/demo/demo_spatiotemporal_det.py +++ b/demo/demo_spatiotemporal_det.py @@ -352,7 +352,7 @@ def main(): config.model.backbone.pretrained = None model = build_detector(config.model, test_cfg=config.get('test_cfg')) - load_checkpoint(model, args.checkpoint, map_location=args.device) + load_checkpoint(model, args.checkpoint, map_location='cpu') model.to(args.device) model.eval() diff --git a/demo/demo_video_structuralize.py b/demo/demo_video_structuralize.py index 6b8ec31323..48fc8154ea 100644 --- a/demo/demo_video_structuralize.py +++ b/demo/demo_video_structuralize.py @@ -472,7 +472,7 @@ def skeleton_based_action_recognition(args, pose_results, num_frame, h, w): # Build skeleton-based recognition model skeleton_model = build_model(skeleton_config.model) load_checkpoint( - skeleton_model, args.skeleton_checkpoint, map_location=args.device) + skeleton_model, args.skeleton_checkpoint, map_location='cpu') skeleton_model.to(args.device) skeleton_model.eval() @@ -490,7 +490,7 @@ def rgb_based_action_recognition(args): rgb_config.model.backbone.pretrained = None rgb_model = build_recognizer( rgb_config.model, test_cfg=rgb_config.get('test_cfg')) - load_checkpoint(rgb_model, args.rgb_checkpoint, map_location=args.device) + load_checkpoint(rgb_model, args.rgb_checkpoint, map_location='cpu') rgb_model.cfg = rgb_config rgb_model.to(args.device) rgb_model.eval() @@ -515,7 +515,7 @@ def skeleton_based_stdet(args, label_map, human_detections, pose_results, load_checkpoint( skeleton_stdet_model, args.skeleton_stdet_checkpoint, - map_location=args.device) + map_location='cpu') skeleton_stdet_model.to(args.device) skeleton_stdet_model.eval() @@ -633,7 +633,7 @@ def rgb_based_stdet(args, frames, label_map, human_detections, w, h, new_w, rgb_stdet_config.model, test_cfg=rgb_stdet_config.get('test_cfg')) load_checkpoint( - rgb_stdet_model, args.rgb_stdet_checkpoint, map_location=args.device) + rgb_stdet_model, args.rgb_stdet_checkpoint, map_location='cpu') rgb_stdet_model.to(args.device) rgb_stdet_model.eval() diff --git a/demo/webcam_demo_spatiotemporal_det.py b/demo/webcam_demo_spatiotemporal_det.py index ae3ab2c59e..0f473578db 100644 --- a/demo/webcam_demo_spatiotemporal_det.py +++ b/demo/webcam_demo_spatiotemporal_det.py @@ -290,7 +290,7 @@ def __init__(self, config, checkpoint, device, score_thr, label_map_path): # load model config.model.backbone.pretrained = None model = build_detector(config.model, test_cfg=config.get('test_cfg')) - load_checkpoint(model, checkpoint, map_location=device) + load_checkpoint(model, checkpoint, map_location='cpu') model.to(device) model.eval() self.model = model diff --git a/mmaction/apis/inference.py b/mmaction/apis/inference.py index c9e82f9a22..24f8318b0a 100644 --- a/mmaction/apis/inference.py +++ b/mmaction/apis/inference.py @@ -46,7 +46,7 @@ def init_recognizer(config, checkpoint=None, device='cuda:0', **kwargs): model = build_recognizer(config.model, test_cfg=config.get('test_cfg')) if checkpoint is not None: - load_checkpoint(model, checkpoint, map_location=device) + load_checkpoint(model, checkpoint, 
map_location='cpu') model.cfg = config model.to(device) model.eval() From 2f7a2bc6f7842af853f3b3ed164e28f6efd6d1db Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Thu, 18 Nov 2021 21:23:14 +0800 Subject: [PATCH 292/414] [Doc] Add MMFlow (#1273) --- README.md | 11 ++++++----- README_zh-CN.md | 11 ++++++----- docs/conf.py | 4 ++++ docs_zh_CN/conf.py | 4 ++++ 4 files changed, 20 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index fc3aa0ffbf..453b2e59f0 100644 --- a/README.md +++ b/README.md @@ -258,13 +258,14 @@ We wish that the toolbox and benchmark could serve the growing research communit - [MMCV](https://github.com/open-mmlab/mmcv): OpenMMLab foundational library for computer vision. - [MIM](https://github.com/open-mmlab/mim): MIM Installs OpenMMLab Packages. +- [MMAction2](https://github.com/open-mmlab/mmaction2): OpenMMLab's next-generation video understanding toolbox and benchmark. - [MMClassification](https://github.com/open-mmlab/mmclassification): OpenMMLab image classification toolbox and benchmark. - [MMDetection](https://github.com/open-mmlab/mmdetection): OpenMMLab detection toolbox and benchmark. - [MMDetection3D](https://github.com/open-mmlab/mmdetection3d): OpenMMLab's next-generation platform for general 3D object detection. -- [MMSegmentation](https://github.com/open-mmlab/mmsegmentation): OpenMMLab semantic segmentation toolbox and benchmark. -- [MMAction2](https://github.com/open-mmlab/mmaction2): OpenMMLab's next-generation video understanding toolbox and benchmark. -- [MMTracking](https://github.com/open-mmlab/mmtracking): OpenMMLab video perception toolbox and benchmark. -- [MMPose](https://github.com/open-mmlab/mmpose): OpenMMLab pose estimation toolbox and benchmark. - [MMEditing](https://github.com/open-mmlab/mmediting): OpenMMLab image and video editing toolbox. -- [MMOCR](https://github.com/open-mmlab/mmocr): A Comprehensive Toolbox for Text Detection, Recognition and Understanding. +- [MMFlow](https://github.com/open-mmlab/mmflow): OpenMMLab Optical Flow Toolbox and Benchmark. - [MMGeneration](https://github.com/open-mmlab/mmgeneration): OpenMMLab image and video generative models toolbox. +- [MMOCR](https://github.com/open-mmlab/mmocr): A Comprehensive Toolbox for Text Detection, Recognition and Understanding. +- [MMPose](https://github.com/open-mmlab/mmpose): OpenMMLab pose estimation toolbox and benchmark. +- [MMSegmentation](https://github.com/open-mmlab/mmsegmentation): OpenMMLab semantic segmentation toolbox and benchmark. +- [MMTracking](https://github.com/open-mmlab/mmtracking): OpenMMLab video perception toolbox and benchmark. 
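The `map_location='cpu'` change in [PATCH 291/414] above follows a standard PyTorch checkpoint-loading pattern: a checkpoint saved on a particular GPU stores device tags such as `cuda:3`, so remapping all tensors to CPU at load time keeps loading robust on machines that lack that device, and the model is then moved to the target device explicitly. Below is a minimal, self-contained sketch of the idea; the toy model and the `checkpoint.pth` path are placeholders, and plain `torch.save`/`torch.load` stand in for mmcv's `load_checkpoint` helper, which forwards `map_location` to `torch.load` internally.

```python
import torch
import torch.nn as nn

# Toy stand-in for a recognizer built from a config.
model = nn.Sequential(nn.Linear(8, 16), nn.ReLU(), nn.Linear(16, 2))

# Save a checkpoint in the usual {'state_dict': ...} layout.
torch.save({'state_dict': model.state_dict()}, 'checkpoint.pth')

# map_location='cpu' remaps every stored tensor to CPU, so a checkpoint
# written on, say, 'cuda:3' still loads on a machine without that GPU.
ckpt = torch.load('checkpoint.pth', map_location='cpu')
model.load_state_dict(ckpt['state_dict'])

# Move the model to whatever device is actually available, then switch to
# inference mode, mirroring the `model.to(device); model.eval()` calls in
# the patched demos.
device = 'cuda:0' if torch.cuda.is_available() else 'cpu'
model = model.to(device)
model.eval()
```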
diff --git a/README_zh-CN.md b/README_zh-CN.md index 3584bc51f2..e9ee522b9d 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -252,16 +252,17 @@ MMAction2 是一款由不同学校和公司共同贡献的开源项目。我们 - [MMCV](https://github.com/open-mmlab/mmcv): OpenMMLab 计算机视觉基础库 - [MIM](https://github.com/open-mmlab/mim): MIM 是 OpenMMlab 项目、算法、模型的统一入口 +- [MMAction2](https://github.com/open-mmlab/mmaction2): OpenMMLab 新一代视频理解工具箱与测试基准 - [MMClassification](https://github.com/open-mmlab/mmclassification): OpenMMLab 图像分类工具箱与测试基准 - [MMDetection](https://github.com/open-mmlab/mmdetection): OpenMMLab 检测工具箱与测试基准 - [MMDetection3D](https://github.com/open-mmlab/mmdetection3d): OpenMMLab 新一代通用3D目标检测平台 -- [MMSegmentation](https://github.com/open-mmlab/mmsegmentation): OpenMMLab 语义分割工具箱与测试基准 -- [MMAction2](https://github.com/open-mmlab/mmaction2): OpenMMLab 新一代视频理解工具箱与测试基准 -- [MMTracking](https://github.com/open-mmlab/mmtracking): OpenMMLab 一体化视频目标感知平台 -- [MMPose](https://github.com/open-mmlab/mmpose): OpenMMLab 姿态估计工具箱与测试基准 - [MMEditing](https://github.com/open-mmlab/mmediting): OpenMMLab 图像视频编辑工具箱 -- [MMOCR](https://github.com/open-mmlab/mmocr): OpenMMLab 全流程文字检测识别理解工具包 +- [MMFlow](https://github.com/open-mmlab/mmflow): OpenMMLab 光流估计工具箱 - [MMGeneration](https://github.com/open-mmlab/mmgeneration): OpenMMLab 图片视频生成模型工具箱 +- [MMOCR](https://github.com/open-mmlab/mmocr): OpenMMLab 全流程文字检测识别理解工具包 +- [MMPose](https://github.com/open-mmlab/mmpose): OpenMMLab 姿态估计工具箱与测试基准 +- [MMSegmentation](https://github.com/open-mmlab/mmsegmentation): OpenMMLab 语义分割工具箱与测试基准 +- [MMTracking](https://github.com/open-mmlab/mmtracking): OpenMMLab 一体化视频目标感知平台 ## 欢迎加入 OpenMMLab 社区 diff --git a/docs/conf.py b/docs/conf.py index f8275be40f..341dedbf33 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -140,6 +140,10 @@ def get_version(): 'name': 'MMEditing', 'url': 'https://mmediting.readthedocs.io/en/latest/', }, + { + 'name': 'MMFlow', + 'url': 'https://mmflow.readthedocs.io/en/latest/', + }, { 'name': 'MMGeneration', 'url': 'https://mmgeneration.readthedocs.io/en/latest/', diff --git a/docs_zh_CN/conf.py b/docs_zh_CN/conf.py index a04e175cd8..e45875b79b 100644 --- a/docs_zh_CN/conf.py +++ b/docs_zh_CN/conf.py @@ -134,6 +134,10 @@ def get_version(): 'name': 'MMEditing', 'url': 'https://mmediting.readthedocs.io/zh_CN/latest/', }, + { + 'name': 'MMFlow', + 'url': 'https://mmflow.readthedocs.io/zh_CN/latest/', + }, { 'name': 'MMGeneration', 'url': 'https://mmgeneration.readthedocs.io/zh_CN/latest/', From 2b6f9ac69b3609b96a514501ffe30fc90545f518 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Fri, 19 Nov 2021 13:48:57 +0800 Subject: [PATCH 293/414] update --- configs/recognition/csn/metafile.yml | 6 ------ configs/skeleton/stgcn/metafile.yml | 12 ++++++------ 2 files changed, 6 insertions(+), 12 deletions(-) diff --git a/configs/recognition/csn/metafile.yml b/configs/recognition/csn/metafile.yml index ece1f28043..408e11944f 100644 --- a/configs/recognition/csn/metafile.yml +++ b/configs/recognition/csn/metafile.yml @@ -95,7 +95,6 @@ Models: Top 5 Accuracy: 95.3 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-c3be9793.pth - inference_time(video/s): x - Config: configs/recognition/csn/ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py In Collection: CSN Metadata: @@ -118,7 +117,6 @@ Models: Top 5 Accuracy: 93.5 Task: Action Recognition Weights: 
https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_sports1m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-3367437a.pth - inference_time(video/s): x - Config: configs/recognition/csn/ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py In Collection: CSN Metadata: @@ -141,7 +139,6 @@ Models: Top 5 Accuracy: 92.1 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_from_scratch_r152_32x2x1_180e_kinetics400_rgb_20210617-5c933ae1.pth - inference_time(video/s): x - Config: configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py In Collection: CSN Metadata: @@ -164,7 +161,6 @@ Models: Top 5 Accuracy: 94.2 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_ig65m_pretrained_r50_32x2x1_58e_kinetics400_rgb_20210617-86d33018.pth - inference_time(video/s): x - Config: configs/recognition/csn/ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py In Collection: CSN Metadata: @@ -187,7 +183,6 @@ Models: Top 5 Accuracy: 93.0 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_sports1m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-b9b10241.pth - inference_time(video/s): x - Config: configs/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb.py In Collection: CSN Metadata: @@ -207,4 +202,3 @@ Models: top5 accuracy: 91.3 Task: Action Recognition Weights: https://download.openmmlab.com/mmaction/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb_20210618-4e29e2e8.pth - inference_time(video/s): x diff --git a/configs/skeleton/stgcn/metafile.yml b/configs/skeleton/stgcn/metafile.yml index 7012930b43..61f4d8a0a9 100644 --- a/configs/skeleton/stgcn/metafile.yml +++ b/configs/skeleton/stgcn/metafile.yml @@ -9,13 +9,13 @@ Models: Batch Size: 16 Epochs: 80 Parameters: 3088704 - Training Data: NTU60 + Training Data: NTU60-XSub Training Resources: 2 GPUs Name: stgcn_80e_ntu60_xsub_keypoint Results: - Dataset: NTU60 + Dataset: NTU60-XSub Metrics: - mean Top 1 Accuracy: 86.91 + Top 1 Accuracy: 86.91 Task: Skeleton-based Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint.json Training Log: https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint.log @@ -27,13 +27,13 @@ Models: Batch Size: 32 Epochs: 80 Parameters: 3088704 - Training Data: NTU60 + Training Data: NTU60-XSub Training Resources: 1 GPU Name: stgcn_80e_ntu60_xsub_keypoint_3d Results: - Dataset: NTU60 + Dataset: NTU60-XSub Metrics: - mean Top 1 Accuracy: 84.61 + Top 1 Accuracy: 84.61 Task: Skeleton-based Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d.json Training Log: https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d.log From 5ba44b4ae45bfd26ffdd5def8510f33fc20fde11 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Tue, 23 Nov 2021 21:51:09 +0800 Subject: [PATCH 294/414] [Fix] init_random_seed --- mmaction/apis/__init__.py | 4 ++-- mmaction/apis/train.py | 35 +++++++++++++++++++++++++++++++++++ tools/train.py | 15 ++++++++------- 3 files changed, 45 insertions(+), 9 deletions(-) diff --git a/mmaction/apis/__init__.py 
b/mmaction/apis/__init__.py index 8a68055e70..15961080e7 100644 --- a/mmaction/apis/__init__.py +++ b/mmaction/apis/__init__.py @@ -1,9 +1,9 @@ # Copyright (c) OpenMMLab. All rights reserved. from .inference import inference_recognizer, init_recognizer from .test import multi_gpu_test, single_gpu_test -from .train import train_model +from .train import init_random_seed, train_model __all__ = [ 'train_model', 'init_recognizer', 'inference_recognizer', 'multi_gpu_test', - 'single_gpu_test' + 'single_gpu_test', 'init_random_seed' ] diff --git a/mmaction/apis/train.py b/mmaction/apis/train.py index 54b1fb0163..f04ed6c712 100644 --- a/mmaction/apis/train.py +++ b/mmaction/apis/train.py @@ -2,7 +2,9 @@ import copy as cp import os.path as osp +import numpy as np import torch +import torch.distributed as dist from mmcv.parallel import MMDataParallel, MMDistributedDataParallel from mmcv.runner import (DistSamplerSeedHook, EpochBasedRunner, OptimizerHook, build_optimizer, get_dist_info) @@ -15,6 +17,39 @@ from .test import multi_gpu_test +def init_random_seed(seed=None, device='cuda'): + """Initialize random seed. + + If the seed is not set, the seed will be automatically randomized, + and then broadcast to all processes to prevent some potential bugs. + Args: + seed (int, Optional): The seed. Default to None. + device (str): The device where the seed will be put on. + Default to 'cuda'. + Returns: + int: Seed to be used. + """ + if seed is not None: + return seed + + # Make sure all ranks share the same random seed to prevent + # some potential bugs. Please refer to + # https://github.com/open-mmlab/mmdetection/issues/6339 + rank, world_size = get_dist_info() + seed = np.random.randint(2**31) + + if world_size == 1: + return seed + + if rank == 0: + random_num = torch.tensor(seed, dtype=torch.int32, device=device) + else: + random_num = torch.tensor(0, dtype=torch.int32, device=device) + + dist.broadcast(random_num, src=0) + return random_num.item() + + def train_model(model, dataset, cfg, diff --git a/tools/train.py b/tools/train.py index 705473665e..e871c53088 100644 --- a/tools/train.py +++ b/tools/train.py @@ -13,7 +13,7 @@ from mmcv.utils import get_git_hash from mmaction import __version__ -from mmaction.apis import train_model +from mmaction.apis import init_random_seed, train_model from mmaction.datasets import build_dataset from mmaction.models import build_model from mmaction.utils import collect_env, get_root_logger, register_module_hooks @@ -143,12 +143,13 @@ def main(): logger.info(f'Config: {cfg.pretty_text}') # set random seeds - if args.seed is not None: - logger.info(f'Set random seed to {args.seed}, ' - f'deterministic: {args.deterministic}') - set_random_seed(args.seed, deterministic=args.deterministic) - cfg.seed = args.seed - meta['seed'] = args.seed + seed = init_random_seed(args.seed) + logger.info(f'Set random seed to {seed}, ' + f'deterministic: {args.deterministic}') + set_random_seed(seed, deterministic=args.deterministic) + + cfg.seed = seed + meta['seed'] = seed meta['config_name'] = osp.basename(args.config) meta['work_dir'] = osp.basename(cfg.work_dir.rstrip('/\\')) From 3e173d18400e394fc5237148de1393bf3db020da Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Tue, 23 Nov 2021 22:38:31 +0800 Subject: [PATCH 295/414] [Doc] Update readme (#1286) * update skeleton-based * update ava * update localization * update recognition * Update README.md * Update README.md * Update README.md * Update README.md * Update README.md --- configs/detection/acrn/README.md | 13 +++++++- 
configs/detection/ava/README.md | 14 +++++++- configs/detection/lfb/README.md | 25 ++++++++++++++- configs/localization/bmn/README.md | 13 +++++++- configs/localization/bsn/README.md | 13 +++++++- configs/localization/ssn/README.md | 13 +++++++- configs/recognition/c3d/README.md | 13 +++++++- configs/recognition/csn/README.md | 13 +++++++- configs/recognition/i3d/README.md | 13 +++++++- configs/recognition/omnisource/README.md | 39 +++++++++++++---------- configs/recognition/r2plus1d/README.md | 13 +++++++- configs/recognition/slowfast/README.md | 13 +++++++- configs/recognition/slowonly/README.md | 13 +++++++- configs/recognition/tanet/README.md | 13 +++++++- configs/recognition/timesformer/README.md | 13 +++++++- configs/recognition/tin/README.md | 13 +++++++- configs/recognition/tpn/README.md | 13 +++++++- configs/recognition/trn/README.md | 13 +++++++- configs/recognition/tsm/README.md | 13 +++++++- configs/recognition/tsn/README.md | 13 +++++++- configs/recognition/x3d/README.md | 13 +++++++- configs/skeleton/posec3d/README.md | 13 +++++++- configs/skeleton/stgcn/README.md | 13 +++++++- 23 files changed, 300 insertions(+), 38 deletions(-) diff --git a/configs/detection/acrn/README.md b/configs/detection/acrn/README.md index 125184a28d..4f34bec2c4 100644 --- a/configs/detection/acrn/README.md +++ b/configs/detection/acrn/README.md @@ -1,6 +1,17 @@ # ACRN -## Introduction +## Abstract + + + +Current state-of-the-art approaches for spatio-temporal action localization rely on detections at the frame level and model temporal context with 3D ConvNets. Here, we go one step further and model spatio-temporal relations to capture the interactions between human actors, relevant objects and scene elements essential to differentiate similar human actions. Our approach is weakly supervised and mines the relevant elements automatically with an actor-centric relational network (ACRN). ACRN computes and accumulates pair-wise relation information from actor and global scene features, and generates relation features for action classification. It is implemented as neural networks and can be trained jointly with an existing action detection system. We show that ACRN outperforms alternative approaches which capture relation information, and that the proposed framework improves upon the state-of-the-art performance on JHMDB and AVA. A visualization of the learned relation features confirms that our approach is able to attend to the relevant relations for each action. + + +
+<div align=center>
+<img src="..." width="800"/>
+</div>
+
+## Citation

diff --git a/configs/detection/ava/README.md b/configs/detection/ava/README.md
--- a/configs/detection/ava/README.md
+++ b/configs/detection/ava/README.md
@@ -1,6 +1,18 @@
 # AVA
    -## Introduction +## Abstract + + + +This paper introduces a video dataset of spatio-temporally localized Atomic Visual Actions (AVA). The AVA dataset densely annotates 80 atomic visual actions in 430 15-minute video clips, where actions are localized in space and time, resulting in 1.58M action labels with multiple labels per person occurring frequently. The key characteristics of our dataset are: (1) the definition of atomic visual actions, rather than composite actions; (2) precise spatio-temporal annotations with possibly multiple annotations for each person; (3) exhaustive annotation of these atomic actions over 15-minute video clips; (4) people temporally linked across consecutive segments; and (5) using movies to gather a varied set of action representations. This departs from existing datasets for spatio-temporal action recognition, which typically provide sparse annotations for composite actions in short video clips. We will release the dataset publicly. +AVA, with its realistic scene and action complexity, exposes the intrinsic difficulty of action recognition. To benchmark this, we present a novel approach for action localization that builds upon the current state-of-the-art methods, and demonstrates better performance on JHMDB and UCF101-24 categories. While setting a new state of the art on existing datasets, the overall results on AVA are low at 15.6% mAP, underscoring the need for developing new approaches for video understanding. + + +
+<div align=center>
+<img src="..." width="800"/>
+</div>
    + +## Citation diff --git a/configs/detection/lfb/README.md b/configs/detection/lfb/README.md index 9f71cbf785..2bd9a2a233 100644 --- a/configs/detection/lfb/README.md +++ b/configs/detection/lfb/README.md @@ -1,6 +1,29 @@ # LFB -## Introduction +## Abstract + + + +To understand the world, we humans constantly need to relate the present to the past, and put events in context. In this paper, we enable existing video models to do the same. We propose a long-term feature bank---supportive information extracted over the entire span of a video---to augment state-of-the-art video models that otherwise would only view short clips of 2-5 seconds. Our experiments demonstrate that augmenting 3D convolutional networks with a long-term feature bank yields state-of-the-art results on three challenging video datasets: AVA, EPIC-Kitchens, and Charades. + + +
+<div align=center>
+<img src="..." width="800"/>
+</div>
    + +## Citation + + + +```BibTeX +@inproceedings{gu2018ava, + title={Ava: A video dataset of spatio-temporally localized atomic visual actions}, + author={Gu, Chunhui and Sun, Chen and Ross, David A and Vondrick, Carl and Pantofaru, Caroline and Li, Yeqing and Vijayanarasimhan, Sudheendra and Toderici, George and Ricco, Susanna and Sukthankar, Rahul and others}, + booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition}, + pages={6047--6056}, + year={2018} +} +``` diff --git a/configs/localization/bmn/README.md b/configs/localization/bmn/README.md index 3042ded98e..43147c2109 100644 --- a/configs/localization/bmn/README.md +++ b/configs/localization/bmn/README.md @@ -1,6 +1,17 @@ # BMN -## Introduction +## Abstract + + + +Temporal action proposal generation is an challenging and promising task which aims to locate temporal regions in real-world videos where action or event may occur. Current bottom-up proposal generation methods can generate proposals with precise boundary, but cannot efficiently generate adequately reliable confidence scores for retrieving proposals. To address these difficulties, we introduce the Boundary-Matching (BM) mechanism to evaluate confidence scores of densely distributed proposals, which denote a proposal as a matching pair of starting and ending boundaries and combine all densely distributed BM pairs into the BM confidence map. Based on BM mechanism, we propose an effective, efficient and end-to-end proposal generation method, named Boundary-Matching Network (BMN), which generates proposals with precise temporal boundaries as well as reliable confidence scores simultaneously. The two-branches of BMN are jointly trained in an unified framework. We conduct experiments on two challenging datasets: THUMOS-14 and ActivityNet-1.3, where BMN shows significant performance improvement with remarkable efficiency and generalizability. Further, combining with existing action classifier, BMN can achieve state-of-the-art temporal action detection performance. + + +
+<div align=center>
+<img src="..." width="800"/>
+</div>
    + +## Citation diff --git a/configs/localization/bsn/README.md b/configs/localization/bsn/README.md index 7c40b4d6af..d15b6361c7 100644 --- a/configs/localization/bsn/README.md +++ b/configs/localization/bsn/README.md @@ -1,6 +1,17 @@ # BSN -## Introduction +## Abstract + + + +Temporal action proposal generation is an important yet challenging problem, since temporal proposals with rich action content are indispensable for analysing real-world videos with long duration and high proportion irrelevant content. This problem requires methods not only generating proposals with precise temporal boundaries, but also retrieving proposals to cover truth action instances with high recall and high overlap using relatively fewer proposals. To address these difficulties, we introduce an effective proposal generation method, named Boundary-Sensitive Network (BSN), which adopts "local to global" fashion. Locally, BSN first locates temporal boundaries with high probabilities, then directly combines these boundaries as proposals. Globally, with Boundary-Sensitive Proposal feature, BSN retrieves proposals by evaluating the confidence of whether a proposal contains an action within its region. We conduct experiments on two challenging datasets: ActivityNet-1.3 and THUMOS14, where BSN outperforms other state-of-the-art temporal action proposal generation methods with high recall and high temporal precision. Finally, further experiments demonstrate that by combining existing action classifiers, our method significantly improves the state-of-the-art temporal action detection performance. + + +
+<div align=center>
+<img src="..." width="800"/>
+</div>
    + +## Citation diff --git a/configs/localization/ssn/README.md b/configs/localization/ssn/README.md index 8e4c606538..c5e5dc09fa 100644 --- a/configs/localization/ssn/README.md +++ b/configs/localization/ssn/README.md @@ -1,6 +1,17 @@ # SSN -## Introduction +## Abstract + + + +Detecting actions in untrimmed videos is an important yet challenging task. In this paper, we present the structured segment network (SSN), a novel framework which models the temporal structure of each action instance via a structured temporal pyramid. On top of the pyramid, we further introduce a decomposed discriminative model comprising two classifiers, respectively for classifying actions and determining completeness. This allows the framework to effectively distinguish positive proposals from background or incomplete ones, thus leading to both accurate recognition and localization. These components are integrated into a unified network that can be efficiently trained in an end-to-end fashion. Additionally, a simple yet effective temporal action proposal scheme, dubbed temporal actionness grouping (TAG) is devised to generate high quality action proposals. On two challenging benchmarks, THUMOS14 and ActivityNet, our method remarkably outperforms previous state-of-the-art methods, demonstrating superior accuracy and strong adaptivity in handling actions with various temporal structures. + + +
+<div align=center>
+<img src="..." width="800"/>
+</div>
    + +## Citation diff --git a/configs/recognition/c3d/README.md b/configs/recognition/c3d/README.md index 066af1f100..067097fdbe 100644 --- a/configs/recognition/c3d/README.md +++ b/configs/recognition/c3d/README.md @@ -1,6 +1,17 @@ # C3D -## Introduction +## Abstract + + + +We propose a simple, yet effective approach for spatiotemporal feature learning using deep 3-dimensional convolutional networks (3D ConvNets) trained on a large scale supervised video dataset. Our findings are three-fold: 1) 3D ConvNets are more suitable for spatiotemporal feature learning compared to 2D ConvNets; 2) A homogeneous architecture with small 3x3x3 convolution kernels in all layers is among the best performing architectures for 3D ConvNets; and 3) Our learned features, namely C3D (Convolutional 3D), with a simple linear classifier outperform state-of-the-art methods on 4 different benchmarks and are comparable with current best methods on the other 2 benchmarks. In addition, the features are compact: achieving 52.8% accuracy on UCF101 dataset with only 10 dimensions and also very efficient to compute due to the fast inference of ConvNets. Finally, they are conceptually very simple and easy to train and use. + + +
+<div align=center>
+<img src="..." width="800"/>
+</div>
    + +## Citation diff --git a/configs/recognition/csn/README.md b/configs/recognition/csn/README.md index 32d3e240bc..3a48f6bbda 100644 --- a/configs/recognition/csn/README.md +++ b/configs/recognition/csn/README.md @@ -1,6 +1,17 @@ # CSN -## Introduction +## Abstract + + + +Group convolution has been shown to offer great computational savings in various 2D convolutional architectures for image classification. It is natural to ask: 1) if group convolution can help to alleviate the high computational cost of video classification networks; 2) what factors matter the most in 3D group convolutional networks; and 3) what are good computation/accuracy trade-offs with 3D group convolutional networks. This paper studies the effects of different design choices in 3D group convolutional networks for video classification. We empirically demonstrate that the amount of channel interactions plays an important role in the accuracy of 3D group convolutional networks. Our experiments suggest two main findings. First, it is a good practice to factorize 3D convolutions by separating channel interactions and spatiotemporal interactions as this leads to improved accuracy and lower computational cost. Second, 3D channel-separated convolutions provide a form of regularization, yielding lower training accuracy but higher test accuracy compared to 3D convolutions. These two empirical findings lead us to design an architecture -- Channel-Separated Convolutional Network (CSN) -- which is simple, efficient, yet accurate. On Sports1M, Kinetics, and Something-Something, our CSNs are comparable with or better than the state-of-the-art while being 2-3 times more efficient. + + +
+<div align=center>
+<img src="..." width="800"/>
+</div>
    + +## Citation diff --git a/configs/recognition/i3d/README.md b/configs/recognition/i3d/README.md index d7bc5fe1ff..5a2bfd7a33 100644 --- a/configs/recognition/i3d/README.md +++ b/configs/recognition/i3d/README.md @@ -1,6 +1,17 @@ # I3D -## Introduction +## Abstract + + + +The paucity of videos in current action classification datasets (UCF-101 and HMDB-51) has made it difficult to identify good video architectures, as most methods obtain similar performance on existing small-scale benchmarks. This paper re-evaluates state-of-the-art architectures in light of the new Kinetics Human Action Video dataset. Kinetics has two orders of magnitude more data, with 400 human action classes and over 400 clips per class, and is collected from realistic, challenging YouTube videos. We provide an analysis on how current architectures fare on the task of action classification on this dataset and how much performance improves on the smaller benchmark datasets after pre-training on Kinetics. We also introduce a new Two-Stream Inflated 3D ConvNet (I3D) that is based on 2D ConvNet inflation: filters and pooling kernels of very deep image classification ConvNets are expanded into 3D, making it possible to learn seamless spatio-temporal feature extractors from video while leveraging successful ImageNet architecture designs and even their parameters. We show that, after pre-training on Kinetics, I3D models considerably improve upon the state-of-the-art in action classification, reaching 80.9% on HMDB-51 and 98.0% on UCF-101. + + +
+<div align=center>
+<img src="..." width="800"/>
+</div>
    + +## Citation diff --git a/configs/recognition/omnisource/README.md b/configs/recognition/omnisource/README.md index 3691fdb6c8..f354c0401f 100644 --- a/configs/recognition/omnisource/README.md +++ b/configs/recognition/omnisource/README.md @@ -4,7 +4,29 @@ In ECCV, 2020. [Paper](https://arxiv.org/abs/2003.13042), [Dataset](https://docs.google.com/forms/d/e/1FAIpQLSd8_GlmHzG8FcDbW-OEu__G7qLgOSYZpH-i5vYVJcu7wcb_TQ/viewform?usp=sf_link) -![pipeline](https://github.com/open-mmlab/mmaction2/blob/master/configs/recognition/omnisource/pipeline.png?raw=true) +## Abstract + + + +We introduce OmniSource, a novel framework for leveraging web data to train video recognition models. OmniSource overcomes the barriers between data formats, such as images, short videos, and long untrimmed videos for webly-supervised learning. First, data samples with multiple formats, curated by task-specific data collection and automatically filtered by a teacher model, are transformed into a unified form. Then a joint-training strategy is proposed to deal with the domain gaps between multiple data sources and formats in webly-supervised learning. Several good practices, including data balancing, resampling, and cross-dataset mixup are adopted in joint training. Experiments show that by utilizing data from multiple sources and formats, OmniSource is more data-efficient in training. With only 3.5M images and 800K minutes videos crawled from the internet without human labeling (less than 2% of prior works), our models learned with OmniSource improve Top-1 accuracy of 2D- and 3D-ConvNet baseline models by 3.0% and 3.9%, respectively, on the Kinetics-400 benchmark. With OmniSource, we establish new records with different pretraining strategies for video recognition. Our best models achieve 80.4%, 80.5%, and 83.6 Top-1 accuracies on the Kinetics-400 benchmark respectively for training-from-scratch, ImageNet pre-training and IG-65M pre-training. + + +
+<div align=center>
+<img src="..." width="800"/>
+</div>
    + +## Citation + + + +```BibTeX +@article{duan2020omni, + title={Omni-sourced Webly-supervised Learning for Video Recognition}, + author={Duan, Haodong and Zhao, Yue and Xiong, Yuanjun and Liu, Wentao and Lin, Dahua}, + journal={arXiv preprint arXiv:2003.13042}, + year={2020} +} +``` ## Model Zoo @@ -55,18 +77,3 @@ We also list the benchmark in the original paper which run on Kinetics-400 for c | :--------------------: | :---------: | :---------: | :----------: | :---------: | :---------: | :---------: | | TSN-3seg-ResNet50 | 70.6 / 89.4 | 71.5 / 89.5 | 72.0 / 90.0 | 72.0 / 90.3 | 71.7 / 89.6 | 73.6 / 91.0 | | SlowOnly-4x16-ResNet50 | 73.8 / 90.9 | 74.5 / 91.4 | 75.2 / 91.6 | 75.2 / 91.7 | 74.5 / 91.1 | 76.6 / 92.5 | - -## Citing OmniSource - -If you find OmniSource useful for your research, please consider citing the paper using the following BibTeX entry. - - - -```BibTeX -@article{duan2020omni, - title={Omni-sourced Webly-supervised Learning for Video Recognition}, - author={Duan, Haodong and Zhao, Yue and Xiong, Yuanjun and Liu, Wentao and Lin, Dahua}, - journal={arXiv preprint arXiv:2003.13042}, - year={2020} -} -``` diff --git a/configs/recognition/r2plus1d/README.md b/configs/recognition/r2plus1d/README.md index 0ed3e0241a..f9cd05cca1 100644 --- a/configs/recognition/r2plus1d/README.md +++ b/configs/recognition/r2plus1d/README.md @@ -1,6 +1,17 @@ # R2plus1D -## Introduction +## Abstract + + + +In this paper we discuss several forms of spatiotemporal convolutions for video analysis and study their effects on action recognition. Our motivation stems from the observation that 2D CNNs applied to individual frames of the video have remained solid performers in action recognition. In this work we empirically demonstrate the accuracy advantages of 3D CNNs over 2D CNNs within the framework of residual learning. Furthermore, we show that factorizing the 3D convolutional filters into separate spatial and temporal components yields significantly advantages in accuracy. Our empirical study leads to the design of a new spatiotemporal convolutional block "R(2+1)D" which gives rise to CNNs that achieve results comparable or superior to the state-of-the-art on Sports-1M, Kinetics, UCF101 and HMDB51. + + +
+<div align=center>
+<img src="..." width="800"/>
+</div>
    + +## Citation diff --git a/configs/recognition/slowfast/README.md b/configs/recognition/slowfast/README.md index f7cad0827d..0e6d2119c0 100644 --- a/configs/recognition/slowfast/README.md +++ b/configs/recognition/slowfast/README.md @@ -1,6 +1,17 @@ # SlowFast -## Introduction +## Abstract + + + +We present SlowFast networks for video recognition. Our model involves (i) a Slow pathway, operating at low frame rate, to capture spatial semantics, and (ii) a Fast pathway, operating at high frame rate, to capture motion at fine temporal resolution. The Fast pathway can be made very lightweight by reducing its channel capacity, yet can learn useful temporal information for video recognition. Our models achieve strong performance for both action classification and detection in video, and large improvements are pin-pointed as contributions by our SlowFast concept. We report state-of-the-art accuracy on major video recognition benchmarks, Kinetics, Charades and AVA. + + +
+<div align=center>
+<img src="..." width="800"/>
+</div>
    + +## Citation diff --git a/configs/recognition/slowonly/README.md b/configs/recognition/slowonly/README.md index 974355056a..403a535813 100644 --- a/configs/recognition/slowonly/README.md +++ b/configs/recognition/slowonly/README.md @@ -1,6 +1,17 @@ # SlowOnly -## Introduction +## Abstract + + + +We present SlowFast networks for video recognition. Our model involves (i) a Slow pathway, operating at low frame rate, to capture spatial semantics, and (ii) a Fast pathway, operating at high frame rate, to capture motion at fine temporal resolution. The Fast pathway can be made very lightweight by reducing its channel capacity, yet can learn useful temporal information for video recognition. Our models achieve strong performance for both action classification and detection in video, and large improvements are pin-pointed as contributions by our SlowFast concept. We report state-of-the-art accuracy on major video recognition benchmarks, Kinetics, Charades and AVA. + + +
    + +
+
+## Citation
diff --git a/configs/recognition/tanet/README.md b/configs/recognition/tanet/README.md
index 45ef911cba..24e07e1122 100644
--- a/configs/recognition/tanet/README.md
+++ b/configs/recognition/tanet/README.md
@@ -1,6 +1,17 @@
 # TANet

-## Introduction
+## Abstract
+
+
+
+Video data comes with complex temporal dynamics due to various factors such as camera motion, speed variation, and different activities. To effectively capture this diverse motion pattern, this paper presents a new temporal adaptive module (**TAM**) to generate video-specific temporal kernels based on its own feature map. TAM proposes a unique two-level adaptive modeling scheme by decoupling the dynamic kernel into a location sensitive importance map and a location invariant aggregation weight. The importance map is learned in a local temporal window to capture short-term information, while the aggregation weight is generated from a global view with a focus on long-term structure. TAM is a modular block and could be integrated into 2D CNNs to yield a powerful video architecture (TANet) with a very small extra computational cost. The extensive experiments on Kinetics-400 and Something-Something datasets demonstrate that our TAM outperforms other temporal modeling methods consistently, and achieves the state-of-the-art performance under similar complexity.
+
+
+
    + +
+
+## Citation
diff --git a/configs/recognition/timesformer/README.md b/configs/recognition/timesformer/README.md
index 4c4ac9cedc..54b4f25443 100644
--- a/configs/recognition/timesformer/README.md
+++ b/configs/recognition/timesformer/README.md
@@ -1,6 +1,17 @@
 # TimeSformer

-## Introduction
+## Abstract
+
+
+
+We present a convolution-free approach to video classification built exclusively on self-attention over space and time. Our method, named "TimeSformer," adapts the standard Transformer architecture to video by enabling spatiotemporal feature learning directly from a sequence of frame-level patches. Our experimental study compares different self-attention schemes and suggests that "divided attention," where temporal attention and spatial attention are separately applied within each block, leads to the best video classification accuracy among the design choices considered. Despite the radically new design, TimeSformer achieves state-of-the-art results on several action recognition benchmarks, including the best reported accuracy on Kinetics-400 and Kinetics-600. Finally, compared to 3D convolutional networks, our model is faster to train, it can achieve dramatically higher test efficiency (at a small drop in accuracy), and it can also be applied to much longer video clips (over one minute long).
+
+
+
    + +
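+The divided space-time attention scheme can be sketched as below (a simplified illustration: residual connections, layer norms and the class token are dropped for brevity, and `batch_first` in `nn.MultiheadAttention` requires PyTorch >= 1.9):
+
+```python
+import torch
+import torch.nn as nn
+
+
+class DividedAttention(nn.Module):
+    """Temporal attention across frames, then spatial attention per frame."""
+
+    def __init__(self, dim, num_heads=8):
+        super().__init__()
+        self.time_attn = nn.MultiheadAttention(dim, num_heads, batch_first=True)
+        self.space_attn = nn.MultiheadAttention(dim, num_heads, batch_first=True)
+
+    def forward(self, x):  # x: (N, T, P, D), P patch tokens per frame
+        n, t, p, d = x.shape
+        # attend over time for each spatial location
+        xt = x.permute(0, 2, 1, 3).reshape(n * p, t, d)
+        xt = self.time_attn(xt, xt, xt)[0]
+        # then attend over space within each frame
+        x = xt.reshape(n, p, t, d).permute(0, 2, 1, 3)
+        xs = x.reshape(n * t, p, d)
+        xs = self.space_attn(xs, xs, xs)[0]
+        return xs.reshape(n, t, p, d)
+```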
+
+## Citation
diff --git a/configs/recognition/tin/README.md b/configs/recognition/tin/README.md
index b16224c676..cf57eed749 100644
--- a/configs/recognition/tin/README.md
+++ b/configs/recognition/tin/README.md
@@ -1,6 +1,17 @@
 # TIN

-## Introduction
+## Abstract
+
+
+
+For a long time, the vision community has tried to learn the spatio-temporal representation by combining convolutional neural networks with various temporal models, such as the families of Markov chain, optical flow, RNN and temporal convolution. However, these pipelines consume enormous computing resources due to the alternating learning process for spatial and temporal information. One natural question is whether we can embed the temporal information into the spatial one so the information in the two domains can be jointly learned once-only. In this work, we answer this question by presenting a simple yet powerful operator -- temporal interlacing network (TIN). Instead of learning the temporal features, TIN fuses the two kinds of information by interlacing spatial representations from the past to the future, and vice versa. A differentiable interlacing target can be learned to control the interlacing process. In this way, a heavy temporal model is replaced by a simple interlacing operator. We theoretically prove that with a learnable interlacing target, TIN performs equivalently to the regularized temporal convolution network (r-TCN), but gains 4% more accuracy with 6x less latency on 6 challenging benchmarks. These results push the state-of-the-art performances of video understanding by a considerable margin. Not surprisingly, the ensemble model of the proposed TIN won the 1st place in the ICCV19 - Multi Moments in Time challenge.
+
+
+
    + +
+
+## Citation
diff --git a/configs/recognition/tpn/README.md b/configs/recognition/tpn/README.md
index d3d513118e..98477f64d3 100644
--- a/configs/recognition/tpn/README.md
+++ b/configs/recognition/tpn/README.md
@@ -1,6 +1,17 @@
 # TPN

-## Introduction
+## Abstract
+
+
+
+Visual tempo characterizes the dynamics and the temporal scale of an action. Modeling such visual tempos of different actions facilitates their recognition. Previous works often capture the visual tempo through sampling raw videos at multiple rates and constructing an input-level frame pyramid, which usually requires a costly multi-branch network to handle. In this work we propose a generic Temporal Pyramid Network (TPN) at the feature-level, which can be flexibly integrated into 2D or 3D backbone networks in a plug-and-play manner. Two essential components of TPN, the source of features and the fusion of features, form a feature hierarchy for the backbone so that it can capture action instances at various tempos. TPN also shows consistent improvements over other challenging baselines on several action recognition datasets. Specifically, when equipped with TPN, the 3D ResNet-50 with dense sampling obtains a 2% gain on the validation set of Kinetics-400. A further analysis also reveals that TPN gains most of its improvements on action classes that have large variances in their visual tempos, validating the effectiveness of TPN.
+
+
+
    + +
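+A toy sketch of the feature-level idea: derive a slower-tempo level from a backbone feature map and fuse it back at the original temporal resolution (the real TPN builds its pyramid from several backbone stages with learned fusion modules; the shapes below are made up):
+
+```python
+import torch
+import torch.nn.functional as F
+
+feat = torch.randn(2, 256, 16, 7, 7)              # (N, C, T, H, W)
+slow = F.max_pool3d(feat, kernel_size=(2, 1, 1))  # temporally downsampled level
+slow = F.interpolate(slow, size=feat.shape[2:], mode='nearest')
+fused = feat + slow                               # parallel-flow style fusion
+print(fused.shape)  # torch.Size([2, 256, 16, 7, 7])
+```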
+
+## Citation
diff --git a/configs/recognition/trn/README.md b/configs/recognition/trn/README.md
index 582d3f73df..ff2f4d8785 100644
--- a/configs/recognition/trn/README.md
+++ b/configs/recognition/trn/README.md
@@ -1,6 +1,17 @@
 # TRN

-## Introduction
+## Abstract
+
+
+
+Temporal relational reasoning, the ability to link meaningful transformations of objects or entities over time, is a fundamental property of intelligent species. In this paper, we introduce an effective and interpretable network module, the Temporal Relation Network (TRN), designed to learn and reason about temporal dependencies between video frames at multiple time scales. We evaluate TRN-equipped networks on activity recognition tasks using three recent video datasets - Something-Something, Jester, and Charades - which fundamentally depend on temporal relational reasoning. Our results demonstrate that the proposed TRN gives convolutional neural networks a remarkable capacity to discover temporal relations in videos. Through only sparsely sampled video frames, TRN-equipped networks can accurately predict human-object interactions in the Something-Something dataset and identify various human gestures on the Jester dataset with very competitive performance. TRN-equipped networks also outperform two-stream networks and 3D convolution networks in recognizing daily activities in the Charades dataset. Further analyses show that the models learn intuitive and interpretable visual common sense knowledge in videos.
+
+
+
    + +
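+A minimal sketch of a single 2-frame relation term over per-frame features (the full TRN sums relation modules over multiple scales and samples frame tuples instead of enumerating every pair, as done below):
+
+```python
+import torch
+import torch.nn as nn
+
+
+class TwoFrameRelation(nn.Module):
+    """Sum an MLP over all ordered pairs of per-frame features."""
+
+    def __init__(self, feat_dim, num_classes, hidden=256):
+        super().__init__()
+        self.g = nn.Sequential(
+            nn.Linear(2 * feat_dim, hidden), nn.ReLU(inplace=True),
+            nn.Linear(hidden, num_classes))
+
+    def forward(self, feats):  # feats: (N, T, D) per-frame features
+        n, t, _ = feats.shape
+        logits = feats.new_zeros(n, self.g[-1].out_features)
+        for i in range(t - 1):
+            for j in range(i + 1, t):
+                logits = logits + self.g(
+                    torch.cat([feats[:, i], feats[:, j]], dim=1))
+        return logits
+
+
+print(TwoFrameRelation(512, 174)(torch.randn(2, 8, 512)).shape)  # (2, 174)
+```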
+
+## Citation
diff --git a/configs/recognition/tsm/README.md b/configs/recognition/tsm/README.md
index 97a0df1e71..20293cc5c7 100644
--- a/configs/recognition/tsm/README.md
+++ b/configs/recognition/tsm/README.md
@@ -1,6 +1,17 @@
 # TSM

-## Introduction
+## Abstract
+
+
+
+The explosive growth in video streaming gives rise to challenges on performing video understanding at high accuracy and low computation cost. Conventional 2D CNNs are computationally cheap but cannot capture temporal relationships; 3D CNN based methods can achieve good performance but are computationally intensive, making it expensive to deploy. In this paper, we propose a generic and effective Temporal Shift Module (TSM) that enjoys both high efficiency and high performance. Specifically, it can achieve the performance of 3D CNN but maintain 2D CNN's complexity. TSM shifts part of the channels along the temporal dimension, thus facilitating information exchange among neighboring frames. It can be inserted into 2D CNNs to achieve temporal modeling at zero computation and zero parameters. We also extended TSM to the online setting, which enables real-time low-latency online video recognition and video object detection. TSM is accurate and efficient: it ranked first on the Something-Something leaderboard upon publication; on Jetson Nano and Galaxy Note8, it achieves a low latency of 13ms and 35ms for online video recognition.
+
+
+
    + +
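+The shift operation itself fits in a few lines (a sketch with the paper's default of 1/8 of the channels shifted in each direction; the repository's `ResNetTSM` folds an equivalent operation into every residual block):
+
+```python
+import torch
+
+
+def temporal_shift(x, shift_div=8):
+    """x: (N, T, C, H, W). Shift 1/shift_div of the channels one step
+    backward in time and another 1/shift_div one step forward."""
+    fold = x.size(2) // shift_div
+    out = torch.zeros_like(x)
+    out[:, :-1, :fold] = x[:, 1:, :fold]                  # shift toward past
+    out[:, 1:, fold:2 * fold] = x[:, :-1, fold:2 * fold]  # shift toward future
+    out[:, :, 2 * fold:] = x[:, :, 2 * fold:]             # rest is unshifted
+    return out
+
+
+print(temporal_shift(torch.randn(2, 8, 64, 7, 7)).shape)  # (2, 8, 64, 7, 7)
+```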
+
+## Citation
diff --git a/configs/recognition/tsn/README.md b/configs/recognition/tsn/README.md
index f5e754b745..f3f5811ef5 100644
--- a/configs/recognition/tsn/README.md
+++ b/configs/recognition/tsn/README.md
@@ -1,6 +1,17 @@
 # TSN

-## Introduction
+## Abstract
+
+
+
+Deep convolutional networks have achieved great success for visual recognition in still images. However, for action recognition in videos, the advantage over traditional methods is not so evident. This paper aims to discover the principles to design effective ConvNet architectures for action recognition in videos and learn these models given limited training samples. Our first contribution is temporal segment network (TSN), a novel framework for video-based action recognition, which is based on the idea of long-range temporal structure modeling. It combines a sparse temporal sampling strategy and video-level supervision to enable efficient and effective learning using the whole action video. The other contribution is our study on a series of good practices in learning ConvNets on video data with the help of temporal segment network. Our approach obtains the state-of-the-art performance on the datasets of HMDB51 (69.4%) and UCF101 (94.2%). We also visualize the learned ConvNet models, which qualitatively demonstrates the effectiveness of temporal segment network and the proposed good practices.
+
+
+
    + +
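+The sparse sampling strategy reduces to a few lines (a sketch assuming `num_frames >= num_segments`; MMAction2's `SampleFrames` pipeline implements the full training/testing variants):
+
+```python
+import numpy as np
+
+
+def tsn_sample(num_frames, num_segments=3, test_mode=False):
+    """Pick one frame index per equal-length segment: random offsets
+    during training, segment centers at test time."""
+    seg_len = num_frames // num_segments
+    base = np.arange(num_segments) * seg_len
+    if test_mode:
+        return base + seg_len // 2
+    return base + np.random.randint(seg_len, size=num_segments)
+
+
+print(tsn_sample(300))  # e.g. [ 57 171 244]
+```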
+
+## Citation
diff --git a/configs/recognition/x3d/README.md b/configs/recognition/x3d/README.md
index ff6627f109..a7a3c7e715 100644
--- a/configs/recognition/x3d/README.md
+++ b/configs/recognition/x3d/README.md
@@ -1,6 +1,17 @@
 # X3D

-## Introduction
+## Abstract
+
+
+
+This paper presents X3D, a family of efficient video networks that progressively expand a tiny 2D image classification architecture along multiple network axes, in space, time, width and depth. Inspired by feature selection methods in machine learning, a simple stepwise network expansion approach is employed that expands a single axis in each step, such that good accuracy to complexity trade-off is achieved. To expand X3D to a specific target complexity, we perform progressive forward expansion followed by backward contraction. X3D achieves state-of-the-art performance while requiring 4.8x and 5.5x fewer multiply-adds and parameters for similar accuracy as previous work. Our most surprising finding is that networks with high spatiotemporal resolution can perform well, while being extremely light in terms of network width and parameters. We report competitive accuracy at unprecedented efficiency on video classification and detection benchmarks.
+
+
+
    + +
+
+## Citation
diff --git a/configs/skeleton/posec3d/README.md b/configs/skeleton/posec3d/README.md
index 7463824b16..3b8b686db6 100644
--- a/configs/skeleton/posec3d/README.md
+++ b/configs/skeleton/posec3d/README.md
@@ -1,6 +1,17 @@
 # PoseC3D

-## Introduction
+## Abstract
+
+
+
+Human skeleton, as a compact representation of human action, has received increasing attention in recent years. Many skeleton-based action recognition methods adopt graph convolutional networks (GCN) to extract features on top of human skeletons. Despite the positive results shown in previous works, GCN-based methods are subject to limitations in robustness, interoperability, and scalability. In this work, we propose PoseC3D, a new approach to skeleton-based action recognition, which relies on a 3D heatmap stack instead of a graph sequence as the base representation of human skeletons. Compared to GCN-based methods, PoseC3D is more effective in learning spatiotemporal features, more robust against pose estimation noises, and generalizes better in cross-dataset settings. Also, PoseC3D can handle multiple-person scenarios without additional computation cost, and its features can be easily integrated with other modalities at early fusion stages, which provides a great design space to further boost the performance. On four challenging datasets, PoseC3D consistently obtains superior performance, when used alone on skeletons and in combination with the RGB modality.
+
+
+
    + +
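+The base representation can be sketched as one Gaussian heatmap per joint, weighted by its estimation score (MMAction2's `GeneratePoseTarget` applies this per frame to form the 3D heatmap stack; the `sigma` value below is illustrative):
+
+```python
+import numpy as np
+
+
+def joint_heatmaps(kps, scores, h, w, sigma=0.6):
+    """kps: (V, 2) xy coordinates, scores: (V,) -> heatmaps of shape (V, h, w)."""
+    ys, xs = np.mgrid[0:h, 0:w].astype(np.float32)
+    maps = np.zeros((len(kps), h, w), dtype=np.float32)
+    for i, ((x, y), s) in enumerate(zip(kps, scores)):
+        maps[i] = s * np.exp(-((xs - x)**2 + (ys - y)**2) / (2 * sigma**2))
+    return maps
+
+
+kps = np.array([[32.0, 24.0], [30.0, 40.0]])
+print(joint_heatmaps(kps, np.array([0.9, 0.8]), 64, 64).shape)  # (2, 64, 64)
+```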
+
+## Citation
diff --git a/configs/skeleton/stgcn/README.md b/configs/skeleton/stgcn/README.md
index e90c639c6a..0f17d7a6c0 100644
--- a/configs/skeleton/stgcn/README.md
+++ b/configs/skeleton/stgcn/README.md
@@ -1,6 +1,17 @@
 # STGCN

-## Introduction
+## Abstract
+
+
+
+Dynamics of human body skeletons convey significant information for human action recognition. Conventional approaches for modeling skeletons usually rely on hand-crafted parts or traversal rules, thus resulting in limited expressive power and difficulties of generalization. In this work, we propose a novel model of dynamic skeletons called Spatial-Temporal Graph Convolutional Networks (ST-GCN), which moves beyond the limitations of previous methods by automatically learning both the spatial and temporal patterns from data. This formulation not only leads to greater expressive power but also stronger generalization capability. On two large datasets, Kinetics and NTU-RGBD, it achieves substantial improvements over mainstream methods.
+
+
+
    + +
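+The core spatial graph convolution can be written compactly (this mirrors the einsum formulation of `ConvTemporalGraphical` in this codebase; `A` stacks the K normalized partition adjacencies):
+
+```python
+import torch
+import torch.nn as nn
+
+
+class SimpleGraphConv(nn.Module):
+    """1x1 conv to K * out_channels, then aggregate over the K adjacencies."""
+
+    def __init__(self, in_channels, out_channels, k):
+        super().__init__()
+        self.k = k
+        self.conv = nn.Conv2d(in_channels, out_channels * k, kernel_size=1)
+
+    def forward(self, x, A):  # x: (N, C, T, V), A: (K, V, V)
+        x = self.conv(x)
+        n, kc, t, v = x.size()
+        x = x.view(n, self.k, kc // self.k, t, v)
+        return torch.einsum('nkctv,kvw->nctw', x, A)
+
+
+x, A = torch.randn(2, 3, 30, 18), torch.rand(3, 18, 18)
+print(SimpleGraphConv(3, 64, 3)(x, A).shape)  # torch.Size([2, 64, 30, 18])
+```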
    + +## Citation From 01c94d365a429e06ff7515eac73d2a091d9cd513 Mon Sep 17 00:00:00 2001 From: Jamie Date: Wed, 24 Nov 2021 13:24:23 +0800 Subject: [PATCH 296/414] [Fix] Fix bug that start_index is not used in SampleAVAFrames (#1278) * Fix bug that start_index in not used in RawFrameDecode * Use start_index in SampleAVAFrames * Fix lint & pass unit tests * Set the default of start_index to 0 for AVADataset * Fix docstring * fix lint * fix unittest Co-authored-by: Haodong Duan --- mmaction/datasets/ava_dataset.py | 6 ++++++ mmaction/datasets/pipelines/loading.py | 4 +++- tests/test_data/test_datasets/test_ava_dataset.py | 4 ++-- 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/mmaction/datasets/ava_dataset.py b/mmaction/datasets/ava_dataset.py index 547cd37204..312e0a299f 100644 --- a/mmaction/datasets/ava_dataset.py +++ b/mmaction/datasets/ava_dataset.py @@ -62,6 +62,10 @@ class AVADataset(BaseDataset): Default: None. filename_tmpl (str): Template for each filename. Default: 'img_{:05}.jpg'. + start_index (int): Specify a start index for frames in consideration of + different filename format. However, when taking videos as input, + it should be set to 0, since frames loaded from videos count + from 0. Default: 0. proposal_file (str): Path to the proposal file like ``ava_dense_proposals_{train, val}.FAIR.recall_93.9.pkl``. Default: None. @@ -97,6 +101,7 @@ def __init__(self, pipeline, label_file=None, filename_tmpl='img_{:05}.jpg', + start_index=0, proposal_file=None, person_det_score_thr=0.9, num_classes=81, @@ -135,6 +140,7 @@ def __init__(self, pipeline, data_prefix, test_mode, + start_index=start_index, modality=modality, num_classes=num_classes) diff --git a/mmaction/datasets/pipelines/loading.py b/mmaction/datasets/pipelines/loading.py index d1da624e00..4843fcbe50 100644 --- a/mmaction/datasets/pipelines/loading.py +++ b/mmaction/datasets/pipelines/loading.py @@ -449,8 +449,10 @@ def __call__(self, results): -self.frame_interval // 2, (self.frame_interval + 1) // 2, size=self.clip_len) frame_inds = self._get_clips(center_index, skip_offsets, shot_info) + start_index = results.get('start_index', 0) - results['frame_inds'] = np.array(frame_inds, dtype=np.int) + frame_inds = np.array(frame_inds, dtype=np.int) + start_index + results['frame_inds'] = frame_inds results['clip_len'] = self.clip_len results['frame_interval'] = self.frame_interval results['num_clips'] = 1 diff --git a/tests/test_data/test_datasets/test_ava_dataset.py b/tests/test_data/test_datasets/test_ava_dataset.py index aa7babde6d..0d054023ac 100644 --- a/tests/test_data/test_datasets/test_ava_dataset.py +++ b/tests/test_data/test_datasets/test_ava_dataset.py @@ -130,7 +130,7 @@ def test_ava_pipeline(self): assert result['filename_tmpl'] == 'img_{:05}.jpg' assert result['modality'] == 'RGB' - assert result['start_index'] == 1 + assert result['start_index'] == 0 assert result['timestamp_start'] == 900 assert result['timestamp_end'] == 1800 assert_array_equal(result['proposals'], @@ -152,7 +152,7 @@ def test_ava_pipeline(self): result = ava_dataset[0] assert result['filename_tmpl'] == 'img_{:05}.jpg' assert result['modality'] == 'RGB' - assert result['start_index'] == 1 + assert result['start_index'] == 0 assert result['timestamp_start'] == 900 assert result['timestamp_end'] == 1800 From 2ef6903ec87f376af95d0692c57d3d734e6b6885 Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Wed, 24 Nov 2021 21:08:41 +0800 Subject: [PATCH 297/414] [Feature] Support 2S-AGCN (#1248) * 
master * master 0721 * 1101 * 1011 2sagcn * 1011 2sagcn * 1011 2sagcn * 1011 2sagcn * 1011 2sagcn * 1123 fix init, 2sgcn_name * 1123 add bone formatting * 1123 add bone formatting * 1123 modify 2s_bone config * 1123 remove gen_bone_script * 1124 fix bugs in JointToBone, add readme metafile of agcn * 1124 modify readme, meta, formatting index * Update metafile.yml * 1124 joint86.82 * update Joint2Bone * update links Co-authored-by: Haodong Duan --- .../2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py | 79 +++++ .../2sagcn_80e_ntu60_xsub_keypoint_3d.py | 76 +++++ configs/skeleton/2s-agcn/README.md | 87 +++++ configs/skeleton/2s-agcn/README_zh-CN.md | 76 +++++ configs/skeleton/2s-agcn/metafile.yml | 40 +++ mmaction/datasets/pipelines/__init__.py | 6 +- mmaction/datasets/pipelines/formatting.py | 60 ++++ mmaction/models/backbones/__init__.py | 3 +- mmaction/models/backbones/agcn.py | 308 ++++++++++++++++++ 9 files changed, 731 insertions(+), 4 deletions(-) create mode 100644 configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py create mode 100644 configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d.py create mode 100644 configs/skeleton/2s-agcn/README.md create mode 100644 configs/skeleton/2s-agcn/README_zh-CN.md create mode 100644 configs/skeleton/2s-agcn/metafile.yml create mode 100644 mmaction/models/backbones/agcn.py diff --git a/configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py b/configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py new file mode 100644 index 0000000000..b41cefab58 --- /dev/null +++ b/configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py @@ -0,0 +1,79 @@ +model = dict( + type='SkeletonGCN', + backbone=dict( + type='AGCN', + in_channels=3, + graph_cfg=dict(layout='ntu-rgb+d', strategy='spatial')), + cls_head=dict( + type='STGCNHead', + num_classes=60, + in_channels=256, + loss_cls=dict(type='CrossEntropyLoss')), + train_cfg=None, + test_cfg=None) + +dataset_type = 'PoseDataset' +ann_file_train = 'data/ntu/nturgb+d_skeletons_60_3d/xsub/train.pkl' +ann_file_val = 'data/ntu/nturgb+d_skeletons_60_3d/xsub/val.pkl' +train_pipeline = [ + dict(type='PaddingWithLoop', clip_len=300), + dict(type='PoseDecode'), + dict(type='JointToBone'), + dict(type='FormatGCNInput', input_format='NCTVM'), + dict(type='Collect', keys=['keypoint', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['keypoint']) +] +val_pipeline = [ + dict(type='PaddingWithLoop', clip_len=300), + dict(type='PoseDecode'), + dict(type='JointToBone'), + dict(type='FormatGCNInput', input_format='NCTVM'), + dict(type='Collect', keys=['keypoint', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['keypoint']) +] +test_pipeline = [ + dict(type='PaddingWithLoop', clip_len=300), + dict(type='PoseDecode'), + dict(type='JointToBone'), + dict(type='FormatGCNInput', input_format='NCTVM'), + dict(type='Collect', keys=['keypoint', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['keypoint']) +] +data = dict( + videos_per_gpu=12, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix='', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix='', + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix='', + pipeline=test_pipeline)) + +# optimizer +optimizer = dict( + type='SGD', lr=0.1, momentum=0.9, weight_decay=0.0001, nesterov=True) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict(policy='step', 
step=[30, 40]) +total_epochs = 80 +checkpoint_config = dict(interval=3) +evaluation = dict(interval=3, metrics=['top_k_accuracy']) +log_config = dict(interval=100, hooks=[dict(type='TextLoggerHook')]) + +# runtime settings +dist_params = dict(backend='nccl', port='1031') +log_level = 'INFO' +work_dir = './work_dirs/2sagcn_80e_ntu60_xsub_bone_3d/' +load_from = None +resume_from = None +workflow = [('train', 1)] diff --git a/configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d.py b/configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d.py new file mode 100644 index 0000000000..53f25cbda9 --- /dev/null +++ b/configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d.py @@ -0,0 +1,76 @@ +model = dict( + type='SkeletonGCN', + backbone=dict( + type='AGCN', + in_channels=3, + graph_cfg=dict(layout='ntu-rgb+d', strategy='spatial')), + cls_head=dict( + type='STGCNHead', + num_classes=60, + in_channels=256, + loss_cls=dict(type='CrossEntropyLoss')), + train_cfg=None, + test_cfg=None) + +dataset_type = 'PoseDataset' +ann_file_train = 'data/ntu/nturgb+d_skeletons_60_3d/xsub/train.pkl' +ann_file_val = 'data/ntu/nturgb+d_skeletons_60_3d/xsub/val.pkl' +train_pipeline = [ + dict(type='PaddingWithLoop', clip_len=300), + dict(type='PoseDecode'), + dict(type='FormatGCNInput', input_format='NCTVM'), + dict(type='Collect', keys=['keypoint', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['keypoint']) +] +val_pipeline = [ + dict(type='PaddingWithLoop', clip_len=300), + dict(type='PoseDecode'), + dict(type='FormatGCNInput', input_format='NCTVM'), + dict(type='Collect', keys=['keypoint', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['keypoint']) +] +test_pipeline = [ + dict(type='PaddingWithLoop', clip_len=300), + dict(type='PoseDecode'), + dict(type='FormatGCNInput', input_format='NCTVM'), + dict(type='Collect', keys=['keypoint', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['keypoint']) +] +data = dict( + videos_per_gpu=12, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix='', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix='', + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix='', + pipeline=test_pipeline)) + +# optimizer +optimizer = dict( + type='SGD', lr=0.1, momentum=0.9, weight_decay=0.0001, nesterov=True) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict(policy='step', step=[30, 40]) +total_epochs = 80 +checkpoint_config = dict(interval=3) +evaluation = dict(interval=3, metrics=['top_k_accuracy']) +log_config = dict(interval=100, hooks=[dict(type='TextLoggerHook')]) + +# runtime settings +dist_params = dict(backend='nccl', port='1031') +log_level = 'INFO' +work_dir = './work_dirs/2sagcn_80e_ntu60_xsub_keypoint_3d/' +load_from = None +resume_from = None +workflow = [('train', 1)] diff --git a/configs/skeleton/2s-agcn/README.md b/configs/skeleton/2s-agcn/README.md new file mode 100644 index 0000000000..b41338838d --- /dev/null +++ b/configs/skeleton/2s-agcn/README.md @@ -0,0 +1,87 @@ +# AGCN + +## Abstract + + + +In skeleton-based action recognition, graph convolutional networks (GCNs), which model the human body skeletons as spatiotemporal graphs, have achieved remarkable performance. However, in existing GCN-based methods, the topology of the graph is set manually, and it is fixed over all layers and input samples. 
This may not be optimal for the hierarchical GCN and diverse samples in action recognition tasks. In addition, the second-order information (the lengths and directions of bones) of the skeleton data, which is naturally more informative and discriminative for action recognition, is rarely investigated in existing methods. In this work, we propose a novel two-stream adaptive graph convolutional network (2s-AGCN) for skeleton-based action recognition. The topology of the graph in our model can be either uniformly or individually learned by the BP algorithm in an end-to-end manner. This data-driven method increases the flexibility of the model for graph construction and brings more generality to adapt to various data samples. Moreover, a two-stream framework is proposed to model both the first-order and the second-order information simultaneously, which shows notable improvement for the recognition accuracy. Extensive experiments on the two large-scale datasets, NTU-RGBD and Kinetics-Skeleton, demonstrate that the performance of our model exceeds the state-of-the-art by a significant margin.
+
+
+
    + +
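+The "second-order" bone stream amounts to joint differences along the skeleton tree (a sketch matching the `JointToBone` pipeline step introduced in this patch; `pairs` lists hypothetical (child, parent) indices of a toy 5-joint layout):
+
+```python
+import numpy as np
+
+
+def joints_to_bones(keypoint, pairs):
+    """keypoint: (M, T, V, C) joint coordinates -> same-shape bone vectors."""
+    bone = np.zeros_like(keypoint)
+    for v1, v2 in pairs:
+        bone[..., v1, :] = keypoint[..., v1, :] - keypoint[..., v2, :]
+    return bone
+
+
+pairs = [(0, 0), (1, 0), (2, 1), (3, 0), (4, 3)]  # root's "bone" stays zero
+kp = np.random.randn(1, 300, 5, 3).astype(np.float32)
+print(joints_to_bones(kp, pairs).shape)  # (1, 300, 5, 3)
+```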
    + +## Citation + + + +```BibTeX +@inproceedings{shi2019two, + title={Two-stream adaptive graph convolutional networks for skeleton-based action recognition}, + author={Shi, Lei and Zhang, Yifan and Cheng, Jian and Lu, Hanqing}, + booktitle={Proceedings of the IEEE/CVF conference on computer vision and pattern recognition}, + pages={12026--12035}, + year={2019} +} +``` + +## Model Zoo + +### NTU60_XSub + +| config | type | gpus | backbone | Top-1 | ckpt | log | json | +| :----------------------------------------------------------- | :------------: | :---: | :----------: | :---: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [2sagcn_80e_ntu60_xsub_keypoint_3d](/configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d.py) | joint | 1 | AGCN | 86.82 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d-e9c57448.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d.json) | +| [2sagcn_80e_ntu60_xsub_bone_3d](/configs/skeleton/ss-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py) | bone | 2 | AGCN | 87.91 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d-aef54a2d.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d.json) | + +## Train + +You can use the following command to train a model. + +```shell +python tools/train.py ${CONFIG_FILE} [optional arguments] +``` + +Example: train AGCN model on joint data of NTU60 dataset in a deterministic option with periodic validation. + +```shell +python tools/train.py configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d.py \ + --work-dir work_dirs/2sagcn_80e_ntu60_xsub_keypoint_3d \ + --validate --seed 0 --deterministic +``` + +Example: train AGCN model on bone data of NTU60 dataset in a deterministic option with periodic validation. + +```shell +python tools/train.py configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py \ + --work-dir work_dirs/2sagcn_80e_ntu60_xsub_bone_3d \ + --validate --seed 0 --deterministic +``` + +For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). + +## Test + +You can use the following command to test a model. + +```shell +python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] +``` + +Example: test AGCN model on joint data of NTU60 dataset and dump the result to a pickle file. + +```shell +python tools/test.py configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d.py \ + checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy mean_class_accuracy \ + --out joint_result.pkl +``` + +Example: test AGCN model on bone data of NTU60 dataset and dump the result to a pickle file. 
+ +```shell +python tools/test.py configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py \ + checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy mean_class_accuracy \ + --out bone_result.pkl +``` + +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). diff --git a/configs/skeleton/2s-agcn/README_zh-CN.md b/configs/skeleton/2s-agcn/README_zh-CN.md new file mode 100644 index 0000000000..d7d21594a3 --- /dev/null +++ b/configs/skeleton/2s-agcn/README_zh-CN.md @@ -0,0 +1,76 @@ +# AGCN + +## 简介 + + + +```BibTeX +@inproceedings{shi2019two, + title={Two-stream adaptive graph convolutional networks for skeleton-based action recognition}, + author={Shi, Lei and Zhang, Yifan and Cheng, Jian and Lu, Hanqing}, + booktitle={Proceedings of the IEEE/CVF conference on computer vision and pattern recognition}, + pages={12026--12035}, + year={2019} +} +``` + +## 模型库 + +### NTU60_XSub + +| 配置文件 | 数据格式 | GPU 数量 | 主干网络 | top1 准确率 | ckpt | log | json | +| :----------------------------------------------------------- | :------------: | :---: | :----------: | :---: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [2sagcn_80e_ntu60_xsub_keypoint_3d](/configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d.py) | joint | 1 | AGCN | 86.82 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d-e9c57448.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d.json) | +| [2sagcn_80e_ntu60_xsub_bone_3d](/configs/skeleton/ss-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py) | bone | 2 | AGCN | 87.91 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d-aef54a2d.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d.json) | + +## 如何训练 + +用户可以使用以下指令进行模型训练。 + +```shell +python tools/train.py ${CONFIG_FILE} [optional arguments] +``` + +例如:以一个确定性的训练方式,辅以定期的验证过程进行 AGCN 模型在 NTU60 数据集的骨骼数据上的训练。 + +```shell +python tools/train.py configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d.py \ + --work-dir work_dirs/2sagcn_80e_ntu60_xsub_keypoint_3d \ + --validate --seed 0 --deterministic +``` + +例如:以一个确定性的训练方式,辅以定期的验证过程进行 AGCN 模型在 NTU60 数据集的关节数据上的训练。 + +```shell +python tools/train.py configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py \ + --work-dir work_dirs/2sagcn_80e_ntu60_xsub_bone_3d \ + --validate --seed 0 --deterministic +``` + +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 + +## 如何测试 + +用户可以使用以下指令进行模型测试。 + +```shell +python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] +``` + +例如:在 NTU60 数据集的骨骼数据上测试 AGCN 模型,并将结果导出为一个 pickle 文件。 + +```shell +python tools/test.py configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d.py \ + checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy mean_class_accuracy \ + --out joint_result.pkl +``` + +例如:在 NTU60 数据集的关节数据上测试 AGCN 模型,并将结果导出为一个 pickle 文件。 + +```shell 
+python tools/test.py configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py \ + checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy mean_class_accuracy \ + --out bone_result.pkl +``` + +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/skeleton/2s-agcn/metafile.yml b/configs/skeleton/2s-agcn/metafile.yml new file mode 100644 index 0000000000..8227bdd1d9 --- /dev/null +++ b/configs/skeleton/2s-agcn/metafile.yml @@ -0,0 +1,40 @@ +Collections: +- Name: AGCN + README: configs/skeleton/2s-agcn/README.md +Models: +- Config: configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d.py + In Collection: AGCN + Metadata: + Architecture: AGCN + Batch Size: 24 + Epochs: 80 + Parameters: 3472176 + Training Data: NTU60-XSub + Training Resources: 1 GPU + Name: agcn_80e_ntu60_xsub_keypoint_3d + Results: + Dataset: NTU60-XSub + Metrics: + Top 1 Accuracy: 86.82 + Task: Skeleton-based Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d.json + Training Log: https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d.log + Weights: https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d-e9c57448.pth +- Config: configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py + In Collection: AGCN + Metadata: + Architecture: AGCN + Batch Size: 24 + Epochs: 80 + Parameters: 3472176 + Training Data: NTU60-XSub + Training Resources: 2 GPU + Name: agcn_80e_ntu60_xsub_bone_3d + Results: + Dataset: NTU60-XSub + Metrics: + Top 1 Accuracy: 87.91 + Task: Skeleton-based Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d.json + Training Log: https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d.log + Weights: https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d-aef54a2d.pth diff --git a/mmaction/datasets/pipelines/__init__.py b/mmaction/datasets/pipelines/__init__.py index 0ec9273077..1905bf9893 100644 --- a/mmaction/datasets/pipelines/__init__.py +++ b/mmaction/datasets/pipelines/__init__.py @@ -6,8 +6,8 @@ TorchvisionTrans) from .compose import Compose from .formatting import (Collect, FormatAudioShape, FormatGCNInput, - FormatShape, ImageToTensor, Rename, ToDataContainer, - ToTensor, Transpose) + FormatShape, ImageToTensor, JointToBone, Rename, + ToDataContainer, ToTensor, Transpose) from .loading import (ArrayDecode, AudioDecode, AudioDecodeInit, AudioFeatureSelector, BuildPseudoClip, DecordDecode, DecordInit, DenseSampleFrames, @@ -37,5 +37,5 @@ 'PyAVDecodeMotionVector', 'Rename', 'Imgaug', 'UniformSampleFrames', 'PoseDecode', 'LoadKineticsPose', 'GeneratePoseTarget', 'PIMSInit', 'PIMSDecode', 'TorchvisionTrans', 'PytorchVideoTrans', 'PoseNormalize', - 'FormatGCNInput', 'PaddingWithLoop', 'ArrayDecode' + 'FormatGCNInput', 'PaddingWithLoop', 'ArrayDecode', 'JointToBone' ] diff --git a/mmaction/datasets/pipelines/formatting.py b/mmaction/datasets/pipelines/formatting.py index e26da9366e..528aa8c5b3 100644 --- a/mmaction/datasets/pipelines/formatting.py +++ b/mmaction/datasets/pipelines/formatting.py @@ -374,6 +374,66 @@ def __repr__(self): return repr_str +@PIPELINES.register_module() +class 
JointToBone: + """Convert the joint information to bone information. + + Required keys are "keypoint" , + added or modified keys are "keypoint". + + Args: + dataset (str): Define the type of dataset: 'nturgb+d', 'openpose', + 'coco'. Default: 'nturgb+d'. + """ + + def __init__(self, dataset='nturgb+d'): + self.dataset = dataset + if self.dataset not in ['nturgb+d', 'openpose', 'coco']: + raise ValueError( + f'The dataset type {self.dataset} is not supported') + if self.dataset == 'nturgb+d': + self.pairs = [(0, 1), (1, 20), (2, 20), (3, 2), (4, 20), (5, 4), + (6, 5), (7, 6), (8, 20), (9, 8), (10, 9), (11, 10), + (12, 0), (13, 12), (14, 13), (15, 14), (16, 0), + (17, 16), (18, 17), (19, 18), (21, 22), (20, 20), + (22, 7), (23, 24), (24, 11)] + elif self.dataset == 'openpose': + self.pairs = ((0, 0), (1, 0), (2, 1), (3, 2), (4, 3), (5, 1), + (6, 5), (7, 6), (8, 2), (9, 8), (10, 9), (11, 5), + (12, 11), (13, 12), (14, 0), (15, 0), (16, 14), (17, + 15)) + elif self.dataset == 'coco': + self.pairs = ((0, 0), (1, 0), (2, 0), (3, 1), (4, 2), (5, 0), + (6, 0), (7, 5), (8, 6), (9, 7), (10, 8), (11, 0), + (12, 0), (13, 11), (14, 12), (15, 13), (16, 14)) + + def __call__(self, results): + """Performs the Bone formatting. + + Args: + results (dict): The resulting dict to be modified and passed + to the next transform in pipeline. + """ + keypoint = results['keypoint'] + M, T, V, C = keypoint.shape + bone = np.zeros((M, T, V, C), dtype=np.float32) + + assert C in [2, 3] + for v1, v2 in self.pairs: + bone[..., v1, :] = keypoint[..., v1, :] - keypoint[..., v2, :] + if C == 3 and self.dataset in ['openpose', 'coco']: + score = (keypoint[..., v1, 2] + keypoint[..., v2, 2]) / 2 + bone[..., v1, 2] = score + + results['keypoint'] = bone + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f"(dataset_type='{self.dataset}')" + return repr_str + + @PIPELINES.register_module() class FormatGCNInput: """Format final skeleton shape to the given input_format. diff --git a/mmaction/models/backbones/__init__.py b/mmaction/models/backbones/__init__.py index 2304d3db77..0beb89ddfe 100644 --- a/mmaction/models/backbones/__init__.py +++ b/mmaction/models/backbones/__init__.py @@ -1,4 +1,5 @@ # Copyright (c) OpenMMLab. All rights reserved. +from .agcn import AGCN from .c3d import C3D from .mobilenet_v2 import MobileNetV2 from .mobilenet_v2_tsm import MobileNetV2TSM @@ -20,5 +21,5 @@ 'C3D', 'ResNet', 'ResNet3d', 'ResNetTSM', 'ResNet2Plus1d', 'ResNet3dSlowFast', 'ResNet3dSlowOnly', 'ResNet3dCSN', 'ResNetTIN', 'X3D', 'ResNetAudio', 'ResNet3dLayer', 'MobileNetV2TSM', 'MobileNetV2', 'TANet', - 'TimeSformer', 'STGCN' + 'TimeSformer', 'STGCN', 'AGCN' ] diff --git a/mmaction/models/backbones/agcn.py b/mmaction/models/backbones/agcn.py new file mode 100644 index 0000000000..e88ef27089 --- /dev/null +++ b/mmaction/models/backbones/agcn.py @@ -0,0 +1,308 @@ +import torch +import torch.nn as nn +from mmcv.cnn import constant_init, kaiming_init, normal_init +from mmcv.runner import load_checkpoint +from mmcv.utils import _BatchNorm + +from ...utils import get_root_logger +from ..builder import BACKBONES +from ..skeleton_gcn.utils import Graph + + +def zero(x): + """return zero.""" + return 0 + + +def identity(x): + """return input itself.""" + return x + + +class AGCNBlock(nn.Module): + """Applies spatial graph convolution and temporal convolution over an + input graph sequence. 
+ + Args: + in_channels (int): Number of channels in the input sequence data + out_channels (int): Number of channels produced by the convolution + kernel_size (tuple): Size of the temporal convolving kernel and + graph convolving kernel + stride (int, optional): Stride of the temporal convolution. Default: 1 + adj_len (int, optional): The length of the adjacency matrix. + Default: 17 + dropout (int, optional): Dropout rate of the final output. Default: 0 + residual (bool, optional): If ``True``, applies a residual mechanism. + Default: ``True`` + + Shape: + - Input[0]: Input graph sequence in :math:`(N, in_channels, T_{in}, V)` + format + - Input[1]: Input graph adjacency matrix in :math:`(K, V, V)` format + - Output[0]: Outpu graph sequence in :math:`(N, out_channels, T_{out}, + V)` format + - Output[1]: Graph adjacency matrix for output data in :math:`(K, V, + V)` format + + where + :math:`N` is a batch size, + :math:`K` is the spatial kernel size, as :math:`K == kernel_size[1] + `, + :math:`T_{in}/T_{out}` is a length of input/output sequence, + :math:`V` is the number of graph nodes. + """ + + def __init__(self, + in_channels, + out_channels, + kernel_size, + stride=1, + adj_len=17, + dropout=0, + residual=True): + super().__init__() + + assert len(kernel_size) == 2 + assert kernel_size[0] % 2 == 1 + padding = ((kernel_size[0] - 1) // 2, 0) + + self.gcn = ConvTemporalGraphical( + in_channels, out_channels, kernel_size[1], adj_len=adj_len) + self.tcn = nn.Sequential( + nn.BatchNorm2d(out_channels), nn.ReLU(inplace=True), + nn.Conv2d(out_channels, out_channels, (kernel_size[0], 1), + (stride, 1), padding), nn.BatchNorm2d(out_channels), + nn.Dropout(dropout, inplace=True)) + + if not residual: + self.residual = zero + + elif (in_channels == out_channels) and (stride == 1): + self.residual = identity + + else: + self.residual = nn.Sequential( + nn.Conv2d( + in_channels, + out_channels, + kernel_size=1, + stride=(stride, 1)), nn.BatchNorm2d(out_channels)) + + self.relu = nn.ReLU(inplace=True) + + def forward(self, x, adj_mat): + """Defines the computation performed at every call.""" + res = self.residual(x) + x, adj_mat = self.gcn(x, adj_mat) + + x = self.tcn(x) + res + + return self.relu(x), adj_mat + + +class ConvTemporalGraphical(nn.Module): + """The basic module for applying a graph convolution. + + Args: + in_channels (int): Number of channels in the input sequence data + out_channels (int): Number of channels produced by the convolution + kernel_size (int): Size of the graph convolving kernel + t_kernel_size (int): Size of the temporal convolving kernel + t_stride (int, optional): Stride of the temporal convolution. + Default: 1 + t_padding (int, optional): Temporal zero-padding added to both sides + of the input. Default: 0 + t_dilation (int, optional): Spacing between temporal kernel elements. + Default: 1 + adj_len (int, optional): The length of the adjacency matrix. + Default: 17 + bias (bool, optional): If ``True``, adds a learnable bias to the + output. 
Default: ``True`` + + Shape: + - Input[0]: Input graph sequence in :math:`(N, in_channels, T_{in}, V)` + format + - Input[1]: Input graph adjacency matrix in :math:`(K, V, V)` format + - Output[0]: Output graph sequence in :math:`(N, out_channels, T_{out} + , V)` format + - Output[1]: Graph adjacency matrix for output data in :math:`(K, V, V) + ` format + + where + :math:`N` is a batch size, + :math:`K` is the spatial kernel size, as :math:`K == kernel_size[1] + `, + :math:`T_{in}/T_{out}` is a length of input/output sequence, + :math:`V` is the number of graph nodes. + """ + + def __init__(self, + in_channels, + out_channels, + kernel_size, + t_kernel_size=1, + t_stride=1, + t_padding=0, + t_dilation=1, + adj_len=17, + bias=True): + super().__init__() + + self.kernel_size = kernel_size + + self.PA = nn.Parameter(torch.FloatTensor(3, adj_len, adj_len)) + torch.nn.init.constant_(self.PA, 1e-6) + + self.num_subset = 3 + inter_channels = out_channels // 4 + self.inter_c = inter_channels + self.conv_a = nn.ModuleList() + self.conv_b = nn.ModuleList() + self.conv_d = nn.ModuleList() + for i in range(self.num_subset): + self.conv_a.append(nn.Conv2d(in_channels, inter_channels, 1)) + self.conv_b.append(nn.Conv2d(in_channels, inter_channels, 1)) + self.conv_d.append(nn.Conv2d(in_channels, out_channels, 1)) + + if in_channels != out_channels: + self.down = nn.Sequential( + nn.Conv2d(in_channels, out_channels, 1), + nn.BatchNorm2d(out_channels)) + else: + self.down = lambda x: x + + self.bn = nn.BatchNorm2d(out_channels) + self.soft = nn.Softmax(-2) + self.relu = nn.ReLU() + + def forward(self, x, adj_mat): + """Defines the computation performed at every call.""" + assert adj_mat.size(0) == self.kernel_size + + N, C, T, V = x.size() + A = adj_mat + self.PA + + y = None + for i in range(self.num_subset): + A1 = self.conv_a[i](x).permute(0, 3, 1, 2).contiguous().view( + N, V, self.inter_c * T) + A2 = self.conv_b[i](x).view(N, self.inter_c * T, V) + A1 = self.soft(torch.matmul(A1, A2) / A1.size(-1)) # N V V + A1 = A1 + A[i] + A2 = x.view(N, C * T, V) + z = self.conv_d[i](torch.matmul(A2, A1).view(N, C, T, V)) + y = z + y if y is not None else z + y = self.bn(y) + y += self.down(x) + + return self.relu(y), adj_mat + + +@BACKBONES.register_module() +class AGCN(nn.Module): + """Backbone of Two-Stream Adaptive Graph Convolutional Networks for + Skeleton-Based Action Recognition. + + Args: + in_channels (int): Number of channels in the input data. + graph_cfg (dict): The arguments for building the graph. + data_bn (bool): If 'True', adds data normalization to the inputs. + Default: True. + pretrained (str | None): Name of pretrained model. + **kwargs (optional): Other parameters for graph convolution units. + + Shape: + - Input: :math:`(N, in_channels, T_{in}, V_{in}, M_{in})` + - Output: :math:`(N, num_class)` where + :math:`N` is a batch size, + :math:`T_{in}` is a length of input sequence, + :math:`V_{in}` is the number of graph nodes, + :math:`M_{in}` is the number of instance in a frame. 
+ """ + + def __init__(self, + in_channels, + graph_cfg, + data_bn=True, + pretrained=None, + **kwargs): + super().__init__() + + # load graph + self.graph = Graph(**graph_cfg) + A = torch.tensor( + self.graph.A, dtype=torch.float32, requires_grad=False) + self.register_buffer('A', A) + + # build networks + spatial_kernel_size = A.size(0) + temporal_kernel_size = 9 + kernel_size = (temporal_kernel_size, spatial_kernel_size) + self.data_bn = nn.BatchNorm1d(in_channels * + A.size(1)) if data_bn else identity + + kwargs0 = {k: v for k, v in kwargs.items() if k != 'dropout'} + self.agcn_networks = nn.ModuleList(( + AGCNBlock( + in_channels, + 64, + kernel_size, + 1, + adj_len=A.size(1), + residual=False, + **kwargs0), + AGCNBlock(64, 64, kernel_size, 1, adj_len=A.size(1), **kwargs), + AGCNBlock(64, 64, kernel_size, 1, adj_len=A.size(1), **kwargs), + AGCNBlock(64, 64, kernel_size, 1, adj_len=A.size(1), **kwargs), + AGCNBlock(64, 128, kernel_size, 2, adj_len=A.size(1), **kwargs), + AGCNBlock(128, 128, kernel_size, 1, adj_len=A.size(1), **kwargs), + AGCNBlock(128, 128, kernel_size, 1, adj_len=A.size(1), **kwargs), + AGCNBlock(128, 256, kernel_size, 2, adj_len=A.size(1), **kwargs), + AGCNBlock(256, 256, kernel_size, 1, adj_len=A.size(1), **kwargs), + AGCNBlock(256, 256, kernel_size, 1, adj_len=A.size(1), **kwargs), + )) + + self.pretrained = pretrained + + def init_weights(self): + """Initiate the parameters either from existing checkpoint or from + scratch.""" + if isinstance(self.pretrained, str): + logger = get_root_logger() + logger.info(f'load model from: {self.pretrained}') + + load_checkpoint(self, self.pretrained, strict=False, logger=logger) + + elif self.pretrained is None: + for m in self.modules(): + if isinstance(m, nn.Conv2d): + kaiming_init(m) + elif isinstance(m, nn.Linear): + normal_init(m) + elif isinstance(m, _BatchNorm): + constant_init(m, 1) + else: + raise TypeError('pretrained must be a str or None') + + def forward(self, x): + """Defines the computation performed at every call. + Args: + x (torch.Tensor): The input data. + + Returns: + torch.Tensor: The output of the module. + """ + # data normalization + x = x.float() + n, c, t, v, m = x.size() + x = x.permute(0, 4, 3, 1, 2).contiguous() # N M V C T + x = x.view(n * m, v * c, t) + x = self.data_bn(x) + x = x.view(n, m, v, c, t) + x = x.permute(0, 1, 3, 4, 2).contiguous() + x = x.view(n * m, c, t, v) + + for gcn in self.agcn_networks: + x, _ = gcn(x, self.A) + + return x From 26b4b93d1d58dc0f1fd2cb7bb9609d9325fc2ada Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Thu, 25 Nov 2021 10:44:16 +0800 Subject: [PATCH 298/414] [Docs] Add 2s-AGCN in Updates. (#1289) * master * master 0721 * add README * 1125 add undates * 1125 add zh update --- README.md | 1 + README_zh-CN.md | 1 + 2 files changed, 2 insertions(+) diff --git a/README.md b/README.md index 453b2e59f0..3e1ba85712 100644 --- a/README.md +++ b/README.md @@ -44,6 +44,7 @@ The master branch works with **PyTorch 1.3+**. ## Updates +- (2021-11-24) We support **2s-AGCN** on NTU60 XSub, achieve 86.82% Top-1 accuracy on joint stream and 87.91% Top-1 accuracy on bone stream respectively. - (2021-10-29) We provide a demo for skeleton-based and rgb-based spatio-temporal detection and action recognition (demo/demo_video_structuralize.py). 
- (2021-10-26) We train and test **ST-GCN** on NTU60 with 3D keypoint annotations, achieve 84.61% Top-1 accuracy (higher than 81.5% in the [paper](https://www.aaai.org/ocs/index.php/AAAI/AAAI18/paper/viewPaper/17135)). - (2021-10-25) We provide a script(tools/data/skeleton/gen_ntu_rgbd_raw.py) to convert the NTU60 and NTU120 3D raw skeleton data to our format. diff --git a/README_zh-CN.md b/README_zh-CN.md index e9ee522b9d..05550aae20 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -43,6 +43,7 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLa ## 更新记录 +- (2021-11-24) 在 NTU60 XSub 上支持 **2s-AGCN**, 在 joint stream 和 bone stream 上分别达到 86.82% 和 87.91% 的识别准确率。 - (2021-10-29) 支持基于 skeleton 模态和 rgb 模态的时空动作检测和行为识别 demo (demo/demo_video_structuralize.py)。 - (2021-10-26) 在 NTU60 3d 关键点标注数据集上训练测试 **STGCN**, 可达到 84.61% (高于 [paper](https://www.aaai.org/ocs/index.php/AAAI/AAAI18/paper/viewPaper/17135) 中的 81.5%) 的识别准确率。 - (2021-10-25) 提供将 NTU60 和 NTU120 的 3d 骨骼点数据转换成我们项目的格式的脚本(tools/data/skeleton/gen_ntu_rgbd_raw.py)。 From 241c56ac3c47c6d3dfddea028a85a814fbc9cb14 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Thu, 25 Nov 2021 11:23:27 +0800 Subject: [PATCH 299/414] [Doc] Add descriptions to 3D skeletons (#1290) --- tools/data/skeleton/README.md | 2 +- tools/data/skeleton/README_zh-CN.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/data/skeleton/README.md b/tools/data/skeleton/README.md index 6769033940..271258200b 100644 --- a/tools/data/skeleton/README.md +++ b/tools/data/skeleton/README.md @@ -96,7 +96,7 @@ For skeleton data visualization, you need also to prepare the RGB videos. Please -## Convert the NTU RGB+D raw skeleton data to our format +## Convert the NTU RGB+D raw skeleton data to our format (only applicable to GCN backbones) Here we also provide the script for converting the NTU RGB+D raw skeleton data to our format. First, download the raw skeleton data of NTU-RGBD 60 and NTU-RGBD 120 from https://github.com/shahroudy/NTURGB-D. diff --git a/tools/data/skeleton/README_zh-CN.md b/tools/data/skeleton/README_zh-CN.md index 456ac496e6..43810a4f72 100644 --- a/tools/data/skeleton/README_zh-CN.md +++ b/tools/data/skeleton/README_zh-CN.md @@ -101,7 +101,7 @@ python ntu_pose_extraction.py S001C001P001R001A001_rgb.avi S001C001P001R001A001. 
-## 如何将 NTU RGB+D 原始数据转化为 MMAction2 格式 +## 如何将 NTU RGB+D 原始数据转化为 MMAction2 格式 (转换好的标注文件目前仅适用于 GCN 模型) 这里介绍如何将 NTU RGB+D 原始数据转化为 MMAction2 格式。首先,需要从 https://github.com/shahroudy/NTURGB-D 下载原始 NTU-RGBD 60 和 NTU-RGBD 120 数据集的原始骨架数据。 From dffb38ac7f6f771c02ad62ba3c87a58d41249e46 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Fri, 26 Nov 2021 11:27:10 +0800 Subject: [PATCH 300/414] [Doc] Fix Doc (#1294) --- docs/stat.py | 7 +++++++ docs_zh_CN/stat.py | 7 +++++++ 2 files changed, 14 insertions(+) diff --git a/docs/stat.py b/docs/stat.py index 82f5e5fc72..53e64004e2 100755 --- a/docs/stat.py +++ b/docs/stat.py @@ -28,6 +28,13 @@ def anchor(name): # title title = content.split('\n')[0].replace('#', '') + # skip IMAGE and ABSTRACT tags + content = [ + x for x in content.split('\n') + if 'IMAGE' not in x and 'ABSTRACT' not in x + ] + content = '\n'.join(content) + # count papers papers = set( (papertype, titlecase.titlecase(paper.lower().strip())) diff --git a/docs_zh_CN/stat.py b/docs_zh_CN/stat.py index bfb0bb4417..fe7590afdd 100755 --- a/docs_zh_CN/stat.py +++ b/docs_zh_CN/stat.py @@ -27,6 +27,13 @@ def anchor(name): # title title = content.split('\n')[0].replace('#', '') + # skip IMAGE and ABSTRACT tags + content = [ + x for x in content.split('\n') + if 'IMAGE' not in x and 'ABSTRACT' not in x + ] + content = '\n'.join(content) + # count papers papers = set( (papertype, titlecase.titlecase(paper.lower().strip())) From f0c8b15b6f0906119ef7a28d15a2b7ec6101cee7 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Fri, 26 Nov 2021 11:37:26 +0800 Subject: [PATCH 301/414] [Improvement] Support skip postproc in ntu_pose_extraction (#1295) --- tools/data/skeleton/ntu_pose_extraction.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/tools/data/skeleton/ntu_pose_extraction.py b/tools/data/skeleton/ntu_pose_extraction.py index e02f9f0add..5e25991d33 100644 --- a/tools/data/skeleton/ntu_pose_extraction.py +++ b/tools/data/skeleton/ntu_pose_extraction.py @@ -288,7 +288,8 @@ def pose_inference(args, frame_paths, det_results): print('Performing Human Pose Estimation for each frame') prog_bar = mmcv.ProgressBar(len(frame_paths)) - num_frame, num_person = det_results.shape[:2] + num_frame = len(det_results) + num_person = max([len(x) for x in det_results]) kp = np.zeros((num_person, num_frame, 17, 3), dtype=np.float32) for i, (f, d) in enumerate(zip(frame_paths, det_results)): @@ -301,10 +302,11 @@ def pose_inference(args, frame_paths, det_results): return kp -def ntu_pose_extraction(vid): +def ntu_pose_extraction(vid, skip_postproc=False): frame_paths = extract_frame(vid) det_results = detection_inference(args, frame_paths) - det_results = ntu_det_postproc(vid, det_results) + if not skip_postproc: + det_results = ntu_det_postproc(vid, det_results) pose_results = pose_inference(args, frame_paths, det_results) anno = dict() anno['keypoint'] = pose_results[..., :2] @@ -325,6 +327,7 @@ def parse_args(): parser.add_argument('video', type=str, help='source video') parser.add_argument('output', type=str, help='output pickle name') parser.add_argument('--device', type=str, default='cuda:0') + parser.add_argument('--skip-postproc', action='store_true') args = parser.parse_args() return args @@ -334,5 +337,5 @@ def parse_args(): args.device = global_args.device args.video = global_args.video args.output = global_args.output - anno = ntu_pose_extraction(args.video) + anno = ntu_pose_extraction(args.video, args.skip_postproc) mmcv.dump(anno, args.output) From 
d0e22b542e5cbd1516e79b4186f9d2265a194b72 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Tue, 30 Nov 2021 12:08:04 +0800 Subject: [PATCH 302/414] [Doc] Add MMFewShot --- README.md | 1 + README_zh-CN.md | 1 + docs/conf.py | 4 ++++ docs_zh_CN/conf.py | 4 ++++ 4 files changed, 10 insertions(+) diff --git a/README.md b/README.md index 3e1ba85712..0c8acf0423 100644 --- a/README.md +++ b/README.md @@ -264,6 +264,7 @@ We wish that the toolbox and benchmark could serve the growing research communit - [MMDetection](https://github.com/open-mmlab/mmdetection): OpenMMLab detection toolbox and benchmark. - [MMDetection3D](https://github.com/open-mmlab/mmdetection3d): OpenMMLab's next-generation platform for general 3D object detection. - [MMEditing](https://github.com/open-mmlab/mmediting): OpenMMLab image and video editing toolbox. +- [MMFewShot](https://github.com/open-mmlab/mmfewshot): OpenMMLab few shot learning toolbox. - [MMFlow](https://github.com/open-mmlab/mmflow): OpenMMLab Optical Flow Toolbox and Benchmark. - [MMGeneration](https://github.com/open-mmlab/mmgeneration): OpenMMLab image and video generative models toolbox. - [MMOCR](https://github.com/open-mmlab/mmocr): A Comprehensive Toolbox for Text Detection, Recognition and Understanding. diff --git a/README_zh-CN.md b/README_zh-CN.md index 05550aae20..9430593bf3 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -258,6 +258,7 @@ MMAction2 是一款由不同学校和公司共同贡献的开源项目。我们 - [MMDetection](https://github.com/open-mmlab/mmdetection): OpenMMLab 检测工具箱与测试基准 - [MMDetection3D](https://github.com/open-mmlab/mmdetection3d): OpenMMLab 新一代通用3D目标检测平台 - [MMEditing](https://github.com/open-mmlab/mmediting): OpenMMLab 图像视频编辑工具箱 +- [MMFewShot](https://github.com/open-mmlab/mmfewshot): OpenMMLab 少样本学习代码库 - [MMFlow](https://github.com/open-mmlab/mmflow): OpenMMLab 光流估计工具箱 - [MMGeneration](https://github.com/open-mmlab/mmgeneration): OpenMMLab 图片视频生成模型工具箱 - [MMOCR](https://github.com/open-mmlab/mmocr): OpenMMLab 全流程文字检测识别理解工具包 diff --git a/docs/conf.py b/docs/conf.py index 341dedbf33..d292fbef49 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -140,6 +140,10 @@ def get_version(): 'name': 'MMEditing', 'url': 'https://mmediting.readthedocs.io/en/latest/', }, + { + 'name': 'MMFewShot', + 'url': 'https://mmfewshot.readthedocs.io/en/latest/', + }, { 'name': 'MMFlow', 'url': 'https://mmflow.readthedocs.io/en/latest/', diff --git a/docs_zh_CN/conf.py b/docs_zh_CN/conf.py index e45875b79b..b66cf507d6 100644 --- a/docs_zh_CN/conf.py +++ b/docs_zh_CN/conf.py @@ -134,6 +134,10 @@ def get_version(): 'name': 'MMEditing', 'url': 'https://mmediting.readthedocs.io/zh_CN/latest/', }, + { + 'name': 'MMFewShot', + 'url': 'https://mmfewshot.readthedocs.io/zh_CN/latest/', + }, { 'name': 'MMFlow', 'url': 'https://mmflow.readthedocs.io/zh_CN/latest/', From 43d797d97553efff94be66dbef9f505764c67bfb Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Tue, 30 Nov 2021 12:09:48 +0800 Subject: [PATCH 303/414] [CI] make lint independent (#1302) --- .github/workflows/build.yml | 33 ++++++++++++++------------------- .github/workflows/lint.yml | 23 +++++++++++++++++++++++ 2 files changed, 37 insertions(+), 19 deletions(-) create mode 100644 .github/workflows/lint.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index c5c1436b50..3676d26c13 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,26 +1,21 @@ name: build -on: [push, pull_request] +on: + push: + paths: + - '!demo/**' + - '!docker/**' + - '!tools/**' + + pull_request: + paths: + - 
'!demo/**' + - '!docker/**' + - '!tools/**' + - '!docs/**' + - '!docs_zh-CN/**' jobs: - lint: - runs-on: ubuntu-18.04 - steps: - - uses: actions/checkout@v2 - - name: Set up Python 3.7 - uses: actions/setup-python@v2 - with: - python-version: 3.7 - - name: Install pre-commit hook - run: | - pip install pre-commit - pre-commit install - - name: Linting - run: pre-commit run --all-files - - name: Check docstring coverage - run: | - pip install interrogate - interrogate -v --ignore-init-method --ignore-module --ignore-nested-functions --ignore-regex "__repr__" --fail-under 80 mmaction build_cpu: runs-on: ubuntu-18.04 strategy: diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 0000000000..ced724c3b6 --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,23 @@ +name: lint + +on: [push, pull_request] + +jobs: + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.7 + uses: actions/setup-python@v2 + with: + python-version: 3.7 + - name: Install pre-commit hook + run: | + pip install pre-commit + pre-commit install + - name: Linting + run: pre-commit run --all-files + - name: Check docstring coverage + run: | + pip install interrogate + interrogate -v --ignore-init-method --ignore-module --ignore-nested-functions --ignore-regex "__repr__" --fail-under 80 mmdet From dbf5d59fa592818325285b786a0eab8031d9bc80 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Tue, 30 Nov 2021 12:12:22 +0800 Subject: [PATCH 304/414] [Fix] try catch when making symlink (#1303) --- setup.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 8a4accb7e1..ac9aaf4f07 100644 --- a/setup.py +++ b/setup.py @@ -136,7 +136,18 @@ def add_mim_extension(): if mode == 'symlink': src_relpath = osp.relpath(src_path, osp.dirname(tar_path)) - os.symlink(src_relpath, tar_path) + try: + os.symlink(src_relpath, tar_path) + except OSError: + # Creating a symbolic link on windows may raise an + # `OSError: [WinError 1314]` due to privilege. If + # the error happens, the src file will be copied + mode = 'copy' + warnings.warn( + f'Failed to create a symbolic link for {src_relpath}, ' + f'and it will be copied to {tar_path}') + else: + continue elif mode == 'copy': if osp.isfile(src_path): shutil.copyfile(src_path, tar_path) From 00f2e65267831b3d8a6ff3677312613786b55941 Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Fri, 3 Dec 2021 14:37:45 +0800 Subject: [PATCH 305/414] [Docs] Add mmhuman3d (#1304) * master * master 0721 * add README * [Docs] add mmhuman3d * modify doc_zh --- README.md | 1 + README_zh-CN.md | 1 + docs/conf.py | 4 ++++ docs_zh_CN/conf.py | 4 ++++ 4 files changed, 10 insertions(+) diff --git a/README.md b/README.md index 0c8acf0423..5b125d86ae 100644 --- a/README.md +++ b/README.md @@ -267,6 +267,7 @@ We wish that the toolbox and benchmark could serve the growing research communit - [MMFewShot](https://github.com/open-mmlab/mmfewshot): OpenMMLab few shot learning toolbox. - [MMFlow](https://github.com/open-mmlab/mmflow): OpenMMLab Optical Flow Toolbox and Benchmark. - [MMGeneration](https://github.com/open-mmlab/mmgeneration): OpenMMLab image and video generative models toolbox. +- [MMHuman3D](https://github.com/open-mmlab/mmhuman3d): OpenMMLab human pose and shape estimation toolbox and benchmark. - [MMOCR](https://github.com/open-mmlab/mmocr): A Comprehensive Toolbox for Text Detection, Recognition and Understanding. 
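The `setup.py` try/except added in the patch above deserves a note: without it, `os.symlink` raising `OSError: [WinError 1314]` on unprivileged Windows installs would abort the install outright. A standalone sketch of the same link-then-copy fallback; the `link_or_copy` name and the `copytree` branch are our additions, not the repository's:

import os
import os.path as osp
import shutil
import warnings

def link_or_copy(src_path, tar_path):
    """Prefer a relative symlink; fall back to copying when symlink
    creation is not permitted (e.g. WinError 1314 on Windows)."""
    src_relpath = osp.relpath(src_path, osp.dirname(tar_path))
    try:
        os.symlink(src_relpath, tar_path)
    except OSError:
        warnings.warn(f'Failed to create a symbolic link for {src_relpath}, '
                      f'copying to {tar_path} instead')
        if osp.isfile(src_path):
            shutil.copyfile(src_path, tar_path)
        else:
            shutil.copytree(src_path, tar_path)

# Example call (hypothetical paths):
# link_or_copy('configs', 'mmaction/.mim/configs')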
- [MMPose](https://github.com/open-mmlab/mmpose): OpenMMLab pose estimation toolbox and benchmark. - [MMSegmentation](https://github.com/open-mmlab/mmsegmentation): OpenMMLab semantic segmentation toolbox and benchmark. diff --git a/README_zh-CN.md b/README_zh-CN.md index 9430593bf3..910820610e 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -261,6 +261,7 @@ MMAction2 是一款由不同学校和公司共同贡献的开源项目。我们 - [MMFewShot](https://github.com/open-mmlab/mmfewshot): OpenMMLab 少样本学习代码库 - [MMFlow](https://github.com/open-mmlab/mmflow): OpenMMLab 光流估计工具箱 - [MMGeneration](https://github.com/open-mmlab/mmgeneration): OpenMMLab 图片视频生成模型工具箱 +- [MMHuman3D](https://github.com/open-mmlab/mmhuman3d): OpenMMLab 人体姿态和形状估计工具箱 - [MMOCR](https://github.com/open-mmlab/mmocr): OpenMMLab 全流程文字检测识别理解工具包 - [MMPose](https://github.com/open-mmlab/mmpose): OpenMMLab 姿态估计工具箱与测试基准 - [MMSegmentation](https://github.com/open-mmlab/mmsegmentation): OpenMMLab 语义分割工具箱与测试基准 diff --git a/docs/conf.py b/docs/conf.py index d292fbef49..5a3f69abcb 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -152,6 +152,10 @@ def get_version(): 'name': 'MMGeneration', 'url': 'https://mmgeneration.readthedocs.io/en/latest/', }, + { + 'name': 'MMHuman3D', + 'url': 'https://mmhuman3d.readthedocs.io/en/latest/', + }, { 'name': 'MMOCR', 'url': 'https://mmocr.readthedocs.io/en/latest/', diff --git a/docs_zh_CN/conf.py b/docs_zh_CN/conf.py index b66cf507d6..a66fa3acf5 100644 --- a/docs_zh_CN/conf.py +++ b/docs_zh_CN/conf.py @@ -146,6 +146,10 @@ def get_version(): 'name': 'MMGeneration', 'url': 'https://mmgeneration.readthedocs.io/zh_CN/latest/', }, + { + 'name': 'MMHuman3D', + 'url': 'https://mmhuman3d.readthedocs.io/zh_CN/latest/', + }, { 'name': 'MMOCR', 'url': 'https://mmocr.readthedocs.io/zh_CN/latest/', From 576d553308270d89c2d5eef42350448ee1c6a352 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Fri, 3 Dec 2021 17:20:31 +0800 Subject: [PATCH 306/414] Update __init__.py --- mmaction/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmaction/__init__.py b/mmaction/__init__.py index afd7d5a336..72a2a34d16 100644 --- a/mmaction/__init__.py +++ b/mmaction/__init__.py @@ -5,7 +5,7 @@ from .version import __version__ mmcv_minimum_version = '1.3.6' -mmcv_maximum_version = '1.4.0' +mmcv_maximum_version = '1.5.0' mmcv_version = digit_version(mmcv.__version__) assert (digit_version(mmcv_minimum_version) <= mmcv_version From 61d7eb8fc04aa4d519f289efbe1c81ff41e61416 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Sat, 4 Dec 2021 19:23:55 +0800 Subject: [PATCH 307/414] [Fix] Fix interrogate error (#1305) * Fix interrogate error * correct dir name --- .github/workflows/build.yml | 2 +- .github/workflows/lint.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 3676d26c13..6a1d251dd7 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -13,7 +13,7 @@ on: - '!docker/**' - '!tools/**' - '!docs/**' - - '!docs_zh-CN/**' + - '!docs_zh_CN/**' jobs: build_cpu: diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index ced724c3b6..f85e90e41a 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -20,4 +20,4 @@ jobs: - name: Check docstring coverage run: | pip install interrogate - interrogate -v --ignore-init-method --ignore-module --ignore-nested-functions --ignore-regex "__repr__" --fail-under 80 mmdet + interrogate -v --ignore-init-method --ignore-module --ignore-nested-functions 
--ignore-regex "__repr__" --fail-under 80 mmaction From 08f18409e6849d38a88991aa66cc0f90d997f908 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Tue, 7 Dec 2021 13:06:53 +0800 Subject: [PATCH 308/414] [ModelZoo] Improve some sthv1 related models (#1306) * improve some sthv1 related models * update * update tpn --- configs/recognition/slowfast/README.md | 2 +- configs/recognition/slowfast/README_zh-CN.md | 6 ++++ configs/recognition/slowfast/metafile.yml | 10 +++---- configs/recognition/slowonly/README.md | 2 +- configs/recognition/slowonly/README_zh-CN.md | 2 +- configs/recognition/slowonly/metafile.yml | 10 +++---- configs/recognition/tanet/README.md | 4 +-- configs/recognition/tanet/README_zh-CN.md | 4 +-- configs/recognition/tanet/metafile.yml | 22 +++++++------- configs/recognition/tpn/README.md | 2 +- configs/recognition/tpn/README_zh-CN.md | 2 +- configs/recognition/tpn/metafile.yml | 10 +++---- configs/recognition/tsm/README.md | 4 +-- configs/recognition/tsm/README_zh-CN.md | 4 +-- configs/recognition/tsm/metafile.yml | 30 ++++++++++---------- 15 files changed, 60 insertions(+), 54 deletions(-) diff --git a/configs/recognition/slowfast/README.md b/configs/recognition/slowfast/README.md index 0e6d2119c0..e31bd2ece7 100644 --- a/configs/recognition/slowfast/README.md +++ b/configs/recognition/slowfast/README.md @@ -45,7 +45,7 @@ We present SlowFast networks for video recognition. Our model involves (i) a Slo |config | resolution | gpus | backbone |pretrain| top1 acc| top5 acc | inference_time(video/s) | gpu_mem(M) | ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[slowfast_r50_16x8x1_22e_sthv1_rgb](/configs/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb.py)|height 100|8|ResNet50|Kinetics400|49.24|78.79|x|9293|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/slowfast_r50_16x8x1_22e_sthv1_rgb_20210630-53355c16.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/20210606_225114.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/20210606_225114.log.json)| +|[slowfast_r50_16x8x1_22e_sthv1_rgb](/configs/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb.py)|height 100|8|ResNet50|Kinetics400|49.67|79.00|x|9293|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/slowfast_r50_16x8x1_22e_sthv1_rgb_20211202-aaaf9279.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/slowfast_r50_16x8x1_22e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/slowfast_r50_16x8x1_22e_sthv1_rgb.json)| :::{note} diff --git a/configs/recognition/slowfast/README_zh-CN.md b/configs/recognition/slowfast/README_zh-CN.md index 18441b1818..71c75ed8c8 100644 --- a/configs/recognition/slowfast/README_zh-CN.md +++ b/configs/recognition/slowfast/README_zh-CN.md @@ -30,6 +30,12 @@ |[slowfast_r101_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb.py) |短边256|8x4| ResNet101 |None|77.90|93.51||25994| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/slowfast_r101_8x8x1_256e_kinetics400_rgb_20210218-0dd54025.pth) | 
[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log.json)| |[slowfast_r152_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r152_r50_4x16x1_256e_kinetics400_rgb.py) |短边256|8x1| ResNet152 + ResNet50 |None|77.13|93.20||10077| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/slowfast_r152_4x16x1_256e_kinetics400_rgb_20210122-bdeb6b87.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log.json)| +### Something-Something V1 + +|配置文件 | 分辨率 | GPU 数量 | 主干网络 |预训练| top1 准确率| top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log| json| +|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|[slowfast_r50_16x8x1_22e_sthv1_rgb](/configs/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb.py)|高 100|8|ResNet50|Kinetics400|49.67|79.00|x|9293|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/slowfast_r50_16x8x1_22e_sthv1_rgb_20211202-aaaf9279.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/slowfast_r50_16x8x1_22e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/slowfast_r50_16x8x1_22e_sthv1_rgb.json)| + 注: 1. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 diff --git a/configs/recognition/slowfast/metafile.yml b/configs/recognition/slowfast/metafile.yml index f0da9e4f79..bc0dc50eed 100644 --- a/configs/recognition/slowfast/metafile.yml +++ b/configs/recognition/slowfast/metafile.yml @@ -206,9 +206,9 @@ Models: Results: - Dataset: SthV1 Metrics: - Top 1 Accuracy: 49.24 - Top 5 Accuracy: 78.79 + Top 1 Accuracy: 49.67 + Top 5 Accuracy: 79.00 Task: Action Recognition - Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/20210606_225114.log.json - Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/20210606_225114.log - Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/slowfast_r50_16x8x1_22e_sthv1_rgb_20210630-53355c16.pth + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/slowfast_r50_16x8x1_22e_sthv1_rgb.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/slowfast_r50_16x8x1_22e_sthv1_rgb.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/slowfast_r50_16x8x1_22e_sthv1_rgb_20211202-aaaf9279.pth diff --git a/configs/recognition/slowonly/README.md b/configs/recognition/slowonly/README.md index 403a535813..d5846782ae 100644 --- a/configs/recognition/slowonly/README.md +++ b/configs/recognition/slowonly/README.md @@ -106,7 +106,7 @@ In data benchmark, we compare two different data preprocessing methods: (1) Resi |config | gpus | backbone | pretrain | top1 acc| top5 acc | gpu_mem(M) | ckpt | log| json| 
|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.py)|8|ResNet50|ImageNet|46.63|77.19|7759|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_r50_8x4x1_64e_sthv1_rgb-34901d23.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_r50_8x4x1_64e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_r50_8x4x1_64e_sthv1_rgb.json)| +|[slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.py)|8|ResNet50|ImageNet|47.76|77.49|7759|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb_20211202-d034ff12.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.json)| :::{note} diff --git a/configs/recognition/slowonly/README_zh-CN.md b/configs/recognition/slowonly/README_zh-CN.md index a8e87e4174..917be85500 100644 --- a/configs/recognition/slowonly/README_zh-CN.md +++ b/configs/recognition/slowonly/README_zh-CN.md @@ -95,7 +95,7 @@ |配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率| top5 准确率 | GPU 显存占用 (M) | ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.py)|8|ResNet50|ImageNet|46.63|77.19|7759|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_r50_8x4x1_64e_sthv1_rgb-34901d23.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_r50_8x4x1_64e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_r50_8x4x1_64e_sthv1_rgb.json)| +|[slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.py)|8|ResNet50|ImageNet|47.76|77.49|7759|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb_20211202-d034ff12.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.json)| 注: diff --git a/configs/recognition/slowonly/metafile.yml b/configs/recognition/slowonly/metafile.yml index 5e604c058a..9e4110ea09 100644 --- a/configs/recognition/slowonly/metafile.yml +++ b/configs/recognition/slowonly/metafile.yml @@ -542,9 +542,9 @@ Models: Results: - Dataset: SthV1 
Metrics: - Top 1 Accuracy: 46.63 - Top 5 Accuracy: 77.19 + Top 1 Accuracy: 47.76 + Top 5 Accuracy: 77.49 Task: Action Recognition - Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_r50_8x4x1_64e_sthv1_rgb.json - Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_r50_8x4x1_64e_sthv1_rgb.log - Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_r50_8x4x1_64e_sthv1_rgb-34901d23.pth + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb_20211202-d034ff12.pth diff --git a/configs/recognition/tanet/README.md b/configs/recognition/tanet/README.md index 24e07e1122..37760e5042 100644 --- a/configs/recognition/tanet/README.md +++ b/configs/recognition/tanet/README.md @@ -36,8 +36,8 @@ Video data is with complex temporal dynamics due to various factors such as came |config | resolution | gpus | backbone| pretrain | top1 acc (efficient/accurate)| top5 acc (efficient/accurate)| gpu_mem(M) | ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tanet_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb.py)|height 100|8|TANet|ImageNet|47.45/49.69|76.00/77.62|7127|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/tanet_r50_1x1x8_50e_sthv1_rgb_20210630-f4a48609.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log)|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log.json)| -|[tanet_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb.py)|height 100|8|TANet|ImageNet|47.73/50.41|77.31/78.47|7127|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb_20210630-7c19303c.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/20210607_155335.log)|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/20210607_155335.log.json)| +|[tanet_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb.py)|height 100|8|TANet|ImageNet|47.34/49.58|75.72/77.31|7127|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/tanet_r50_1x1x8_50e_sthv1_rgb_20210630-f4a48609.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log)|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log.json)| +|[tanet_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb.py)|height 
100|8|TANet|ImageNet|49.05/50.91|77.90/79.13|7127|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb_20211202-370c2128.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb.log)|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb.json)| :::{note} diff --git a/configs/recognition/tanet/README_zh-CN.md b/configs/recognition/tanet/README_zh-CN.md index a92230c0fc..4902cf8430 100644 --- a/configs/recognition/tanet/README_zh-CN.md +++ b/configs/recognition/tanet/README_zh-CN.md @@ -25,8 +25,8 @@ |配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 (efficient/accurate) | top5 准确率 (efficient/accurate) | GPU 显存占用 (M)| ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tanet_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb.py)|高 100|8|TANet|ImageNet|47.45/49.69|76.00/77.62|7127|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/tanet_r50_1x1x8_50e_sthv1_rgb_20210630-f4a48609.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log)|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log.json)| -|[tanet_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb.py)|高 100|8|TANet|ImageNet|47.73/50.41|77.31/78.47|7127|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb_20210630-7c19303c.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/20210607_155335.log)|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/20210607_155335.log.json)| +|[tanet_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb.py)|高 100|8|TANet|ImageNet|47.34/49.58|75.72/77.31|7127|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/tanet_r50_1x1x8_50e_sthv1_rgb_20210630-f4a48609.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log)|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log.json)| +|[tanet_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb.py)|高 100|8|TANet|ImageNet|49.05/50.91|77.90/79.13|7127|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb_20211202-370c2128.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb.log)|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb.json)| 注: diff --git a/configs/recognition/tanet/metafile.yml b/configs/recognition/tanet/metafile.yml index d32d710b17..4e5746bf72 100644 --- a/configs/recognition/tanet/metafile.yml +++ b/configs/recognition/tanet/metafile.yml @@ -45,10 +45,10 @@ Models: Results: - Dataset: SthV1 Metrics: - Top 1 Accuracy: 49.69 - Top 1 Accuracy (efficient): 47.45 - Top 5 Accuracy: 77.62 - Top 5 Accuracy (efficient): 76.0 + Top 1 Accuracy: 49.58 + Top 1 Accuracy (efficient): 47.34 + Top 5 
Accuracy: 77.31 + Top 5 Accuracy (efficient): 75.72 Task: Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log @@ -70,11 +70,11 @@ Models: Results: - Dataset: SthV1 Metrics: - Top 1 Accuracy: 50.41 - Top 1 Accuracy (efficient): 47.73 - Top 5 Accuracy: 78.47 - Top 5 Accuracy (efficient): 77.31 + Top 1 Accuracy: 50.91 + Top 1 Accuracy (efficient): 49.05 + Top 5 Accuracy: 79.13 + Top 5 Accuracy (efficient): 77.90 Task: Action Recognition - Training Json Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/20210607_155335.log.json - Training Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/20210607_155335.log - Weights: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb_20210630-7c19303c.pth + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb.log + Weights: https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb_20211202-370c2128.pth diff --git a/configs/recognition/tpn/README.md b/configs/recognition/tpn/README.md index 98477f64d3..7ce9ce6f63 100644 --- a/configs/recognition/tpn/README.md +++ b/configs/recognition/tpn/README.md @@ -37,7 +37,7 @@ Visual tempo characterizes the dynamics and the temporal scale of an action. 
Mod |config | resolution | gpus | backbone| pretrain | top1 acc| top5 acc | gpu_mem(M) | ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tpn_tsm_r50_1x1x8_150e_sthv1_rgb](/configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py)|height 100|8x6| ResNet50 | TSM | 50.80 | 79.05 | 8828 |[ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/tpn_tsm_r50_1x1x8_150e_sthv1_rgb_20210311-28de4cd5.pth) |[log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/20210311_162636.log)|[json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/20210311_162636.log.json)| +|[tpn_tsm_r50_1x1x8_150e_sthv1_rgb](/configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py)|height 100|8x6| ResNet50 | TSM | 51.50 | 79.15 | 8828 |[ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/tpn_tsm_r50_1x1x8_150e_sthv1_rgb_20211202-c28ed83f.pth) |[log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.json)| :::{note} diff --git a/configs/recognition/tpn/README_zh-CN.md b/configs/recognition/tpn/README_zh-CN.md index e6e04cdf44..ec66656d1d 100644 --- a/configs/recognition/tpn/README_zh-CN.md +++ b/configs/recognition/tpn/README_zh-CN.md @@ -26,7 +26,7 @@ |配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率| top5 准确率 | GPU 显存占用 (M) | ckpt | log| json| |:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tpn_tsm_r50_1x1x8_150e_sthv1_rgb](/configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py)|height 100|8x6| ResNet50 | TSM | 50.80 | 79.05 | 8828 |[ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/tpn_tsm_r50_1x1x8_150e_sthv1_rgb_20210311-28de4cd5.pth) |[log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/20210311_162636.log)|[json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/20210311_162636.log.json)| +|[tpn_tsm_r50_1x1x8_150e_sthv1_rgb](/configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py)|height 100|8x6| ResNet50 | TSM | 51.50 | 79.15 | 8828 |[ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/tpn_tsm_r50_1x1x8_150e_sthv1_rgb_20211202-c28ed83f.pth) |[log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.json)| 注: diff --git a/configs/recognition/tpn/metafile.yml b/configs/recognition/tpn/metafile.yml index 44b1130d6e..973b6adaa6 100644 --- a/configs/recognition/tpn/metafile.yml +++ b/configs/recognition/tpn/metafile.yml @@ -68,9 +68,9 @@ Models: Results: - Dataset: SthV1 Metrics: - Top 1 Accuracy: 50.8 - Top 5 Accuracy: 79.05 + Top 1 Accuracy: 51.50 + Top 5 Accuracy: 79.15 Task: Action Recognition - Training Json Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/20210311_162636.log.json - Training Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/20210311_162636.log - Weights: 
https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/tpn_tsm_r50_1x1x8_150e_sthv1_rgb_20210311-28de4cd5.pth + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.log + Weights: https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/tpn_tsm_r50_1x1x8_150e_sthv1_rgb_20211202-c28ed83f.pth diff --git a/configs/recognition/tsm/README.md b/configs/recognition/tsm/README.md index 20293cc5c7..c3528ee5be 100644 --- a/configs/recognition/tsm/README.md +++ b/configs/recognition/tsm/README.md @@ -75,8 +75,8 @@ The explosive growth in video streaming gives rise to challenges on performing v |[tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.py) |height 100|8| ResNet50 | ImageNet| 47.65 / 48.66 | 76.67 / 77.41 |[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7077| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb-ee93e5e3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.json) | |[tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.py) |height 100|8| ResNet50 | ImageNet| 46.26 / 47.68 | 75.92 / 76.49 |[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7077| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb-4f4f4740.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.json) | |[tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.py) |height 100|8| ResNet50 | ImageNet| 47.85 / 50.31|76.78 / 78.18|[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7077| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb_20210324-76937692.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.log)| 
[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.json)| -|[tsm_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb.py)|height 100|8| ResNet50 | ImageNet|47.62 / 49.28|76.63 / 77.82|[47.05 / 48.61](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[76.40 / 77.96](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|10390|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb_20201010-17fa49f6.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/20201010_221240.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/20201010_221240.log.json)| -|[tsm_r101_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb.py)|height 100|8| ResNet50 | ImageNet|45.72 / 48.43|74.67 / 76.72|[46.64 / 48.13](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[75.40 / 77.31](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|9800|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb_20201010-43fedf2e.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/20201010_224055.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/20201010_224055.log.json)| +|[tsm_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb.py)|height 100|8| ResNet50 | ImageNet|47.77 / 49.03|76.82 / 77.83|[47.05 / 48.61](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[76.40 / 77.96](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|10390|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb_20211202-b922e5d2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb.json)| +|[tsm_r101_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb.py)|height 100|8| ResNet50 | ImageNet|46.09 / 48.59|75.41 / 77.10|[46.64 / 48.13](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[75.40 / 77.31](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|9800|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb_20211202-49970a5b.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb.json)| ### Something-Something V2 diff --git a/configs/recognition/tsm/README_zh-CN.md b/configs/recognition/tsm/README_zh-CN.md index 165a5fee40..f95876fd9e 100644 --- a/configs/recognition/tsm/README_zh-CN.md +++ 
b/configs/recognition/tsm/README_zh-CN.md @@ -64,8 +64,8 @@ | [tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 47.65 / 48.66 | 76.67 / 77.41 | [45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 7077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb-ee93e5e3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.json) | | [tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 46.26 / 47.68 | 75.92 / 76.49 | [45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 7077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb-4f4f4740.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.json) | | [tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 47.85 / 50.31 | 76.78 / 78.18 | [45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) |7077|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb_20210324-76937692.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.json)| -|[tsm_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb.py)|高 100|8| ResNet50 | ImageNet|47.62 / 49.28|76.63 / 77.82|[47.05 / 48.61](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[76.40 / 
77.96](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|10390|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb_20201010-17fa49f6.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/20201010_221240.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/20201010_221240.log.json)| -|[tsm_r101_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb.py)|高 100|8| ResNet50 | ImageNet|45.72 / 48.43|74.67 / 76.72|[46.64 / 48.13](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[75.40 / 77.31](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|9800|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb_20201010-43fedf2e.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/20201010_224055.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/20201010_224055.log.json)| +|[tsm_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb.py)|高 100|8| ResNet50 | ImageNet|47.77 / 49.03|76.82 / 77.83|[47.05 / 48.61](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[76.40 / 77.96](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|10390|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb_20211202-b922e5d2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb.json)| +|[tsm_r101_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb.py)|高 100|8| ResNet50 | ImageNet|46.09 / 48.59|75.41 / 77.10|[46.64 / 48.13](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[75.40 / 77.31](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|9800|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb_20201010-43fedf2e.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb.json)| ### Something-Something V2 diff --git a/configs/recognition/tsm/metafile.yml b/configs/recognition/tsm/metafile.yml index c438aea968..6ad13f2948 100644 --- a/configs/recognition/tsm/metafile.yml +++ b/configs/recognition/tsm/metafile.yml @@ -519,14 +519,14 @@ Models: Results: - Dataset: SthV1 Metrics: - Top 1 Accuracy: 49.28 - Top 1 Accuracy (efficient): 47.62 - Top 5 Accuracy: 77.82 - Top 5 Accuracy (efficient): 76.63 - Task: Action Recognition - Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/20201010_221240.log.json - Training Log: 
https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/20201010_221240.log - Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb_20201010-17fa49f6.pth + Top 1 Accuracy: 49.03 + Top 1 Accuracy (efficient): 47.77 + Top 5 Accuracy: 77.83 + Top 5 Accuracy (efficient): 76.82 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb_20211202-b922e5d2.pth reference top1 acc (efficient/accurate): '[47.05 / 48.61](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' reference top5 acc (efficient/accurate): '[76.40 / 77.96](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' - Config: configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb.py @@ -546,14 +546,14 @@ Models: Results: - Dataset: SthV1 Metrics: - Top 1 Accuracy: 48.43 - Top 1 Accuracy (efficient): 45.72 - Top 5 Accuracy: 76.72 - Top 5 Accuracy (efficient): 74.67 + Top 1 Accuracy: 48.59 + Top 1 Accuracy (efficient): 46.09 + Top 5 Accuracy: 77.10 + Top 5 Accuracy (efficient): 75.41 Task: Action Recognition - Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/20201010_224055.log.json - Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/20201010_224055.log - Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb_20201010-43fedf2e.pth + Training Json Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb.json + Training Log: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb.log + Weights: https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb_20211202-49970a5b.pth reference top1 acc (efficient/accurate): '[46.64 / 48.13](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' reference top5 acc (efficient/accurate): '[75.40 / 77.31](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)' - Config: configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py From ace8beb46399bd0c881cdeccbfcb0ed1fa7b31ad Mon Sep 17 00:00:00 2001 From: congee <35596075+congee524@users.noreply.github.com> Date: Tue, 7 Dec 2021 13:09:23 +0800 Subject: [PATCH 309/414] [Fix] fix typo in slowfast config (#1309) * fix typo * remove wrong ckpt with fusion_kernel=5 of slowfast_r50_8x8 --- configs/recognition/slowfast/README.md | 1 - configs/recognition/slowfast/README_zh-CN.md | 1 - configs/recognition/slowfast/metafile.yml | 23 ------------------- ...slowfast_r50_8x8x1_256e_kinetics400_rgb.py | 2 +- 4 files changed, 1 insertion(+), 26 deletions(-) diff --git a/configs/recognition/slowfast/README.md b/configs/recognition/slowfast/README.md index e31bd2ece7..4bbdbd4f0c 100644 --- a/configs/recognition/slowfast/README.md +++ 
b/configs/recognition/slowfast/README.md @@ -35,7 +35,6 @@ We present SlowFast networks for video recognition. Our model involves (i) a Slo |[slowfast_r50_video_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py) |short-side 256|8| ResNet50|None |73.95|91.50|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/slowfast_r50_video_4x16x1_256e_kinetics400_rgb_20200826-f85b90c5.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log.json)| |[slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 320|8x2| ResNet50|None |76.0|92.54|1.6 ((32+4)x10x3 frames)|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_256e_kinetics400_rgb_20210722-04e43ed4.pth)| [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log.json)| |[slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 320|8x2| ResNet50|None |76.34|92.67|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb_20210722-bb725050.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log.json)| -|[slowfast_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |short-side 256|8x4| ResNet50 |None |75.61|92.34|x|9062|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb_20200810-863812c2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log.json)| |[slowfast_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |short-side 320|8x3| ResNet50 |None|76.94|92.8|1.3 ((32+8)x10x3 frames)|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/slowfast_r50_8x8x1_256e_kinetics400_rgb_20200716-73547d2b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log.json)| |[slowfast_r101_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 256|8x1| ResNet101 
+ ResNet50 |None|76.69|93.07||16628| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/slowfast_r101_4x16x1_256e_kinetics400_rgb_20210218-d8b58813.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log.json)| |[slowfast_r101_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb.py) |short-side 256|8x4| ResNet101 |None|77.90|93.51||25994| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/slowfast_r101_8x8x1_256e_kinetics400_rgb_20210218-0dd54025.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log.json)| diff --git a/configs/recognition/slowfast/README_zh-CN.md b/configs/recognition/slowfast/README_zh-CN.md index 71c75ed8c8..7605871d2d 100644 --- a/configs/recognition/slowfast/README_zh-CN.md +++ b/configs/recognition/slowfast/README_zh-CN.md @@ -24,7 +24,6 @@ |[slowfast_r50_video_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py) |短边256|8| ResNet50|None |73.95|91.50|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/slowfast_r50_video_4x16x1_256e_kinetics400_rgb_20200826-f85b90c5.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log.json)| |[slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) |短边320|8x2| ResNet50|None |76.0|92.54|1.6 ((32+4)x10x3 frames)|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_256e_kinetics400_rgb_20210722-04e43ed4.pth)| [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log.json)| |[slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb.py) |短边320|8x2| ResNet50|None |76.34|92.67|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb_20210722-bb725050.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log.json)| -|[slowfast_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |短边256|8x4| ResNet50 |None 
|75.61|92.34|x|9062|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb_20200810-863812c2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log.json)| |[slowfast_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |短边320|8x3| ResNet50 |None|76.94|92.8|1.3 ((32+8)x10x3 frames)|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/slowfast_r50_8x8x1_256e_kinetics400_rgb_20200716-73547d2b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log.json)| |[slowfast_r101_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py) |短边256|8x1| ResNet101 + ResNet50 |None|76.69|93.07||16628| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/slowfast_r101_4x16x1_256e_kinetics400_rgb_20210218-d8b58813.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log.json)| |[slowfast_r101_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb.py) |短边256|8x4| ResNet101 |None|77.90|93.51||25994| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/slowfast_r101_8x8x1_256e_kinetics400_rgb_20210218-0dd54025.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log.json)| diff --git a/configs/recognition/slowfast/metafile.yml b/configs/recognition/slowfast/metafile.yml index bc0dc50eed..e9dc05a94c 100644 --- a/configs/recognition/slowfast/metafile.yml +++ b/configs/recognition/slowfast/metafile.yml @@ -74,29 +74,6 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/20200704_232901.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/20200704_232901.log Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_256e_kinetics400_rgb_20200704-bcde7ed7.pth -- Config: configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py - In Collection: SlowFast - Metadata: - Architecture: ResNet50 - Batch Size: 8 - Epochs: 256 - FLOPs: 66222034944 - Parameters: 34565560 - Pretrained: None - Resolution: short-side 256 - Training Data: Kinetics-400 - Training Resources: 32 GPUs - Modality: RGB - Name: slowfast_r50_8x8x1_256e_kinetics400_rgb - Results: - - Dataset: Kinetics-400 - Metrics: - Top 1 Accuracy: 75.61 - Top 5 Accuracy: 92.34 - Task: Action 
Recognition - Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log.json - Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/20200731_151537.log - Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb/slowfast_r50_256p_8x8x1_256e_kinetics400_rgb_20200810-863812c2.pth - Config: configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py In Collection: SlowFast Metadata: diff --git a/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py b/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py index ee68e80e05..49a30be628 100644 --- a/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py +++ b/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py @@ -5,6 +5,6 @@ resample_rate=4, # tau speed_ratio=4, # alpha channel_ratio=8, # beta_inv - slow_pathway=dict(fusion_kernel=5))) + slow_pathway=dict(fusion_kernel=7))) work_dir = './work_dirs/slowfast_r50_3d_8x8x1_256e_kinetics400_rgb' From 92e5517f1b3cbf937078d66c0dc5c4ba7abf7a08 Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Tue, 7 Dec 2021 17:31:40 +0800 Subject: [PATCH 310/414] [Docs] Update pre-commit (#1313) * master * master 0721 * add README * modify pre-commit --- .github/workflows/lint.yml | 4 ++++ .pre-commit-config.yaml | 4 ++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index f85e90e41a..be26c3623a 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -13,6 +13,10 @@ jobs: python-version: 3.7 - name: Install pre-commit hook run: | + # markdownlint requires ruby >= 2.7 + sudo apt-add-repository ppa:brightbox/ruby-ng -y + sudo apt-get update + sudo apt-get install -y ruby2.7 pip install pre-commit pre-commit install - name: Linting diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d2e5d004cc..655a926490 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,8 +29,8 @@ repos: args: ["--remove"] - id: mixed-line-ending args: ["--fix=lf"] - - repo: https://github.com/jumanjihouse/pre-commit-hooks - rev: 2.1.4 + - repo: https://github.com/markdownlint/markdownlint + rev: v0.11.0 hooks: - id: markdownlint args: [ "-r", "~MD002,~MD013,~MD024,~MD029,~MD033,~MD034,~MD036" ] From 2ebddacd146626ba615baa92988b786ce7ab709d Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Tue, 14 Dec 2021 14:59:15 +0800 Subject: [PATCH 311/414] [Feature] Update Agcn (#1299) * master * master 0721 * add README * 1129 update agcn * 1129 update agcn * 1129 update agcn * modify 2sagcn readme * modify 2sagcn readme --- README.md | 2 +- README_zh-CN.md | 2 +- .../2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py | 4 +- .../2sagcn_80e_ntu60_xsub_keypoint_3d.py | 4 +- configs/skeleton/2s-agcn/README.md | 4 +- configs/skeleton/2s-agcn/README_zh-CN.md | 4 +- configs/skeleton/2s-agcn/metafile.yml | 8 +-- mmaction/models/backbones/agcn.py | 50 +++++++++++++++---- mmaction/models/skeleton_gcn/utils/graph.py | 19 ++++++- 9 files changed, 71 insertions(+), 26 deletions(-) diff --git a/README.md b/README.md index 5b125d86ae..eeee2b9bc1 100644 --- a/README.md +++ b/README.md @@ -44,7 +44,7 @@ The master branch works with **PyTorch 1.3+**. 
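(Stepping back to the SlowFast config diff above: the `fusion_kernel` value, switched there from 5 to 7, sets the temporal kernel size of the time-strided convolution that fuses fast-pathway features into the slow pathway. A rough sketch of how a lateral connection of that style consumes it; the channel sizes and the `Conv3d` layout here are illustrative assumptions, not the exact mmaction2 module.)

```python
# Sketch only: a SlowFast-style lateral connection parameterized the way
# the config above is (speed_ratio = alpha, fusion_kernel as the temporal
# kernel). Channel sizes are illustrative.
import torch
import torch.nn as nn

speed_ratio = 4    # alpha: the fast pathway sees 4x the frames of the slow one
fusion_kernel = 7  # the value this commit switches to (previously 5)

fast_channels = 8
lateral = nn.Conv3d(
    fast_channels,
    fast_channels * 2,
    kernel_size=(fusion_kernel, 1, 1),
    stride=(speed_ratio, 1, 1),
    padding=((fusion_kernel - 1) // 2, 0, 0),
    bias=False)

fast_feat = torch.randn(1, fast_channels, 32, 56, 56)
print(lateral(fast_feat).shape)  # (1, 16, 8, 56, 56): T goes 32 -> 8
```

A larger temporal kernel lets each fused step see a wider window of fast-pathway frames at the same stride.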
## Updates -- (2021-11-24) We support **2s-AGCN** on NTU60 XSub, achieve 86.82% Top-1 accuracy on joint stream and 87.91% Top-1 accuracy on bone stream respectively. +- (2021-11-24) We support **2s-AGCN** on NTU60 XSub, achieve 86.06% Top-1 accuracy on joint stream and 86.89% Top-1 accuracy on bone stream respectively. - (2021-10-29) We provide a demo for skeleton-based and rgb-based spatio-temporal detection and action recognition (demo/demo_video_structuralize.py). - (2021-10-26) We train and test **ST-GCN** on NTU60 with 3D keypoint annotations, achieve 84.61% Top-1 accuracy (higher than 81.5% in the [paper](https://www.aaai.org/ocs/index.php/AAAI/AAAI18/paper/viewPaper/17135)). - (2021-10-25) We provide a script(tools/data/skeleton/gen_ntu_rgbd_raw.py) to convert the NTU60 and NTU120 3D raw skeleton data to our format. diff --git a/README_zh-CN.md b/README_zh-CN.md index 910820610e..2a7690e897 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -43,7 +43,7 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLa ## 更新记录 -- (2021-11-24) 在 NTU60 XSub 上支持 **2s-AGCN**, 在 joint stream 和 bone stream 上分别达到 86.82% 和 87.91% 的识别准确率。 +- (2021-11-24) 在 NTU60 XSub 上支持 **2s-AGCN**, 在 joint stream 和 bone stream 上分别达到 86.06% 和 86.89% 的识别准确率。 - (2021-10-29) 支持基于 skeleton 模态和 rgb 模态的时空动作检测和行为识别 demo (demo/demo_video_structuralize.py)。 - (2021-10-26) 在 NTU60 3d 关键点标注数据集上训练测试 **STGCN**, 可达到 84.61% (高于 [paper](https://www.aaai.org/ocs/index.php/AAAI/AAAI18/paper/viewPaper/17135) 中的 81.5%) 的识别准确率。 - (2021-10-25) 提供将 NTU60 和 NTU120 的 3d 骨骼点数据转换成我们项目的格式的脚本(tools/data/skeleton/gen_ntu_rgbd_raw.py)。 diff --git a/configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py b/configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py index b41cefab58..4a8ffbfc97 100644 --- a/configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py +++ b/configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py @@ -3,7 +3,7 @@ backbone=dict( type='AGCN', in_channels=3, - graph_cfg=dict(layout='ntu-rgb+d', strategy='spatial')), + graph_cfg=dict(layout='ntu-rgb+d', strategy='agcn')), cls_head=dict( type='STGCNHead', num_classes=60, @@ -71,7 +71,7 @@ log_config = dict(interval=100, hooks=[dict(type='TextLoggerHook')]) # runtime settings -dist_params = dict(backend='nccl', port='1031') +dist_params = dict(backend='nccl') log_level = 'INFO' work_dir = './work_dirs/2sagcn_80e_ntu60_xsub_bone_3d/' load_from = None diff --git a/configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d.py b/configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d.py index 53f25cbda9..b2f4422a6d 100644 --- a/configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d.py +++ b/configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d.py @@ -3,7 +3,7 @@ backbone=dict( type='AGCN', in_channels=3, - graph_cfg=dict(layout='ntu-rgb+d', strategy='spatial')), + graph_cfg=dict(layout='ntu-rgb+d', strategy='agcn')), cls_head=dict( type='STGCNHead', num_classes=60, @@ -68,7 +68,7 @@ log_config = dict(interval=100, hooks=[dict(type='TextLoggerHook')]) # runtime settings -dist_params = dict(backend='nccl', port='1031') +dist_params = dict(backend='nccl') log_level = 'INFO' work_dir = './work_dirs/2sagcn_80e_ntu60_xsub_keypoint_3d/' load_from = None diff --git a/configs/skeleton/2s-agcn/README.md b/configs/skeleton/2s-agcn/README.md index b41338838d..d6049c735c 100644 --- a/configs/skeleton/2s-agcn/README.md +++ b/configs/skeleton/2s-agcn/README.md @@ -31,8 +31,8 @@ In skeleton-based action recognition, graph convolutional networks (GCNs), which | config | type | gpus | 
backbone | Top-1 | ckpt | log | json | | :----------------------------------------------------------- | :------------: | :---: | :----------: | :---: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [2sagcn_80e_ntu60_xsub_keypoint_3d](/configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d.py) | joint | 1 | AGCN | 86.82 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d-e9c57448.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d.json) | -| [2sagcn_80e_ntu60_xsub_bone_3d](/configs/skeleton/ss-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py) | bone | 2 | AGCN | 87.91 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d-aef54a2d.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d.json) | +| [2sagcn_80e_ntu60_xsub_keypoint_3d](/configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d.py) | joint | 1 | AGCN | 86.06 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d-3bed61ba.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d.json) | +| [2sagcn_80e_ntu60_xsub_bone_3d](/configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py) | bone | 2 | AGCN | 86.89 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d-278b8815.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d.json) | ## Train You can use the following command to train a model. diff --git a/configs/skeleton/2s-agcn/README_zh-CN.md b/configs/skeleton/2s-agcn/README_zh-CN.md index d7d21594a3..ae7cc00a20 100644 --- a/configs/skeleton/2s-agcn/README_zh-CN.md +++ b/configs/skeleton/2s-agcn/README_zh-CN.md @@ -20,8 +20,8 @@ | 配置文件 | 数据格式 | GPU 数量 | 主干网络 | top1 准确率 | ckpt | log | json | | :----------------------------------------------------------- | :------------: | :---: | :----------: | :---: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [2sagcn_80e_ntu60_xsub_keypoint_3d](/configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d.py) | joint | 1 | AGCN | 86.82 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d-e9c57448.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d.log) |
[json](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d.json) | -| [2sagcn_80e_ntu60_xsub_bone_3d](/configs/skeleton/ss-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py) | bone | 2 | AGCN | 87.91 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d-aef54a2d.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d.json) | +| [2sagcn_80e_ntu60_xsub_keypoint_3d](/configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d.py) | joint | 1 | AGCN | 86.06 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d-3bed61ba.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d.json) | +| [2sagcn_80e_ntu60_xsub_bone_3d](/configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py) | bone | 2 | AGCN | 86.89 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d-278b8815.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d.json) | ## 如何训练 diff --git a/configs/skeleton/2s-agcn/metafile.yml b/configs/skeleton/2s-agcn/metafile.yml index 8227bdd1d9..30d5804f74 100644 --- a/configs/skeleton/2s-agcn/metafile.yml +++ b/configs/skeleton/2s-agcn/metafile.yml @@ -15,11 +15,11 @@ Models: Results: Dataset: NTU60-XSub Metrics: - Top 1 Accuracy: 86.82 + Top 1 Accuracy: 86.06 Task: Skeleton-based Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d.json Training Log: https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d.log - Weights: https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d-e9c57448.pth + Weights: https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d-3bed61ba.pth - Config: configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py In Collection: AGCN Metadata: @@ -33,8 +33,8 @@ Models: Results: Dataset: NTU60-XSub Metrics: - Top 1 Accuracy: 87.91 + Top 1 Accuracy: 86.89 Task: Skeleton-based Action Recognition Training Json Log: https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d.json Training Log: https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d.log - Weights: https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d-aef54a2d.pth + Weights: https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d-278b8815.pth diff --git
a/mmaction/models/backbones/agcn.py b/mmaction/models/backbones/agcn.py index e88ef27089..689e15f588 100644 --- a/mmaction/models/backbones/agcn.py +++ b/mmaction/models/backbones/agcn.py @@ -1,14 +1,34 @@ +import math + import torch import torch.nn as nn from mmcv.cnn import constant_init, kaiming_init, normal_init from mmcv.runner import load_checkpoint -from mmcv.utils import _BatchNorm from ...utils import get_root_logger from ..builder import BACKBONES from ..skeleton_gcn.utils import Graph +def conv_branch_init(conv, branches): + weight = conv.weight + n = weight.size(0) + k1 = weight.size(1) + k2 = weight.size(2) + normal_init(weight, mean=0, std=math.sqrt(2. / (n * k1 * k2 * branches))) + constant_init(conv.bias, 0) + + +def conv_init(conv): + kaiming_init(conv.weight) + constant_init(conv.bias, 0) + + +def bn_init(bn, scale): + constant_init(bn.weight, scale) + constant_init(bn.bias, 0) + + def zero(x): """return zero.""" return 0 @@ -69,10 +89,15 @@ def __init__(self, self.gcn = ConvTemporalGraphical( in_channels, out_channels, kernel_size[1], adj_len=adj_len) self.tcn = nn.Sequential( - nn.BatchNorm2d(out_channels), nn.ReLU(inplace=True), nn.Conv2d(out_channels, out_channels, (kernel_size[0], 1), - (stride, 1), padding), nn.BatchNorm2d(out_channels), - nn.Dropout(dropout, inplace=True)) + (stride, 1), padding), nn.BatchNorm2d(out_channels)) + + # tcn init + for m in self.tcn.modules(): + if isinstance(m, nn.Conv2d): + conv_init(m) + elif isinstance(m, nn.BatchNorm2d): + bn_init(m, 1) if not residual: self.residual = zero @@ -175,6 +200,15 @@ def __init__(self, self.soft = nn.Softmax(-2) self.relu = nn.ReLU() + for m in self.modules(): + if isinstance(m, nn.Conv2d): + conv_init(m) + elif isinstance(m, nn.BatchNorm2d): + bn_init(m, 1) + bn_init(self.bn, 1e-6) + for i in range(self.num_subset): + conv_branch_init(self.conv_d[i], self.num_subset) + def forward(self, x, adj_mat): """Defines the computation performed at every call.""" assert adj_mat.size(0) == self.kernel_size @@ -274,13 +308,7 @@ def init_weights(self): load_checkpoint(self, self.pretrained, strict=False, logger=logger) elif self.pretrained is None: - for m in self.modules(): - if isinstance(m, nn.Conv2d): - kaiming_init(m) - elif isinstance(m, nn.Linear): - normal_init(m) - elif isinstance(m, _BatchNorm): - constant_init(m, 1) + pass else: raise TypeError('pretrained must be a str or None') diff --git a/mmaction/models/skeleton_gcn/utils/graph.py b/mmaction/models/skeleton_gcn/utils/graph.py index 02bbf9effb..d746ba34a5 100644 --- a/mmaction/models/skeleton_gcn/utils/graph.py +++ b/mmaction/models/skeleton_gcn/utils/graph.py @@ -29,6 +29,13 @@ def normalize_digraph(adj_matrix): return norm_matrix +def edge2mat(link, num_node): + A = np.zeros((num_node, num_node)) + for i, j in link: + A[j, i] = 1 + return A + + class Graph: """The Graph to model the skeletons extracted by the openpose. 
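Between the two `graph.py` hunks, a brief orientation: `edge2mat` above turns an edge list into a one-hot adjacency matrix, and the `'agcn'` strategy added in the next hunk stacks self links with normalized inward and outward edges into a `(3, V, V)` tensor. A minimal, self-contained sketch on a toy three-joint chain; the `normalize_digraph` here is a stand-in that assumes the usual inverse in-degree column scaling, not necessarily the exact function in this file:

```python
import numpy as np


def edge2mat(link, num_node):
    # Same helper as in the patch: one-hot adjacency from an edge list.
    A = np.zeros((num_node, num_node))
    for i, j in link:
        A[j, i] = 1
    return A


def normalize_digraph(adj_matrix):
    # Assumed behavior: scale each column by its inverse in-degree.
    degree = np.sum(adj_matrix, 0)
    num_node = adj_matrix.shape[0]
    inv = np.zeros((num_node, num_node))
    for i in range(num_node):
        if degree[i] > 0:
            inv[i, i] = degree[i]**(-1)
    return np.dot(adj_matrix, inv)


num_node = 3
self_link = [(i, i) for i in range(num_node)]
inward = [(0, 1), (1, 2)]  # e.g. joint -> its parent
outward = [(j, i) for (i, j) in inward]

link_mat = edge2mat(self_link, num_node)              # identity subset
In = normalize_digraph(edge2mat(inward, num_node))    # centripetal subset
Out = normalize_digraph(edge2mat(outward, num_node))  # centrifugal subset
A = np.stack((link_mat, In, Out))
print(A.shape)  # (3, 3, 3): one V x V slice per subset
```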
@@ -62,7 +69,7 @@ def __init__(self, self.dilation = dilation assert layout in ['openpose', 'ntu-rgb+d', 'ntu_edge', 'coco'] - assert strategy in ['uniform', 'distance', 'spatial'] + assert strategy in ['uniform', 'distance', 'spatial', 'agcn'] self.get_edge(layout) self.hop_dis = get_hop_distance( self.num_node, self.edge, max_hop=max_hop) @@ -92,6 +99,8 @@ def get_edge(self, layout): (15, 14), (16, 15), (17, 1), (18, 17), (19, 18), (20, 19), (22, 23), (23, 8), (24, 25), (25, 12)] neighbor_link = [(i - 1, j - 1) for (i, j) in neighbor_1base] + self.self_link = self_link + self.neighbor_link = neighbor_link self.edge = self_link + neighbor_link self.center = 21 - 1 elif layout == 'ntu_edge': @@ -161,5 +170,13 @@ def get_adjacency(self, strategy): A.append(a_further) A = np.stack(A) self.A = A + elif strategy == 'agcn': + A = [] + link_mat = edge2mat(self.self_link, self.num_node) + In = normalize_digraph(edge2mat(self.neighbor_link, self.num_node)) + outward = [(j, i) for (i, j) in self.neighbor_link] + Out = normalize_digraph(edge2mat(outward, self.num_node)) + A = np.stack((link_mat, In, Out)) + self.A = A else: raise ValueError('Do Not Exist This Strategy') From 581af9173c82d3a54711d9f2380b83df88359ca9 Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Tue, 14 Dec 2021 15:32:45 +0800 Subject: [PATCH 312/414] [CI] Cancel previous runs that are not completed (#1327) * master * master 0721 * add README * workflow cancel --- .github/workflows/build.yml | 4 ++++ .github/workflows/deploy.yml | 4 ++++ .github/workflows/lint.yml | 4 ++++ 3 files changed, 12 insertions(+) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 6a1d251dd7..e9db239da8 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -15,6 +15,10 @@ on: - '!docs/**' - '!docs_zh_CN/**' +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + jobs: build_cpu: runs-on: ubuntu-18.04 diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 13cc82b801..a136e0cc3e 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -2,6 +2,10 @@ name: deploy on: push +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + jobs: build-n-publish: runs-on: ubuntu-latest diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index be26c3623a..a306b42760 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -2,6 +2,10 @@ name: lint on: [push, pull_request] +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + jobs: lint: runs-on: ubuntu-latest From 61b3d7d914ee5da537bc17f69b1954757cb8b7b8 Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Tue, 14 Dec 2021 15:40:23 +0800 Subject: [PATCH 313/414] [Docs] Use shared menu from theme instead (#1328) * master * master 0721 * add README * use shared menu * use shared menu --- docs/conf.py | 82 +++----------------------------------------- docs_zh_CN/conf.py | 84 +++------------------------------------------- 2 files changed, 8 insertions(+), 158 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 5a3f69abcb..3248b1f326 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -111,84 +111,10 @@ def get_version(): }, ] }, - { - 'name': - 'Docs', - 'children': [ - { - 'name': 'MMCV', - 'url': 'https://mmcv.readthedocs.io/en/latest/', - }, - { - 'name': 'MMAction2', - 'url': 
'https://mmaction2.readthedocs.io/en/latest/', - }, - { - 'name': 'MMClassification', - 'url': - 'https://mmclassification.readthedocs.io/en/latest/', - }, - { - 'name': 'MMDetection', - 'url': 'https://mmdetection.readthedocs.io/en/latest/', - }, - { - 'name': 'MMDetection3D', - 'url': 'https://mmdetection3d.readthedocs.io/en/latest/', - }, - { - 'name': 'MMEditing', - 'url': 'https://mmediting.readthedocs.io/en/latest/', - }, - { - 'name': 'MMFewShot', - 'url': 'https://mmfewshot.readthedocs.io/en/latest/', - }, - { - 'name': 'MMFlow', - 'url': 'https://mmflow.readthedocs.io/en/latest/', - }, - { - 'name': 'MMGeneration', - 'url': 'https://mmgeneration.readthedocs.io/en/latest/', - }, - { - 'name': 'MMHuman3D', - 'url': 'https://mmhuman3d.readthedocs.io/en/latest/', - }, - { - 'name': 'MMOCR', - 'url': 'https://mmocr.readthedocs.io/en/latest/', - }, - { - 'name': 'MMPose', - 'url': 'https://mmpose.readthedocs.io/en/latest/', - }, - { - 'name': 'MMSegmentation', - 'url': 'https://mmsegmentation.readthedocs.io/en/latest/', - }, - { - 'name': 'MMTracking', - 'url': 'https://mmtracking.readthedocs.io/en/latest/', - }, - ] - }, - { - 'name': - 'OpenMMLab', - 'children': [ - { - 'name': 'Homepage', - 'url': 'https://openmmlab.com/' - }, - { - 'name': 'GitHub', - 'url': 'https://github.com/open-mmlab/' - }, - ] - }, - ] + ], + # Specify the language of shared menu + 'menu_lang': + 'en' } language = 'en' diff --git a/docs_zh_CN/conf.py b/docs_zh_CN/conf.py index a66fa3acf5..7949166dc9 100644 --- a/docs_zh_CN/conf.py +++ b/docs_zh_CN/conf.py @@ -104,86 +104,10 @@ def get_version(): }, ] }, - { - 'name': - '文档', - 'children': [ - { - 'name': 'MMCV', - 'url': 'https://mmcv.readthedocs.io/zh_CN/latest/', - }, - { - 'name': 'MMAction2', - 'url': 'https://mmaction2.readthedocs.io/zh_CN/latest/', - }, - { - 'name': 'MMClassification', - 'url': - 'https://mmclassification.readthedocs.io/zh_CN/latest/', - }, - { - 'name': 'MMDetection', - 'url': 'https://mmdetection.readthedocs.io/zh_CN/latest/', - }, - { - 'name': 'MMDetection3D', - 'url': - 'https://mmdetection3d.readthedocs.io/zh_CN/latest/', - }, - { - 'name': 'MMEditing', - 'url': 'https://mmediting.readthedocs.io/zh_CN/latest/', - }, - { - 'name': 'MMFewShot', - 'url': 'https://mmfewshot.readthedocs.io/zh_CN/latest/', - }, - { - 'name': 'MMFlow', - 'url': 'https://mmflow.readthedocs.io/zh_CN/latest/', - }, - { - 'name': 'MMGeneration', - 'url': 'https://mmgeneration.readthedocs.io/zh_CN/latest/', - }, - { - 'name': 'MMHuman3D', - 'url': 'https://mmhuman3d.readthedocs.io/zh_CN/latest/', - }, - { - 'name': 'MMOCR', - 'url': 'https://mmocr.readthedocs.io/zh_CN/latest/', - }, - { - 'name': 'MMPose', - 'url': 'https://mmpose.readthedocs.io/zh_CN/latest/', - }, - { - 'name': 'MMSegmentation', - 'url': - 'https://mmsegmentation.readthedocs.io/zh_CN/latest/', - }, - { - 'name': 'MMTracking', - 'url': 'https://mmtracking.readthedocs.io/zh_CN/latest/', - }, - ] - }, - { - 'name': - 'OpenMMLab', - 'children': [ - { - 'name': '主页', - 'url': 'https://openmmlab.com/' - }, - { - 'name': 'GitHub', - 'url': 'https://github.com/open-mmlab/' - }, - ] - }, - ] + ], + # Specify the language of shared menu + 'menu_lang': + 'cn' } # Add any paths that contain custom static files (such as style sheets) here, From 37222430fb17e90577a69d0da6286647ed3d72a4 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Tue, 14 Dec 2021 16:37:07 +0800 Subject: [PATCH 314/414] [Feature] Support Publish Model in Windows (#1325) * support publish_model in windows * update * update * update 
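In the diff that follows, `publish_model.py` gains a Windows path: `certutil -hashfile` replaces `sha256sum` and `os.rename` replaces the `mv` subprocess. For comparison only (this is not what the patch does), a hypothetical portable variant could compute the digest in-process with `hashlib` and skip subprocess output parsing entirely:

```python
# Hypothetical alternative sketch: hash the published checkpoint with
# hashlib so the same code runs on Windows and Linux without certutil
# or sha256sum. `append_sha256` is an illustrative name, not repo code.
import hashlib
import os


def append_sha256(out_file):
    sha = hashlib.sha256()
    with open(out_file, 'rb') as f:
        for chunk in iter(lambda: f.read(1 << 20), b''):
            sha.update(chunk)
    digest = sha.hexdigest()
    base = out_file[:-4] if out_file.endswith('.pth') else out_file
    final_file = f'{base}-{digest[:8]}.pth'
    os.rename(out_file, final_file)  # portable replacement for `mv`
    return final_file
```

Hashing in-process also avoids the platform-specific output parsing that the `certutil` branch needs.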
--- .readthedocs.yml | 1 - tools/data/activitynet/download.py | 2 +- tools/deployment/publish_model.py | 11 +++++++++-- 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index 2c98050982..73ea4cb7e9 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -5,4 +5,3 @@ python: install: - requirements: requirements/docs.txt - requirements: requirements/readthedocs.txt - - requirements: requirements/mminstall.txt diff --git a/tools/data/activitynet/download.py b/tools/data/activitynet/download.py index b604e65d23..1d1bf41a2d 100644 --- a/tools/data/activitynet/download.py +++ b/tools/data/activitynet/download.py @@ -132,7 +132,7 @@ def main(input_csv, output_dir, anno_file, num_jobs=24, is_bsn_case=False): if is_bsn_case: anno_file_bak = anno_file.replace('.json', '_bak.json') - os.system(f'mv {anno_file} {anno_file_bak}') + os.rename(anno_file, anno_file_bak) mmcv.dump(annotation, anno_file) diff --git a/tools/deployment/publish_model.py b/tools/deployment/publish_model.py index 5d3912e45e..1c59508ce3 100644 --- a/tools/deployment/publish_model.py +++ b/tools/deployment/publish_model.py @@ -1,5 +1,7 @@ # Copyright (c) OpenMMLab. All rights reserved. import argparse +import os +import platform import subprocess import torch @@ -22,13 +24,18 @@ def process_checkpoint(in_file, out_file): # if it is necessary to remove some sensitive data in checkpoint['meta'], # add the code here. torch.save(checkpoint, out_file) - sha = subprocess.check_output(['sha256sum', out_file]).decode() + if platform.system() == 'Windows': + sha = subprocess.check_output( + ['certutil', '-hashfile', out_file, 'SHA256']) + sha = str(sha).split('\\r\\n')[1] + else: + sha = subprocess.check_output(['sha256sum', out_file]).decode() if out_file.endswith('.pth'): out_file_name = out_file[:-4] else: out_file_name = out_file final_file = out_file_name + f'-{sha[:8]}.pth' - subprocess.Popen(['mv', out_file, final_file]) + os.rename(out_file, final_file) def main(): From 9ff6a14c103967d97e193957370392c4acff9936 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Thu, 16 Dec 2021 15:56:18 +0800 Subject: [PATCH 315/414] Update Dockerfile --- docker/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index f746272b61..506366f70a 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -13,7 +13,7 @@ RUN apt-get update && apt-get install -y git ninja-build libglib2.0-0 libsm6 lib && rm -rf /var/lib/apt/lists/* # Install mmcv-full -RUN pip install mmcv-full==latest+torch1.6.0+cu101 -f https://download.openmmlab.com/mmcv/dist/index.html +RUN pip install mmcv-full==latest -f https://download.openmmlab.com/mmcv/dist/cu101/torch1.6.0/index.html # Install MMAction2 RUN conda clean --all From 67ad4832f61d005439c6e9d12e4df48edc6d2054 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Mon, 20 Dec 2021 12:08:13 +0800 Subject: [PATCH 316/414] [ModelZoo] Support BABEL (#1332) * support CBFocalLoss * add imports * support babel * update --- configs/skeleton/stgcn/README.md | 11 +++ configs/skeleton/stgcn/README_zh-CN.md | 13 ++- configs/skeleton/stgcn/metafile.yml | 72 +++++++++++++++ configs/skeleton/stgcn/stgcn_80e_babel120.py | 78 ++++++++++++++++ .../skeleton/stgcn/stgcn_80e_babel120_wfl.py | 89 +++++++++++++++++++ configs/skeleton/stgcn/stgcn_80e_babel60.py | 78 ++++++++++++++++ .../skeleton/stgcn/stgcn_80e_babel60_wfl.py | 86 ++++++++++++++++++ mmaction/models/__init__.py | 6 +- mmaction/models/losses/__init__.py | 5 +- 
mmaction/models/losses/cross_entropy_loss.py | 70 +++++++++++++++ tools/data/skeleton/README.md | 6 ++ tools/data/skeleton/README_zh-CN.md | 6 ++ tools/data/skeleton/babel2mma2.py | 24 +++++ 13 files changed, 538 insertions(+), 6 deletions(-) create mode 100644 configs/skeleton/stgcn/stgcn_80e_babel120.py create mode 100644 configs/skeleton/stgcn/stgcn_80e_babel120_wfl.py create mode 100644 configs/skeleton/stgcn/stgcn_80e_babel60.py create mode 100644 configs/skeleton/stgcn/stgcn_80e_babel60_wfl.py create mode 100644 tools/data/skeleton/babel2mma2.py diff --git a/configs/skeleton/stgcn/README.md b/configs/skeleton/stgcn/README.md index 0f17d7a6c0..98b95a5cf9 100644 --- a/configs/skeleton/stgcn/README.md +++ b/configs/skeleton/stgcn/README.md @@ -33,6 +33,17 @@ Dynamics of human body skeletons convey significant information for human action | [stgcn_80e_ntu60_xsub_keypoint](/configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py) | 2d | 2 | STGCN | 86.91 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint-e7bb9653.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint.json) | | [stgcn_80e_ntu60_xsub_keypoint_3d](/configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d.py) | 3d | 1 | STGCN | 84.61 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d-13e7ccf0.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d.json) | +### BABEL + +| config | gpus | backbone | Top-1 | Mean Top-1 | Top-1 Official (AGCN) | Mean Top-1 Official (AGCN) | ckpt | log | +| ------------------------------------------------------------ | :--: | :------: | :-------: | :--------: | :-------------------: | :------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [stgcn_80e_babel60](/configs/skeleton/stgcn/stgcn_80e_babel60.py) | 8 | ST-GCN | **42.39** | **28.28** | 41.14 | 24.46 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel60-3d206418.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel60.log) | +| [stgcn_80e_babel60_wfl](/configs/skeleton/stgcn/stgcn_80e_babel60_wfl.py) | 8 | ST-GCN | **40.31** | 29.79 | 33.41 | **30.42** | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60_wfl/stgcn_80e_babel60_wfl-1a9102d7.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel60_wfl.log) | +| [stgcn_80e_babel120](/configs/skeleton/stgcn/stgcn_80e_babel120.py) | 8 | ST-GCN | **38.95** | **20.58** | 38.41 | 17.56 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel120/stgcn_80e_babel120-e41eb6d7.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel120.log) | +| [stgcn_80e_babel120_wfl](/configs/skeleton/stgcn/stgcn_80e_babel120_wfl.py) | 8 | ST-GCN | **33.00** | 24.33 | 27.91 | **26.17*** | 
[ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel120_wfl/stgcn_80e_babel120_wfl-3f2c100d.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel120_wfl.log) | + +\* The number is copied from the [paper](https://arxiv.org/pdf/2106.09696.pdf); the performance of the [released checkpoints](https://github.com/abhinanda-punnakkal/BABEL/tree/main/action_recognition) for BABEL-120 is inferior. + ## Train You can use the following command to train a model. diff --git a/configs/skeleton/stgcn/README_zh-CN.md b/configs/skeleton/stgcn/README_zh-CN.md index 4f4f8e53c7..c7e57077cd 100644 --- a/configs/skeleton/stgcn/README_zh-CN.md +++ b/configs/skeleton/stgcn/README_zh-CN.md @@ -17,11 +17,22 @@ ### NTU60_XSub -| 配置文件 | 骨骼点 | GPU 数量 | 主干网络 | top1 准确率 | ckpt | log | json | +| 配置文件 | 骨骼点 | GPU 数量 | 主干网络 | Top-1 准确率 | ckpt | log | json | | :----------------------------------------------------------- | :------------: | :---: | :----------: | :---: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | | [stgcn_80e_ntu60_xsub_keypoint](/configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py) | 2d | 2 | STGCN | 86.91 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint-e7bb9653.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint.json) | | [stgcn_80e_ntu60_xsub_keypoint_3d](/configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d.py) | 3d | 1 | STGCN | 84.61 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d-13e7ccf0.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d.json) | +### BABEL + +| 配置文件 | GPU 数量 | 主干网络 | Top-1 准确率 | 类平均 Top-1 准确率 | Top-1 准确率<br>(官方,使用 AGCN) | 类平均 Top-1 准确率<br>(官方,使用 AGCN) | ckpt | log |
+| ------------------------------------------------------------ | :------: | :------: | :----------: | :-----------------: | :----------------------------------: | :----------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| [stgcn_80e_babel60](/configs/skeleton/stgcn/stgcn_80e_babel60.py) | 8 | ST-GCN | **42.39** | **28.28** | 41.14 | 24.46 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel60-3d206418.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel60.log) | +| [stgcn_80e_babel60_wfl](/configs/skeleton/stgcn/stgcn_80e_babel60_wfl.py) | 8 | ST-GCN | **40.31** | 29.79 | 33.41 | **30.42** | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60_wfl/stgcn_80e_babel60_wfl-1a9102d7.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel60_wfl.log) | +| [stgcn_80e_babel120](/configs/skeleton/stgcn/stgcn_80e_babel120.py) | 8 | ST-GCN | **38.95** | **20.58** | 38.41 | 17.56 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel120/stgcn_80e_babel120-e41eb6d7.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel120.log) | +| [stgcn_80e_babel120_wfl](/configs/skeleton/stgcn/stgcn_80e_babel120_wfl.py) | 8 | ST-GCN | **33.00** | 24.33 | 27.91 | **26.17*** | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel120_wfl/stgcn_80e_babel120_wfl-3f2c100d.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel120_wfl.log) | + +\* 注:此数字引自原 [论文](https://arxiv.org/pdf/2106.09696.pdf), 实际公开的 [模型权重](https://github.com/abhinanda-punnakkal/BABEL/tree/main/action_recognition) 精度略低一些。 + ## 如何训练 用户可以使用以下指令进行模型训练。 diff --git a/configs/skeleton/stgcn/metafile.yml b/configs/skeleton/stgcn/metafile.yml index 61f4d8a0a9..f4e2b7fc06 100644 --- a/configs/skeleton/stgcn/metafile.yml +++ b/configs/skeleton/stgcn/metafile.yml @@ -38,3 +38,75 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d.json Training Log: https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d.log Weights: https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d-13e7ccf0.pth +- Config: configs/skeleton/stgcn/stgcn_80e_babel60.py + In Collection: STGCN + Metadata: + Architecture: STGCN + Batch Size: 128 + Epochs: 80 + Parameters: 3088704 + Training Data: BABEL60 + Training Resources: 8 GPU + Name: stgcn_80e_babel60 + Results: + Dataset: BABEL60 + Metrics: + Top 1 Accuracy: 42.39 + Mean Top 1 Accuracy: 28.28 + Task: Skeleton-based Action Recognition + Training Log: https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel60.log + Weights: https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel60-3d206418.pth +- Config: configs/skeleton/stgcn/stgcn_80e_babel60_wfl.py + In Collection: STGCN + Metadata: + Architecture: STGCN + Batch Size: 128 + Epochs: 80 + Parameters: 3088704 + Training Data: BABEL60 + Training Resources: 8 GPU + Name: stgcn_80e_babel60_wfl + Results: + Dataset: BABEL60 + Metrics: + Top 1 Accuracy: 40.31
Mean Top 1 Accuracy: 29.79 + Task: Skeleton-based Action Recognition + Training Log: https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60_wfl/stgcn_80e_babel60_wfl.log + Weights: https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60_wfl/stgcn_80e_babel60_wfl-1a9102d7.pth +- Config: configs/skeleton/stgcn/stgcn_80e_babel120.py + In Collection: STGCN + Metadata: + Architecture: STGCN + Batch Size: 128 + Epochs: 80 + Parameters: 3104320 + Training Data: BABEL120 + Training Resources: 8 GPU + Name: stgcn_80e_babel120 + Results: + Dataset: BABEL120 + Metrics: + Top 1 Accuracy: 38.95 + Mean Top 1 Accuracy: 20.58 + Task: Skeleton-based Action Recognition + Training Log: https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel120/stgcn_80e_babel120.log + Weights: https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel120/stgcn_80e_babel120-e41eb6d7.pth +- Config: configs/skeleton/stgcn/stgcn_80e_babel120_wfl.py + In Collection: STGCN + Metadata: + Architecture: STGCN + Batch Size: 128 + Epochs: 80 + Parameters: 3104320 + Training Data: BABEL120 + Training Resources: 8 GPU + Name: stgcn_80e_babel120_wfl + Results: + Dataset: BABEL120 + Metrics: + Top 1 Accuracy: 33.00 + Mean Top 1 Accuracy: 24.33 + Task: Skeleton-based Action Recognition + Training Log: https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel120_wfl/stgcn_80e_babel120_wfl.log + Weights: https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel120_wfl/stgcn_80e_babel120_wfl-3f2c100d.pth diff --git a/configs/skeleton/stgcn/stgcn_80e_babel120.py b/configs/skeleton/stgcn/stgcn_80e_babel120.py new file mode 100644 index 0000000000..bf6bac29f0 --- /dev/null +++ b/configs/skeleton/stgcn/stgcn_80e_babel120.py @@ -0,0 +1,78 @@ +model = dict( + type='SkeletonGCN', + backbone=dict( + type='STGCN', + in_channels=3, + edge_importance_weighting=True, + graph_cfg=dict(layout='ntu-rgb+d', strategy='spatial')), + cls_head=dict( + type='STGCNHead', + num_classes=120, + in_channels=256, + num_person=1, + loss_cls=dict(type='CrossEntropyLoss')), + train_cfg=None, + test_cfg=None) + +dataset_type = 'PoseDataset' +ann_file_train = 'data/babel/babel120_train.pkl' +ann_file_val = 'data/babel/babel120_val.pkl' +train_pipeline = [ + dict(type='PoseDecode'), + dict(type='FormatGCNInput', input_format='NCTVM', num_person=1), + dict(type='Collect', keys=['keypoint', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['keypoint']) +] +val_pipeline = [ + dict(type='PoseDecode'), + dict(type='FormatGCNInput', input_format='NCTVM', num_person=1), + dict(type='Collect', keys=['keypoint', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['keypoint']) +] +test_pipeline = [ + dict(type='PoseDecode'), + dict(type='FormatGCNInput', input_format='NCTVM', num_person=1), + dict(type='Collect', keys=['keypoint', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['keypoint']) +] +data = dict( + videos_per_gpu=16, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), + train=dict( + type='RepeatDataset', + times=5, + dataset=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix='', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix='', + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix='', + pipeline=test_pipeline)) +# optimizer +optimizer = dict( + type='SGD', lr=0.1, momentum=0.9, weight_decay=0.0001, nesterov=True) +optimizer_config = 
dict(grad_clip=None) +# learning policy +lr_config = dict(policy='step', step=[10, 14]) +total_epochs = 16 +checkpoint_config = dict(interval=1) +evaluation = dict( + interval=1, metrics=['top_k_accuracy', 'mean_class_accuracy']) +log_config = dict(interval=100, hooks=[dict(type='TextLoggerHook')]) + +# runtime settings +dist_params = dict(backend='nccl') +log_level = 'INFO' +work_dir = './work_dirs/stgcn_80e_babel120' +load_from = None +resume_from = None +workflow = [('train', 1)] diff --git a/configs/skeleton/stgcn/stgcn_80e_babel120_wfl.py b/configs/skeleton/stgcn/stgcn_80e_babel120_wfl.py new file mode 100644 index 0000000000..63516b2e1f --- /dev/null +++ b/configs/skeleton/stgcn/stgcn_80e_babel120_wfl.py @@ -0,0 +1,89 @@ +samples_per_cls = [ + 518, 1993, 6260, 508, 208, 3006, 431, 724, 4527, 2131, 199, 1255, 487, 302, + 136, 571, 267, 646, 1180, 405, 72, 731, 842, 1619, 271, 27, 1198, 1012, + 110, 865, 462, 526, 405, 487, 101, 24, 84, 64, 168, 271, 609, 503, 76, 167, + 415, 137, 421, 283, 2069, 715, 196, 66, 44, 989, 122, 43, 599, 396, 245, + 380, 34, 236, 260, 325, 127, 133, 119, 66, 125, 50, 206, 191, 394, 69, 98, + 145, 38, 21, 29, 64, 277, 65, 39, 31, 35, 85, 54, 80, 133, 66, 39, 64, 268, + 34, 172, 54, 33, 21, 110, 19, 40, 55, 146, 39, 37, 75, 101, 20, 46, 55, 43, + 21, 43, 87, 29, 36, 24, 37, 28, 39 +] + +model = dict( + type='SkeletonGCN', + backbone=dict( + type='STGCN', + in_channels=3, + edge_importance_weighting=True, + graph_cfg=dict(layout='ntu-rgb+d', strategy='spatial')), + cls_head=dict( + type='STGCNHead', + num_classes=120, + in_channels=256, + num_person=1, + loss_cls=dict(type='CBFocalLoss', samples_per_cls=samples_per_cls)), + train_cfg=None, + test_cfg=None) + +dataset_type = 'PoseDataset' +ann_file_train = 'data/babel/babel120_train.pkl' +ann_file_val = 'data/babel/babel120_val.pkl' +train_pipeline = [ + dict(type='PoseDecode'), + dict(type='FormatGCNInput', input_format='NCTVM', num_person=1), + dict(type='Collect', keys=['keypoint', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['keypoint']) +] +val_pipeline = [ + dict(type='PoseDecode'), + dict(type='FormatGCNInput', input_format='NCTVM', num_person=1), + dict(type='Collect', keys=['keypoint', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['keypoint']) +] +test_pipeline = [ + dict(type='PoseDecode'), + dict(type='FormatGCNInput', input_format='NCTVM', num_person=1), + dict(type='Collect', keys=['keypoint', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['keypoint']) +] +data = dict( + videos_per_gpu=16, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), + train=dict( + type='RepeatDataset', + times=5, + dataset=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix='', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix='', + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix='', + pipeline=test_pipeline)) +# optimizer +optimizer = dict( + type='SGD', lr=0.1, momentum=0.9, weight_decay=0.0001, nesterov=True) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict(policy='step', step=[10, 14]) +total_epochs = 16 +checkpoint_config = dict(interval=1) +evaluation = dict( + interval=1, metrics=['top_k_accuracy', 'mean_class_accuracy']) +log_config = dict(interval=100, hooks=[dict(type='TextLoggerHook')]) + +# runtime settings +dist_params = dict(backend='nccl') +log_level = 'INFO' +work_dir = './work_dirs/stgcn_80e_babel120_wfl/' +load_from = None 
+resume_from = None +workflow = [('train', 1)] diff --git a/configs/skeleton/stgcn/stgcn_80e_babel60.py b/configs/skeleton/stgcn/stgcn_80e_babel60.py new file mode 100644 index 0000000000..dd338b9d17 --- /dev/null +++ b/configs/skeleton/stgcn/stgcn_80e_babel60.py @@ -0,0 +1,78 @@ +model = dict( + type='SkeletonGCN', + backbone=dict( + type='STGCN', + in_channels=3, + edge_importance_weighting=True, + graph_cfg=dict(layout='ntu-rgb+d', strategy='spatial')), + cls_head=dict( + type='STGCNHead', + num_classes=60, + in_channels=256, + num_person=1, + loss_cls=dict(type='CrossEntropyLoss')), + train_cfg=None, + test_cfg=None) + +dataset_type = 'PoseDataset' +ann_file_train = 'data/babel/babel60_train.pkl' +ann_file_val = 'data/babel/babel60_val.pkl' +train_pipeline = [ + dict(type='PoseDecode'), + dict(type='FormatGCNInput', input_format='NCTVM', num_person=1), + dict(type='Collect', keys=['keypoint', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['keypoint']) +] +val_pipeline = [ + dict(type='PoseDecode'), + dict(type='FormatGCNInput', input_format='NCTVM', num_person=1), + dict(type='Collect', keys=['keypoint', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['keypoint']) +] +test_pipeline = [ + dict(type='PoseDecode'), + dict(type='FormatGCNInput', input_format='NCTVM', num_person=1), + dict(type='Collect', keys=['keypoint', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['keypoint']) +] +data = dict( + videos_per_gpu=16, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), + train=dict( + type='RepeatDataset', + times=5, + dataset=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix='', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix='', + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix='', + pipeline=test_pipeline)) +# optimizer +optimizer = dict( + type='SGD', lr=0.1, momentum=0.9, weight_decay=0.0001, nesterov=True) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict(policy='step', step=[10, 14]) +total_epochs = 16 +checkpoint_config = dict(interval=1) +evaluation = dict( + interval=1, metrics=['top_k_accuracy', 'mean_class_accuracy']) +log_config = dict(interval=100, hooks=[dict(type='TextLoggerHook')]) + +# runtime settings +dist_params = dict(backend='nccl') +log_level = 'INFO' +work_dir = './work_dirs/stgcn_80e_babel60' +load_from = None +resume_from = None +workflow = [('train', 1)] diff --git a/configs/skeleton/stgcn/stgcn_80e_babel60_wfl.py b/configs/skeleton/stgcn/stgcn_80e_babel60_wfl.py new file mode 100644 index 0000000000..b19714d673 --- /dev/null +++ b/configs/skeleton/stgcn/stgcn_80e_babel60_wfl.py @@ -0,0 +1,86 @@ +samples_per_cls = [ + 518, 1993, 6260, 508, 208, 3006, 431, 724, 4527, 2131, 199, 1255, 487, 302, + 136, 571, 267, 646, 1180, 405, 731, 842, 1619, 271, 1198, 1012, 865, 462, + 526, 405, 487, 168, 271, 609, 503, 167, 415, 421, 283, 2069, 715, 196, 989, + 122, 599, 396, 245, 380, 236, 260, 325, 133, 206, 191, 394, 145, 277, 268, + 172, 146 +] + +model = dict( + type='SkeletonGCN', + backbone=dict( + type='STGCN', + in_channels=3, + edge_importance_weighting=True, + graph_cfg=dict(layout='ntu-rgb+d', strategy='spatial')), + cls_head=dict( + type='STGCNHead', + num_classes=60, + in_channels=256, + num_person=1, + loss_cls=dict(type='CBFocalLoss', samples_per_cls=samples_per_cls)), + train_cfg=None, + test_cfg=None) + +dataset_type = 'PoseDataset' +ann_file_train = 'data/babel/babel60_train.pkl' 
+ann_file_val = 'data/babel/babel60_val.pkl' +train_pipeline = [ + dict(type='PoseDecode'), + dict(type='FormatGCNInput', input_format='NCTVM', num_person=1), + dict(type='Collect', keys=['keypoint', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['keypoint']) +] +val_pipeline = [ + dict(type='PoseDecode'), + dict(type='FormatGCNInput', input_format='NCTVM', num_person=1), + dict(type='Collect', keys=['keypoint', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['keypoint']) +] +test_pipeline = [ + dict(type='PoseDecode'), + dict(type='FormatGCNInput', input_format='NCTVM', num_person=1), + dict(type='Collect', keys=['keypoint', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['keypoint']) +] +data = dict( + videos_per_gpu=16, + workers_per_gpu=2, + test_dataloader=dict(videos_per_gpu=1), + train=dict( + type='RepeatDataset', + times=5, + dataset=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix='', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix='', + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix='', + pipeline=test_pipeline)) +# optimizer +optimizer = dict( + type='SGD', lr=0.1, momentum=0.9, weight_decay=0.0001, nesterov=True) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict(policy='step', step=[10, 14]) +total_epochs = 16 +checkpoint_config = dict(interval=1) +evaluation = dict( + interval=1, metrics=['top_k_accuracy', 'mean_class_accuracy']) +log_config = dict(interval=100, hooks=[dict(type='TextLoggerHook')]) + +# runtime settings +dist_params = dict(backend='nccl') +log_level = 'INFO' +work_dir = './work_dirs/stgcn_80e_babel60_wfl/' +load_from = None +resume_from = None +workflow = [('train', 1)] diff --git a/mmaction/models/__init__.py b/mmaction/models/__init__.py index 39b85a2e94..8cb15bc49c 100644 --- a/mmaction/models/__init__.py +++ b/mmaction/models/__init__.py @@ -16,8 +16,8 @@ X3DHead) from .localizers import BMN, PEM, TEM from .losses import (BCELossWithLogits, BinaryLogisticRegressionLoss, BMNLoss, - CrossEntropyLoss, HVULoss, NLLLoss, OHEMHingeLoss, - SSNLoss) + CBFocalLoss, CrossEntropyLoss, HVULoss, NLLLoss, + OHEMHingeLoss, SSNLoss) from .necks import TPN from .recognizers import (AudioRecognizer, BaseRecognizer, Recognizer2D, Recognizer3D) @@ -40,5 +40,5 @@ 'FBOHead', 'LFBInferHead', 'TRNHead', 'NECKS', 'TimeSformer', 'TimeSformerHead', 'DividedSpatialAttentionWithNorm', 'DividedTemporalAttentionWithNorm', 'FFNWithNorm', 'ACRNHead', 'BaseGCN', - 'SkeletonGCN' + 'SkeletonGCN', 'CBFocalLoss' ] diff --git a/mmaction/models/losses/__init__.py b/mmaction/models/losses/__init__.py index 2e94b7b240..41afcb7ace 100644 --- a/mmaction/models/losses/__init__.py +++ b/mmaction/models/losses/__init__.py @@ -2,7 +2,8 @@ from .base import BaseWeightedLoss from .binary_logistic_regression_loss import BinaryLogisticRegressionLoss from .bmn_loss import BMNLoss -from .cross_entropy_loss import BCELossWithLogits, CrossEntropyLoss +from .cross_entropy_loss import (BCELossWithLogits, CBFocalLoss, + CrossEntropyLoss) from .hvu_loss import HVULoss from .nll_loss import NLLLoss from .ohem_hinge_loss import OHEMHingeLoss @@ -11,5 +12,5 @@ __all__ = [ 'BaseWeightedLoss', 'CrossEntropyLoss', 'NLLLoss', 'BCELossWithLogits', 'BinaryLogisticRegressionLoss', 'BMNLoss', 'OHEMHingeLoss', 'SSNLoss', - 'HVULoss' + 'HVULoss', 'CBFocalLoss' ] diff --git a/mmaction/models/losses/cross_entropy_loss.py 
b/mmaction/models/losses/cross_entropy_loss.py index 5c84c6b43e..fbb91d19ba 100644 --- a/mmaction/models/losses/cross_entropy_loss.py +++ b/mmaction/models/losses/cross_entropy_loss.py @@ -1,4 +1,5 @@ # Copyright (c) OpenMMLab. All rights reserved. +import numpy as np import torch import torch.nn.functional as F @@ -119,3 +120,72 @@ def _forward(self, cls_score, label, **kwargs): loss_cls = F.binary_cross_entropy_with_logits(cls_score, label, **kwargs) return loss_cls + + +@LOSSES.register_module() +class CBFocalLoss(BaseWeightedLoss): + """Class Balanced Focal Loss. Adapted from https://github.com/abhinanda- + punnakkal/BABEL/. This loss is used in the skeleton-based action + recognition baseline for BABEL. + + Args: + loss_weight (float): Factor scalar multiplied on the loss. + Default: 1.0. + samples_per_cls (list[int]): The number of samples per class. + Default: []. + beta (float): Hyperparameter that controls the per class loss weight. + Default: 0.9999. + gamma (float): Hyperparameter of the focal loss. Default: 2.0. + """ + + def __init__(self, + loss_weight=1.0, + samples_per_cls=[], + beta=0.9999, + gamma=2.): + super().__init__(loss_weight=loss_weight) + self.samples_per_cls = samples_per_cls + self.beta = beta + self.gamma = gamma + effective_num = 1.0 - np.power(beta, samples_per_cls) + weights = (1.0 - beta) / np.array(effective_num) + weights = weights / np.sum(weights) * len(weights) + self.weights = weights + self.num_classes = len(weights) + + def _forward(self, cls_score, label, **kwargs): + """Forward function. + + Args: + cls_score (torch.Tensor): The class score. + label (torch.Tensor): The ground truth label. + kwargs: Unused keyword arguments, kept for a consistent + loss interface. + + Returns: + torch.Tensor: The calculated class-balanced focal loss.
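+
+        Note:
+            The per-class weights are precomputed in ``__init__`` from the
+            effective number of samples (Cui et al., CVPR 2019):
+            ``w_c = (1 - beta) / (1 - beta ** n_c)``, rescaled to sum to the
+            number of classes. The ``gamma`` term applies the usual focal
+            modulation that down-weights well-classified samples.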
+ """ + weights = torch.tensor(self.weights).float().to(cls_score.device) + label_one_hot = F.one_hot(label, self.num_classes).float() + weights = weights.unsqueeze(0) + weights = weights.repeat(label_one_hot.shape[0], 1) * label_one_hot + weights = weights.sum(1) + weights = weights.unsqueeze(1) + weights = weights.repeat(1, self.num_classes) + + BCELoss = F.binary_cross_entropy_with_logits( + input=cls_score, target=label_one_hot, reduction='none') + + modulator = 1.0 + if self.gamma: + modulator = torch.exp(-self.gamma * label_one_hot * cls_score - + self.gamma * + torch.log(1 + torch.exp(-1.0 * cls_score))) + + loss = modulator * BCELoss + weighted_loss = weights * loss + + focal_loss = torch.sum(weighted_loss) + focal_loss /= torch.sum(label_one_hot) + + return focal_loss diff --git a/tools/data/skeleton/README.md b/tools/data/skeleton/README.md index 271258200b..25c7f62892 100644 --- a/tools/data/skeleton/README.md +++ b/tools/data/skeleton/README.md @@ -113,6 +113,12 @@ For NTU-RGBD 120, preprocess data and convert the data format with python gen_ntu_rgbd_raw.py --data-path your_raw_nturgbd120_skeleton_path --ignored-sample-path NTU_RGBD120_samples_with_missing_skeletons.txt --out-folder your_nturgbd120_output_path --task ntu120 ``` +## Convert annotations from third-party projects + +We provide scripts to convert skeleton annotations from third-party projects to MMAction2 formats: + +- BABEL: `babel2mma2.py` + **TODO**: - [x] FineGYM diff --git a/tools/data/skeleton/README_zh-CN.md b/tools/data/skeleton/README_zh-CN.md index 43810a4f72..fb6de5925a 100644 --- a/tools/data/skeleton/README_zh-CN.md +++ b/tools/data/skeleton/README_zh-CN.md @@ -117,6 +117,12 @@ python gen_ntu_rgbd_raw.py --data-path your_raw_nturgbd60_skeleton_path --ignore python gen_ntu_rgbd_raw.py --data-path your_raw_nturgbd120_skeleton_path --ignored-sample-path NTU_RGBD120_samples_with_missing_skeletons.txt --out-folder your_nturgbd120_output_path --task ntu120 ``` +## 转换其他第三方项目的骨骼标注 + +MMAction2 提供脚本以将其他第三方项目的骨骼标注转至 MMAction2 格式,如: + +- BABEL: `babel2mma2.py` + **待办项**: - [x] FineGYM diff --git a/tools/data/skeleton/babel2mma2.py b/tools/data/skeleton/babel2mma2.py new file mode 100644 index 0000000000..59e59324b2 --- /dev/null +++ b/tools/data/skeleton/babel2mma2.py @@ -0,0 +1,24 @@ +# In this example, we convert babel120_train to MMAction2 format +# The required files can be downloaded from the homepage of BABEL project +import numpy as np +from mmcv import dump, load + + +def gen_babel(x, y): + data = [] + for i, xx in enumerate(x): + sample = dict() + sample['keypoint'] = xx.transpose(3, 1, 2, 0).astype(np.float16) + sample['label'] = y[1][0][i] + names = [y[0][i], y[1][1][i], y[1][2][i], y[1][3][i]] + sample['frame_dir'] = '_'.join([str(k) for k in names]) + sample['total_frames'] = 150 + data.append(sample) + return data + + +x = np.load('train_ntu_sk_120.npy') +y = load('train_label_120.pkl') + +data = gen_babel(x, y) +dump(data, 'babel120_train.pkl') From 65a732285e815c5d0fc80903c6b40b05e9902038 Mon Sep 17 00:00:00 2001 From: makecent <42603768+makecent@users.noreply.github.com> Date: Wed, 22 Dec 2021 12:13:52 +0800 Subject: [PATCH 317/414] Update installation command (#1340) * Update install.md * Update install.md --- docs/install.md | 2 +- docs_zh_CN/install.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/install.md b/docs/install.md index fc81324db1..986df4f056 100644 --- a/docs/install.md +++ b/docs/install.md @@ -92,7 +92,7 @@ We recommend you to install MMAction2 with 
[MIM](https://github.com/open-mmlab/m ```shell pip install git+https://github.com/open-mmlab/mim.git -mim install mmaction2 +mim install mmaction2 -f https://github.com/open-mmlab/mmaction2.git ``` MIM can automatically install OpenMMLab projects and their requirements. diff --git a/docs_zh_CN/install.md b/docs_zh_CN/install.md index fcd809eb43..154f760c7b 100644 --- a/docs_zh_CN/install.md +++ b/docs_zh_CN/install.md @@ -87,7 +87,7 @@ conda install pytorch=1.3.1 cudatoolkit=9.2 torchvision=0.4.2 -c pytorch ```shell pip install git+https://github.com/open-mmlab/mim.git -mim install mmaction2 +mim install mmaction2 -f https://github.com/open-mmlab/mmaction2.git ``` MIM 可以自动安装 OpenMMLab 项目及其依赖。 From 364b54d024a87ce1aa599aeccdbe83a7e2653fbe Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Wed, 22 Dec 2021 14:38:31 +0800 Subject: [PATCH 318/414] [Docs] Merge docs & docs_zh (#1342) * master * master 0721 * add README * 1222 merge docs & docs_zh * modify docs path in files * modify docs path in files * modify docs path in files --- .github/workflows/build.yml | 4 +-- .gitignore | 3 +- README.md | 30 +++++++++--------- README_zh-CN.md | 28 ++++++++-------- configs/detection/acrn/README.md | 6 ++-- configs/detection/acrn/README_zh-CN.md | 6 ++-- configs/detection/ava/README.md | 6 ++-- configs/detection/ava/README_zh-CN.md | 6 ++-- configs/detection/lfb/README.md | 4 +-- configs/detection/lfb/README_zh-CN.md | 4 +-- configs/localization/bmn/README.md | 6 ++-- configs/localization/bmn/README_zh-CN.md | 6 ++-- configs/localization/bsn/README.md | 6 ++-- configs/localization/bsn/README_zh-CN.md | 6 ++-- configs/localization/ssn/README.md | 6 ++-- configs/localization/ssn/README_zh-CN.md | 6 ++-- configs/recognition/c3d/README.md | 6 ++-- configs/recognition/c3d/README_zh-CN.md | 6 ++-- configs/recognition/csn/README.md | 6 ++-- configs/recognition/csn/README_zh-CN.md | 6 ++-- configs/recognition/i3d/README.md | 6 ++-- configs/recognition/i3d/README_zh-CN.md | 6 ++-- configs/recognition/r2plus1d/README.md | 6 ++-- configs/recognition/r2plus1d/README_zh-CN.md | 6 ++-- configs/recognition/slowfast/README.md | 6 ++-- configs/recognition/slowfast/README_zh-CN.md | 6 ++-- configs/recognition/slowonly/README.md | 6 ++-- configs/recognition/slowonly/README_zh-CN.md | 6 ++-- configs/recognition/tanet/README.md | 6 ++-- configs/recognition/tanet/README_zh-CN.md | 6 ++-- configs/recognition/timesformer/README.md | 6 ++-- .../recognition/timesformer/README_zh-CN.md | 6 ++-- configs/recognition/tin/README.md | 6 ++-- configs/recognition/tin/README_zh-CN.md | 6 ++-- configs/recognition/tpn/README.md | 6 ++-- configs/recognition/tpn/README_zh-CN.md | 4 +-- configs/recognition/trn/README.md | 4 +-- configs/recognition/trn/README_zh-CN.md | 4 +-- configs/recognition/tsm/README.md | 6 ++-- configs/recognition/tsm/README_zh-CN.md | 6 ++-- configs/recognition/tsn/README.md | 4 +-- configs/recognition/tsn/README_zh-CN.md | 4 +-- configs/recognition/x3d/README.md | 4 +-- configs/recognition/x3d/README_zh-CN.md | 4 +-- configs/recognition_audio/resnet/README.md | 6 ++-- .../recognition_audio/resnet/README_zh-CN.md | 6 ++-- configs/skeleton/2s-agcn/README.md | 4 +-- configs/skeleton/2s-agcn/README_zh-CN.md | 4 +-- configs/skeleton/posec3d/README.md | 4 +-- configs/skeleton/posec3d/README_zh-CN.md | 4 +-- configs/skeleton/stgcn/README.md | 4 +-- configs/skeleton/stgcn/README_zh-CN.md | 4 +-- demo/mmaction2_tutorial.ipynb | 2 +- demo/mmaction2_tutorial_zh-CN.ipynb | 2 +- docs/{ => 
en}/Makefile | 0 docs/{ => en}/_static/css/readthedocs.css | 0 docs/{ => en}/_static/images/mmaction2.png | Bin docs/{ => en}/api.rst | 0 docs/{ => en}/benchmark.md | 0 docs/{ => en}/changelog.md | 0 docs/{ => en}/conf.py | 4 +-- docs/{ => en}/data_preparation.md | 0 docs/{ => en}/faq.md | 6 ++-- docs/{ => en}/feature_extraction.md | 0 docs/{ => en}/getting_started.md | 0 docs/{ => en}/index.rst | 0 docs/{ => en}/install.md | 0 docs/{ => en}/make.bat | 0 docs/{ => en}/merge_docs.sh | 0 docs/{ => en}/projects.md | 0 docs/{ => en}/stat.py | 0 docs/{ => en}/supported_datasets.md | 0 docs/{ => en}/switch_language.md | 0 docs/{ => en}/tutorials/1_config.md | 0 docs/{ => en}/tutorials/2_finetune.md | 2 +- docs/{ => en}/tutorials/3_new_dataset.md | 0 docs/{ => en}/tutorials/4_data_pipeline.md | 0 docs/{ => en}/tutorials/5_new_modules.md | 0 docs/{ => en}/tutorials/6_export_model.md | 0 .../{ => en}/tutorials/7_customize_runtime.md | 0 docs/{ => en}/useful_tools.md | 0 {docs_zh_CN => docs/zh_cn}/Makefile | 0 {docs_zh_CN => docs/zh_cn}/README.md | 0 {docs_zh_CN => docs/zh_cn}/api.rst | 0 {docs_zh_CN => docs/zh_cn}/benchmark.md | 0 {docs_zh_CN => docs/zh_cn}/conf.py | 4 +-- .../zh_cn}/data_preparation.md | 0 {docs_zh_CN => docs/zh_cn}/demo.md | 0 {docs_zh_CN => docs/zh_cn}/faq.md | 4 +-- .../zh_cn}/feature_extraction.md | 0 {docs_zh_CN => docs/zh_cn}/getting_started.md | 0 {docs_zh_CN => docs/zh_cn}/index.rst | 0 {docs_zh_CN => docs/zh_cn}/install.md | 0 {docs_zh_CN => docs/zh_cn}/make.bat | 0 {docs_zh_CN => docs/zh_cn}/merge_docs.sh | 14 ++++---- {docs_zh_CN => docs/zh_cn}/stat.py | 0 .../zh_cn}/supported_datasets.md | 0 {docs_zh_CN => docs/zh_cn}/switch_language.md | 0 .../zh_cn}/tutorials/1_config.md | 0 .../zh_cn}/tutorials/2_finetune.md | 0 .../zh_cn}/tutorials/3_new_dataset.md | 0 .../zh_cn}/tutorials/4_data_pipeline.md | 0 .../zh_cn}/tutorials/5_new_modules.md | 0 .../zh_cn}/tutorials/6_export_model.md | 0 .../zh_cn}/tutorials/7_customize_runtime.md | 0 {docs_zh_CN => docs/zh_cn}/useful_tools.md | 0 tools/data/activitynet/README.md | 6 ++-- tools/data/activitynet/README_zh-CN.md | 6 ++-- tools/data/ava/README.md | 4 +-- tools/data/ava/README_zh-CN.md | 4 +-- tools/data/diving48/README.md | 4 +-- tools/data/diving48/README_zh-CN.md | 4 +-- tools/data/gym/README.md | 4 +-- tools/data/gym/README_zh-CN.md | 4 +-- tools/data/hmdb51/README.md | 4 +-- tools/data/hmdb51/README_zh-CN.md | 4 +-- tools/data/hvu/README.md | 6 ++-- tools/data/hvu/README_zh-CN.md | 6 ++-- tools/data/jester/README.md | 4 +-- tools/data/jester/README_zh-CN.md | 4 +-- tools/data/kinetics/README.md | 6 ++-- tools/data/kinetics/README_zh-CN.md | 6 ++-- tools/data/mit/README.md | 4 +-- tools/data/mit/README_zh-CN.md | 4 +-- tools/data/mmit/README.md | 4 +-- tools/data/mmit/README_zh-CN.md | 4 +-- tools/data/sthv1/README.md | 4 +-- tools/data/sthv1/README_zh-CN.md | 4 +-- tools/data/sthv2/README.md | 4 +-- tools/data/sthv2/README_zh-CN.md | 4 +-- tools/data/thumos14/README.md | 4 +-- tools/data/thumos14/README_zh-CN.md | 4 +-- tools/data/ucf101/README.md | 4 +-- tools/data/ucf101/README_zh-CN.md | 4 +-- 134 files changed, 243 insertions(+), 242 deletions(-) rename docs/{ => en}/Makefile (100%) rename docs/{ => en}/_static/css/readthedocs.css (100%) rename docs/{ => en}/_static/images/mmaction2.png (100%) rename docs/{ => en}/api.rst (100%) rename docs/{ => en}/benchmark.md (100%) rename docs/{ => en}/changelog.md (100%) rename docs/{ => en}/conf.py (97%) rename docs/{ => en}/data_preparation.md (100%) rename docs/{ => en}/faq.md 
(98%) rename docs/{ => en}/feature_extraction.md (100%) rename docs/{ => en}/getting_started.md (100%) rename docs/{ => en}/index.rst (100%) rename docs/{ => en}/install.md (100%) rename docs/{ => en}/make.bat (100%) rename docs/{ => en}/merge_docs.sh (100%) rename docs/{ => en}/projects.md (100%) rename docs/{ => en}/stat.py (100%) rename docs/{ => en}/supported_datasets.md (100%) rename docs/{ => en}/switch_language.md (100%) rename docs/{ => en}/tutorials/1_config.md (100%) rename docs/{ => en}/tutorials/2_finetune.md (96%) rename docs/{ => en}/tutorials/3_new_dataset.md (100%) rename docs/{ => en}/tutorials/4_data_pipeline.md (100%) rename docs/{ => en}/tutorials/5_new_modules.md (100%) rename docs/{ => en}/tutorials/6_export_model.md (100%) rename docs/{ => en}/tutorials/7_customize_runtime.md (100%) rename docs/{ => en}/useful_tools.md (100%) rename {docs_zh_CN => docs/zh_cn}/Makefile (100%) rename {docs_zh_CN => docs/zh_cn}/README.md (100%) rename {docs_zh_CN => docs/zh_cn}/api.rst (100%) rename {docs_zh_CN => docs/zh_cn}/benchmark.md (100%) rename {docs_zh_CN => docs/zh_cn}/conf.py (97%) rename {docs_zh_CN => docs/zh_cn}/data_preparation.md (100%) rename {docs_zh_CN => docs/zh_cn}/demo.md (100%) rename {docs_zh_CN => docs/zh_cn}/faq.md (98%) rename {docs_zh_CN => docs/zh_cn}/feature_extraction.md (100%) rename {docs_zh_CN => docs/zh_cn}/getting_started.md (100%) rename {docs_zh_CN => docs/zh_cn}/index.rst (100%) rename {docs_zh_CN => docs/zh_cn}/install.md (100%) rename {docs_zh_CN => docs/zh_cn}/make.bat (100%) rename {docs_zh_CN => docs/zh_cn}/merge_docs.sh (89%) rename {docs_zh_CN => docs/zh_cn}/stat.py (100%) rename {docs_zh_CN => docs/zh_cn}/supported_datasets.md (100%) rename {docs_zh_CN => docs/zh_cn}/switch_language.md (100%) rename {docs_zh_CN => docs/zh_cn}/tutorials/1_config.md (100%) rename {docs_zh_CN => docs/zh_cn}/tutorials/2_finetune.md (100%) rename {docs_zh_CN => docs/zh_cn}/tutorials/3_new_dataset.md (100%) rename {docs_zh_CN => docs/zh_cn}/tutorials/4_data_pipeline.md (100%) rename {docs_zh_CN => docs/zh_cn}/tutorials/5_new_modules.md (100%) rename {docs_zh_CN => docs/zh_cn}/tutorials/6_export_model.md (100%) rename {docs_zh_CN => docs/zh_cn}/tutorials/7_customize_runtime.md (100%) rename {docs_zh_CN => docs/zh_cn}/useful_tools.md (100%) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index e9db239da8..11aa8b38db 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -12,8 +12,8 @@ on: - '!demo/**' - '!docker/**' - '!tools/**' - - '!docs/**' - - '!docs_zh_CN/**' + - '!docs/en/**' + - '!docs/zh_cn/**' concurrency: group: ${{ github.workflow }}-${{ github.ref }} diff --git a/.gitignore b/.gitignore index 587b296482..68cb7f5941 100644 --- a/.gitignore +++ b/.gitignore @@ -65,7 +65,8 @@ instance/ .scrapy # Sphinx documentation -docs/_build/ +docs/en/_build/ +docs/zh_cn/_build/ # PyBuilder target/ diff --git a/README.md b/README.md index eeee2b9bc1..8991f5b018 100644 --- a/README.md +++ b/README.md @@ -51,24 +51,24 @@ The master branch works with **PyTorch 1.3+**. - (2021-10-25) We provide a [guide](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md) on how to train PoseC3D with custom datasets, [bit-scientist](https://github.com/bit-scientist) authored this PR! - (2021-10-16) We support **PoseC3D** on UCF101 and HMDB51, achieves 87.0% and 69.3% Top-1 accuracy with 2D skeletons only. Pre-extracted 2D skeletons are also available. 
-**Release**: v0.20.0 was released in 30/10/2021. Please refer to [changelog.md](docs/changelog.md) for details and release history. +**Release**: v0.20.0 was released in 30/10/2021. Please refer to [changelog.md](docs/en/changelog.md) for details and release history. ## Installation -Please refer to [install.md](docs/install.md) for installation. +Please refer to [install.md](docs/en/install.md) for installation. ## Get Started -Please see [getting_started.md](docs/getting_started.md) for the basic usage of MMAction2. +Please see [getting_started.md](docs/en/getting_started.md) for the basic usage of MMAction2. There are also tutorials: -- [learn about configs](docs/tutorials/1_config.md) -- [finetuning models](docs/tutorials/2_finetune.md) -- [adding new dataset](docs/tutorials/3_new_dataset.md) -- [designing data pipeline](docs/tutorials/4_data_pipeline.md) -- [adding new modules](docs/tutorials/5_new_modules.md) -- [exporting model to onnx](docs/tutorials/6_export_model.md) -- [customizing runtime settings](docs/tutorials/7_customize_runtime.md) +- [learn about configs](docs/en/tutorials/1_config.md) +- [finetuning models](docs/en/tutorials/2_finetune.md) +- [adding new dataset](docs/en/tutorials/3_new_dataset.md) +- [designing data pipeline](docs/en/tutorials/4_data_pipeline.md) +- [adding new modules](docs/en/tutorials/5_new_modules.md) +- [exporting model to onnx](docs/en/tutorials/6_export_model.md) +- [customizing runtime settings](docs/en/tutorials/7_customize_runtime.md) A Colab tutorial is also provided. You may preview the notebook [here](demo/mmaction2_tutorial.ipynb) or directly [run](https://colab.research.google.com/github/open-mmlab/mmaction2/blob/master/demo/mmaction2_tutorial.ipynb) on Colab. @@ -207,16 +207,16 @@ Datasets marked with * are not fully supported yet, but related dataset preparat ## Benchmark -To demonstrate the efficacy and efficiency of our framework, we compare MMAction2 with some other popular frameworks and official releases in terms of speed. Details can be found in [benchmark](docs/benchmark.md). +To demonstrate the efficacy and efficiency of our framework, we compare MMAction2 with some other popular frameworks and official releases in terms of speed. Details can be found in [benchmark](docs/en/benchmark.md). ## Data Preparation -Please refer to [data_preparation.md](docs/data_preparation.md) for a general knowledge of data preparation. -The supported datasets are listed in [supported_datasets.md](docs/supported_datasets.md) +Please refer to [data_preparation.md](docs/en/data_preparation.md) for a general knowledge of data preparation. +The supported datasets are listed in [supported_datasets.md](docs/en/supported_datasets.md) ## FAQ -Please refer to [FAQ](docs/faq.md) for frequently asked questions. +Please refer to [FAQ](docs/en/faq.md) for frequently asked questions. ## Projects built on MMAction2 @@ -226,7 +226,7 @@ Currently, there are many research works and projects built on MMAction2 by user - Evidential Deep Learning for Open Set Action Recognition, ICCV 2021 **Oral**. [[paper]](https://arxiv.org/abs/2107.10161)[[github]](https://github.com/Cogito2012/DEAR) - Rethinking Self-supervised Correspondence Learning: A Video Frame-level Similarity Perspective, ICCV 2021 **Oral**. [[paper]](https://arxiv.org/abs/2103.17263)[[github]](https://github.com/xvjiarui/VFS) -etc., check [projects.md](docs/projects.md) to see all related projects. +etc., check [projects.md](docs/en/projects.md) to see all related projects. 
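As editor's context for the getting-started links above: the basic workflow they document reduces to a few lines of Python. A minimal sketch, assuming the `mmaction.apis` interface of this era (`init_recognizer` / `inference_recognizer`); the config, checkpoint, and video paths are placeholders borrowed from elsewhere in the repository, not prescribed by this patch:

```python
from mmaction.apis import inference_recognizer, init_recognizer

# Placeholder paths; substitute a real config/checkpoint pair.
config = 'configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py'
checkpoint = 'checkpoints/SOME_CHECKPOINT.pth'

# Build the recognizer and run it on a single video.
model = init_recognizer(config, checkpoint, device='cpu')
results = inference_recognizer(model, 'demo/demo.mp4')

# In this era of the API the result is a ranked list of
# (class_index, score) pairs; map indices to names with a label file
# such as tools/data/kinetics/label_map_k400.txt.
for class_index, score in results:
    print(class_index, score)
```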
## License diff --git a/README_zh-CN.md b/README_zh-CN.md index 2a7690e897..bf0a554a95 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -50,23 +50,23 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLa - (2021-10-25) 提供使用自定义数据集训练 PoseC3D 的 [教程](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md),此 PR 由用户 [bit-scientist](https://github.com/bit-scientist) 完成! - (2021-10-16) 在 UCF101, HMDB51 上支持 **PoseC3D**,仅用 2D 关键点就可分别达到 87.0% 和 69.3% 的识别准确率。两数据集的预提取骨架特征可以公开下载。 -v0.20.0 版本已于 2021 年 10 月 30 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史 +v0.20.0 版本已于 2021 年 10 月 30 日发布,可通过查阅 [更新日志](/docs/en/changelog.md) 了解更多细节以及发布历史 ## 安装 -请参考 [安装指南](/docs_zh_CN/install.md) 进行安装 +请参考 [安装指南](/docs/zh_cn/install.md) 进行安装 ## 教程 -请参考 [基础教程](/docs_zh_CN/getting_started.md) 了解 MMAction2 的基本使用。MMAction2也提供了其他更详细的教程: +请参考 [基础教程](/docs/zh_cn/getting_started.md) 了解 MMAction2 的基本使用。MMAction2也提供了其他更详细的教程: -- [如何编写配置文件](/docs_zh_CN/tutorials/1_config.md) -- [如何微调模型](/docs_zh_CN/tutorials/2_finetune.md) -- [如何增加新数据集](/docs_zh_CN/tutorials/3_new_dataset.md) -- [如何设计数据处理流程](/docs_zh_CN/tutorials/4_data_pipeline.md) -- [如何增加新模块](/docs_zh_CN/tutorials/5_new_modules.md) -- [如何导出模型为 onnx 格式](/docs_zh_CN/tutorials/6_export_model.md) -- [如何自定义模型运行参数](/docs_zh_CN/tutorials/7_customize_runtime.md) +- [如何编写配置文件](/docs/zh_cn/tutorials/1_config.md) +- [如何微调模型](/docs/zh_cn/tutorials/2_finetune.md) +- [如何增加新数据集](/docs/zh_cn/tutorials/3_new_dataset.md) +- [如何设计数据处理流程](/docs/zh_cn/tutorials/4_data_pipeline.md) +- [如何增加新模块](/docs/zh_cn/tutorials/5_new_modules.md) +- [如何导出模型为 onnx 格式](/docs/zh_cn/tutorials/6_export_model.md) +- [如何自定义模型运行参数](/docs/zh_cn/tutorials/7_customize_runtime.md) MMAction2 也提供了相应的中文 Colab 教程,可以点击 [这里](https://colab.research.google.com/github/open-mmlab/mmaction2/blob/master/demo/mmaction2_tutorial_zh-CN.ipynb) 进行体验! @@ -203,15 +203,15 @@ MMAction2 将跟进学界的最新进展,并支持更多算法和框架。如 ## 基准测试 -为了验证 MMAction2 框架的高精度和高效率,开发成员将其与当前其他主流框架进行速度对比。更多详情可见 [基准测试](/docs_zh_CN/benchmark.md) +为了验证 MMAction2 框架的高精度和高效率,开发成员将其与当前其他主流框架进行速度对比。更多详情可见 [基准测试](/docs/zh_cn/benchmark.md) ## 数据集准备 -请参考 [数据准备](/docs_zh_CN/data_preparation.md) 了解数据集准备概况。所有支持的数据集都列于 [数据集清单](/docs_zh_CN/supported_datasets.md) 中 +请参考 [数据准备](/docs/zh_cn/data_preparation.md) 了解数据集准备概况。所有支持的数据集都列于 [数据集清单](/docs/zh_cn/supported_datasets.md) 中 ## 常见问题 -请参考 [FAQ](/docs_zh_CN/faq.md) 了解其他用户的常见问题 +请参考 [FAQ](/docs/zh_cn/faq.md) 了解其他用户的常见问题 ## 相关工作 @@ -221,7 +221,7 @@ MMAction2 将跟进学界的最新进展,并支持更多算法和框架。如 - Rethinking Self-supervised Correspondence Learning: A Video Frame-level Similarity Perspective, ICCV 2021 **Oral**. [[论文]](https://arxiv.org/abs/2103.17263)[[代码]](https://github.com/xvjiarui/VFS) - Video Swin Transformer. [[论文]](https://arxiv.org/abs/2106.13230)[[代码]](https://github.com/SwinTransformer/Video-Swin-Transformer) -更多详情可见 [相关工作](docs/projects.md) +更多详情可见 [相关工作](docs/en/projects.md) ## 许可 diff --git a/configs/detection/acrn/README.md b/configs/detection/acrn/README.md index 4f34bec2c4..75d18765c1 100644 --- a/configs/detection/acrn/README.md +++ b/configs/detection/acrn/README.md @@ -59,7 +59,7 @@ Current state-of-the-art approaches for spatio-temporal action localization rely ::: -For more details on data preparation, you can refer to AVA in [Data Preparation](/docs/data_preparation.md). +For more details on data preparation, you can refer to AVA in [Data Preparation](/docs/en/data_preparation.md). ## Train @@ -75,7 +75,7 @@ Example: train ACRN with SlowFast backbone on AVA with periodic validation. 
python tools/train.py configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py --validate ``` -For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). +For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). ## Test @@ -91,4 +91,4 @@ Example: test ACRN with SlowFast backbone on AVA and dump the result to a csv fi python tools/test.py configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py checkpoints/SOME_CHECKPOINT.pth --eval mAP --out results.csv ``` -For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset) . +For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset) . diff --git a/configs/detection/acrn/README_zh-CN.md b/configs/detection/acrn/README_zh-CN.md index 3ec59cc495..13d7837d1d 100644 --- a/configs/detection/acrn/README_zh-CN.md +++ b/configs/detection/acrn/README_zh-CN.md @@ -46,7 +46,7 @@ 依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。 如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。 -对于数据集准备的细节,用户可参考 [数据准备](/docs_zh_CN/data_preparation.md)。 +对于数据集准备的细节,用户可参考 [数据准备](/docs/zh_cn/data_preparation.md)。 ## 如何训练 @@ -62,7 +62,7 @@ python tools/train.py ${CONFIG_FILE} [optional arguments] python tools/train.py configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py --validate ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -78,4 +78,4 @@ python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] python tools/test.py configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py checkpoints/SOME_CHECKPOINT.pth --eval mAP --out results.csv ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/detection/ava/README.md b/configs/detection/ava/README.md index 5fa66a4c18..c46895f007 100644 --- a/configs/detection/ava/README.md +++ b/configs/detection/ava/README.md @@ -86,7 +86,7 @@ AVA, with its realistic scene and action complexity, exposes the intrinsic diffi ::: -For more details on data preparation, you can refer to AVA in [Data Preparation](/docs/data_preparation.md). +For more details on data preparation, you can refer to AVA in [Data Preparation](/docs/en/data_preparation.md). ## Train @@ -102,7 +102,7 @@ Example: train SlowOnly model on AVA with periodic validation. python tools/train.py configs/detection/ava/slowonly_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py --validate ``` -For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting) . +For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting) . ### Train Custom Classes From Ava Dataset @@ -140,4 +140,4 @@ Example: test SlowOnly model on AVA and dump the result to a csv file. 
python tools/test.py configs/detection/ava/slowonly_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py checkpoints/SOME_CHECKPOINT.pth --eval mAP --out results.csv ``` -For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset) . +For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset) . diff --git a/configs/detection/ava/README_zh-CN.md b/configs/detection/ava/README_zh-CN.md index 6cd82f4a3e..a682a039ac 100644 --- a/configs/detection/ava/README_zh-CN.md +++ b/configs/detection/ava/README_zh-CN.md @@ -72,7 +72,7 @@ 如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。 2. **Context** 表示同时使用 RoI 特征与全局特征进行分类,可带来约 1% mAP 的提升。 -对于数据集准备的细节,用户可参考 [数据准备](/docs_zh_CN/data_preparation.md)。 +对于数据集准备的细节,用户可参考 [数据准备](/docs/zh_cn/data_preparation.md)。 ## 如何训练 @@ -88,7 +88,7 @@ python tools/train.py ${CONFIG_FILE} [optional arguments] python tools/train.py configs/detection/ava/slowonly_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py --validate ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 ### 训练 AVA 数据集中的自定义类别 @@ -126,4 +126,4 @@ python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] python tools/test.py configs/detection/ava/slowonly_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py checkpoints/SOME_CHECKPOINT.pth --eval mAP --out results.csv ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/detection/lfb/README.md b/configs/detection/lfb/README.md index 2bd9a2a233..ea88419dd8 100644 --- a/configs/detection/lfb/README.md +++ b/configs/detection/lfb/README.md @@ -98,7 +98,7 @@ python tools/train.py configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_ --validate --seed 0 --deterministic ``` -For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). +For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). ## Test @@ -123,4 +123,4 @@ python tools/test.py configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r checkpoints/SOME_CHECKPOINT.pth --eval mAP --out results.csv ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). 
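The `--out results.csv` flag in the AVA and LFB test commands above dumps detections as csv. A minimal sketch of consuming such a dump, assuming the standard AVA layout (one detection per row: video id, keyframe timestamp, normalized box corners, action id, confidence); the layout is an assumption about the evaluation convention, not something this patch defines:

```python
import csv
from collections import defaultdict

# Assumed AVA layout: video_id, timestamp, x1, y1, x2, y2, action_id, score,
# with box coordinates normalized to [0, 1].
detections = defaultdict(list)
with open('results.csv') as f:
    for video_id, ts, x1, y1, x2, y2, action_id, score in csv.reader(f):
        box = tuple(map(float, (x1, y1, x2, y2)))
        detections[(video_id, int(ts))].append(
            (int(action_id), float(score), box))

# Print the highest-confidence action for a few keyframes.
for key in sorted(detections)[:3]:
    print(key, max(detections[key], key=lambda d: d[1]))
```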
diff --git a/configs/detection/lfb/README_zh-CN.md b/configs/detection/lfb/README_zh-CN.md index 4c90a66bd5..3cb8bf5186 100644 --- a/configs/detection/lfb/README_zh-CN.md +++ b/configs/detection/lfb/README_zh-CN.md @@ -75,7 +75,7 @@ python tools/train.py configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_ --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 测试 @@ -100,4 +100,4 @@ python tools/test.py configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r checkpoints/SOME_CHECKPOINT.pth --eval mAP --out results.csv ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/localization/bmn/README.md b/configs/localization/bmn/README.md index 43147c2109..8c4cfcf8eb 100644 --- a/configs/localization/bmn/README.md +++ b/configs/localization/bmn/README.md @@ -60,7 +60,7 @@ Temporal action proposal generation is an challenging and promising task which a *We train BMN with the [official repo](https://github.com/JJBOY/BMN-Boundary-Matching-Network), evaluate its proposal generation and action detection performance with [anet_cuhk_2017](https://download.openmmlab.com/mmaction/localization/cuhk_anet17_pred.json) for label assigning. -For more details on data preparation, you can refer to ActivityNet feature in [Data Preparation](/docs/data_preparation.md). +For more details on data preparation, you can refer to ActivityNet feature in [Data Preparation](/docs/en/data_preparation.md). ## Train @@ -76,7 +76,7 @@ Example: train BMN model on ActivityNet features dataset. python tools/train.py configs/localization/bmn/bmn_400x100_2x8_9e_activitynet_feature.py ``` -For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting) . +For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting) . ## Test @@ -109,4 +109,4 @@ python tools/analysis/report_map.py --proposal path/to/proposal_file ::: -For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset) . +For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset) . 
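The `report_map.py` call in the BMN test section above consumes a proposal file. As a quick sanity check before computing mAP, one can inspect the proposals directly. A minimal sketch, assuming an ActivityNet-style dump (a top-level `results` dict mapping video ids to proposals carrying `segment` and `score` fields); those field names are an assumption, not guaranteed by this patch:

```python
from mmcv import load  # this series already uses mmcv.load/dump for such files

proposals_by_video = load('path/to/proposal_file')['results']  # assumed layout
video_id, proposals = next(iter(proposals_by_video.items()))

# Show the five highest-scoring proposals for one video.
for prop in sorted(proposals, key=lambda p: p['score'], reverse=True)[:5]:
    start, end = prop['segment']
    print(f'{video_id}: [{start:.1f}s, {end:.1f}s] score={prop["score"]:.3f}')
```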
diff --git a/configs/localization/bmn/README_zh-CN.md b/configs/localization/bmn/README_zh-CN.md index 3778f390fa..5e15f6b06e 100644 --- a/configs/localization/bmn/README_zh-CN.md +++ b/configs/localization/bmn/README_zh-CN.md @@ -48,7 +48,7 @@ *MMAction2 在 [原始代码库](https://github.com/JJBOY/BMN-Boundary-Matching-Network) 上训练 BMN,并且在 [anet_cuhk_2017](https://download.openmmlab.com/mmaction/localization/cuhk_anet17_pred.json) 的对应标签上评估时序动作候选生成和时序检测的结果。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 ActivityNet 特征部分。 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的 ActivityNet 特征部分。 ## 如何训练 @@ -64,7 +64,7 @@ python tools/train.py ${CONFIG_FILE} [optional arguments] python tools/train.py configs/localization/bmn/bmn_400x100_2x8_9e_activitynet_feature.py ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -95,4 +95,4 @@ python tools/analysis/report_map.py --proposal path/to/proposal_file python tools/data/activitynet/convert_proposal_format.py ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/localization/bsn/README.md b/configs/localization/bsn/README.md index d15b6361c7..b87eebcb9f 100644 --- a/configs/localization/bsn/README.md +++ b/configs/localization/bsn/README.md @@ -44,7 +44,7 @@ Temporal action proposal generation is an important yet challenging problem, sin ::: -For more details on data preparation, you can refer to ActivityNet feature in [Data Preparation](/docs/data_preparation.md). +For more details on data preparation, you can refer to ActivityNet feature in [Data Preparation](/docs/en/data_preparation.md). ## Train @@ -68,7 +68,7 @@ Examples: python tools/train.py configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py ``` -For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). +For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). ## Inference @@ -167,4 +167,4 @@ Examples: ::: -For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). +For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). diff --git a/configs/localization/bsn/README_zh-CN.md b/configs/localization/bsn/README_zh-CN.md index 6d0ddfc2df..7271bc1cf4 100644 --- a/configs/localization/bsn/README_zh-CN.md +++ b/configs/localization/bsn/README_zh-CN.md @@ -32,7 +32,7 @@ 2. 
对于 **特征** 这一列,`cuhk_mean_100` 表示所使用的特征为利用 [anet2016-cuhk](https://github.com/yjxiong/anet2016-cuhk) 代码库抽取的,被广泛利用的 CUHK ActivityNet 特征, `mmaction_video` 和 `mmaction_clip` 分布表示所使用的特征为利用 MMAction 抽取的,视频级别 ActivityNet 预训练模型的特征;视频片段级别 ActivityNet 预训练模型的特征。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 ActivityNet 特征部分。 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的 ActivityNet 特征部分。 ## 如何训练 @@ -56,7 +56,7 @@ python tools/train.py ${CONFIG_FILE} [optional arguments] python tools/train.py configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何进行推理 @@ -153,4 +153,4 @@ python tools/train.py ${CONFIG_FILE} [optional arguments] python tools/data/activitynet/convert_proposal_format.py ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/localization/ssn/README.md b/configs/localization/ssn/README.md index c5e5dc09fa..b5c2a68257 100644 --- a/configs/localization/ssn/README.md +++ b/configs/localization/ssn/README.md @@ -37,7 +37,7 @@ year = {2017} According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, e.g., lr=0.01 for 4 GPUs x 2 video/gpu and lr=0.08 for 16 GPUs x 4 video/gpu. 2. Since SSN utilizes different structured temporal pyramid pooling methods at training and testing, please refer to [ssn_r50_450e_thumos14_rgb_train](/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py) at training and [ssn_r50_450e_thumos14_rgb_test](/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_test.py) at testing. -3. We evaluate the action detection performance of SSN, using action proposals of TAG. For more details on data preparation, you can refer to thumos14 TAG proposals in [Data Preparation](/docs/data_preparation.md). +3. We evaluate the action detection performance of SSN, using action proposals of TAG. For more details on data preparation, you can refer to thumos14 TAG proposals in [Data Preparation](/docs/en/data_preparation.md). 4. The reference SSN in is evaluated with `ResNet50` backbone in MMAction, which is the same backbone with ours. Note that the original setting of MMAction SSN uses the `BNInception` backbone. ::: @@ -56,7 +56,7 @@ Example: train SSN model on thumos14 dataset. python tools/train.py configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py ``` -For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). +For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). ## Test @@ -73,4 +73,4 @@ Example: test BMN on ActivityNet feature dataset. python tools/test.py configs/localization/ssn/ssn_r50_450e_thumos14_rgb_test.py checkpoints/SOME_CHECKPOINT.pth --eval mAP ``` -For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). +For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). 
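The Linear Scaling Rule note repeated across these READMEs is plain proportionality: scale the learning rate by the ratio of your total batch size (GPUs x videos per GPU) to the reference batch size. A worked check of the numbers quoted in the SSN notes above:

```python
def scaled_lr(base_lr, base_batch, num_gpus, videos_per_gpu):
    # Linear Scaling Rule: lr grows in proportion to the total batch size.
    return base_lr * (num_gpus * videos_per_gpu) / base_batch

# Reference setting from the notes: lr=0.01 at 4 GPUs x 2 video/gpu (batch 8).
print(scaled_lr(0.01, 8, 4, 2))   # 0.01
# Scaling to 16 GPUs x 4 video/gpu (batch 64) gives the quoted lr=0.08.
print(scaled_lr(0.01, 8, 16, 4))  # 0.08
```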
diff --git a/configs/localization/ssn/README_zh-CN.md b/configs/localization/ssn/README_zh-CN.md index d1ec5bbcee..3b85c61ad1 100644 --- a/configs/localization/ssn/README_zh-CN.md +++ b/configs/localization/ssn/README_zh-CN.md @@ -26,7 +26,7 @@ year = {2017} 依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。 如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。 2. 由于 SSN 在训练和测试阶段使用不同的结构化时序金字塔池化方法(structured temporal pyramid pooling methods),请分别参考 [ssn_r50_450e_thumos14_rgb_train](/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py) 和 [ssn_r50_450e_thumos14_rgb_test](/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_test.py)。 -3. MMAction2 使用 TAG 的时序动作候选进行 SSN 模型的精度验证。关于数据准备的更多细节,用户可参考 [Data 数据集准备文档](/docs_zh_CN/data_preparation.md) 准备 thumos14 的 TAG 时序动作候选。 +3. MMAction2 使用 TAG 的时序动作候选进行 SSN 模型的精度验证。关于数据准备的更多细节,用户可参考 [Data 数据集准备文档](/docs/zh_cn/data_preparation.md) 准备 thumos14 的 TAG 时序动作候选。 4. 参考代码的 SSN 模型是和 MMAction2 一样在 `ResNet50` 主干网络上验证的。注意,这里的 SSN 的初始设置与原代码库的 `BNInception` 骨干网络的设置相同。 ## 如何训练 @@ -43,7 +43,7 @@ python tools/train.py ${CONFIG_FILE} [optional arguments] python tools/train.py configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -60,4 +60,4 @@ python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] python tools/test.py configs/localization/ssn/ssn_r50_450e_thumos14_rgb_test.py checkpoints/SOME_CHECKPOINT.pth --eval mAP ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/c3d/README.md b/configs/recognition/c3d/README.md index 067097fdbe..0ea46809cb 100644 --- a/configs/recognition/c3d/README.md +++ b/configs/recognition/c3d/README.md @@ -45,7 +45,7 @@ eid = {arXiv:1412.0767} ::: -For more details on data preparation, you can refer to UCF-101 in [Data Preparation](/docs/data_preparation.md). +For more details on data preparation, you can refer to UCF-101 in [Data Preparation](/docs/en/data_preparation.md). ## Train @@ -62,7 +62,7 @@ python tools/train.py configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). ## Test @@ -79,4 +79,4 @@ python tools/test.py configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb. checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). diff --git a/configs/recognition/c3d/README_zh-CN.md b/configs/recognition/c3d/README_zh-CN.md index c4f02c16f2..6f1965bee0 100644 --- a/configs/recognition/c3d/README_zh-CN.md +++ b/configs/recognition/c3d/README_zh-CN.md @@ -32,7 +32,7 @@ eid = {arXiv:1412.0767} 3. 
这里的 **推理时间** 是根据 [基准测试脚本](/tools/analysis/benchmark.py) 获得的,采用测试时的采帧策略,且只考虑模型的推理时间, 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 UCF-101 部分。 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的 UCF-101 部分。 ## 如何训练 @@ -49,7 +49,7 @@ python tools/train.py configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -66,4 +66,4 @@ python tools/test.py configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb. checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/csn/README.md b/configs/recognition/csn/README.md index 3a48f6bbda..5e3d4e4f57 100644 --- a/configs/recognition/csn/README.md +++ b/configs/recognition/csn/README.md @@ -66,7 +66,7 @@ doi = {10.1109/ICCV.2019.00565} ::: -For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). +For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/en/data_preparation.md). ## Train @@ -84,7 +84,7 @@ python tools/train.py configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1 --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). ## Test @@ -102,4 +102,4 @@ python tools/test.py configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_ --out result.json --average-clips prob ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). diff --git a/configs/recognition/csn/README_zh-CN.md b/configs/recognition/csn/README_zh-CN.md index 06a28cd5c5..4ad92b64fd 100644 --- a/configs/recognition/csn/README_zh-CN.md +++ b/configs/recognition/csn/README_zh-CN.md @@ -53,7 +53,7 @@ doi = {10.1109/ICCV.2019.00565} 3. 这里使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 4. 
这里的 **infer_ckpt** 表示该模型权重文件是从 [VMZ](https://github.com/facebookresearch/VMZ) 导入的。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的 Kinetics400 部分。 ## 如何训练 @@ -71,7 +71,7 @@ python tools/train.py configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1 --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -89,4 +89,4 @@ python tools/test.py configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_ --out result.json --average-clips prob ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/i3d/README.md b/configs/recognition/i3d/README.md index 5a2bfd7a33..bf67c6f189 100644 --- a/configs/recognition/i3d/README.md +++ b/configs/recognition/i3d/README.md @@ -64,7 +64,7 @@ The paucity of videos in current action classification datasets (UCF-101 and HMD ::: -For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). +For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/en/data_preparation.md). ## Train @@ -82,7 +82,7 @@ python tools/train.py configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rg --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). ## Test @@ -100,4 +100,4 @@ python tools/test.py configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb --out result.json --average-clips prob ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). diff --git a/configs/recognition/i3d/README_zh-CN.md b/configs/recognition/i3d/README_zh-CN.md index ac10732615..6e778cd7c9 100644 --- a/configs/recognition/i3d/README_zh-CN.md +++ b/configs/recognition/i3d/README_zh-CN.md @@ -52,7 +52,7 @@ 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 3. 
我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的 Kinetics400 部分。 ## 如何训练 @@ -70,7 +70,7 @@ python tools/train.py configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rg --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -88,4 +88,4 @@ python tools/test.py configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb --out result.json --average-clips prob ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/r2plus1d/README.md b/configs/recognition/r2plus1d/README.md index f9cd05cca1..9671e88cb4 100644 --- a/configs/recognition/r2plus1d/README.md +++ b/configs/recognition/r2plus1d/README.md @@ -46,7 +46,7 @@ In this paper we discuss several forms of spatiotemporal convolutions for video ::: -For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). +For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/en/data_preparation.md). ## Train @@ -64,7 +64,7 @@ python tools/train.py configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinet --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). ## Test @@ -82,4 +82,4 @@ python tools/test.py configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kineti --out result.json --average-clips=prob ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). diff --git a/configs/recognition/r2plus1d/README_zh-CN.md b/configs/recognition/r2plus1d/README_zh-CN.md index d720508dc0..5df080c927 100644 --- a/configs/recognition/r2plus1d/README_zh-CN.md +++ b/configs/recognition/r2plus1d/README_zh-CN.md @@ -34,7 +34,7 @@ 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 3. 
我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的 Kinetics400 部分。 ## 如何训练 @@ -52,7 +52,7 @@ python tools/train.py configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinet --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -70,4 +70,4 @@ python tools/test.py configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kineti --out result.json --average-clips=prob ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/slowfast/README.md b/configs/recognition/slowfast/README.md index 4bbdbd4f0c..61f461c98e 100644 --- a/configs/recognition/slowfast/README.md +++ b/configs/recognition/slowfast/README.md @@ -56,7 +56,7 @@ We present SlowFast networks for video recognition. Our model involves (i) a Slo ::: -For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). +For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/en/data_preparation.md). ## Train @@ -74,7 +74,7 @@ python tools/train.py configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kine --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). ## Test @@ -92,4 +92,4 @@ python tools/test.py configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinet --out result.json --average-clips=prob ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). diff --git a/configs/recognition/slowfast/README_zh-CN.md b/configs/recognition/slowfast/README_zh-CN.md index 7605871d2d..95e9383140 100644 --- a/configs/recognition/slowfast/README_zh-CN.md +++ b/configs/recognition/slowfast/README_zh-CN.md @@ -44,7 +44,7 @@ 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 3. 
我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的 Kinetics400 部分。 ## 如何训练 @@ -62,7 +62,7 @@ python tools/train.py configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kine --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -80,4 +80,4 @@ python tools/test.py configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinet --out result.json --average-clips=prob ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/slowonly/README.md b/configs/recognition/slowonly/README.md index d5846782ae..622c3fde7a 100644 --- a/configs/recognition/slowonly/README.md +++ b/configs/recognition/slowonly/README.md @@ -118,7 +118,7 @@ In data benchmark, we compare two different data preprocessing methods: (1) Resi ::: -For more details on data preparation, you can refer to corresponding parts in [Data Preparation](/docs/data_preparation.md). +For more details on data preparation, you can refer to corresponding parts in [Data Preparation](/docs/en/data_preparation.md). ## Train @@ -136,7 +136,7 @@ python tools/train.py configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kine --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). ## Test @@ -154,4 +154,4 @@ python tools/test.py configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinet --out result.json --average-clips=prob ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). diff --git a/configs/recognition/slowonly/README_zh-CN.md b/configs/recognition/slowonly/README_zh-CN.md index 917be85500..29109d2c5d 100644 --- a/configs/recognition/slowonly/README_zh-CN.md +++ b/configs/recognition/slowonly/README_zh-CN.md @@ -106,7 +106,7 @@ 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 3. 
我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的 Kinetics400 部分。 ## 如何训练 @@ -124,7 +124,7 @@ python tools/train.py configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kine --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -142,4 +142,4 @@ python tools/test.py configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinet --out result.json --average-clips=prob ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/tanet/README.md b/configs/recognition/tanet/README.md index 37760e5042..10f76d2aa9 100644 --- a/configs/recognition/tanet/README.md +++ b/configs/recognition/tanet/README.md @@ -50,7 +50,7 @@ Video data is with complex temporal dynamics due to various factors such as came ::: -For more details on data preparation, you can refer to corresponding parts in [Data Preparation](/docs/data_preparation.md). +For more details on data preparation, you can refer to corresponding parts in [Data Preparation](/docs/en/data_preparation.md). ## Train @@ -68,7 +68,7 @@ python tools/train.py configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinet --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). ## Test @@ -86,4 +86,4 @@ python tools/test.py configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kineti --out result.json ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). diff --git a/configs/recognition/tanet/README_zh-CN.md b/configs/recognition/tanet/README_zh-CN.md index 4902cf8430..02e42201c3 100644 --- a/configs/recognition/tanet/README_zh-CN.md +++ b/configs/recognition/tanet/README_zh-CN.md @@ -38,7 +38,7 @@ 3. 参考代码的结果是通过使用相同的模型配置在原来的代码库上训练得到的。对应的模型权重文件可从 [这里](https://drive.google.com/drive/folders/1sFfmP3yrfc7IzRshEELOby7-aEoymIFL?usp=sharing) 下载。 4. 
我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的 Kinetics400 部分。 ## 如何训练 @@ -56,7 +56,7 @@ python tools/train.py configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinet --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -74,4 +74,4 @@ python tools/test.py configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kineti --out result.json ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/timesformer/README.md b/configs/recognition/timesformer/README.md index 54b4f25443..2ee361d949 100644 --- a/configs/recognition/timesformer/README.md +++ b/configs/recognition/timesformer/README.md @@ -46,7 +46,7 @@ We present a convolution-free approach to video classification built exclusively ::: -For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). +For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/en/data_preparation.md). ## Train @@ -64,7 +64,7 @@ python tools/train.py configs/recognition/timesformer/timesformer_divST_8x32x1_1 --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). ## Test @@ -82,4 +82,4 @@ python tools/test.py configs/recognition/timesformer/timesformer_divST_8x32x1_15 --out result.json ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). diff --git a/configs/recognition/timesformer/README_zh-CN.md b/configs/recognition/timesformer/README_zh-CN.md index c844917e01..d84d2fe2e0 100644 --- a/configs/recognition/timesformer/README_zh-CN.md +++ b/configs/recognition/timesformer/README_zh-CN.md @@ -33,7 +33,7 @@ 2. MMAction2 保持与 [原代码](https://github.com/facebookresearch/TimeSformer) 的测试设置一致(three crop x 1 clip)。 3. 
TimeSformer 使用的预训练模型 `vit_base_patch16_224.pth` 转换自 [vision_transformer](https://github.com/google-research/vision_transformer)。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的 Kinetics400 部分。 ## 如何训练 @@ -51,7 +51,7 @@ python tools/train.py configs/recognition/timesformer/timesformer_divST_8x32x1_1 --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -69,4 +69,4 @@ python tools/test.py configs/recognition/timesformer/timesformer_divST_8x32x1_15 --out result.json ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/tin/README.md b/configs/recognition/tin/README.md index cf57eed749..eb7fc375d7 100644 --- a/configs/recognition/tin/README.md +++ b/configs/recognition/tin/README.md @@ -60,7 +60,7 @@ Here, we use `finetune` to indicate that we use [TSM model](https://download.ope ::: -For more details on data preparation, you can refer to Kinetics400, Something-Something V1 and Something-Something V2 in [Data Preparation](/docs/data_preparation.md). +For more details on data preparation, you can refer to Kinetics400, Something-Something V1 and Something-Something V2 in [Data Preparation](/docs/en/data_preparation.md). ## Train @@ -78,7 +78,7 @@ python tools/train.py configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py \ --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). ## Test @@ -96,4 +96,4 @@ python tools/test.py configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py \ --out result.json ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). diff --git a/configs/recognition/tin/README_zh-CN.md b/configs/recognition/tin/README_zh-CN.md index 2747fa6c94..431769addb 100644 --- a/configs/recognition/tin/README_zh-CN.md +++ b/configs/recognition/tin/README_zh-CN.md @@ -46,7 +46,7 @@ 4. 参考代码的结果是通过使用相同的模型配置在原来的代码库上训练得到的。 5. 
我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400, Something-Something V1 and Something-Something V2 部分。 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的 Kinetics400, Something-Something V1 and Something-Something V2 部分。 ## 如何训练 @@ -64,7 +64,7 @@ python tools/train.py configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py \ --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -82,4 +82,4 @@ python tools/test.py configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py \ --out result.json ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/tpn/README.md b/configs/recognition/tpn/README.md index 7ce9ce6f63..304bc5ecad 100644 --- a/configs/recognition/tpn/README.md +++ b/configs/recognition/tpn/README.md @@ -51,7 +51,7 @@ Visual tempo characterizes the dynamics and the temporal scale of an action. Mod ::: -For more details on data preparation, you can refer to Kinetics400, Something-Something V1 and Something-Something V2 in [Data Preparation](/docs/data_preparation.md). +For more details on data preparation, you can refer to Kinetics400, Something-Something V1 and Something-Something V2 in [Data Preparation](/docs/en/data_preparation.md). ## Train @@ -68,7 +68,7 @@ python tools/train.py configs/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kineti --work-dir work_dirs/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb [--validate --seed 0 --deterministic] ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). ## Test @@ -86,4 +86,4 @@ python tools/test.py configs/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetic --out result.json --average-clips prob ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). 
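The `--average-clips prob` option in the TPN test command above controls how per-clip predictions are fused into one video-level prediction: `'score'` averages the raw clip scores, while `'prob'` converts each clip to class probabilities first and then averages those. A minimal pure-PyTorch sketch of the two modes, as an illustration of the idea rather than mmaction2's exact implementation:

```python
import torch
import torch.nn.functional as F

clip_logits = torch.randn(10, 400)  # e.g. 10 test clips x 400 classes

# average_clips='score': average the raw clip scores.
video_score = clip_logits.mean(dim=0)

# average_clips='prob': softmax each clip, then average the probabilities.
video_prob = F.softmax(clip_logits, dim=1).mean(dim=0)

print(video_score.argmax().item(), video_prob.argmax().item())
```

The two fusions can disagree on the argmax class, which is why the choice is exposed as a test-time option at all.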
diff --git a/configs/recognition/tpn/README_zh-CN.md b/configs/recognition/tpn/README_zh-CN.md index ec66656d1d..e525c2140d 100644 --- a/configs/recognition/tpn/README_zh-CN.md +++ b/configs/recognition/tpn/README_zh-CN.md @@ -53,7 +53,7 @@ python tools/train.py configs/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kineti --work-dir work_dirs/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb [--validate --seed 0 --deterministic] ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -71,4 +71,4 @@ python tools/test.py configs/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetic --out result.json --average-clips prob ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/trn/README.md b/configs/recognition/trn/README.md index ff2f4d8785..75ad603e63 100644 --- a/configs/recognition/trn/README.md +++ b/configs/recognition/trn/README.md @@ -70,7 +70,7 @@ python tools/train.py configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py \ --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). ## Test @@ -88,4 +88,4 @@ python tools/test.py configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py \ --out result.json ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). diff --git a/configs/recognition/trn/README_zh-CN.md b/configs/recognition/trn/README_zh-CN.md index d0e85f015c..beb575159f 100644 --- a/configs/recognition/trn/README_zh-CN.md +++ b/configs/recognition/trn/README_zh-CN.md @@ -57,7 +57,7 @@ python tools/train.py configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py \ --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -75,4 +75,4 @@ python tools/test.py configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py \ --out result.json ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/tsm/README.md b/configs/recognition/tsm/README.md index c3528ee5be..994723a308 100644 --- a/configs/recognition/tsm/README.md +++ b/configs/recognition/tsm/README.md @@ -151,7 +151,7 @@ test_pipeline = [ ::: -For more details on data preparation, you can refer to corresponding parts in [Data Preparation](/docs/data_preparation.md). +For more details on data preparation, you can refer to corresponding parts in [Data Preparation](/docs/en/data_preparation.md). ## Train @@ -169,7 +169,7 @@ python tools/train.py configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb. --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). 
## Test @@ -187,4 +187,4 @@ python tools/test.py configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.p --out result.json ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). diff --git a/configs/recognition/tsm/README_zh-CN.md b/configs/recognition/tsm/README_zh-CN.md index f95876fd9e..ccf584d57a 100644 --- a/configs/recognition/tsm/README_zh-CN.md +++ b/configs/recognition/tsm/README_zh-CN.md @@ -145,7 +145,7 @@ test_pipeline = [ 6. 我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 7. 这里的 **infer_ckpt** 表示该模型权重文件是从 [TSM](https://github.com/mit-han-lab/temporal-shift-module/blob/master/test_models.py) 导入的。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400, Something-Something V1 and Something-Something V2 部分。 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的 Kinetics400, Something-Something V1 and Something-Something V2 部分。 ## 如何训练 @@ -163,7 +163,7 @@ python tools/train.py configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb. --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -181,4 +181,4 @@ python tools/test.py configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.p --out result.json ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/tsn/README.md b/configs/recognition/tsn/README.md index f3f5811ef5..276234f280 100644 --- a/configs/recognition/tsn/README.md +++ b/configs/recognition/tsn/README.md @@ -224,7 +224,7 @@ python tools/train.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). ## Test @@ -242,4 +242,4 @@ python tools/test.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb. --out result.json ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). 
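The TSM/TSN test commands above run on a single GPU. For completeness, a minimal multi-GPU testing sketch, assuming the standard `tools/dist_test.sh` helper shipped with MMAction2; `${CHECKPOINT_FILE}` is a placeholder for a trained checkpoint path:

```shell
# Sketch: evaluate the TSN config referenced above on 8 GPUs and dump
# the raw predictions to result.json.
bash tools/dist_test.sh configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py \
    ${CHECKPOINT_FILE} 8 \
    --eval top_k_accuracy mean_class_accuracy \
    --out result.json
```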
diff --git a/configs/recognition/tsn/README_zh-CN.md b/configs/recognition/tsn/README_zh-CN.md index 69e95459a5..5cee3ea365 100644 --- a/configs/recognition/tsn/README_zh-CN.md +++ b/configs/recognition/tsn/README_zh-CN.md @@ -213,7 +213,7 @@ python tools/train.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -231,4 +231,4 @@ python tools/test.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb. --out result.json ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/x3d/README.md b/configs/recognition/x3d/README.md index a7a3c7e715..cea5789cfc 100644 --- a/configs/recognition/x3d/README.md +++ b/configs/recognition/x3d/README.md @@ -44,7 +44,7 @@ This paper presents X3D, a family of efficient video networks that progressively ::: -For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). +For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/en/data_preparation.md). ## Test @@ -62,4 +62,4 @@ python tools/test.py configs/recognition/x3d/x3d_s_13x6x1_facebook_kinetics400_r --out result.json --average-clips prob ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). diff --git a/configs/recognition/x3d/README_zh-CN.md b/configs/recognition/x3d/README_zh-CN.md index 3b09e5276b..947b5bf093 100644 --- a/configs/recognition/x3d/README_zh-CN.md +++ b/configs/recognition/x3d/README_zh-CN.md @@ -31,7 +31,7 @@ 1. 参考代码的结果是通过使用相同的数据和原来的代码库所提供的模型进行测试得到的。 2. 我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的 Kinetics400 部分 ## 如何测试 @@ -49,4 +49,4 @@ python tools/test.py configs/recognition/x3d/x3d_s_13x6x1_facebook_kinetics400_r --out result.json --average-clips prob ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition_audio/resnet/README.md b/configs/recognition_audio/resnet/README.md index a873edaeed..6ebdc8cc72 100644 --- a/configs/recognition_audio/resnet/README.md +++ b/configs/recognition_audio/resnet/README.md @@ -32,7 +32,7 @@ ::: -For more details on data preparation, you can refer to ``Prepare audio`` in [Data Preparation](/docs/data_preparation.md). +For more details on data preparation, you can refer to ``Prepare audio`` in [Data Preparation](/docs/en/data_preparation.md). 
## Train @@ -50,7 +50,7 @@ python tools/train.py configs/audio_recognition/tsn_r50_64x1x1_100e_kinetics400_ --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). ## Test @@ -68,7 +68,7 @@ python tools/test.py configs/audio_recognition/tsn_r50_64x1x1_100e_kinetics400_a --out result.json ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). ## Fusion diff --git a/configs/recognition_audio/resnet/README_zh-CN.md b/configs/recognition_audio/resnet/README_zh-CN.md index bf1188ff46..c3a38dd0e1 100644 --- a/configs/recognition_audio/resnet/README_zh-CN.md +++ b/configs/recognition_audio/resnet/README_zh-CN.md @@ -31,7 +31,7 @@ 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 3. 我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的准备音频部分。 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的准备音频部分。 ## 如何训练 @@ -49,7 +49,7 @@ python tools/train.py configs/audio_recognition/tsn_r50_64x1x1_100e_kinetics400_ --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -67,7 +67,7 @@ python tools/test.py configs/audio_recognition/tsn_r50_64x1x1_100e_kinetics400_a --out result.json ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 ## 融合 diff --git a/configs/skeleton/2s-agcn/README.md b/configs/skeleton/2s-agcn/README.md index d6049c735c..5013f4e7c4 100644 --- a/configs/skeleton/2s-agcn/README.md +++ b/configs/skeleton/2s-agcn/README.md @@ -58,7 +58,7 @@ python tools/train.py configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). ## Test @@ -84,4 +84,4 @@ python tools/test.py configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py \ --out bone_result.pkl ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). 
diff --git a/configs/skeleton/2s-agcn/README_zh-CN.md b/configs/skeleton/2s-agcn/README_zh-CN.md index ae7cc00a20..5e5f0f4092 100644 --- a/configs/skeleton/2s-agcn/README_zh-CN.md +++ b/configs/skeleton/2s-agcn/README_zh-CN.md @@ -47,7 +47,7 @@ python tools/train.py configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -73,4 +73,4 @@ python tools/test.py configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py \ --out bone_result.pkl ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/skeleton/posec3d/README.md b/configs/skeleton/posec3d/README.md index 3b8b686db6..7cc5c22f16 100644 --- a/configs/skeleton/posec3d/README.md +++ b/configs/skeleton/posec3d/README.md @@ -125,7 +125,7 @@ python tools/train.py configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoin For training with your custom dataset, you can refer to [Custom Dataset Training](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md). -For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). ## Test @@ -143,4 +143,4 @@ python tools/test.py configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint --out result.pkl ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). diff --git a/configs/skeleton/posec3d/README_zh-CN.md b/configs/skeleton/posec3d/README_zh-CN.md index 4c4cdf8d46..734adb9213 100644 --- a/configs/skeleton/posec3d/README_zh-CN.md +++ b/configs/skeleton/posec3d/README_zh-CN.md @@ -112,7 +112,7 @@ python tools/train.py configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoin 有关自定义数据集上的训练,可以参考 [Custom Dataset Training](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md)。 -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -130,4 +130,4 @@ python tools/test.py configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint --out result.pkl ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/skeleton/stgcn/README.md b/configs/skeleton/stgcn/README.md index 98b95a5cf9..6ca35a4e20 100644 --- a/configs/skeleton/stgcn/README.md +++ b/configs/skeleton/stgcn/README.md @@ -60,7 +60,7 @@ python tools/train.py configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py \ --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). 
## Test

@@ -78,4 +78,4 @@ python tools/test.py configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py \
     --out result.pkl
 ```

-For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset).
+For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset).
diff --git a/configs/skeleton/stgcn/README_zh-CN.md b/configs/skeleton/stgcn/README_zh-CN.md
index c7e57077cd..48fc4f6d90 100644
--- a/configs/skeleton/stgcn/README_zh-CN.md
+++ b/configs/skeleton/stgcn/README_zh-CN.md
@@ -49,7 +49,7 @@ python tools/train.py configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py \
     --validate --seed 0 --deterministic
 ```

-更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。
+更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。

 ## 如何测试

@@ -67,4 +67,4 @@ python tools/test.py configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py \
     --out result.pkl
 ```

-更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。
+更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。
diff --git a/demo/mmaction2_tutorial.ipynb b/demo/mmaction2_tutorial.ipynb
index 14441ab79b..72c0639b27 100644
--- a/demo/mmaction2_tutorial.ipynb
+++ b/demo/mmaction2_tutorial.ipynb
@@ -430,7 +430,7 @@
    "source": [
     "### Support a new dataset\n",
     "\n",
-    "In this tutorial, we gives an example to convert the data into the format of existing datasets. Other methods and more advanced usages can be found in the [doc](/docs/tutorials/new_dataset.md)\n",
+    "In this tutorial, we give an example to convert the data into the format of existing datasets. Other methods and more advanced usages can be found in the [doc](/docs/en/tutorials/new_dataset.md)\n",
     "\n",
     "Firstly, let's download a tiny dataset obtained from [Kinetics-400](https://deepmind.com/research/open-source/open-source-datasets/kinetics/). We select 30 videos with their labels as train dataset and 10 videos with their labels as test dataset."
]
diff --git a/demo/mmaction2_tutorial_zh-CN.ipynb b/demo/mmaction2_tutorial_zh-CN.ipynb
index 28940ce931..c5b545893b 100644
--- a/demo/mmaction2_tutorial_zh-CN.ipynb
+++ b/demo/mmaction2_tutorial_zh-CN.ipynb
@@ -405,7 +405,7 @@
    "source": [
     "### 支持新数据集\n",
     "\n",
-    "这里我们给出将数据转换为已有数据集格式的示例。其他方法可以参考[doc](/docs/tutorials/new_dataset.md)\n",
+    "这里我们给出将数据转换为已有数据集格式的示例。其他方法可以参考[doc](/docs/en/tutorials/new_dataset.md)\n",
     "\n",
     "用到的是一个从[Kinetics-400](https://deepmind.com/research/open-source/open-source-datasets/kinetics/)中获取的tiny数据集。包含30个训练视频,10个测试视频。"
]
diff --git a/docs/Makefile b/docs/en/Makefile
similarity index 100%
rename from docs/Makefile
rename to docs/en/Makefile
diff --git a/docs/_static/css/readthedocs.css b/docs/en/_static/css/readthedocs.css
similarity index 100%
rename from docs/_static/css/readthedocs.css
rename to docs/en/_static/css/readthedocs.css
diff --git a/docs/_static/images/mmaction2.png b/docs/en/_static/images/mmaction2.png
similarity index 100%
rename from docs/_static/images/mmaction2.png
rename to docs/en/_static/images/mmaction2.png
diff --git a/docs/api.rst b/docs/en/api.rst
similarity index 100%
rename from docs/api.rst
rename to docs/en/api.rst
diff --git a/docs/benchmark.md b/docs/en/benchmark.md
similarity index 100%
rename from docs/benchmark.md
rename to docs/en/benchmark.md
diff --git a/docs/changelog.md b/docs/en/changelog.md
similarity index 100%
rename from docs/changelog.md
rename to docs/en/changelog.md
diff --git a/docs/conf.py b/docs/en/conf.py
similarity index 97%
rename from docs/conf.py
rename to docs/en/conf.py
index 3248b1f326..40bc92fc8d 100644
--- a/docs/conf.py
+++ b/docs/en/conf.py
@@ -17,14 +17,14 @@
 import pytorch_sphinx_theme

-sys.path.insert(0, os.path.abspath('..'))
+sys.path.insert(0, os.path.abspath('../..'))

 # -- Project information -----------------------------------------------------

 project = 'MMAction2'
 copyright = '2020, OpenMMLab'
 author = 'MMAction2 Authors'
-version_file = '../mmaction/version.py'
+version_file = '../../mmaction/version.py'


 def get_version():
diff --git a/docs/data_preparation.md b/docs/en/data_preparation.md
similarity index 100%
rename from docs/data_preparation.md
rename to docs/en/data_preparation.md
diff --git a/docs/faq.md b/docs/en/faq.md
similarity index 98%
rename from docs/faq.md
rename to docs/en/faq.md
index 0a462b7b8a..583cdc31af 100644
--- a/docs/faq.md
+++ b/docs/en/faq.md
@@ -22,7 +22,7 @@ If the contents here do not cover your issue, please create an issue using the [
 - **"OSError: MoviePy Error: creation of None failed because of the following error"**

-    Refer to [install.md](https://github.com/open-mmlab/mmaction2/blob/master/docs/install.md#requirements)
+    Refer to [install.md](https://github.com/open-mmlab/mmaction2/blob/master/docs/en/install.md#requirements)

     1. For Windows users, [ImageMagick](https://www.imagemagick.org/script/index.php) will not be automatically detected by MoviePy, there is a need to modify `moviepy/config_defaults.py` file by providing the path to the ImageMagick binary called `magick`, like `IMAGEMAGICK_BINARY = "C:\\Program Files\\ImageMagick_VERSION\\magick.exe"`
     2. For Linux users, there is a need to modify the `/etc/ImageMagick-6/policy.xml` file by commenting out `<policy domain="path" rights="none" pattern="@*" />` to `<!-- <policy domain="path" rights="none" pattern="@*" /> -->`, if ImageMagick is not detected by moviepy.
@@ -56,7 +56,7 @@ If the contents here do not cover your issue, please create an issue using the [ - **How to just use trained recognizer models for backbone pre-training?** - Refer to [Use Pre-Trained Model](https://github.com/open-mmlab/mmaction2/blob/master/docs/tutorials/2_finetune.md#use-pre-trained-model), + Refer to [Use Pre-Trained Model](https://github.com/open-mmlab/mmaction2/blob/master/docs/en/tutorials/2_finetune.md#use-pre-trained-model), in order to use the pre-trained model for the whole network, the new config adds the link of pre-trained models in the `load_from`. And to use backbone for pre-training, you can change `pretrained` value in the backbone dict of config files to the checkpoint path / url. @@ -106,7 +106,7 @@ If the contents here do not cover your issue, please create an issue using the [ - **How to set `load_from` value in config files to finetune models?** - In MMAction2, We set `load_from=None` as default in `configs/_base_/default_runtime.py` and owing to [inheritance design](/docs/tutorials/1_config.md), + In MMAction2, We set `load_from=None` as default in `configs/_base_/default_runtime.py` and owing to [inheritance design](/docs/en/tutorials/1_config.md), users can directly change it by setting `load_from` in their configs. ## Testing diff --git a/docs/feature_extraction.md b/docs/en/feature_extraction.md similarity index 100% rename from docs/feature_extraction.md rename to docs/en/feature_extraction.md diff --git a/docs/getting_started.md b/docs/en/getting_started.md similarity index 100% rename from docs/getting_started.md rename to docs/en/getting_started.md diff --git a/docs/index.rst b/docs/en/index.rst similarity index 100% rename from docs/index.rst rename to docs/en/index.rst diff --git a/docs/install.md b/docs/en/install.md similarity index 100% rename from docs/install.md rename to docs/en/install.md diff --git a/docs/make.bat b/docs/en/make.bat similarity index 100% rename from docs/make.bat rename to docs/en/make.bat diff --git a/docs/merge_docs.sh b/docs/en/merge_docs.sh similarity index 100% rename from docs/merge_docs.sh rename to docs/en/merge_docs.sh diff --git a/docs/projects.md b/docs/en/projects.md similarity index 100% rename from docs/projects.md rename to docs/en/projects.md diff --git a/docs/stat.py b/docs/en/stat.py similarity index 100% rename from docs/stat.py rename to docs/en/stat.py diff --git a/docs/supported_datasets.md b/docs/en/supported_datasets.md similarity index 100% rename from docs/supported_datasets.md rename to docs/en/supported_datasets.md diff --git a/docs/switch_language.md b/docs/en/switch_language.md similarity index 100% rename from docs/switch_language.md rename to docs/en/switch_language.md diff --git a/docs/tutorials/1_config.md b/docs/en/tutorials/1_config.md similarity index 100% rename from docs/tutorials/1_config.md rename to docs/en/tutorials/1_config.md diff --git a/docs/tutorials/2_finetune.md b/docs/en/tutorials/2_finetune.md similarity index 96% rename from docs/tutorials/2_finetune.md rename to docs/en/tutorials/2_finetune.md index f29263601e..91d075f3c3 100644 --- a/docs/tutorials/2_finetune.md +++ b/docs/en/tutorials/2_finetune.md @@ -91,7 +91,7 @@ checkpoint_config = dict(interval=5) ## Use Pre-Trained Model To use the pre-trained model for the whole network, the new config adds the link of pre-trained models in the `load_from`. 
-We set `load_from=None` as default in `configs/_base_/default_runtime.py` and owing to [inheritance design](/docs/tutorials/1_config.md), users can directly change it by setting `load_from` in their configs. +We set `load_from=None` as default in `configs/_base_/default_runtime.py` and owing to [inheritance design](/docs/en/tutorials/1_config.md), users can directly change it by setting `load_from` in their configs. ```python # use the pre-trained model for the whole TSN network diff --git a/docs/tutorials/3_new_dataset.md b/docs/en/tutorials/3_new_dataset.md similarity index 100% rename from docs/tutorials/3_new_dataset.md rename to docs/en/tutorials/3_new_dataset.md diff --git a/docs/tutorials/4_data_pipeline.md b/docs/en/tutorials/4_data_pipeline.md similarity index 100% rename from docs/tutorials/4_data_pipeline.md rename to docs/en/tutorials/4_data_pipeline.md diff --git a/docs/tutorials/5_new_modules.md b/docs/en/tutorials/5_new_modules.md similarity index 100% rename from docs/tutorials/5_new_modules.md rename to docs/en/tutorials/5_new_modules.md diff --git a/docs/tutorials/6_export_model.md b/docs/en/tutorials/6_export_model.md similarity index 100% rename from docs/tutorials/6_export_model.md rename to docs/en/tutorials/6_export_model.md diff --git a/docs/tutorials/7_customize_runtime.md b/docs/en/tutorials/7_customize_runtime.md similarity index 100% rename from docs/tutorials/7_customize_runtime.md rename to docs/en/tutorials/7_customize_runtime.md diff --git a/docs/useful_tools.md b/docs/en/useful_tools.md similarity index 100% rename from docs/useful_tools.md rename to docs/en/useful_tools.md diff --git a/docs_zh_CN/Makefile b/docs/zh_cn/Makefile similarity index 100% rename from docs_zh_CN/Makefile rename to docs/zh_cn/Makefile diff --git a/docs_zh_CN/README.md b/docs/zh_cn/README.md similarity index 100% rename from docs_zh_CN/README.md rename to docs/zh_cn/README.md diff --git a/docs_zh_CN/api.rst b/docs/zh_cn/api.rst similarity index 100% rename from docs_zh_CN/api.rst rename to docs/zh_cn/api.rst diff --git a/docs_zh_CN/benchmark.md b/docs/zh_cn/benchmark.md similarity index 100% rename from docs_zh_CN/benchmark.md rename to docs/zh_cn/benchmark.md diff --git a/docs_zh_CN/conf.py b/docs/zh_cn/conf.py similarity index 97% rename from docs_zh_CN/conf.py rename to docs/zh_cn/conf.py index 7949166dc9..fe1b066f29 100644 --- a/docs_zh_CN/conf.py +++ b/docs/zh_cn/conf.py @@ -17,14 +17,14 @@ import pytorch_sphinx_theme -sys.path.insert(0, os.path.abspath('..')) +sys.path.insert(0, os.path.abspath('../../')) # -- Project information ----------------------------------------------------- project = 'MMAction2' copyright = '2020, OpenMMLab' author = 'MMAction2 Authors' -version_file = '../mmaction/version.py' +version_file = '../../mmaction/version.py' def get_version(): diff --git a/docs_zh_CN/data_preparation.md b/docs/zh_cn/data_preparation.md similarity index 100% rename from docs_zh_CN/data_preparation.md rename to docs/zh_cn/data_preparation.md diff --git a/docs_zh_CN/demo.md b/docs/zh_cn/demo.md similarity index 100% rename from docs_zh_CN/demo.md rename to docs/zh_cn/demo.md diff --git a/docs_zh_CN/faq.md b/docs/zh_cn/faq.md similarity index 98% rename from docs_zh_CN/faq.md rename to docs/zh_cn/faq.md index 2f328792f0..1a4f722f33 100644 --- a/docs_zh_CN/faq.md +++ b/docs/zh_cn/faq.md @@ -13,7 +13,7 @@ - **"OSError: MoviePy Error: creation of None failed because of the following error"** - 参照 [MMAction2 
安装文档](https://github.com/open-mmlab/mmaction2/blob/master/docs_zh_CN/install.md#安装依赖包)
+    参照 [MMAction2 安装文档](https://github.com/open-mmlab/mmaction2/blob/master/docs/zh_cn/install.md#安装依赖包)

     1. 对于 Windows 用户,[ImageMagick](https://www.imagemagick.org/script/index.php) 不再被 MoviePy 自动检测,需要获取名为 `magick` 的 ImageMagick 二进制包的路径,来修改 `moviepy/config_defaults.py` 文件中的 `IMAGEMAGICK_BINARY`,如 `IMAGEMAGICK_BINARY = "C:\\Program Files\\ImageMagick_VERSION\\magick.exe"`
     2. 对于 Linux 用户,如果 ImageMagick 没有被 moviepy 检测,需要注释掉 `/etc/ImageMagick-6/policy.xml` 文件中的 `<policy domain="path" rights="none" pattern="@*" />`,即改为 `<!-- <policy domain="path" rights="none" pattern="@*" /> -->`。

@@ -48,7 +48,7 @@
 - **如何使用训练过的识别器作为主干网络的预训练模型?**

-    参照 [使用预训练模型](https://github.com/open-mmlab/mmaction2/blob/master/docs_zh_CN/tutorials/2_finetune.md#使用预训练模型),
+    参照 [使用预训练模型](https://github.com/open-mmlab/mmaction2/blob/master/docs/zh_cn/tutorials/2_finetune.md#使用预训练模型),
     如果想对整个网络使用预训练模型,可以在配置文件中,将 `load_from` 设置为预训练模型的链接。
     如果只想对主干网络使用预训练模型,可以在配置文件中,将主干网络 `backbone` 中的 `pretrained` 设置为预训练模型的地址或链接。
diff --git a/docs_zh_CN/feature_extraction.md b/docs/zh_cn/feature_extraction.md
similarity index 100%
rename from docs_zh_CN/feature_extraction.md
rename to docs/zh_cn/feature_extraction.md
diff --git a/docs_zh_CN/getting_started.md b/docs/zh_cn/getting_started.md
similarity index 100%
rename from docs_zh_CN/getting_started.md
rename to docs/zh_cn/getting_started.md
diff --git a/docs_zh_CN/index.rst b/docs/zh_cn/index.rst
similarity index 100%
rename from docs_zh_CN/index.rst
rename to docs/zh_cn/index.rst
diff --git a/docs_zh_CN/install.md b/docs/zh_cn/install.md
similarity index 100%
rename from docs_zh_CN/install.md
rename to docs/zh_cn/install.md
diff --git a/docs_zh_CN/make.bat b/docs/zh_cn/make.bat
similarity index 100%
rename from docs_zh_CN/make.bat
rename to docs/zh_cn/make.bat
diff --git a/docs_zh_CN/merge_docs.sh b/docs/zh_cn/merge_docs.sh
similarity index 89%
rename from docs_zh_CN/merge_docs.sh
rename to docs/zh_cn/merge_docs.sh
index 1265731a97..187b8bd419 100755
--- a/docs_zh_CN/merge_docs.sh
+++ b/docs/zh_cn/merge_docs.sh
@@ -1,10 +1,10 @@
 #!/usr/bin/env bash

 # gather models
-cat ../configs/localization/*/README_zh-CN.md | sed "s/md#测/html#测/g" | sed "s/md#训/html#训/g" | sed "s/#/#&/" | sed '1i\# 时序动作检测模型' | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' | sed "s/getting_started.html##/getting_started.html#/g" > localization_models.md
-cat ../configs/recognition/*/README_zh-CN.md | sed "s/md#测/html#t测/g" | sed "s/md#训/html#训/g" | sed "s/#/#&/" | sed '1i\# 动作识别模型' | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g'| sed "s/getting_started.html##/getting_started.html#/g" > recognition_models.md
-cat ../configs/recognition_audio/*/README_zh-CN.md | sed "s/md#测/html#测/g" | sed "s/md#训/html#训/g" | sed "s/#/#&/" | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g'| sed "s/getting_started.html##/getting_started.html#/g" >> recognition_models.md
-cat ../configs/detection/*/README_zh-CN.md | sed "s/md#测/html#测/g" | sed "s/md#训/html#训/g" | sed "s/#/#&/" | sed '1i\# 时空动作检测模型' | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g'| sed "s/getting_started.html##/getting_started.html#/g" > detection_models.md
-cat ../configs/skeleton/*/README_zh-CN.md | sed "s/md#测/html#测/g" | sed "s/md#训/html#训/g" | sed "s/#/#&/" | sed '1i\# 骨骼动作识别模型' | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g'| sed
"s/getting_started.html##/getting_started.html#/g" > skeleton_models.md +cat ../configs/localization/*/README_zh-CN.md | sed "s/md#测/html#测/g" | sed "s/md#训/html#训/g" | sed "s/#/#&/" | sed '1i\# 时序动作检测模型' | sed 's/](\/docs/zh_cn\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' | sed "s/getting_started.html##/getting_started.html#/g" > localization_models.md +cat ../configs/recognition/*/README_zh-CN.md | sed "s/md#测/html#t测/g" | sed "s/md#训/html#训/g" | sed "s/#/#&/" | sed '1i\# 动作识别模型' | sed 's/](\/docs/zh_cn\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g'| sed "s/getting_started.html##/getting_started.html#/g" > recognition_models.md +cat ../configs/recognition_audio/*/README_zh-CN.md | sed "s/md#测/html#测/g" | sed "s/md#训/html#训/g" | sed "s/#/#&/" | sed 's/](\/docs/zh_cn\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g'| sed "s/getting_started.html##/getting_started.html#/g" >> recognition_models.md +cat ../configs/detection/*/README_zh-CN.md | sed "s/md#测/html#测/g" | sed "s/md#训/html#训/g" | sed "s/#/#&/" | sed '1i\# 时空动作检测模型' | sed 's/](\/docs/zh_cn\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g'| sed "s/getting_started.html##/getting_started.html#/g" > detection_models.md +cat ../configs/skeleton/*/README_zh-CN.md | sed "s/md#测/html#测/g" | sed "s/md#训/html#训/g" | sed "s/#/#&/" | sed '1i\# 骨骼动作识别模型' | sed 's/](\/docs/zh_cn\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g'| sed "s/getting_started.html##/getting_started.html#/g" > skeleton_models.md # gather datasets cat ../tools/data/*/README_zh-CN.md | sed 's/# 准备/# /g' | sed 's/#/#&/' > prepare_data.md @@ -29,7 +29,7 @@ sed -i 's/(\/tools\/data\/diving48\/README_zh-CN.md/(#diving48/g' supported_data sed -i 's/(\/tools\/data\/skeleton\/README_zh-CN.md/(#skeleton/g' supported_datasets.md cat prepare_data.md >> supported_datasets.md -sed -i 's/](\/docs_zh_CN\//](/g' supported_datasets.md +sed -i 's/](\/docs/zh_cn\//](/g' supported_datasets.md sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' supported_datasets.md sed -i "s/md###t/html#t/g" demo.md @@ -37,5 +37,5 @@ sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' demo.md sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' benchmark.md sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' getting_started.md sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' install.md -sed -i 's/](\/docs_zh_CN\//](/g' ./tutorials/*.md +sed -i 's/](\/docs/zh_cn\//](/g' ./tutorials/*.md sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' ./tutorials/*.md diff --git a/docs_zh_CN/stat.py b/docs/zh_cn/stat.py similarity index 100% rename from docs_zh_CN/stat.py rename to docs/zh_cn/stat.py diff --git a/docs_zh_CN/supported_datasets.md b/docs/zh_cn/supported_datasets.md similarity index 100% rename from docs_zh_CN/supported_datasets.md rename to docs/zh_cn/supported_datasets.md diff --git a/docs_zh_CN/switch_language.md b/docs/zh_cn/switch_language.md similarity index 100% rename from docs_zh_CN/switch_language.md rename to docs/zh_cn/switch_language.md diff --git a/docs_zh_CN/tutorials/1_config.md b/docs/zh_cn/tutorials/1_config.md similarity index 100% rename from docs_zh_CN/tutorials/1_config.md rename to docs/zh_cn/tutorials/1_config.md diff --git a/docs_zh_CN/tutorials/2_finetune.md b/docs/zh_cn/tutorials/2_finetune.md similarity index 
100% rename from docs_zh_CN/tutorials/2_finetune.md rename to docs/zh_cn/tutorials/2_finetune.md diff --git a/docs_zh_CN/tutorials/3_new_dataset.md b/docs/zh_cn/tutorials/3_new_dataset.md similarity index 100% rename from docs_zh_CN/tutorials/3_new_dataset.md rename to docs/zh_cn/tutorials/3_new_dataset.md diff --git a/docs_zh_CN/tutorials/4_data_pipeline.md b/docs/zh_cn/tutorials/4_data_pipeline.md similarity index 100% rename from docs_zh_CN/tutorials/4_data_pipeline.md rename to docs/zh_cn/tutorials/4_data_pipeline.md diff --git a/docs_zh_CN/tutorials/5_new_modules.md b/docs/zh_cn/tutorials/5_new_modules.md similarity index 100% rename from docs_zh_CN/tutorials/5_new_modules.md rename to docs/zh_cn/tutorials/5_new_modules.md diff --git a/docs_zh_CN/tutorials/6_export_model.md b/docs/zh_cn/tutorials/6_export_model.md similarity index 100% rename from docs_zh_CN/tutorials/6_export_model.md rename to docs/zh_cn/tutorials/6_export_model.md diff --git a/docs_zh_CN/tutorials/7_customize_runtime.md b/docs/zh_cn/tutorials/7_customize_runtime.md similarity index 100% rename from docs_zh_CN/tutorials/7_customize_runtime.md rename to docs/zh_cn/tutorials/7_customize_runtime.md diff --git a/docs_zh_CN/useful_tools.md b/docs/zh_cn/useful_tools.md similarity index 100% rename from docs_zh_CN/useful_tools.md rename to docs/zh_cn/useful_tools.md diff --git a/tools/data/activitynet/README.md b/tools/data/activitynet/README.md index f3286f6fc1..8c36a9eec9 100644 --- a/tools/data/activitynet/README.md +++ b/tools/data/activitynet/README.md @@ -78,7 +78,7 @@ For this case, the downloading scripts update the annotation file after download ### Step 3. Extract RGB and Flow -Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). Use following scripts to extract both RGB and Flow. @@ -87,7 +87,7 @@ bash extract_frames.sh ``` The command above can generate images with new short edge 256. If you want to generate images with short edge 320 (320p), or with fix size 340x256, you can change the args `--new-short 256` to `--new-short 320` or `--new-width 340 --new-height 256`. -More details can be found in [data_preparation](/docs/data_preparation.md) +More details can be found in [data_preparation](/docs/en/data_preparation.md) ### Step 4. Generate File List for ActivityNet Finetuning @@ -168,4 +168,4 @@ mmaction2 ``` -For training and evaluating on ActivityNet, please refer to [getting_started.md](/docs/getting_started.md). +For training and evaluating on ActivityNet, please refer to [getting_started.md](/docs/en/getting_started.md). diff --git a/tools/data/activitynet/README_zh-CN.md b/tools/data/activitynet/README_zh-CN.md index 7687b948db..5007d0a2ad 100644 --- a/tools/data/activitynet/README_zh-CN.md +++ b/tools/data/activitynet/README_zh-CN.md @@ -78,7 +78,7 @@ bash download_bsn_videos.sh ### 步骤 3. 
抽取 RGB 帧和光流 -在抽取视频帧和光流之前,请参考 [安装指南](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在抽取视频帧和光流之前,请参考 [安装指南](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 可使用以下命令抽取视频帧和光流。 @@ -87,7 +87,7 @@ bash extract_frames.sh ``` 以上脚本将会生成短边 256 分辨率的视频。如果用户想生成短边 320 分辨率的视频(即 320p),或者 340x256 的固定分辨率,用户可以通过改变参数由 `--new-short 256` 至 `--new-short 320`,或者 `--new-width 340 --new-height 256` 进行设置 -更多细节可参考 [数据准备指南](/docs_zh_CN/data_preparation.md) +更多细节可参考 [数据准备指南](/docs/zh_cn/data_preparation.md) ### 步骤 4. 生成用于 ActivityNet 微调的文件列表 @@ -166,4 +166,4 @@ mmaction2 ``` -关于对 ActivityNet 进行训练和验证,可以参考 [基础教程](/docs_zh_CN/getting_started.md). +关于对 ActivityNet 进行训练和验证,可以参考 [基础教程](/docs/zh_cn/getting_started.md). diff --git a/tools/data/ava/README.md b/tools/data/ava/README.md index a416eb2632..4e297716de 100644 --- a/tools/data/ava/README.md +++ b/tools/data/ava/README.md @@ -64,7 +64,7 @@ bash cut_videos.sh ## Step 4. Extract RGB and Flow -Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). If you have plenty of SSD space, then we recommend extracting frames there for better I/O performance. And you can run the following script to soft link the extracted frames. @@ -141,7 +141,7 @@ mmaction2 | │ │ │ ├── ... ``` -For training and evaluating on AVA, please refer to [getting_started](/docs/getting_started.md). +For training and evaluating on AVA, please refer to [getting_started](/docs/en/getting_started.md). ## Reference diff --git a/tools/data/ava/README_zh-CN.md b/tools/data/ava/README_zh-CN.md index 5a7b96da88..6a922f5e1b 100644 --- a/tools/data/ava/README_zh-CN.md +++ b/tools/data/ava/README_zh-CN.md @@ -56,7 +56,7 @@ bash cut_videos.sh ## 4. 提取 RGB 帧和光流 -在提取之前,请参考 [安装教程](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在提取之前,请参考 [安装教程](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 如果用户有足够的 SSD 空间,那么建议将视频抽取为 RGB 帧以提升 I/O 性能。用户可以使用以下脚本为抽取得到的帧文件夹建立软连接: @@ -131,4 +131,4 @@ mmaction2 | │ │ │ ├── ... ``` -关于 AVA 数据集上的训练与测试,请参照 [基础教程](/docs_zh_CN/getting_started.md)。 +关于 AVA 数据集上的训练与测试,请参照 [基础教程](/docs/zh_cn/getting_started.md)。 diff --git a/tools/data/diving48/README.md b/tools/data/diving48/README.md index 588cddd173..1cbdbcdb27 100644 --- a/tools/data/diving48/README.md +++ b/tools/data/diving48/README.md @@ -39,7 +39,7 @@ This part is **optional** if you only want to use the video loader. The frames provided in official compressed file are not complete. You may need to go through the following extraction steps to get the complete frames. -Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). If you have plenty of SSD space, then we recommend extracting frames there for better I/O performance. @@ -120,4 +120,4 @@ mmaction2 │ | | ├── ... ``` -For training and evaluating on Diving48, please refer to [getting_started.md](/docs/getting_started.md). +For training and evaluating on Diving48, please refer to [getting_started.md](/docs/en/getting_started.md). 
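The `merge_docs.sh` hunk earlier in this patch rewrites in-repo links with `sed`. Because `/` is both the default `sed` delimiter and the separator inside the new `docs/zh_cn` path, every slash in the pattern has to be escaped, or a different delimiter chosen (the script already uses `=` as a delimiter for its GitHub-URL rewrites). A minimal sketch of the two equivalent forms, using a throwaway file:

```shell
# Sketch: two equivalent ways to strip a ](/docs/zh_cn/ link prefix.
# sample.md is a throwaway file created only for this demonstration.
echo '[安装指南](/docs/zh_cn/install.md)' > sample.md
sed 's/](\/docs\/zh_cn\//](/g' sample.md   # every slash in the pattern escaped
sed 's=](/docs/zh_cn/=](=g' sample.md      # same rewrite with '=' as delimiter
```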
diff --git a/tools/data/diving48/README_zh-CN.md b/tools/data/diving48/README_zh-CN.md index e91f8729a5..3210d06b9d 100644 --- a/tools/data/diving48/README_zh-CN.md +++ b/tools/data/diving48/README_zh-CN.md @@ -39,7 +39,7 @@ bash download_videos.sh 官网提供的帧压缩包并不完整。若想获取完整的数据,可以使用以下步骤解帧。 -在抽取视频帧和光流之前,请参考 [安装指南](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在抽取视频帧和光流之前,请参考 [安装指南](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 如果拥有大量的 SSD 存储空间,则推荐将抽取的帧存储至 I/O 性能更优秀的 SSD 中。 @@ -120,4 +120,4 @@ mmaction2 │ | | ├── ... ``` -关于对 Diving48 进行训练和验证,可以参考 [基础教程](/docs_zh_CN/getting_started.md)。 +关于对 Diving48 进行训练和验证,可以参考 [基础教程](/docs/zh_cn/getting_started.md)。 diff --git a/tools/data/gym/README.md b/tools/data/gym/README.md index a39eda6fd4..22b09f66f9 100644 --- a/tools/data/gym/README.md +++ b/tools/data/gym/README.md @@ -55,7 +55,7 @@ python trim_subaction.py This part is **optional** if you only want to use the video loader for RGB model training. -Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). Run the following script to extract both rgb and flow using "tvl1" algorithm. @@ -106,4 +106,4 @@ mmaction2 | | └── subaction_frames ``` -For training and evaluating on GYM, please refer to [getting_started](/docs/getting_started.md). +For training and evaluating on GYM, please refer to [getting_started](/docs/en/getting_started.md). diff --git a/tools/data/gym/README_zh-CN.md b/tools/data/gym/README_zh-CN.md index cb3a796ec7..9fff9dd20a 100644 --- a/tools/data/gym/README_zh-CN.md +++ b/tools/data/gym/README_zh-CN.md @@ -55,7 +55,7 @@ python trim_subaction.py 如果用户仅使用 video loader,则可以跳过本步。 -在提取之前,请参考 [安装教程](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在提取之前,请参考 [安装教程](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 用户可使用如下脚本同时抽取 RGB 帧和光流(提取光流时使用 tvl1 算法): @@ -106,4 +106,4 @@ mmaction2 | | └── subaction_frames ``` -关于 GYM 数据集上的训练与测试,请参照 [基础教程](/docs_zh_CN/getting_started.md)。 +关于 GYM 数据集上的训练与测试,请参照 [基础教程](/docs/zh_cn/getting_started.md)。 diff --git a/tools/data/hmdb51/README.md b/tools/data/hmdb51/README.md index 206b548764..f003e58b97 100644 --- a/tools/data/hmdb51/README.md +++ b/tools/data/hmdb51/README.md @@ -41,7 +41,7 @@ bash download_videos.sh This part is **optional** if you only want to use the video loader. -Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). If you have plenty of SSD space, then we recommend extracting frames there for better I/O performance. @@ -122,4 +122,4 @@ mmaction2 ``` -For training and evaluating on HMDB51, please refer to [getting_started.md](/docs/getting_started.md). +For training and evaluating on HMDB51, please refer to [getting_started.md](/docs/en/getting_started.md). 
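Several of the dataset READMEs touched in this patch recommend extracting frames to an SSD and exposing them through a soft link at the path the file lists expect. A minimal sketch of that setup for HMDB51, run from the repository root; the mount point `/mnt/SSD` is a placeholder:

```shell
# Sketch: keep extracted frames on fast storage and soft-link them into
# the data layout the READMEs above describe.
mkdir -p /mnt/SSD/hmdb51_extracted/
ln -s /mnt/SSD/hmdb51_extracted/ data/hmdb51/rawframes
```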
diff --git a/tools/data/hmdb51/README_zh-CN.md b/tools/data/hmdb51/README_zh-CN.md index a34c4b9ce9..f82f397b5c 100644 --- a/tools/data/hmdb51/README_zh-CN.md +++ b/tools/data/hmdb51/README_zh-CN.md @@ -39,7 +39,7 @@ bash download_videos.sh 如果用户只想使用视频加载训练,则该部分是 **可选项**。 -在抽取视频帧和光流之前,请参考 [安装指南](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在抽取视频帧和光流之前,请参考 [安装指南](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 如果用户有大量的 SSD 存储空间,则推荐将抽取的帧存储至 I/O 性能更优秀的 SSD 上。 用户可使用以下命令为 SSD 建立软链接。 @@ -118,4 +118,4 @@ mmaction2 ``` -关于对 HMDB51 进行训练和验证,可以参照 [基础教程](/docs_zh_CN/getting_started.md)。 +关于对 HMDB51 进行训练和验证,可以参照 [基础教程](/docs/zh_cn/getting_started.md)。 diff --git a/tools/data/hvu/README.md b/tools/data/hvu/README.md index 755e71dbb3..f668f52788 100644 --- a/tools/data/hvu/README.md +++ b/tools/data/hvu/README.md @@ -43,7 +43,7 @@ bash download_videos.sh This part is **optional** if you only want to use the video loader. -Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). You can use the following script to extract both RGB and Flow frames. @@ -52,7 +52,7 @@ bash extract_frames.sh ``` By default, we generate frames with short edge resized to 256. -More details can be found in [data_preparation](/docs/data_preparation.md) +More details can be found in [data_preparation](/docs/en/data_preparation.md) ## Step 4. Generate File List @@ -120,4 +120,4 @@ mmaction2 ``` -For training and evaluating on HVU, please refer to [getting_started](/docs/getting_started.md). +For training and evaluating on HVU, please refer to [getting_started](/docs/en/getting_started.md). diff --git a/tools/data/hvu/README_zh-CN.md b/tools/data/hvu/README_zh-CN.md index 5b3ffa1ea3..a83f85c571 100644 --- a/tools/data/hvu/README_zh-CN.md +++ b/tools/data/hvu/README_zh-CN.md @@ -43,7 +43,7 @@ bash download_videos.sh 如果用户仅使用 video loader,则可以跳过本步。 -在提取之前,请参考 [安装教程](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在提取之前,请参考 [安装教程](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 用户可使用如下脚本同时抽取 RGB 帧和光流: @@ -51,7 +51,7 @@ bash download_videos.sh bash extract_frames.sh ``` -该脚本默认生成短边长度为 256 的帧,可参考 [数据准备](/docs_zh_CN/data_preparation.md) 获得更多细节。 +该脚本默认生成短边长度为 256 的帧,可参考 [数据准备](/docs/zh_cn/data_preparation.md) 获得更多细节。 ## 4. 生成文件列表 @@ -107,4 +107,4 @@ mmaction2 ``` -关于 HVU 数据集上的训练与测试,请参照 [基础教程](/docs_zh_CN/getting_started.md)。 +关于 HVU 数据集上的训练与测试,请参照 [基础教程](/docs/zh_cn/getting_started.md)。 diff --git a/tools/data/jester/README.md b/tools/data/jester/README.md index 2e054ab33d..26161e78bb 100644 --- a/tools/data/jester/README.md +++ b/tools/data/jester/README.md @@ -64,7 +64,7 @@ data = dict( This part is **optional** if you only want to use RGB frames. -Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). If you have plenty of SSD space, then we recommend extracting frames there for better I/O performance. @@ -140,4 +140,4 @@ mmaction2 ``` -For training and evaluating on Jester, please refer to [getting_started.md](/docs/getting_started.md). 
+For training and evaluating on Jester, please refer to [getting_started.md](/docs/en/getting_started.md). diff --git a/tools/data/jester/README_zh-CN.md b/tools/data/jester/README_zh-CN.md index 4b3fb17f0b..86f37badf2 100644 --- a/tools/data/jester/README_zh-CN.md +++ b/tools/data/jester/README_zh-CN.md @@ -64,7 +64,7 @@ data = dict( 如果用户只想使用 RGB 帧训练,则该部分是 **可选项**。 -在抽取视频帧和光流之前,请参考 [安装指南](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在抽取视频帧和光流之前,请参考 [安装指南](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 如果拥有大量的 SSD 存储空间,则推荐将抽取的帧存储至 I/O 性能更优秀的 SSD 中。 @@ -140,4 +140,4 @@ mmaction2 ``` -关于对 jester 进行训练和验证,可以参考 [基础教程](/docs_zh_CN/getting_started.md)。 +关于对 jester 进行训练和验证,可以参考 [基础教程](/docs/zh_cn/getting_started.md)。 diff --git a/tools/data/kinetics/README.md b/tools/data/kinetics/README.md index 725190ee41..7351d1b128 100644 --- a/tools/data/kinetics/README.md +++ b/tools/data/kinetics/README.md @@ -72,7 +72,7 @@ You can also download from [Academic Torrents](https://academictorrents.com/) ([ This part is **optional** if you only want to use the video loader. -Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). If you have plenty of SSD space, then we recommend extracting frames there for better I/O performance. And you can run the following script to soft link the extracted frames. @@ -103,7 +103,7 @@ bash extract_frames.sh ${DATASET} ``` The commands above can generate images with new short edge 256. If you want to generate images with short edge 320 (320p), or with fix size 340x256, you can change the args `--new-short 256` to `--new-short 320` or `--new-width 340 --new-height 256`. -More details can be found in [data_preparation](/docs/data_preparation.md) +More details can be found in [data_preparation](/docs/en/data_preparation.md) ## Step 4. Generate File List @@ -147,4 +147,4 @@ mmaction2 ``` -For training and evaluating on Kinetics, please refer to [getting_started](/docs/getting_started.md). +For training and evaluating on Kinetics, please refer to [getting_started](/docs/en/getting_started.md). diff --git a/tools/data/kinetics/README_zh-CN.md b/tools/data/kinetics/README_zh-CN.md index ef49ba8e8a..1fa8741e22 100644 --- a/tools/data/kinetics/README_zh-CN.md +++ b/tools/data/kinetics/README_zh-CN.md @@ -66,7 +66,7 @@ python ../resize_videos.py ../../../data/${DATASET}/videos_train/ ../../../data/ 如果用户仅使用 video loader,则可以跳过本步。 -在提取之前,请参考 [安装教程](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在提取之前,请参考 [安装教程](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 如果用户有足够的 SSD 空间,那么建议将视频抽取为 RGB 帧以提升 I/O 性能。用户可以使用以下脚本为抽取得到的帧文件夹建立软连接: @@ -97,7 +97,7 @@ bash extract_frames.sh ${DATASET} ``` 以上的命令生成短边长度为 256 的 RGB 帧和光流帧。如果用户需要生成短边长度为 320 的帧 (320p),或是固定分辨率为 340 x 256 的帧,可改变参数 `--new-short 256` 为 `--new-short 320` 或 `--new-width 340 --new-height 256`。 -更多细节可以参考 [数据准备](/docs_zh_CN/data_preparation.md)。 +更多细节可以参考 [数据准备](/docs/zh_cn/data_preparation.md)。 ## 4. 
生成文件列表 @@ -139,4 +139,4 @@ mmaction2 ``` -关于 Kinetics 数据集上的训练与测试,请参照 [基础教程](/docs_zh_CN/getting_started.md)。 +关于 Kinetics 数据集上的训练与测试,请参照 [基础教程](/docs/zh_cn/getting_started.md)。 diff --git a/tools/data/mit/README.md b/tools/data/mit/README.md index e67ca45335..6e4ef0d37d 100644 --- a/tools/data/mit/README.md +++ b/tools/data/mit/README.md @@ -34,7 +34,7 @@ python ../resize_videos.py ../../../data/mit/videos/ ../../../data/mit/videos_25 This part is **optional** if you only want to use the video loader. -Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). If you have plenty of SSD space, then we recommend extracting frames there for better I/O performance. And you can run the following script to soft link the extracted frames. @@ -125,4 +125,4 @@ mmaction2 ``` -For training and evaluating on Moments in Time, please refer to [getting_started.md](/docs/getting_started.md). +For training and evaluating on Moments in Time, please refer to [getting_started.md](/docs/en/getting_started.md). diff --git a/tools/data/mit/README_zh-CN.md b/tools/data/mit/README_zh-CN.md index 74a3d0c247..21289e34e1 100644 --- a/tools/data/mit/README_zh-CN.md +++ b/tools/data/mit/README_zh-CN.md @@ -36,7 +36,7 @@ python ../resize_videos.py ../../../data/mit/videos/ ../../../data/mit/videos_25 如果用户只想使用视频加载训练,则该部分是 **可选项**。 -在抽取视频帧和光流之前,请参考 [安装指南](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在抽取视频帧和光流之前,请参考 [安装指南](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 如果用户有大量的 SSD 存储空间,则推荐将抽取的帧存储至 I/O 性能更优秀的 SSD 上。 用户可使用以下命令为 SSD 建立软链接。 @@ -127,4 +127,4 @@ mmaction2 ``` -关于对 Moments in Times 进行训练和验证,可以参照 [基础教程](/docs_zh_CN/getting_started.md)。 +关于对 Moments in Times 进行训练和验证,可以参照 [基础教程](/docs/zh_cn/getting_started.md)。 diff --git a/tools/data/mmit/README.md b/tools/data/mmit/README.md index 5deedf71d0..3f6b618977 100644 --- a/tools/data/mmit/README.md +++ b/tools/data/mmit/README.md @@ -32,7 +32,7 @@ python ../resize_videos.py ../../../data/mmit/videos/ ../../../data/mmit/videos_ This part is **optional** if you only want to use the video loader. -Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). First, you can run the following script to soft link SSD. @@ -110,4 +110,4 @@ mmaction2/ └── ... ``` -For training and evaluating on Multi-Moments in Time, please refer to [getting_started.md](/docs/getting_started.md). +For training and evaluating on Multi-Moments in Time, please refer to [getting_started.md](/docs/en/getting_started.md). 
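The "generate file list" steps in the Kinetics section above all funnel into one helper script. A minimal sketch of calling it directly from the repository root; the flag set shown (`--level`, `--format`, `--shuffle`) is an assumption based on the helper's rawframe/video modes, so check `python tools/data/build_file_list.py -h` before relying on it:

```shell
# Sketch: build a rawframe-format file list for Kinetics-400.
# --level 2 assumes a two-level class/video directory nesting.
python tools/data/build_file_list.py kinetics400 data/kinetics400/rawframes_train/ \
    --level 2 --format rawframes --shuffle
```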
diff --git a/tools/data/mmit/README_zh-CN.md b/tools/data/mmit/README_zh-CN.md index e070505e34..31d5cddcde 100644 --- a/tools/data/mmit/README_zh-CN.md +++ b/tools/data/mmit/README_zh-CN.md @@ -34,7 +34,7 @@ python ../resize_videos.py ../../../data/mmit/videos/ ../../../data/mmit/videos_ 如果用户只想使用视频加载训练,则该部分是 **可选项**。 -在抽取视频帧和光流之前,请参考 [安装指南](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在抽取视频帧和光流之前,请参考 [安装指南](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 如果用户有大量的 SSD 存储空间,则推荐将抽取的帧存储至 I/O 性能更优秀的 SSD 上。 用户可使用以下命令为 SSD 建立软链接。 @@ -112,4 +112,4 @@ mmaction2/ └── ... ``` -关于对 Multi-Moments in Time 进行训练和验证,可以参照 [基础教程](/docs_zh_CN/getting_started.md)。 +关于对 Multi-Moments in Time 进行训练和验证,可以参照 [基础教程](/docs/zh_cn/getting_started.md)。 diff --git a/tools/data/sthv1/README.md b/tools/data/sthv1/README.md index 75f4c11134..eb837d435e 100644 --- a/tools/data/sthv1/README.md +++ b/tools/data/sthv1/README.md @@ -65,7 +65,7 @@ data = dict( This part is **optional** if you only want to use RGB frames. -Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). If you have plenty of SSD space, then we recommend extracting frames there for better I/O performance. @@ -141,4 +141,4 @@ mmaction2 ``` -For training and evaluating on Something-Something V1, please refer to [getting_started.md](/docs/getting_started.md). +For training and evaluating on Something-Something V1, please refer to [getting_started.md](/docs/en/getting_started.md). diff --git a/tools/data/sthv1/README_zh-CN.md b/tools/data/sthv1/README_zh-CN.md index 11cc9318be..7506b4ad5c 100644 --- a/tools/data/sthv1/README_zh-CN.md +++ b/tools/data/sthv1/README_zh-CN.md @@ -63,7 +63,7 @@ data = dict( 如果用户只想使用原 RGB 帧加载训练,则该部分是 **可选项**。 -在抽取视频帧和光流之前,请参考 [安装指南](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在抽取视频帧和光流之前,请参考 [安装指南](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 如果拥有大量的 SSD 存储空间,则推荐将抽取的帧存储至 I/O 性能更优秀的 SSD 中。 @@ -139,4 +139,4 @@ mmaction2 ``` -关于对 Something-Something V1 进行训练和验证,可以参考 [基础教程](/docs_zh_CN/getting_started.md)。 +关于对 Something-Something V1 进行训练和验证,可以参考 [基础教程](/docs/zh_cn/getting_started.md)。 diff --git a/tools/data/sthv2/README.md b/tools/data/sthv2/README.md index af112872da..ea4c66e270 100644 --- a/tools/data/sthv2/README.md +++ b/tools/data/sthv2/README.md @@ -36,7 +36,7 @@ cd $MMACTION2/tools/data/sthv2/ This part is **optional** if you only want to use the video loader. -Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). If you have plenty of SSD space, then we recommend extracting frames there for better I/O performance. @@ -115,4 +115,4 @@ mmaction2 ``` -For training and evaluating on Something-Something V2, please refer to [getting_started.md](/docs/getting_started.md). +For training and evaluating on Something-Something V2, please refer to [getting_started.md](/docs/en/getting_started.md). 
diff --git a/tools/data/sthv2/README_zh-CN.md b/tools/data/sthv2/README_zh-CN.md index 7d8080c5a4..87cd3558f6 100644 --- a/tools/data/sthv2/README_zh-CN.md +++ b/tools/data/sthv2/README_zh-CN.md @@ -36,7 +36,7 @@ cd $MMACTION2/tools/data/sthv2/ 如果用户只想使用视频加载训练,则该部分是 **可选项**。 -在抽取视频帧和光流之前,请参考 [安装指南](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在抽取视频帧和光流之前,请参考 [安装指南](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 如果拥有大量的 SSD 存储空间,则推荐将抽取的帧存储至 I/O 性能更优秀的 SSD 中。 @@ -115,4 +115,4 @@ mmaction2 ``` -关于对 Something-Something V2 进行训练和验证,可以参考 [基础教程](/docs_zh_CN/getting_started.md)。 +关于对 Something-Something V2 进行训练和验证,可以参考 [基础教程](/docs/zh_cn/getting_started.md)。 diff --git a/tools/data/thumos14/README.md b/tools/data/thumos14/README.md index eaddb60cbe..8b52284951 100644 --- a/tools/data/thumos14/README.md +++ b/tools/data/thumos14/README.md @@ -40,7 +40,7 @@ bash download_videos.sh This part is **optional** if you only want to use the video loader. -Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). If you have plenty of SSD space, then we recommend extracting frames there for better I/O performance. @@ -139,4 +139,4 @@ mmaction2 │ │ │ | ├── video_test_0000001 ``` -For training and evaluating on THUMOS'14, please refer to [getting_started.md](/docs/getting_started.md). +For training and evaluating on THUMOS'14, please refer to [getting_started.md](/docs/en/getting_started.md). diff --git a/tools/data/thumos14/README_zh-CN.md b/tools/data/thumos14/README_zh-CN.md index fb7140a24e..05bd862316 100644 --- a/tools/data/thumos14/README_zh-CN.md +++ b/tools/data/thumos14/README_zh-CN.md @@ -40,7 +40,7 @@ bash download_videos.sh 如果用户只想使用视频加载训练,则该部分是 **可选项**。 -在抽取视频帧和光流之前,请参考 [安装指南](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在抽取视频帧和光流之前,请参考 [安装指南](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 如果用户有大量的 SSD 存储空间,则推荐将抽取的帧存储至 I/O 性能更优秀的 SSD 上。 用户可使用以下命令为 SSD 建立软链接。 @@ -136,4 +136,4 @@ mmaction2 │ │ │ | ├── video_test_0000001 ``` -关于对 THUMOS'14 进行训练和验证,可以参照 [基础教程](/docs_zh_CN/getting_started.md)。 +关于对 THUMOS'14 进行训练和验证,可以参照 [基础教程](/docs/zh_cn/getting_started.md)。 diff --git a/tools/data/ucf101/README.md b/tools/data/ucf101/README.md index abac25f0c7..4d71c1e9f8 100644 --- a/tools/data/ucf101/README.md +++ b/tools/data/ucf101/README.md @@ -43,7 +43,7 @@ python ../resize_videos.py ../../../data/ucf101/videos/ ../../../data/ucf101/vid This part is **optional** if you only want to use the video loader. -Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). If you have plenty of SSD space, then we recommend extracting frames there for better I/O performance. The extracted frames (RGB + Flow) will take up about 100GB. @@ -124,4 +124,4 @@ mmaction2 ``` -For training and evaluating on UCF-101, please refer to [getting_started.md](/docs/getting_started.md). +For training and evaluating on UCF-101, please refer to [getting_started.md](/docs/en/getting_started.md). 
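The optical-flow sections touched in these diffs all rely on denseflow, a GPU tool. As a rough CPU-side illustration of the same idea, the sketch below computes TV-L1 flow for one grayscale frame pair and quantizes it to `uint8` images. It needs `opencv-contrib-python`, and the `bound=20` clipping value is a common convention rather than a confirmed denseflow default.

```python
# Illustrative TV-L1 flow for a single pair of 8-bit grayscale frames,
# roughly what denseflow computes per frame pair. Requires the contrib
# build of OpenCV; bound=20 is an assumed, commonly used clipping value.
import cv2
import numpy as np


def tvl1_flow(prev_gray, curr_gray, bound=20):
    """Compute TV-L1 flow for one frame pair and quantize it to uint8."""
    tvl1 = cv2.optflow.createOptFlow_DualTVL1()
    flow = tvl1.calc(prev_gray, curr_gray, None)  # (H, W, 2) float32
    # clip to [-bound, bound], then map linearly onto [0, 255]
    flow = np.clip(flow, -bound, bound)
    flow = ((flow + bound) * (255.0 / (2 * bound))).astype(np.uint8)
    return flow[..., 0], flow[..., 1]  # x- and y-component images
```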
diff --git a/tools/data/ucf101/README_zh-CN.md b/tools/data/ucf101/README_zh-CN.md index 96e9453ff4..28c696a059 100644 --- a/tools/data/ucf101/README_zh-CN.md +++ b/tools/data/ucf101/README_zh-CN.md @@ -41,7 +41,7 @@ python ../resize_videos.py ../../../data/ucf101/videos/ ../../../data/ucf101/vid 如果用户只想使用视频加载训练,则该部分是 **可选项**。 -在抽取视频帧和光流之前,请参考 [安装指南](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在抽取视频帧和光流之前,请参考 [安装指南](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 如果拥有大量的 SSD 存储空间,则推荐将抽取的帧存储至 I/O 性能更优秀的 SSD 中。所抽取的视频帧和光流约占据 100 GB 的存储空间。 @@ -122,4 +122,4 @@ mmaction2 ``` -关于对 UCF-101 进行训练和验证,可以参考 [基础教程](/docs_zh_CN/getting_started.md)。 +关于对 UCF-101 进行训练和验证,可以参考 [基础教程](/docs/zh_cn/getting_started.md)。 From 6fb9f47420c4f2011d2b14929cd269728fcc2b81 Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Wed, 22 Dec 2021 15:53:52 +0800 Subject: [PATCH 319/414] [Feature] Add copyright checkhook in pre-commit-config.yaml (#1344) * master * master 0721 * add README * check precommit * check precommit --- .pre-commit-config.yaml | 6 ++++++ configs/recognition_audio/resnet/README.md | 14 +++++++++++++- mmaction/models/backbones/agcn.py | 1 + mmaction/models/backbones/stgcn.py | 1 + mmaction/models/heads/stgcn_head.py | 1 + mmaction/models/skeleton_gcn/__init__.py | 1 + mmaction/models/skeleton_gcn/base.py | 1 + mmaction/models/skeleton_gcn/skeletongcn.py | 1 + mmaction/models/skeleton_gcn/utils/__init__.py | 1 + mmaction/models/skeleton_gcn/utils/graph.py | 1 + .../test_recognizers/test_skeletongcn.py | 1 + tools/data/skeleton/babel2mma2.py | 1 + tools/data/skeleton/gen_ntu_rgbd_raw.py | 1 + tools/deployment/mmaction2torchserve.py | 1 + tools/deployment/mmaction_handler.py | 1 + 15 files changed, 32 insertions(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 655a926490..f8608ff032 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -44,3 +44,9 @@ repos: hooks: - id: codespell args: ["--skip", "*.ipynb,tools/data/hvu/label_map.json", "-L", "te,nd,thre,Gool,gool"] + - repo: https://github.com/open-mmlab/pre-commit-hooks + rev: v0.1.0 # Use the ref you want to point at + hooks: + - id: check-algo-readme + - id: check-copyright + args: ["mmaction", "tools", "tests"] # these directories will be checked diff --git a/configs/recognition_audio/resnet/README.md b/configs/recognition_audio/resnet/README.md index 6ebdc8cc72..1a5b718490 100644 --- a/configs/recognition_audio/resnet/README.md +++ b/configs/recognition_audio/resnet/README.md @@ -1,6 +1,18 @@ # ResNet for Audio -## Introduction +## Abstract + + + +We present Audiovisual SlowFast Networks, an architecture for integrated audiovisual perception. AVSlowFast has Slow and Fast visual pathways that are deeply integrated with a Faster Audio pathway to model vision and sound in a unified representation. We fuse audio and visual features at multiple layers, enabling audio to contribute to the formation of hierarchical audiovisual concepts. To overcome training difficulties that arise from different learning dynamics for audio and visual modalities, we introduce DropPathway, which randomly drops the Audio pathway during training as an effective regularization technique. Inspired by prior studies in neuroscience, we perform hierarchical audiovisual synchronization to learn joint audiovisual features.
We report state-of-the-art results on six video action classification and detection datasets, perform detailed ablation studies, and show the generalization of AVSlowFast to learn self-supervised audiovisual features. Code will be made available at: https://github.com/facebookresearch/SlowFast. + +
    + +## Citation diff --git a/mmaction/models/backbones/agcn.py b/mmaction/models/backbones/agcn.py index 689e15f588..7c4127c14e 100644 --- a/mmaction/models/backbones/agcn.py +++ b/mmaction/models/backbones/agcn.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import math import torch diff --git a/mmaction/models/backbones/stgcn.py b/mmaction/models/backbones/stgcn.py index f360d95db6..99ab938b08 100644 --- a/mmaction/models/backbones/stgcn.py +++ b/mmaction/models/backbones/stgcn.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch import torch.nn as nn from mmcv.cnn import constant_init, kaiming_init, normal_init diff --git a/mmaction/models/heads/stgcn_head.py b/mmaction/models/heads/stgcn_head.py index 74b952cb6f..065552cf29 100644 --- a/mmaction/models/heads/stgcn_head.py +++ b/mmaction/models/heads/stgcn_head.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import torch.nn as nn from mmcv.cnn import normal_init diff --git a/mmaction/models/skeleton_gcn/__init__.py b/mmaction/models/skeleton_gcn/__init__.py index b57750f018..914fd3ec1e 100644 --- a/mmaction/models/skeleton_gcn/__init__.py +++ b/mmaction/models/skeleton_gcn/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .base import BaseGCN from .skeletongcn import SkeletonGCN diff --git a/mmaction/models/skeleton_gcn/base.py b/mmaction/models/skeleton_gcn/base.py index 6a9d1bcaa4..656266a4f5 100644 --- a/mmaction/models/skeleton_gcn/base.py +++ b/mmaction/models/skeleton_gcn/base.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from abc import ABCMeta, abstractmethod from collections import OrderedDict diff --git a/mmaction/models/skeleton_gcn/skeletongcn.py b/mmaction/models/skeleton_gcn/skeletongcn.py index ffd14d9f47..0576ee20a3 100644 --- a/mmaction/models/skeleton_gcn/skeletongcn.py +++ b/mmaction/models/skeleton_gcn/skeletongcn.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from ..builder import RECOGNIZERS from .base import BaseGCN diff --git a/mmaction/models/skeleton_gcn/utils/__init__.py b/mmaction/models/skeleton_gcn/utils/__init__.py index b60b3a16ee..6c0b7c0529 100644 --- a/mmaction/models/skeleton_gcn/utils/__init__.py +++ b/mmaction/models/skeleton_gcn/utils/__init__.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. from .graph import Graph __all__ = ['Graph'] diff --git a/mmaction/models/skeleton_gcn/utils/graph.py b/mmaction/models/skeleton_gcn/utils/graph.py index d746ba34a5..9b7a54b2f4 100644 --- a/mmaction/models/skeleton_gcn/utils/graph.py +++ b/mmaction/models/skeleton_gcn/utils/graph.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import numpy as np diff --git a/tests/test_models/test_recognizers/test_skeletongcn.py b/tests/test_models/test_recognizers/test_skeletongcn.py index 4f416d3283..063a090214 100644 --- a/tests/test_models/test_recognizers/test_skeletongcn.py +++ b/tests/test_models/test_recognizers/test_skeletongcn.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import pytest import torch diff --git a/tools/data/skeleton/babel2mma2.py b/tools/data/skeleton/babel2mma2.py index 59e59324b2..3dedc1b31e 100644 --- a/tools/data/skeleton/babel2mma2.py +++ b/tools/data/skeleton/babel2mma2.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
# In this example, we convert babel120_train to MMAction2 format # The required files can be downloaded from the homepage of BABEL project import numpy as np diff --git a/tools/data/skeleton/gen_ntu_rgbd_raw.py b/tools/data/skeleton/gen_ntu_rgbd_raw.py index 3e484602bf..5ca73bf8f1 100644 --- a/tools/data/skeleton/gen_ntu_rgbd_raw.py +++ b/tools/data/skeleton/gen_ntu_rgbd_raw.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import argparse import math import os diff --git a/tools/deployment/mmaction2torchserve.py b/tools/deployment/mmaction2torchserve.py index 91a52aa894..d491ac7b36 100644 --- a/tools/deployment/mmaction2torchserve.py +++ b/tools/deployment/mmaction2torchserve.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import shutil from argparse import ArgumentParser, Namespace from pathlib import Path diff --git a/tools/deployment/mmaction_handler.py b/tools/deployment/mmaction_handler.py index f62a270e15..10626d15c3 100644 --- a/tools/deployment/mmaction_handler.py +++ b/tools/deployment/mmaction_handler.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. import base64 import os import os.path as osp From 6f98109b682c5689dc0548a98b89ed1c3b956ddb Mon Sep 17 00:00:00 2001 From: jenhaoyang Date: Fri, 24 Dec 2021 11:11:13 +0800 Subject: [PATCH 320/414] [Fix] Fix missing skip_postproc parameter (#1347) Fix missing skip_postproc parameter --- tools/data/skeleton/ntu_pose_extraction.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tools/data/skeleton/ntu_pose_extraction.py b/tools/data/skeleton/ntu_pose_extraction.py index 5e25991d33..42556bfdc8 100644 --- a/tools/data/skeleton/ntu_pose_extraction.py +++ b/tools/data/skeleton/ntu_pose_extraction.py @@ -337,5 +337,6 @@ def parse_args(): args.device = global_args.device args.video = global_args.video args.output = global_args.output + args.skip_postproc = global_args.skip_postproc anno = ntu_pose_extraction(args.video, args.skip_postproc) mmcv.dump(anno, args.output) From b347cc7eb497c68667c3922fcf69d27484abe3f1 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Tue, 28 Dec 2021 12:14:32 +0800 Subject: [PATCH 321/414] Update ava_utils.py --- mmaction/core/evaluation/ava_utils.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/mmaction/core/evaluation/ava_utils.py b/mmaction/core/evaluation/ava_utils.py index eac227ce5b..7f6571d478 100644 --- a/mmaction/core/evaluation/ava_utils.py +++ b/mmaction/core/evaluation/ava_utils.py @@ -1,4 +1,6 @@ -# Copyright (c) OpenMMLab. All rights reserved. +# This piece of code is directly adapted from ActivityNet official repo +# https://github.com/activitynet/ActivityNet/blob/master/ +# Evaluation/get_ava_performance.py. Some unused codes are removed. 
import csv import logging import time From f0f3c2c3c3cf29268fbb64af0e02d22a7ee5b75f Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Tue, 28 Dec 2021 14:31:24 +0800 Subject: [PATCH 322/414] [Fix] Use latest youtube-dl (#1357) --- tools/data/activitynet/environment.yml | 2 +- tools/data/gym/environment.yml | 2 +- tools/data/hvu/environment.yml | 2 +- tools/data/kinetics/environment.yml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tools/data/activitynet/environment.yml b/tools/data/activitynet/environment.yml index fe4c3ffb57..f4e6d51fe8 100644 --- a/tools/data/activitynet/environment.yml +++ b/tools/data/activitynet/environment.yml @@ -33,4 +33,4 @@ dependencies: - python-dateutil==2.8.1 - pytz==2020.1 - six==1.14.0 - - youtube-dl==2020.5.8 + - youtube-dl diff --git a/tools/data/gym/environment.yml b/tools/data/gym/environment.yml index b9ecc82678..88d8998513 100644 --- a/tools/data/gym/environment.yml +++ b/tools/data/gym/environment.yml @@ -33,4 +33,4 @@ dependencies: - python-dateutil==2.8.1 - pytz==2020.1 - six==1.14.0 - - youtube-dl==2020.5.8 + - youtube-dl diff --git a/tools/data/hvu/environment.yml b/tools/data/hvu/environment.yml index 86e7e1a24c..bcee98f877 100644 --- a/tools/data/hvu/environment.yml +++ b/tools/data/hvu/environment.yml @@ -33,4 +33,4 @@ dependencies: - python-dateutil==2.8.1 - pytz==2020.1 - six==1.14.0 - - youtube-dl==2020.5.8 + - youtube-dl diff --git a/tools/data/kinetics/environment.yml b/tools/data/kinetics/environment.yml index 86e7e1a24c..bcee98f877 100644 --- a/tools/data/kinetics/environment.yml +++ b/tools/data/kinetics/environment.yml @@ -33,4 +33,4 @@ dependencies: - python-dateutil==2.8.1 - pytz==2020.1 - six==1.14.0 - - youtube-dl==2020.5.8 + - youtube-dl From ed53d94336bedbb385445d59bba31121951c5c37 Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Tue, 28 Dec 2021 15:22:59 +0800 Subject: [PATCH 323/414] [Docs] Docs revert (#1359) * master * master 0721 * add README * Revert "[Docs] Merge docs & docs_zh (#1342)" This reverts commit 364b54d024a87ce1aa599aeccdbe83a7e2653fbe. 
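The revert below touches dozens of files, but the substance is a mechanical link rewrite: every `/docs/en/` reference goes back to `/docs/` and every `/docs/zh_cn/` reference back to `/docs_zh_CN/`. The actual change was produced with `git revert`, not a script; the following hypothetical helper only illustrates the rewrite being undone.

```python
# Hypothetical illustration of the doc-link rewrite this revert undoes
# (docs/en -> docs, docs/zh_cn -> docs_zh_CN). Not the tooling actually
# used; the real change came from `git revert 364b54d0`.
import pathlib
import re

REWRITES = [(r'/docs/en/', '/docs/'), (r'/docs/zh_cn/', '/docs_zh_CN/')]


def rewrite_doc_links(root='.'):
    for md in pathlib.Path(root).rglob('*.md'):
        text = md.read_text(encoding='utf-8')
        for pattern, repl in REWRITES:
            text = re.sub(pattern, repl, text)
        md.write_text(text, encoding='utf-8')
```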
--- .github/workflows/build.yml | 4 +-- .gitignore | 3 +- README.md | 30 +++++++++--------- README_zh-CN.md | 28 ++++++++-------- configs/detection/acrn/README.md | 6 ++-- configs/detection/acrn/README_zh-CN.md | 6 ++-- configs/detection/ava/README.md | 6 ++-- configs/detection/ava/README_zh-CN.md | 6 ++-- configs/detection/lfb/README.md | 4 +-- configs/detection/lfb/README_zh-CN.md | 4 +-- configs/localization/bmn/README.md | 6 ++-- configs/localization/bmn/README_zh-CN.md | 6 ++-- configs/localization/bsn/README.md | 6 ++-- configs/localization/bsn/README_zh-CN.md | 6 ++-- configs/localization/ssn/README.md | 6 ++-- configs/localization/ssn/README_zh-CN.md | 6 ++-- configs/recognition/c3d/README.md | 6 ++-- configs/recognition/c3d/README_zh-CN.md | 6 ++-- configs/recognition/csn/README.md | 6 ++-- configs/recognition/csn/README_zh-CN.md | 6 ++-- configs/recognition/i3d/README.md | 6 ++-- configs/recognition/i3d/README_zh-CN.md | 6 ++-- configs/recognition/r2plus1d/README.md | 6 ++-- configs/recognition/r2plus1d/README_zh-CN.md | 6 ++-- configs/recognition/slowfast/README.md | 6 ++-- configs/recognition/slowfast/README_zh-CN.md | 6 ++-- configs/recognition/slowonly/README.md | 6 ++-- configs/recognition/slowonly/README_zh-CN.md | 6 ++-- configs/recognition/tanet/README.md | 6 ++-- configs/recognition/tanet/README_zh-CN.md | 6 ++-- configs/recognition/timesformer/README.md | 6 ++-- .../recognition/timesformer/README_zh-CN.md | 6 ++-- configs/recognition/tin/README.md | 6 ++-- configs/recognition/tin/README_zh-CN.md | 6 ++-- configs/recognition/tpn/README.md | 6 ++-- configs/recognition/tpn/README_zh-CN.md | 4 +-- configs/recognition/trn/README.md | 4 +-- configs/recognition/trn/README_zh-CN.md | 4 +-- configs/recognition/tsm/README.md | 6 ++-- configs/recognition/tsm/README_zh-CN.md | 6 ++-- configs/recognition/tsn/README.md | 4 +-- configs/recognition/tsn/README_zh-CN.md | 4 +-- configs/recognition/x3d/README.md | 4 +-- configs/recognition/x3d/README_zh-CN.md | 4 +-- configs/recognition_audio/resnet/README.md | 6 ++-- .../recognition_audio/resnet/README_zh-CN.md | 6 ++-- configs/skeleton/2s-agcn/README.md | 4 +-- configs/skeleton/2s-agcn/README_zh-CN.md | 4 +-- configs/skeleton/posec3d/README.md | 4 +-- configs/skeleton/posec3d/README_zh-CN.md | 4 +-- configs/skeleton/stgcn/README.md | 4 +-- configs/skeleton/stgcn/README_zh-CN.md | 4 +-- demo/mmaction2_tutorial.ipynb | 2 +- demo/mmaction2_tutorial_zh-CN.ipynb | 2 +- docs/{en => }/Makefile | 0 docs/{en => }/_static/css/readthedocs.css | 0 docs/{en => }/_static/images/mmaction2.png | Bin docs/{en => }/api.rst | 0 docs/{en => }/benchmark.md | 0 docs/{en => }/changelog.md | 0 docs/{en => }/conf.py | 4 +-- docs/{en => }/data_preparation.md | 0 docs/{en => }/faq.md | 6 ++-- docs/{en => }/feature_extraction.md | 0 docs/{en => }/getting_started.md | 0 docs/{en => }/index.rst | 0 docs/{en => }/install.md | 0 docs/{en => }/make.bat | 0 docs/{en => }/merge_docs.sh | 0 docs/{en => }/projects.md | 0 docs/{en => }/stat.py | 0 docs/{en => }/supported_datasets.md | 0 docs/{en => }/switch_language.md | 0 docs/{en => }/tutorials/1_config.md | 0 docs/{en => }/tutorials/2_finetune.md | 2 +- docs/{en => }/tutorials/3_new_dataset.md | 0 docs/{en => }/tutorials/4_data_pipeline.md | 0 docs/{en => }/tutorials/5_new_modules.md | 0 docs/{en => }/tutorials/6_export_model.md | 0 .../{en => }/tutorials/7_customize_runtime.md | 0 docs/{en => }/useful_tools.md | 0 {docs/zh_cn => docs_zh_CN}/Makefile | 0 {docs/zh_cn => docs_zh_CN}/README.md | 0 {docs/zh_cn => 
docs_zh_CN}/api.rst | 0 {docs/zh_cn => docs_zh_CN}/benchmark.md | 0 {docs/zh_cn => docs_zh_CN}/conf.py | 4 +-- .../zh_cn => docs_zh_CN}/data_preparation.md | 0 {docs/zh_cn => docs_zh_CN}/demo.md | 0 {docs/zh_cn => docs_zh_CN}/faq.md | 4 +-- .../feature_extraction.md | 0 {docs/zh_cn => docs_zh_CN}/getting_started.md | 0 {docs/zh_cn => docs_zh_CN}/index.rst | 0 {docs/zh_cn => docs_zh_CN}/install.md | 0 {docs/zh_cn => docs_zh_CN}/make.bat | 0 {docs/zh_cn => docs_zh_CN}/merge_docs.sh | 14 ++++---- {docs/zh_cn => docs_zh_CN}/stat.py | 0 .../supported_datasets.md | 0 {docs/zh_cn => docs_zh_CN}/switch_language.md | 0 .../tutorials/1_config.md | 0 .../tutorials/2_finetune.md | 0 .../tutorials/3_new_dataset.md | 0 .../tutorials/4_data_pipeline.md | 0 .../tutorials/5_new_modules.md | 0 .../tutorials/6_export_model.md | 0 .../tutorials/7_customize_runtime.md | 0 {docs/zh_cn => docs_zh_CN}/useful_tools.md | 0 tools/data/activitynet/README.md | 6 ++-- tools/data/activitynet/README_zh-CN.md | 6 ++-- tools/data/ava/README.md | 4 +-- tools/data/ava/README_zh-CN.md | 4 +-- tools/data/diving48/README.md | 4 +-- tools/data/diving48/README_zh-CN.md | 4 +-- tools/data/gym/README.md | 4 +-- tools/data/gym/README_zh-CN.md | 4 +-- tools/data/hmdb51/README.md | 4 +-- tools/data/hmdb51/README_zh-CN.md | 4 +-- tools/data/hvu/README.md | 6 ++-- tools/data/hvu/README_zh-CN.md | 6 ++-- tools/data/jester/README.md | 4 +-- tools/data/jester/README_zh-CN.md | 4 +-- tools/data/kinetics/README.md | 6 ++-- tools/data/kinetics/README_zh-CN.md | 6 ++-- tools/data/mit/README.md | 4 +-- tools/data/mit/README_zh-CN.md | 4 +-- tools/data/mmit/README.md | 4 +-- tools/data/mmit/README_zh-CN.md | 4 +-- tools/data/sthv1/README.md | 4 +-- tools/data/sthv1/README_zh-CN.md | 4 +-- tools/data/sthv2/README.md | 4 +-- tools/data/sthv2/README_zh-CN.md | 4 +-- tools/data/thumos14/README.md | 4 +-- tools/data/thumos14/README_zh-CN.md | 4 +-- tools/data/ucf101/README.md | 4 +-- tools/data/ucf101/README_zh-CN.md | 4 +-- 134 files changed, 242 insertions(+), 243 deletions(-) rename docs/{en => }/Makefile (100%) rename docs/{en => }/_static/css/readthedocs.css (100%) rename docs/{en => }/_static/images/mmaction2.png (100%) rename docs/{en => }/api.rst (100%) rename docs/{en => }/benchmark.md (100%) rename docs/{en => }/changelog.md (100%) rename docs/{en => }/conf.py (97%) rename docs/{en => }/data_preparation.md (100%) rename docs/{en => }/faq.md (98%) rename docs/{en => }/feature_extraction.md (100%) rename docs/{en => }/getting_started.md (100%) rename docs/{en => }/index.rst (100%) rename docs/{en => }/install.md (100%) rename docs/{en => }/make.bat (100%) rename docs/{en => }/merge_docs.sh (100%) rename docs/{en => }/projects.md (100%) rename docs/{en => }/stat.py (100%) rename docs/{en => }/supported_datasets.md (100%) rename docs/{en => }/switch_language.md (100%) rename docs/{en => }/tutorials/1_config.md (100%) rename docs/{en => }/tutorials/2_finetune.md (96%) rename docs/{en => }/tutorials/3_new_dataset.md (100%) rename docs/{en => }/tutorials/4_data_pipeline.md (100%) rename docs/{en => }/tutorials/5_new_modules.md (100%) rename docs/{en => }/tutorials/6_export_model.md (100%) rename docs/{en => }/tutorials/7_customize_runtime.md (100%) rename docs/{en => }/useful_tools.md (100%) rename {docs/zh_cn => docs_zh_CN}/Makefile (100%) rename {docs/zh_cn => docs_zh_CN}/README.md (100%) rename {docs/zh_cn => docs_zh_CN}/api.rst (100%) rename {docs/zh_cn => docs_zh_CN}/benchmark.md (100%) rename {docs/zh_cn => docs_zh_CN}/conf.py (97%) rename 
{docs/zh_cn => docs_zh_CN}/data_preparation.md (100%) rename {docs/zh_cn => docs_zh_CN}/demo.md (100%) rename {docs/zh_cn => docs_zh_CN}/faq.md (98%) rename {docs/zh_cn => docs_zh_CN}/feature_extraction.md (100%) rename {docs/zh_cn => docs_zh_CN}/getting_started.md (100%) rename {docs/zh_cn => docs_zh_CN}/index.rst (100%) rename {docs/zh_cn => docs_zh_CN}/install.md (100%) rename {docs/zh_cn => docs_zh_CN}/make.bat (100%) rename {docs/zh_cn => docs_zh_CN}/merge_docs.sh (89%) rename {docs/zh_cn => docs_zh_CN}/stat.py (100%) rename {docs/zh_cn => docs_zh_CN}/supported_datasets.md (100%) rename {docs/zh_cn => docs_zh_CN}/switch_language.md (100%) rename {docs/zh_cn => docs_zh_CN}/tutorials/1_config.md (100%) rename {docs/zh_cn => docs_zh_CN}/tutorials/2_finetune.md (100%) rename {docs/zh_cn => docs_zh_CN}/tutorials/3_new_dataset.md (100%) rename {docs/zh_cn => docs_zh_CN}/tutorials/4_data_pipeline.md (100%) rename {docs/zh_cn => docs_zh_CN}/tutorials/5_new_modules.md (100%) rename {docs/zh_cn => docs_zh_CN}/tutorials/6_export_model.md (100%) rename {docs/zh_cn => docs_zh_CN}/tutorials/7_customize_runtime.md (100%) rename {docs/zh_cn => docs_zh_CN}/useful_tools.md (100%) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 11aa8b38db..e9db239da8 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -12,8 +12,8 @@ on: - '!demo/**' - '!docker/**' - '!tools/**' - - '!docs/en/**' - - '!docs/zh_cn/**' + - '!docs/**' + - '!docs_zh_CN/**' concurrency: group: ${{ github.workflow }}-${{ github.ref }} diff --git a/.gitignore b/.gitignore index 68cb7f5941..587b296482 100644 --- a/.gitignore +++ b/.gitignore @@ -65,8 +65,7 @@ instance/ .scrapy # Sphinx documentation -docs/en/_build/ -docs/zh_cn/_build/ +docs/_build/ # PyBuilder target/ diff --git a/README.md b/README.md index 8991f5b018..eeee2b9bc1 100644 --- a/README.md +++ b/README.md @@ -51,24 +51,24 @@ The master branch works with **PyTorch 1.3+**. - (2021-10-25) We provide a [guide](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md) on how to train PoseC3D with custom datasets, [bit-scientist](https://github.com/bit-scientist) authored this PR! - (2021-10-16) We support **PoseC3D** on UCF101 and HMDB51, achieves 87.0% and 69.3% Top-1 accuracy with 2D skeletons only. Pre-extracted 2D skeletons are also available. -**Release**: v0.20.0 was released in 30/10/2021. Please refer to [changelog.md](docs/en/changelog.md) for details and release history. +**Release**: v0.20.0 was released in 30/10/2021. Please refer to [changelog.md](docs/changelog.md) for details and release history. ## Installation -Please refer to [install.md](docs/en/install.md) for installation. +Please refer to [install.md](docs/install.md) for installation. ## Get Started -Please see [getting_started.md](docs/en/getting_started.md) for the basic usage of MMAction2. +Please see [getting_started.md](docs/getting_started.md) for the basic usage of MMAction2. 
There are also tutorials: -- [learn about configs](docs/en/tutorials/1_config.md) -- [finetuning models](docs/en/tutorials/2_finetune.md) -- [adding new dataset](docs/en/tutorials/3_new_dataset.md) -- [designing data pipeline](docs/en/tutorials/4_data_pipeline.md) -- [adding new modules](docs/en/tutorials/5_new_modules.md) -- [exporting model to onnx](docs/en/tutorials/6_export_model.md) -- [customizing runtime settings](docs/en/tutorials/7_customize_runtime.md) +- [learn about configs](docs/tutorials/1_config.md) +- [finetuning models](docs/tutorials/2_finetune.md) +- [adding new dataset](docs/tutorials/3_new_dataset.md) +- [designing data pipeline](docs/tutorials/4_data_pipeline.md) +- [adding new modules](docs/tutorials/5_new_modules.md) +- [exporting model to onnx](docs/tutorials/6_export_model.md) +- [customizing runtime settings](docs/tutorials/7_customize_runtime.md) A Colab tutorial is also provided. You may preview the notebook [here](demo/mmaction2_tutorial.ipynb) or directly [run](https://colab.research.google.com/github/open-mmlab/mmaction2/blob/master/demo/mmaction2_tutorial.ipynb) on Colab. @@ -207,16 +207,16 @@ Datasets marked with * are not fully supported yet, but related dataset preparat ## Benchmark -To demonstrate the efficacy and efficiency of our framework, we compare MMAction2 with some other popular frameworks and official releases in terms of speed. Details can be found in [benchmark](docs/en/benchmark.md). +To demonstrate the efficacy and efficiency of our framework, we compare MMAction2 with some other popular frameworks and official releases in terms of speed. Details can be found in [benchmark](docs/benchmark.md). ## Data Preparation -Please refer to [data_preparation.md](docs/en/data_preparation.md) for a general knowledge of data preparation. -The supported datasets are listed in [supported_datasets.md](docs/en/supported_datasets.md) +Please refer to [data_preparation.md](docs/data_preparation.md) for a general knowledge of data preparation. +The supported datasets are listed in [supported_datasets.md](docs/supported_datasets.md) ## FAQ -Please refer to [FAQ](docs/en/faq.md) for frequently asked questions. +Please refer to [FAQ](docs/faq.md) for frequently asked questions. ## Projects built on MMAction2 @@ -226,7 +226,7 @@ Currently, there are many research works and projects built on MMAction2 by user - Evidential Deep Learning for Open Set Action Recognition, ICCV 2021 **Oral**. [[paper]](https://arxiv.org/abs/2107.10161)[[github]](https://github.com/Cogito2012/DEAR) - Rethinking Self-supervised Correspondence Learning: A Video Frame-level Similarity Perspective, ICCV 2021 **Oral**. [[paper]](https://arxiv.org/abs/2103.17263)[[github]](https://github.com/xvjiarui/VFS) -etc., check [projects.md](docs/en/projects.md) to see all related projects. +etc., check [projects.md](docs/projects.md) to see all related projects. ## License diff --git a/README_zh-CN.md b/README_zh-CN.md index bf0a554a95..2a7690e897 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -50,23 +50,23 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLa - (2021-10-25) 提供使用自定义数据集训练 PoseC3D 的 [教程](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md),此 PR 由用户 [bit-scientist](https://github.com/bit-scientist) 完成! 
- (2021-10-16) 在 UCF101, HMDB51 上支持 **PoseC3D**,仅用 2D 关键点就可分别达到 87.0% 和 69.3% 的识别准确率。两数据集的预提取骨架特征可以公开下载。 -v0.20.0 版本已于 2021 年 10 月 30 日发布,可通过查阅 [更新日志](/docs/en/changelog.md) 了解更多细节以及发布历史 +v0.20.0 版本已于 2021 年 10 月 30 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史 ## 安装 -请参考 [安装指南](/docs/zh_cn/install.md) 进行安装 +请参考 [安装指南](/docs_zh_CN/install.md) 进行安装 ## 教程 -请参考 [基础教程](/docs/zh_cn/getting_started.md) 了解 MMAction2 的基本使用。MMAction2也提供了其他更详细的教程: +请参考 [基础教程](/docs_zh_CN/getting_started.md) 了解 MMAction2 的基本使用。MMAction2也提供了其他更详细的教程: -- [如何编写配置文件](/docs/zh_cn/tutorials/1_config.md) -- [如何微调模型](/docs/zh_cn/tutorials/2_finetune.md) -- [如何增加新数据集](/docs/zh_cn/tutorials/3_new_dataset.md) -- [如何设计数据处理流程](/docs/zh_cn/tutorials/4_data_pipeline.md) -- [如何增加新模块](/docs/zh_cn/tutorials/5_new_modules.md) -- [如何导出模型为 onnx 格式](/docs/zh_cn/tutorials/6_export_model.md) -- [如何自定义模型运行参数](/docs/zh_cn/tutorials/7_customize_runtime.md) +- [如何编写配置文件](/docs_zh_CN/tutorials/1_config.md) +- [如何微调模型](/docs_zh_CN/tutorials/2_finetune.md) +- [如何增加新数据集](/docs_zh_CN/tutorials/3_new_dataset.md) +- [如何设计数据处理流程](/docs_zh_CN/tutorials/4_data_pipeline.md) +- [如何增加新模块](/docs_zh_CN/tutorials/5_new_modules.md) +- [如何导出模型为 onnx 格式](/docs_zh_CN/tutorials/6_export_model.md) +- [如何自定义模型运行参数](/docs_zh_CN/tutorials/7_customize_runtime.md) MMAction2 也提供了相应的中文 Colab 教程,可以点击 [这里](https://colab.research.google.com/github/open-mmlab/mmaction2/blob/master/demo/mmaction2_tutorial_zh-CN.ipynb) 进行体验! @@ -203,15 +203,15 @@ MMAction2 将跟进学界的最新进展,并支持更多算法和框架。如 ## 基准测试 -为了验证 MMAction2 框架的高精度和高效率,开发成员将其与当前其他主流框架进行速度对比。更多详情可见 [基准测试](/docs/zh_cn/benchmark.md) +为了验证 MMAction2 框架的高精度和高效率,开发成员将其与当前其他主流框架进行速度对比。更多详情可见 [基准测试](/docs_zh_CN/benchmark.md) ## 数据集准备 -请参考 [数据准备](/docs/zh_cn/data_preparation.md) 了解数据集准备概况。所有支持的数据集都列于 [数据集清单](/docs/zh_cn/supported_datasets.md) 中 +请参考 [数据准备](/docs_zh_CN/data_preparation.md) 了解数据集准备概况。所有支持的数据集都列于 [数据集清单](/docs_zh_CN/supported_datasets.md) 中 ## 常见问题 -请参考 [FAQ](/docs/zh_cn/faq.md) 了解其他用户的常见问题 +请参考 [FAQ](/docs_zh_CN/faq.md) 了解其他用户的常见问题 ## 相关工作 @@ -221,7 +221,7 @@ MMAction2 将跟进学界的最新进展,并支持更多算法和框架。如 - Rethinking Self-supervised Correspondence Learning: A Video Frame-level Similarity Perspective, ICCV 2021 **Oral**. [[论文]](https://arxiv.org/abs/2103.17263)[[代码]](https://github.com/xvjiarui/VFS) - Video Swin Transformer. [[论文]](https://arxiv.org/abs/2106.13230)[[代码]](https://github.com/SwinTransformer/Video-Swin-Transformer) -更多详情可见 [相关工作](docs/en/projects.md) +更多详情可见 [相关工作](docs/projects.md) ## 许可 diff --git a/configs/detection/acrn/README.md b/configs/detection/acrn/README.md index 75d18765c1..4f34bec2c4 100644 --- a/configs/detection/acrn/README.md +++ b/configs/detection/acrn/README.md @@ -59,7 +59,7 @@ Current state-of-the-art approaches for spatio-temporal action localization rely ::: -For more details on data preparation, you can refer to AVA in [Data Preparation](/docs/en/data_preparation.md). +For more details on data preparation, you can refer to AVA in [Data Preparation](/docs/data_preparation.md). ## Train @@ -75,7 +75,7 @@ Example: train ACRN with SlowFast backbone on AVA with periodic validation. python tools/train.py configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py --validate ``` -For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). 
+For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). ## Test @@ -91,4 +91,4 @@ Example: test ACRN with SlowFast backbone on AVA and dump the result to a csv fi python tools/test.py configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py checkpoints/SOME_CHECKPOINT.pth --eval mAP --out results.csv ``` -For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset) . +For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset) . diff --git a/configs/detection/acrn/README_zh-CN.md b/configs/detection/acrn/README_zh-CN.md index 13d7837d1d..3ec59cc495 100644 --- a/configs/detection/acrn/README_zh-CN.md +++ b/configs/detection/acrn/README_zh-CN.md @@ -46,7 +46,7 @@ 依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。 如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。 -对于数据集准备的细节,用户可参考 [数据准备](/docs/zh_cn/data_preparation.md)。 +对于数据集准备的细节,用户可参考 [数据准备](/docs_zh_CN/data_preparation.md)。 ## 如何训练 @@ -62,7 +62,7 @@ python tools/train.py ${CONFIG_FILE} [optional arguments] python tools/train.py configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py --validate ``` -更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -78,4 +78,4 @@ python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] python tools/test.py configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py checkpoints/SOME_CHECKPOINT.pth --eval mAP --out results.csv ``` -更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/detection/ava/README.md b/configs/detection/ava/README.md index c46895f007..5fa66a4c18 100644 --- a/configs/detection/ava/README.md +++ b/configs/detection/ava/README.md @@ -86,7 +86,7 @@ AVA, with its realistic scene and action complexity, exposes the intrinsic diffi ::: -For more details on data preparation, you can refer to AVA in [Data Preparation](/docs/en/data_preparation.md). +For more details on data preparation, you can refer to AVA in [Data Preparation](/docs/data_preparation.md). ## Train @@ -102,7 +102,7 @@ Example: train SlowOnly model on AVA with periodic validation. python tools/train.py configs/detection/ava/slowonly_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py --validate ``` -For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting) . +For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting) . ### Train Custom Classes From Ava Dataset @@ -140,4 +140,4 @@ Example: test SlowOnly model on AVA and dump the result to a csv file. python tools/test.py configs/detection/ava/slowonly_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py checkpoints/SOME_CHECKPOINT.pth --eval mAP --out results.csv ``` -For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset) . 
+For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset) . diff --git a/configs/detection/ava/README_zh-CN.md b/configs/detection/ava/README_zh-CN.md index a682a039ac..6cd82f4a3e 100644 --- a/configs/detection/ava/README_zh-CN.md +++ b/configs/detection/ava/README_zh-CN.md @@ -72,7 +72,7 @@ 如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。 2. **Context** 表示同时使用 RoI 特征与全局特征进行分类,可带来约 1% mAP 的提升。 -对于数据集准备的细节,用户可参考 [数据准备](/docs/zh_cn/data_preparation.md)。 +对于数据集准备的细节,用户可参考 [数据准备](/docs_zh_CN/data_preparation.md)。 ## 如何训练 @@ -88,7 +88,7 @@ python tools/train.py ${CONFIG_FILE} [optional arguments] python tools/train.py configs/detection/ava/slowonly_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py --validate ``` -更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 ### 训练 AVA 数据集中的自定义类别 @@ -126,4 +126,4 @@ python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] python tools/test.py configs/detection/ava/slowonly_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py checkpoints/SOME_CHECKPOINT.pth --eval mAP --out results.csv ``` -更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/detection/lfb/README.md b/configs/detection/lfb/README.md index ea88419dd8..2bd9a2a233 100644 --- a/configs/detection/lfb/README.md +++ b/configs/detection/lfb/README.md @@ -98,7 +98,7 @@ python tools/train.py configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_ --validate --seed 0 --deterministic ``` -For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). +For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). ## Test @@ -123,4 +123,4 @@ python tools/test.py configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r checkpoints/SOME_CHECKPOINT.pth --eval mAP --out results.csv ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). 
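The AVA README patched above documents a "Train Custom Classes From Ava Dataset" workflow. As a sketch of what such a config override can look like, the snippet below subsets the label space; the chosen class ids are arbitrary examples, and the exact keys should be checked against the config being inherited.

```python
# Sketch of an AVA config that trains on a subset of action classes.
# The class ids are illustrative, and the field names are assumptions
# to verify against the base config rather than confirmed keys.
_base_ = ['./slowonly_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py']

custom_classes = [11, 12, 14, 79, 80]  # AVA label ids kept for training
num_classes = len(custom_classes) + 1  # +1 for the background class

model = dict(roi_head=dict(bbox_head=dict(num_classes=num_classes)))
data = dict(
    train=dict(custom_classes=custom_classes, num_classes=num_classes),
    val=dict(custom_classes=custom_classes, num_classes=num_classes))
```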
diff --git a/configs/detection/lfb/README_zh-CN.md b/configs/detection/lfb/README_zh-CN.md index 3cb8bf5186..4c90a66bd5 100644 --- a/configs/detection/lfb/README_zh-CN.md +++ b/configs/detection/lfb/README_zh-CN.md @@ -75,7 +75,7 @@ python tools/train.py configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_ --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 测试 @@ -100,4 +100,4 @@ python tools/test.py configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r checkpoints/SOME_CHECKPOINT.pth --eval mAP --out results.csv ``` -更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/localization/bmn/README.md b/configs/localization/bmn/README.md index 8c4cfcf8eb..43147c2109 100644 --- a/configs/localization/bmn/README.md +++ b/configs/localization/bmn/README.md @@ -60,7 +60,7 @@ Temporal action proposal generation is an challenging and promising task which a *We train BMN with the [official repo](https://github.com/JJBOY/BMN-Boundary-Matching-Network), evaluate its proposal generation and action detection performance with [anet_cuhk_2017](https://download.openmmlab.com/mmaction/localization/cuhk_anet17_pred.json) for label assigning. -For more details on data preparation, you can refer to ActivityNet feature in [Data Preparation](/docs/en/data_preparation.md). +For more details on data preparation, you can refer to ActivityNet feature in [Data Preparation](/docs/data_preparation.md). ## Train @@ -76,7 +76,7 @@ Example: train BMN model on ActivityNet features dataset. python tools/train.py configs/localization/bmn/bmn_400x100_2x8_9e_activitynet_feature.py ``` -For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting) . +For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting) . ## Test @@ -109,4 +109,4 @@ python tools/analysis/report_map.py --proposal path/to/proposal_file ::: -For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset) . +For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset) . 
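The BMN README above feeds a dumped proposal file to `tools/analysis/report_map.py`. For a quick sanity check of that file, a small sketch using `mmcv.load` works; the ActivityNet-style layout assumed here, a `results` dict of per-video proposal lists with `score` and `segment` keys, is the common convention for such dumps rather than a documented guarantee.

```python
# Quick inspection of a dumped proposal file before scoring it.
# Assumes the ActivityNet-style layout {'results': {video_id: [...]}}.
import mmcv

results = mmcv.load('results.json')
proposals = results.get('results', results)
video_id = next(iter(proposals))
for proposal in proposals[video_id][:5]:
    # each entry is assumed to carry a confidence score and a
    # [start, end] segment in seconds
    print(proposal['score'], proposal['segment'])
```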
diff --git a/configs/localization/bmn/README_zh-CN.md b/configs/localization/bmn/README_zh-CN.md index 5e15f6b06e..3778f390fa 100644 --- a/configs/localization/bmn/README_zh-CN.md +++ b/configs/localization/bmn/README_zh-CN.md @@ -48,7 +48,7 @@ *MMAction2 在 [原始代码库](https://github.com/JJBOY/BMN-Boundary-Matching-Network) 上训练 BMN,并且在 [anet_cuhk_2017](https://download.openmmlab.com/mmaction/localization/cuhk_anet17_pred.json) 的对应标签上评估时序动作候选生成和时序检测的结果。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的 ActivityNet 特征部分。 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 ActivityNet 特征部分。 ## 如何训练 @@ -64,7 +64,7 @@ python tools/train.py ${CONFIG_FILE} [optional arguments] python tools/train.py configs/localization/bmn/bmn_400x100_2x8_9e_activitynet_feature.py ``` -更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -95,4 +95,4 @@ python tools/analysis/report_map.py --proposal path/to/proposal_file python tools/data/activitynet/convert_proposal_format.py ``` -更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/localization/bsn/README.md b/configs/localization/bsn/README.md index b87eebcb9f..d15b6361c7 100644 --- a/configs/localization/bsn/README.md +++ b/configs/localization/bsn/README.md @@ -44,7 +44,7 @@ Temporal action proposal generation is an important yet challenging problem, sin ::: -For more details on data preparation, you can refer to ActivityNet feature in [Data Preparation](/docs/en/data_preparation.md). +For more details on data preparation, you can refer to ActivityNet feature in [Data Preparation](/docs/data_preparation.md). ## Train @@ -68,7 +68,7 @@ Examples: python tools/train.py configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py ``` -For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). +For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). ## Inference @@ -167,4 +167,4 @@ Examples: ::: -For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). +For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). diff --git a/configs/localization/bsn/README_zh-CN.md b/configs/localization/bsn/README_zh-CN.md index 7271bc1cf4..6d0ddfc2df 100644 --- a/configs/localization/bsn/README_zh-CN.md +++ b/configs/localization/bsn/README_zh-CN.md @@ -32,7 +32,7 @@ 2. 
对于 **特征** 这一列,`cuhk_mean_100` 表示所使用的特征为利用 [anet2016-cuhk](https://github.com/yjxiong/anet2016-cuhk) 代码库抽取的,被广泛利用的 CUHK ActivityNet 特征, `mmaction_video` 和 `mmaction_clip` 分布表示所使用的特征为利用 MMAction 抽取的,视频级别 ActivityNet 预训练模型的特征;视频片段级别 ActivityNet 预训练模型的特征。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的 ActivityNet 特征部分。 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 ActivityNet 特征部分。 ## 如何训练 @@ -56,7 +56,7 @@ python tools/train.py ${CONFIG_FILE} [optional arguments] python tools/train.py configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py ``` -更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何进行推理 @@ -153,4 +153,4 @@ python tools/train.py ${CONFIG_FILE} [optional arguments] python tools/data/activitynet/convert_proposal_format.py ``` -更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/localization/ssn/README.md b/configs/localization/ssn/README.md index b5c2a68257..c5e5dc09fa 100644 --- a/configs/localization/ssn/README.md +++ b/configs/localization/ssn/README.md @@ -37,7 +37,7 @@ year = {2017} According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you may set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, e.g., lr=0.01 for 4 GPUs x 2 video/gpu and lr=0.08 for 16 GPUs x 4 video/gpu. 2. Since SSN utilizes different structured temporal pyramid pooling methods at training and testing, please refer to [ssn_r50_450e_thumos14_rgb_train](/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py) at training and [ssn_r50_450e_thumos14_rgb_test](/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_test.py) at testing. -3. We evaluate the action detection performance of SSN, using action proposals of TAG. For more details on data preparation, you can refer to thumos14 TAG proposals in [Data Preparation](/docs/en/data_preparation.md). +3. We evaluate the action detection performance of SSN, using action proposals of TAG. For more details on data preparation, you can refer to thumos14 TAG proposals in [Data Preparation](/docs/data_preparation.md). 4. The reference SSN in is evaluated with `ResNet50` backbone in MMAction, which is the same backbone with ours. Note that the original setting of MMAction SSN uses the `BNInception` backbone. ::: @@ -56,7 +56,7 @@ Example: train SSN model on thumos14 dataset. python tools/train.py configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py ``` -For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). +For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). ## Test @@ -73,4 +73,4 @@ Example: test BMN on ActivityNet feature dataset. python tools/test.py configs/localization/ssn/ssn_r50_450e_thumos14_rgb_test.py checkpoints/SOME_CHECKPOINT.pth --eval mAP ``` -For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). +For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). 
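The linear scaling rule cited throughout these README diffs reduces to simple arithmetic: scale the base learning rate by the ratio of your total batch size to the reference batch size. A sketch using the numbers quoted in the READMEs:

```python
# Linear scaling rule from the READMEs: lr grows with total batch size.
def scaled_lr(base_lr, base_batch, num_gpus, videos_per_gpu):
    return base_lr * (num_gpus * videos_per_gpu) / base_batch

# reference setting: lr=0.01 at 4 GPUs x 2 videos/GPU (batch size 8)
print(scaled_lr(0.01, base_batch=4 * 2, num_gpus=16, videos_per_gpu=4))
# -> 0.08, matching the 16 GPUs x 4 videos/GPU example in the text
```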
diff --git a/configs/localization/ssn/README_zh-CN.md b/configs/localization/ssn/README_zh-CN.md index 3b85c61ad1..d1ec5bbcee 100644 --- a/configs/localization/ssn/README_zh-CN.md +++ b/configs/localization/ssn/README_zh-CN.md @@ -26,7 +26,7 @@ year = {2017} 依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。 如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。 2. 由于 SSN 在训练和测试阶段使用不同的结构化时序金字塔池化方法(structured temporal pyramid pooling methods),请分别参考 [ssn_r50_450e_thumos14_rgb_train](/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py) 和 [ssn_r50_450e_thumos14_rgb_test](/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_test.py)。 -3. MMAction2 使用 TAG 的时序动作候选进行 SSN 模型的精度验证。关于数据准备的更多细节,用户可参考 [Data 数据集准备文档](/docs/zh_cn/data_preparation.md) 准备 thumos14 的 TAG 时序动作候选。 +3. MMAction2 使用 TAG 的时序动作候选进行 SSN 模型的精度验证。关于数据准备的更多细节,用户可参考 [Data 数据集准备文档](/docs_zh_CN/data_preparation.md) 准备 thumos14 的 TAG 时序动作候选。 4. 参考代码的 SSN 模型是和 MMAction2 一样在 `ResNet50` 主干网络上验证的。注意,这里的 SSN 的初始设置与原代码库的 `BNInception` 骨干网络的设置相同。 ## 如何训练 @@ -43,7 +43,7 @@ python tools/train.py ${CONFIG_FILE} [optional arguments] python tools/train.py configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py ``` -更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -60,4 +60,4 @@ python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] python tools/test.py configs/localization/ssn/ssn_r50_450e_thumos14_rgb_test.py checkpoints/SOME_CHECKPOINT.pth --eval mAP ``` -更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/c3d/README.md b/configs/recognition/c3d/README.md index 0ea46809cb..067097fdbe 100644 --- a/configs/recognition/c3d/README.md +++ b/configs/recognition/c3d/README.md @@ -45,7 +45,7 @@ eid = {arXiv:1412.0767} ::: -For more details on data preparation, you can refer to UCF-101 in [Data Preparation](/docs/en/data_preparation.md). +For more details on data preparation, you can refer to UCF-101 in [Data Preparation](/docs/data_preparation.md). ## Train @@ -62,7 +62,7 @@ python tools/train.py configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). ## Test @@ -79,4 +79,4 @@ python tools/test.py configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb. checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). diff --git a/configs/recognition/c3d/README_zh-CN.md b/configs/recognition/c3d/README_zh-CN.md index 6f1965bee0..c4f02c16f2 100644 --- a/configs/recognition/c3d/README_zh-CN.md +++ b/configs/recognition/c3d/README_zh-CN.md @@ -32,7 +32,7 @@ eid = {arXiv:1412.0767} 3. 
这里的 **推理时间** 是根据 [基准测试脚本](/tools/analysis/benchmark.py) 获得的,采用测试时的采帧策略,且只考虑模型的推理时间, 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的 UCF-101 部分。 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 UCF-101 部分。 ## 如何训练 @@ -49,7 +49,7 @@ python tools/train.py configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -66,4 +66,4 @@ python tools/test.py configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb. checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy ``` -更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/csn/README.md b/configs/recognition/csn/README.md index 5e3d4e4f57..3a48f6bbda 100644 --- a/configs/recognition/csn/README.md +++ b/configs/recognition/csn/README.md @@ -66,7 +66,7 @@ doi = {10.1109/ICCV.2019.00565} ::: -For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/en/data_preparation.md). +For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). ## Train @@ -84,7 +84,7 @@ python tools/train.py configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1 --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). ## Test @@ -102,4 +102,4 @@ python tools/test.py configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_ --out result.json --average-clips prob ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). diff --git a/configs/recognition/csn/README_zh-CN.md b/configs/recognition/csn/README_zh-CN.md index 4ad92b64fd..06a28cd5c5 100644 --- a/configs/recognition/csn/README_zh-CN.md +++ b/configs/recognition/csn/README_zh-CN.md @@ -53,7 +53,7 @@ doi = {10.1109/ICCV.2019.00565} 3. 这里使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 4. 
这里的 **infer_ckpt** 表示该模型权重文件是从 [VMZ](https://github.com/facebookresearch/VMZ) 导入的。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的 Kinetics400 部分。 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。 ## 如何训练 @@ -71,7 +71,7 @@ python tools/train.py configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1 --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -89,4 +89,4 @@ python tools/test.py configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_ --out result.json --average-clips prob ``` -更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/i3d/README.md b/configs/recognition/i3d/README.md index bf67c6f189..5a2bfd7a33 100644 --- a/configs/recognition/i3d/README.md +++ b/configs/recognition/i3d/README.md @@ -64,7 +64,7 @@ The paucity of videos in current action classification datasets (UCF-101 and HMD ::: -For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/en/data_preparation.md). +For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). ## Train @@ -82,7 +82,7 @@ python tools/train.py configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rg --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). ## Test @@ -100,4 +100,4 @@ python tools/test.py configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb --out result.json --average-clips prob ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). diff --git a/configs/recognition/i3d/README_zh-CN.md b/configs/recognition/i3d/README_zh-CN.md index 6e778cd7c9..ac10732615 100644 --- a/configs/recognition/i3d/README_zh-CN.md +++ b/configs/recognition/i3d/README_zh-CN.md @@ -52,7 +52,7 @@ 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 3. 
我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的 Kinetics400 部分。 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。 ## 如何训练 @@ -70,7 +70,7 @@ python tools/train.py configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rg --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -88,4 +88,4 @@ python tools/test.py configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb --out result.json --average-clips prob ``` -更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/r2plus1d/README.md b/configs/recognition/r2plus1d/README.md index 9671e88cb4..f9cd05cca1 100644 --- a/configs/recognition/r2plus1d/README.md +++ b/configs/recognition/r2plus1d/README.md @@ -46,7 +46,7 @@ In this paper we discuss several forms of spatiotemporal convolutions for video ::: -For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/en/data_preparation.md). +For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). ## Train @@ -64,7 +64,7 @@ python tools/train.py configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinet --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). ## Test @@ -82,4 +82,4 @@ python tools/test.py configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kineti --out result.json --average-clips=prob ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). diff --git a/configs/recognition/r2plus1d/README_zh-CN.md b/configs/recognition/r2plus1d/README_zh-CN.md index 5df080c927..d720508dc0 100644 --- a/configs/recognition/r2plus1d/README_zh-CN.md +++ b/configs/recognition/r2plus1d/README_zh-CN.md @@ -34,7 +34,7 @@ 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 3. 
我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的 Kinetics400 部分。 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。 ## 如何训练 @@ -52,7 +52,7 @@ python tools/train.py configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinet --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -70,4 +70,4 @@ python tools/test.py configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kineti --out result.json --average-clips=prob ``` -更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/slowfast/README.md b/configs/recognition/slowfast/README.md index 61f461c98e..4bbdbd4f0c 100644 --- a/configs/recognition/slowfast/README.md +++ b/configs/recognition/slowfast/README.md @@ -56,7 +56,7 @@ We present SlowFast networks for video recognition. Our model involves (i) a Slo ::: -For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/en/data_preparation.md). +For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). ## Train @@ -74,7 +74,7 @@ python tools/train.py configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kine --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). ## Test @@ -92,4 +92,4 @@ python tools/test.py configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinet --out result.json --average-clips=prob ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). diff --git a/configs/recognition/slowfast/README_zh-CN.md b/configs/recognition/slowfast/README_zh-CN.md index 95e9383140..7605871d2d 100644 --- a/configs/recognition/slowfast/README_zh-CN.md +++ b/configs/recognition/slowfast/README_zh-CN.md @@ -44,7 +44,7 @@ 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 3. 
我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的 Kinetics400 部分。 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。 ## 如何训练 @@ -62,7 +62,7 @@ python tools/train.py configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kine --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -80,4 +80,4 @@ python tools/test.py configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinet --out result.json --average-clips=prob ``` -更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/slowonly/README.md b/configs/recognition/slowonly/README.md index 622c3fde7a..d5846782ae 100644 --- a/configs/recognition/slowonly/README.md +++ b/configs/recognition/slowonly/README.md @@ -118,7 +118,7 @@ In data benchmark, we compare two different data preprocessing methods: (1) Resi ::: -For more details on data preparation, you can refer to corresponding parts in [Data Preparation](/docs/en/data_preparation.md). +For more details on data preparation, you can refer to corresponding parts in [Data Preparation](/docs/data_preparation.md). ## Train @@ -136,7 +136,7 @@ python tools/train.py configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kine --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). ## Test @@ -154,4 +154,4 @@ python tools/test.py configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinet --out result.json --average-clips=prob ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). diff --git a/configs/recognition/slowonly/README_zh-CN.md b/configs/recognition/slowonly/README_zh-CN.md index 29109d2c5d..917be85500 100644 --- a/configs/recognition/slowonly/README_zh-CN.md +++ b/configs/recognition/slowonly/README_zh-CN.md @@ -106,7 +106,7 @@ 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 3. 
我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的 Kinetics400 部分。 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。 ## 如何训练 @@ -124,7 +124,7 @@ python tools/train.py configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kine --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -142,4 +142,4 @@ python tools/test.py configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinet --out result.json --average-clips=prob ``` -更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/tanet/README.md b/configs/recognition/tanet/README.md index 10f76d2aa9..37760e5042 100644 --- a/configs/recognition/tanet/README.md +++ b/configs/recognition/tanet/README.md @@ -50,7 +50,7 @@ Video data is with complex temporal dynamics due to various factors such as came ::: -For more details on data preparation, you can refer to corresponding parts in [Data Preparation](/docs/en/data_preparation.md). +For more details on data preparation, you can refer to corresponding parts in [Data Preparation](/docs/data_preparation.md). ## Train @@ -68,7 +68,7 @@ python tools/train.py configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinet --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). ## Test @@ -86,4 +86,4 @@ python tools/test.py configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kineti --out result.json ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). diff --git a/configs/recognition/tanet/README_zh-CN.md b/configs/recognition/tanet/README_zh-CN.md index 02e42201c3..4902cf8430 100644 --- a/configs/recognition/tanet/README_zh-CN.md +++ b/configs/recognition/tanet/README_zh-CN.md @@ -38,7 +38,7 @@ 3. 参考代码的结果是通过使用相同的模型配置在原来的代码库上训练得到的。对应的模型权重文件可从 [这里](https://drive.google.com/drive/folders/1sFfmP3yrfc7IzRshEELOby7-aEoymIFL?usp=sharing) 下载。 4. 
我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的 Kinetics400 部分。 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。 ## 如何训练 @@ -56,7 +56,7 @@ python tools/train.py configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinet --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -74,4 +74,4 @@ python tools/test.py configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kineti --out result.json ``` -更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/timesformer/README.md b/configs/recognition/timesformer/README.md index 2ee361d949..54b4f25443 100644 --- a/configs/recognition/timesformer/README.md +++ b/configs/recognition/timesformer/README.md @@ -46,7 +46,7 @@ We present a convolution-free approach to video classification built exclusively ::: -For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/en/data_preparation.md). +For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). ## Train @@ -64,7 +64,7 @@ python tools/train.py configs/recognition/timesformer/timesformer_divST_8x32x1_1 --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). ## Test @@ -82,4 +82,4 @@ python tools/test.py configs/recognition/timesformer/timesformer_divST_8x32x1_15 --out result.json ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). diff --git a/configs/recognition/timesformer/README_zh-CN.md b/configs/recognition/timesformer/README_zh-CN.md index d84d2fe2e0..c844917e01 100644 --- a/configs/recognition/timesformer/README_zh-CN.md +++ b/configs/recognition/timesformer/README_zh-CN.md @@ -33,7 +33,7 @@ 2. MMAction2 保持与 [原代码](https://github.com/facebookresearch/TimeSformer) 的测试设置一致(three crop x 1 clip)。 3. 
TimeSformer 使用的预训练模型 `vit_base_patch16_224.pth` 转换自 [vision_transformer](https://github.com/google-research/vision_transformer)。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的 Kinetics400 部分。 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分。 ## 如何训练 @@ -51,7 +51,7 @@ python tools/train.py configs/recognition/timesformer/timesformer_divST_8x32x1_1 --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -69,4 +69,4 @@ python tools/test.py configs/recognition/timesformer/timesformer_divST_8x32x1_15 --out result.json ``` -更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/tin/README.md b/configs/recognition/tin/README.md index eb7fc375d7..cf57eed749 100644 --- a/configs/recognition/tin/README.md +++ b/configs/recognition/tin/README.md @@ -60,7 +60,7 @@ Here, we use `finetune` to indicate that we use [TSM model](https://download.ope ::: -For more details on data preparation, you can refer to Kinetics400, Something-Something V1 and Something-Something V2 in [Data Preparation](/docs/en/data_preparation.md). +For more details on data preparation, you can refer to Kinetics400, Something-Something V1 and Something-Something V2 in [Data Preparation](/docs/data_preparation.md). ## Train @@ -78,7 +78,7 @@ python tools/train.py configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py \ --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). ## Test @@ -96,4 +96,4 @@ python tools/test.py configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py \ --out result.json ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). diff --git a/configs/recognition/tin/README_zh-CN.md b/configs/recognition/tin/README_zh-CN.md index 431769addb..2747fa6c94 100644 --- a/configs/recognition/tin/README_zh-CN.md +++ b/configs/recognition/tin/README_zh-CN.md @@ -46,7 +46,7 @@ 4. 参考代码的结果是通过使用相同的模型配置在原来的代码库上训练得到的。 5. 
我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的 Kinetics400, Something-Something V1 and Something-Something V2 部分。 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400, Something-Something V1 and Something-Something V2 部分。 ## 如何训练 @@ -64,7 +64,7 @@ python tools/train.py configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py \ --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -82,4 +82,4 @@ python tools/test.py configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py \ --out result.json ``` -更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/tpn/README.md b/configs/recognition/tpn/README.md index 304bc5ecad..7ce9ce6f63 100644 --- a/configs/recognition/tpn/README.md +++ b/configs/recognition/tpn/README.md @@ -51,7 +51,7 @@ Visual tempo characterizes the dynamics and the temporal scale of an action. Mod ::: -For more details on data preparation, you can refer to Kinetics400, Something-Something V1 and Something-Something V2 in [Data Preparation](/docs/en/data_preparation.md). +For more details on data preparation, you can refer to Kinetics400, Something-Something V1 and Something-Something V2 in [Data Preparation](/docs/data_preparation.md). ## Train @@ -68,7 +68,7 @@ python tools/train.py configs/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kineti --work-dir work_dirs/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb [--validate --seed 0 --deterministic] ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). ## Test @@ -86,4 +86,4 @@ python tools/test.py configs/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetic --out result.json --average-clips prob ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). 
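The README hunks on either side of this point all make the same mechanical change: links under `/docs/en/` drop the language segment, and `/docs/zh_cn/` becomes `/docs_zh_CN/`. A minimal sketch of how such a sweep could be scripted is below; the path pairs and the `*.md` glob are assumptions for illustration, not the commands actually used to produce this patch.

```python
# Hypothetical bulk rewrite of doc cross-references, mirroring the pattern
# of the surrounding hunks. REWRITES and the glob are assumptions.
from pathlib import Path

REWRITES = [('/docs/en/', '/docs/'), ('/docs/zh_cn/', '/docs_zh_CN/')]

for md in Path('.').rglob('*.md'):
    original = md.read_text(encoding='utf-8')
    text = original
    for old, new in REWRITES:
        text = text.replace(old, new)
    if text != original:
        md.write_text(text, encoding='utf-8')
        print(f'rewrote {md}')
```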
diff --git a/configs/recognition/tpn/README_zh-CN.md b/configs/recognition/tpn/README_zh-CN.md index e525c2140d..ec66656d1d 100644 --- a/configs/recognition/tpn/README_zh-CN.md +++ b/configs/recognition/tpn/README_zh-CN.md @@ -53,7 +53,7 @@ python tools/train.py configs/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kineti --work-dir work_dirs/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb [--validate --seed 0 --deterministic] ``` -更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -71,4 +71,4 @@ python tools/test.py configs/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetic --out result.json --average-clips prob ``` -更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/trn/README.md b/configs/recognition/trn/README.md index 75ad603e63..ff2f4d8785 100644 --- a/configs/recognition/trn/README.md +++ b/configs/recognition/trn/README.md @@ -70,7 +70,7 @@ python tools/train.py configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py \ --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). ## Test @@ -88,4 +88,4 @@ python tools/test.py configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py \ --out result.json ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). diff --git a/configs/recognition/trn/README_zh-CN.md b/configs/recognition/trn/README_zh-CN.md index beb575159f..d0e85f015c 100644 --- a/configs/recognition/trn/README_zh-CN.md +++ b/configs/recognition/trn/README_zh-CN.md @@ -57,7 +57,7 @@ python tools/train.py configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py \ --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -75,4 +75,4 @@ python tools/test.py configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py \ --out result.json ``` -更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/tsm/README.md b/configs/recognition/tsm/README.md index 994723a308..c3528ee5be 100644 --- a/configs/recognition/tsm/README.md +++ b/configs/recognition/tsm/README.md @@ -151,7 +151,7 @@ test_pipeline = [ ::: -For more details on data preparation, you can refer to corresponding parts in [Data Preparation](/docs/en/data_preparation.md). +For more details on data preparation, you can refer to corresponding parts in [Data Preparation](/docs/data_preparation.md). ## Train @@ -169,7 +169,7 @@ python tools/train.py configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb. --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). 
## Test @@ -187,4 +187,4 @@ python tools/test.py configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.p --out result.json ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). diff --git a/configs/recognition/tsm/README_zh-CN.md b/configs/recognition/tsm/README_zh-CN.md index ccf584d57a..f95876fd9e 100644 --- a/configs/recognition/tsm/README_zh-CN.md +++ b/configs/recognition/tsm/README_zh-CN.md @@ -145,7 +145,7 @@ test_pipeline = [ 6. 我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 7. 这里的 **infer_ckpt** 表示该模型权重文件是从 [TSM](https://github.com/mit-han-lab/temporal-shift-module/blob/master/test_models.py) 导入的。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的 Kinetics400, Something-Something V1 and Something-Something V2 部分。 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400, Something-Something V1 and Something-Something V2 部分。 ## 如何训练 @@ -163,7 +163,7 @@ python tools/train.py configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb. --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -181,4 +181,4 @@ python tools/test.py configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.p --out result.json ``` -更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/tsn/README.md b/configs/recognition/tsn/README.md index 276234f280..f3f5811ef5 100644 --- a/configs/recognition/tsn/README.md +++ b/configs/recognition/tsn/README.md @@ -224,7 +224,7 @@ python tools/train.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). ## Test @@ -242,4 +242,4 @@ python tools/test.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb. --out result.json ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). 
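With this many cross-references rewritten at once, it is worth checking that every absolute repo link still resolves. A rough sketch follows, assuming it runs from the repository root; the regex only approximates Markdown link syntax and is not part of the patch.

```python
# Sketch: flag links such as (/docs/getting_started.md#...) whose target
# file no longer exists after the rename. Regex and root are assumptions.
import re
from pathlib import Path

ROOT = Path('.')
LINK = re.compile(r'\]\((/[^)#\s]+)')  # absolute links like (/docs/faq.md)

for md in ROOT.rglob('README*.md'):
    for target in LINK.findall(md.read_text(encoding='utf-8')):
        if not (ROOT / target.lstrip('/')).exists():
            print(f'{md}: broken link {target}')
```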
diff --git a/configs/recognition/tsn/README_zh-CN.md b/configs/recognition/tsn/README_zh-CN.md index 5cee3ea365..69e95459a5 100644 --- a/configs/recognition/tsn/README_zh-CN.md +++ b/configs/recognition/tsn/README_zh-CN.md @@ -213,7 +213,7 @@ python tools/train.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -231,4 +231,4 @@ python tools/test.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb. --out result.json ``` -更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/x3d/README.md b/configs/recognition/x3d/README.md index cea5789cfc..a7a3c7e715 100644 --- a/configs/recognition/x3d/README.md +++ b/configs/recognition/x3d/README.md @@ -44,7 +44,7 @@ This paper presents X3D, a family of efficient video networks that progressively ::: -For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/en/data_preparation.md). +For more details on data preparation, you can refer to Kinetics400 in [Data Preparation](/docs/data_preparation.md). ## Test @@ -62,4 +62,4 @@ python tools/test.py configs/recognition/x3d/x3d_s_13x6x1_facebook_kinetics400_r --out result.json --average-clips prob ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). diff --git a/configs/recognition/x3d/README_zh-CN.md b/configs/recognition/x3d/README_zh-CN.md index 947b5bf093..3b09e5276b 100644 --- a/configs/recognition/x3d/README_zh-CN.md +++ b/configs/recognition/x3d/README_zh-CN.md @@ -31,7 +31,7 @@ 1. 参考代码的结果是通过使用相同的数据和原来的代码库所提供的模型进行测试得到的。 2. 我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的 Kinetics400 部分 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 Kinetics400 部分 ## 如何测试 @@ -49,4 +49,4 @@ python tools/test.py configs/recognition/x3d/x3d_s_13x6x1_facebook_kinetics400_r --out result.json --average-clips prob ``` -更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition_audio/resnet/README.md b/configs/recognition_audio/resnet/README.md index 1a5b718490..4c9ee539c8 100644 --- a/configs/recognition_audio/resnet/README.md +++ b/configs/recognition_audio/resnet/README.md @@ -44,7 +44,7 @@ tecture for integrated audiovisual perception. AVSlowFast has Slow and Fast visu ::: -For more details on data preparation, you can refer to ``Prepare audio`` in [Data Preparation](/docs/en/data_preparation.md). +For more details on data preparation, you can refer to ``Prepare audio`` in [Data Preparation](/docs/data_preparation.md). 
## Train @@ -62,7 +62,7 @@ python tools/train.py configs/audio_recognition/tsn_r50_64x1x1_100e_kinetics400_ --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). ## Test @@ -80,7 +80,7 @@ python tools/test.py configs/audio_recognition/tsn_r50_64x1x1_100e_kinetics400_a --out result.json ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). ## Fusion diff --git a/configs/recognition_audio/resnet/README_zh-CN.md b/configs/recognition_audio/resnet/README_zh-CN.md index c3a38dd0e1..bf1188ff46 100644 --- a/configs/recognition_audio/resnet/README_zh-CN.md +++ b/configs/recognition_audio/resnet/README_zh-CN.md @@ -31,7 +31,7 @@ 并不包括 IO 时间以及预处理时间。对于每个配置,MMAction2 使用 1 块 GPU 并设置批大小(每块 GPU 处理的视频个数)为 1 来计算推理时间。 3. 我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 -对于数据集准备的细节,用户可参考 [数据集准备文档](/docs/zh_cn/data_preparation.md) 中的准备音频部分。 +对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的准备音频部分。 ## 如何训练 @@ -49,7 +49,7 @@ python tools/train.py configs/audio_recognition/tsn_r50_64x1x1_100e_kinetics400_ --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -67,7 +67,7 @@ python tools/test.py configs/audio_recognition/tsn_r50_64x1x1_100e_kinetics400_a --out result.json ``` -更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 ## 融合 diff --git a/configs/skeleton/2s-agcn/README.md b/configs/skeleton/2s-agcn/README.md index 5013f4e7c4..d6049c735c 100644 --- a/configs/skeleton/2s-agcn/README.md +++ b/configs/skeleton/2s-agcn/README.md @@ -58,7 +58,7 @@ python tools/train.py configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). ## Test @@ -84,4 +84,4 @@ python tools/test.py configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py \ --out bone_result.pkl ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). 
diff --git a/configs/skeleton/2s-agcn/README_zh-CN.md b/configs/skeleton/2s-agcn/README_zh-CN.md index 5e5f0f4092..ae7cc00a20 100644 --- a/configs/skeleton/2s-agcn/README_zh-CN.md +++ b/configs/skeleton/2s-agcn/README_zh-CN.md @@ -47,7 +47,7 @@ python tools/train.py configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -73,4 +73,4 @@ python tools/test.py configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py \ --out bone_result.pkl ``` -更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/skeleton/posec3d/README.md b/configs/skeleton/posec3d/README.md index 7cc5c22f16..3b8b686db6 100644 --- a/configs/skeleton/posec3d/README.md +++ b/configs/skeleton/posec3d/README.md @@ -125,7 +125,7 @@ python tools/train.py configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoin For training with your custom dataset, you can refer to [Custom Dataset Training](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md). -For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). ## Test @@ -143,4 +143,4 @@ python tools/test.py configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint --out result.pkl ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). diff --git a/configs/skeleton/posec3d/README_zh-CN.md b/configs/skeleton/posec3d/README_zh-CN.md index 734adb9213..4c4cdf8d46 100644 --- a/configs/skeleton/posec3d/README_zh-CN.md +++ b/configs/skeleton/posec3d/README_zh-CN.md @@ -112,7 +112,7 @@ python tools/train.py configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoin 有关自定义数据集上的训练,可以参考 [Custom Dataset Training](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md)。 -更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -130,4 +130,4 @@ python tools/test.py configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint --out result.pkl ``` -更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/configs/skeleton/stgcn/README.md b/configs/skeleton/stgcn/README.md index 6ca35a4e20..98b95a5cf9 100644 --- a/configs/skeleton/stgcn/README.md +++ b/configs/skeleton/stgcn/README.md @@ -60,7 +60,7 @@ python tools/train.py configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py \ --validate --seed 0 --deterministic ``` -For more details, you can refer to **Training setting** part in [getting_started](/docs/en/getting_started.md#training-setting). +For more details, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). 
## Test @@ -78,4 +78,4 @@ python tools/test.py configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py \ --out result.pkl ``` -For more details, you can refer to **Test a dataset** part in [getting_started](/docs/en/getting_started.md#test-a-dataset). +For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). diff --git a/configs/skeleton/stgcn/README_zh-CN.md b/configs/skeleton/stgcn/README_zh-CN.md index 48fc4f6d90..c7e57077cd 100644 --- a/configs/skeleton/stgcn/README_zh-CN.md +++ b/configs/skeleton/stgcn/README_zh-CN.md @@ -49,7 +49,7 @@ python tools/train.py configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py \ --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 ## 如何测试 @@ -67,4 +67,4 @@ python tools/test.py configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py \ --out result.pkl ``` -更多测试细节,可参考 [基础教程](/docs/zh_cn/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 diff --git a/demo/mmaction2_tutorial.ipynb b/demo/mmaction2_tutorial.ipynb index 72c0639b27..14441ab79b 100644 --- a/demo/mmaction2_tutorial.ipynb +++ b/demo/mmaction2_tutorial.ipynb @@ -430,7 +430,7 @@ "source": [ "### Support a new dataset\n", "\n", - "In this tutorial, we gives an example to convert the data into the format of existing datasets. Other methods and more advanced usages can be found in the [doc](/docs/en/tutorials/new_dataset.md)\n", + "In this tutorial, we gives an example to convert the data into the format of existing datasets. Other methods and more advanced usages can be found in the [doc](/docs/tutorials/new_dataset.md)\n", "\n", "Firstly, let's download a tiny dataset obtained from [Kinetics-400](https://deepmind.com/research/open-source/open-source-datasets/kinetics/). We select 30 videos with their labels as train dataset and 10 videos with their labels as test dataset." 
]
diff --git a/demo/mmaction2_tutorial_zh-CN.ipynb b/demo/mmaction2_tutorial_zh-CN.ipynb
index c5b545893b..28940ce931 100644
--- a/demo/mmaction2_tutorial_zh-CN.ipynb
+++ b/demo/mmaction2_tutorial_zh-CN.ipynb
@@ -405,7 +405,7 @@
"source": [
"### 支持新数据集\n",
"\n",
- "这里我们给出将数据转换为已有数据集格式的示例。其他方法可以参考[doc](/docs/en/tutorials/new_dataset.md)\n",
+ "这里我们给出将数据转换为已有数据集格式的示例。其他方法可以参考[doc](/docs/tutorials/new_dataset.md)\n",
"\n",
"用到的是一个从[Kinetics-400](https://deepmind.com/research/open-source/open-source-datasets/kinetics/)中获取的tiny数据集。包含30个训练视频,10个测试视频。"
]
diff --git a/docs/en/Makefile b/docs/Makefile
similarity index 100%
rename from docs/en/Makefile
rename to docs/Makefile
diff --git a/docs/en/_static/css/readthedocs.css b/docs/_static/css/readthedocs.css
similarity index 100%
rename from docs/en/_static/css/readthedocs.css
rename to docs/_static/css/readthedocs.css
diff --git a/docs/en/_static/images/mmaction2.png b/docs/_static/images/mmaction2.png
similarity index 100%
rename from docs/en/_static/images/mmaction2.png
rename to docs/_static/images/mmaction2.png
diff --git a/docs/en/api.rst b/docs/api.rst
similarity index 100%
rename from docs/en/api.rst
rename to docs/api.rst
diff --git a/docs/en/benchmark.md b/docs/benchmark.md
similarity index 100%
rename from docs/en/benchmark.md
rename to docs/benchmark.md
diff --git a/docs/en/changelog.md b/docs/changelog.md
similarity index 100%
rename from docs/en/changelog.md
rename to docs/changelog.md
diff --git a/docs/en/conf.py b/docs/conf.py
similarity index 97%
rename from docs/en/conf.py
rename to docs/conf.py
index 40bc92fc8d..3248b1f326 100644
--- a/docs/en/conf.py
+++ b/docs/conf.py
@@ -17,14 +17,14 @@
import pytorch_sphinx_theme
-sys.path.insert(0, os.path.abspath('../..'))
+sys.path.insert(0, os.path.abspath('..'))
# -- Project information -----------------------------------------------------
project = 'MMAction2'
copyright = '2020, OpenMMLab'
author = 'MMAction2 Authors'
-version_file = '../../mmaction/version.py'
+version_file = '../mmaction/version.py'
def get_version():
diff --git a/docs/en/data_preparation.md b/docs/data_preparation.md
similarity index 100%
rename from docs/en/data_preparation.md
rename to docs/data_preparation.md
diff --git a/docs/en/faq.md b/docs/faq.md
similarity index 98%
rename from docs/en/faq.md
rename to docs/faq.md
index 583cdc31af..0a462b7b8a 100644
--- a/docs/en/faq.md
+++ b/docs/faq.md
@@ -22,7 +22,7 @@ If the contents here do not cover your issue, please create an issue using the [
- **"OSError: MoviePy Error: creation of None failed because of the following error"**
- Refer to [install.md](https://github.com/open-mmlab/mmaction2/blob/master/docs/en/install.md#requirements)
+ Refer to [install.md](https://github.com/open-mmlab/mmaction2/blob/master/docs/install.md#requirements)
1. For Windows users, [ImageMagick](https://www.imagemagick.org/script/index.php) will not be automatically detected by MoviePy, there is a need to modify `moviepy/config_defaults.py` file by providing the path to the ImageMagick binary called `magick`, like `IMAGEMAGICK_BINARY = "C:\\Program Files\\ImageMagick_VERSION\\magick.exe"` 2. For Linux users, there is a need to modify the `/etc/ImageMagick-6/policy.xml` file by commenting out `<policy domain="path" rights="none" pattern="@*" />` to `<!-- <policy domain="path" rights="none" pattern="@*" /> -->`, if ImageMagick is not detected by moviepy.
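The FAQ entry just above gives the Linux ImageMagick fix in prose. One way to apply it programmatically is sketched below; the policy line is quoted from the FAQ, but the script itself is an illustration (it needs root privileges) and not part of this patch.

```python
# Comment out the restrictive ImageMagick path policy so MoviePy can call
# the binary. Illustrative sketch; requires write access to the file.
from pathlib import Path

policy = Path('/etc/ImageMagick-6/policy.xml')
line = '<policy domain="path" rights="none" pattern="@*" />'
text = policy.read_text()
if line in text:
    policy.write_text(text.replace(line, f'<!-- {line} -->'))
```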
@@ -56,7 +56,7 @@ If the contents here do not cover your issue, please create an issue using the [ - **How to just use trained recognizer models for backbone pre-training?** - Refer to [Use Pre-Trained Model](https://github.com/open-mmlab/mmaction2/blob/master/docs/en/tutorials/2_finetune.md#use-pre-trained-model), + Refer to [Use Pre-Trained Model](https://github.com/open-mmlab/mmaction2/blob/master/docs/tutorials/2_finetune.md#use-pre-trained-model), in order to use the pre-trained model for the whole network, the new config adds the link of pre-trained models in the `load_from`. And to use backbone for pre-training, you can change `pretrained` value in the backbone dict of config files to the checkpoint path / url. @@ -106,7 +106,7 @@ If the contents here do not cover your issue, please create an issue using the [ - **How to set `load_from` value in config files to finetune models?** - In MMAction2, We set `load_from=None` as default in `configs/_base_/default_runtime.py` and owing to [inheritance design](/docs/en/tutorials/1_config.md), + In MMAction2, We set `load_from=None` as default in `configs/_base_/default_runtime.py` and owing to [inheritance design](/docs/tutorials/1_config.md), users can directly change it by setting `load_from` in their configs. ## Testing diff --git a/docs/en/feature_extraction.md b/docs/feature_extraction.md similarity index 100% rename from docs/en/feature_extraction.md rename to docs/feature_extraction.md diff --git a/docs/en/getting_started.md b/docs/getting_started.md similarity index 100% rename from docs/en/getting_started.md rename to docs/getting_started.md diff --git a/docs/en/index.rst b/docs/index.rst similarity index 100% rename from docs/en/index.rst rename to docs/index.rst diff --git a/docs/en/install.md b/docs/install.md similarity index 100% rename from docs/en/install.md rename to docs/install.md diff --git a/docs/en/make.bat b/docs/make.bat similarity index 100% rename from docs/en/make.bat rename to docs/make.bat diff --git a/docs/en/merge_docs.sh b/docs/merge_docs.sh similarity index 100% rename from docs/en/merge_docs.sh rename to docs/merge_docs.sh diff --git a/docs/en/projects.md b/docs/projects.md similarity index 100% rename from docs/en/projects.md rename to docs/projects.md diff --git a/docs/en/stat.py b/docs/stat.py similarity index 100% rename from docs/en/stat.py rename to docs/stat.py diff --git a/docs/en/supported_datasets.md b/docs/supported_datasets.md similarity index 100% rename from docs/en/supported_datasets.md rename to docs/supported_datasets.md diff --git a/docs/en/switch_language.md b/docs/switch_language.md similarity index 100% rename from docs/en/switch_language.md rename to docs/switch_language.md diff --git a/docs/en/tutorials/1_config.md b/docs/tutorials/1_config.md similarity index 100% rename from docs/en/tutorials/1_config.md rename to docs/tutorials/1_config.md diff --git a/docs/en/tutorials/2_finetune.md b/docs/tutorials/2_finetune.md similarity index 96% rename from docs/en/tutorials/2_finetune.md rename to docs/tutorials/2_finetune.md index 91d075f3c3..f29263601e 100644 --- a/docs/en/tutorials/2_finetune.md +++ b/docs/tutorials/2_finetune.md @@ -91,7 +91,7 @@ checkpoint_config = dict(interval=5) ## Use Pre-Trained Model To use the pre-trained model for the whole network, the new config adds the link of pre-trained models in the `load_from`. 
-We set `load_from=None` as default in `configs/_base_/default_runtime.py` and owing to [inheritance design](/docs/en/tutorials/1_config.md), users can directly change it by setting `load_from` in their configs. +We set `load_from=None` as default in `configs/_base_/default_runtime.py` and owing to [inheritance design](/docs/tutorials/1_config.md), users can directly change it by setting `load_from` in their configs. ```python # use the pre-trained model for the whole TSN network diff --git a/docs/en/tutorials/3_new_dataset.md b/docs/tutorials/3_new_dataset.md similarity index 100% rename from docs/en/tutorials/3_new_dataset.md rename to docs/tutorials/3_new_dataset.md diff --git a/docs/en/tutorials/4_data_pipeline.md b/docs/tutorials/4_data_pipeline.md similarity index 100% rename from docs/en/tutorials/4_data_pipeline.md rename to docs/tutorials/4_data_pipeline.md diff --git a/docs/en/tutorials/5_new_modules.md b/docs/tutorials/5_new_modules.md similarity index 100% rename from docs/en/tutorials/5_new_modules.md rename to docs/tutorials/5_new_modules.md diff --git a/docs/en/tutorials/6_export_model.md b/docs/tutorials/6_export_model.md similarity index 100% rename from docs/en/tutorials/6_export_model.md rename to docs/tutorials/6_export_model.md diff --git a/docs/en/tutorials/7_customize_runtime.md b/docs/tutorials/7_customize_runtime.md similarity index 100% rename from docs/en/tutorials/7_customize_runtime.md rename to docs/tutorials/7_customize_runtime.md diff --git a/docs/en/useful_tools.md b/docs/useful_tools.md similarity index 100% rename from docs/en/useful_tools.md rename to docs/useful_tools.md diff --git a/docs/zh_cn/Makefile b/docs_zh_CN/Makefile similarity index 100% rename from docs/zh_cn/Makefile rename to docs_zh_CN/Makefile diff --git a/docs/zh_cn/README.md b/docs_zh_CN/README.md similarity index 100% rename from docs/zh_cn/README.md rename to docs_zh_CN/README.md diff --git a/docs/zh_cn/api.rst b/docs_zh_CN/api.rst similarity index 100% rename from docs/zh_cn/api.rst rename to docs_zh_CN/api.rst diff --git a/docs/zh_cn/benchmark.md b/docs_zh_CN/benchmark.md similarity index 100% rename from docs/zh_cn/benchmark.md rename to docs_zh_CN/benchmark.md diff --git a/docs/zh_cn/conf.py b/docs_zh_CN/conf.py similarity index 97% rename from docs/zh_cn/conf.py rename to docs_zh_CN/conf.py index fe1b066f29..7949166dc9 100644 --- a/docs/zh_cn/conf.py +++ b/docs_zh_CN/conf.py @@ -17,14 +17,14 @@ import pytorch_sphinx_theme -sys.path.insert(0, os.path.abspath('../../')) +sys.path.insert(0, os.path.abspath('..')) # -- Project information ----------------------------------------------------- project = 'MMAction2' copyright = '2020, OpenMMLab' author = 'MMAction2 Authors' -version_file = '../../mmaction/version.py' +version_file = '../mmaction/version.py' def get_version(): diff --git a/docs/zh_cn/data_preparation.md b/docs_zh_CN/data_preparation.md similarity index 100% rename from docs/zh_cn/data_preparation.md rename to docs_zh_CN/data_preparation.md diff --git a/docs/zh_cn/demo.md b/docs_zh_CN/demo.md similarity index 100% rename from docs/zh_cn/demo.md rename to docs_zh_CN/demo.md diff --git a/docs/zh_cn/faq.md b/docs_zh_CN/faq.md similarity index 98% rename from docs/zh_cn/faq.md rename to docs_zh_CN/faq.md index 1a4f722f33..2f328792f0 100644 --- a/docs/zh_cn/faq.md +++ b/docs_zh_CN/faq.md @@ -13,7 +13,7 @@ - **"OSError: MoviePy Error: creation of None failed because of the following error"** - 参照 [MMAction2 
安装文档](https://github.com/open-mmlab/mmaction2/blob/master/docs/zh_cn/install.md#安装依赖包)
+ 参照 [MMAction2 安装文档](https://github.com/open-mmlab/mmaction2/blob/master/docs_zh_CN/install.md#安装依赖包)
1. 对于 Windows 用户,[ImageMagick](https://www.imagemagick.org/script/index.php) 不再被 MoviePy 自动检测,
需要获取名为 `magick` 的 ImageMagick 二进制包的路径,来修改 `moviepy/config_defaults.py` 文件中的 `IMAGEMAGICK_BINARY`,如 `IMAGEMAGICK_BINARY = "C:\\Program Files\\ImageMagick_VERSION\\magick.exe"`
2. 对于 Linux 用户,如果 ImageMagick 没有被 moviepy 检测,需要注释掉 `/etc/ImageMagick-6/policy.xml` 文件中的 `<policy domain="path" rights="none" pattern="@*" />`,即改为 `<!-- <policy domain="path" rights="none" pattern="@*" /> -->`。
@@ -48,7 +48,7 @@
- **如何使用训练过的识别器作为主干网络的预训练模型?**
- 参照 [使用预训练模型](https://github.com/open-mmlab/mmaction2/blob/master/docs/zh_cn/tutorials/2_finetune.md#使用预训练模型),
+ 参照 [使用预训练模型](https://github.com/open-mmlab/mmaction2/blob/master/docs_zh_CN/tutorials/2_finetune.md#使用预训练模型),
如果想对整个网络使用预训练模型,可以在配置文件中,将 `load_from` 设置为预训练模型的链接。
如果只想对主干网络使用预训练模型,可以在配置文件中,将主干网络 `backbone` 中的 `pretrained` 设置为预训练模型的地址或链接。
diff --git a/docs/zh_cn/feature_extraction.md b/docs_zh_CN/feature_extraction.md
similarity index 100%
rename from docs/zh_cn/feature_extraction.md
rename to docs_zh_CN/feature_extraction.md
diff --git a/docs/zh_cn/getting_started.md b/docs_zh_CN/getting_started.md
similarity index 100%
rename from docs/zh_cn/getting_started.md
rename to docs_zh_CN/getting_started.md
diff --git a/docs/zh_cn/index.rst b/docs_zh_CN/index.rst
similarity index 100%
rename from docs/zh_cn/index.rst
rename to docs_zh_CN/index.rst
diff --git a/docs/zh_cn/install.md b/docs_zh_CN/install.md
similarity index 100%
rename from docs/zh_cn/install.md
rename to docs_zh_CN/install.md
diff --git a/docs/zh_cn/make.bat b/docs_zh_CN/make.bat
similarity index 100%
rename from docs/zh_cn/make.bat
rename to docs_zh_CN/make.bat
diff --git a/docs/zh_cn/merge_docs.sh b/docs_zh_CN/merge_docs.sh
similarity index 89%
rename from docs/zh_cn/merge_docs.sh
rename to docs_zh_CN/merge_docs.sh
index 187b8bd419..1265731a97 100755
--- a/docs/zh_cn/merge_docs.sh
+++ b/docs_zh_CN/merge_docs.sh
@@ -1,10 +1,10 @@
#!/usr/bin/env bash
# gather models
-cat ../configs/localization/*/README_zh-CN.md | sed "s/md#测/html#测/g" | sed "s/md#训/html#训/g" | sed "s/#/#&/" | sed '1i\# 时序动作检测模型' | sed 's/](\/docs/zh_cn\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' | sed "s/getting_started.html##/getting_started.html#/g" > localization_models.md
-cat ../configs/recognition/*/README_zh-CN.md | sed "s/md#测/html#t测/g" | sed "s/md#训/html#训/g" | sed "s/#/#&/" | sed '1i\# 动作识别模型' | sed 's/](\/docs/zh_cn\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g'| sed "s/getting_started.html##/getting_started.html#/g" > recognition_models.md
-cat ../configs/recognition_audio/*/README_zh-CN.md | sed "s/md#测/html#测/g" | sed "s/md#训/html#训/g" | sed "s/#/#&/" | sed 's/](\/docs/zh_cn\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g'| sed "s/getting_started.html##/getting_started.html#/g" >> recognition_models.md
-cat ../configs/detection/*/README_zh-CN.md | sed "s/md#测/html#测/g" | sed "s/md#训/html#训/g" | sed "s/#/#&/" | sed '1i\# 时空动作检测模型' | sed 's/](\/docs/zh_cn\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g'| sed "s/getting_started.html##/getting_started.html#/g" > detection_models.md
-cat ../configs/skeleton/*/README_zh-CN.md | sed "s/md#测/html#测/g" | sed "s/md#训/html#训/g" | sed "s/#/#&/" | sed '1i\# 骨骼动作识别模型' | sed 's/](\/docs/zh_cn\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g'| sed
"s/getting_started.html##/getting_started.html#/g" > skeleton_models.md +cat ../configs/localization/*/README_zh-CN.md | sed "s/md#测/html#测/g" | sed "s/md#训/html#训/g" | sed "s/#/#&/" | sed '1i\# 时序动作检测模型' | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' | sed "s/getting_started.html##/getting_started.html#/g" > localization_models.md +cat ../configs/recognition/*/README_zh-CN.md | sed "s/md#测/html#t测/g" | sed "s/md#训/html#训/g" | sed "s/#/#&/" | sed '1i\# 动作识别模型' | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g'| sed "s/getting_started.html##/getting_started.html#/g" > recognition_models.md +cat ../configs/recognition_audio/*/README_zh-CN.md | sed "s/md#测/html#测/g" | sed "s/md#训/html#训/g" | sed "s/#/#&/" | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g'| sed "s/getting_started.html##/getting_started.html#/g" >> recognition_models.md +cat ../configs/detection/*/README_zh-CN.md | sed "s/md#测/html#测/g" | sed "s/md#训/html#训/g" | sed "s/#/#&/" | sed '1i\# 时空动作检测模型' | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g'| sed "s/getting_started.html##/getting_started.html#/g" > detection_models.md +cat ../configs/skeleton/*/README_zh-CN.md | sed "s/md#测/html#测/g" | sed "s/md#训/html#训/g" | sed "s/#/#&/" | sed '1i\# 骨骼动作识别模型' | sed 's/](\/docs_zh_CN\//](/g' | sed 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g'| sed "s/getting_started.html##/getting_started.html#/g" > skeleton_models.md # gather datasets cat ../tools/data/*/README_zh-CN.md | sed 's/# 准备/# /g' | sed 's/#/#&/' > prepare_data.md @@ -29,7 +29,7 @@ sed -i 's/(\/tools\/data\/diving48\/README_zh-CN.md/(#diving48/g' supported_data sed -i 's/(\/tools\/data\/skeleton\/README_zh-CN.md/(#skeleton/g' supported_datasets.md cat prepare_data.md >> supported_datasets.md -sed -i 's/](\/docs/zh_cn\//](/g' supported_datasets.md +sed -i 's/](\/docs_zh_CN\//](/g' supported_datasets.md sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' supported_datasets.md sed -i "s/md###t/html#t/g" demo.md @@ -37,5 +37,5 @@ sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' demo.md sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' benchmark.md sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' getting_started.md sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' install.md -sed -i 's/](\/docs/zh_cn\//](/g' ./tutorials/*.md +sed -i 's/](\/docs_zh_CN\//](/g' ./tutorials/*.md sed -i 's=](/=](https://github.com/open-mmlab/mmaction2/tree/master/=g' ./tutorials/*.md diff --git a/docs/zh_cn/stat.py b/docs_zh_CN/stat.py similarity index 100% rename from docs/zh_cn/stat.py rename to docs_zh_CN/stat.py diff --git a/docs/zh_cn/supported_datasets.md b/docs_zh_CN/supported_datasets.md similarity index 100% rename from docs/zh_cn/supported_datasets.md rename to docs_zh_CN/supported_datasets.md diff --git a/docs/zh_cn/switch_language.md b/docs_zh_CN/switch_language.md similarity index 100% rename from docs/zh_cn/switch_language.md rename to docs_zh_CN/switch_language.md diff --git a/docs/zh_cn/tutorials/1_config.md b/docs_zh_CN/tutorials/1_config.md similarity index 100% rename from docs/zh_cn/tutorials/1_config.md rename to docs_zh_CN/tutorials/1_config.md diff --git a/docs/zh_cn/tutorials/2_finetune.md b/docs_zh_CN/tutorials/2_finetune.md similarity index 
100% rename from docs/zh_cn/tutorials/2_finetune.md rename to docs_zh_CN/tutorials/2_finetune.md diff --git a/docs/zh_cn/tutorials/3_new_dataset.md b/docs_zh_CN/tutorials/3_new_dataset.md similarity index 100% rename from docs/zh_cn/tutorials/3_new_dataset.md rename to docs_zh_CN/tutorials/3_new_dataset.md diff --git a/docs/zh_cn/tutorials/4_data_pipeline.md b/docs_zh_CN/tutorials/4_data_pipeline.md similarity index 100% rename from docs/zh_cn/tutorials/4_data_pipeline.md rename to docs_zh_CN/tutorials/4_data_pipeline.md diff --git a/docs/zh_cn/tutorials/5_new_modules.md b/docs_zh_CN/tutorials/5_new_modules.md similarity index 100% rename from docs/zh_cn/tutorials/5_new_modules.md rename to docs_zh_CN/tutorials/5_new_modules.md diff --git a/docs/zh_cn/tutorials/6_export_model.md b/docs_zh_CN/tutorials/6_export_model.md similarity index 100% rename from docs/zh_cn/tutorials/6_export_model.md rename to docs_zh_CN/tutorials/6_export_model.md diff --git a/docs/zh_cn/tutorials/7_customize_runtime.md b/docs_zh_CN/tutorials/7_customize_runtime.md similarity index 100% rename from docs/zh_cn/tutorials/7_customize_runtime.md rename to docs_zh_CN/tutorials/7_customize_runtime.md diff --git a/docs/zh_cn/useful_tools.md b/docs_zh_CN/useful_tools.md similarity index 100% rename from docs/zh_cn/useful_tools.md rename to docs_zh_CN/useful_tools.md diff --git a/tools/data/activitynet/README.md b/tools/data/activitynet/README.md index 8c36a9eec9..f3286f6fc1 100644 --- a/tools/data/activitynet/README.md +++ b/tools/data/activitynet/README.md @@ -78,7 +78,7 @@ For this case, the downloading scripts update the annotation file after download ### Step 3. Extract RGB and Flow -Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). Use following scripts to extract both RGB and Flow. @@ -87,7 +87,7 @@ bash extract_frames.sh ``` The command above can generate images with new short edge 256. If you want to generate images with short edge 320 (320p), or with fix size 340x256, you can change the args `--new-short 256` to `--new-short 320` or `--new-width 340 --new-height 256`. -More details can be found in [data_preparation](/docs/en/data_preparation.md) +More details can be found in [data_preparation](/docs/data_preparation.md) ### Step 4. Generate File List for ActivityNet Finetuning @@ -168,4 +168,4 @@ mmaction2 ``` -For training and evaluating on ActivityNet, please refer to [getting_started.md](/docs/en/getting_started.md). +For training and evaluating on ActivityNet, please refer to [getting_started.md](/docs/getting_started.md). diff --git a/tools/data/activitynet/README_zh-CN.md b/tools/data/activitynet/README_zh-CN.md index 5007d0a2ad..7687b948db 100644 --- a/tools/data/activitynet/README_zh-CN.md +++ b/tools/data/activitynet/README_zh-CN.md @@ -78,7 +78,7 @@ bash download_bsn_videos.sh ### 步骤 3. 
抽取 RGB 帧和光流 -在抽取视频帧和光流之前,请参考 [安装指南](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在抽取视频帧和光流之前,请参考 [安装指南](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 可使用以下命令抽取视频帧和光流。 @@ -87,7 +87,7 @@ bash extract_frames.sh ``` 以上脚本将会生成短边 256 分辨率的视频。如果用户想生成短边 320 分辨率的视频(即 320p),或者 340x256 的固定分辨率,用户可以通过改变参数由 `--new-short 256` 至 `--new-short 320`,或者 `--new-width 340 --new-height 256` 进行设置 -更多细节可参考 [数据准备指南](/docs/zh_cn/data_preparation.md) +更多细节可参考 [数据准备指南](/docs_zh_CN/data_preparation.md) ### 步骤 4. 生成用于 ActivityNet 微调的文件列表 @@ -166,4 +166,4 @@ mmaction2 ``` -关于对 ActivityNet 进行训练和验证,可以参考 [基础教程](/docs/zh_cn/getting_started.md). +关于对 ActivityNet 进行训练和验证,可以参考 [基础教程](/docs_zh_CN/getting_started.md). diff --git a/tools/data/ava/README.md b/tools/data/ava/README.md index 4e297716de..a416eb2632 100644 --- a/tools/data/ava/README.md +++ b/tools/data/ava/README.md @@ -64,7 +64,7 @@ bash cut_videos.sh ## Step 4. Extract RGB and Flow -Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). If you have plenty of SSD space, then we recommend extracting frames there for better I/O performance. And you can run the following script to soft link the extracted frames. @@ -141,7 +141,7 @@ mmaction2 | │ │ │ ├── ... ``` -For training and evaluating on AVA, please refer to [getting_started](/docs/en/getting_started.md). +For training and evaluating on AVA, please refer to [getting_started](/docs/getting_started.md). ## Reference diff --git a/tools/data/ava/README_zh-CN.md b/tools/data/ava/README_zh-CN.md index 6a922f5e1b..5a7b96da88 100644 --- a/tools/data/ava/README_zh-CN.md +++ b/tools/data/ava/README_zh-CN.md @@ -56,7 +56,7 @@ bash cut_videos.sh ## 4. 提取 RGB 帧和光流 -在提取之前,请参考 [安装教程](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在提取之前,请参考 [安装教程](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 如果用户有足够的 SSD 空间,那么建议将视频抽取为 RGB 帧以提升 I/O 性能。用户可以使用以下脚本为抽取得到的帧文件夹建立软连接: @@ -131,4 +131,4 @@ mmaction2 | │ │ │ ├── ... ``` -关于 AVA 数据集上的训练与测试,请参照 [基础教程](/docs/zh_cn/getting_started.md)。 +关于 AVA 数据集上的训练与测试,请参照 [基础教程](/docs_zh_CN/getting_started.md)。 diff --git a/tools/data/diving48/README.md b/tools/data/diving48/README.md index 1cbdbcdb27..588cddd173 100644 --- a/tools/data/diving48/README.md +++ b/tools/data/diving48/README.md @@ -39,7 +39,7 @@ This part is **optional** if you only want to use the video loader. The frames provided in official compressed file are not complete. You may need to go through the following extraction steps to get the complete frames. -Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). If you have plenty of SSD space, then we recommend extracting frames there for better I/O performance. @@ -120,4 +120,4 @@ mmaction2 │ | | ├── ... ``` -For training and evaluating on Diving48, please refer to [getting_started.md](/docs/en/getting_started.md). +For training and evaluating on Diving48, please refer to [getting_started.md](/docs/getting_started.md). 
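Several of the data-preparation guides touched here (ActivityNet, AVA, Diving48) recommend extracting frames to an SSD and soft-linking the directory back into the repository for better I/O. A minimal sketch of that pattern follows; both paths are placeholders, not values from the docs.

```python
# Soft-link pattern from the dataset guides: frames live on fast storage,
# the repo sees them under data/. Paths are placeholders.
import os
from pathlib import Path

ssd_frames = Path('/mnt/SSD/diving48_extracted/rawframes')
repo_link = Path('data/diving48/rawframes')

ssd_frames.mkdir(parents=True, exist_ok=True)
repo_link.parent.mkdir(parents=True, exist_ok=True)
if repo_link.is_symlink():
    repo_link.unlink()
os.symlink(ssd_frames, repo_link, target_is_directory=True)
```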
diff --git a/tools/data/diving48/README_zh-CN.md b/tools/data/diving48/README_zh-CN.md index 3210d06b9d..e91f8729a5 100644 --- a/tools/data/diving48/README_zh-CN.md +++ b/tools/data/diving48/README_zh-CN.md @@ -39,7 +39,7 @@ bash download_videos.sh 官网提供的帧压缩包并不完整。若想获取完整的数据,可以使用以下步骤解帧。 -在抽取视频帧和光流之前,请参考 [安装指南](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在抽取视频帧和光流之前,请参考 [安装指南](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 如果拥有大量的 SSD 存储空间,则推荐将抽取的帧存储至 I/O 性能更优秀的 SSD 中。 @@ -120,4 +120,4 @@ mmaction2 │ | | ├── ... ``` -关于对 Diving48 进行训练和验证,可以参考 [基础教程](/docs/zh_cn/getting_started.md)。 +关于对 Diving48 进行训练和验证,可以参考 [基础教程](/docs_zh_CN/getting_started.md)。 diff --git a/tools/data/gym/README.md b/tools/data/gym/README.md index 22b09f66f9..a39eda6fd4 100644 --- a/tools/data/gym/README.md +++ b/tools/data/gym/README.md @@ -55,7 +55,7 @@ python trim_subaction.py This part is **optional** if you only want to use the video loader for RGB model training. -Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). Run the following script to extract both rgb and flow using "tvl1" algorithm. @@ -106,4 +106,4 @@ mmaction2 | | └── subaction_frames ``` -For training and evaluating on GYM, please refer to [getting_started](/docs/en/getting_started.md). +For training and evaluating on GYM, please refer to [getting_started](/docs/getting_started.md). diff --git a/tools/data/gym/README_zh-CN.md b/tools/data/gym/README_zh-CN.md index 9fff9dd20a..cb3a796ec7 100644 --- a/tools/data/gym/README_zh-CN.md +++ b/tools/data/gym/README_zh-CN.md @@ -55,7 +55,7 @@ python trim_subaction.py 如果用户仅使用 video loader,则可以跳过本步。 -在提取之前,请参考 [安装教程](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在提取之前,请参考 [安装教程](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 用户可使用如下脚本同时抽取 RGB 帧和光流(提取光流时使用 tvl1 算法): @@ -106,4 +106,4 @@ mmaction2 | | └── subaction_frames ``` -关于 GYM 数据集上的训练与测试,请参照 [基础教程](/docs/zh_cn/getting_started.md)。 +关于 GYM 数据集上的训练与测试,请参照 [基础教程](/docs_zh_CN/getting_started.md)。 diff --git a/tools/data/hmdb51/README.md b/tools/data/hmdb51/README.md index f003e58b97..206b548764 100644 --- a/tools/data/hmdb51/README.md +++ b/tools/data/hmdb51/README.md @@ -41,7 +41,7 @@ bash download_videos.sh This part is **optional** if you only want to use the video loader. -Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). If you have plenty of SSD space, then we recommend extracting frames there for better I/O performance. @@ -122,4 +122,4 @@ mmaction2 ``` -For training and evaluating on HMDB51, please refer to [getting_started.md](/docs/en/getting_started.md). +For training and evaluating on HMDB51, please refer to [getting_started.md](/docs/getting_started.md). 
diff --git a/tools/data/hmdb51/README_zh-CN.md b/tools/data/hmdb51/README_zh-CN.md index f82f397b5c..a34c4b9ce9 100644 --- a/tools/data/hmdb51/README_zh-CN.md +++ b/tools/data/hmdb51/README_zh-CN.md @@ -39,7 +39,7 @@ bash download_videos.sh 如果用户只想使用视频加载训练,则该部分是 **可选项**。 -在抽取视频帧和光流之前,请参考 [安装指南](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在抽取视频帧和光流之前,请参考 [安装指南](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 如果用户有大量的 SSD 存储空间,则推荐将抽取的帧存储至 I/O 性能更优秀的 SSD 上。 用户可使用以下命令为 SSD 建立软链接。 @@ -118,4 +118,4 @@ mmaction2 ``` -关于对 HMDB51 进行训练和验证,可以参照 [基础教程](/docs/zh_cn/getting_started.md)。 +关于对 HMDB51 进行训练和验证,可以参照 [基础教程](/docs_zh_CN/getting_started.md)。 diff --git a/tools/data/hvu/README.md b/tools/data/hvu/README.md index f668f52788..755e71dbb3 100644 --- a/tools/data/hvu/README.md +++ b/tools/data/hvu/README.md @@ -43,7 +43,7 @@ bash download_videos.sh This part is **optional** if you only want to use the video loader. -Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). You can use the following script to extract both RGB and Flow frames. @@ -52,7 +52,7 @@ bash extract_frames.sh ``` By default, we generate frames with short edge resized to 256. -More details can be found in [data_preparation](/docs/en/data_preparation.md) +More details can be found in [data_preparation](/docs/data_preparation.md) ## Step 4. Generate File List @@ -120,4 +120,4 @@ mmaction2 ``` -For training and evaluating on HVU, please refer to [getting_started](/docs/en/getting_started.md). +For training and evaluating on HVU, please refer to [getting_started](/docs/getting_started.md). diff --git a/tools/data/hvu/README_zh-CN.md b/tools/data/hvu/README_zh-CN.md index a83f85c571..5b3ffa1ea3 100644 --- a/tools/data/hvu/README_zh-CN.md +++ b/tools/data/hvu/README_zh-CN.md @@ -43,7 +43,7 @@ bash download_videos.sh 如果用户仅使用 video loader,则可以跳过本步。 -在提取之前,请参考 [安装教程](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在提取之前,请参考 [安装教程](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 用户可使用如下脚本同时抽取 RGB 帧和光流: @@ -51,7 +51,7 @@ bash download_videos.sh bash extract_frames.sh ``` -该脚本默认生成短边长度为 256 的帧,可参考 [数据准备](/docs/zh_cn/data_preparation.md) 获得更多细节。 +该脚本默认生成短边长度为 256 的帧,可参考 [数据准备](/docs_zh_CN/data_preparation.md) 获得更多细节。 ## 4. 生成文件列表 @@ -107,4 +107,4 @@ mmaction2 ``` -关于 HVU 数据集上的训练与测试,请参照 [基础教程](/docs/zh_cn/getting_started.md)。 +关于 HVU 数据集上的训练与测试,请参照 [基础教程](/docs_zh_CN/getting_started.md)。 diff --git a/tools/data/jester/README.md b/tools/data/jester/README.md index 26161e78bb..2e054ab33d 100644 --- a/tools/data/jester/README.md +++ b/tools/data/jester/README.md @@ -64,7 +64,7 @@ data = dict( This part is **optional** if you only want to use RGB frames. -Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). If you have plenty of SSD space, then we recommend extracting frames there for better I/O performance. @@ -140,4 +140,4 @@ mmaction2 ``` -For training and evaluating on Jester, please refer to [getting_started.md](/docs/en/getting_started.md). 
+For training and evaluating on Jester, please refer to [getting_started.md](/docs/getting_started.md). diff --git a/tools/data/jester/README_zh-CN.md b/tools/data/jester/README_zh-CN.md index 86f37badf2..4b3fb17f0b 100644 --- a/tools/data/jester/README_zh-CN.md +++ b/tools/data/jester/README_zh-CN.md @@ -64,7 +64,7 @@ data = dict( 如果用户只想使用 RGB 帧训练,则该部分是 **可选项**。 -在抽取视频帧和光流之前,请参考 [安装指南](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在抽取视频帧和光流之前,请参考 [安装指南](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 如果拥有大量的 SSD 存储空间,则推荐将抽取的帧存储至 I/O 性能更优秀的 SSD 中。 @@ -140,4 +140,4 @@ mmaction2 ``` -关于对 jester 进行训练和验证,可以参考 [基础教程](/docs/zh_cn/getting_started.md)。 +关于对 jester 进行训练和验证,可以参考 [基础教程](/docs_zh_CN/getting_started.md)。 diff --git a/tools/data/kinetics/README.md b/tools/data/kinetics/README.md index 7351d1b128..725190ee41 100644 --- a/tools/data/kinetics/README.md +++ b/tools/data/kinetics/README.md @@ -72,7 +72,7 @@ You can also download from [Academic Torrents](https://academictorrents.com/) ([ This part is **optional** if you only want to use the video loader. -Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). If you have plenty of SSD space, then we recommend extracting frames there for better I/O performance. And you can run the following script to soft link the extracted frames. @@ -103,7 +103,7 @@ bash extract_frames.sh ${DATASET} ``` The commands above can generate images with new short edge 256. If you want to generate images with short edge 320 (320p), or with fix size 340x256, you can change the args `--new-short 256` to `--new-short 320` or `--new-width 340 --new-height 256`. -More details can be found in [data_preparation](/docs/en/data_preparation.md) +More details can be found in [data_preparation](/docs/data_preparation.md) ## Step 4. Generate File List @@ -147,4 +147,4 @@ mmaction2 ``` -For training and evaluating on Kinetics, please refer to [getting_started](/docs/en/getting_started.md). +For training and evaluating on Kinetics, please refer to [getting_started](/docs/getting_started.md). diff --git a/tools/data/kinetics/README_zh-CN.md b/tools/data/kinetics/README_zh-CN.md index 1fa8741e22..ef49ba8e8a 100644 --- a/tools/data/kinetics/README_zh-CN.md +++ b/tools/data/kinetics/README_zh-CN.md @@ -66,7 +66,7 @@ python ../resize_videos.py ../../../data/${DATASET}/videos_train/ ../../../data/ 如果用户仅使用 video loader,则可以跳过本步。 -在提取之前,请参考 [安装教程](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在提取之前,请参考 [安装教程](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 如果用户有足够的 SSD 空间,那么建议将视频抽取为 RGB 帧以提升 I/O 性能。用户可以使用以下脚本为抽取得到的帧文件夹建立软连接: @@ -97,7 +97,7 @@ bash extract_frames.sh ${DATASET} ``` 以上的命令生成短边长度为 256 的 RGB 帧和光流帧。如果用户需要生成短边长度为 320 的帧 (320p),或是固定分辨率为 340 x 256 的帧,可改变参数 `--new-short 256` 为 `--new-short 320` 或 `--new-width 340 --new-height 256`。 -更多细节可以参考 [数据准备](/docs/zh_cn/data_preparation.md)。 +更多细节可以参考 [数据准备](/docs_zh_CN/data_preparation.md)。 ## 4. 
生成文件列表 @@ -139,4 +139,4 @@ mmaction2 ``` -关于 Kinetics 数据集上的训练与测试,请参照 [基础教程](/docs/zh_cn/getting_started.md)。 +关于 Kinetics 数据集上的训练与测试,请参照 [基础教程](/docs_zh_CN/getting_started.md)。 diff --git a/tools/data/mit/README.md b/tools/data/mit/README.md index 6e4ef0d37d..e67ca45335 100644 --- a/tools/data/mit/README.md +++ b/tools/data/mit/README.md @@ -34,7 +34,7 @@ python ../resize_videos.py ../../../data/mit/videos/ ../../../data/mit/videos_25 This part is **optional** if you only want to use the video loader. -Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). If you have plenty of SSD space, then we recommend extracting frames there for better I/O performance. And you can run the following script to soft link the extracted frames. @@ -125,4 +125,4 @@ mmaction2 ``` -For training and evaluating on Moments in Time, please refer to [getting_started.md](/docs/en/getting_started.md). +For training and evaluating on Moments in Time, please refer to [getting_started.md](/docs/getting_started.md). diff --git a/tools/data/mit/README_zh-CN.md b/tools/data/mit/README_zh-CN.md index 21289e34e1..74a3d0c247 100644 --- a/tools/data/mit/README_zh-CN.md +++ b/tools/data/mit/README_zh-CN.md @@ -36,7 +36,7 @@ python ../resize_videos.py ../../../data/mit/videos/ ../../../data/mit/videos_25 如果用户只想使用视频加载训练,则该部分是 **可选项**。 -在抽取视频帧和光流之前,请参考 [安装指南](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在抽取视频帧和光流之前,请参考 [安装指南](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 如果用户有大量的 SSD 存储空间,则推荐将抽取的帧存储至 I/O 性能更优秀的 SSD 上。 用户可使用以下命令为 SSD 建立软链接。 @@ -127,4 +127,4 @@ mmaction2 ``` -关于对 Moments in Times 进行训练和验证,可以参照 [基础教程](/docs/zh_cn/getting_started.md)。 +关于对 Moments in Times 进行训练和验证,可以参照 [基础教程](/docs_zh_CN/getting_started.md)。 diff --git a/tools/data/mmit/README.md b/tools/data/mmit/README.md index 3f6b618977..5deedf71d0 100644 --- a/tools/data/mmit/README.md +++ b/tools/data/mmit/README.md @@ -32,7 +32,7 @@ python ../resize_videos.py ../../../data/mmit/videos/ ../../../data/mmit/videos_ This part is **optional** if you only want to use the video loader. -Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). First, you can run the following script to soft link SSD. @@ -110,4 +110,4 @@ mmaction2/ └── ... ``` -For training and evaluating on Multi-Moments in Time, please refer to [getting_started.md](/docs/en/getting_started.md). +For training and evaluating on Multi-Moments in Time, please refer to [getting_started.md](/docs/getting_started.md). 
diff --git a/tools/data/mmit/README_zh-CN.md b/tools/data/mmit/README_zh-CN.md index 31d5cddcde..e070505e34 100644 --- a/tools/data/mmit/README_zh-CN.md +++ b/tools/data/mmit/README_zh-CN.md @@ -34,7 +34,7 @@ python ../resize_videos.py ../../../data/mmit/videos/ ../../../data/mmit/videos_ 如果用户只想使用视频加载训练,则该部分是 **可选项**。 -在抽取视频帧和光流之前,请参考 [安装指南](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在抽取视频帧和光流之前,请参考 [安装指南](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 如果用户有大量的 SSD 存储空间,则推荐将抽取的帧存储至 I/O 性能更优秀的 SSD 上。 用户可使用以下命令为 SSD 建立软链接。 @@ -112,4 +112,4 @@ mmaction2/ └── ... ``` -关于对 Multi-Moments in Time 进行训练和验证,可以参照 [基础教程](/docs/zh_cn/getting_started.md)。 +关于对 Multi-Moments in Time 进行训练和验证,可以参照 [基础教程](/docs_zh_CN/getting_started.md)。 diff --git a/tools/data/sthv1/README.md b/tools/data/sthv1/README.md index eb837d435e..75f4c11134 100644 --- a/tools/data/sthv1/README.md +++ b/tools/data/sthv1/README.md @@ -65,7 +65,7 @@ data = dict( This part is **optional** if you only want to use RGB frames. -Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). If you have plenty of SSD space, then we recommend extracting frames there for better I/O performance. @@ -141,4 +141,4 @@ mmaction2 ``` -For training and evaluating on Something-Something V1, please refer to [getting_started.md](/docs/en/getting_started.md). +For training and evaluating on Something-Something V1, please refer to [getting_started.md](/docs/getting_started.md). diff --git a/tools/data/sthv1/README_zh-CN.md b/tools/data/sthv1/README_zh-CN.md index 7506b4ad5c..11cc9318be 100644 --- a/tools/data/sthv1/README_zh-CN.md +++ b/tools/data/sthv1/README_zh-CN.md @@ -63,7 +63,7 @@ data = dict( 如果用户只想使用原 RGB 帧加载训练,则该部分是 **可选项**。 -在抽取视频帧和光流之前,请参考 [安装指南](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在抽取视频帧和光流之前,请参考 [安装指南](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 如果拥有大量的 SSD 存储空间,则推荐将抽取的帧存储至 I/O 性能更优秀的 SSD 中。 @@ -139,4 +139,4 @@ mmaction2 ``` -关于对 Something-Something V1 进行训练和验证,可以参考 [基础教程](/docs/zh_cn/getting_started.md)。 +关于对 Something-Something V1 进行训练和验证,可以参考 [基础教程](/docs_zh_CN/getting_started.md)。 diff --git a/tools/data/sthv2/README.md b/tools/data/sthv2/README.md index ea4c66e270..af112872da 100644 --- a/tools/data/sthv2/README.md +++ b/tools/data/sthv2/README.md @@ -36,7 +36,7 @@ cd $MMACTION2/tools/data/sthv2/ This part is **optional** if you only want to use the video loader. -Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). If you have plenty of SSD space, then we recommend extracting frames there for better I/O performance. @@ -115,4 +115,4 @@ mmaction2 ``` -For training and evaluating on Something-Something V2, please refer to [getting_started.md](/docs/en/getting_started.md). +For training and evaluating on Something-Something V2, please refer to [getting_started.md](/docs/getting_started.md). 
diff --git a/tools/data/sthv2/README_zh-CN.md b/tools/data/sthv2/README_zh-CN.md index 87cd3558f6..7d8080c5a4 100644 --- a/tools/data/sthv2/README_zh-CN.md +++ b/tools/data/sthv2/README_zh-CN.md @@ -36,7 +36,7 @@ cd $MMACTION2/tools/data/sthv2/ 如果用户只想使用视频加载训练,则该部分是 **可选项**。 -在抽取视频帧和光流之前,请参考 [安装指南](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在抽取视频帧和光流之前,请参考 [安装指南](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 如果拥有大量的 SSD 存储空间,则推荐将抽取的帧存储至 I/O 性能更优秀的 SSD 中。 @@ -115,4 +115,4 @@ mmaction2 ``` -关于对 Something-Something V2 进行训练和验证,可以参考 [基础教程](/docs/zh_cn/getting_started.md)。 +关于对 Something-Something V2 进行训练和验证,可以参考 [基础教程](/docs_zh_CN/getting_started.md)。 diff --git a/tools/data/thumos14/README.md b/tools/data/thumos14/README.md index 8b52284951..eaddb60cbe 100644 --- a/tools/data/thumos14/README.md +++ b/tools/data/thumos14/README.md @@ -40,7 +40,7 @@ bash download_videos.sh This part is **optional** if you only want to use the video loader. -Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). If you have plenty of SSD space, then we recommend extracting frames there for better I/O performance. @@ -139,4 +139,4 @@ mmaction2 │ │ │ | ├── video_test_0000001 ``` -For training and evaluating on THUMOS'14, please refer to [getting_started.md](/docs/en/getting_started.md). +For training and evaluating on THUMOS'14, please refer to [getting_started.md](/docs/getting_started.md). diff --git a/tools/data/thumos14/README_zh-CN.md b/tools/data/thumos14/README_zh-CN.md index 05bd862316..fb7140a24e 100644 --- a/tools/data/thumos14/README_zh-CN.md +++ b/tools/data/thumos14/README_zh-CN.md @@ -40,7 +40,7 @@ bash download_videos.sh 如果用户只想使用视频加载训练,则该部分是 **可选项**。 -在抽取视频帧和光流之前,请参考 [安装指南](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在抽取视频帧和光流之前,请参考 [安装指南](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 如果用户有大量的 SSD 存储空间,则推荐将抽取的帧存储至 I/O 性能更优秀的 SSD 上。 用户可使用以下命令为 SSD 建立软链接。 @@ -136,4 +136,4 @@ mmaction2 │ │ │ | ├── video_test_0000001 ``` -关于对 THUMOS'14 进行训练和验证,可以参照 [基础教程](/docs/zh_cn/getting_started.md)。 +关于对 THUMOS'14 进行训练和验证,可以参照 [基础教程](/docs_zh_CN/getting_started.md)。 diff --git a/tools/data/ucf101/README.md b/tools/data/ucf101/README.md index 4d71c1e9f8..abac25f0c7 100644 --- a/tools/data/ucf101/README.md +++ b/tools/data/ucf101/README.md @@ -43,7 +43,7 @@ python ../resize_videos.py ../../../data/ucf101/videos/ ../../../data/ucf101/vid This part is **optional** if you only want to use the video loader. -Before extracting, please refer to [install.md](/docs/en/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). +Before extracting, please refer to [install.md](/docs/install.md) for installing [denseflow](https://github.com/open-mmlab/denseflow). If you have plenty of SSD space, then we recommend extracting frames there for better I/O performance. The extracted frames (RGB + Flow) will take up about 100GB. @@ -124,4 +124,4 @@ mmaction2 ``` -For training and evaluating on UCF-101, please refer to [getting_started.md](/docs/en/getting_started.md). +For training and evaluating on UCF-101, please refer to [getting_started.md](/docs/getting_started.md). 
diff --git a/tools/data/ucf101/README_zh-CN.md b/tools/data/ucf101/README_zh-CN.md index 28c696a059..96e9453ff4 100644 --- a/tools/data/ucf101/README_zh-CN.md +++ b/tools/data/ucf101/README_zh-CN.md @@ -41,7 +41,7 @@ python ../resize_videos.py ../../../data/ucf101/videos/ ../../../data/ucf101/vid 如果用户只想使用视频加载训练,则该部分是 **可选项**。 -在抽取视频帧和光流之前,请参考 [安装指南](/docs/zh_cn/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 +在抽取视频帧和光流之前,请参考 [安装指南](/docs_zh_CN/install.md) 安装 [denseflow](https://github.com/open-mmlab/denseflow)。 如果拥有大量的 SSD 存储空间,则推荐将抽取的帧存储至 I/O 性能更优秀的 SSD 中。所抽取的视频帧和光流约占据 100 GB 的存储空间。 @@ -122,4 +122,4 @@ mmaction2 ``` -关于对 UCF-101 进行训练和验证,可以参考 [基础教程](/docs/zh_cn/getting_started.md)。 +关于对 UCF-101 进行训练和验证,可以参考 [基础教程](/docs_zh_CN/getting_started.md)。 From 69c1606dbdda3a453c9e710da394372832794097 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Wed, 29 Dec 2021 21:50:26 +0800 Subject: [PATCH 324/414] [Fix] Fix --test-best (#1362) * update * update * revert * update --- mmaction/apis/train.py | 28 ++++++++++++++------------- mmaction/core/evaluation/ava_utils.py | 2 +- 2 files changed, 16 insertions(+), 14 deletions(-) diff --git a/mmaction/apis/train.py b/mmaction/apis/train.py index f04ed6c712..7a3cd1351b 100644 --- a/mmaction/apis/train.py +++ b/mmaction/apis/train.py @@ -1,5 +1,6 @@ # Copyright (c) OpenMMLab. All rights reserved. import copy as cp +import os import os.path as osp import numpy as np @@ -205,21 +206,22 @@ def train_model(model, if test['test_last'] or test['test_best']: best_ckpt_path = None if test['test_best']: - if hasattr(eval_hook, 'best_ckpt_path'): - best_ckpt_path = eval_hook.best_ckpt_path - - if best_ckpt_path is None or not osp.exists(best_ckpt_path): + ckpt_paths = [x for x in os.listdir(cfg.work_dir) if 'best' in x] + ckpt_paths = [x for x in ckpt_paths if x.endswith('.pth')] + if len(ckpt_paths) == 0: + runner.logger.info('Warning: test_best set, but no ckpt found') test['test_best'] = False - if best_ckpt_path is None: - runner.logger.info('Warning: test_best set as True, but ' - 'is not applicable ' - '(eval_hook.best_ckpt_path is None)') - else: - runner.logger.info('Warning: test_best set as True, but ' - 'is not applicable (best_ckpt ' - f'{best_ckpt_path} not found)') if not test['test_last']: return + elif len(ckpt_paths) > 1: + epoch_ids = [ + int(x.split('epoch_')[-1][:-4]) for x in ckpt_paths + ] + best_ckpt_path = ckpt_paths[np.argmax(epoch_ids)] + else: + best_ckpt_path = ckpt_paths[0] + if best_ckpt_path: + best_ckpt_path = osp.join(cfg.work_dir, best_ckpt_path) test_dataset = build_dataset(cfg.data.test, dict(test_mode=True)) gpu_collect = cfg.get('evaluation', {}).get('gpu_collect', False) @@ -242,7 +244,7 @@ def train_model(model, if test['test_last']: names.append('last') ckpts.append(None) - if test['test_best']: + if test['test_best'] and best_ckpt_path is not None: names.append('best') ckpts.append(best_ckpt_path) diff --git a/mmaction/core/evaluation/ava_utils.py b/mmaction/core/evaluation/ava_utils.py index 7f6571d478..e7aa10b2f6 100644 --- a/mmaction/core/evaluation/ava_utils.py +++ b/mmaction/core/evaluation/ava_utils.py @@ -1,6 +1,6 @@ # This piece of code is directly adapted from ActivityNet official repo # https://github.com/activitynet/ActivityNet/blob/master/ -# Evaluation/get_ava_performance.py. Some unused codes are removed. +# Evaluation/get_ava_performance.py. Some unused codes are removed. 
import csv import logging import time From 14f7ec73600cd4f268bca3768adad2fd3872a52e Mon Sep 17 00:00:00 2001 From: makecent <42603768+makecent@users.noreply.github.com> Date: Thu, 30 Dec 2021 13:13:34 +0800 Subject: [PATCH 325/414] [Fix] Update ssn.py (#1355) * Update ssn.py * Fix Lint Co-authored-by: Haodong Duan --- mmaction/models/localizers/ssn.py | 1 + 1 file changed, 1 insertion(+) diff --git a/mmaction/models/localizers/ssn.py b/mmaction/models/localizers/ssn.py index a92ce1cf68..3136d651f6 100644 --- a/mmaction/models/localizers/ssn.py +++ b/mmaction/models/localizers/ssn.py @@ -114,6 +114,7 @@ def forward_test(self, imgs, relative_proposal_list, scale_factor_list, relative_proposal_list = relative_proposal_list.cpu().numpy() activity_scores = activity_scores.cpu().numpy() completeness_scores = completeness_scores.cpu().numpy() + reg_norm_consts = reg_norm_consts.cpu().numpy() if bbox_preds is not None: bbox_preds = bbox_preds.view(-1, self.cls_head.num_classes, 2) bbox_preds[:, :, 0] = ( From 1ee5d47b14f907c63a021ed9f6b73d526b4bf870 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Thu, 30 Dec 2021 13:17:02 +0800 Subject: [PATCH 326/414] [Fix] Openmmlab pre-commit hooks off temporarily --- .pre-commit-config.yaml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f8608ff032..11320fe43a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -44,9 +44,9 @@ repos: hooks: - id: codespell args: ["--skip", "*.ipynb,tools/data/hvu/label_map.json", "-L", "te,nd,thre,Gool,gool"] - - repo: https://github.com/open-mmlab/pre-commit-hooks - rev: v0.1.0 # Use the ref you want to point at - hooks: - - id: check-algo-readme - - id: check-copyright - args: ["mmaction", "tools", "tests"] # these directories will be checked + # - repo: https://github.com/open-mmlab/pre-commit-hooks + # rev: v0.1.0 # Use the ref you want to point at + # hooks: + # - id: check-algo-readme + # - id: check-copyright + # args: ["mmaction", "tools", "tests"] # these directories will be checked From 62e7bacf4196634b99375a1ff62d34970556ed47 Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Fri, 31 Dec 2021 11:56:50 +0800 Subject: [PATCH 327/414] [Doc] update changelog (#1364) * master * master 0721 * add README * 1231 modify changelog * 1231 add changelog --- docs/changelog.md | 47 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) diff --git a/docs/changelog.md b/docs/changelog.md index 9fdd697e32..d6e7b0af2e 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -1,5 +1,52 @@ ## Changelog +### 0.21.0 (31/12/2021) + +**Highlights** + +- Support 2s-AGCN +- Support publish models in Windows +- Improve some sthv1 related models +- Support BABEL + +**New Features** + +- Support 2s-AGCN([#1248](https://github.com/open-mmlab/mmaction2/pull/1248)) +- Support skip postproc in ntu_pose_extraction([#1295](https://github.com/open-mmlab/mmaction2/pull/1295)) +- Support publish models in Windows([#1325](https://github.com/open-mmlab/mmaction2/pull/1325)) +- Add copyright checkhook in pre-commit-config([#1344](https://github.com/open-mmlab/mmaction2/pull/1344)) + +**Documentations** + +- Add MMFlow ([#1273](https://github.com/open-mmlab/mmaction2/pull/1273)) +- Revise README.md and add projects.md ([#1286](https://github.com/open-mmlab/mmaction2/pull/1286)) +- Add 2s-AGCN in Updates([#1289](https://github.com/open-mmlab/mmaction2/pull/1289)) +- Add 
MMFewShot([#1300](https://github.com/open-mmlab/mmaction2/pull/1300)) +- Add MMHuman3d([#1304](https://github.com/open-mmlab/mmaction2/pull/1304)) +- Update pre-commit([#1313](https://github.com/open-mmlab/mmaction2/pull/1313)) +- Use share menu from the theme instead([#1328](https://github.com/open-mmlab/mmaction2/pull/1328)) +- Update installation command([#1340](https://github.com/open-mmlab/mmaction2/pull/1340)) + +**Bug and Typo Fixes** + +- Update the inference part in notebooks([#1256](https://github.com/open-mmlab/mmaction2/pull/1256)) +- Update the map_location([#1262]((https://github.com/open-mmlab/mmaction2/pull/1262))) +- Fix bug that start_index is not used in RawFrameDecode([#1278](https://github.com/open-mmlab/mmaction2/pull/1278)) +- Fix bug in init_random_seed([#1282](https://github.com/open-mmlab/mmaction2/pull/1282)) +- Fix bug in setup.py([#1303](https://github.com/open-mmlab/mmaction2/pull/1303)) +- Fix interrogate error in workflows([#1305](https://github.com/open-mmlab/mmaction2/pull/1305)) +- Fix typo in slowfast config([#1309](https://github.com/open-mmlab/mmaction2/pull/1309)) +- Cancel previous runs that are not completed([#1327](https://github.com/open-mmlab/mmaction2/pull/1327)) +- Fix missing skip_postproc parameter([#1347](https://github.com/open-mmlab/mmaction2/pull/1347)) +- Update ssn.py([#1355](https://github.com/open-mmlab/mmaction2/pull/1355)) +- Use latest youtube-dl([#1357](https://github.com/open-mmlab/mmaction2/pull/1357)) +- Fix test-best([#1362](https://github.com/open-mmlab/mmaction2/pull/1362)) + +**ModelZoo** + +- Improve some sthv1 related models([#1306](https://github.com/open-mmlab/mmaction2/pull/1306)) +- Support BABEL([#1332](https://github.com/open-mmlab/mmaction2/pull/1332)) + ### 0.20.0 (07/10/2021) **Highlights** From 99a0e0a7d4cceb4568dab765c53970e5c83dc7e9 Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Fri, 31 Dec 2021 11:57:01 +0800 Subject: [PATCH 328/414] Bump Version to 0.21.0 (#1365) * master * master 0721 * add README * 1231 bump_version * 1231 bump_version --- README.md | 2 +- README_zh-CN.md | 2 +- docker/serve/Dockerfile | 2 +- mmaction/version.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index eeee2b9bc1..a665f3abab 100644 --- a/README.md +++ b/README.md @@ -51,7 +51,7 @@ The master branch works with **PyTorch 1.3+**. - (2021-10-25) We provide a [guide](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md) on how to train PoseC3D with custom datasets, [bit-scientist](https://github.com/bit-scientist) authored this PR! - (2021-10-16) We support **PoseC3D** on UCF101 and HMDB51, achieves 87.0% and 69.3% Top-1 accuracy with 2D skeletons only. Pre-extracted 2D skeletons are also available. -**Release**: v0.20.0 was released in 30/10/2021. Please refer to [changelog.md](docs/changelog.md) for details and release history. +**Release**: v0.21.0 was released in 31/12/2021. Please refer to [changelog.md](docs/changelog.md) for details and release history. ## Installation diff --git a/README_zh-CN.md b/README_zh-CN.md index 2a7690e897..39bb492edc 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -50,7 +50,7 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLa - (2021-10-25) 提供使用自定义数据集训练 PoseC3D 的 [教程](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md),此 PR 由用户 [bit-scientist](https://github.com/bit-scientist) 完成! 
- (2021-10-16) 在 UCF101, HMDB51 上支持 **PoseC3D**,仅用 2D 关键点就可分别达到 87.0% 和 69.3% 的识别准确率。两数据集的预提取骨架特征可以公开下载。 -v0.20.0 版本已于 2021 年 10 月 30 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史 +v0.21.0 版本已于 2021 年 12 月 31 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史 ## 安装 diff --git a/docker/serve/Dockerfile b/docker/serve/Dockerfile index 69b8ce8ea0..9518e0a92e 100644 --- a/docker/serve/Dockerfile +++ b/docker/serve/Dockerfile @@ -4,7 +4,7 @@ ARG CUDNN="7" FROM pytorch/pytorch:${PYTORCH}-cuda${CUDA}-cudnn${CUDNN}-devel ARG MMCV="1.3.8" -ARG MMACTION="0.20.0" +ARG MMACTION="0.21.0" ENV PYTHONUNBUFFERED TRUE diff --git a/mmaction/version.py b/mmaction/version.py index ffa55d38ae..19ddd59f9f 100644 --- a/mmaction/version.py +++ b/mmaction/version.py @@ -1,6 +1,6 @@ # Copyright (c) Open-MMLab. All rights reserved. -__version__ = '0.20.0' +__version__ = '0.21.0' def parse_version_info(version_str): From 6ec1af71562c1e4f145cc74b36ee815ae85ae216 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Mon, 10 Jan 2022 13:34:15 +0800 Subject: [PATCH 329/414] [Fix] Update train.py (#1375) --- mmaction/apis/train.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/mmaction/apis/train.py b/mmaction/apis/train.py index 7a3cd1351b..e5298843d3 100644 --- a/mmaction/apis/train.py +++ b/mmaction/apis/train.py @@ -3,6 +3,7 @@ import os import os.path as osp +import time import numpy as np import torch import torch.distributed as dist @@ -202,6 +203,9 @@ def train_model(model, if cfg.omnisource: runner_kwargs = dict(train_ratio=train_ratio) runner.run(data_loaders, cfg.workflow, cfg.total_epochs, **runner_kwargs) + + dist.barrier() + time.sleep(5) if test['test_last'] or test['test_best']: best_ckpt_path = None From 638b9b45297ff63be943cc8e18dec7ff98c2cbcf Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Tue, 11 Jan 2022 14:25:09 +0800 Subject: [PATCH 330/414] Fix Lint (#1377) --- mmaction/apis/train.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mmaction/apis/train.py b/mmaction/apis/train.py index e5298843d3..d5ef788ed2 100644 --- a/mmaction/apis/train.py +++ b/mmaction/apis/train.py @@ -2,8 +2,8 @@ import copy as cp import os import os.path as osp - import time + import numpy as np import torch import torch.distributed as dist @@ -203,7 +203,7 @@ def train_model(model, if cfg.omnisource: runner_kwargs = dict(train_ratio=train_ratio) runner.run(data_loaders, cfg.workflow, cfg.total_epochs, **runner_kwargs) - + dist.barrier() time.sleep(5) From 0dde60b56317b1133ac1adfee051374888efffc7 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Tue, 11 Jan 2022 16:11:54 +0800 Subject: [PATCH 331/414] Deprecate the support for "python setup.py test" --- setup.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/setup.py b/setup.py index ac9aaf4f07..318f11fc79 100644 --- a/setup.py +++ b/setup.py @@ -186,8 +186,6 @@ def add_mim_extension(): ], url='https://github.com/open-mmlab/mmaction2', license='Apache License 2.0', - setup_requires=parse_requirements('requirements/build.txt'), - tests_require=parse_requirements('requirements/tests.txt'), install_requires=parse_requirements('requirements/runtime.txt'), extras_require={ 'all': parse_requirements('requirements.txt'), From a81fcb2a48e3ea2c59110b5e243d80534ccfb416 Mon Sep 17 00:00:00 2001 From: "Michael P. 
Camilleri" Date: Sat, 15 Jan 2022 08:51:49 +0000 Subject: [PATCH 332/414] [Fix] Fixed Issue #1382 and a Printout Bug (#1386) * Added argument to LFBInferHead:__init__() * Fixed bug in assert statement printout * Finalised proper formatting * Fixed another related issue with pretrained --- mmaction/core/evaluation/ava_utils.py | 2 +- mmaction/models/heads/fbo_head.py | 4 +++- mmaction/models/heads/lfb_infer_head.py | 8 ++------ 3 files changed, 6 insertions(+), 8 deletions(-) diff --git a/mmaction/core/evaluation/ava_utils.py b/mmaction/core/evaluation/ava_utils.py index e7aa10b2f6..24ad52998a 100644 --- a/mmaction/core/evaluation/ava_utils.py +++ b/mmaction/core/evaluation/ava_utils.py @@ -123,7 +123,7 @@ def read_exclusions(exclusions_file): if exclusions_file: reader = csv.reader(exclusions_file) for row in reader: - assert len(row) == 2, 'Expected only 2 columns, got: ' + row + assert len(row) == 2, f'Expected only 2 columns, got: {len(row)}' excluded.add(make_image_key(row[0], row[1])) return excluded diff --git a/mmaction/models/heads/fbo_head.py b/mmaction/models/heads/fbo_head.py index 7790e7d539..066e2f13d1 100644 --- a/mmaction/models/heads/fbo_head.py +++ b/mmaction/models/heads/fbo_head.py @@ -337,7 +337,9 @@ def __init__(self, lfb_cfg, fbo_cfg, temporal_pool_type='avg', - spatial_pool_type='max'): + spatial_pool_type='max', + pretrained=None, + ): super().__init__() fbo_type = fbo_cfg.pop('type', 'non_local') assert fbo_type in FBOHead.fbo_dict diff --git a/mmaction/models/heads/lfb_infer_head.py b/mmaction/models/heads/lfb_infer_head.py index 6dc7099b5f..0baa953b59 100644 --- a/mmaction/models/heads/lfb_infer_head.py +++ b/mmaction/models/heads/lfb_infer_head.py @@ -31,12 +31,8 @@ class LFBInferHead(nn.Module): 'max'. Default: 'max'. """ - def __init__(self, - lfb_prefix_path, - dataset_mode='train', - use_half_precision=True, - temporal_pool_type='avg', - spatial_pool_type='max'): + def __init__( + self, lfb_prefix_path, dataset_mode='train', use_half_precision=True, temporal_pool_type='avg', spatial_pool_type='max', pretrained=None): super().__init__() rank, _ = get_dist_info() if rank == 0: From f83dd60cd63c548e12f6e0e1ca61744b75b4efed Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Sat, 15 Jan 2022 19:29:04 +0800 Subject: [PATCH 333/414] Fix Lint (#1390) --- mmaction/core/evaluation/ava_utils.py | 2 +- mmaction/models/heads/fbo_head.py | 4 ++-- mmaction/models/heads/lfb_infer_head.py | 10 ++++++++-- 3 files changed, 11 insertions(+), 5 deletions(-) diff --git a/mmaction/core/evaluation/ava_utils.py b/mmaction/core/evaluation/ava_utils.py index 24ad52998a..2075d4d57a 100644 --- a/mmaction/core/evaluation/ava_utils.py +++ b/mmaction/core/evaluation/ava_utils.py @@ -123,7 +123,7 @@ def read_exclusions(exclusions_file): if exclusions_file: reader = csv.reader(exclusions_file) for row in reader: - assert len(row) == 2, f'Expected only 2 columns, got: {len(row)}' + assert len(row) == 2, f'Expected only 2 columns, got: {row}' excluded.add(make_image_key(row[0], row[1])) return excluded diff --git a/mmaction/models/heads/fbo_head.py b/mmaction/models/heads/fbo_head.py index 066e2f13d1..42bbbb34d9 100644 --- a/mmaction/models/heads/fbo_head.py +++ b/mmaction/models/heads/fbo_head.py @@ -338,8 +338,7 @@ def __init__(self, fbo_cfg, temporal_pool_type='avg', spatial_pool_type='max', - pretrained=None, - ): + pretrained=None): super().__init__() fbo_type = fbo_cfg.pop('type', 'non_local') assert fbo_type in FBOHead.fbo_dict @@ -348,6 +347,7 @@ def __init__(self, 
self.lfb_cfg = copy.deepcopy(lfb_cfg) self.fbo_cfg = copy.deepcopy(fbo_cfg) + self.pretrained = pretrained self.lfb = LFB(**self.lfb_cfg) self.fbo = self.fbo_dict[fbo_type](**self.fbo_cfg) diff --git a/mmaction/models/heads/lfb_infer_head.py b/mmaction/models/heads/lfb_infer_head.py index 0baa953b59..2ad7cc5828 100644 --- a/mmaction/models/heads/lfb_infer_head.py +++ b/mmaction/models/heads/lfb_infer_head.py @@ -31,8 +31,13 @@ class LFBInferHead(nn.Module): 'max'. Default: 'max'. """ - def __init__( - self, lfb_prefix_path, dataset_mode='train', use_half_precision=True, temporal_pool_type='avg', spatial_pool_type='max', pretrained=None): + def __init__(self, + lfb_prefix_path, + dataset_mode='train', + use_half_precision=True, + temporal_pool_type='avg', + spatial_pool_type='max', + pretrained=None): super().__init__() rank, _ = get_dist_info() if rank == 0: @@ -47,6 +52,7 @@ def __init__( self.lfb_prefix_path = lfb_prefix_path self.dataset_mode = dataset_mode self.use_half_precision = use_half_precision + self.pretrained = pretrained # Pool by default if temporal_pool_type == 'avg': From 7f5e55a0033d84f9892bce4a7c9c9c0f045241a6 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Mon, 17 Jan 2022 14:11:54 +0800 Subject: [PATCH 334/414] [Improvement] Support STGCN in demo_skeleton.py (#1391) * update demo_posec3d * support stgcn in demo_skeleton --- demo/README.md | 18 +++++++++-- demo/{demo_posec3d.py => demo_skeleton.py} | 10 +++++-- docs_zh_CN/demo.md | 35 ++++++++++------------ 3 files changed, 39 insertions(+), 24 deletions(-) rename demo/{demo_posec3d.py => demo_skeleton.py} (96%) diff --git a/demo/README.md b/demo/README.md index 924453213c..00a25e6cfe 100644 --- a/demo/README.md +++ b/demo/README.md @@ -450,7 +450,7 @@ python demo/webcam_demo_spatiotemporal_det.py \ We provide a demo script to predict the skeleton-based action recognition result using a single video. ```shell -python demo/demo_posec3d.py ${VIDEO_FILE} ${OUT_FILENAME} \ +python demo/demo_skeleton.py ${VIDEO_FILE} ${OUT_FILENAME} \ [--config ${SKELETON_BASED_ACTION_RECOGNITION_CONFIG_FILE}] \ [--checkpoint ${SKELETON_BASED_ACTION_RECOGNITION_CHECKPOINT}] \ [--det-config ${HUMAN_DETECTION_CONFIG_FILE}] \ @@ -483,7 +483,7 @@ Assume that you are located at `$MMACTION2` . 1. Use the Faster RCNN as the human detector, HRNetw32 as the pose estimator, PoseC3D-NTURGB+D-120-Xsub-keypoint as the skeleton-based action recognizer. ```shell -python demo/demo_posec3d.py demo/ntu_sample.avi demo/posec3d_demo.mp4 \ +python demo/demo_skeleton.py demo/ntu_sample.avi demo/skeleton_demo.mp4 \ --config configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py \ --checkpoint https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint-6736b03f.pth \ --det-config demo/faster_rcnn_r50_fpn_2x_coco.py \ @@ -494,6 +494,20 @@ python demo/demo_posec3d.py demo/ntu_sample.avi demo/posec3d_demo.mp4 \ --label-map tools/data/skeleton/label_map_ntu120.txt ``` +2. Use the Faster RCNN as the human detector, HRNetw32 as the pose estimator, STGCN-NTURGB+D-60-Xsub-keypoint as the skeleton-based action recognizer. 
+ +```shell +python demo/demo_skeleton.py demo/ntu_sample.avi demo/skeleton_demo.mp4 \ + --config configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py \ + --checkpoint https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint-e7bb9653.pth \ + --det-config demo/faster_rcnn_r50_fpn_2x_coco.py \ + --det-checkpoint http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth \ + --det-score-thr 0.9 \ + --pose-config demo/hrnet_w32_coco_256x192.py \ + --pose-checkpoint https://download.openmmlab.com/mmpose/top_down/hrnet/hrnet_w32_coco_256x192-c78dce93_20200708.pth \ + --label-map tools/data/skeleton/label_map_ntu120.txt +``` + ## Video Structuralize Demo We provide a demo script to to predict the skeleton-based and rgb-based action recognition and spatio-temporal action detection result using a single video. diff --git a/demo/demo_posec3d.py b/demo/demo_skeleton.py similarity index 96% rename from demo/demo_posec3d.py rename to demo/demo_skeleton.py index 33b6fb6dae..3bf14a5411 100644 --- a/demo/demo_posec3d.py +++ b/demo/demo_skeleton.py @@ -60,13 +60,13 @@ def parse_args(): '--config', default=('configs/skeleton/posec3d/' 'slowonly_r50_u48_240e_ntu120_xsub_keypoint.py'), - help='posec3d config file path') + help='skeleton model config file path') parser.add_argument( '--checkpoint', default=('https://download.openmmlab.com/mmaction/skeleton/posec3d/' 'slowonly_r50_u48_240e_ntu120_xsub_keypoint/' 'slowonly_r50_u48_240e_ntu120_xsub_keypoint-6736b03f.pth'), - help='posec3d checkpoint file/url') + help='skeleton model checkpoint file/url') parser.add_argument( '--det-config', default='demo/faster_rcnn_r50_fpn_2x_coco.py', @@ -201,6 +201,10 @@ def main(): # Get clip_len, frame_interval and calculate center index of each clip config = mmcv.Config.fromfile(args.config) config.merge_from_dict(args.cfg_options) + for component in config.data.test.pipeline: + if component['type'] == 'PoseNormalize': + component['mean'] = (w // 2, h // 2, .5) + component['max_value'] = (w, h, 1.) 
model = init_recognizer(config, args.checkpoint, args.device) @@ -223,7 +227,7 @@ def main(): modality='Pose', total_frames=num_frame) num_person = max([len(x) for x in pose_results]) - # Current PoseC3D models are trained on COCO-keypoints (17 keypoints) + num_keypoint = 17 keypoint = np.zeros((num_person, num_frame, num_keypoint, 2), dtype=np.float16) diff --git a/docs_zh_CN/demo.md b/docs_zh_CN/demo.md index 9cd91dec4e..b10709ad38 100644 --- a/docs_zh_CN/demo.md +++ b/docs_zh_CN/demo.md @@ -407,7 +407,7 @@ python demo/webcam_demo_spatiotemporal_det.py \ MMAction2 提供本脚本实现基于人体姿态的动作标签预测。 ```shell -python demo/demo_posec3d.py ${VIDEO_FILE} ${OUT_FILENAME} \ +python demo/demo_skeleton.py ${VIDEO_FILE} ${OUT_FILENAME} \ [--config ${SKELETON_BASED_ACTION_RECOGNITION_CONFIG_FILE}] \ [--checkpoint ${SKELETON_BASED_ACTION_RECOGNITION_CHECKPOINT}] \ [--det-config ${HUMAN_DETECTION_CONFIG_FILE}] \ @@ -422,23 +422,6 @@ python demo/demo_posec3d.py ${VIDEO_FILE} ${OUT_FILENAME} \ 可选参数: -- `SPATIOTEMPORAL_ACTION_DETECTION_CONFIG_FILE`: 时空检测配置文件路径。 -- `SPATIOTEMPORAL_ACTION_DETECTION_CHECKPOINT`: 时空检测模型权重文件路径。 -- `ACTION_DETECTION_SCORE_THRESHOLD`: 动作检测分数阈值,默认为 0.4。 -- `HUMAN_DETECTION_CONFIG_FILE`: 人体检测配置文件路径。 -- `HUMAN_DETECTION_CHECKPOINT`: 人体检测模型权重文件路径。 -- `HUMAN_DETECTION_SCORE_THRE`: 人体检测分数阈值,默认为 0.9。 -- `INPUT_VIDEO`: 网络摄像头编号或本地视频文件路径,默认为 `0`。 -- `LABEL_MAP`: 所使用的标签映射文件,默认为 `tools/data/ava/label_map.txt`。 -- `DEVICE`: 指定脚本运行设备,支持 cuda 设备(如 `cuda:0`)或 cpu(`cpu`),默认为 `cuda:0`。 -- `OUTPUT_FPS`: 输出视频的帧率,默认为 15。 -- `OUTPUT_FILENAME`: 输出视频的路径,默认为 `None`。 -- `--show`: 是否通过 `cv2.imshow` 展示预测结果。 -- `DISPLAY_HEIGHT`: 输出结果图像高度,默认为 0。 -- `DISPLAY_WIDTH`: 输出结果图像宽度,默认为 0。若 `DISPLAY_HEIGHT <= 0 and DISPLAY_WIDTH <= 0`,则表示输出图像形状与输入视频形状相同。 -- `PREDICT_STEPSIZE`: 每 N 帧进行一次预测(以控制计算资源),默认为 8。 -- `CLIP_VIS_LENGTH`: 预测结果可视化持续帧数,即每次预测结果将可视化到 `CLIP_VIS_LENGTH` 帧中,默认为 8。 - - `SKELETON_BASED_ACTION_RECOGNITION_CONFIG_FILE`: 基于人体姿态的动作识别模型配置文件路径。 - `SKELETON_BASED_ACTION_RECOGNITION_CHECKPOINT`: 基于人体姿态的动作识别模型权重文件路径。 - `HUMAN_DETECTION_CONFIG_FILE`: 人体检测配置文件路径。 @@ -457,7 +440,7 @@ python demo/demo_posec3d.py ${VIDEO_FILE} ${OUT_FILENAME} \ 1. 使用 Faster RCNN 作为人体检测器,HRNetw32 作为人体姿态估计模型,PoseC3D-NTURGB+D-120-Xsub-keypoint 作为基于人体姿态的动作识别模型。 ```shell -python demo/demo_posec3d.py demo/ntu_sample.avi demo/posec3d_demo.mp4 \ +python demo/demo_skeleton.py demo/ntu_sample.avi demo/skeleton_demo.mp4 \ --config configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py \ --checkpoint https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint-6736b03f.pth \ --det-config demo/faster_rcnn_r50_fpn_2x_coco.py \ @@ -468,6 +451,20 @@ python demo/demo_posec3d.py demo/ntu_sample.avi demo/posec3d_demo.mp4 \ --label-map tools/data/skeleton/label_map_ntu120.txt ``` +2. 
使用 Faster RCNN 作为人体检测器,HRNetw32 作为人体姿态估计模型,STGCN-NTURGB+D-60-Xsub-keypoint 作为基于人体姿态的动作识别模型。 + +```shell +python demo/demo_skeleton.py demo/ntu_sample.avi demo/skeleton_demo.mp4 \ + --config configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py \ + --checkpoint https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint-e7bb9653.pth \ + --det-config demo/faster_rcnn_r50_fpn_2x_coco.py \ + --det-checkpoint http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth \ + --det-score-thr 0.9 \ + --pose-config demo/hrnet_w32_coco_256x192.py \ + --pose-checkpoint https://download.openmmlab.com/mmpose/top_down/hrnet/hrnet_w32_coco_256x192-c78dce93_20200708.pth \ + --label-map tools/data/skeleton/label_map_ntu120.txt +``` + ## 视频结构化预测 MMAction2 提供本脚本实现基于人体姿态和RGB的视频结构化预测。 From 35b2451066b0ef43266f7c5b0cb286223eec8f5c Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Mon, 17 Jan 2022 21:34:04 +0800 Subject: [PATCH 335/414] [Doc] Add openmmlab platform (#1393) --- README.md | 19 ++++++++++++++++++- README_zh-CN.md | 19 ++++++++++++++++++- 2 files changed, 36 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index a665f3abab..e61b7d8792 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,22 @@
[README.md hunk: inserts a centered banner reading "OpenMMLab website" (HOT) and "OpenMMLab platform" (TRY IT OUT) at the top of the file; the surrounding HTML markup is not recoverable]
    ## Introduction diff --git a/README_zh-CN.md b/README_zh-CN.md index 39bb492edc..6ad0e3c27e 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -1,5 +1,22 @@
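Both hunks of this patch insert the same banner block at the top of the corresponding README. An approximate reconstruction of the English variant is sketched below; the tags, font sizes, and URLs are assumptions based on the banner used across OpenMMLab READMEs in this period, not text recovered from this patch:

```html
<div align="center">
  <b><font size="5">OpenMMLab website</font></b>
  <sup>
    <a href="https://openmmlab.com">
      <i><font size="4">HOT</font></i>
    </a>
  </sup>
  &nbsp;&nbsp;&nbsp;&nbsp;
  <b><font size="5">OpenMMLab platform</font></b>
  <sup>
    <a href="https://platform.openmmlab.com">
      <i><font size="4">TRY IT OUT</font></i>
    </a>
  </sup>
</div>
```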
[README_zh-CN.md hunk: inserts the matching banner reading "OpenMMLab 官网" (HOT) and "OpenMMLab 开放平台" (TRY IT OUT); the surrounding HTML markup is not recoverable]
    ## 简介 From 3198ba7793d0847cf10ca4891b9f279a7bd4422b Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Tue, 18 Jan 2022 14:00:33 +0800 Subject: [PATCH 336/414] [Doc] Update links (#1394) --- README.md | 3 +++ README_zh-CN.md | 3 +++ 2 files changed, 6 insertions(+) diff --git a/README.md b/README.md index e61b7d8792..378b859d91 100644 --- a/README.md +++ b/README.md @@ -278,6 +278,7 @@ We wish that the toolbox and benchmark could serve the growing research communit - [MIM](https://github.com/open-mmlab/mim): MIM Installs OpenMMLab Packages. - [MMAction2](https://github.com/open-mmlab/mmaction2): OpenMMLab's next-generation video understanding toolbox and benchmark. - [MMClassification](https://github.com/open-mmlab/mmclassification): OpenMMLab image classification toolbox and benchmark. +- [MMDeploy](https://github.com/open-mmlab/mmdeploy): OpenMMLab Model Deployment Framework. - [MMDetection](https://github.com/open-mmlab/mmdetection): OpenMMLab detection toolbox and benchmark. - [MMDetection3D](https://github.com/open-mmlab/mmdetection3d): OpenMMLab's next-generation platform for general 3D object detection. - [MMEditing](https://github.com/open-mmlab/mmediting): OpenMMLab image and video editing toolbox. @@ -287,5 +288,7 @@ We wish that the toolbox and benchmark could serve the growing research communit - [MMHuman3D](https://github.com/open-mmlab/mmhuman3d): OpenMMLab human pose and shape estimation toolbox and benchmark. - [MMOCR](https://github.com/open-mmlab/mmocr): A Comprehensive Toolbox for Text Detection, Recognition and Understanding. - [MMPose](https://github.com/open-mmlab/mmpose): OpenMMLab pose estimation toolbox and benchmark. +- [MMRazor](https://github.com/open-mmlab/mmrazor): OpenMMLab Model Compression Toolbox and Benchmark. - [MMSegmentation](https://github.com/open-mmlab/mmsegmentation): OpenMMLab semantic segmentation toolbox and benchmark. +- [MMSelfSup](https://github.com/open-mmlab/mmselfsup): OpenMMLab self-supervised learning Toolbox and Benchmark. - [MMTracking](https://github.com/open-mmlab/mmtracking): OpenMMLab video perception toolbox and benchmark. 
diff --git a/README_zh-CN.md b/README_zh-CN.md index 6ad0e3c27e..9dfa0455ab 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -272,6 +272,7 @@ MMAction2 是一款由不同学校和公司共同贡献的开源项目。我们 - [MIM](https://github.com/open-mmlab/mim): MIM 是 OpenMMlab 项目、算法、模型的统一入口 - [MMAction2](https://github.com/open-mmlab/mmaction2): OpenMMLab 新一代视频理解工具箱与测试基准 - [MMClassification](https://github.com/open-mmlab/mmclassification): OpenMMLab 图像分类工具箱与测试基准 +- [MMDeploy](https://github.com/open-mmlab/mmdeploy): OpenMMLab 模型部署框架 - [MMDetection](https://github.com/open-mmlab/mmdetection): OpenMMLab 检测工具箱与测试基准 - [MMDetection3D](https://github.com/open-mmlab/mmdetection3d): OpenMMLab 新一代通用3D目标检测平台 - [MMEditing](https://github.com/open-mmlab/mmediting): OpenMMLab 图像视频编辑工具箱 @@ -281,7 +282,9 @@ MMAction2 是一款由不同学校和公司共同贡献的开源项目。我们 - [MMHuman3D](https://github.com/open-mmlab/mmhuman3d): OpenMMLab 人体姿态和形状估计工具箱 - [MMOCR](https://github.com/open-mmlab/mmocr): OpenMMLab 全流程文字检测识别理解工具包 - [MMPose](https://github.com/open-mmlab/mmpose): OpenMMLab 姿态估计工具箱与测试基准 +- [MMRazor](https://github.com/open-mmlab/mmrazor): OpenMMLab 模型压缩工具箱与测试基准 - [MMSegmentation](https://github.com/open-mmlab/mmsegmentation): OpenMMLab 语义分割工具箱与测试基准 +- [MMSelfSup](https://github.com/open-mmlab/mmselfsup): OpenMMLab 自监督学习工具箱与测试基准 - [MMTracking](https://github.com/open-mmlab/mmtracking): OpenMMLab 一体化视频目标感知平台 ## 欢迎加入 OpenMMLab 社区 From f937948a5519ef82d30b532544d917b1c84df3c8 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Tue, 18 Jan 2022 15:41:37 +0800 Subject: [PATCH 337/414] [Improvement] Update mp setting (#1395) --- configs/_base_/default_runtime.py | 5 ++++ tools/train.py | 38 +++++++++++++++++++++++++++++++ 2 files changed, 43 insertions(+) diff --git a/configs/_base_/default_runtime.py b/configs/_base_/default_runtime.py index 96ef8c4b9c..3bfa975246 100644 --- a/configs/_base_/default_runtime.py +++ b/configs/_base_/default_runtime.py @@ -11,3 +11,8 @@ load_from = None resume_from = None workflow = [('train', 1)] + +# disable opencv multithreading to avoid system being overloaded +opencv_num_threads = 0 +# set multi-process start method as `fork` to speed up the training +mp_start_method = 'fork' diff --git a/tools/train.py b/tools/train.py index e871c53088..e319ef30b8 100644 --- a/tools/train.py +++ b/tools/train.py @@ -1,11 +1,14 @@ # Copyright (c) OpenMMLab. All rights reserved. 
import argparse import copy +import multiprocessing as mp import os import os.path as osp +import platform import time import warnings +import cv2 import mmcv import torch from mmcv import Config, DictAction @@ -19,6 +22,38 @@ from mmaction.utils import collect_env, get_root_logger, register_module_hooks +def setup_multi_processes(cfg): + # set multi-process start method as `fork` to speed up the training + if platform.system() != 'Windows': + mp_start_method = cfg.get('mp_start_method', 'fork') + mp.set_start_method(mp_start_method) + + # disable opencv multithreading to avoid system being overloaded + opencv_num_threads = cfg.get('opencv_num_threads', 0) + cv2.setNumThreads(opencv_num_threads) + + # setup OMP threads + # This code is referred from https://github.com/pytorch/pytorch/blob/master/torch/distributed/run.py # noqa + if ('OMP_NUM_THREADS' not in os.environ and cfg.data.workers_per_gpu > 1): + omp_num_threads = 1 + warnings.warn( + f'Setting OMP_NUM_THREADS environment variable for each process ' + f'to be {omp_num_threads} in default, to avoid your system being ' + f'overloaded, please further tune the variable for optimal ' + f'performance in your application as needed.') + os.environ['OMP_NUM_THREADS'] = str(omp_num_threads) + + # setup MKL threads + if 'MKL_NUM_THREADS' not in os.environ and cfg.data.workers_per_gpu > 1: + mkl_num_threads = 1 + warnings.warn( + f'Setting MKL_NUM_THREADS environment variable for each process ' + f'to be {mkl_num_threads} in default, to avoid your system being ' + f'overloaded, please further tune the variable for optimal ' + f'performance in your application as needed.') + os.environ['MKL_NUM_THREADS'] = str(mkl_num_threads) + + def parse_args(): parser = argparse.ArgumentParser(description='Train a recognizer') parser.add_argument('config', help='train config file path') @@ -83,6 +118,9 @@ def main(): cfg.merge_from_dict(args.cfg_options) + # set multi-process settings + setup_multi_processes(cfg) + # set cudnn_benchmark if cfg.get('cudnn_benchmark', False): torch.backends.cudnn.benchmark = True From cbd9d622d9152dd4992d8842219fa329a8738374 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Wed, 19 Jan 2022 16:40:17 +0800 Subject: [PATCH 338/414] Update parse_file_list.py --- tools/data/parse_file_list.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/data/parse_file_list.py b/tools/data/parse_file_list.py index f649ab411c..a87073efa6 100644 --- a/tools/data/parse_file_list.py +++ b/tools/data/parse_file_list.py @@ -426,10 +426,10 @@ def parse_hmdb51_split(level): def generate_class_index_file(): """This function will generate a `ClassInd.txt` for HMDB51 in a format like UCF101, where class id starts with 1.""" - frame_path = 'data/hmdb51/rawframes' + video_path = 'data/hmdb51/videos' annotation_dir = 'data/hmdb51/annotations' - class_list = sorted(os.listdir(frame_path)) + class_list = sorted(os.listdir(video_path)) class_dict = dict() if not osp.exists(class_index_file): with open(class_index_file, 'w') as f: From fa3221f23168f8e1d964e3d56b0af7d7861a03d2 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Wed, 26 Jan 2022 12:37:17 +0800 Subject: [PATCH 339/414] [Enhance] Setup MP both in train and test. 
(#1405) --- mmaction/utils/__init__.py | 4 +- mmaction/utils/setup_env.py | 47 +++++++++++++++++++++ tests/test_utils/test_setup_env.py | 68 ++++++++++++++++++++++++++++++ tools/test.py | 5 ++- tools/train.py | 38 +---------------- 5 files changed, 124 insertions(+), 38 deletions(-) create mode 100644 mmaction/utils/setup_env.py create mode 100644 tests/test_utils/test_setup_env.py diff --git a/mmaction/utils/__init__.py b/mmaction/utils/__init__.py index 7987cc8148..f6d43efc41 100644 --- a/mmaction/utils/__init__.py +++ b/mmaction/utils/__init__.py @@ -6,9 +6,11 @@ from .misc import get_random_string, get_shm_dir, get_thread_id from .module_hooks import register_module_hooks from .precise_bn import PreciseBNHook +from .setup_env import setup_multi_processes __all__ = [ 'get_root_logger', 'collect_env', 'get_random_string', 'get_thread_id', 'get_shm_dir', 'GradCAM', 'PreciseBNHook', 'import_module_error_class', - 'import_module_error_func', 'register_module_hooks' + 'import_module_error_func', 'register_module_hooks', + 'setup_multi_processes' ] diff --git a/mmaction/utils/setup_env.py b/mmaction/utils/setup_env.py new file mode 100644 index 0000000000..21def2f080 --- /dev/null +++ b/mmaction/utils/setup_env.py @@ -0,0 +1,47 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import os +import platform +import warnings + +import cv2 +import torch.multiprocessing as mp + + +def setup_multi_processes(cfg): + """Setup multi-processing environment variables.""" + # set multi-process start method as `fork` to speed up the training + if platform.system() != 'Windows': + mp_start_method = cfg.get('mp_start_method', 'fork') + current_method = mp.get_start_method(allow_none=True) + if current_method is not None and current_method != mp_start_method: + warnings.warn( + f'Multi-processing start method `{mp_start_method}` is ' + f'different from the previous setting `{current_method}`.' + f'It will be force set to `{mp_start_method}`. You can change ' + f'this behavior by changing `mp_start_method` in your config.') + mp.set_start_method(mp_start_method, force=True) + + # disable opencv multithreading to avoid system being overloaded + opencv_num_threads = cfg.get('opencv_num_threads', 0) + cv2.setNumThreads(opencv_num_threads) + + # setup OMP threads + # This code is referred from https://github.com/pytorch/pytorch/blob/master/torch/distributed/run.py # noqa + if 'OMP_NUM_THREADS' not in os.environ and cfg.data.workers_per_gpu > 1: + omp_num_threads = 1 + warnings.warn( + f'Setting OMP_NUM_THREADS environment variable for each process ' + f'to be {omp_num_threads} in default, to avoid your system being ' + f'overloaded, please further tune the variable for optimal ' + f'performance in your application as needed.') + os.environ['OMP_NUM_THREADS'] = str(omp_num_threads) + + # setup MKL threads + if 'MKL_NUM_THREADS' not in os.environ and cfg.data.workers_per_gpu > 1: + mkl_num_threads = 1 + warnings.warn( + f'Setting MKL_NUM_THREADS environment variable for each process ' + f'to be {mkl_num_threads} in default, to avoid your system being ' + f'overloaded, please further tune the variable for optimal ' + f'performance in your application as needed.') + os.environ['MKL_NUM_THREADS'] = str(mkl_num_threads) diff --git a/tests/test_utils/test_setup_env.py b/tests/test_utils/test_setup_env.py new file mode 100644 index 0000000000..87c2f755a8 --- /dev/null +++ b/tests/test_utils/test_setup_env.py @@ -0,0 +1,68 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+import multiprocessing as mp
+import os
+import platform
+
+import cv2
+from mmcv import Config
+
+from mmaction.utils import setup_multi_processes
+
+
+def test_setup_multi_processes():
+    # temp save system setting
+    sys_start_method = mp.get_start_method(allow_none=True)
+    sys_cv_threads = cv2.getNumThreads()
+    # pop and temp save system env vars
+    sys_omp_threads = os.environ.pop('OMP_NUM_THREADS', default=None)
+    sys_mkl_threads = os.environ.pop('MKL_NUM_THREADS', default=None)
+
+    # test config without setting env
+    config = dict(data=dict(workers_per_gpu=2))
+    cfg = Config(config)
+    setup_multi_processes(cfg)
+    assert os.getenv('OMP_NUM_THREADS') == '1'
+    assert os.getenv('MKL_NUM_THREADS') == '1'
+    # when set to 0, the num threads will be 1
+    assert cv2.getNumThreads() == 1
+    if platform.system() != 'Windows':
+        assert mp.get_start_method() == 'fork'
+
+    # test num workers <= 1
+    os.environ.pop('OMP_NUM_THREADS')
+    os.environ.pop('MKL_NUM_THREADS')
+    config = dict(data=dict(workers_per_gpu=0))
+    cfg = Config(config)
+    setup_multi_processes(cfg)
+    assert 'OMP_NUM_THREADS' not in os.environ
+    assert 'MKL_NUM_THREADS' not in os.environ
+
+    # test manually set env var
+    os.environ['OMP_NUM_THREADS'] = '4'
+    config = dict(data=dict(workers_per_gpu=2))
+    cfg = Config(config)
+    setup_multi_processes(cfg)
+    assert os.getenv('OMP_NUM_THREADS') == '4'
+
+    # test manually set opencv threads and mp start method
+    config = dict(
+        data=dict(workers_per_gpu=2),
+        opencv_num_threads=4,
+        mp_start_method='spawn')
+    cfg = Config(config)
+    setup_multi_processes(cfg)
+    assert cv2.getNumThreads() == 4
+    assert mp.get_start_method() == 'spawn'
+
+    # revert setting to avoid affecting other programs
+    if sys_start_method:
+        mp.set_start_method(sys_start_method, force=True)
+    cv2.setNumThreads(sys_cv_threads)
+    if sys_omp_threads:
+        os.environ['OMP_NUM_THREADS'] = sys_omp_threads
+    else:
+        os.environ.pop('OMP_NUM_THREADS')
+    if sys_mkl_threads:
+        os.environ['MKL_NUM_THREADS'] = sys_mkl_threads
+    else:
+        os.environ.pop('MKL_NUM_THREADS')
diff --git a/tools/test.py b/tools/test.py
index 849005562b..2fadba0119 100644
--- a/tools/test.py
+++ b/tools/test.py
@@ -15,7 +15,7 @@
 from mmaction.datasets import build_dataloader, build_dataset
 from mmaction.models import build_model
-from mmaction.utils import register_module_hooks
+from mmaction.utils import register_module_hooks, setup_multi_processes
 
 # TODO import test functions from mmcv and delete them from mmaction2
 try:
@@ -274,6 +274,9 @@
     cfg.merge_from_dict(args.cfg_options)
 
+    # set multi-process settings
+    setup_multi_processes(cfg)
+
     # Load output_config from cfg
     output_config = cfg.get('output_config', {})
     if args.out:
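As a quick illustration of what the relocated helper does at runtime, here is a minimal sketch (assuming an environment with this patch applied; the config values are illustrative, and only `data.workers_per_gpu` is strictly required by the helper):

```python
import os

from mmcv import Config

from mmaction.utils import setup_multi_processes

# `workers_per_gpu > 1` triggers the OMP/MKL capping branch; the other
# two keys are optional and already default to the values shown here.
cfg = Config(
    dict(
        data=dict(workers_per_gpu=2),
        opencv_num_threads=0,  # 0 disables OpenCV multithreading
        mp_start_method='fork'))  # skipped on Windows

setup_multi_processes(cfg)

# Unless they were already set, both variables are now pinned to '1',
# so each dataloader worker runs single-threaded OMP/MKL kernels.
print(os.environ.get('OMP_NUM_THREADS'), os.environ.get('MKL_NUM_THREADS'))
```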
diff --git a/tools/train.py b/tools/train.py
index e319ef30b8..d91c68e958 100644
--- a/tools/train.py
+++ b/tools/train.py
@@ -1,14 +1,11 @@
 # Copyright (c) OpenMMLab. All rights reserved.
 import argparse
 import copy
-import multiprocessing as mp
 import os
 import os.path as osp
-import platform
 import time
 import warnings
 
-import cv2
 import mmcv
 import torch
 from mmcv import Config, DictAction
@@ -19,39 +16,8 @@
 from mmaction.apis import init_random_seed, train_model
 from mmaction.datasets import build_dataset
 from mmaction.models import build_model
-from mmaction.utils import collect_env, get_root_logger, register_module_hooks
-
-
-def setup_multi_processes(cfg):
-    # set multi-process start method as `fork` to speed up the training
-    if platform.system() != 'Windows':
-        mp_start_method = cfg.get('mp_start_method', 'fork')
-        mp.set_start_method(mp_start_method)
-
-    # disable opencv multithreading to avoid system being overloaded
-    opencv_num_threads = cfg.get('opencv_num_threads', 0)
-    cv2.setNumThreads(opencv_num_threads)
-
-    # setup OMP threads
-    # This code is referred from https://github.com/pytorch/pytorch/blob/master/torch/distributed/run.py  # noqa
-    if ('OMP_NUM_THREADS' not in os.environ and cfg.data.workers_per_gpu > 1):
-        omp_num_threads = 1
-        warnings.warn(
-            f'Setting OMP_NUM_THREADS environment variable for each process '
-            f'to be {omp_num_threads} in default, to avoid your system being '
-            f'overloaded, please further tune the variable for optimal '
-            f'performance in your application as needed.')
-        os.environ['OMP_NUM_THREADS'] = str(omp_num_threads)
-
-    # setup MKL threads
-    if 'MKL_NUM_THREADS' not in os.environ and cfg.data.workers_per_gpu > 1:
-        mkl_num_threads = 1
-        warnings.warn(
-            f'Setting MKL_NUM_THREADS environment variable for each process '
-            f'to be {mkl_num_threads} in default, to avoid your system being '
-            f'overloaded, please further tune the variable for optimal '
-            f'performance in your application as needed.')
-        os.environ['MKL_NUM_THREADS'] = str(mkl_num_threads)
+from mmaction.utils import (collect_env, get_root_logger,
+                            register_module_hooks, setup_multi_processes)
 
 
 def parse_args():
From 242a561b4e892dc964b50fb1f4062b5b2434a4b9 Mon Sep 17 00:00:00 2001
From: Haodong Duan
Date: Wed, 26 Jan 2022 13:12:13 +0800
Subject: [PATCH 340/414] [Feature] Support CPU Training (#1407)

---
 docs/getting_started.md       |  6 ++++++
 docs_zh_CN/getting_started.md | 37 +++++++++++++++++++++--------------
 mmaction/apis/train.py        |  3 +--
 3 files changed, 29 insertions(+), 17 deletions(-)

diff --git a/docs/getting_started.md b/docs/getting_started.md
index 7df61337d6..713a4adfcd 100644
--- a/docs/getting_started.md
+++ b/docs/getting_started.md
@@ -57,6 +57,9 @@ For using custom datasets, please refer to [Tutorial 3: Adding New Dataset](tuto
 We provide testing scripts to evaluate a whole dataset (Kinetics-400, Something-Something V1&V2, (Multi-)Moments in Time, etc.),
 and provide some high-level apis for easier integration to other projects.
 
+MMAction2 also supports testing with CPU. However, it will be **very slow** and should only be used for debugging on a device without GPU.
+To test with CPU, one should first disable all GPUs (if any) with `export CUDA_VISIBLE_DEVICES=-1`, and then call the testing scripts directly with `python tools/test.py {OTHER_ARGS}`.
+
 ### Test a dataset
 
 - [x] single GPU
@@ -352,6 +355,9 @@ evaluation = dict(interval=5)  # This evaluates the model every 5 epochs.
 
 According to the [Linear Scaling Rule](https://arxiv.org/abs/1706.02677), you need to set the learning rate proportional to the batch size if you use different GPUs or videos per GPU, e.g., lr=0.01 for 4 GPUs x 2 video/gpu and lr=0.08 for 16 GPUs x 4 video/gpu.
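As a worked example of the rule (a minimal sketch; the lr=0.01 at 4 GPUs x 2 video/gpu baseline is the one quoted in the sentence above):

```python
# Linear Scaling Rule: lr grows linearly with the effective batch size.
base_lr, base_videos = 0.01, 4 * 2  # 4 GPUs x 2 video/gpu

def scale_lr(num_gpus, videos_per_gpu):
    return base_lr * num_gpus * videos_per_gpu / base_videos

assert round(scale_lr(4, 2), 2) == 0.01   # the reference setting
assert round(scale_lr(16, 4), 2) == 0.08  # 16 GPUs x 4 video/gpu
```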
+MMAction2 also supports training with CPU. However, it will be **very slow** and should only be used for debugging on a device without GPU.
+To train with CPU, one should first disable all GPUs (if any) with `export CUDA_VISIBLE_DEVICES=-1`, and then call the training scripts directly with `python tools/train.py {OTHER_ARGS}`.
+
 ### Train with a single GPU
 
 ```shell
diff --git a/docs_zh_CN/getting_started.md b/docs_zh_CN/getting_started.md
index 76275691e4..c1bf43b241 100644
--- a/docs_zh_CN/getting_started.md
+++ b/docs_zh_CN/getting_started.md
@@ -4,21 +4,22 @@
-- [数据集](#数据集)
-- [使用预训练模型进行推理](#使用预训练模型进行推理)
-  - [测试某个数据集](#测试某个数据集)
-  - [使用高级 API 对视频和帧文件夹进行测试](#使用高级-API-对视频和帧文件夹进行测试)
-- [如何建立模型](#如何建立模型)
-  - [使用基本组件建立模型](#使用基本组件建立模型)
-  - [构建新模型](#构建新模型)
-- [如何训练模型](#如何训练模型)
-  - [推理流水线](#推理流水线)
-  - [训练配置](#训练配置)
-  - [使用单个 GPU 进行训练](#使用单个-GPU-进行训练)
-  - [使用多个 GPU 进行训练](#使用多个-GPU-进行训练)
-  - [使用多台机器进行训练](#使用多台机器进行训练)
-  - [使用单台机器启动多个任务](#使用单台机器启动多个任务)
-- [详细教程](#详细教程)
+- [基础教程](#基础教程)
+  - [数据集](#数据集)
+  - [使用预训练模型进行推理](#使用预训练模型进行推理)
+    - [测试某个数据集](#测试某个数据集)
+    - [使用高级 API 对视频和帧文件夹进行测试](#使用高级-api-对视频和帧文件夹进行测试)
+  - [如何建立模型](#如何建立模型)
+    - [使用基本组件建立模型](#使用基本组件建立模型)
+    - [构建新模型](#构建新模型)
+  - [如何训练模型](#如何训练模型)
+    - [推理流水线](#推理流水线)
+    - [训练配置](#训练配置)
+    - [使用单个 GPU 进行训练](#使用单个-gpu-进行训练)
+    - [使用多个 GPU 进行训练](#使用多个-gpu-进行训练)
+    - [使用多台机器进行训练](#使用多台机器进行训练)
+    - [使用单台机器启动多个任务](#使用单台机器启动多个任务)
+  - [详细教程](#详细教程)
@@ -55,6 +56,9 @@ mmaction2
 MMAction2 提供了一些脚本用于测试数据集(如 Kinetics-400,Something-Something V1&V2,(Multi-)Moments in Time,等),
 并提供了一些高级 API,以便更好地兼容其他项目。
 
+MMAction2 支持仅使用 CPU 进行测试。然而,这样做的速度**非常慢**,用户应仅使用其作为无 GPU 机器上的 debug 手段。
+如需使用 CPU 进行测试,用户需要首先使用命令 `export CUDA_VISIBLE_DEVICES=-1` 禁用机器上的 GPU (如有),然后使用命令 `python tools/test.py {OTHER_ARGS}` 直接调用测试脚本。
+
 ### 测试某个数据集
 
 - [x] 支持单 GPU
@@ -335,6 +339,9 @@ evaluation = dict(interval=5)  # 每 5 个周期进行一次模型评估
 
 根据 [Linear Scaling Rule](https://arxiv.org/abs/1706.02677),当 GPU 数量或每个 GPU 上的视频批大小改变时,用户可根据批大小按比例地调整学习率,如,当 4 GPUs x 2 video/gpu 时,lr=0.01;当 16 GPUs x 4 video/gpu 时,lr=0.08。
 
+MMAction2 支持仅使用 CPU 进行训练。然而,这样做的速度**非常慢**,用户应仅使用其作为无 GPU 机器上的 debug 手段。
+如需使用 CPU 进行训练,用户需要首先使用命令 `export CUDA_VISIBLE_DEVICES=-1` 禁用机器上的 GPU (如有),然后使用命令 `python tools/train.py {OTHER_ARGS}` 直接调用训练脚本。
+
 ### 使用单个 GPU 进行训练
 
 ```shell
diff --git a/mmaction/apis/train.py b/mmaction/apis/train.py
index d5ef788ed2..e4400d67ea 100644
--- a/mmaction/apis/train.py
+++ b/mmaction/apis/train.py
@@ -125,8 +125,7 @@ def train_model(model,
             broadcast_buffers=False,
             find_unused_parameters=find_unused_parameters)
     else:
-        model = MMDataParallel(
-            model.cuda(cfg.gpu_ids[0]), device_ids=cfg.gpu_ids)
+        model = MMDataParallel(model, device_ids=cfg.gpu_ids)
 
     # build runner
     optimizer = build_optimizer(model, cfg.optimizer)
From a4cbb7e3b9f5f1e1a08f5dc42652337f67563b07 Mon Sep 17 00:00:00 2001
From: gengenkai <30782254+gengenkai@users.noreply.github.com>
Date: Wed, 26 Jan 2022 14:52:04 +0800
Subject: [PATCH 341/414] [Docs]Update readme in configs (#1404)

* master
* master 0721
* add README
* 1231 bump_version
* 0125 new readme
* 0125 new readme
* 0126 modify readme
* 0126 modify readme
---
 configs/detection/acrn/README.md           | 56 +++++++++++----------
 configs/detection/ava/README.md            | 54 ++++++++++----------
 configs/detection/lfb/README.md            | 56 +++++++++++----------
 configs/localization/bmn/README.md         | 56 +++++++++++----------
 configs/localization/bsn/README.md         | 32 ++++++------
 configs/localization/ssn/README.md         | 32 ++++++------
 configs/recognition/c3d/README.md          | 36 ++++++++------
 configs/recognition/csn/README.md          | 58 +++++++++-----------
+++++++++++----------- configs/recognition/i3d/README.md | 58 ++++++++++++---------- configs/recognition/omnisource/README.md | 32 ++++++------ configs/recognition/r2plus1d/README.md | 32 ++++++------ configs/recognition/slowfast/README.md | 32 ++++++------ configs/recognition/slowonly/README.md | 32 ++++++------ configs/recognition/tanet/README.md | 30 +++++------ configs/recognition/timesformer/README.md | 34 +++++++------ configs/recognition/tin/README.md | 30 +++++------ configs/recognition/tpn/README.md | 30 +++++------ configs/recognition/trn/README.md | 30 +++++------ configs/recognition/tsm/README.md | 52 +++++++++---------- configs/recognition/tsn/README.md | 34 +++++++------ configs/recognition/x3d/README.md | 34 +++++++------ configs/recognition_audio/resnet/README.md | 30 +++++------ configs/skeleton/2s-agcn/README.md | 32 ++++++------ configs/skeleton/posec3d/README.md | 34 +++++++------ configs/skeleton/stgcn/README.md | 30 +++++------ 25 files changed, 509 insertions(+), 457 deletions(-) diff --git a/configs/detection/acrn/README.md b/configs/detection/acrn/README.md index 4f34bec2c4..cd9ff26f5c 100644 --- a/configs/detection/acrn/README.md +++ b/configs/detection/acrn/README.md @@ -1,5 +1,9 @@ # ACRN +[Actor-centric relation network](https://openaccess.thecvf.com/content_ECCV_2018/html/Chen_Sun_Actor-centric_Relation_Network_ECCV_2018_paper.html) + + + ## Abstract @@ -11,33 +15,7 @@ Current state-of-the-art approaches for spatio-temporal action localization rely

    Rz+$idfc~f)oD!@%4Kh2?Xh-lK9gl^;A+rg@m%wf?++#yJ&MUq?<$Nvh?m%tQ-?5#5B)ivN`Lrqg=`cJl|zmCT}~|JLOE~C zV*s5vk2K;MQK4YU1($CoX`&deAU|9Wy?pjr=fmlO&k0p~(lfj!tC)yPKVYmQqPCFlL-))nTw5*8ENZllX9B+t40Q%GL1}_aZVH-j-ii&NI}I35?1rU?_Hp!7P7Lg3Q+i-b2$xITot z(B{+vvicP#y)R|Nxx?!m*b<6+!>fFqZw^{&t#`Zo$odovCUKvZb$aAo_8LsF9ld;} zPmN3pwDt#5aiCr*8T-27Ap@C*4)S;@Kf^4 z)Yb##M#|K1hQ!qBaAK*0J!Eo+(X3wa%hqr0dOapVFTNc#^9t09V?|{f;?t-^(!*Nu zR3rXJmyvU=!{INwwZmh5S!Tnj)-X^R&AYP&9&;7Vh+{Y@CeV)}yl_K%P-SeVA{nw{ zbd9EKwvI_e(Q~Hg@uyX0QU%M!$Pqb(8e)+910ErbQ$;F#*5r0(xOy1et-vsj~H)d#%)_r}8rjv$N|6+waiUZTrZZ@Zb z?cf#JjQ$ezvIAEXhToEwMh^2YjAQ`2D4Efe=->jV`NlEPH}KCiqWN$bnOg~2Tf;33 zZu_3C`!KrZN$$nxSuAwK5XC^9FAeO_oyF|xp{URnZ_-u((%H$C6WRdsM_b7V9}9#T zBEqwXQJe8*3M@FWD18yIo?uf0%SNrry4chcQV8(9u7f@uq#m}=-YMLeIXgDCKv6~q zYPtJT!G}3o6x_eqs-PJ)gx@#h&P_SN-wRa$`og7)EOuUN=85q*(sF4@n>4V@Pm-W4 zO?1-V#W(<>oFHA3!!>wCc^y~lpt13*sSk-J8CRg>U37$RwaCKhB;y?Wd;kG}mrx9< zmzF8OWt9#qlZi*Yx?Cp7h^Nq~B*00h4GH#StqypFdj&%nX&>{VyKTy@LEp+0T9QzE zkDLKqq-OpiTh${xVW*9CI2+DWwKzX3qEk}Tx60a5Sl&)9NSAJjad(JMRY{Z!ytU!4 zHXG13T)AqnB#-dYM!<-#qzanN+1KK?`pvWZaO|37N{BdFl36g$04aQDPL@?#HNqRht( z9c3kig>-X2m}p+iW~~~FWL6lp4~1Juvl743DFarLajg($(V&TMs1WoNDw!GNS%vt; zR@$LqDom4#aGAHdW5Us| zxLHLYw`%Ve{K*iy-U}5)ODBaLi?z`^kD~;4CwQ!8k|AOH3I-d9uWa$IE#7ZbXM=5W zP~w`&++TPSb6UZgX{r4bf2rPkiM{(k%Mfusw+=8$KkeOErYI71!fjnO;fV<-ykiM~ z2{M#JVPJy%x?@K()=4k$mNkRgu{*lxx7($>>gB`>=o(1=A0)U^h9vF;TU=vKY;F+O zeeOy{pD##@m@+UjvC4T5;m?jfKC?n+TiNUw?}*Yq9Mk}vvAS@>`M5>{ zi}@`2!d>~Vx=2|&j&CvD7`)JBLjIeWy#NTv@e0D`NWGR-P4L{GqnUVS`4S0Ev<52D)HW%3dwCw^MajAe&1a zxWWrN_I=y-rldLW5s}tJQTln;VUL(|ma3QIB@>F*_yH=`!=rK&aj9>a$6}rWXH^E_ zxufI1=qv7^0kEt`B}ukjie=fn=}Kzmx1HF4eso$1E&AJ9GALCIcF^lKIS*u_g! zW3cwC-ymE^DYGMdHv%ajm&$@-{@`l0ZAE84Y%XxxU>L|E6h48dhvoAXnyuA&nt%%h zJA^7kIQ$hhrF45Q7Vs2*Y6yj?>EbYGosZr%OaMbbyubF}Uqb!GSKdRyadrG7#Le9O z?0|yTr@kmG;EXyP_egh$>N3}6A0l=R9?fWc!x~{gau$)-&;GtrwJHWX{6zM~FXZ&e&IDNnR$?_%PYpHjKervYx)D)v2oCdaCewi!oUG1rCUd#>2ZY+rH z!olr85K)8?%sX9Z?l4)+dn#SeWIH%Cx|kuT)GMj{AX^0m_wnr$C?5@Bw8IQ$sw>{| zvx8q|FqwbW`|>-T914F4svgsYSLmKSZBw<3%Jbazsnv^1EA;KW#1H#T${`S9P~k}7 za%m|WmeDYx4?K(T8gDmEWMGcz<6E{@nt@T~XNFJf?@tHeESM>W^Lag}0X$u*5meeN z@-=bz69t>G$cW>8>U-DW5h=p*2jWsH{VW7)h)7xKoaoyuW&Rvxk+McW5{i|^0FyfK4*diIhq!^Fao63TGP>}W-o#F{++!NmnW z8qpR+)T^R*D;xFhovF5Lq9nba41_GJXg3rqRoqjiQ(P(HWKR zFrBFH`ev(lkQL|iGy#8;L6_RV@Oc>OTy$4#dMwk&HH?Y=vrn+J*td_laLT-I=@p&~ zCW=Vl-lOc}E%#P&3OB3Fm#qeX^rnP*aR09$U$sX-XMN%(=ZG2*nAJ^;g?1xb^{{Yx z+vm2hqG-A=N?9A3={>Tw(mG>INh4~?|Gzs!Ul2~bZrW?=)H3E1JwVO$0tQMfKP}hV z18MasUWzY@-+K*SkLq55r1&)mXFkFcx}g%KDLM+~j>Cuxh^VM>5)&IvlEfVS%3kL- z*DM{mZHX)i86bm2Z#I*Rd3dAuY{YcEI}c-K1N5$dd|H>)e}=w2qUp{15Av{~4b?^kt`&#=c%EBit&KptyN!;{<`S7HpN`Lr zFrExzYcvV>AIDEx6DM-Zrui|uU4L|9ls!(% z;8vN!3Q!#4cch!ncj-ua=106;!KX_$ejDoOpkPT1`5C7i{7`hcm^LW0Od3rQ&^Bqb3v{;-B_c9B zep{22bu<8${UiUFPbvuaGnC!Ob|=Ju`-QQ9Y3CG{i^6_V1v(lj!l<@fc$y5>@xcDJ zwdPRkhEHZeEuIBINmMDYcxY8uoxgn3d@&tVY`4Qp+S z7QT>0h0mFz_nI?MIdCu+xY4dKrhk&Z7lbf-N&VdKH`N;=k~3E86lG@D1rmoDE`oTc zw9sLZtn)(xo^WkjSI?J3JRyU8Z}Kxx0c?)toHE+Ebo6=5|I*;G6j(|!TdD6S3qaQ5){Y%EnR|~JIQUWSIm85^-k5f(TXmuc zH<0Aqtc0m7H$&=dtgEQEZgcVC#^hB!Mqq+aQy)oPK5Z$dt;a_~6u2x1-)-iQX z;vK>C0g)}Tx8RWw3v(V^`XC&oYmfU%tOyL4m)Z;cKrEc5ErD0kq$J*FT{f@wPDj!evp!DOxt#H zZi}D!L6eBpfX0vYJPCyfA4&^9i+fbU+&d67AJJ(4xQc&Kl3>v|8Si-6#AbiHCuwf8 zUCThd?vkyG)l_T28|EPpzeIzM1-6G-4vf3=B9vl}%&uCph;@<7oSjsmE9<*Q+XvLB`};%$;W1Zs8`Q`pWyEAm*gSIIuvL^a%>=lz)@8Fd5Ex&w;b3ZVH{uBroft%fklOFtX z0IZ3sa+2o2**jsB(JIN_3Wo3Z)jw@F1~N>SoB*6D^cm;4cF!AP3sT8#%Y3LJ0$*@V*QU}Mc@e^~z5$fZAi&I0yvZu}HZMTw+HWCmSBKT_s~pZ{sC-2A^Ac+X54N<2 zO9<1}oJeBbi$>j8pDvxS%!vM`c{gnuH~vBiT&)i 
z&XVy_rk(?ehVM7f1RbS!vl6n{z|jT4eYEK3te)m{h+%a&T!kw_i0dJ$Rt>sW9b&r& zD+{YIL2qn|Xq%K5ZZ;BABqas>a|X^e*)R`5_Ndr({`_{Oon0b2zza9X*LyY4RLvBA z+`C6F<S@K6Y`Ad~RD6H&)CaIi|8{V-fBzpYyQ4P!}D?#{c8S4yP?9-Rb7G^8XZ#TOzK z(CRW}I6wRk0&_v{X~@Kb9GxQBH(>nHZo}0Gi_6$flURBGU#Hp}^i>FvgrQ{jPxcOV zRQMgIV0B7Y&*PH3c&ICV2p&*%BXLhv(hV9dYqswvE2=jWNm%1!;wt@tf~S`(50Fak zxj|k@c`E@5|1Tg(Ud_F2+`cyiQ+Y=-(5xBdS)vz)k}zk~VS1+kG<64 zEiv*%_YM4T4o9uO`ku_%-+h|pOf}LUaP^;y19X@H<#yh4Z2?5fbWzTP4T3<=_Ltw_ z3=rFFeumf+kpz(8?e@VJrlIzQ^UI%?a+y1OevRw8HxQSO8VirXRx+I1AA-S-MRBtO zZNEIHV#GMHjxrjvCP~`K2UI!xr_|J-u1n#pr<0zUNINj!jf5b3TCbaI<>oKUJE!^ zwg9c+Oo{!&tJN-%bcK~VBt|O*J@(ZyoGL+aVAxZgN^W}B4Kd8yPZ&m8!G8y3WooIR;2$XltGP&l}_*QdA+ zo9;InHCL*z&vZBSp6@cshm>A71wwn$vBYb`RFi1x7TRvCNE=JVv1eau|3!X1b^H9y zZRuRYDU6V0J{^Bi+l8CZ>k!1|DWtiDlz)+`*P!yAe{Y)0KkPKW6!Ky3v>VP=dLQsY zKDGmYH|iF0ndS}P3VZ1#~*rA^2Z_C55>_I!z2v9e+1mZ!hGy^pS^p@i({Dtl(MP-_c`< z$@2O5KpR^4gvgs5xXULvn2<2^*^$g;#;D83DBi<1s9pb*KoaNx$uES$lRMXFp0iuY zRn~JpkGcOJkw>#bu$edN(8O>ujB-G$^Zh@m<>{xKOTP@bb?LJ3aDARBIwzi3*X(y7 zFnSz<=nQ#&sSz~Ke>1kffV;Q;%E3WB1=$yB`TQbDDUShsjzAtxnDO|sDq zQfvo)9#uoPP2n4?Pez~sjN1^Wwa3&zAz4v`eO_ZI}460$w zq2GvnU(St2nze|ojjYGtwn4=)(V%9a#Npwr;-VAQNJW_fnC@EU_imayEAt6AOJ92R z+s%;olop_njaqEr!&CXQu0APHDJYZ~8$}qvfSydg2xW2P!;mcCQ|^tkN5k6nr4kE7 zRjjXoCc!Br{j{*MZ{jpP1)g<_@$T*Kdkh}V{iu<`rdx`mMW)~?iG!}A%9_uQkMk~C zPC_PaYDx*+uT5KyGmzQioGU!RkyFcv#%Y9po-j5P zoL15v_{w4dc!y<`D{cgvucwrXrrq&-eC0c1M$E-Tz@@Lh>$*QN@BqaKCBtB3U8*fgU8=VcQ7dX0jF5Ts??HWu>B&Y>>gP}D_ML%css!3 z6)XA)%d(og!9ldOmahVm#3ys~5-GfRr7jik#>p$G!X?&apKtO0XsR^{P>bXOMB!jZ zL<{B|ex`V6uBkWaOSp$dbL^zmz$+gYn?f7m{P4B{%kT7UPAxs&dnd|Nb4W28SNo{>3 zPBW;}Fr8n%rtt-4QFTT(K59PMYC;BA)4#IdVOJ{^g($r4lyWtGw`F#?o>p{*#e_W-c(*ytt{l=_@z76Km8~V%jCKb+rE2IE@ zeRrGYdT}+okHrO2Z{S}-`R+UwO+2ocp;_Fsg@kgjUM4D8>%S=(hQ^!6x_#~2!zP<~gK2)L(~fE6 z7auyUQ4VbHMdq3TpgnmJR8#s83Wx2>j>48c4n|21yrk}?FZvolEZzGIP; z`>GX`)6S#b!7a$)q#*IHh3eqK3h;9%OhhG~Nc!gbGWm%E-xRA_Wgl85+pU;b!;jEK z30-xdFQ6UZip`fYn+1Yny|yWzy^KBeFl?@fs0GhzX?)TAJa@RHrj;V(7>RaA#-*6u z6_5!{kz>i}32veECu62aoR*l&?p&xqH}0D_;9dK^I4gEYlZ%0kB^oIx=&4o&gF_or=R`(W!L?j;58gld+cO`ojaM8hCNu|k&YvlEEbHu>g$XLr)KTU+ zeyBHrs0~6aTESGMlD5f(nX_RwtDy4Nl2k*$KMT@q2DEZ)0G$8|z4jr+rboamc1}GB z#vhqvdVCP}^BgI7Hs+zgfutk_5UJ_cpg1p$%h!fm=O$WXgRv#xiBTUeM?h}Cexv+K z<4<^9up!-wUooQTABrdvrAq7)DSE^(cx)(R4qo|@RT|sAh%a|2s8#!DAc(txP<8gW zvp=S?q&wz)N?17PxpZ()!C$J`JdF2c=7xfu(zXazOH~V*+7LmQszqfIp2%j$a2sjs z4_0(S{W}EzpwMcE@%ewDUNqO{a}(qk z<+54mWDS!EY`Dvmv%1}5tJ^kxV#=57^MAPethhSrfC)hXVv}D>u*&Y*M=P8O)QP5- zf|t|uL^!lIfG+-NgnCrf`G-HRZrDgdUr)gp9JJ(v5o_ZfUd*vSPg2Ylbf6D2x={g_G_4HO= zpmIRI2&-ZhPwAJoUq6I^R2Jl;!!V;vc0SFmukhlB(t6Ka-jCW)H)`JJn6}`v%Av>?yA+}2W6s?~HGi9{!;8wDv^_i&^cBROz&Bz0E( zM0wgSQk0$ag=0r!?78;6%-6*)|xHV|U9 zZzd4g|H@v)4o;adUfJ7vQfEG%0G_&$zqrMNjUB^w(KH;cZx$w*Wl)%sfpCKwDnCFt z)|7K$d7rF?kA`94#%r0H41S=ImmhONA|+z<54%ldW&m6Imz|AC#7+-Pn_zu5pk|!& zCj}e2x$We$J}7-!m*lVC))U{1C5BoBnMmrNn@9~>RfH2;H`QP%F)#-%B_a~aW1Su9 zk%`L6BR$y%R%)Eu_rVc_^&??5IQdqD@qdb0aPP1304%|Q7e!JlhhXn38?m<0f?%zZ z2$Me#asVG~``l9EgV4@RF9!KvZ7kVPz`Pi7?nzy*)7TUiw0*z9(=dYT`(v+}9};2b zd;a}_K-8(@n`X1yqBfGWrc2X(~3 zIl?%juW;7Un+1YXUW}sdA_*>w6ZtwBH$w+Y!Wq#;SbET34J+TfbFt>8SRat^srO?I zuS9g$rUEX6_#MF#dJPmIt3AOKf2~IPARKy1ka1S<5OVbxQWRulH6cwJ8>U$C7Hgza{A*Jk)szki1`0qlk~LyHeRhL#7;L4F(D)Z6Qoq5)_#j*NM?*^VZAY-i|l zg^d|zg3C=c?}-}gHMRuihk3vfWmMG*5^pqN)eEw-T@Bp3!}0GMmpdH}AR*qNGU2<+-o z7naGnj_p+KZr?sS!AavI9OPQ!D;spV~anZSOvDpSi_9-&M^X+m2P%RyT zJc4hv#)Ire(pvGs4OoZkSK>fd7V<(ED)&4()onT7(rZd6;-nDM$=6Ty^GsH<)K9Ma z2Kx-7l%274=G*B_q(-gfBy|6O)fp zkc~V~OkKQ4;%T;^kaJ(Y4gxnf2!O9DeoVSLmJkOdF)lVKxWy}fp{LVmWlDPcYaRe! 
z8PDQxtO2A~kf?}Gx$2d|=l7^F$PO#Svot8a2;(t8zG_X*X7FrVz5zOOLaLq>hzf{E z;Mkf$;8J_E!naqjfu%zgBUb9z9-Nu;35j`C`Z;I3F>RY7B)1VQys^C8X&a@s~2# zvXKKoj&-F4ZeOo-w0YnxZfVk|90AhWt{!2jB8f8jB@=x3fj1K;QaGTjv;xHB;gR!= z%#*2D(CCHc2U`sD4wT1%-TIimI|FFl6OWg=Q^ICBkEki!;*G!_KY(gsFlrgQMAj>g zDszuHOxvO8UX6Di^(D0SQ4w}YvOM+~_l11kv>gVim!@f>*BWr!EnRZ$!U++qR3g{C z<2YwT!+g^QVeG@I>oNRJHCKfX8G9xAda>7qq4fHE!+QPKO zuuE>B;-zoS(a`%ur7vB_C1J39!6`wD>$s*x2TL6M%hhMdiC9&|?-_wHhahkW%bwPf zMT{4$j^3l<*QSFql9PJCSh}GOh_m2U?1G#%K_pvb{Ktlq^l}b*_C(QP8N#W`VK#DM zXvUZ{Hz*qk^hp2g0+`&oCR8gd!>kpv2RY{mT&MtEWpVY?c>8&IDs-MP6;jrJ8zT`P zG0x($W@#fsW6+4P9=Kv%dFI<4ca3w^69EnwdE?>0rwxq$QYy0HLMxU&Lh_!nPr*gV zJZ*CZKY*gCwL^~`#3D9_D76xH2&0UlfzIqCuW$at|}~wm`~H1Trx&x zjgz7GKIyBrI;JPYk^Qh}xB)~@sh2@+?Sn{o4Js^pO{!f2towthwK>~Qo^lZrdgz{PFvG=iMX{>mN%b zP8d`1g|Vz!6nW6V6@S#f(ctW=%tn*&P!+J1s1EGV{0{y#4{Gz2OCZZ%s+Ir^9|^Fr zPB~F6$2@tKLG3|uhzira)SDRMx;MzGD!h`>ht3aZlZ}gf;0Dd}>Lh~>>jrQ>uU}WH z9ugO_UZc@^pRPJmIa55PC1sxwxU?%xrW)sFV&1)$W5`0SN|xOZa4F}?@Chr)xGK=b zX6PQX#LH#UyS%{fR1|=0%~FG5G0H)DCC@n)6z1)F0fSg6)uSjdSs`37Ww!jlc1W2% zGYe_6Vb&YnuPb=?v|z!ARlR}NC{i~$^|6CtT&CjGadk<1#g13R?N2sm&$+BDl*3Z= zbCw*0EhNaQnhY5u(%uZ5Byu5Ip8X%F+@%n@;CPihf?HM$=2_2Ga{0oX2HHuwBPq%K zW93mu4wU}%_3@Lr)LhLGf?&u*zxK_v`(C=}(9ayNBzC^Ds|)vRR&JlsVUO-TShDB# zv|A`zh%k;dCxA1Cawk&K^wBO<2AhaQrUX8FLjM8%#YG#dGpG+-PL@x8)JFecLBAFP zWI!M<;NXbV(Qe@5dhXdYizhRMuO6bIMaYHX*eSbb=;4_f@>UrJ76dN@G4)y(7#1zF za0>Mf5W2{aqLO+47*v0oN{v*Zb(;I8~xT#%GPLY^`tu_qkLJNhFL+ zEY)=B{aJOt3+*pUZh4Ix7%1`~Al;HWEKNxt!IslWvK}YjS%eMTsgu2mLQ3^3-`LpL zrvK50hjedOtg?0L3Ch1dbHt{4{kL|77@N(u-vk^giA)nKsvMq5%}(wkB93u~8~i&} zyxux{j~3dX#hhU@YPPtWSxi(HL6!@H+v3Ri2nfTO`WO7pT%cOSWLR=kmb#TbN=&zZ zlgm_HJ8|t|TB}KLP)dFt5RyA^KabE|2^6UI8_CU}wV=|W2TdAA2(9HZS`5VWuPkB# zi7(71HYRorGc*WhHv$}$501{1VzoyoJzJU#>*CMqro%FzR{Q&k6vj!e(k0n7xjke$ zg=pX58mh1pWrhf$y#yHpE{>e8__Xg_eJiqRLTz{93Jce5NrBR9oww7152VrD&~&sP zJVVqfSK~@=oN*>N+;uJHq4js`xMh&EP@nDY<`$(+dZOR`(mN=LV@tL1??}XKfW*O# zWOzMV3B}+nwsZ5T^EPcw>s3W{^44SnLd=;U{(vL$cMf|ECD_Sx^+Auwr!$z}ZoW5o zVOkUYv#d}@J-O#fMFC-|Q%_`IbK0BeOn)%zVkSvdwjxJ-$WV=X*>Tu`klO@9ey^*WI)cZ zx$c3t5iiW)lh(E1Y#BHHwT*(9xLW(fJOMf%a&RnD#l z9QIYh`*5FER={R=pzERkgu7(Ix~NtKAsp7%ps0wxPHwaLa9R^~`dLpWe+;K(1?VrY zwSVwd^Xynzq$ZpQKUZ~pX#eR!G0PE0Dg~>%)^iZ_%FdKLQozk}kJt)8En)onND(zzR7T zC>6vepy`Nx0WK~;p(rt9I(a8VeAuCn?C{F8+j4%}&CZWX3&}U;P!3A)!O@ww%K<6z zXQE+^&Hp#azGY@{2{kD1aZxGhsJbpLOrg~Ur}1w-IGO;P5Fr~aHG+y8=UPnY1BRtg z=88`Qo&{}4Y8wlDbSLX;Tb97Ps%$mZS!qW<{k*XSWI`%Nbglw9uY0Wu`8_#GN`t=w znFZ)-$dM$qy+&lN(P7%QF`Y!tUJ}CXAvU=6Ri{))R~x7m<6~I_IGFkAAUc~)h7CcD zczSceFjHTowU$e=3{TBbu9C44-{yj>^X%m#fvNHN!aT75-u;8IX(4AU!h~F(fQ_G; zH4@pl1R(%ZEWU%UowMQyjd8i`@w>x`Uq&u@9`oXkCO75jbB#-uEOa;l%q+WyWgw*$ z>>EM_lD8j{#L$YN5Kzxou#)YIsK_YCddt(t(8`$1Kc)_`88`stp;Qlm=&J`@XGSJ~ z7WmU?$hbhvVZhD=UWjLbCrm>gYgs#y>G@uZM;YzuldkWx(HepR7lV8Eux4j4CwXqx zb$p4^(lweE(e#L4nE-PsN1EFnSu_^0rC+GRVidT2%CkZ|KV9T|wO4{k?9 zl3g6wdIxwUP*cACeSm#$l-Ba*pS7CSsaYAcOeCq;Hn}ykbxR;tWPZtkv(F)hd1ykcv<`F{hYUYkd>JKT8BOpU1$)BM_h>N&s|7P`dvFAF zEg~IicDK046bOQ?G)F}6J9&0i;hVFRH<@}dCp(ju8K46u4w&lgAF?C2QmhMtiirUG zDJ8*XI-cP$i;hF~jxnyWJZ+JCItiUm-y$(5I_t>$xCz%n*hz4SiB1hbLg|oEmp0Hm z-uGpw?+Zn+9)Q#KC^Eh0eylq#h=vqCDFdn&aTFo`)Ev8j>y5Je=Yd@hosRA6`bl-bJ^&)KUwt-hsC3=nd5&KDc_9cm=f1u zalsKJS+EsGFLf4kTb<7A_QpWL&wm{abx&fky4p*cUw;FnDRC;b?p2-)dQNp>1p#DV|VQl^ZJn(7;W! 
zfycvxZ*|F*KyRUXi=FQ8uTl$@h&vhzGm2GM`hG;jlNL+)~do2d2QB^ai@-4)CR|lw~%qj~qhwlW3 z^Ja-DD>q6yHEICBjllIV2l&ZmBB13wv$vR?!J)8f)%;=%_v4!WQ$bcEsrwSQCH562 z(Th1#j!Ue35$%uXB}iARFL#aTBSKixEcy|aG%vI$dc=nv>@~9glZ5io5LPxxD>&IF z*zMYXd&ppYHw{iP%q2QpTfIqBJd6-etoU32OrQ31B3TQOf}IgI zDuC5OXoe5EQ&y+Pd?P8*z15a*gmGcUIL!x^y>vjjA@VP0B0(pOe2Y^LS+HCWKp37=#XTGx* zImO`Y{wrT?2aTjY(Xl%syq+|Mfg~2r`}?3G7RT8|n{*KII4Jra!zE9{o95SfT7sp>#yb-yk5#ob`!|^L=j34%~MrhqW z1ahPVk_0e8dvwKI+{Q)cm8x>Eq%I9902CXX+wE`0m84v{%bFg)kf>Xt~ ztE_oW&Fl?qY=DtPg*zPqLDF|K|71}?s{Orq!6(5bub1Mab+4`np3=w)+=e>XIye8~ zXdd8ro%yg?MkWurtildPB)!ckPIv9aq--zydoEw`&EtzO%9~r6hB&$0#w^g!hUA!X zSn#>PWjV@A4~j~r85VrBW6j~8cZOB)+D7GLMK8z9AqSy!g(0Zs50pi8{qdSN1^&{x z66H6j&%S*^oD{~7uFKB9onh?0cG5&IAEY9#h;6LpMK! zoe+wOHrvV3T)n0xK%+?M{Vc2a=XiroqL4!|Mc1TESt(oX@{+qZ51%Kk_r#YPRHy8Y z%#$cB>#13>07b*@Hz0_bLK=hw)O=!7<5%L7$)cEom}tB%k&Y)JCfLu%NK28CUy~t0 zzZ0ZwuF6^;Og=BDtEgPT)53?-imRnyR|^C;|3s(~wqoP6RRO)2a^tg};?Nz;ht_K2 z%>%!>`NRY=+&IY1$$0N`&$>}nC%41P0$DUXD3K8gcdZL7$SZw#0 z3+hUkqla=pt)2mjgfNnUSOX%t?i$2ou{&m+$>iuqTLN6DdzlW_vQo_DqO{);hS{wa z&K<}c2!wJ}kdJxI`CW1YlSS>|4?hk$425XMMi)1QtgTKDm{tu_07!FpTJ}-b1LCI+ zLiCPoas=9}|0_0V6EQqaAB&2F^j1|S9_FF6Zq)QGF>7Y9q$SoL9Zn9rMf!2#Xd5}! zr9Lo#cFkc33PfN-BHQrf&fIZ}#KaeN(#_ab4^$-m0$9Lq3GL^dAzunkHa}j_51;AMDMxx}&=myJk*aFxgPOY9n}DtNzeTW8c*0W%D_}sn7Dyzya(pG!`^%} z*0Ai=0$y<2uZ6Hh0z5y3SpWkg$F7A{Hm87kz5mJ)CLfW^#r03`wInIUConGOD!U{q zIL?|p$r%K}2mN;B51y#WVtW;Rg&&c(oPo?Ia~VGj34-vVE3L0Rblc=;YQjd@nai-7d~4gj!e?+aRVKb? zW&xKwYaMFRv)^6nU+&f_XN*QeN@&xiBWn>>=WV01%Uyc+=W4j`3~NZ@@KYBT^hVN#s%@ zJqk52mQQQ-anC~OEx%eB>Uy0{jOH+l+6a3=;(f|82y~ner!nR?HtMzDQ86+icjCG3 zLhw;jfG1-=`87N|;T}I#ondaQE(7oB7V1b&%xqC@hR)dcTVSCog?z-)iK-grx#xzd z1zkD#8~N`3TF%pNfRB!0Bv%v4RbmR{-OH2pC+*D=6Yove5-?D-o)G-Tl_^oTSV8J5 zpO!Ziukc0(S3Q*j)}#k7D8#UAQed#zsZ0?%4Arz`SkbN3h9#*uVH}NV?f)^oOe}rgR*f=NW z9q8<&pwvN>iuy>tV40z^3^S(bx^NMV&g;wwHapgY9vN{#r6b}ey=gdm6!zPbEqlyd zvg4p{YxhgEqM#*wj8qO1=A$w>7|y&;7S}rtP_tL-vl;JhNo2!KTMUlbMc;WN5bgn) zzvzh)Urr2wMxb9C8RJesr?^e~7Y3;;Q`SN0=L1582DF*(Z|2(#tzps??OoKJyrqM- zn%Q~ihc96aqf?1vhI|orOyQD^*W`oPF65UtDZ_sf7t>MAv661e;< zI=meEp6l*b)9ir;^mh{08cXfc&}mfzdM-(|{o+F!6wce5KjtMXtripY&LCpyutpOY z{6Ug)!Rk7Lo9>B$OY!v|ghF5T7wsTa;f-ydM`*ET%|1FK&1QTHZ(>ma19x-v`Q%5RAl z3w+$pk0!uNbBb9-5tiM%zjfqmFS!(_Sj@H8Tbr&pH^KRVv^A}P`*8Fel(Pb^FA}CYxj2!XAO;dT znIk(1=QDLcD|>3bI5Xez&#Z~q>Xa?6A-){@B%N9KR=lW?Dwm+t(3dC6*yVKKh`cy_ zoG`Nt|2RA_IK^4L21El((NZCK9;pt{$c=x!Ww29Wsa@ z>McFwsrj#@*@6D3v!a3oY6Q?o$3g)gzghwYkIeX=N%cp6- z(}>VL+I!1jQL^|LeY7;la+{K#6Nfv!8ezm7W&O{K`wRx$-L0Of`LHDF(AW1W$>q6x2Mo}Utcj6vfn)I54i)&ISNU1gcsb1^ri8gYM% zI6uE3;(nj!IT94sU<_x00vHgdYqkqrDi}y%FTO8wQXenB=jv|BajB^i{OLIc#S3S@ zC!GLO0oD=cls0_H)GWo*01i12M@%L`C9Bdly=`%zZHs0iM+bkXbzf!V@8ldN0E8Mr zbw{&d0oQfRP|qkb`*o|+cm@&;B_n>f?i*T(MG`Q1jhGz$SyNKEqP#6sQl)Cf_5cN$ zmG!Gm2IvgMgf%kzuj4&CY&Ji!R_O)`ol#kLzuDjkr0~ zuO$Zj4+*(8#^ECif#PE}{&b+rZ-`NFY&-|;%tmp**8&IOSw3{iGZn@=9Ea5<43hPq z5gTnKhr}DF@B!zCAY(M>OioM@Cu2Z@$|#{nFmaFx5j$*QDQPTxF#<;eAuENk*Jq#v zD}|_=uHy;2U#)Ii~^)gjz2pT}qfo>l{#Pdxj)>4RyS zsPK!TLbq_+nG^h4CRoyxafy~xgA{L6O+v>v$_M#vyTp>4+ig9Z7sIKO)iPrNzfEw| zI1Ui4DW8{n50C#*0RdQ#)@DcFxhWqBP>uM4Fqb~!o?*#{v!gml5-wK8Q$r0Z)?h{y zWs_6%BouL=dgLI)gB|@Er}3|IxlbgL3fg?Cdk4w>FC;JX*a9{cQ2W5VSpKm^!;_bK zR)L{hEU%l^u^w}FHBNUiApF(^(D#Qguq;JeD#EUDiER=QJ$AF-x;;M#v$n1!2#78I z+H^3bouW^Fjkf{8KIjy~NJ5P9{7^(Ct3+j#(1qS5JYl(LqZR&YBmc}#M5Dr_4`zgd zZ0-0amj9rMlPZZ*{-Z~gCZiQ%0Bnlq?U!1)-QXk^Fu@ZewcnZ@PJI3*aUk}|VrDJh znN<<93c#8{i-v7)pmJ)G855qk*27l2zfHv`CaWM_lg zX5P+KEqIFBV#IjK59B$V3=oPdz(hYVc#Rq|H^`%L(M3G!Xpo;llWr60fPyC46#NJ; zwagcxHnANxnO!Em@(P)KNia(g342LB`!$iITl1PaA`;8ZW_fA+#vYF1sjcw8LikWl 
zLhU}sbG|PlVgmhhhXk2xsFhZa6~(Ljc_G@n&>i868VC;sw#(8C54fPiV3fCoyC>N= zmJrlT;{0c-Z1VRO67@Ym{+NmC>5WACpL8!d!bpwpCQk-|qX`^-HGP?c7u=IGPs^69 z<9?xU!49ATG5u#i6D4+V7e5Vc3%Bn&MD%7M&@7h)QfF-K#Et%4EN!n>o8zS&feyM< zK|wi)kIW>`k@9Z!_@9(S2?xVFb6N9bn3{5Txv4KMuWLGdESSdrPhtC-G-6jK9mX+S#>~bLUT)UL#mPUSbi+ z9TFAdIRhUwNj!MI3C^$b4H8*?D9fKY_G2)xgm>G36tA%ndCmU>zC{7v3m^RXYcE`D zF&a_qicWB5-GL-gfj+haDrXMH^Yr$|11pvmL8_JpWR-9nz_iKt^Qzpflq}fg$-G%?G(ysM58u)FcW=sgj{Ysviiv^$UKX_|ORB}+d z&COno+w!&w2W?!``D_k5V3TK(e@s;FP9mw6)reg+t)i|zOavC#-tv30+g-9pgU*GhJGHG(0kyB|-kCcU7-T7@0?J0a!C3DJE`*A}+ zwHE*PIIdm7?;$^8b2h>m3cdm!N3+(B5iVb(6V1|@9Ms!by>3Ai0!k~20mXjL!NS>k z;NVWpSn-=i_cEUn30Q~IEx)Zh6?;HfqlZl^$X>ehHhG2SV&WL%Zx9a1eP6wK zyvh*|`G`X1AJy_Av*sPctK-AY%tD)_n;YZl63Td2U6fV6NmpFaNhvW}G z)x;Vw$uD|sXudawc=Bm!Jqr{FNl6Y zxXuMOosJ#oQ;6lyTbYbR$a`S&e7hQJqlhR1L-)qXaa0aq)+~!YbZNJ(UC7_{s$d~3 zK>$BMz`t^=S>its$=wK`?wTBonZ=j_I#oCIX)zNtZIT4a9vk^r|M0Q?Cn%4Exj2^_J41yc z0CI}NytgCVApqp>sl^0v+n?!a3QbANZWq7}(U+@3$;vB;R>r-b_Pa*-R_z^V{${1W zy%AT?|1bz4ijK?8)eJI5!%7K&GLy;u=AuxT}?J2k3)YHB+yHgnqWIcgUpjyS6K z%mQQjqJ+fn5ebspqZP$DxtFx@iPRin-i zFFP7fE0vrZ;gMDk&~>V3BX+esp9zVeO8D^vBkQ3}1d;KJ%Ke_lZg zM-eo2ml_E2LLNiVlSV2m3+w!>tPG=p(Tf-P=g|a?2sjFdv)kr)bV(E~;wSIhz+OYg zC!J-p`N#MMKAuWC&AUMRaCIaGjiiI^6LX2Kf}=O|s)+V8_vVS&cUvROG|kX=S>mkG z=hD`v442IPIY{8FPHpZSt;Qy`#R*QEb>toST9UBaYDnRdf?M36F~Vi6L`s^=AveCQ z=wBi-RO7+s8Sf%^AAl*+#2*-G3Uf-W0u5r?mw1oIC{iY7Zj`t08c(fN`Qg-oKNSH= z>@i=1SSbk@;byA$KF_Y1_k3laXLaQrYHy!~odn3=$F9C(a)(mA^(BDLuw+x(NsV&? znouAwAaqefFFD21W~KicTDof^8~%(UR&uuxmc0prD3+67Jc3d(RO8o&*b{t4ZKADF zY;x-e*gefM-_uz#-)S=D8;!#F3_HBW{Umv0ZdiaVi+$i}#@hqoF|LTq`Bhd%bF?rp zVC|#dA#k{0xt{|ig^2;PmEzLF;@%d@JY3DUf^wN|I=wOR;lD~&4r>~cR}BDCM3TV) za#svxW&oSHvYl8z{EN^|94oMQ9E)KRS}i2b^ItVG-=IARqnrNdW)`#6Hb}sn2(~Im zR_J)d41?7^jpuw6((2GPjqZhg$_(7*w!=KI)P1TLh1V{GpRcIGN%8TQ&z(fg`&)&X zfw_wlP*3$Rz^^2CA<*rQI@64{!OXqB?jqVtKU$yH=>vMpQl%@5E>}Z#ki$ofQ6=yi z#cVZc9@}?KCt>@2{gF~E2M)Z7_L!=YG31v;lmM3>x%Nq?hMArc=TFlN!)@>K@0`q4 zijZhSdS`f6)$*4CZYAFx_8Ph(3h{4L3?O;_L2^?-3mCi=MbNoj!6Du7+18cW*IyS+R?VHLcs9 zy_Fv+JizDD{kKPElSm%gg^#^zt4FJHNzn;34%->(QABCgejdUZgHt9xu zXlIJc=R5%(f@GSB8d<(-%+{k=z!e|e$QU?4@B=|4`#hQtq>|*1x7>TT9tiwH1|{GW z9-^xY#j?w@p(CVlJ5=oM;+Z0g@FgYBrn04>SVjjx;y#2F{|1mTxMP9ID)|1cqzjWIN&oM%q;9w5A?F|k z*G5!u-}P#BUBve;2hsJusE#aNcxJ@&LK;%3R7N5%PJ6`S%8#FY5e)QA=-}7&KExUC)N{3xCI6YrMJo)1{hF&7 zc8R4}F#A&*wU-~#=5XXcVBlj zCDAaO%YnzVPh2w~+)=xn{pWGt0mnmx-o$~a5GqN>;eWvT^43vVj$Tk5|6mrdBBxCa z8I_zO&sv)FGu}X;wyfDHz1YWXBwU)i&N-9};HjDJo>w+CQ@%v`O{pY`2BAo3bZttC z5qVHZ080r4F3NO#c9kY4^dleTL?O7+VcY=Skd>&-MW>sQvR(leKPRa!R8BtDGnj6P zIio~QIn>8i-4_7VKa{btK0v+ecMhm!0A_sSjwYaDSUTP$p#C7_k)_GaX9>y+UZ9U` zj7hy5F}Pd=BgH>I%fi#)VgNGAFy$>SUg)P~dVP+*V7nH!$Fzqwre5vt&tFuFer&ew~_)FRg7rQ7^m)MYb zsnVz1a#pVRmsXEKV*XTAmo)Y4u(Z!BWa%+DX1)WpVj7Ivb2AP|?`q}38I^kVG&oxwH}XW?R)TA^El_QxES-<>0PasE6;H(iKN zOMxy=$t|>jC;)W}FXv%lp@PY7O;4v!B zBq*s*0mualMPep&W?dST+-o4#hLi^jF*5O@&0$Sg!ciNAUIcotO$T@_M>NRx7GS8z zPjc=@gdO`#{XE6jFq-H3qv(%+JL3_&PsMjkA^p%oJmD z7{=zAGa!a4?8|ece}-c?))A;cDp1?X$pk~%s(degD^Cf=PYz!bO*O-GzWLmj!ZkWJ z@ATI~j{Sdt&v(Hk6Z?cN(Xf%>}&#XO#KZ7U*jdCoZ?2BV<-` z^lk%0?J631$3N`-J`AR`ebu;GLK1HyNI4*Len;F$^R6qG9mN54w8S7Eb)&w0bASzVG@jeN`8qiTx!1ARII?c$ zJ%E=)!Z=H<$U9kK+8}Xm8LgZgSbh67Nz@2^$58qB=`0yalLhW_3Rl0rW1|YkePKQB zCIXR?|3Q_TB~reX4?D3dI-ab_sLGJ8j(MTj+sSN+X(_hUzIoyc=9yA;1XD)RQL|R0 z?J|Z|X*g8A5^{?w*=8(f>ZY)`G2LK1MB4L4pU+Wr-*x?zVV9;8en%tY4*1bvONaT1 zG%W_f<%+x9x3-I{L# zAvLM~Hb5oJPSV&yVN;VP4&^f=gv+af0xF|6-yc+&qa=@*+6)9AYQl$HIzq?Wa)TvD zFKV_hn$zpx!?RXfCZRaezSG8UC?8Eby`F-SPX%1ap~p)?Th}wDL!m4~a|yI!1$<%J zr#4U(a@0lk^Yt2I9IhJt&nB~(v9Z`3I+|pV0km$$+e{o(&(xaO-(V&rE#R<%x)q^Z 
z-P;vez#-j)YcQ^%gn_9YTpM#Q^ZNvFsEfrm1t6t1sbk`@F~U#2t!LUBZHk3`*PXoz zsu}^DBWOJamzbLRAAjR`NfhWJFOS=Zk3+RHmc0J}qLUBuI=QP!%|`sUqb?@PaoVxT zZ)dWv=WvW#*!k^IVZojwl7FO&KyIM*;^liCU_KfH3R@CDf=9PAw@UHY)c>_&gl^gG zJ-T<)fj=^XEV?Iha)6RYIzXxzk%&{Wb_>Rd^Sxk8x5$2b0~XK1Ch@8aR=*4hz?`pt z&tfS3yC%Wpk9f#UkRlsJrp!>7_D3avQv|XW_UT^JVc$u}LuK;+e5?xaUEcqR{^2dGk@i!ROZs$ zor6YugQzzZD*m+ST_B(xeabYOE+aR_6PxqU_tq%?Q;!rrMX)DO-9m-%o4mL*fOMm6 zG}(zzYk>Sz_d%Zm+@mz9HGTS#=e7QgOARw1dz%x~%={jG%_LYl&~X026bv%{d1V!8~}+WNDB5hAwJw-(e~yRNo5j zpNlBo%>#o240qBfR<4h1w!UbvDM{InRneJT^hI> zi9lD>rxFl?J6NEO_!Pj%EQ)V=cm>UfGzjvk3otCbSt0G6Ysx(wp6Pbx3}V`rqXaKn z+HR5&a77rlJ0#dh!W^uF~9e#va1w>0WT>4-8o4rF@Y=%_I|5M%+dpQggS zyNP>}J#SlnrC;uCsG^C)I0_tP%-TlE;Gqm;Ep?p}zHA<$gF319`zid1gEO2x*)fWo zy1Jz$(Th}r8EJ(WYn>O>L%66tY2%w%s@xg|O7_~#!|@tjsS&wYNB857RzSKuK2;Xo zhN=#zImj<$tgDaLKcTf~>Y)y_-Wr;nKG>C`ZVh*4xfo^?<3VGXUGx|c=K_TOf!f+^ z_*~p&+HD6JIm2By%nvcXwQg$*Am*JjQ#cXlOZjrXi5rjK6ihGxb!OBX@1*`DC%;4gpIkgtB z&qCGwoE(=E$^B|+ zV5hckf^RgHv6B)xY`e~~q^h4L(Dp;ui88sOoTBK4V;Nz%4xkhvT(r+4C_+3njuybV zofomAB}elM6$S@&OO*69>BW1Wr#mT4-WY94VQXEGDJq$Y87z07Pg>$+L4OUYHw*wp zhmX7+2KsOsMtVYpU&|BeZ(9afj6Kp03eC)LLAwyHy>@KBY@9| zusJ2o`8A`H^~}oks5_I31pi*2ipq(MEbZ1)(Fc~;yHGJ>q&Z8{*k>!uZ|iKV8T1kf zsV(;*tiip)BQ6`fbb!J`aZq<#zeqVQDWkoc>q6HW-Gh)7Ob@xZ`;T0$YS4udpaH3T z#;;we2afB-vyw-}7+RccPlN$7tEYTE0xfr}g2m{bW2;zssK8XDxd^~+3jA6Y_@#| ztg|zOBOOkNm5^vE5|^MPwI%iGGglrzA87C%d|?jJvv9rs%{SgA)bx7TR3sDp0196E z8g!;!hpDA1SD4JE^wJrWdW6l?hqBi|$?Azb?n(R&=H*h^b)}KhA#JsbJqxXjj5fCy z#`NgJ-2h~?+2uhtr3ye#Nd;078VAAM!OvQ`VOfD|?9Xl1zkWYOKUC=rx#;J>)O7xj zaDqhe0n1Eq4r@?pjp3(j7xI326|2L6ji>`;!#2dFyx$Y>&{-6E+Q(+41@L$$b zptOf?Md?{2Ne(aKnOemuVD#--yX+>_#e=6@KEZB2c)y=jhCaruOLn^^EfEu^PI#la z7RUV^Q2T%_xBG8z@KN70!mBK@m8}~H$)YOQ#~Kh-hIogngW+rGo91e2aliXPSN1o?Ob;tSZ?iOXa}tCvQt zMCg@M2K7=poTPA&;Ih3q$8k=gVHM=THqK*liI0(@^EAdcvM;WNWj&Sj%Q>D@$>3E= zaTxU6>iXBfeM-fMi`A#nc> zRWgyh@L7v1?p?xbp|0)miOg*6i-i}p=h_n0x-KI^NXm}TmZ*O%%c|hQdJgd`Idia* zaa!@u>jh){qOb#*+V5U23MTm58Gs$D zn1qc6A1qx|wgFwFYVQB5xPy*otg11{V#+8@S_&l;y;3Jo-cb9Wbq1al`(4Q(VMT(W zAwTy359;@osB(;a*0VAJ0yrOME}tg-o=fKx$R60k(Wi;7(5S|`FV8&1)d=fOm3uEd z;2}rPzAx-Zc8`4+kA&B71U^~$quTQp@^Y8W*l)fpP}y9gioI9~EM%g|J#Kqtis=S_ z+#l77XeQ(JRME7^t>ce9wCYFHg8oqy#Ok3zDM)q8KrOC41@9Vpn>w=gKr##b_jRf} z%3W)@*JwG9r7dSGZK9~Zvf#hjbv-D!YF)6G0^)El2VSC?ZOrdd(OtEAwuhQ3;et=j zyfx7`@;-R`DDam^{t_%_lRj-;{+4G-_o}TfL(p9)#_L9}7!2j)M8lz2HB6@uv)U3R zbYIy|dCzY?=u3rMq?4C8=Lq~cra`a_1L{fm89cGr51g3RK()|7Xny!*hE4+;L5uN{ zu(w|}|5sMx@GZ(vSub&IbbGrQJry(ih6Y?m8BLFwk8WlWZVAz@h=TD}(nA8qi)^=x zH5Aiept%^(yuX2E*%E$bxxrN$|I8CvGIvd{8mGuT(n=a%+uGk=OMtHu&>rIi zkGBBts{(n^8cx5_{J@xpsc*yFFx1)InP@z)Gfs<|?@pfJ3%t-emSO__?|)yY{&|q0 z&V|)VF0AUij|fpXeRFf29_jWafq=Ybx~_ znhO`0$V_ok4QOS#P5FL@{vyVQGZbCIWbrx}gHtd-oTb<5I>A^9ZRW zK33&q)oFn&0kc2riJbDX#XIbU{rs!yg8)C;*IR_7JX81G&a~$&CVK>bD61c-Zp2+` zyiQ{ntpkPSGdciI_xal?bvllWTN0E}7SWiZ2C`I)LQ=*M9b9mtS5#!Xvz0Um=qLlq z%!$f_nvTmh^ZZW0J`IdF6d!^$j!LM3661bwpIPOMTMFi4OE99vO^ir8 zvSgYyj>y!T{xWqv|D%B%Xjne%VedEZq&wFg|DWSMaXlJf8Ye(z$;@EEB!ue|_@rrM z?|Ldarr|f^HwzguX!MFtrr&ZJHo@N<%SZI8UcXHzH7?HdC>gO{%)-eqH-Okk!~#c- z{-b)ol*=S)JfRi`TNtaM3gP`0e&I#t&?w4%H9G-MMxr5a6o2cS5#X0=CO){OETtPo z+5(&HKOM>E87T~GleNQe3m*A|H2{z(#r*i|u;+xXJ;3A!wg*9=FH>s(CW$)AU zyA`5DcH!<0KUN@8K*i0pEe)K)WEp@o5Ev~!XA+EvVJV4l>uCHK75XX%6V3Qm=eyDn z>btQFUt!Kg+_W0~ggv0!Pjv%%OK;k#Dk&_Ey4enTT%TljHpdRkq|y%IPm6B-<3X(o zgWOLqWy)IgpNtCF)t`LX;R+V+G&n95frjNm@BYCTB;U zWKuMl@`mY=qPVm(IyxoffARLI+D343QZEoz8Kau|T1gX9fs+P3);o7M@Y+Ns)kK*` zFS0pzfiLl=O5GK(_mG4^X11mjAQ9#63>&h=(1uDqaV7^k@ep-TtSozqCC=QwU`Zo{ z-EYC-RnrC`))AuHeWhZ*GYoc%Nc1h(lm1#iZ^Z(|y%#K_vCpNq;vQHn(;ScXScLsW 
z7TrJ1;3^b*y`}CT;F8I;L~+d^#8zbklm=(4)<{fn_b=U!%EtI5+LSR=H|8ouX^cpC z1d790575wlTSmPhPOzrLq7M(~FyB~5qZ63~(+@t{j8<&Z^P;IYS?!xpd!=l8^)Brw z+gbG_U}d%BNtXs+05o7CM0?eddYlO;c6l>}QlTHegn5Bxw7&J;=-HV7_xQ~Q7;`UZ zj-jc{N}-Qnj{MY8>%yYoeRKH{lG4g|%J$gT_HqIe&2}&-ZYk78lMM%Of5pX2ZodG1 zfNKFnV-D|idX#-0p>lWl!6it+&JF5Qcq?%{8u~YCJvimEd#`+Vd~1&qjF?M!cX{{p zB~qBg23r zxm-wy0}e=mmT-|*4px}#&nVo)zi9S` zTqvSTGxwL`h3Wd6rx)f9*oA5tjVcNiL{$TntM3+j0}?pc3;mQTn+spt%1;xi#7(`8>-M7st;@b0(vXy=**^^S&HxFGOM|Z*e$Fm|Ci)@;& z$XLt=;|YZp3A*f;5-mS}C~XAKqz*X}Tl-8aD{hkljK0$1Q4miQv&?U?^MQ>ZVUx0OM69``Vq>`gf-S$`QVH`_?3jdBWkEnS3d(+u?8gTa3Z@5Hk1g0yR6vvzQrPOVG2x- zj$!ioxV_G&a7}<3Lxi3i6=69>7_*EnktN~4CRg-h1&|u;&=}9{MhU8}c@7_)ST*ck z>tLn=E;db4vTnZ~&~)TcyH&HcJ?GB25S})b<9$WUAz#1l6q%es$ejVNN?pemW71bF zVWGl|$mXX+1-a@b1jVauT5FaWfomSIyk776NP2}u7_EGnrz#LCtNsJOipbi&z3?(GG@_qHF&f2N1in@6^ zA`~K+{zIz=Z8l`xrIz}l?|}emVjW`Ef&2-K>hVjg)*tkESna);pZ#?k)5sh>Qf}BH z2nk{08H4jN{({)6Vy8$r-LBjmq0r`50jgH%3_lKLjQn$E;;}zJO7|ELtL=bs$ygqk zviN5hQKDrrm4!_#KmZUGAO3a_4hGRz+<`EH$leTV1DsMOL%lXX?hY&m__4P3rM<-% zABhlBt?lM|=L!wo$F?Z(3t3QKPh0UCz~mYzu&n2t{gVKw#MidwiC|1%kQ(t`;{^`` z5d>N84OX!V1;rGT0VsTt-aQCAAhUgdJ&oA6%@p*mzNgZ4$#-!zmlQAFY6!J<0+?kM z>*O)*iuTj)R#|cAPEwXUs@Ab(Q{i)bL3dolNT+E0(7EUWR#Pw59Lm8S`R8l?IZtI6bboG0(E&x<5DxsN&AZpV<7`7&Co6v z6_-KrCR3bv6I_0kZYt%^01}CN-lKMH>j1>YeExRtvyoGij>;T1s!^3z!1DAx?yFIl z8g(%lpnn z=-};YfX;hB=Bixn^Yw<3{Fy4#T8M-+ofBSFqFrt<+)$vdg(-t(s$v`@q|q%sN6pBz5#HCMnM;~Z+-j;e~I`|MwvJ*1iQ8`81uxX%3UExIVM$5 zM9UVTMzm1-A0hl1kVJqXrp@ga`kc%7g93*!4i7pT1V3UZlSywt4iPTIKt%O?^+Ji1 zd#wuZ0&2*jxU1>C^H~GsuZ$Z?I_Uk_&l)6MkCC=KO~q+3k(O97T*p%ABo!1yu`$=x z_Y{YKHhquVjDvdV{|V~wNT7W}U?)e3(i4wm)Op?N)?oY7Gvx^4m9k<@3jcK-0kS%b`_e9 zs8z`^yvXGk17Zdbum|D)=vV!Yv$0#Ao6<}6%N4hl9Wk+d75GD(Ba7a%4_5|8j0RjE z5&_^#cjiXxbc%hb{)=3#+}OyMq=mL`Y@v;y398L`%MQ@pczWiJ@n;Go`|W#@`yd(# z(U}ig^3BnM0E25U*zWjBZ_0U}f_f)|DPJhU4n+#nazdgQ44z#i7}%P zG#@@`0Iu#@D9;~?TY|baK(En@+{c3iyy zPI7)~jOE6PisS$C%HQwiI?UiQNy}Ltz5;q13P6`5LvJ_UYMMqf1wv$k=%mp}7 zec=xcqxbXU%Zm7WB}3^p78Pz3RXDqtJg1)dX=@-83@ht+?RrfXI-$$8b_oBMe) zhQUeVUnQwzivhC|S`b7t=U?39Nn?4~^vX^VsOhJLrvy;9E zOFzgi1wL!=OP_8tpq07dRmhUF%^sCkev?+q5s#io*lllmzra7Pmtz}NvGzS>bTmys zh+X;C-ECB+5bp(}EG`G+oXn+|?V~q!2vq;J2p*EBpA$acots<^{QqUZySrr|K2+Hm z9I^l%AcfEQ7k0F6B~TN|JJsz@A0d1j zNPEDs!$&fFA73O(f`9X*^n0u6jIiwvCb`d>fTx@?Z)({CVhQ{ops5d?`W-RiAHBc* z(CFe%cn4djeZ^M&{&6_4tPhSXVIMHYUVg&S9lLJ9zzgP8$Dze*aYfG#A3Vn;(rzf{ z$k!HNc1I^ToV{#hLx3dcSUN7wVn!6N$eS|I#%Z-(-pSEHdlXVxiN909H%S5cTzVssb;w@dV@x9WO|Q zTHGIb>dSF>1tFos0^#@`YyDX&Onn+V*rY`p4Iv5UHR2YvuO)Ks)jFxg7pgTR1Q;o& zdFjXXkQ0;JHg5s)WNkRVCO&TN?M@D{#KLLk3rSf>1bK2p4ga3@>-6amHD-3j(MQ#6 zpktA9$8f(S9{?kub81{1z`GP33UuTb(FbknRKM4BG5h47>%(eyE>q(W0yngWL1g<+^=At-gC#<+7vxK0UjToa8~J% zMt%C2>w!2wUdlnlhCeF|d|3h%9r405A|@{b$*Nf9v3o6q0E(wZcQ}~U#bAb(k*w20 zc!u=yjtm}h(|Lb94qLRYZkGWkN<|&I-)fVYxcvW z8QzOsfprF_2dk(bFHFM#F#Ro+92jeO4Osu>f_kazk_p}C2+vLqhp!>uAhhMm&PTMh zN{=29*5{6L%Tjy;gM$Qgr+Gl$U>(Hol)nwpDjB{miZ-zMf*c=m#8@?ivTS3tKC4?k zcjC{_NgS1ChvSnQ#(y3djhbbY3%y}jVyKbz3B2({d^Q}=S_Ji8YHLJ;K4`chHjzN5 zmZkf|iHhm@rZzp!RD4-p(jE5JO7!i=qwcIRq|q8IL_SDtHrDs(Xt|LopxeU*f^aH# zucAGk3pb`k4t3KLc7hFu@zOD+aXu>zTM+lMS3Irbh9~ri{+|^9KVBCcl^xlG4pR38 z&N7S6^N5s_j4*`^Wx_f96$<@Hp35&^cdY?SG2GFn=4S6C?6Ms8A_;Z?4jJmnXndk# zzDdvzrgn&!Gg7<0cMe8Ya?ARIS_KNR{YxQ`r)tgI&;Kl0(`PNw07JeQm6Z)9Ux>1C5hY;ye|PG#2?CGE*Y@{KS5Iabr2-JNR!7hV8L&AAuxAb z55tmOs2D6$pf(7PXRYgmGCggoOfe(QO6f@fUWsY>n~CUG!e_{wYx>xhW)ZC#3WF+a z=hnLCaol15x1v1Rhxbkd|B+PzKO)D)*v>_J*)f|t2=AB6L6^Z|_WrHNw1TCC1U&Za zU7k=lg3VKC1kf(GPB(+{sd=W=WmUha=NavKdkP;Vm_K#9gD>W*)}>!k1fHQ6>w3IG4E>zv6X$zC~elSjP|4eM==*tl0}0uUm-5KAq%^9J$YFBFHc@;{gm92>wT#V&KzcKm8}*rZ 
zP~!6hgTqjuKph7Z?d(Jto%=T~5fi@Fsus6|Hi4+F=zzeJ^lBrfR;kwai4}}xn`fKO zdn@2ROfd#cA&URw|KxmgV*MS(e)+$Bp;f8^+&=TsOaSD}!=xxteXJkMOzdP-Ey86EIwb8 zK@8Ks>CZGDs4K8DKEgguFc}_#`V|X5Vs3t<+jB%38n_vaGsL{w-_J&^SpjQ&1_4F%8reWoBgtpRv9-z2CC13eQN%?T3)#z)kb+Xz89Z=&w8yq3e zeU|B+1^~V3ZRnbnlR{YT05yBQ*5n4LNbe^aK8g2GJkye_6g()gI2ya8;t$Nj2Cyx!4b&?= zYUnDAl64h3H^$gzneJwD*?oMAtxl%WV2VQVqt7=flPBG0=J3B|d>nlB2O)L}UFjKv zt9m>{(As({=_=1WEcfm{V;?%edEIzZiV@W)%-&E7sBTdq({MmQRz|91ziQpe9P&J`@8h)_i% z^p*uwdNW?wF7S&kKqn)fpK^$A3!k5b%b8)?uMh&{=soJa{OrvMPErT{z_-+mxL>wX zbh8(Nm2NfaW>2s`OLkU5=L+g8rTNsMm*HXPdBF=}`0c)aSW_L-(jQ}bXFx)tMqH#Z zY%gNx_7a(kdCSdIZJMU>OZkfQ?C*=2)S){7w;%I|voc=*}NXrlS|k%C7xx+Zf29+(rH_wHu29fBzor9`+@vtULav_3*;4aB;pR z>`z|=TAng5^Dy}9m5Jy6f+daVy_)H@93u@+lWgozy!e!Dzs(hpjSWDEy%(}+4_rGCtUsPt<0G*|8vrPCDMcQc)D=;N#h zC=>>*`EHPKMN11Jj4CEVJvR`GOf=_W*)8P<%jWDgNIx@+4!0Pn;af5HJoSGw8EGxT+XEt3D7b0-D5?jxFDdl4Eq3o-JT z2@dm$M%#i^Q}%LKe>=lO{pTkG*gMT-Cd6SlJ!pD>(PVg+bP?f{Ila5}(Hofi(cb_R zkj{>Kc!L>p;Hfuh|67I(d^Ec|W~^>45rRs_MkOwp`wQvO0H8l;E&h7Y_P$H2xT;J9 z^=&ld$EmRqe;(`)3FHR|;E7?#DP;_|<*ciY*|5zfj}Xs`q7)k<25rTDf)yHh1zV3- ze~xfH?FLS)buwtE+jyUhe6inBbs$RIWLy*ZY~W~e@Fq*x;HfU_E;kh=O9YxIvEHJ# z4ewAF_v?Zs*!#KHl8+d;&xi5-Nx2NZ(aFL-8c$CL;tsHbwJgVjXA9e9H*CFd$hp1+ zl%Q;j1j`l2kI`oK)6i(X+ZMdd8^212E^1=&U;WhfafiPs5ukt; zNSKs>1Na?7ayD!mu05fkZjJ(Kc#6Y-r={Ss`)eb-^wF>?Z+(WMz=Y4{4na_W53`P{ z;?+gs#$>#YhOOS;r}&LPGFnLqs=sjjvq6qRzQgK*;g(j?BEWL=9=3x)}gS__IuBV%ys(R78pA`XQTG8lfuhXnybI=THp#LTjO8AM4lp(-(Q zjD*)~n3NC)W={D1>vu~f5FH75$i)O{QIKHQpb_2-tcko$oee(|q~A~Nv+S%Oa7Tp= znZ`yA_~w%F$k!*hbuzKOUecCnEkQrEjg{X@V-JY1DCswse~s0%;lza*QFFlCI!Z%Y2|wm5qx^$ql3OGs zPVj;`rn9g*n}^269AbsyX*eeJl-3uiLw;6Z0t5hU$!Q z`al&<^UI8$O%%GvZS$M4bv$QC!%tLg|Ec*1X7WJSs__>bQ7roe{$SL~m3XSR#`pX> z92PcgGG!$N*j-G%-7;Abw$Tvsn`>UzaoS$RL+|_qD zKSl_suc*2rSNd8lO-Ry5T9bG{#nqn%8N$T!GeuJlKC@{@J67^hx!=xs^G6T%q}`g6 z<+P1}s+cX-ivwlf*Wl?W=Y&RjeWqB87`Lsn=Q?P+)-|rZ@BAz#_SSgq8RP#(xL`60 zO9mC&+|0%Frbegma(If@hO6a0pe5XJX8fD;?68Pw{$0@2d!qW8xj#@h2|t#&VYWp{ z_fXgWxhKG{ca{0n;a$&1*5b?i{!W`fJ8{z|>)9|!1P5P)^ejZs0&bH*ntJogyYX%d z@P&VsGxbUsc=lo}b2H-xGI4|Ppt^ekD$f~~J`h|?;9}3wIJ-ZWY9B4TRh+jzL?`V_dIQ$&W zng|Hpe{GZ@qt@!QZGy7@a zgEng4h(YXfE#>>GxQv9T{IkqB!?wC_w|9d@Xujcs|6jhJ3O!a|cL&3MaEz9)&69?r z6G7fMh)ztT^PU!WejXKa1lJvra3!(*00-?K0N7WV2iVnx43tI=iLzXwwofaKOToJV zYqilp?+XV6w3v?%KXOnr1mgas1Pg*;kGRcR$FM|$hNjm!oJP^dvxn7AE?zr`oyF@Q zs;;n!h`8UL6Od3tPZajR8K4_ZN|9jtSN63InhC--@w08?UvBZ^4rxC(_&u0DRi&2! zOGOr=osY`X*VeUEwz<)XBsgAJ?3Iw_|DkXPXz4erbuuhQEv~ zwkTdPAVgLi{g86JZcZGN3l481JtiPBkkuRXU;-og;aTw%Q5J*yP&ceB8m1rCtyDBk zJqXT_ArJe{;A! 
zveis{e=E3%#x@59a1zuGRgTu`w=vrn!cD&^5@Pbr_=-mB@(YfvA8(r!nEIPK*^iUB z-{)cX2qN6>a_(rHIEFA)m_lx4)fxw^sI3HkYyW=mf}Q*9#6IIu<9zs7+%f49BhR^> z`mzK1FT-hn-Eei{?MWV(4o7pr&B}tjuJYK{E%Cw(dE;)il&?}w*;5N_MGC~EinC`g zF^;5t&H*%F-l$GU+BAt&iW(DPn8(7rt&%5cU!szmtPKS2Xcq=#LOCa!BV#uVp$xG& z38#5CaJ1fnzM7@SW_8IC<8xoOaU>3oyX&mx6$=!T4-h{(1t&?3+WxX3i-3uk75Q-T09<#nagwBFnF;j}06Am|HB<*erR>#f+YqMCCSr6;TSVp-3=Q*kuVz3ktx zqTU|AgtN_<@C+@_(~X5)==_kZQUT=HSf!PafB)&;3b@4lxRi!hD`~5NoKZH#1Uf`M zysEfy@qmq^<=4X&_T^80NemMcwCC)+Q+qTZ$=a0u9Q_Bjhu!^K38udqIs&>!k$fSj zle0Th5m7)u&Q2~vJ7Hc3oD6BgVs1$EC8zX(hx3qPErDXvban+^3w(8otsfd3+@{f& zF0B5TM&a3*3WUopuQmie`;`jSd;mf~y}!IBAh)mf4E7!evQiLe9LodApoyDB_wmTP*6*r3e%Q>cmeEWrHNY@n1R!{qYAb1&rmn^N80yR8@s5`SU#;2L47|ub{ ztyfI;9Me6>28$}7#rJ|e6XxNdQej{3CqOD2#S+SzG>}gd3J*$*3+(2f-^~kxN%>ee zIa?(9CxbE+x>lo~SI_K)Uk**Uw<8w6C*&#OyNxiyS3o4kpP<~Lp>;o}i z1uiNeWfyyJaA@eF$0-THjHV4ercusssB?1+4fCHgX56R!l zTSAXJ+wO7vkT zpgUEtpGU62AB{mjxh8z7$)HC!99#`U; zDjj+Db!-Gx#e%8;AbQF|=hmr9icn~C_Iqao7v@k`)BhQYChBZ1?WnylA5T)pBJYZG zlaC;t(|ra!B*6jB2?5G0PC&ABYmvc1rijF8(GSM4jir)GO9eUVupVLx#9CWUBszO4 zrNUF@FTARL4b5OQaJQvkMzC<|-{jgyUgYdld0Xg_Vi|M^M?hdW<3N-v_cK)GK(9DH z50VtD>1&fOfZL&^gX;5tF}28wRZ&6!&SA@_AT9&Haq6y@OZ=P~a=*ik(ZQGNnK7QZ z;K5GlWTK|#gmx^w3adwVx9}|3Wcy@;l>=Sr-pZeQ6~J@i+2%Hnyx~L<+xVd`6x&YC zb({-g$V!-G#thONcbKKA9u=D4l_aDPBQvpSZ7_(Q^?a)*Asl!VxLBk$YrXeuDAT_J zPfUo$bc$noBNs%YdG! z!=tyXW|lGZuo!aMUB#D#O-l`F&VDk5EW@Q?D7u#*(covm8j+B(^fA5TNZ1f}WnuRG zSc_x4RD#+_Y4)PUlysRm2*B+S%(?G)$1n%fa~n4a>Y0`B3J_=iD*>Rhz;I8eVZgNP zxqYD%ZDfejX#gC}&dT?*RT#{MNetUC?k0oVE8r407*tdo5tkm7?sC6>`Og~qep^Hgl2$(5^ zKWDu8h-*Fx4C8(}1JkW!Y5ehaFfh;-N~Bg%G&H?ZM@bDV=98G{bNGrAzf^0$Czj>l zrKyjCN#%3*g2101vZEWl4&X^g%{2kuy@Ks0AUSL$c4OJ5b5^R8H+3itqA=A9(t1$= zElJ7_udH9*=Fke@!oV12R{4*NDi=-v0@bB5Z=ArOMH^I_TUbDhy@BKxNfo1z zpuo3N(JB=@;72z)9elv`Nj;C%@f{&CkBU7Um+WmTc5_JJ#b+`OvN27&E%J(np>Tt!r5qz|J<(;hY0wfdkVt{<8rzwI0MdiQUBEpG?0 ze{C^!#!bJ&D*F~2B$0Q)U{WI)!ZFdiPF+os<$YaygpfKMzhV)ye6^+I_O)4saxzzH z$oZzY)-QW^&YvXlTDz7F_Pv1q3E$IBsV8mclpTHlyN^q&bXYCPEod&d&(ux;+ki2w zE|lu9J@1AH;QKf)Zp=}3h}L}eYRsIr&HR9FT<79UAGeQr4rULNsNKAdgNi3~D#tS| z5|;PnKd~%4o8(}Api(MDOrA%!nw^01HJZN+T&@{tA84AdDvn>^))pvp%z5O?{b-{) zn_|!sjY&6DLpCF#K>y>-@-_~RoyTOA1WLG> z>PzKNvHU5nn)*U*;D-kZAhe9UOKLdkJNMcobL@?!EHst+dh+O2fN%?Mn{LJr74m!VP8W$W|9DPqd~&A&IaDy1Ahn+V_~gN ztM;89%`0>3B~^;?A@j@vV7-lQTWR^397fO_y(S$=#|LXxm00fn$W{|0(W*x9Wo@x? 
zHr9k25s}MZ7+WGL1G>t%xLeODs|5aGw5x)UCaG(ar^9%j+tlpQs)}S~3ML8am zavTFg!n3_`$Wwz}OvPcI1tkS;(!7K|{HWrAL0#8dU|@+LMEBg|4i^Ge;1O+I*ouo= zi<;z99qm5%)30RNf0jI>Vlmq?o)8I!88MeeSq&d*Rr~xtXe@C33MuemsqhiRq#hSGF zqxg$vLcUHHAGo!e-Ys`Up(4A{Jbfq1{bkHQP$cjG49G0-X(&8$^{eZ)Oe8D zp$!*}n7*RAXW1z=^q?7v(_M9_^5WDwi=@8tvHp2E5jl1T@x(A8eDb7E9~$a~hk`M+?;K3=(<0+;E@aWsk57pX8;P4pXy?pG|2#_drwHRouz8#eNv9)Ywc zrKwf0@qCd}s2L}d_eWYP1rE@LQqEy3qtr88S|CDZk-HFW#hglgtC zj?CC zScwjZJB@HK_eh9$9*Sfk~ zm2RvAhEHtRTHY)Z8ADg~Mv<9x{r7Z{Z7(?K&7V``9jJ_xLlErQlhb(ob?w9p+hH5G z)!IDJHg-`y*Cso?AU{DbJFNzH_gqwT`FX_Q9@;iYJXexN zb$x`-30^ciqKBa<)S?p#`rMC7NDV?#q}iZJl7eW*c71oY#TDESI?oJb1Hz%$Kh4Mp zHHvw?ln3P@;m7;0GvpUU%sOpyaGK-N32()}>3ckw_v@g2Zl1 zgxtbmZXAG(>kl5Dc%XNkfFxeqOU`DbJH(}s<<%7OC7o__NWx;f=wq_VC6nsNT&hmU zNud482quS`^GUImZfh>vc#fZ+`8qC*9Kuiv2jpvT|!}2V)3_1Asg1s|6ecvC^zmbmClfBP@6-l91r%akPhS@-S z=%z0}sU}dTW5p!|YI3Tvpq}a~feUi$WZ`$RFNCFC1#i`E|K{*33YWLQ*#wEA zU+~b1+wa0a8{J%vV`+Nd$J#0#Co^NXEC5a_8QW8xF@~GXH#4Q4G`6u-m{z5%$wwmc z1^glNOZA*r3uZWzR}=a)uuYZC2VzlvlK?Cy07L8E{8c}97`bax-j zTdawK$MObrHW2ar%|=roC#t?SF+1$-9rngMNk)pUt; zuc^9UwiT)6jP8sKXs z;!2fXvhc~5{BcCra^FB@rZp{me|_DDW+Lc@HKno%o~YHFtG&tN zUHew+io@ZJXCKs#DXql$XzFT~1U|-9bnuS#{X^|XOw$2CVdfA$%7K1%AJ@i+P!=}A z<5|3M3@s1tXZnH~!=C3VO`8kP1Nn@nD>kBru$OP3u2$B3h*> z#A^4Dh_Tf@Y&#@B+j;(2ILuem`J3xJ|wCI+66JelX>+8ZJ~ zQN0ehp4)=&fWSAbA7gp4->9kQ3HBBqGud7FMywK8a@ax#KAq}rzY^ABO-& zD2I=7<9@pjg((N3-N`v&46?EA=s3(+Qm#Lvc7AhM@1NNc0tYzrG>w|2csm<&*F&gu(bBn&_duOj&z5H~*V;a}W3beA3PrN}eq12> z?E`g}#ZCu@8e(3`R?MXrbFZRm?A1t>Sw~WobhyU*>)tY4Qj8MTQ#z23I3rQzzd>2Q zj(u$`V#?u@fIO~Bzk1xroL*_6$-^prn_-rYp?X--3b)7LN zc+11^1~PA+i@qg(;Ng#yF^rpiy}PSqx|9lwIl#y0q#OnbzW{9|M{|ZNDFv%g{<#Hh zg&^+evRn8t#T(n=Gl>4`ac}hf^LL8@SXxv3-)Dn`p}2=XegpAKLO>r{_k?K4D)*^r z_LTtX7hD#@(96m#;`qoS=0f$*fOjUaZ#3|>@S^zZNB?3QsE_eDKme9$)QkV^k79?9 zo=E9_Xr8&B&Fmv@`SK4Yh8g*Bd=~d2&I~LUC2bO21V*%IUimz^S&+?c7Uo~`v4GQP zSgj7x`IvIaXIT&2%P99%t`eogPGjH~LdG`W1W|Yiz>R85&FoL;YQ!`8bfXgF&2Z^ z4x4LrE3h|Y82ly8I1pXbs4X_}6mZGCf#dv-@{Lg~5Okt9<>EgcR34OpDIc*H#bB3y zk-94x{m8*%&;*Qz7~Pbj@bjC#Dm*csJYFk?(s6Z>UtR}i8jRYTferBakUDLG zpOG#jvnz%ro)o}i!7ePFPSPi?0S>T?g1x-xUif-U`Ow!KhL<;vsdU<*#WG!#QU+Z) zWE(0e_kEM6r$Upz3Asf>2c9kUrF)s(V5Dk&paTJbApl|(*C}5m5s0ebM%#FD;ids+ zNt7_Wf3YjbkEum`kZ5dooZ6#@r?{EmcB}fC3sX?n@;DHBx_!a8N#q@o^!#OEa;Zc| z^hqk@nAyDoy*NIVPrK$&!tNz89^2en>2l?VH4r|1nA@>Csx+*nwAa z27s!zi z@(5{ad9UxT<40(|LyE)O9@AcT>p&QakyJjYOduB*#SLH$xD9{g@!ZiQWU*?F*l-dK z!@lFmLYDrx$PB77NGy5uITW=9{rw**y!;V{LLA;bBxeRHBStd8sI(i(As;2sTU?zcoD0KoNiGpIdFa?kGe4lQiu?*a7!sSKB%4fr{FW>9^d483{+K#Ay- zvPE8BfI}h5o<@0wYTGV4s!gCu{-mUMIBj%WKEkrseM}?(M7yJ-5>7ci?k>gXaBS{- zMeG$a+C)CB*6*zwV8yBDYE>nFbERFDTe@{8IM_dZ4he|g4p~?mCXJ`?A=kZugCFm` zAu(azb5hGEu}~@^M>#bSYy39OmTb^XWwcK;Q(23_AKj7zTy2ABaXv;Z zXoK~eZoG065pi1n+n&7fOyTZ6EjJeTll37gZ1BLG@&qe&nSL!$6#&{^zIe_?JW(f*y?L-o1X#Kv72%EO zCPkWpPJO|H)+)(wGwC(dc|AKydSk!nz5Tn~LVU|j1XBdrql&2kD7&CaJr%Vk$H1gh z^FlyMgd5q&<;^5h{xO$5N{c0B>lDn#1!%QK?I585Sm?2Hs{i4`*dY5ajzy9 z71h{~Q=Qn2J`i6XlCA=oiW2^iV3FjI`DlNA)%04O40yEZJ3Z5)Mbf2? zO-T8-m!9IO22w|&-$?k3fO2IBc?di60N*24(n6`JK6xRjWxbEZv5tEa^(x50Ku&3% z#p&55$yaPlPr{l*i~z5(>u$*YLxT2TfV35x%&x+K918Qt{;ab#K8o{=v6HNF>TnRT z3}<2k0!8JR%hL&uUHP3tW5$D~iE{wG%R>q-FWnCmf0f$o+FcJF-%@|EmoErOn?K@! 
ztNjAKt`D8|X+~bzggCgn)PY%R_M!?oTWUKW+w##o0N>M8@c$ZC-WKEw^q>|TnAHqg zc?Zs@ky3d|cL-Jqk_oH+THtmi#?81X7vu|3@`IhSTv139mK@om{1VSH&rTOmm(P{S8oj>u7LM#j!?l!Ac1Ud7~I&Q4ijK02=X^ z`Nn$Cv1$^FuvBa+KW{I)m@I3$z8p$t?!wjrI9V?a6mk-q+d09G=mHe?G`xH7nM2jn zzH_(vC*?niCG!Lq|5$e{(u&Lz0mS-|pT;EI%ewB_yFn-xb7eS?WOR63sHhl9?@zT5 zN!k~Q&zjTba5$_{;ldZ5r5ySq)~EmUdHU6DYmmPm?A`qxrT_03(TU2cu+pTiu99w9 zDi`pf!}jqTeums0052D{f`@hbuN?vx!A`phk34t%EeOdcsAom-!|@5I%sx|>dd%la zJsOp4RK=su6ZO-vE`W+5VW-%(mpk_XKDtRCsMF~UWUO0 zc_qF}C@8e@UW}C+pOs%-%8$|WmeNJ~~tt{CEcHq65b5L?=&UnNNq^fwRU&$o#8X zc8e;@@v4I>a|V!h&dld&X&O2=1Mby5MWBsJmoyp2`^5T%Xys8osTu~!0TP@;$CP~jF`xupd!3+dW^$9gPKF)ms&slll+;t~{>Wx}hok+fffNf9 zPXAnY4Mr%0k|CVKN_Jc4l>yGzwJ9!EMOh(@t5W1J8TpqsjsFi`y)yYjW7zBkEM$WT zmqx?t4*OvwW$%Wjb7PH82uaw>{7TXVW)$1o!$v)0DJWD6+awU-hwpe-5FV!gK=Qa= zb3~Cq$N{iR;P!Y%WqQNKXKCe%LGo^IhpI2PtuIPb`JjJHdDbFH8`G{EJxuQPh5L9)>zlhr+j>@P} z0?};%f>PoKz#^v0sSJ!)|2KlLIAZcX`vuHqXshxaqJ3}gSK##8M`?`Cx_n+Mr!O&PZjhFqTo^$JmiK~p` zzPE_bJyV7vk(I?@^1v&bZss$Uo)vPbOt&LVY(1VIE8_-$*pY#Y_A$47C`5n<5?}u`LooK0$4==--JW&_D5n1_ zi?$XUbQc*cZDS&P6pB5c1tZMdpZ5N5oC7i9J!}_z*1u7BP=J$0g&fL$G;?O43_~-8 zFkrwXQN0Y1pw1bb7-M;mEYID`JaF1#n@(Zdia3QO?(6-9RqwIoaawj*9#dRtG5|E0 ziG;lX7A9uUjNZIc1>*om!JIg2GWbmz2q%dX6J%?3{U5Lg6CxZgT_-AWK6x-=D=p{A ze|?L920k=6$hPA0#l?OTn3Mf{+2U}BIbo#s!nFFmqJs+U-gA_QJJn8RqoR@Uw>*p$6F6|Mw1Y;^7H>+XEf$gxyHhYEsMZ8AH5M7 z8HDxHR(8Fa_F&;lKV3th2?KDwa%<~ZCj+MppfaZFCE5`$ z@^EqHmaWwX182P{_qh}0;Tb{J{D3w$X{|NsM~T$0<@hkGO%@p$>3u@SmKhqd4PvNZ zCbp`;Gx@PbUBGDGV=UxLIkir3Ve>-4aJ;9FlhOSz(EvIZ;Z+(=8bgB<#7icYgo)>g zg{X*Pu z^v3{2ONz*the@NN+j-;5Ycl4CD9us{`!6{{#?Ey0w2!97c==7DFY2F(f>)Sg@Zt1VK&h|`zxLkv>|6rtBdn?b{pa{wHw9*NQXl8$11C<0j(Ad zauEN9QQKZ$?p0|`%UKiAR@5OR=lhbeev~skDS3^HpIQG7g}{IFpg=I;;=CBA2r}&> zcatsuZ1~anJ-raAo?1U$P%A{5D;2gsfr^efZ^1wZKQQ&E#-$`G^cL^`Qv=ufso+}6 zq6`H(csW>@{(yN|9`*lDMWcB z<$M&IY@~?}@{{hzmNsSS;y(gaV*TnuI0`(u%eH z4fW+2A87&UwzZBq@G;E`bn)j+YUg;^Z1a87mp;-%+1_9us_6Ne)&8=F{BgL8%oV=~ zxK3mX$P@VW0wh@Jb&IMCaU*JuY}1x`&-|LPn3oQuvB+h@K%`7=|i zu_XM*z>9^CSmbP-c8Yo|DxzhvX|cl>@O$tUru?1lGR%>Hi|SyPjWC#Dr^-CEHz+ws z6@P-B6o?w1nD`E4V*xl54eHy&NxdKKd30Vt+82nHV?Py)b}G@wF5sz~LeS5*mi|QE zE`<1r#dDDGnVENMO|Qe0ig(w)F^wOj-7MV+Za@(CBmhJun3kZTEL`}z3l9S6N`Pl( zFm5`&+~LrjfsbzkY{&PLfZGT@4#W#ZzMug<|A>I?Vdh8N+>C-90!Ie8Dfz&+C_Y%{ zS#+Tn!+Pt+C_1`?HdPZy6Yv501Yg31{kBa9P2Gr!!oF<=^ZG=icn5 zgru@-V%$pWbIY2(GzFJ`!<=lCsPYk=2};bImqnmbCecKh65kiW6Qusor8RF93KyU&-x!x#;D*uZ zqw|M>1-JGtcmXSDZE(l?lk4+RTraIZ-3H#06Y2T$oyGySnoXlu8uNmX&lm zO<|NOlXE%)_%R;~i@>e^Kq@XS&~UFE-Z}G@!G$~)*3~rnJ#LKu{ZpWQun3?(T=h@Pewud1oP10A*VhQ(AU{|p_6s2xc9j*S2CCj$3%i9kzYw7#H@ko* zJt78vB*T^d%Ys2k`^ey{mB$9$y**mAIes}aDShn`e)P*Cfx|8a*e3_MCd6@j%^(VO zr&)7yVlh_ow3F6SjusH@v&|k&4hEs$uXkY)ZkR+oMN8g#0zZc0^l^&tl;M`b^-txr8b7FBg#x`=6DR?kCCQ=rt}pDfL&HIrVg3FI8cBH{I(8$KT-8i4^X z!hV8}%7_vW3 zHTuXnUZ`NdBB7B*ZVQAlM0>&eDVC2c;zm*w>eWw*o&nF2y<3_+GjXo3iF}}uf4L0^ zO%R%Q@DMBaHFfFaLH!yq(h8M>As`fb*Q8d+?A`GZw|>KTz1D)4r#4kpfx z5?zzrPV;I~obD1kUZ{??_+3}&X8kcFBMnz3E6%5J4rKJiB?ht=bD5TlibQ|Uj* zNSJUm9qd`KWLNsnnax1w7y;Cw=r@|oDY5cSlv+2KY{kF+70W%q*Pf=INyFz>Iqeoc z-zHVc{BeT!k?E_|$&Ex@ldr20@x7)&TH;Q+Z4!Qs0GVIw(O-kh!0nQ^itHRiLOZ&Z ze=h>y;J9;N1Q7FCEcPb23NzmOHCHEv4C&dmGxKQXv-{uDuX={pQt`6%P+n;)_lR)~ zd1hkVEa@0rr3=-WT(k>zp`bl7S0xXhdW)CjxTOB*qQdB?>tACvX|_I?qv1cG9@d#U z-BBEzD_fKE;6duGyc!!p{X)o5xsy|5P^7<##A$xpM$~+_KzCcF3cxxc{Zf|W9}u)Y zmvFipe+rqeB&Deavl#sQBG$z3t%(w^$L^rK9fY?y&Y^T4q)gW;mXn3F5&8de+;G5= zU+yF=k^$gmg&TjR7L>!z1Nk>Ile9SXZk#IlUO|~U#!zqN?J=lMLf%q{`vi*5??30r zVg2_<=0t^RMXRRpD(MNR7enQSm5oo*gCIx1fE#v+-d^~Ukg64k%lvrY*i8|PX`it? 
ztz}w~^8QWO*5eg5W^ki~$i+bYf9iY9ZrgArMILv-a{N>oUl1a;ZK ze_F9;xaBuA(Z%k`1!VHP{tGsE;~xC^_iE1h`FliuEV~bT2LP;xXXfiT$ul^?w~#{i z)9heDXdD{8bAlFyRXym)w_H7UK`<->oalblWgIwnj1W~zimxhdFQKr|OxfCM%2%*X zUz3N6Q`*Mem{&Kg zAwDWQxXKvjB_U3SEf=R;FOyIIevw|zaK{9gyku6X`mMiaAku*DnzVwkv3Q4G_}<-K z+$)d=*usX;&ADePp>ehbpQVk&?G1PRH zt!uOIC;48*%s%oJ3ow>Bw9@GqWPhIj7p|r~wwl?P0vf>?lV+%R^IZlUsr*mr{j8&R zfjXpQdFIMk?b4?;fvn4x7F+2CMWJf~of+%>fi3~|;Xa1G=WB2AhE!1;MNFNsdHQpO zkGF+m7m8SvFqnCRtp&bX4Jf7jGo0NLA;W1w0ajdV_V}j5^oBlRlS1b~WqTN@K|Ck> za$Zak>)_B%$QPE>j=9k=ekcL^$%bcs&Gkx7s?vd;LtM_bHcz6{^OKi+}nIG5)d)OpDiZ0?%6F)py&9Z{gmCwC|o z4z0goFe96KaNeO+b{X%+oGPt5FohDs8>@#pJqE3Wo!RrGc^%P~;On(W*&aZbTYx{o zFfe3eKe0?xpJZ-p?TF}IvobNC2!HtlHs~Egc+M{4S+AVKlPp|2T9+bC zX8M4o1=>QaYHrE6`9K-PNDM-(`|nbbx2%`ZxR>ZMgDDhmLKeJ}u-oCaSLdZLm^UjA+B8+lvSeNK z>ACIs#sg0!o>QQj>P&bQ{^AZ5O`b!dGw;(mj0VoYjSfI>^*w%1P%wn0wmF{LlzyiZ z2_V0!OZ{>SaMuaaI zn%3KVD0FI{?iY$2a7J2s#NykDOs@=2Y_4XRy! z8)xl+r0b}{QWs}SxFxHPAraN6iU+nH&+eQJCX1aOl=cajH4)43;>1-j!EaJCp2s68)T2qxeiW1+w(4CzOGx#sAze z=vlpIh0(2b-W#RTRxzz>1r43Yt^N*wh6lh4N3dmzuV;7P4obtTID=d|yej?bTT*&N7LI!K* zxE|?AstYV#oZ8j#3{;O`TD{-8!2^rXLjb>G)mDa{N@pMk7JKZCflPC`=~nGAXTMd= zr-hkZHNRd}2C}C>EcJ&vn~jc3&%(ajHg;bllWBr(7SI>j-B@D-7oZ>pF46L7jfcQc;3S~QzU|E-f;&kdhg;ol8kl10@CseUT z#$_jA_C%9u*y+W`;oQ<2CD`qa!Z4xd-S-xhsz8uK^HF;3%*5F5tLuq-M`G~Ym4YS+ z-|vmi$@Qho&pqx(Anr6i0&2aN&#k+@XQTB1VUd~~GYNc8qug3vz)qV?*Z*UIRKT>_ z5{^uD1jNH4B2yc3umC&meGu<>BYi@+M7aSfnL&`*68}rnc@#TReA&kL!UxX(TdpDR z!Vh6m7w&+ix|<*bF0pf4`3dzuu++NIYRuw+`D`b)i+wvq>V_lLbHHQUBa=`^{7OP8 zSWG6bn$3Gp{}b8lWz~?1RP^VUoKGMM`JXY?z-j*;9@q^JZsW_B&%l>T zME|vgh2m;jxagWKzGL7#oP@~5(PMv?$s3OWwS*FH9P;**%_Tg)MMe%mO&Z$6~ z)~PR9v(3vR2Czdz2BYYEA`}2S9%Qod;JAh=MZ#o}QaCmBiLvSO~WY zHIYVy=KLcN;^?BxN#ckV(QQy2F=wsVPLyqP$8Ix#V}vWSmBSBRkeiNew~7xyFGG5Z z<(c2g^}ya##_(@QsFao}AyyRVguE{PVkyx0{jxdmg&H-xdT>J=_w7-oQ&`i7`ALnE z#j|VnJ>X^A^>OLD63$zyM~Z9d*4XT=L=+|{9(3C7?LC%pQlAvfbMHtIJoS-~!MxBr zn=G|vhy_2?L($&e|0IpUHU8#)JhI7egr4QA8diF^p38qUHsc`g0|4$HF>2cCKwtcF z#NTjFm8Df=!~NeKzb^WWqRqK@Vsk>D+z1?maQEA=mH^pAgdW*`cwD7q=xKrAgtuS` zp$P$3*Cc8OE z&>*>{%qcX@ZynO+Bt>>4UVvCeDQX&5E)BveAVbbaFE{V@3a+z-{E#t)YS&K$oA-nx>yo) zrX&90kdj-gw4TiBQBSWnBAT2ly;=}YFRQ`E=!#sn3Bv>bL7z8{$O6m%tAr|9E~J_{ zg7A<4Btw=3_V-0(N<|5l4Zv9=wiEPUY^VAXkK@eF6F~`L0;jpRbuVhngoj@f=`;Lp zqkl6fg3M3Jdx#@kAyQ6|?WqaAW>#k`R3L{iJj%*jN7;A%hkb6-lu0WSFBfMX2)n?CsjA5W zzC@R(fNxh8>7|g24zS9itM}CD=%o#HEoFGiB`N~tfl(!0#f(z>bu=&WsQ53$dVQ@k zvLihZNpnDpN>>!gA--| zOXmda%~4oJ;S@vi%ncA?tlTH!U@xvzl&*pj6k57d?rd4);us-IS{)NNm1lst-n*^S zWhsV(`L)~xgceEiDf&8+nIdz+!A|&2F(%27PUp&O?|m2Q#BPh@7Lh#O@N^fqY?6bs zBf2A&Ct>wao%C_Bf~(Z3d`eg_9!F1wB&89Lxk1R1FYjuZu3u2OweY|4qEEbkSi;ff zK3^v~H!_b(l0p)$#%#W0E+-0;kku02NlIEM6H^`fodZBx{$JKcwzkX%08K!$zi7fA zbuW6lKq+Y9;^JEeIXb!e7LJ*VPqQcfd@LL(zWC+dH`|ohehRBsX2t4YX%S!EoJ|Fv zbLZ{ZP8fUrP`GAed6sJhDJ86ecFMqY(@c1a7nl~3Dxxot`cw(39jDa%Kcv#{rXUa^ zDi=S)$Be}QelPpUd%mUm(Mu+;w+26tcf;%ko+!Kuc9492t$!X2ic%QE@Ip?4<{mNX ze_UM6Fau{%i7C0ClC-V=o!qHAT=i&NtUdrMbL{4;PUS9{wZ7esb45J>H$;jk{0iRg zEJp&Uz`5PzJc(~e<4~@rh zDg!J37_8307)+fVhIFjmQAz`x)AJ(esre3X#o=D3(4a)pw*B`In?_~S*&TKc%CH1P zBT-}vvtUw<-P8zx-@9qr0U2>&JsM7P6wH;0ROCv(EMm7 z&`qA(QM#kqdA0e=|9G^bdB}w?58kpmf!8cQUi)0oEIV_$dr$#TaURq zyUluyN&>LG-!&D3C^}Xom4L2uUn+-*>xuk@+~YfG^-xH6wbQ1pX}k zAN_@tX(j3$ev8L2^JQn=7}H~}mw;2A%0s1|?PwqLgvmdWycZlf2wHBV3^kY6=*to? 
zKSr5?5IcV<0!2ecUo}Bj3^J}b3JRwHASx9F5?QxqYcQb5#eHV+K+l5@QTIiqlnaBI zFKH!Hq2;dO0wao5OpbO2@8>8lvb5U9@;63^!oZeTnn^0Sq2kgGc>|Vda2{`A+I3!Y z?2~_TS=F{blqU~;rxMdwuky(f7gLR5Q!NcfTHIgCm$8V|*gTzjMF^eMV7Li@n@kju z#r+GCY`w}s+?eCyc1B zmK$FZjhfpb?;EMJF!A_a2M<=;!>pL^eG5G80z3xOqI5T|@bU8tZ)PzB_u-U$;3|KS zk&kwBCl8tvVy-1`ERy%R%J=FpZ>2O^?$=`7MF8;MFD%HYM+W|3zt8_AD-wH7_y+q? zG-{tHpUCpZp)P7~qKC~4YIDSSqdXl$(^cE)4w9fq%Ud7@`>rOtn{jH+jYF!@h{zQ~ z2?2qXt9{eDt89KRKc;tT)x_~LjDz|Q3UJeR%O83GRs8qG=8GWnD_2Pu`_3;$4>4!f zDdp$HPw9|9;3F%`r7%7AjMq3qKU`YGdj(TT9e~+lv@2$8s+q z$gowVpf*wf-Gt9jUpAra-_g--{h`92i?!G=E-Ggdc%Vhl8G$L1HMERSGvO(EW#W}* zfVPjv3!(6^)Y#h0*8z+E^Wm3TjQ#VwbF_k8cm1113Z>58G?w|co%R|5YbT`SH7T8U zn&{}rx-`#Py6^MW5xk>4Qlu8WI$k}@MK@a0!WBRGm(ZbkyY5mFjU!^HvCngc?*}}p zHynp>)j&Uyef*4kHPvBmVB6fZf%{hr@F1ojQZ}Q*jGbQw}E+^3|~=iI^Z4 zUzCT#1xbXDL=ld@Y77YtgechzzSL0E(E&=UuAV3+9f3`s@5}K)Ik$u*K3{Hy>m{Iu z|JGqy_D!|R-C?hY>K`p##!Rt~nv!q_wJcGbr>M;ioknaXJ(-gK^S_bMB_FE2>8_$a zUyW7sb-FafdOe#?SUo)TCXai)hhTb0&`5bjY{t>zwZ+R>psL1Q$jO-azNPjqf7tT> zMwVYKc7vG7BprCC3z1V>HC|T;A)~oXk zlA2+r{kk4mQh5wvD!#E+d>E0Z=fdRiYl(4v5FbtN>8vzgGYauCO8fBJuGsFiD8F2D zU;!+hzZUA0hLSCJWnY-zOdjzp;^}&|m){=CzE+*^XH}n(p{9~xn?DkfR`y9?`5O5x zsO$i`vU!$*rA7tv{d&ER7_ep{Jrd*L)gg&e44ejCIqf`*B9;b7Gn%wKBjYP`&DVpM zek{rR<=xB^#p4*S1FYaH{E!a8SOhhOC3^ItHRG)JP=zYycrpjwByVu|uw%L7puMn= z8Q&FsB>NMnVr;@?0_?`|(TgWwEVEB8@sXMrgNF!@&q2k>Gwc|Clp1r-r?Y7*fj;`ZNR&zHF#*ylVgdJ1t_x9^I_Rx^+Xa+UNr`-eV#q-()N*ARDD zz<$-o=70lDAU}4V%TF&n_0p@#A&hB`3~6S<+SHaef`b0^o17Oj4I%L$)jELrJyD}Q zLK75u3D4}*HDP^)d^#in(Qel6AAH$0chEzfKLzC4FI{y#f7y(Ih6OpeKK)LVqt_9W z!ZOl%-~Te#G0pnC{)Wu`7C$md%~O2+x>36d(DsJC(X|BoUGz?I&&|h+7PYF>sJ#}Z zOn9yTI+~-Z56mMNAt%HZ5(md#pfPSq=+M%rA2%}j2P_u>;*iEIQ5x(h{kCZh>`oh1 zE+LkY=S`H%Bq=j5inD!{!j*D1m)sA6Rj>c^LTjlk0VUqvbeywssWzOf!W`meA2r#; zZ0u9yV7L&Lo3sEr4FQCXS|3DqcnRNb=86oZH>L#@hYm_j9Nd{Y}oy0Fh% zDw!T1Mlj7y=_8i~zarip`dkh(l=ad~3+;Y+FN&Na|C11i#QCv9^f_|L#Tx>Osp4H3anT z8=xC%^O)^Bf5PDnMj%r-Kdm(=OF4SUu!xRZmPGu3e(V#X&6=o702lPMC3PK78VC$-LSxc-F`3 zsl~fo34q<-KP>LG06hXqRYlnI%Fo)Wa)@U?U^VcdKYk@;_yXa#L~e?t zkYnI^E>i5Tzp#;>{bNJ?hPkltC0m=|67VBQ8W6)8KZFM~R6qC(!(a2VkeSx(=uy!% zQr1W5-NKjcoqiNDC)tIM7mA6ci1z$jaX#l=h`Z-t$o_Es(AC`5!8m{{gwubeK67Jh z2RAk-5rQhZGZrfUa`NU83|`z0mvT8vVWkbXvYoNN)hzf0+uNRBABdd4VB)|Wk)&fJwzDPv+CZ?2F!A2OhqJKyu?oIJ<1_(U9{WK!S>uRY>QUlAkuOc7Qagc z0nKX?xJ(u`O~$#81I?1SB!h3|TgaX;RY*XoLQ`xD;TdnQCW$c&odGs2ZF^IEbr*3zQ+;!--8_BVm|tD zg6g0Gx~KS~Q4&{tz1G6X9(@CCkHyPe(&jmWHTR%W%^x4V>6W3f)v%^A83sxci5diY zF?Dn%AC~%w4rUQwzPMk)LKX`9J7a&m7e#5$m3>8U;+o`1rETq3>nnTzPebd>I#!zt zqj*~ZY&rb*-*+*OjtX-D4I98%@%VW;95F&hV>WZXpDpAY{ay&LmmTQBIsUhwj6uK5 zI#92xH|;n-{448E$+`~SSh)n^FYk#CMt=z@{##qP`4^zRb>(9zI zhVx4q9%2|Y!tDlqOtc3LqzcSUs?kUHrb1&63-Z*^E!E)g9W$GfQ$leokkgJUkGPe- zH5KYV>>EY< zZu5YUJ?)N_R_#W8BU0T#z(8YCS@iek)t<={#_@BL+3VpmS!q0#VSz6I>*LHmj$H)j z$BJmgym|Q?rxEwqbGQ$PVXL(W>ziGP6~hprVsI=xz(*Ek?uqWkF65c2&G8^m>bks&K5$eZko2{}jjgQxZETJQI- z$9U**EW_(vKH&U9obmc4XL+2yr64o9_>4NNMiFa(Zl|-m&$|QkrUWMQ`dwEWekB2u zxJMf;@|q(UET?h+RCrsa`==l_*ex8GquqVGR11A-j|uqd@p#D;2KM+2DX@E5%06a= zn4mH`gS9YoePJwZ9jM!K*no4utEN)LqyU~%7utH04lj<3fO$B&WY4^ygJs#5{0ycT z%ilCZ?~&ehWviVikd$qmxU*Yr^m1kFncCx8QIPB~p#-J#epUQ%tCoT1Z5?4x5AaaY% z*QnQRuKE-teU*xY(Hbot`v>(=_gKbO1 z+svNWlqGU8$lR^Pj6KzGOa8x8(T-eQl(M?0%k{1MX?pAr%CtwSr)+Knnrx7+%`Hf| z%US|UtAK7u%L+NjToU`8U_?V1?X1lPz>=+Pu6;rgc1L5}buh){EnS~r3NUE-R~_Ay zF|?=cD|`?dRc?_c-wy(tX!?)+0oN|@LzaS>KV$2N`68!A@mP9LHxDdN2C_6(#lns7 z^<{*q67!)LPvcJsVIOYY3;^BaUJa{<X53p? 
zX2J&eRsy2(3)g=Qe<)^1@aX*U8{Rh%z#<7&OF$nH-Jr7mlRlzO`Rr3WlN*uw{e?0* z(rWypl*G(M_}kd4EV2M6q*811(x_u$`!i$(8~cTgf(7i0c`YHKAry%F9{=?Hjfhu~ zz3{9Gw&Q13g~q*W|5UZxzRS>@Wvsh0Ri5Gs%WAI?{EWlzXkvQ_+Y|6^h-4P$Eb&Yn zLaViYxJ+x;w~LjIDn<6#OP9FBm`Kp18Aj(eHpP=yoNL(kwwC^U!si(4#RV^*Ah@#L zYI@*Sw*%({J}RwKbpTn89bbo;}QXT|k4vre&^A3M$H!^ltQN z(>PaS5~OUzv*b22XVaeJcaY6D2s-Q>M zt{cov?%TcINkqLaX_}-(SG2__XM^myTz9&y2#Zc6KUuKOP=&ueg`DI2&sFpr7hSI@ zi_=7#6ZTWv(iKzY%R$7wi1FjB$hv}l8M%ZpZbkgUZ`DRq1B6i3l#)ty!GS1y3irmA z3?cg|SN9aH|0sJ0Jjji__^X4qGGYw|(&J3=kclGVdrqM7F}#|TtkZZ7#^die=#gg~ zi-cmp8b3wP(E5d9HU;ginA-xNO`ap&n5w~uC1JNN@PTF(7`;@p!{)=I)QUhaEt&2$a}3HOCrJTG zE>+BgCk(O@iis0_rzLC3PAuI836s~&=K41F4ldBlu@lK)YTYdz%=>vp4*B7UR#3It z4P4ky+m0I@oF}K^qV%COfU8$AEAlkH+yXKy!CihDfNAqW*Z6A##;*RohU#XS!Kf3v ze|*bLo&|v{rj2|khGgGzHFrClE{eYx3@Zo73|(E-7|x13+8_zfMphBUL{-cnvK4r( zg+I-djkG`e0qD&0tJ`>gF2eP=)aDQ0Usn5PNS?!At$@AVPig3|7v!fdn{sn4-%|g z(dPwdzJFtvSc)wgfpGIl>&*+if8s=!rIe1qwo^v|OdQ~hp`d^zfbIvFen6Mu5OC&w(}+E^ta`?- zd}_!UvwOI^IPH1yJ}VmS8U-Ft;}4N*#Wu*sZt#+9L#w~5$uK-pt7>h53AN$u+EgDV zIpT_I84|u=xQh{Wdx<~>M;g&2X5RsK9HC-fr@;=I0#ss!LWHm6W!Y=s`QLXq0JAT^ z+-FDsHTLb(Fj(e=?WL+P$7BXnp`YYUr(1tG~!iv|1soX`eK!dlaL2J@v9*XbqQ3xXi^%z2qh5M?~g0VG= z0xZ4nojpiwCThuk*d@x)>8Cmp?5A^qGDI8Q{ISOHD%(t_8o(NORV7zN{1;SkaRk3c z{n#eSdh~?G)qHxxN4aaF>XWh12nR%wRe|eg(RMRT2$H{jGA)v|@!}xdp_;m>w#Yvtcr1cf zET=xNT*+DjbU0;1Zp1KF&WzSzmOUXzd|k2d8EK9gIrzY6fZN%c!U3bD|8awX)@o2H zbX>87-j5j1*1X;i&dDsdwaFG%n=C)ri7Z$*ssR2Bi2=T>vysXRwrjV*WAjAP-KD|1 z<1YePn^OxrigYeD(6Uye=meTrg)MGW^zOwYiG9 zym#nj*o-ALNFHdhm@u-dg2NBW8&qQ?b8vmc7fq1P;J(`u7w;g+?WQ}XB(a&C7JG9b zu%NlRtPje8%uf8Twyy{QD*%OttpTPQd=}Kx4$>M%?2(4jovO(9OlzilR&Jf+C|oFb z7XM_G_iY>I{EPRWkfxt5U$@hKX~F`Yk+Nl<+?}*2fyx^;q_5wqNmVJ6jcyh40<HWe#tp%pOXrYu(5T7 zaqpgBip+{7%3bT`<`!ZCiE(LC4fYO{5`~Y9PHg{fX*Eg_-UCZqpbF`jO9}S;fpf1B@GIB z?Q?oX(&%HP8vf(Leu&Vvgr-Tg9GphNvZBz3G4q2Wz+O0(N@miTuIS}{>>zaGAU!m~ zPr{kjmQ4=EcmfET4@>T)XSE8eDSEHi$b3drBRB6`<{@S1kqwCF*pTI+k)tuW)H6(k z49SJZ2%23I$vS}q|LTWjYyj-df%we%A#xB^BNcV25A2(`mJo;dKkrTig{c#K5^?u` z%E!97Zw!m-4}lPqFS7$iV?U80z@636l#tF>;~23dN+F%ej(8Y|C||2s z(&p`=UjRj*c-dn2P)(Ng;*@Twr(`dX>j6#??e&EfT9&FTWFdsqtFY77M;=z&=w0!3 zyQ^sI4_~g2t&-k{%N5mG7n7w%{ThrxU_69-Frx}!ldkx5%=YMF?U0lf(mzUVecUs- zu2yubSndbwkAD@!k2R)BJyPBooB-qb0b!2Zb#GS zBA@#{z^U1`0Ph(DZt?MgfQw7{@-pYIcZ&9Qfb*}+E3rT4RY)g5*egX-fA{Amf(!oA zmsOBQ?9_#sZ_Jh4cPq_eH}qjrm%pr;*xVIGICDhn;UWEK(BVvuXKsFgVr{bo&nS=NxG1&-@`I526;tWacS-&p7qEb(>~?@Z*&>HnfR!_5jS%IGWOMg)(GpuNXB<#cHc=0x*(nY5UObaEEB8CI6cM z=|Rrf_Ta&_2Z@RYZ4RvdXvx22x22i`Q0{;w*s5#HIOI_1!T1P19+^qEQtSuc|4Tth z4ianTBZRkOC#BWH1~+18^nPt9DG7q!%aI3&Ytw|vIyd#}yOS#!?r>vlmP-+?XJct% zhm!(`-ZkAI-p{_DbIAEe?*R$`Ue^8k_OTL%38LN*aEEbFxgrg#O$SguCj~z{XBGST z@cGeykk;XL^&T$xjo>+VOumHZN>LyzR7h&Q?W|U@hE^xj9so6@`zrq=r=CluR1OBu zcAOHgfm21oQ@nG|3I4G5)z#iL2WeUO$NA1qou#^3@wN%o>C6W#rO~F8s|Y4?aGFu+ zO1_uJQ-|MntAKm|1JIp?I%&h_s1 zCEZQQnW({4#9;|{;<8BVUloT%WQoi|ILvA2bcEw=%VVFida^#RDaD#~Ee0)aFIW=6 zp*daNr!9*@uBo3%W3h(d*E!hT`phw{5Zux8VD5=f*?n<*Z5c3{FLo~K-YxYAwZcAE&KWD1ey*!Zz_g&j}I!}q- zuc5Jhz<_IjFlFC$q*w+TMV){Cvsv=*q~4e%fZ6>@14Xt&oB6R7Ye=4~cVgb>Of6Rz z`N=hLuylyB0m|F%*wruziyaT_>2x9%oduz?9SLTOE|03Tl*G@b#vrs=kogl&zc(Ia zc$D+kWPJUt_Xja~RpuWDDdm^lVYGCv3wDvY-!TPfxXjgJD+eN{B$wwxTu!HMw zPDkIS52nQ{D&2D-T!!|NaJt}s^$Y#|ckDqhJI&bCj7jZ>Q?`c2TZ~*p{mDAqh4+`R z)VZuL_PNW0tVx#siiuLV+cEaM+-ktOBw?Xvzc2|LXx2^uv^(#PUBdzUW4fHBKhwyk zI+DH36_8S|=jxe`x8wUwJP~ui78zE-=$dP)cKrT`wUb_Na5VHOKRe8hJz98X4 zUFvz$ImtGj+=QXmR&CI@)-)3{rvHqgM%krJkJ~f0-`CY6`lnK{o#R%vuNZMrv!23F zBs{-~1whp;c?d$9UG^e_%RYdm`^qX5sCN`rR|ilzy*0N&oTnvKN8)7xvbZ|XOAG{8 zgbAKz+mVZzy31!`SZUAokr;Xe1P^w$GnAXBjTc%P9NR2}gOr-WuO;$y`+@XAg%Zbx 
zqF9$Dh!%yk&Z}s}MeI7bWJm{wA6W7BL7;UIWNyYn?;bL40FE5#+X`|0fIZUPB(0_H z3G1C=$BBy`jqhuR7Ocyj)JlXSVb9XXyilDh|HxY zKWFOmHpOPDxAH7x1xT7^N=;YsTsW~h&+N1-l}OHO*|iWIj(TmvmU6t=G zWZ`!yCE%hpIqiB$5ekCvA#H0qQ3W}_mUm$Jl1|6PZ4uqO##W8<0rmf}!*{k%g*~Be zzH8+6uVN!l?ha2-gJFo4E8|OPX!Lr#vacv1CR}#2mt=}kh&WeBh0j|MUi;g=I)-F( znkSl-gr&@mLBy*LscV#e^h)^3IDx4ly|q{v`%i}umtWkxb3L@K&ED9Q@WOM@;i>&D zHMRhof(#fYVJ_3YV`VUDq6f?HkDEiTC4w7abBMfzN-(Ksh z&)h9BzD>N?0RkU}3GW9^64^9+v(U=Jf7-J zLif!IT`DsdwUi7D4#y4EX)F>i5!gR}$ndBn*tzRMlwx^}n?-+3q}#3+V!T8Ya9~~; ztR1&oMNJbSI}fUIYoE~9UtDPiKZ&B|Lu&-v;w$S`w%dDN&9qw-Qj`T4vapm++i$sO zQKG#}*=LxJrBhN;QrOosIZkhO#DhGF0UIdGTX5MSfxU64r>HsJ%sDt!Dp9Pvmi4_2 zT$Z&*!~;1G@LQ+8BTZqh>5=QKXxPH+c!i!Ix;XQr;=TA^jK7`kBxn<&yr`W+rYTSv zOQH>3Cxf9p?YyYW20Cl%xQ;;Q*ly2qJex=rv2!nd$;ax^l($bCq4VRVbVZy&{!-~?xVh90S zs@B_iK<)SCWk~v7ZaDD_%zh+1UAmjT`#FUwN*lIQ2;}rqvwdu7yK4*2*^FYNHn38d zV&CAi3`(O5eWL=`Nyh-R^3U>X zMc{{cwwOe@U&TM;*rD}cm=2}F8{XN?Q=HtOW2qE5j#&h`uOaUYbEBo~)9~T9Mp*ft z#gdfG?=2x%AYL>Pkt)kMMw+vH8P4S8jwr7^OacM%LntEzAmj1u-q;Wx>H)jkEk`4D zFK*mb6(|w9o#h6}ue6}gcllOqoN)4>riCt#olpCM1yP7PVVtAE>3PeN#~!j=oJH%5 zeS}Qf>On1x2h&h;()jR|PwWf_=R`XXnbnKOs2PfH@$T z7sXff3ZkBpi7a52j}jU~p{VAL$#BzW{YwfAUFhzle{1N#cHr-c2zO}@-JCLUh5EL! zMWlnM{Lx-vJ`;#bBR!F>E%5(8nj{qXCXoQ$M8fc5kqyiV(BDi(Az0Dj$NsJP5t%nw zaB*DDTZXUmDzjiI<-j*n_#22qG`_tta0PyV^?CDLv9GT^(b6Vf7e2{U>od{I*(Wwi zDhri|u{YP(d(+mtRK>jcueNuzuH-*&$h|Ss!80S}=|gAkVCMOS zQ(vmAZ0~FA+toHkeD9oH`}%_`2s!@Q$#jNB6F2M~=m|j64)(Kollk!Hy@5Tn<&JGh?M5{^7mRA z zV+6%chp7#Pw9OWKgwA`>s7ckZLq!r|{~@9zf@&f1A4XXVaiNx-x;g8Jv_5?h*CO_YHyfVv z%0m6Vmp9=Qkxm)myZ|{A&j%^?<*vDEX}S=ZTmRW|_YPf}9R_v*HrHA%g6LPqABcnu zeV^Cze>g?t<9upG({A1&gMkme|9mB?dcAbO@HQr7)b4N%06ixOac2AQffP*Tjhpi~ zjaQ8v8U@`nC=vV>@SHI~u|q6fGdxXW>3V4>j-9oWZ-h8)xv_!YS*A*&##AokXgVNy z7C8$-WamMH*M4p`XV1LvcXANj%CeuJ{ToDjr@orQ;uBI zG&=vwC+TxH1Cl?-v9RmI%g=kK+oxClk27VrzXr131jE|T3ZG{4^d>^esFs2$sl}v* zJ~xmB*6*MSkS4eKR@eD>VlJ`-#UR||w8OjDoGxUoA2}1k()3HZ1&9HVABYdyg-Xx0 z*Y{h=bB(|fao{+R%?R@?yCJ}hn8~-#;z%zrQFcCCv*xcHLe?orRYH_)B38&Qd{Syh z>$<3+xnl8_r4IqgWk3UZnw+ugdmA8H3>zXh~ zv?=nEEyXA-+jfON_3P6c@@?zay+Hv;sVW}Ia9xhqIyZg8*&!5vJ9Dk6^-O7-%t9WI zWk-8s$&oU%Y7|6{wj8vX8e&(a4c}=1NbHHLr%&@-sKSKS6xfBe69W3%-G>4M+=Lsc z<04`cav=vv(%J0j8=Mv3MjNUBN(UId@6Z1R2txq1$@eW&u7(^V5VDt*a{N?d(qFSA z1sp;w$%Lh8%Yn#_>>eQ;qP4FY{Fo%EjGx=lHmNLZ9;M%{nCKu!6?NX~ijxfF{q1Lg z;BPN_as^kTGKFbEO z=J;LKZS+6#4w~1KmRTdGCD~|@{O!L853e`>`b<38WYYUy1dW$Fe8gxQXn_-=y=lX^ zmWo8;nB~+o4P8=5sq@&BObIjwjXq~~$PEff{r|`8$=JBV_joqM=Sq1j5}Yp=a?EJ1 z+*m*F`C7EnQ&UX13Phv|nz>B!IUQh6_YR^*Ek=yEs@7*N^S5vv#1oWZPH)BB*$FVRGLB@C)1xL-s2Xd1BLAygzGeL3>z&kOoyZY+JwCvJ+OxL2Z+9T4 zLz}78`qC}idzyn?qM76ASp00tgr>b*hBMD8v$)){B@OIAXj7lzJ0eVbxjJ2}XOp`O zX31m$wXLvXWH@ZTv`Sb|eeGuHC^Vrbzxr8Y^FW$wQul7ijBu0ekwaxsE}oZl5O48~ z?f7_FKmLpfYfIt|vtX&cj`!-gvqO82zB9sKRnlQ8-6m=5Wb%b8eurao7pI`GhX8US z8~`zdk^gh=C%)ZH?*#IG+ly^xgOXZ%d-M#F&UeH)!GGq|gJThQ{+ZuPA=G9l=0@qU z-EqeKk*;0&FQV)ocW|lXeIt{Hg9Brot$=no@>VE|^>42W9vN=op@JSl&H+nAUmNjifMWLe~v@dd>?FRde12xs$vMDH1( z8fT+24vsIwE9*78j?KBXA^ksDv>c_ndoVE^Hlc8(n2uvv$>U>-$w*V!de(_xigY$Y z4(hS12nUA$hJ&|7Aw_ob-OJn;akUyx&6i!sUAz!h+(rFwNeN2lI|?re+m4OR3synl z&UD@#6YgO#8>v_-V%Mot#gy$GV@9N^K9e&EaCNOp`d3eBk0->5FiK-55LBy|&Dhfn z#!f2qZ`eX+YaR8n%BUqu4S4?^Fwx8Fac6IX{co5yORu~~{iOpOX4Ivf;kE*gxk%0v z@AB-klWEm|14XIRAYNH>b&>)?Md`%VXa{-uwF{lF{JYZJRq%C2KFn9A7e&tq2{B6Z z0l+xwt1UH1(hAuKqU7v*rX%O9U_9YB*9Gg23W@6(6EidQ`5%oDB;3TsA&h2{z`0j@ zmkzXh2Hv!EwbM``$2panunZs(eEwyOz%IZ!cZS3mWJR&^tpxCm_aNTjoVeyISuNlV z&$OaFolXqJF*SEC#35V1mhzUa`&|h~H3EMv&k}zciFA ze;zwH?VM}t48R*?Q1Zw7hodmB1Vuv*7(ajMqE_BXX&~V5+`GX5hspQwS?NO%{q>n@R+CYIvI}Nr)^b4!&iwci 
ziuTV=ItQtEm7P*aIhH~wNmU5Pslw)41#jA@Bstjs7(c=G#+RnRn?#%mW{18QPgH58 z7k%N`1y>?$+9DSxT<}@r;ILQ5l>ZJ!Ya@$5IUtq4lw=r(gGmvAs6%#FR{6e0kHM{) z)A`y%I=E7Y>W@aAa5e15t%XZX>!^RFN5h4V|DIH*Wp24q5721y+T3*oop0D!T={dU zaVt}Q!IgX2 zU?b#>G6AqZU@Eh`*vprz2c5pGjDHHmk1!dfP`rg8y+asKuk2D2v!fG=j5!B8q%ZC^ z9>U=JszIPNrkabaAj0cHK%QF_0UD$Fb#wG6>M3qYDE;S551ZFI;bIvV| zm;4ot!!hZxxYX81e+lbTnz&YtB{%GST>nWF!tBKzIjW zj2!$`N_P4_4ae{PZpi78Ex~6V>ba{ed9|&LU*rNd-^GA<+drpfH8LjeGpIXA@I63> z-P@%#q?gFq3WF6D0q^f94|5Lo1gtum-2b#GwnIoc-Xp)2y6&eSG^NBGtFKu{JRzH` zx6M@$Ux9MY_IW+`bKwU)Ur%p8t?ab#SN`6y)B!?#D~C<1=4M)n@YvSt-KNZYUsmV3 zhdg&#rrB{Wr_OwjDBxbj7}qk1I#VeV>#9q_Y5)J>(cV@~i2ZEWE0$g@w?$DsSmdy&^$o*7-S=_9fTVZq#O?R&^kda`lD&jpr zg4Oh(0<%Jw$_n+YA%719Uz0OP(6egJHO_Dow$7AGom1cU_LC7yP1PxmwvWb*qrP@9 zd)xTzTNo|(a-j%>2$q8`-+Yb7Y=|vKH@MdthXek>Id@70=0dP1O>4}9Q9+Dm0XSf@ z`y-MOv^uBoUBMO^3q|#*$?T*0*Y+#TWp;)Q2?tD5Vag(w4Aj_R7EGcWL-$tQFOr=) zs_Za!L9R~**kmLm+qN(|V#Trd{yDXW^5(%-q+gnOam3ILXPqKWCEC>1h)Z}=@rrf3 zc~vftw6SXrDRvE-y?QwnRl8(DY}@W_BNMu8c{)@Eu%N`DjjmZk#{Jc1WsbGn4|`GM zFVN3%-in1_Rm3iKio!Sb%xT!CTUhI15Dnaqb$2Z0R#Pv(jcMT$5{HM;VdX<)}bXd3k~smj0kWjT0e+ zbylLA5jlKazTZ@b!!Kv~_P%0W!&#~|QhRmy{;IngggY-sl@rDZ{}GTrp8F!Er%t5e z?bQUpJ`Kp0=|tvg&V$sS(2F=)oVwM|S_k77ir_M&E_j&FaWiy;<2B`z4s}AEN}NQB zgT?c-sFOk*?N_LNaa`6FkjH=*xSSJ_HB}2OX>MH4M`vZ4?d8bK_Q2439C&R_ z*tHfEyP7xe(T4ht3~8$P9;(bjWpYLDBi|AwpDn{zvmcunBt!AI`9V|B`#jk%@uoXUEb=Gvqtwvoe@UC7!qo=r*_x7ZI6G4lan=ymT20IQ`9#SJGqf%bQYS2!y(n?F ztY!ALD~XMp8onK!a#|kB!buMa%>T|OHXTkC(qCP-VR8djmpgEEWAqoT9{T-M^3>-YUVdcaEhAU>Y}3lG*1`&n_Lcts6IyKWD>ZPBZ7Y;ksw-XbshG? zlQxau&HU@F4&wEEI%AJpwa8v~c&`cGRrMMef*b&hL&VxV1VD zd95xr6bvnyh6MkkziS7C zzYoC>b(in^MfU_(_Yof+dox<(ugR6!TX@Qg7iV=6Waf^Awk!5HrRJSccl2P3N*}1~ z(AM&)7BFKWCzbsilSYl#Li_hSU4SzTMcY&JaZl?utd*RJW@}M$UXLp)UDiu(ZLfW_ z=4Q|CId8V%U=}~ysLJ#?RY*OAq)=eoNXx1kF^y^Z(>}usnSd$@><|IneyE3q_8JzSANC@Ldt+SVm7F~up z@gw)RxaW!>h#qo{d$|3AJeXA|?D%~<=wKls%oI7zk9!}iOfk9E!ijIBxmk7Cumx5U zx25a#R#oizE&Q%ct6>0qdZu87va(o3p0SxDhVdID$56}k49!?SXK+OaIEB+d zv&)>=6v2ZMHzYmfe?Faf;n?^({+sa@pdmjcjX`6S@k;#}AlACoOy(s0hf z5$|AT?S1D^+7oPpjd_>2jOF=5+U2>tlWdsa{vcWo=M=B+uLvxoA!a9g0YzS1jp(>9xRi9D0I1-r zJEMyK@PxAFsiUgJ?2IrmykgG=YJT2b{^;uD>9*e{>g>%YMt@yTLIEX=Mn9T6HEM&Z z4LL}hD)}fTdbH(Gk^xsVoO0k~znfz?{4xMTK)krmr0eVP^9$ zZf|=+Khvz|yfEuLNc$U?2C@ zyP^1us6oZY%7(i$sp%?5;J5et`ml=EsXMZ=8>!B8VJENf7JfvQZ8%yK>|stVZBHdY z)Lj4_EYH<2t6qT|S~l`Al~%LQ3{jX90yhh*BzOSxd8%Ip$a7)7oPd!x#RfaPRj25t znakJVUTq73P){pv?EeF!j$opyLTIjPbHb`^SP``1t3K=o6eic{p0o~rPAg2}Z?1^l zL5J>dmndm*mhUD5bk^)~=d(K-!zVaOAj>zJjR#BlN!MEkCkg>ZGA`ct*_@M_;gPXU zZfNXwM1w~&NuPI8rEPy}y!J&N;xAHM823Xv^SN){GW{1O5h7`&q5VrCA4Y>LJN{ex zvdlkh1C>Rrq(;9Vi!6_~EYMD`7tK|8_=J-ite@pn8;o95SK1h4y2E3jd#=xBE!;bv zg8uSn&s3Tgc04u{Ep!OlTgk7?WP4@@%s#kOE=ArJlG@IX^ml(huuzfQIi&@$k<@wO zkW=)MIDyjjj<8q|wJ{SXzvuI0=@jQbfS6h&{m6Si)1yE+Q1j3{i7v$fbSNqkm{nyZ ziV>1If7{_(GJ0u>zMA?yQJ%dGX7$T>CfQr}day6|SGxm^r0!gz{|6d)@Nvfk&;ZD` zL+HAt^*vbHk!O-CT?C^{Oxu2Wn%wF)7C>?>z9lV^jq}vquHndKw3iSC$G#&AAS8|5 z%LuXb>88%^6pd%>I0R+$I~XMq;|BM8sLx%*a5Y$On#VV8!;x7JdKVR&(40K4FIKQ_ zHVV{5h(vyDy<;%~18hEk2NddHMut0y6LUg`QPpAl@;!~7iAQ#7DN z@}0?jUn;y@iZu@__q>69v@7dA$f+TU@#98>WuEldM5frf`(heMhG&Tp_q9k~l3ln% z9pRrQGe{$g*EuWT6w<%r5;TOJIEB#)9n(ox(Je?6E}fYPDTP3mc)OTew%jsHOnGY%PZp zS7^ds%G}9Sio*y@iS;RLE|cUFSxm@Dc$oA!e@tZ$))Hh^4E?|^&~kFBtKcGp-ZOZG zBP-lkLkQYLV~P(0_o=8A1MU2D0SyKL;&+-f(&{50@NFR z-NB!k2R@oGga{-iiOKaZb8ASX5jX3t&A8|etqG)7g25bcvKwtZ9Ksj&BeVL zD={UuSBu-mT-ST3KfF{Lkdv!fRUyjWChlmZr7vJohCAT z>`-}<#OU{=b zv))_mu^G_u)43MCQ4C5n545v|COm#m^%y$9=sqbVx&*=2G84ANDl}*j3t+`JpU_{(a8L9;dp7b<<7MGi2(0Be;KY_x`Al?eA4^u&K-~X>MGVm$|5(h0!ir^57Blo?xlb#0B2R0KK`|1@6 
zsd(Dj>nMI?h@I-wO$X5Ko(0)``hU@{MGg?YHT0i_>Yyw~%t?*ZgcC$fqq6`=W3c@+ zfi@o4^w|-5!ELq#4ym`dY(r@Gu)3Vf_-!PDlEEV>A__XXK^)&%i~^eGc9F+piuE$e z&}QkfF^;@mr2KAEc4vNmj(p#3Z+Zb$>Xp$P_?7h`q;PNbb=Ayh4$CX4iNpcLoaBMF z9W=YUm{^-Qs(oHc>s9@aT_qdYDwK}ijmei1KmsE}Dv5PRg*tQhsHBITcdK{~ksL~) zi;q8kPKNQI{(G01oVN~Eed4NO-FD5Y^c6CW$;390b(>FeV(2T`O@${*%*(_KR)ph0 z(d~M1INUnz8xB*MsJ)#GP=Mf;{aiRQ|K5rlaur_-9|5Tn)l<&ZrJi9fAT=#%K|}bU z&n&s+=|ZMa`q8xX03&2r7mRpOt!`L<)dj-~=U*yka3uX($*A>jf0ws!zJ(6F_A*TI z!r24oxQ~k9&WWD=u!+sN`vLeG8&#W6)&DF{kb5H$z{I{|x4$;SVBuy+R-i7F6jk9d z0(6AoUX^a{3;tA6rY&o5Y;!|Ln;0_G3HfDdOQd(PG;6=hn0mx|?#o$?>;TCg#4v|< zo;IQy3S%Xh0CQo>g%VaW7Qm!Pc_4^YQpCHeJ_NeG5@T1pXZjR+UJS)%{=BJ|M}?;8 zJKIi&gY{gkn_6JKheIA&MbN+pj=u7IH7%KOFx@EB=K0a5L8K`SIa=KC8SeN0&zJ`3 zy11a3VjPCzos@2UxO0ga1 z5ppIKG&7${Xrlz%F77FB0{kk!-rno|R&EwSjwJs+HIRg5Z8gr{|6e(@IQb`zlbgV;v(vW>y>vE7?drbbwQCOrrR4|J zH{4gO631@p|NA}d88eb}0ee5c@NpSstmN&o2PVvvyY(+0K4 zOb?RfPdj{KW9Iv3SMc2-^|{BzP8(GR2OOd6 z;(OWgXft_rNf;E>eJ|c9*F-S2+D^#jIwz8LpsK6#s2!y6El38HJt zbpR#|Xm<_^>?jYD!A@K_g*L?Cmhy}vIu$waIImPjOKSJxZ~vdNsxsr(BBW+Dd>MI^ z>@ixK$Iq@yUHE^E@f)$q+=NzB%VLC9vfb+0Gop*v0J!yV0LRYsvna|fk@7ZR5s*!e zW&3BGG}=oImY?@UBvD`wmuGppD!O#P2*rROu2NwK{K$1^+Uo>><|9O?&G6dIHUv6RyHcE?@#+^TlP**s zIRj|R*6PMW_9ieuveDDVaoffV`ty}PyO=}Aoz$)zyMbAgMm{5O7=KV%s+RL$s$7?e;HKhIuombD)=Ceh_)#{e5!^GI&S!`98B+%wFueP=+$Y)N z_G?Cy#f}DTc5Xs&9>Hy(_x$bs#c`0Lyf&R(n(2O!t%35%#TBC>ecV*;w;F#EujCB+ zpktc5i^Vf9 z=PMrm*j2{A*1pM2iTsK$-`Jml$n5r7U;+c~$YG2>(|3_n1zr#sW-TiOSJoHatRjGn zS!|@+$P+4~pV+BV9A3!WS-7CBOHS%xE6u`6F5%G2^0lOAnSiD!tR6p!W6Vh$RPD%+) zG;{uh8V70VcKo@W&Wk(Y1VZw1`}LP}`9KCrOWmC=uq>`j7&UJ&)ctCilcJdba0G;G zo$LH~VmlYy{Pom=f=U)Gs_f5DnRrNzbP5ERD$Tkr$2j|f)Ux2}ZMi9>k^xE*>5uE6 zu_yprG@SS`?HR(RJiLaaybHKi`bbQtOu2+V6~GBn@e6AVDkh7V&l=xYR>_=I)(<;0 zOn6iWC!%a(=Inn2NZB@5)9x8gH%sg5D@Nn!5QG78bvJ^YBrsiC&FlGHJA3%_S3|-f zjjR(lcTqQU$JPi%AdIr>vq8nSL@ieSC2;CbDkiKdJ>RMzj0^to9O2=C<$ymUSmda{ zU^hQRG)E>{QqdX{Q0$M?nh)vB(boAS(6s!Jwh7BJr8#YWJ|OJy%)TvxiDV3r*FGJ` zLjKklxj);R1!P(uy}l=s{R#2jVVm7U;LA%~?7hje?=Ot1br~W#^TJP+fW!V$ts;Im ztO=0mz&b95sV&EE6eTb18A$kcrh z@o%;X3`rj<2P-g-{M@ssZp36oWHy~sh)GMOBq^T>qy(ijxYE8%t?r^psyl9U8UqSxMJIuf%u`*fvy7BUsBH0uy@0GUBjHSTX37yy)BOsR~{|P@MCU z>KXkJBFW~{KTQ8#5`G)oQXV@j79-HvNZd<7JSju}@vMssEKvp`M-=HnNol&R%z*6p zo2ho6*0$Dv{`_3gS?b%}WYfsaS-lq(41jyj*+MH+AQ_}AJ%YHeV@2~l1859^<9 zq|~7ubQOHLr=C~lDp6;)1``4htU#rSjr+uQPRt_pA(aN1UnHkgub~h*hIYO9u_to% z5xP|jd;gzxO0Gj8yrOZFtW5I4RYjSLxKJUpu}Wx;2g!4GpMw7Y}1Gtiam2^9^3rTmSh2WC(RW zXmg_fbDG^mE}@^&_Bl+U(PWJX?-&_Lv$xea15OxA-~_NjN`2x7M4hqHZ?%u*Ev@{~ zK^wVEZfv@6_4g;)a6WeH3~$f07F`NT2z3#KC2A?hcW{|a1HZ{Ter`+c1VyZ)$)X%n z73}*)3X@48NpW=I5)<2%g(iUvD}=cbTTuy}*rZo^pE?7fXapaGiv@jR zc~N5&V-Mwp+j?n}rief`fj|gA%OQ=H6S;wWF%hFWrmPanU<5lV6D;V$XhZcTnAe?# z5=)X}G2$JQc>-rWG5$}2*0>fp_{CK`vu+?}b&YmSX#GOejXzNsSi3@QO+UX=3~iuL_%}X5FIN& z6VK=7(5HG!t}lp6XYhLXCGE=H#H^YqoZ3h96Ni~3$>z|A3LP6)C_5oz5=)PDN{JzpuZWlwiRmuK|3FNI7!& z+7qJc`u~Y=;%?BI3TZed%90}4-{avPy8q7{v{M#k7Ph45lo<&{KkGjefR+hIR1)cF zM&qltpn<8P*s2X8ECJPv@K1vv=M;&w+ySV^4_&1+RV5IXtnZv~AU<|c_W*n_gmK@V zIg{b>V+A>#K8oydAdn)*u-<;b9o{||1}P^Yn=#4sZ$^kidl@R}mE%z&)^=A{k#UGt zI#=MQ*qM?1e7&&;KjU0pnh6&J1L>ldW9sEtg=nd7ta3d6yLYd*q76{S+5Z>r9BBeJ zo|%idO@(FDG|qdeb&E7Z{SAm}8Z6iG^+iY!_^~GcaWDe z@C;kM1{Y24M2y>`D2voWKzfP{s%RF}~e6@uSonnJhK zsRe^VEm{ScD`;Xe4NUj0(c=ZVeqr<@dOvAN2Gnh#Y3hAT{7x+EC=1G51Tz0Yb&|P) zj8|k2Es9p}t%Z09axjOCwePEAoGQSjxvQ72Qbm||yEF2xV6S1G6{qBDmLLYYMtXs` zN_y=ha=EDCAvGPatUAKlU8KcS)CLDc(edyi4M5Iup}qn*1p`ddytZLN40@p=33RH@ z(*z{&3HlUXX3qBG)!F~lED}-Jk0>E7i;$~Nt3p-(A9)I&jine{`pm?(__TJky^rXPBNRe5! 
zw316@E-04>_?h#29z>zEDXJEohMP?URw8%`+7l=egVh9Li&Y(biW=*eZ)xiN7#_!X z&J}yrQ(?(SHAxK@4C~?B0NCyH2|tEdGr^dK=29;j2_1ic=2HA(;}}4F%0vUkHUa(b zaH$Q3@dOPKm(#tsG7qWdaMo?_DBf#yILq;dbrXfE4F=G}qqN#hThwX27N05L4hR<} zW|p6xH50#i#wp~F@#H{MM;^c#RlDS;u^ZjpL)sLtSeb5}GxrI=`}x_xotL`#)Xt{e z#(P-52&~qIQ){)GA=_SnQ;1A@^!mW`=A1&HI9ya*kXTIEWm2#oSR6ALS{$dvCyl|) zO#i?y24}W{r#OEQWdg+=FnDos3>U(((57nnwmk=cy@sk;->)nlLK{dGKvVN1|VLb6s`B8j++v7%Sl{^;Az)`YB= zt?7gQ&~q_Tf;UY>*8HLc;V#T?s=Lfsrt0m34L?b7o{~tMuMyBlOEH#FMS3h@pDfPl z$(_ka7gLzgNeL%tBPc6ds6uB6BQJ`~;_)@#U+{sTfqwi0I{Ow~6u0kh07rh!e2?TtB~N4# zM5du8|G=N7Ix5vTh!!XpL&AShsf0+$q7CwZAv7-`6VoC~e|bi&FWI%-!cMDDD{5D= z0#CY@iwSevKcF0~{axyDJ<~tLvM4c%QW`$I2V91g<2tw&I@1Koh7pco6O#L6^rA+{ zarOQ&=)~D;$IPHLxH529z5l8z{t3G@Ja#r;cOtE?Y=>r(kfd(I%_#C}(_-h(&Bq&MvYdC6Abo8i(88ZbZ$TzGPs zba%UzFG{_sOg=sf&B$@ZXd&lTUHgCVXOx&_MOJj-AWDuA>J5kYSsaZz7dXQ9srUf< zhRoqR;gR_(6eRP}^RNKuN3zIm8)~>u_ELyD9TWVpC!SADv6e{Dl z@8fZ1iiGEQ#e7L#Jf_fls{(~fAtkInd+Ws?IH47THT^Kqoj4*DL4~9j4B{iNwwg3r zgj|w=@Zx+VcOy-HY=}jatAjWAF)+GvNNqbXeSDS60iWJ1)V;r@B0)#t*B)iwuV8>Z zFVAKrtg3!7u>7RIQt8YB>)Es(dUQmp&5>!uqUoTPb?P+WiIBVPC2<2H6+a}QGBE~W z$hKf|opSg*ke&se2NXcKTVEgCDo7V|1Qx2vz}M=Q_oRi`9rQZ1Kt|8F&|o&F6J$6Z z4*1CyMndQztB>wV{}|yHi`YiL;YaK`Gx%QCkYi(&CJ^h;8#jJ}T;1n^xs84#%o3vx z^5`z>vjbGK0@6rEy>);N&BLae2klsZbbo?ZxXI76t%;n!Hp7O+ZfIYSMNBNE1tA3qj2ti7M9iuCwrt z73{*Xpr9<&d)(L|%Tn=DU4X*5_*q~h^mZW+5s0F6GJq;x^c$Z@dss7%!*K$DRL5G4 ze{v##9)F*YFU`X5|ANf#8J+ZA_84C1lH0L=0Ux?VClNjd$uM@T)7N^RQ7m-vLP*B` z3Z4Xf;kmR~Dp_QtG)uCiG4#PV%ZkI)4m{~o-7*9BbH}BdeY6ZQn{j)!A*JPBakNQ2AprEM*U0-_kJyym8RaTl_>o=xu7<(DA*9Y>Zm+`8 zgGP1CQwt&lz`D^s`qN5dXE3neZWq#8UmI6Nmk;FIorRiHNUyg?pwXffXnrmMMVX);O7A+9UyfN5v~y@0@|h5q6Gz zH2q$-wf&`Sls^u2^j44^Vbugj@T^3m?8LAy@;Z3pDwZHS!{szNRC}O!Mq(>Poca>uc{Vx3wN+fH5tbieMLSzxXZZ`H4F4n-bae~Io$qq-!VNS z%n~Ikz#8_sx#=0R4K1jCkRK#F5!pKZ4*t-^%~l2i3dwyvl9xVuO|&WHFvTRP3Gd%V zr$Ahd>ecg&q?jU%4pXNr*}9t zoo4|wK)bi(?1j(kX-UN0flsi#ly?DGd|+XQ!Tng7&OFyO%o*_lP$$?v?QA{kp?L&C zQHbj~c2+Tt;>`Ti4a$wJaFFGjj;ehX$k!7}uNZ1tMU2C$b#2$$@)BE;O(bxTrMRhu zn|+SMJ^A1owzGsgJB68MnV4Fi$w6Ff1T%O7t6e!3U}ci{=eSh3xH*74BjlPXnOLJm2gr(3;4|LZ)&-@l7A)21aE%BLfDxD?`KuORfieQ2 z7`sQ8<4J8%*P;qN&pH!_c;x>+^CQNhqcgVo^#H9 zO#R-IK(AQe-6(?Unn+o02xKL7kUP+s^Bwsl%4uogyOY8hyyo6_ZMT0zH0A`~l2`() z`rQ1K-b~Wf>a8$9rsH1@T=|zqxc)b~O1$(|l}&(4F$W89q36QWIH!|6^+lJVP}x0B z<}A^(#;!35@;=DlDbC1{cB_OQ|7i2akv`@=_f>)h9JZ%pgJrL%aE649v`WQ#E0OdE zn_JZaxKS1Zgn>{$-@ZUGDa+>$uAUJqFF~+ll|)g%(u_RLre&t1}Y8kCiIetncjL zfKnR*YWa>FG3!5dunToLusk6-@}5{9kS548&3u@Q1UNyOk0MeIO|&oJ&;d3!jzUVtbp~?x~wMSdLUr4-@8?l{9M33<(I^r2{_393w z23{fjua(1Lby|-QE_V=KrMMn02{9>kE9=L#Jv{e~g_zZZ8x1)dP^|XN@${%6Ffe3e zgE{~J00001L7NE}Q8RzvtMsg2!keP`;3d<9a${rLeQkn~wtAt8&T4Xe%2=k8-cvJ< zX!8u(1R^C-58)f{rxJa(VJ&_-6&g=!Ho2k;f}tBp|2)}f1vMmd( zH2|h!l1jA$BS=?q*Yw{l#3W^FZ#$n{@swFcn7uoHK_FE$QshOo&B7*31Q^b3mMEyK z4SIyrTZe*TYxJ%0f?pURqQ zMC5B`d-R?o&>-`Y0{t2Jdd5#q`b)8@ovu^xj8U=oIjf0_;;v4`iT9jz@QrL{F>jo|3dQ5pRM%}Vn5l&rg z9{CWM=!sbavSU{Cxgj-9di9MS=;u;H_2CtI82*g_>p}#CnO-AB@m7vfTbv7M5Bd-T6Zx*2*ngQ1<{@%yQQ&{a<_DBj#}_ zxxWsj=i|vu8`PMVytyLq8s4k10-y%Rpa%xj>gY#-StHXRpg?kvn!ch0O=9nwyPJzR z#o|<_L2L^D&uY~Y%Yv`QL4ws9DZw#K zKo`f^Vc0O`u4}XyF$OjHWhAK2Y-({)02)1>g~(2X$LO5;=7AtKAzXt) zQ0X>FY-N@QEq!nZkIkpL)pb9KJ`p{n^A|%#JIA?c#l+cwyhOrfz{pKks!TI{O#}cR z^QORsyk&c`XZHwM^}$^vIZ=HZ0qZY?QSjof|KhOuEwdO;bBR`t83AQO-4Xq@*@jj% zmW8BB3uHW@tnYfg|9~Qwo7kN0hE;xs1zcA9IP%N1PT)Ky(l2_^8+yM#=$*&q>o#n> zk{X!%aCxAYtU=gzGb@R@SR~@x1w3m$nR&UE%%G;B)~S9@2qA&6J8(i?V|^M=UXQ+JGXy&9`)JZt-bQ|46I|8WTMcGRxQ8}#ETe>C&MQOhS* zSWGsJ->DtYJJZ%rWCtU`Cg_WYT*uwwFOC1Nsa3Gemnyrvt) zf{w`u6j97KVs?ZR0N_T8#~N4UxJ9W4WeFyQTlrzrU&c1rHD{l+v&7p#GoLuXX$&=k 
z0uAgQc>;00S~NRZg=h znpXgxr!7I!YGH^&hl#y4RlW=jaBr647Jy=jYI&E~F;AXHfZ_byV18gX&bkR!;vB#k zN2paMa8Ulc4;!XAacK}H~gn8iCFW`(&z-_=v>h6!ELT57_(E>7 zP4CUneWr#+i)fCDqdRx>{KMN`&%CzQWNPnk!B4DV9uMIcR z369=_t%38tnTQYz!mK!EJILC}(hxz5{hJ6WDtx2+QI!Hu)lvG^uyTc7`W<|QXVHGm zxo|V+hN!TE(iA;R8MhIuRY_9dFfPr5UFqu3zqfO%dQ`J5N7eyn6`tU! zrH0B12gE;3f4mlj{kg_Q7Rx_}6MdSE>NE$ZK$(DO^H1xO;Ile{C2x6_FS|%TIFXT! zOnCT=Npb{J`dymICGzrY3%c<_9YiVx4L$fS0EHStT_X1w49(s%Sd*(N6^fKz0uRxV&m8zJht?5f&59(!DK}*sfn#yy4s|{AJKFZ z{+uN-ctBGnPE!A9@*3kWSp*ke5dcarPhX)yZ&`hVVkVJxPe<&a3JkY)$WLG=(J;%; zk~}=_`kwU!7;Rj|dxbn%8^|dppe8PNXjJKjlR$#xary&)y=epWI+bmP2;qU|13zk+ zR9q#o4$9bw+^hLawQ<-06MwZ~R$Mz!luLIi_Y(=-^C_gjvT+ay$qnPmg4l~*WVzw7 zDArMPbcjr14IZwmv8Ui+bWI{#l&;EPRE=?y;FQJA-y}<46wqn_u-{#HsC4>TSDM1% zIZlCESY59l;>AWlVJCS>>=L!9AJnzj)m!8k=6=mdPSxdkLJkr0#F0nx=&ythD5Y) z$WaPUPSz&y)p)Ov-C#WQtZoyz0C7Im{0wnB4^d4B?uq$HZC1pLMCE*~D;guComlYDaV_zqosvgo>vP0tO@>aHJRy3^v@8$$7l$R8nF!4!W{?&lp1 zwH_$kEs8i1a~x1ZVVQP)s?pLYzt)z0pz22%RXul@cCF*P<_MPS_~_AiY-PKm_P3{J z5FGJSPl3J2(EBCP#$OG0KTAvzVTI6mGFST?l!9R^^F9Y-tS$89ECkVf&1??j%LyUwjd@;-!v7;mNT_C z1egAAZRC8!SY>K1VtPRnQXdtctdimJ z+%613&v9c$qH_s;1mpCPJLL(pJ`4qcIs;8pq4QHSf2DI)Z%S8ve79ZYnh80@bU`|Kf;h7Nuj$cW)n${-}`k|4{n24zM| z-X@*&Kl2-=8O$%CJ z67l>Qd-l}MR6ZwM^uz+vV6X>4JJb+?CkdXL8OP(z7s^aT$NmhL8$Wpcdg#pgz?7w$ z>z5l19@G9Mw`FRUURO%f^wMbq-}`}&G(@J9I+3H`7^E$s_4v}FKxLPN0W(ggw{oci zfv-eO9pz^+W#rQMY@o#u^);uUQpsU056bKR;J=?$9Yz`RQLA;=1{p_HaaEc z;UMJ>;%zYD5jjVJ8xRGsP=X)slK4(kK|g1|@qqOPh`o$6UBi20%>Lv=2Ck`^&}Ooi z@OxrZ1j=vs&B#}Zg9{~Y)twx>q9lomzMFV2R{-GD|E3oUsNgxppr5 z5C6cDx^NmUhJk2NCi~vB22ns3%bX_nHaA`&^Hf*Vmb8Pk z*eGwT8)I}8D*bWS3T>6nivh#@GS7=)HS9lbkZTs$4r;nF**?<4gdI!0&eaYgEAga3 zhLikokjpZP>4PALGcUSPTxH?v0v6+^6cQG8#%(#sn0Yp9B9CcY@e}%m>eMS6u|)_? zr%?e28dRoUMMZF6VfUs8Jq}Cv*-KE1QJ_ed=A@)q6^Ex#;FtU0oZrILx&15CK4v@I zQJS9^SI^5SIj-RS3STZB$;Ej1?!j!r2k%3CB~B4a^7zvBXtgg-9yb=n8vX5_^-jLA z?LJf5?7L4HeoQnYgxrR2_{463U!eP;T(>rv6qLUQ9VX7OyRpF8ZE-P=Q5y0|H9o%# zxG+9Wqj6rpI$Aq`Dwb#p5n?|Cj$g}X&^d|UZxaTwk7sx>&;x9I4^MocxJgF<9vKQE zy1z(MeY1vzYdhZN+@Pt`f?8*pJw(-vP9UWK=$Wot(g^mFuF8;YuC7@y2$IsE2ZgVd zbYgy?@_j0@dSGfa4!&VCK{KV6udVhe;#2d68@dCx&AJBk>ElSX!2Ho*mCoks$14dR zJj_bzCrI8v!hH9?2cB^(zfHIw5%7}>PLk1q%QQ<_r_AmQ3XGw{FclrgF-4-QiBKdfZsHkH@yb}uXn3eAPa=C$2ET(+k@X?1U3D{<*kNyCxQpFb2{ zVT60s{m+&A1!bqTJ@VBDysfJL+?hU~S*aQ&)Jdw$wv95PgiP?J@y6I;QPAHjp;z|y znvlfV1M5v_iUTo)A7E}axTyFuFj-=ZfD8`$cFBY!-q6kO!TKkvTSy5I`rp!1@w!y+ zL?vEPBmmTw3GA{%+>KbrpBcO+>hwck9|_HGBT-T^5a*Es3>30?mhK%sm2m7a+<7~s z`=HR`Aj|{iWhx^GwnML`fS!pNzL=hBvB)b_5W21(y>a2|y*+|=8>hy)+A4!Oi z{11yhKnlj{cW+24`!G%5J{cAFKI}!BQP25{2gMbr`9dj0Pid(?``_-z*BQ4VG<3Qt zYjay$bpOv-*N7NmcJoY5i}zZgtGE#s9!`?W-*(sjSZve-i!HA?H-KO9da!ZWm4<=d zr=bY$@hp;<3Ui&GE)|+0!pxMa=?4o(T0RLeov9HlYES)#Jf7|j8|y1BngFiuqiw9C zf??NUUs+GbTmGi(Iz+)=1};m@8su=DThg5+kwk^kmr?(Z7rrx;rrGRNxxKwhWG?Q2 zd-f(luSTQG+D9By#iOx&v=yTKhRmtrTa|5rrcRisC%~xVMZF>)_cTfYTd4;(%tx&) z9T^|pK@Ea2&JM}*hni_q_US?iJ*~7E4+5!Mc^%ZX?_ft)5~T zk|MD7_|}_ch)I5ByUIjG5C^I?JivmBc7~2D43XwC6V)|oc5HFBg1yY|A&=Lv18kKA z8}y3Rw&9yv2%*MkV^=3ig@@<6*E--;0vqAa-c`Vq8mc?ccROeQ{jybwH7P*B#xnkh zSxO1AgTP65GZ7OgGM_vnX10Y(HwlrT#T|Jw4i|;r|9XZ4L%YrRTXoi!slOo5or)f# zn!QSB3mSJe5`W%|$MzS6?@Gsv*#(?~k?PlA;i|#0wa~T$KO8nH+}?P79{p&S__fxWX%1rP58HrUd8<4o=uk3Cs8e6 z&vZ5R3yXNfz3aJP(5Ume!-^P%T-})Au83=4KKIhpHY8TvJ(%G`-A?+_AkYa5d5JMf zYtVh)>`1&m^YWp?gUsTPcsvM1Ept@ntvsW?cd@jkV?S!m?eu>32YCO+dFoyPID3EH zYe7B#w3>(RaYC{dxcyeHJ{^+@y<23?V3JlD4AG_hpY8lMptsLW$N?kXDK*+no8kd3V7n{o`YaNyOIR}k z))tuwF1)Q|WDS`;4M6?2LFb$y<&H@W_z%Hx0=Dxh0L@U|9zz{Rrw(IR++02VA+wEA z5%=uxf&1!gidhuW5<1m^KTVzSDT73`u_Cb1$6gml{doRrj5(v;yD*ab$_B)|&bzvc 
zi`+37g4E7J=lrY!7|ygS#W3#$2wYC9dM)M+$1ur+IH{$UCm>;1@oD9@0h*8z0n>}D z160W%@L(8*HW}R1rkhYMr}zA>ujHg4IW`YX-6#Y#KR?sXD6;WqW@=@9PSC5j165z8 zWMxsHs}7c>BK>9Dt$X3jZB>?WBOsL=R8iDZ{G3={yQjM~Xr|V72LGODVK`i>Knl+; z>EaI5sDnh#$hjZ|tE0*&m*OgGM6k)s-o#laDA4;_X&uAbLNJ9}^!TJoMd*a{ot|b8k#-Wen!y3Z z3XTCdm-cB12F+)iZ9_N6NAywc_WXC~d>%ZvyPOuU*khA~jf(?4h#mzN?V7}W5JOEh zetTs;G2D8*Gacf#eP#x3ok7&#l(8CeeZ;;-Z}y4i_H)mZ@~>nZOe63#^Mo%sp>-&6 z^q{{F{c~_s=TbWlJkiOZS}(s>>_GPEJ93{1h+$@b*CNBO)d2MAu_1I=XN79edaTbI zemrqa|JumE2xbFK4&kt}D5*J!_O-0OYuS)Er-Pzc-!`kl9jehCb@Qwil*zKh?7}&N z2CA1?$^&Om@S}y^to6IvwTrp4&fC-#XlN-=T;?Q<+!6&HRtw6D-f53tUI|swgluI#wg~=%1sB?^zFHYgsOt(@@{76M!C^B0uV&ez1h}Lt$4q=~~yU^hN z|J@}DmMcg$1n1S$&v`Ox6J#nax;cA^#-jd70T7j~!PK$Akz27i#aMPLH`LVCv!h2d z10OYyuvoy2%imCJR6c)-xvmIh)LrG^DpvyzX0N(qe_15T;?RlET48_<}6CuqneeVPXa0*v~^Mzd_1*doA zbY6C*Q)AaW*h#0_ME{rDVC&xtmmRKpxWzT2Ao*CZ#G`jYf1!##bAtQR6--GqV)PCY zlEP1hF@{FtAmH9UZ%*m~dnL-~n%PHWkxmRc_cu(ZhiAAZtUc8Ii>2CKpVxtLov_{d z@|AiWMxe_%ZVN4ZW`^ggACA*i9J1tIy34WDqgh=IT(Jd4k;LY^Ryv>D1kN}+y$S$A zK)t_6Tk7DPAFr$iMG}OxGa;Sf<_zrZm&XKVQP{OnYh9CE$NfxIB%)(?A26Sy+ue+)&Es^j$wHT)yq>h0FcyO#6K}>;QS8zxK zbtIV%iD|6rI|9TXYsLmf&U%1Sp`pnRp_ijcXIVM&^1Kw7GmeRIx zeNIU-F0V(coV;}bRo?3H^=t+>6F$sSor*{AAGI3KYn8YQhx3v5c>4JdZ0+3OI<_~D z9!?m~#G&^-?)4klhIWWPY^CRYJTu% zA@3KTHl5#kFI_#~dW796?g%0}e4~#WSK6kZ`$S^$?$|cBGLTg|q!mFkQGH&~<~m=K zW9Y;3vTqvuVL&5S_7>`jQ!p?<6wo({32{1KEA)P`)K|}_?*>!Ao9_;kP{~7mv50%= z)W|ZeIu!ibO2%d?yNPE3FvV^nd`$P*4S?OA_oe<`n$RdKk)v}!=wcN08s59Qoln}_ zt_&mP!55u+^D-6KP`W@4_+vcQ))aExNJq+j2F&2~teU?2nJ6VfhA?xfcd`Zo62Vuu zfhn*4kkiA4{ItB01p{UF(*%Q1%J1>(V&J|RUXP+wPGRI%Aj)fYt-Y)Y;JxHrq6!n-To5RJ zN88+{adFsAIy3!@^-ZRk6X6}06QYL9DG)~Dpphop!y{8a*@zQZT5JX^b(A zA!t2IIWjayf%@G~9B>S=We=qQ=7)#EHhy-}^@mzKaZrQz@^@28amBpzRvIRVZ9`B~ z+zIKzPVjBxu@G>CQnUJ=EadR|9uo}%Ub1_eIQ0p~rjzyKo;cYJAp85Sx9yb|Pv}8m zoCXUNZV3l>pTwPYFGh6vFmtikDHCDsySb+)fQku%@T(lDTO&{U9ARkqfFCAE`&U!F zOIdVxUE0u6H>beH(gjxBE#^HGd7|GI0_Qe5uebq z*vE%eyCYkJ_1f_YL1o}ZE~m?|SUcHYJB{YTU6gVPEi%hP#I(8tAcc}rX*Q@QcJtmM&8f+fh$x+^mL<--wohZ&+CF1i4KAtRuO;UpnK*+ z!KDm{U+<;06h1t;(kK@R0V_?CZa$gUs^rrk8o=}WpH(3=ZjO`?!bMjd3%y_HZ_`i*s<;Sr zTlp*0RJv>Mu*NzzaEj48p=fSR2_Q(Y{~gq{vs6-~$WjZU_y=|cAAWB8pF5d@EY{L_?6P%INS5AV93uy z@^U(be=Qp{tW{)=&ZX)2aIdju4to1-K4aywFUbO$iXKdiT%`_w=~G%}PoBG~k|aP1 z<_HxarG&)O{IkI8Zgd_>@Tf87SsDN~v+hH2=F%9hOKnH3`r;9L78oIR)$x8U>fhk< z{DmOq-H4t^uiFoGV-7d~N92-_le z!g@p6tZCFPfv2g=3p25+A;tHE93$>~_byYB_NSUBYMoe#7Y3WeJrVMUh0&7Yy;NTHw)s9e;aDz=dE?$M6IV~p_xPhvl(9sOIq zQr)?n_j4-Qon*BTPnv9(-qL-tv^|G;YS%8i^THylg5YA9rPxG%wVHXh8w3SXVmf-i?Os*6{u zusZ1J!bj#`pe5{OJb&`G=z8_ov~(lsMNfL%WiQ2sZvvCyf80;8zdF%`2U)sO`F}tH zfqa@-#ngnQM0gfr7?#wnjNC1eMapsnjjUazYo*?KNHJzcb&>VSclw0~_p!M{m$j`- z>^JjR{#9Uu*P@^j z9)9+TxE}D9*mpu`eGJJSyGne>1j)FP@?~wDXaes=RbtY}J)blH`VUWL6sRUkMy9eW+(O;}z+L*;)~wE%nMP!;-`eyClMori3A-c(L$ej>9Fo;AS#d@FpE>a=WkAfM#P131t2-~+J(&KJs-NuAvIqXC zqPNuuVxDt^g-+SuDRf4!-7v{XL_aK?`JHf&2|7bIp3I7%dIur8R1(|CaTv4 z%Dv>zrL{2C)UOwtS8xqFRbP9!_Kk#yc*?YmX@#4L<^F@4zrzyM~}%%wLZ0>AqNk*54hjHAtNTkUepXf zN1rFyHcfVL_eD*1y)O_(50xHq1fxc@P1%5LtL??fW$B z(Mn0Ym#wBcMd%-J6>YU&+1PM;$)IKms~|#W_VLRACj5r06(O=1@G(RFVa+q`_G+_b z0VM>)dXvFSbJUe4VfW!*Y2&kVr5@TUSrbE8a-#quH;mdw^PMq<-PdFu~&OomfHOMYz zH*Imu2Yjw?uiq4Bq(-v^ul*0tTr|r|{QeR6S3_yZLO!TV0%YtwKZpo=6A66ju82Z0 zL57kTAbs>ft7v%{XBI^3S}uzny)T{2-XsT>2HJD1Wg3`e%t$AH*+X>%+7Bu%Rk4 zYI!AiV7_@;bv9-96XNIxCxVTlz`Q)J`MD$1(n$L4OCiaqjO>E5J49T6?dBd3V>4sf zsQocD{g<3IHgfmguDC|F>4_byTM_`|+ewk;dVcfc6HE%FwB>}!_NSNk1gs?3#&A1h zTT-sIx?nLbQd(qr>x=JyK5Q9w%5bBKrd7d5D9_Nu&G9?|uoshgUk z+C|IJD*L|1VIf#^IzMA)qQP0W@^VSzFZJYIFuTr^F+~l4WOL^KEClIUA;>}lqmNAs 
z4l7f)&O*9b9dX=%!F4v&qmU+==3Eh@SD1icEvYHk`~3c*B}9QU&&(0)e@udkb z8x1)9FA_N5Mc83K|Fq%t8eE_-*#qPPRsW1QQ6y0yvK?_V>l2lH7&C@2Z!_j1(hOzV zFJ9VF3I4<`c%9fSr(V>C(JSiQnj5pMfXK5)#t=kj~(olWQeSQSnM$0wig z@tuysLmEnjIz@ zh~u?SbGn#z^iDA&_Utg#4QrzROroTsFl&>$cghMe5uC_DV(cpfDZL$wL#>I1f|r~r zF-WXR@VUWXwUlcLMj(kYIyz&ivq^=1S=*`qpcvS0NZAsz=2uoEp0wa~WrW76?cGpc3P8Ihpe) z&*#;)cdIHW>IKS0nc;_h_tfi~5d9Fa_l5I{FIJyx4`=5b0KipK|sqip7d+CoiIoG z_*k5PVBX*?bVQ|9dbHZ+`0fUl^nD0G2o%QNa&35;R8tOog|wPe{IT1!uap914{>7` z?|Iz{5RBma9gZ^V0J>XRM{1#zg;`cjfJ|EEV&N1iZQT1k(0_VJW_MH*O1@Z27(B|LSq^PhA@9T#~DwJ}6RF2PLVLXZ0)5E9QEd{wrV$9gpW(5${c`EL%>s_^6D7%4f|1Yj%+Ft4o$lZPW5peGhm z=S1RXz+L&UMlD9z58>VlcIEjX588VJi#&MqxR+w{Qoyq}ENQ(j$|FSby)0|{q~(`{ zH`L2RC*@P1@@#vmu2QreR(1{NcK$CoappAudY}u(PsqQ$IKtI;WA#xjRgC($N?%2| z)weEaVWrZwrHRv9IYsCY$k2% zwuOgameuQ93|;me;rhhQzMM(w%GI(cf*o=1&ko=LcR4jt=+HkH0vcGtT**AsJH#OPT|UV4)2Vj=dJ zD|dS%Y*=6oxj3@(7(h+Ym+Z$#%;udG^viDF2VzbDIDTwZw|HErp=$K(FkUJ zsDHIxlK}@V)GD2rDpAStg&r(~FF2Sp(JPg{?gWldG2U#V)4tH@=`F+&5B9h@%g?Lr zX$}#Qm}9W@Hu7w(|KC{G`a;PMV`TMrPTQ$UDfBh;6Zr9Ix~26FJJbGH)|?Z~)R0$Y zoAl^D_<`rs21Zj;x9Hk|ZpS#V$U#XL(R2_23#@ZqF-#X&wLdiUT%fXj_@eQbG* z{%j(TC-ia7b?6WF_)($N4xMOYmGQY9Sd>?ZMfQs!dQOEPy36^GKb7RhxKKaQUV25j z0Rm>LfNrQ^UR8r|Am+8sV&EuCic{}oFDCFI%Jsq*< z)<(`-T`R6d+w6SA7i=BxMgaJj#k(H8BIob9uWD3w?fX8-BWHa!aC z{jH1I8N>>?(({^2h3c08x8t)ytQjo0&?4-i^sh%DfX;PQq*yysih8!4owf=u@wMhN z?40a+aqS3s9z>_AJqJiR`CHnXRB{imX)+{GNG2DGPER@>n-#4bgs@(1fmXkw4}gL# z<#&Js%5ZabK3D~Jk48?49~GAYNc~YS4$<^vGnYwsc(w|(AS3h(7h9Q;)XKUR1>~^{ z+hsUKDrO_IH5{c{FR}1)V1U%adkL7u=h6ahIYIB+w}ldU|uGX7d(OUiGo>SHgAvVPv=cC_s# zR>D>>xV$&=qzi;mf^^t+HhO}`t?E~EF0{x!1_&&i-SyyGt-_+L$1{LqjP~{^g3Z|CQnijSTnsk93%0md141=6ElK0Qw#i0+NQ zyky= ztR;k%MDcmrIRA)Z8zS@DVz*$ntip&k5Of%tr69s#1F#c%FbISDWw0ev^hjHN5iJjK zeGX&foJ%UsFw0Hs*0ofP?pzzst_c{h@!kO?2Cn^syNw>gD1wn4fz<;lJFg4LO9yR? zN3c$uc*R0s96X3#CA@-s5!?>H#(kFnP;dS`Ju1vgx%)+ru}0(|xhZU1By~ihsG`Xp zj}o_x*n^fLHpm=sJ=Ko6lrK_Z3m^`OLR)IXj)s>%6kEN$M=~jB$;B{5avz^;n zzrmhkKw9%Y`y=O`zWUiMoUpXTJ3&-w_{(wQ7Vx?^7lgSA}`;#wpIluA}d z$w47?$%*-T^e)q!sY%A0|Xg1uNQv?g0Hhw5fP%_G1`R?>B`8vvkKA$ zQ0P-$)!#r_@@aLUY3Z>94kPkUmoYscFRF30DfPkw6JUy%2zTYMKA0&eE@EscW$t^1Lf z10w66^uQF9gS*}rqe66onUc3F5TYP7o4#5EE7Ua2kwwRVh#0EZ&NeIFXa%!)2WD_n z*_<`B_zRpZ+OuyJanRkjr2G(mkhj)wWnSW6l#Jak9CZK3@G=s=!HF|fILx-_`~*?C#^o*9 z3OU8c^ISshRD+)e>#FP!CWGR8@X?tNUZD=9tj|P>ch2((*&_H}JcRJ4&SYVG?>Wm} z-97D);mRQsQ`O|jmTp}VxwdE)ZXn`ZAVi^0T<;M zmWE#$bS=Q}ybM`1^d^almnUnHn~aXWV7S@N4<8#y1pqkioCB*FQgp-U!n zIxEpN(|w?>+YQ(-9er5w*JkIf<-W@DCrFVaF0uHVA-;?(bT|3W$+0?^V{~_5YLRT1G(@K&#k4sGD6135Vn;D1Q@bcWK)5w=Hkxi} z(#R0WIWoW{A72L=+(JlyjS+)RR$t_ZeWGB!*5Ln5oRs|Yi_Iu0zl}Tnuq*=^@rq zhxv8io%A-40bm~d{Y8VA%@Q}lp=@tGDJ_*LG&XZQLRShQ;NEeklhJ zJiu}3p6GQc@;-TBs`jWfK=Z7RT(%gPw8N*)O?C>*Z$U*Bw^v`(S2llilWoqpc96=tc zH6MZYbm+1dJ-MS~**f-m>2Tj#0lIoXoJypv!(zuKb@wl1w7~ir3m}i0-usoyAS+Y7 z!A=n_kkNe!pk$o|Rtd)SM)-AW^Bj2xf)k1TuId$2z`_YXq|O+XFt0Z%lMiq#e>Y@C z+=Zs0gn3<^MqCLA9l94y53|2vN|SSrX=Ydn3NLvyLEua{8bI9Y8|yX|owIOiW6qSU zVUS{D;?}@XvrTHOAuELX$|2!{Uvj>=V5qLBa6IDZe;Cq`w~))IOVp~wPIT<7$CtW-@*`5A3{3T z$0kxLDy1dOOTa1(f}~<*qbRJ|836;0MKr>y7ZD0X{LyNmYw}MN(16J_qk;Tu)nC~? 
zO-INgQLJc)^UBA_rcYz=_;_+Q0q<=rF-D2-D2nzwvHCI1UO47}Idt z<_@4hvqyXQa)oPJ&Gb`&s3*+-P5m1`EE&a=&-Q{K98;04J?k9+a@3=vt0XfLEsip_ z^_+JLuDcLYR+~Glx;rdIoWI;&cRKNM0n}+e_R`)w`MYEzcXNFN>XWeZSJ2QEeG<#$1m z_G_nvmO$H0gP1pL{IYzUj-hV^tf!|DsKP9BI zQp1eVJr*IpEdC51^YLSdmdwRa$uZJNkNNlkUTJvl5crJsTm0SBt@TuBb-d2Q*+i>b zb&3FOFO}>$PVldC`yf*~Bc=!9h4)Jvk@u8P;6nKRf}E|APc-@-f1%N^)ZF<=hB zkc9ujZj>FG^=J5BKdK~Y8K=R1LB7}|be*Kxdxh7ir5^d`tExyZdif||zlOHG*SUK%sEGxTTf0Wz;dRWvI6yzx zs&y%{kG^XLzQH)(F)=%S;gGQ6z~I6U&bfT1=N$yEp0&96L;v>B%RHswGWmRfY z6+iJ%n8XEkOA`xgeI#ZZ<5MaLufX)5bQ*@b6I7oNjCms)h(A*H;|{J*yUOn*;#Rr~;d{p^R%Mrs9Po6o*FR1DR@$g!oat<<(v!TwS=)H)w(` zB{^T6*)ZS0`h_v4yA;DtVauo=g!@SUz&KO|@5a!{y#)BJOg(7%EE1fQ)oS!e!YP#D zPxym9E3i03GD>EhSdEp&5#W?m+9ro*Gq+#P>3c$C83kXhAq|TN%RO{a9()U4B_0x& zY7Fx9heJ#f`HTMcgEy&G(V$LpS%%pJG>1$zyjF}jVuRN$(EIBOtKft1<>;xhHy8iR zTvbvLrns92k$j4V?@`!UD6P@nWQ*2jw679MfeCvU?GW+l5MV9J!h8Sl+t0~Fuf3}^ zd5x);y;d4NxL8`a7qbwr?E?fs-ZmXlGZr3)I+@O8?BG`YKNeZ-OAL*LMMqe!l@P9sef5i}7fuDg-G@!pOlIfdAtd z8laMaTXh9k3-y93kJh7hn6-_TP|NI8*!Hq2p=$=olp|AVG`F3Qw1{uDl_kO|3Xy-lHpLIn<8>%(oZ&^6 z7pml*OR9a82zz<-6*o~vg{lPAc-!aMFex9sZS~ED?ExY9phw}4-lU5iu5&Xtlm@Kk z94gEalqBC?arb%px^p=Te$f4dWTo}iVJqDxoWZ_X?2K-c;3c9uf|~eCS@TMxKT5=> zkcTo)O$5TqEQ|VSCJE?|qgbuBDhM)MNV;UPc;9ThHVeT|H^tR5aT^tG6g0JR34=|M zF&oqYe0RYnYox`!vcSzd9CSfNpPe&o02o2NDW}U$#A&OtM&P>$j_ce*-zcv_XxmKZ6@uE^zMiQucYSx#K~Qr(f^;yyTld;MW{5c zrLeVpSb&HhiM}(OBF-hknzj20A8`O|$4|YQtTD_-6)4bfBXO_hX&3Sx(Q(V07?B(? zZ3rQ1*&P$FW;pGM3VvcCA^H(FKsPRbo3r6O`1yAaXW1~5CYZnBBEpIZ)AwC0Mv>2e zG1EOi?8YuoiqWGvgD~f{{{fMpkXL>QXT1z9nA0Ihi!3H`bY{aX#b-=AFuUd0>>1^D zHKVe?0VY-EYrMHQRYh17XiV=gC>@Xa|CzkL{xrwoTtmSbH%yQ>Bo>DC-@bn(cF z5!R_EKz|x6ZoCoD8PXYGdIP z+W%RE$7aqgaKoORMAqW-;%!X>aP?(^o$-{0s(tgSAbfKYPcOVh!cUL?>UyI-)bLo< z*W@|*W(?)uY%&+(a+goY>BPysPT}D|ng&n}A8T0|D<6Vn-OCdfCa(zb2^R?j0s=oL zs5@dFQH|C{1C7low{M6JMM_jmtYuPm2QNWr+b}wa;Ts;x$^PCq-jd%>ut9Mw=Y0@T z-i3fh@+GzURXlF0pxnb@S|1TTuUHO8vteD;duO(q%|4lcNtFYJ}kD zTyAEii?hYk95NvcUf5j|1fNjNEb%AKd-latgLZNdlz7=u5#SAXI{ zf&H_dRqMn>nG;=>Fkl5tVEOmY%{r(PRAjM~J{ZP(k8^DU56hEIlRr0SFf!O~W2I{g z`-&&5HD=e|T}tr2pPt1`xq~+!k{?kaE|8jmam}v1&^IC0`-g-K7Ynj+-hb$PE|vt2 zC~D4iMsQ840vMr1@>dRvX1tUAQr!3^9=Y14A|@&B7)JM6~>))$gMV54!$R|T^!=4*;~1+#~mvgJ>d| z@`0oWd`Rg_nDRfA-`{|&ZSFU&CfKBTt9V0QuB32Pp8SRWSAtc#jbM+==+eu$`6hiG zAZ|c^Xz(*e;jr4(dkG zHwfJ@fb)rHCqN-`hkXRv5y8|GW8e9OE`A76d9Y4w!C=)N6lFGnkHK9EI?W*>4oQhz z2y@Wi938p60Tz^%*?Je;XAMLqs>5Y*DJes83=CyOy4VF0E@B{X{Xtva(Zy^AN7EkZ zfzpIf{21IRV=M;G{&1)7*yY^nB=6BDoKEDh>bMDIA4Sw8Jh)KBTISl-Jmwld{rA9! 
zc7ASsv!WFOeh^meS^Ni>>(rQorS>{)GflkTG*f0&N5)(Dv`%KhwrK_poF7H6O6f;f zWWH6oNB2_9K^w5}X~@!uv)+gc)*UsPRL$FeO6v_i76)gA>bQ0!_28e?6jRF&o30R7 z&<4_pth+E9L*Do866Lm1ciDXNc^l=Jzc7k70j5)%|Ju0x)YQabo^fz!n1$tJqSpva zf;RWF7FL+oo@!DLo=E#e{86XX*Py=>g@)y2aJ{Md;=)DXV?K0m7+t~1l#S5)*(eLv z&No95*Z4-;ax{4;;3qpnPnew~1m`Jf;)>Wco#b12w8f|TU9E(L=j_NfIP>I~RK_v9 zW!=%*>cqM2SahgNc;GMhp9HShlNvjXMfzTa++SCWzo@lXm?Yi2nIrKLPTO{ zq8VcF-+_ft5ghld=5hsy`0u2?)T;ODD1vdeJf@gc<}t~f4ly{}D~Y;QqgyYabeXL+ zEy$CZXS&q&w&~4C3%i>(sL1N6Qc;55)}qc8Tb0+$j7~qvk{+Kx`!n}Q|8>{?Zp+@Q z=9ySBrg%Cmu%CAf;JUr`hr7$aiUNe1r2EmuxtLoem^fAsg(xN}ygUEIfNtYbiIhP) z?3yu-z|`(`-a5b_QHIo*KLd$$m-qbA9#rob_sScd6L3fJF)4}=K|KSQL1AZz1=rRr zsv(?FDV33wuqBvm;ZB;>LUrzbWo2!BQjReke|G4fwvs(eBBmCyiU6H~ z73ivk@NToTMVuH|wzD3%N19i=S3>V7`mF|aQEEsY$`?Xfv!bE0a<&@E*E zkfNXKPe~0;7pu|vt?@eAS198!_F#YiBS2&b8Y>iIRQCAmpe~ zz`p~sg{b=VNH$R|EN2hrt`-|BMWnjsaEB*hNt4?HJpT$eoxcbd0Doqp-UH9qH}baw zU*B;##xKD<+Ops+yw9xa6?WYBKEa} zT#e$m^({v(x%XE;W2z|L&2%KK^80?w;OzuKX5hFuRh`nL6reBLxm6-4s-1QW+F3rd z*kQ}I{+iVQ31F*IJ26K;Q`rr+BeyF8mflsKI&jcjIj~kqX9ir=yBtQp@N1YGi+0wR zsDZ8PRtt!@hJr9X0J5-Ah>szmo{mr<$AsTvCIvH46QKn9mu=aFeu zLkv1iz#=`93S(9E)%SGEE7BBfRKS;d>Pu?}#YH=cgzm6fSea!WSD^y=cu5PCqeQ!Y zNfq@zFl-1#V)2&jOtRM8&8wJGM6F)ljz}3qvP88H)79CjIm@CJ_sY&hse4x{@LaYm zd$&Ah8SM^KvhJ}nJxPpy!SFjb7n{_6q^d6=nJa6`Olve>Q;vEq>3qE_H{E=-@uVh{ zX;!R1eGzFgLe=g3z7+pr^jgUeizBx60~PiXeC|oj32>AsRr}Q{=qoYZCQ50J!kwbF zx>;Mm`0fWPE+|Zp<`isHZ~O<7Ujx|MP;84bAxVk-EFbxi9uB3M%Q(~@g2;b}xQd$klI|Iv(nC_FDAQ7P*gflLv&QgI)Xc{RtLa|lv0 zPzYLnk^TD1|9q6OzA2c)9niL647Bi`mMdp}>McNPSQ)}eg&%fclH%8i-83&YEiz9l znC~0eDXq~raNC z#O2~0w$_pG9sB@md+TUX&-93pGGQuY9@g{FY(RkA4=J%{##6e@By68rASwy6zrGW(tg{(A>fHpa`@i_5i?d=VE>0M{mi4JJqQ!e}(`ux42IwO=zX zU)a<~BE2)=(4V%iI!;RdaB(+_>rM}VZWdulUFOoE%x17kD1`z`!@O>xd!#^$OnyYo z)p`u)RtfU8%fWo_x4AvwCNEdT?!r>=c~JtydDIq`@*%GpzWl)d+c8bM-<;Bd(MaMi zs%hsuM)AS89sBkYJd%qV2l}O!({Eg6-WT^hxYZD-{i7^iUMb*yy`J9z=v}g%hBwC} z`-6=KXAZ=@-vQMXOd6{9)ee;pK|K40?Ral-<0Q1tM<^9Myv>~i1hlw#-|}a{5VgshN2Xk zJ?I0*(@!2ePR++hkl$eDbs8ETDa4%8u*ESuS5#u=;PiCyF{J3ZLh)EK&v z-}x24+NuUa(zJRa*@5Vca&6e)<8WMGT|KbZc*|>OTS$gqs6Mq^Ars$xBr!a7!=c2* zzc|xRriN(4L@9ibwfK(ZR(ehr{Nsy!_%!@8>Ak;enz;esxKaxC z()j_!Zr|=`!^{6e&qDKL@lITMRE#wL{)tO2DR}@r*&&Kzv@(Fp2W&4nrJm}yIl8y! z5Qj0Xepp4LF(}`N-R3^}C!`GO*IH12d7`m=chC+g&iMbo^(&~>4Fshy$V)dx*ts1DhB1zNDZ0f-%cKx1kp*!=KUOW`NGWdkGqfVTc?^N zlR^LE4X$T&S%{GrC)L26h(6J+_l9$}j=|_RjR$>((yIie>>E+pUzztfXGun7$Z($T z06NdaZ^8}3JJi*coOb@e)tB~E7>9QEn_&NwCu6fGHPpQj&8n(hBDK8%34{~=7zjtd6=ni`aR+Ahv#M~e0d+b0e) zteV7PyfaD@ev|re6uM}<7U)TvAJ!TuZRSOhV=~Bm>e?5e?6{h_-%U^GLyQ#gcLjRf z>+>*t!b19^ce4Lm{WGmH5Q6-)wXpV_Lx@kPjZ($V{OcPsY-zr>f77mg!5mx(O~@R4 zIV@w+@_-?oYsOM~59+rzWRztD_$E+`>1Y8s1LrFB<=&T$a|YY#fA7=HaK&*hxb{i5 z78y>`{|neLn|B(QERBCdc{*EXvlESd{}%sU)IM_HpyZQ6zlK~XCs?Dl|FZOz4arS! zbT24Cb+c_5jWq)C*?_jZI*;;x{3YGK9So!ZW0%qF+bYC!1sv zc$~gW_xFZUb+5BF#>cL6_))}I<@d2R0v0`Cf`T|$aAuShM$e-s5)L=Dx4Zb1&=eaV zdMVHIPs%I}XVyF0U`2B6r?D2C`x0I(WAnsozriR50t!$-b?Ch66_rl=ek4dYMbodcCL{kYN z{d$5$M5GLgALAwxw0G)s2&8L8%=>IqbY770wHY;6!x;KMzlK(xy#fKbuI*lHNsobz z7M$n`BcHkG1<8knct1bpqM}WldV2rZ0c%P-^NY?@1yC#7KeJ$=&PZ21iQ|le~J+v?KmJV4Y8;@+P4VR2=Hj#vUj%gBxcT|ZBo!bvl(dJ zx*J8??#}U|LgTYr{2hzxW(ilIs)9EksW{P)T3)4J>rXY zJ-_?jjJWXWU3KVC*63D|au_K~vW}1LGuX=xxs+YA7wUpYsOKTs%8d^?6F_bSi=ux0 zJ8oilDt+M#>dXy(HLipk=B6*E*Z*fSMOpkM36cyaP2MWGlf%me;Vb@%Gz4&B z6KiQ0pK>ld8-H3EpR{p`GCj?E{6EJKKXf+%CKV z^=@u+1aTfRp!ya*?w^7jq5E*Lagyoafr{t=ky|!@tb?^aK{tQ>{tk^2_zt~M#pp? zZn5XsMa^LeBP@ke0l{oczjkr`Ck@FJQ+ZSW%9s40bF!|3Ggtf1Al^Yor|_LN6((*P z9S7-;$z>oPNS2l^rV#)rvDn1e-49pDZcwpRggsLiSD@pXv0V%{$O@%dWh#sI? 
zp7!>S_juom)G})d=^x9iaKkGf~5j;fhGvgjWXP7aVLb%P=#*nm0H6#mKFG-NZao@xyneP$TcY< zJ7hdP^dhqRDyZ3!EJW_c5SyI-2`GY#^|Ct%BLrCO%@6=g{MZqg=Dr;4SE`8GQ*N)*|EI)}-C4SH zZ$_5H>T6kwFT<|?D446*Iws-PU+BQ9{~R&;$oz9#F+wsCyxMf)=qqPQCv#3Up$U1N2H=c2}oY>c}&Q1WU|n&3Qh6PYg=A+!6l}&Ol^K8I*5{DWiX|4^=Uw zD(M3LTukX9*$Nt;njOUy|1?=q?yFqfMhq@!1!NJ_p?;0c~?tVr^63U#Gz=b$vAIUJ7B?LvU4zEqI9i(YGkFz`_ez{ zjJu58=M0vojLQ7aT2LA>XRvNb{|fyzJ6QI(faa53P-zPdd8RAxpky4MRQ+(Z7G`Zr zQ(t_0^cuh7zc8RB-*>$Jd1hbDaSmcyyIM?URh__7NuT8SfKZLMX->cVpQ!7CfV&VKY>3$T%8#NimPI~O zYegy&I!)Fn=SkSoWL**kJZ~Q}KNL0oIskq+_hct7%RwHw*9j3vLTrOrfC_*t)Wo!0 zn9u!tE!=h5w0?*krzo1GUCG#UaoO2a#YIyRK?5vs6K3u(YMhUe&NgIZe1t)tu2E88 z@1SdqP^Zx1UF%9MB3R3z_B&yJf`Wi8 zZD-%Ue>?fMnn57+UMEM z;As^OS>j!VI&o|B7DW!vW{Eu$2d>^{4JIt0-TFTX)Oxm>n15M#3<+LF$?hedXXJ7$LTI9wY9K)WUp zy))aQoupZfG8gVO?y`Gaw~Jht*(*f{*TyyGngdu|ksA1alP4gQY=lA;_i7!$Y%X}r zs9`9obd8;&gC)JGX(0TQdxv2App@L96z(z!mm=66L9mup5q%Feep4{lP)t$Jt^Eb{ zY$PopEH4`Zf}>cHxAwk)?nFdL$qlEgXidlC$;fJ&QHkn4N#V>qu2kmOE(h)}j%_v^i;uv*IEC<MI3 zrQjoa68|4z(uB_Li@@u%vPRQ2me3LL)_L2PZ6d=RuI?(=Yh?Ev0!8fHord+{@$E54VKgM6PFK^XHA4am)gh(K+&bdZsgNnNRLac(qhnVa#v#BB z!Im;M%Kq1ziHX$W&$$PwaJa_W<>68)|M#jxyHMOmM@uN~1pT9zltPib4*PBR?dg}3 znW4})uKvr2@B8a4V1ClQS9q7A`q3qKEQ!H&+45Vf#4~8}bJ-9cx=WIhBN4+Eh+yTh zb9S|)9)tyZkAqb=O$gbbhe(Zs7zYdYMW)+<%@V+fkg)RJw;im4X^~anQ9Pxx7k4_` zn(SdQ5zZX3##anwxF${p?q0)^%aR%})lyABVWBEP-$Bv})d8g|HoL#qe&59FPoq4U zMX`T~!}0$Y%lFvE#i5FJ(bt8z<1g{~pdyVgi$?-qotnWoKb8FtWkzbz6awLkCxFZA zl^?*yeiHN7ZT&3RleOIA)&xq|7-4-r^c3X6HMdJc z33yI@bEhKV8oZEa1LyZa%9=vRB!;A!jy*Yo<0~x=MQ9T2#P~ajE<3i|CC`Xhy%T-6 zQf@scvZXtkmfWy4CXPoe9UbzvykDetdqV)&YKDpd|7Pt7+xq(-C}+6K>0Xaz7Sk;a!3Fydjx^?Q+=vRyZ4{l$;{qxK{M>-K-+>EL4t zQ$qQbU@$tFqG0(efAc2xPu2e8Rc|nVPV(f(e}Aq!u5%`J7i*GNyGxbmuQvOD?ewl~ z2yK}7a`YSm`|k%ptPrZP*RO@2j64mgKhJB<2=6tgT-h1btfPVGSagO+iyTN4;Gl88 zS7hhP$JCHG61^uX0ff4ism%(No?(Pr+ju8!y~KV@TypiFF*w)gI$&VtvMd4FNdp^ryJ21-|IZf8|)Ca zOR3n?Eq+1XV07M!|H`*t*4FspA8(Slg^9vZiZV_X(U%!VW1)SWH41({&t%h*Q?l&e4 z(z(6OZtpjrTdQcft6>+$S~e_@myoi#Wh0`I`uUGV9W(}fMgUT&DD0|M<@zrkZnn@h z;TqediDdEll!35}mNYf6ZQaVHWC~4?1E@DBRH=Vkn)&}W?ax099I%MzK1DE94tM-d zbM3|A-URm!wY5-D`p08cKc~b*in7_>HSDgxmck&gQBP=6)3f|(dm#t1BwZuU*MWA-R z+rl#SE2yQ-Nc6K?gabq`ev{WUw^zZa4MI>fWCi)4r_8E(}CGqU@@> z8@Dd)G{WYA;Z8}hQYO2`XL@Y*xntKHk(ORk4)luovo4kB_qR@d>ZA~BtiKNq;Xbcs z7%~}zcQ4#G20K+?RfTwa)Q==u;s02;L`L3=dZN@QW==V(96CL<5H8TiQwF@jr%*Tk zY;492L74m+^l`>%eH-p>8nlOi=`|TTeuyUAHc#sd5;#OtkB2k0!&d&A1i1J*JVL-) z7d>Gb(YQ*j6D# zWdx3U2K*H}%3x_RSaon)Pi63W%zu0-OIPniQQailDVrUGGyyN=Y27WT?OPaDt9irl zuwiBNUISA~*|R`Y@3)ABalRwboyT87a&C-s7)l0Vu@v<57u9buz`L$I%2K3o3flA< zEB@8En2xB5Q^rgI?kvjM`@d* zka)3P9GDs5_(r$@P*uqx4uJny9BW?MzYB^i9UX*G3ZLbzBdbXZVMYIqm6?iF(J-s1UDGj|=XPHA07!=(OUwm0!W1$pyf|%$(UfM-&TH+2WTLeB=t07z|J)geO4uloY z-1TR@YBIXMWnA|i)k}jo*fKSMBeopmGH%{og#X{q1?mdIN%DLdiqD$c@Ew;);;HN;Ol>_I2C1 zhk+;nlx|Gxy71p^>`nQK$LaR#(UOi`e4+dx0v-gRspkiAWM85qjcqBlu{n~4ERlME z5!1rUlXcib$t!%MA@Hid(VRZPYQWZ?jAmcgpL>GhmA8-F-#v(ems$Dm~3{cCK z+!0pOX9za-_DYK&v@x_4$Wu?c76?Xu*oWo?er&iep<9R(kHr#|2GdIOj%Jo6j2NUr zC)SBzccp&2Pm_~k0t4;v?Dae{nQW93zXLXs$w;y~Xy=Bl9Ztx{AI$XFVw;@ygJuyI zAHX;r3ya-$IFHJZM-7L*0$DW#X2_J+5*BNQ=LOM+)LizjOO0~WWFqdgjm*;mI1r*U zPsE=3V}k^X;=w`F@o2^{1{p_||N82C>eAb&GWCNUwqaAGA#Sg%WL|z11xCeFnF&&k$Q0VXDNR>55Gtg7k2B_ai|UhsL&?=qqunR?-xj zpD&O{0(ecw0f$E-V%Z!n{EsVTzrf&9A=bvVfZoL?5luaxFZ=s;S!P+%{3W{dX$gW& z_$G8SPWB|7QMZexbM3dSF~qX9t$!L$8Rg67w;REswiB!NkfoHFz8^*I6cRako7ow; zesC44NTgvczciWv0TBEbDVd^(iNUZyqz?ruO8n*-Jm7UNMfS;Bd*@ykg(2#?!|mv> z`d37jB?2LW2L8g@J=GFY$hwJ|5*{HW=zjLC0}ecde;7wh4!*!gahFnhs2nbBduf() z44dCIHzFDS)qlb0J5nb5Z5^%kUytN7EUx6c9$aP1K*)!_H3>$vg@~1hsL54v((3ne 
zYz9~4m!x+DQ*sI5<=ckUXZukM-V<_>M)g&I;o(>QJc=O!iuy*JH&{Wxt78oQGG$dr zFl7BR>X*{y`2ueP<)xZzcPuB$ZxlZY4j?cgR!z_pf^>QM321iQAYNm0W9J!7;3OJnD;rvw;X0i5#GQ5F!J{Z;|h=H49% zyt~oD&`x~uotjGDYiFCN++`zuUSh-{;~Pq_(x4mfv)2?x=3aSLaFrhey}}Q757b<# zaoAPzR$fb0yW#(A7ryKi+~s=vd% zISYslij?BU2OCFq1EX={&sbz0ZUm5z1JBbt`25wVSfEnI5}+87aCZLEOJ{$ zsi>Xgf<_s~#`0$**tddACZg&=o-+YHoe%0WTtJq?_#PNRF^UdJX}Fk3G|jRDZ`~1R z5mp=>Q%KzpjKA4j6KF{ELW8~p<{w^s1H*K+AEp2fQyBw2PZ$uyAOkrfO4eRFloEtc zep&rBR6lJT16f_l>myTS%%T(VdM}9nY1^Yw{5Rp@yeY{Q(vD7>$C;k7x4G^@Q<1<8f<^a)&uu=wpg zk(ttxS4$||Kqe`UeAR&zL*XwUuINYcCZ2gI1i{*h-JHl52&te5kjuK$-OxbSHk;EQ z1GKIESjj?-i2K-_#;Gt*F^s)P^ovge&g)Z>AftdWEWaxaW(b^W282%b{( zsJUd~oLKkdfZIv*!E6U3=s+JR8Ndq{Sri0s*Swb>xS=4pBRgys(8ZeEQ1{u^=SpxvXv4t!JF(0R^T4fO{oz}fSAtz5w#`e<>`whVx6s|!@|T-?{(OMgBb1yl zS)$l%gBp;fQU@kx?Kj-Mua$tIM;_Z0S=tTs1yhxYCnPl#_TmpyfJm2CWztc>^+Y(A z+2;}3PJ%lHmK{IkoSo}`@GSwRWH+_!O?=RUicG)B-clDu>rN>TgiJT@F68~oaX=LC zmZgoTq~9;^8re9}V+khe<|)=*aH6IQnk6m_NM@@%8NwXqQz-zZ2t2t88@{&fXagH2 zYw)1M1~$|y<`5Vc3{2=vHqszt^GCD8Bc%jmY~qD-v;wtO%no6$wf@uakwq;DxK0dV zaDW8*&*3Sdr?hDOY);V(78CBG%c5V%-)uUz$96wtD0Uy}-ij^Ru zM}1g+Z`8nZWOvDa+*V9eut-1P`dYu%(RaLAx1ex zN0uXUR2kY6>F(Or%#_S&2;+iHr#_>5TG0RAI`mmtzJBxN?uBJ?VwV6h7+e#PIu*rZ z)0+h=-@sQwmw-gb&>}= z@70bHueiX=)j4MH@9c~iPK8AHb0?1F*~Uj8#WN)1qL3iTPv9*Vvp#n$2ewK!={xos zmu~89su_xUyBJu#3#jW{w&i58DUo#h_FZ>U+ldh?lMw!3SOYkBj6ReOam`a!$2f1! z6K^@_Q!CPTs-gLDz@dwHcW61WWj@RQ0!z*B-q@s@=~j!J1G%(?U(`g%YiH$Btbyr3 z6(I*#n#}(TrsLSuMrVN+*Rr%JO|Az7qH~}0-H-TDDW+DA*b4dR%I5-i>!d~EtWEU% z{+o87K?YQ1#v@(b1lTnQ>asj%^yYqc3iY0pv;&!*kf;bkTzm+27b?;(UUlvBDMrdt z**F8d0U&k7HJlsw?I0{onQL-RRZExwmdG$f)NspWrtzlDCd!<%#{}6IwF;p~;2ldI z5e<@FAJ(!$?1K7a2fjkLq#xE^j%ezb;?$h@oR*#&OXfg1@*GgQzhlBYPG!|blaBy2 zbDJjfgwsR|lY$~zxl8$ozhC}rA1!{9n8av(@G#;8%W^nJ(3Y>@X8{+vcW5T}Vy^!}a7F5eZ1exFUm+!+2hiDIpY5SQS! z3ZOo7W=#6(-7gti&y7O70$&#Pu+@D8^cU}PBn)ZCucKs5BKiX=Z3%HS@ooDE_Xa~! 
z!gSC7Wo5MhdOIb2Uadza9t~B`vQ(soy@~$V3_*oOx$4L5i@T!tA4pQutI+w(RhYNLT1rQB9g>dM}a;S~JrazV8m;3LSLa#Lb(N3kXw z!gjHt3hi8deU6R+{?YuPj|GQUDn5Y4B6qVLHBDhhkp zx^Tt1y8P6XSEa62d*TX12hqNPQYwgZ`z(9|V(#P_s>uPMPB~fab7~Kw&6FWd6v!i& zXYkHUUH+X2FSW<#v>VVWp;>ICHI!gY9k@b&=@rhKmio8^oGjz{5d!}1F)S2->$hkD zN8*Znr#cUdVUZ6OHW-dV+b;QBjTeV5cW_PVK6XizLy`trRG4#dkmn(vATS(9tqRm`3jkur{cCav{0 zBTv=8j@M5v6cX@hv%6it1O{0q0)&0n_}g~@XAJ*D_7$h0`{JtAnEXZk!wP~fjx=0I zU?HWlZzib;NK&)l`z#4%B_CW9BvGaItGh7HKEs@vJt8tQaQ@n3BZI2oUok#fRT5$G zAC%i?Q6e|EJ)Mt#(GEVz0`LfT*{GMqnw zR#(XVElb5zTKEkr^L*>GSsN0B%sW`>zi9FaFo~-A1NdvD3Cqta`)Q&+JdlP1#wnGD z_mKd7(5?$#cSW`aicZWULbPpn&g_`=g2u<@N<4uMA~`0Nxl5d5)n+GwUXsI?M&gZi zRfs8FrUFS#nZ;7zyAE8}AKA7wtE@MiNhEg_J%gAvj z**~a;1dgT0WiaBlu;a?1FNPsV?ORCAcqv@f*F~c)$%5m5;TgVmh#Z^u*W?m0?@SLZ z(v7&*F9@a;Cx%Q@w7?N=<|DJ{R(2LHRuApbfMe%azjNz)4qX1PsU;+iW|pbV;DG4) z_#Phc42Q0-wJf$Q4-4L{hdhXLcu^S0KUtG+gLd`a|)({~(^unv;l3``Exs`=^^G3^4q=w0I;^{Vq#=F~1T8&Z*-JY{{1 z6UZ^xS#{a%qU^|ywF(E>Kh%-!8K~!nKMcG!b#m7?uYMSZ)^k1Y@B3nDJIr&ZNY^^{ z9%EW4W96-CFjW(cvCHiPdB!8ipZLH5*~vNE*;w1OY&OPaA?v@ zv{>?9I?zXX|NI?=s2t7tP4#&SA2PF_&P(y<3#*Op%-9=(z0nWFb2Hen&6ORr2~iCN z1vA9~p%hX?5|~SW0RZO=pE+=1Y4MTRog-$uuk4ccM${Lv)`5`j&3fA1)wO%;I%48| zsP+`D=(`xkiNq?>P&*TDs;nZwQHfe~Np9B!G|D5u`4-+WN?oTLIu1L)Mgi({39?!s9`lKHd!N1f#mXLZMn3GRXa%)qa7H6l|8t!K%rppLs_>5NWbR`f3Prb{ zWE>Cy@Ej+Re81K!`=gq**GR}sY-W3f(3eRNk6XA<{;&C14)?x}l;@ z_1ywj9)sGKuZN+~j2h2@&kE9_|JwI`VwO_AyDSme38unVSqVrw{gB5f11u{ycOML+DdSk<*yJkSTtZcP1JzrFe zQ0Kk^^EO5id>`C>X!S2B^(TAW^c%135|;yr-@x9A>D1VoS#|$Hz!=`M`hyl0N%v+l zTr)*?BP7_Me-hIcF;7T5}e4xsZ68Qq>uqZBjfj05Lg=CEY zmsD<^Z?TrEwe&)R@s2>!$#s$SRHM2a6#tHa+1UUwu>wUiI6GkqKq^zCE~k8Hlp-V@ zTy#t0WRriU$Vo5_+x?>E?>|LxqFxTX2W_!#fdS(zGK-U2wO!)sz><~nXsh%Bu10@G zc{ZVU^ZwbhxM|n?2#UMD&WnzzBEP0YvpzDvC@uyBJYFZKhmvIoAcecpT2;n+%2!%x zKasVZIKqT2w*u+i#sbytIt+Hz=8}zvHZ++{*9dd4&On!=Splq*1H;T2J(;Q0p9T0y3y$iK<&aP6Q;?g(u2wqWPmj$?ET`F`p=DmwDec zfEivc+GZ(R%tse@>=np8;US5ok!%j5$A&+y}SSOupw{?VN@yEJj!! zpbJE{#vL0xJ9B;HABDR!z2a-#k1DXSKU}hi?ERR1vcNN8VzFu8L+SGA!z|1QaCgwk zB!3vJ)FcbWw*GbQugy2OSWZ+h@jLdcPt&m;C|?{!7yIYd+GTQLq?VA^J+#d;acpI) z^>%djOl(QqQYk_8;xvB1d>reOS^~g1wwl%}vEc8)@wqiUq)&RNj?B5k_L4~&Kf+Tv zM-4!Jc4d3OPH%6LniJm@%$`{Zk6e7Dnm}HyOdcOxW*4a zgR!b#LW?xFC;Op73V4~;ZD-|l;n6m64Eg@Fe>Iy>Ejj#-X*KUTgC3K`!ROjUx4T9? zCW~)lLET-UhtYI-$YC7a(ABL=)qaOB{Uy^WgRkzmx^UP`cJ!e^2I&yFkfcx_ph6T7 zf9mqTr7z$4)OmsaO!v+dT=3RvIkrsdQrF-FO(Tg$RP5@w_&}!lr?)?_&f*y32%#eQ zdIdmq%#|*NJ>8< z{T5e)WYtvixO+MXo!Fn^3r*#%9|@pICzMw0Zja=HoZBY~0ve`GWi$=Udp?CWzL7LA zJ)X*4t4sks3G+_?OM=~h*QeWI{Jn(0yX;1)^BvHof@J=umelBjgc01+od-vysdA#O z$!}XuLO0q)2y?=~RT9Xg65aGr|5rEeQ_^IOEfv@#8_7 zAt~d@+@$VWN1>b!j7K=-sWCm8IKQQyXN>6Ru=^lpk!|Itnk{gei?ty|(&{|1ZP?#o zRUtqRs;r~oopL!G(%j7wjVeFlho)T~Jqdut1M(2YXXfI6>!0CL#0sQU5if!17Tu|R zl(@T!8eX{!p3&^9LeGec`?-Fqr? zpQz}JJKi^8Wm-96C~gIlNJlL_5OdsUz(C@1sUM=_;Zme(n1C}vH{$4W@5Wk&=|abC zh~{L)^~5Ao7>NSM4xa^`NE_6y+5(``A#*!`H@bgspaX9(6ozaOk8@FM+{NS1PUDxj z#Dkva5f7hwRKIl~t$-22Vs28DBAOfHNyw0Td<^?7U6*>&G+*&J_D+ot@mkJfB+G}! z9QnH%%AC=V7PU^mdxKv8#?Pm?^d5W#(fc{G>p>~bs7osUXoJ%0*wm}}rnm49n9k~s zK&+chwZyCj>vxZS(02GA`O$1WE))!y~fg}#F3$t*wK`VRH%{w z3g-ZlROs|>W&Apq&*+S#=h-CJg0`Yr8ZxH;S^Qy_c@MYs!|G6jC@`2{G$CgDzfM7D z8h~bkYZLAo_eFT!Ffe3e`yT)R00001L7z)S5j$UqdkJSCqz5vZI*w z}aZ0{!|m);SK%08&@H5EelifQS{wc|{wGK%gW`(hC-*T$sS! 
zFk$Edj-rBj_1p!woJiYC*k+3Z%e}Z?3(h$pHMWErVO~)ErKX3>4w`7!WNMP4!9p2b z(77x~X}`$rmn2JOoBJw;H0ZYxuX1kX0e*6aT|_tbW7N`lSu1gazQyR8|@1-)(xVs8mZ828ZDV0)M*qm zxG>ly8UD@cQR6c|@?)T?7PQZd=%BN-3~cElym{nh{iE7NiwYH466?f^^&n+w&^1?E zv3F^yPP4~5Noz)GgAfUHcKgWrq@y%**EF~@d-MHthe=z5*=ND7#755p#u{Z$oA=bL zC7=h+w=vcqa=Eqm$>5j6jIW%)1cYDBL7X(207zk*;NqcL2EQHu)I|@SYx(yUKT?Z% z>(x?2xbkP}3R7k8$%=12PSP(Qve-%w2CwpSJj(5g%OVk-^y59@It|gt*9UGDyLkir zzsMrFm;^ky#td4VECGrq5WsUjIRA4hT<_6ULN4UJpk$GZnnt#V8Fg6UC$Yt34R|2h zr?t=-y9vEF{+wlR_q0L3YMn5cr?_-C!R7)6SnfE&Mgzc?K;0dViZCp9{^|NMvLL+W z%7K2$vRxxYMf2(q2ou)O)3?d`E_;z6G28W*J2ZKummt<&Dq#FkRn#{bo?>d{|7S*y zTp;LdQ4tqNk(CVeT5diarVjxqIuBO)7z^-Rj@+ea{)EHylY^;wet(TvI_f3XqqvnW zt+p%>51j0X$(){W_$U~`>}lzWYn`hW0-Rla+@+UGFzDbB4I(T2L6h72Ol7S|czai= zya|!2IghN570)S=#7=cjErn{*`;H%{X8k&H-F7h|8=T_zoj}0-!@aZHUUH=)Re5Ut z$SfCTdU02FKJcch>jOmEjr;}Bhl?TEr!)Zu2~@c(1G=$xFoMQ*yJ2}Erh!T*Dltt$ z;3IhdkC{i*zks;QB12yy3~k7@9bpcxt#hDY@i4(67xEL37V{P}FreuKimz7rnjLIu zvL)O=Mv~pOSF11i&6@aQ7n3}-{Wlulg%3Z` zN9?6YN1MrPS4O)~hCS!Cc3dQGKg zR|}IVff3#Y+$PvU7c?C$Tjs`wU6%h>pOB%wV=n28k#J4=-fbrWheM3GCwiyC4~glt z?qE`Gtx%a~osbmy@qom>)Lcta@9J4PY;u*GT@s1$qaqU=H8F@OB%Z(zlit7Hqzp7- zj#;_HuBENwF}of-twK+5?2wo_GQrM50a3_tz4m{>Bt#t{fM}3}o}G`^*0lBa=p-Wg zQ4em+@;Qq#sF)+3CI49aTuduqw;|wya<7g)4-DV2!}v3G+cICruma@`!S=YQhGKoPJjk z49YXS`h~SZ2A>#`rD(k?m_u^^yrt1Bu{cM_E@$uR|H#H-FwL`GLxHpMEpr48 zarH*Dy3?is6LB#CN-8;$kX#q7H0`$DxeNzgIZi7r#~~mcX42c}JFdakO}HQwIp>bUt(e7{Vxx|fRrt$e>*)k!k%p%2fmHg*VA*V?23>O9;FiUQy> z+{jLQkcFnjde%FoR71L`-CL0EUs@C1m?oguY)F-IWN(A$i|elzh8<1WJ!~)Q)lRQ; zZ;fF^9>amBu$?m$Q{gtyk*A{p-_d=a91snw0B(I|OvnnObimmrzNpWBH*3-(y5cl3 z3sdA<{5(TlO{3I|^j>UX$=C z6)sk9A~2};D|)G&SL>g7x}X8)aiv8%MKi?pfBJsAbNR)0olp_u(s!+P@1-@qbsel; zAeCZbFGP(4`LMxu%D=uqPrBJqZaJ2;8ccyTNsmc#wzX?#E_v`0f;6H^ zCnkv?MPV|Z-ibg40G>1wp|Ck;*yvmrO7(x94hQ;9+`~64sYk+^s;coYOWh%S(S)$I z;h~vRP__$s3(<`mSH7QHyc49XG4Zl4YPe^z7^CpLRK|>8X8T#^cd_P5rHNN4^K2Wy zn?jP=#=|#=m0ct5C8qH2i8&G-=;8S(CSD=#8>c!oBnt)t15Y3lnu)Pq$ zJnZu#f2@CC74quR)BC+yoI8;4?j{(~@x0vr>k8`~Z`~#JuC}p}LiR19e^cv$tTtuS zvD*99me|LE7WGWxgJ`hFFRXK*``y0tQ7R;unX0)BD^1m+?(9M=a$uLIj4^toPSg28 zcqk}ecKEU`XHKE7PPg0=*PP`2hwtDy^!>I{OdQ2p!DjK-A)zDEmiyCk;X45w{y=xt z>|IKw7DS6{dsf}vY~BB)BPZth*KI^Kk7n^I56n0^h;*?EIeAb>LaDrsFh5{@DM+5j z;y(sqSH*j5&X-e01IT7M)OzF$yXIgxtwn%ercbL{kCBPO)IL3utX`}_&s^v~(8_Mp z5Dd1c)F1Ohpl^PN;SDZ#XDr}~XZKOB}Qu4*U z!DmdR+wX#&&ye`%=0gE}>>$mY>>3%R9#f~T{z_$bbtXPSW@|OQJr}rT>{?JKqUtty zMX%xW)zlF3uB<*z*_6qS{h+ zP^@>hHcy^Tf-koO=fV&#r+dK@@%O7DW0CA}Dj# zSuwj-9>6dX>2{-vP6cDZ&etQ9f|D*;xr~32%u9nOpz?g_7~6Z&($#w(&GK|U{C=%@ zAv+eZH`mZ}4Es$vbSq024NDsb@R+4y3ce01sQQ+6jvBvTza-C-c{R-B!IEGMG_0aUC=$Y8M z;n6nZUCwHhjV6T4B{`)|a!Y3+Yd}CaD7dDOlvfLRpu>mwhd}p!^bNf0yO`E0H+4KI z{C9>09M+jBFX^!nzP>^cty@d`>qBSq+|Ehd9tQJUD+6m@AX7!~T9WI&gw8htl@IT9 z`0~#~RExAiC1A1eSEATlRpTHVfY;we5or}SSp&@}K{6f-d7A|v5r-9X1dJ0qM}uL^ z9{}FUG~1JNH4~3pQ#8Kh2{fLJd|k>^_I_?Q%ID9jgGclI4{~p~>$6BJp5z*laVtrl zy--Gc4T)VqJfW}V7k_E~xgzV##kC_?dqan&3KU8^&S1g6r}jlKPt8)LU1fzTiOJc) z$y9M&wG4kjfm|KSC8oEZ<#`(WREKI?jZuv<0K`22KOSj~X`rh^ca{jW4B;Me3EUJd z-o`<*0nRWT{b4H805C(mtH4a9WTQ?+ainvVvxUA1G0Zwmt6RjY(K7rrg-d@yhTDYM zMNUCtwo1$LwQsLiK*XI1;8X*TFIzog9o~`-q!T5v0ekyeSOLOrMmA#7Ob*1)-P^n< z*r0EHrSIoMwWrbNsU{iJ*z!;Az|y1ug_17_LFyc)g%~s3GH*YxtQV(C>Dgr`{nx@j z)6*!svu|T)+eDyU%dgsVQXT0G@0 zIVe?EUYO07aL0w}^x+Zrq5(!yO2zldhA{ajc}aGpKAvJY8#|g;moDlbP5_hswSQcc z*DBuA!oe=M)GdZKJLfY^)aS$gVLlJmsVM6Y7a*A5*sfT*G@4ouJd6OQtK>SyY)0E< z0Dmynx+f)!yLqcQjd-Xef|j*8x{vNpJEFd6F+`{xackGgvf z6{43yhuDl=q%TzCHV4qe{um!gKxxz14WW#^$H5Wi@$ln>8lA?d>6OitBzF`|nBMV7 z;tVHodZX3g5p33?6fV&h^o1yl-TAq{0%4gTSQdC|Mmc^;!ER(Xnn3z>?xBmM=coe0 
zC61enAocDRE}@X(;=|iWvGga#H0a~n24 zr@VuPud0SE5e_+V44X0~mc%RjQzA)3A#9bPH#FO*aQ|kNZ?E&8oTt}PWah-Kevay8 zgx6VoHuDez^q+~@5m6ETo-InR7#_!ZJ_oYGlT}_FTYQrT4oj5-_4QGu8)jA;i^qiz8eP{X1NZq5RH~Htu zil`9Dm7Efyg?A7~{q1>$YC*A@=#Lj5TI~)8Q4Foa=>v;bKnl?M_5H?oV3Q~)$tK6iHzI!ZEtDx}%JpkCvlFkqG)5!%+W* zsO+5u0Dq`#XzyL_M-!?82F%cLj2#&5b=ilWSRdyq65kO zAKytRSSnA}*mTYvC&NS4mDkd)fwsuqO1??F#??l+wji^A^`@u4j^EKMGy($XXOFEY z^Ed#6^cz>ZSJxxlm?!!Frc^M>AlYHaMKSk6QQr+dQAiT5#V4YV5c9XeNXmonYPzkk z+b1lkUT?nrutDHHL;NcZ>Ig~|j$qPFCbkab9AyvITibY?rPVg|v5tC&nI}b9Zychk zXttle=1y?OM4|wv_pOCu^>7!T6ABX}ys$bDk{>j3eI!2aKOXCYufXWOzF!y(l5FA9 z@4AaZRt^2J_9o)n2Zy6=5dQ4m+{E>3iU5Aw@`d;7kYfd=BLfni2Hoh;KILPHYxk8+ z&;acP>pTy9ujgyljd;MLRt<#-bzl}g8N-qVkp!jlfHrtuD%KOKj5C*Y_^)3W-1C=t z5|ej){6>Ft6u|oiD5&&-Dvv>#FIrzDn!tqPuN*d&}Pny0d%|7V^}Ajgqill^&u zNL%VRGcht6Ej)@xT~PG`Zz(8ikcB-=e}`_I-eV++y#0D3XEyGjjziqkFb3L)+QO97 zC;H_Q>qutw;E8}7b=wOxE*eQn0t&sR)}e-`ZDo|@YefKvk2I{yh!5oIGlpN*N7eXa zsZB-B2i7vuyMAw&acBs&FB!LP5+Tze7lWP=<|R}9kk$gJCx!?yg?NcbhQkO(HwE$D z#bk+u)A>l*p(OK%tue6|1FOIm0Rks@r4#?c7Z!usBn=5 zfw*Dtv-i*$)*YO-CKhIm@t5{g@HJ(^okPBcSs{^i81;9eLPB?%p{oao9tZwq*;u2h za{fG@L1}$pvn{GOz2#67XfxVJdflDHZQw*jWtT`=weUl6C(Ez1IHf@}%f#jRO*3ST zpl8aU*FF6y=|na!2X7U$kyGA%@gd9>*dL-7VmAfzNrNd#3zaU7{g>Qf&7xv@RZ1Lq z+W|-N>7t@bz_lKIDF{;Kb+uU9cB)coB{Y3okX2G|rTI~zO`NJ$*nWna0XiLaI5QSQdC$xjtkJ6>xvkVWRb4cWFr(Y_w;H2pd~Z#= zY@kfV$z`_;ZAW!#aw~p+K0_UT^W4}zLB8e8P8B!Hhofz9aQ2@2KCojL zT<{Fig!)8WP)GIC^o7-Ntls)WF}TO1E83(LWa zPBRCm7{i#UFF>Q}YZ`nnz8b^lT%6vrjgP;FJB@j; zVkoF|?Tn&fgv+*aDTO=wE~;=|6>S&e=9=wSEonc-?1j*}NqLaZR-tocal_+N&h1U# z_CZW{#P4*uerQZV&n987W7oTA4i=+AEU!vB1XW*q*B0Nd<~p<`!7xxj9i}}TPb|0{ zWIG0FXgE5;iz&g)_1q^jfYDfFrHag60HNdNo=>r!H;dX20~b%F6!F`KLytLNo&k#1 zG?iCOkgL!sLUsr=agqTI?GU#QdUeJwo{*qW5hTSpQf;^2SNO|vr>RGu#++)B=ZGM! zRLv7ou4$)cK-!S(B#Ik2sq%^%uuKiaNUs~4hXr7x`{bwi`>e4^@uIK_T|e5|4Y;-| zXK&-Yy5`CyUr{3$JXs7kr8qhnTfR5#)dH78$h%v8Dri3BSR3Gqej~s59<9}`_O;8` zL`B1Gl2_|#c&Ts *KXoA4reCQ2=6qsr`H_;}LKK}jI!Gd4;+lO8Sbq6yR#ITmz? zIJwxTFG`1lJ&^Ts(c9hv3#UfsYwlp*I7V>Pq@FXjVIuX)@$Rww+}DxtGwC$39uWLJ z*dJMjA4zudmtZ9YK=s_hsH)ZVNZvrSPgLiluNcmt4TH7jncD^u2em5}D}rxPZq9Cjpn$M$ zqS)u4qqu(am5Y+|j_5trd)MnJh%qm*w(U_^!uVk9lq1V z@5dru$`L`nF+I#DMOj1`bMO;K2|LYknGYSz|5gi1d}FvrXmHUsZEbg236LWifZIrv z@n8snmyo1#BAVp>>O7E#`cvAg>fjE*@B=JQMvdEHeOOj;1p-;^W$jrs|ZX! 
zZt5{T>VA_b0i|F~$a>!;s9j40Kc&iC;I^q_d=X1M0)p=-Gm_HM?X*@_4*yUio z25e4d1W@So(KcwA9pt){L+sjIutGOWb$e6mAq?Em7M2McO=&UK^Wx!t;_=3J{MvDT zs#;c(`q-E^UP9Vy#c&U3%o6HMVB_KwVC|Gc%0rkRs5Ve8qI>CRhNg}WJfxS}cA=P4 zT6~JjAS4+<)^8aar2sz62J>!fZ}6exZOu%?r{DMfPgo=aB9H*$jdht%6u-BT=IgVU zlgHO8g{*M#aI&3KR_|wESLNg+oD9#hje=mNR?Rvw=HJ!Fd?wOlfv0cUG)h2~qR+TJEK61Z3_DpH1J%LjoxAFxk8X~d*&vvu>jBWKod!9&Q`}qb24I&U z=J(7?@1hPxk-2SU+H|>|!WQBC?|_ebG=(O`6U6~<$U?C;WVB8HCyJ`zbi<~`&~<$^ z-iLKIHl)rmI=IhYbY|)!IG*V-FzHQiTPQP40$O28EHqZL-A6N@C;y*=mN31Zv}Bj% z{SnAnT(62yju8}#B%YA?37w)S4CwP7A5E-a_2dj|7KbB!(7*7m#tlRhN|pAf-3+Fl z#oK5fZ`|V24oYwSW`XuKUs|&2J9yO~dA(*Nkar z+OxuFJTm?X0=E%Q?pf0Chc%KZPx6u7v_*@^DIgDndfVQhmu0d#5ipP2?Ng>EodW9& zA>*E7ZyZuv;Tr6Hd3D-f#8ygFttzf%**xDJ7L6x;FeIISP&Qn_+?2xXOSJU6Pp-p)~hHa8AXpvVveyRO>VLM7_Pa zIVhGnQtH&nJXv>PDb;(-wLhc9u%d7*=ucG29@`4Fy6-}5x!nG$Ie^Jy`#79LI!1l?X-Jl&=1x+su+7q z%$#V5**v>3eJ&K+Zy)5w1{*(Kqxpnrki3uGAlzRNKSh@a4@#$FEW_@PR{_tOe^w(( z22;>JiFWx%DHOyrMN5oiETv8*ZU>)IM7mfs7z=T5tHVcA83d3vYVvhpYddD@6B3D1 zD*fe`%+q_JJ4#QTfR$||Z?GvIP~`5!+un_G5L@eC2*uHz=?q`dubdS+%nV#*n-cYo zB|2CY6~%-Wt`-*|Gq##wkAKxeU|y1)Q@L|v#&K|Q-%%w+4*)i% zdzla2hcu$opr&T#%yI|5xhLC0zCgEu2rElooCn_p$}xc5$k$pJ}?FqJ(%I-EYPIB?j~?2~j@4lHur6`+`*!ix@^^ z*N5{sMpHa2Iqj-}3ImS$CC?@s!?B}<5B!OVtgI;jEbXf?jnvN?OQv|JEtbqx0e0q# z-jFYmW5obyV5V{|AwCw1Glp#B%V5N;p6f%^-RrB=pW9j`HsANT0grP*s}zM!ySMm? z-a9r|d+^0qPKXXZ);h{4^h6Gi-z)j4Z8qeuRi-r^b}Ct>5o!dq-}YKex5>!GOjMgO zYR#1fe}Ie%VtE0^i>;p7tKYymDtl^LkxnW@dCw*=)*PY>&N&Q|*XK6j?TP<73=p7T*bG05C9Q zW0Mg8000000Rf+CbV47USptluj`rm{qAe4)#!4W#0kN+et0l-^1DEMf{d#|ykG?`? zd8f6_wvwlWn>$;c^{ad`?=pv;9iNgNhzBJF|Gs-_z))Ok-b(BJah<^#D2K`I!K{dp zZ@<<83h_(5JkzSmRSxQ9_AAjS;IIx^uP8WEq!u~gaf=Elr+xNUO@f-ijuBpe_|T*~ z^JgZHIoWdU29(`O$nG4*Kff&JIpb(Oc8?@K{3UJx+By~nCs=c+8 zrS3JRU@x)cRuR<^MDwY#cMXAZk%xkw9+SXAe&4?zhRff*uggF9wq{MN_vb1BUbeb0 zq%H?wE0MAozISrLtXu*C13y1T7yJ|o@T>;OGP<=@ka3HoJ0{Is1>X5JGYe%~*m()T z>zxPKQO6ncH2t4y<02D+$o}n|C5ko%27L1-C~ypqovkh#9UpfV+)eO7>KhJWU_WnA zGwPP#_*PRlfDc;}03)skXtQM@hve2TR4_CpR5dok6wNW@x6?RgJt6$`bp9&L9$;o# zPwZ{z4p><}-t_`sp8A*l!x`ruCiI=HiXD*U#!2)P{@)hGsa0}RrHYME1d!cvxxKQQ z!K_qyneV^axLkzK7+MTlhX@$L;yZV)L1Y##dUq}(=_5B|ucf?`$yjp@Bryr-tqvqf z4PHM`!&L#F&XZ4C%7F7+{v1poc6hO6k1Qx7??1{~Pkdm}u2Hi^w-%9(>sL!s>Lz;s zC3;F~sj+eBCuKVIJa^VNKRDYv$3s6@%F&DqsGjZR2)EYwFw24Y4L#*TxhemUCJDV* zz9yHY))AxZ03&rt$jjYc_;^hJ6uR?L5qO<^s;UH4C zGHo$aWpzWNqcMJ{x6s7>WF5H%p+nz5VKwBCBjY)=#YiQm(C%KC(N2DP0bf%z3{J=5;vJ_KSkN zt;IJu-2W_sW*^-eYt0v13)nCiuze^5vzjbju(zD?6bn<6nbMc@?BqPhyFFcOM2s`b z2mT;2rB?QLKQ1DHwNaQ_1rqjS_>)!X)}+7xDEN$iQr99eKn2Q)BoSYkZy)7{3dz9O zMGjU@N$2tHXzhguUQjfyanYhlE2rgAx6jX?Q#^~A!C^_9kn+MRbRdtNJ!3f<@I@^) zX6iS2%k%e@*#?#()cJ=5p$7?^sc$f88h`ATg|yD=_}W4%)Ie#M^~)er6u@{LOj+lv zEGeEw#wqN$N9DxjNj`}kK!HpjHpVz4Vu#P-G7+s$%TZ7GckI7r6I8gVuMG^9k-FIM zx&T@;9@hq!s12?SjD%&sb>sB9%Ge&@002Sxv(cIkIS>Y<1?Sp>SD}A$AO9%;S1E~E z$JCPG%{(r5Ei(+rl?NaIHK5n^R#pq?egcKcDAF4+_3AKa+34>ksc#~hxMlPfV=htS znkx!EyRJctzD1-vQ-4iXAE<@t9sE4wN8CtT1Z8W4v(Liw_+poCBwsv>iAc+)|J;FY zb>ygGV8w*F^;i(Cy0tz*Ip|g-FH1GPgvfui%?5HvmS~Y|D8}kgm4tklr?T!ld}eR_ z0s(L?ztGAmdVDy+w?;Aa7*9eXke?#WeRJnz&L3rK><*GM6pX6bZEvvz1sHP`ZGfY$%g719kG|w|z;7f-$rtZoZdq$Hm6@ce@Xtpi zo$jX{MPTpjcQ}Ff?r!*c+9fT4-o71{#?ri2zI*~t{Ke$2V6T>O6G{o3>N4}Q+m@`S@ocDZP;t*`mwBA{QWquIr2 zO1`_BSEmXUC=(apy@pe@eDlGEwRp@e zemXr3(KZp>3qc|?WOJ)S1x>Tk4_-BWTasQsolWut83fQ7(#)yLy>Kpff_@7ye2YA| z5^se;nBH_8Vh=simkh8h=G6`W7;5%qE?xo1xRCFnnE% zPNI~x&2lS6VLW0R0vx^lA%psgC_%@Q8yUZ!X}K2F*_Il@siYJ6U{BeH$6y#nt=w^^ z*7lH*~%Y>6s;j2v($bxX8)atylllPJ5SpnN?4GNarca5daTdUz0 za9rEamkXpM0c)@pNI9BqM7ZPql!EH8z0)Sk?i6s}-J!Ej*9zwR?>J?JnguRPhl;#d 
zi_4Yl6!f5veT1o3IIH7#0JlpO4S`Vp;HbOfqqhm(8y->(y~YXVVQFGMI*dUEGBq(P zNV_^Fg~UmPRIG=c%~}>jSpu6GbePj=rADre5tFD475sUsDY`zdLD<-UF#Il=ehtOr zxi{VhVVICPX-3UGrlcE-qOzciGk7uD8qDD7K0-IRR^kk@uZSF>PbDiV)WBgfGCho` zOcRe!J+`GdcdkzcexhX0ms74u;s7J-rGqC>?$=e4|3a_< z!WM5t7i@U?Jd3+{2bt z&pxw8!CRU=7N5RG43vq;3=O4W?8K(wKhy46xlXsxS@xk zl)jq|)^A~@;WP4d&E`(-FZdRBP%6^Q=k>Pd4q~PED3km%#bRWA0#`=uC$oMN6W;y3 z@;jvBT3XyLc&=enF!=QVKr!&U-#jrG?;HHCz)6u}tDX>sq$pP&c&BqD>$**;P5G!O z#0;i`??fE85{=eYeblkJ-)pEBb^WZ2q@Px%bz__*G^3kq6HJ0{III$4TYQ{_+?rgN zFUAW@R_}x8Mw10#Tl2W0Sz1UNMS(6cg2Kg}sJrTA5gxp6-4<%&qm&tc>lqs`ZJNjw z;WEUm7G(S?DRppx+F4-6<_#^xMQD|>l(K#7zdy{Z!BbkpJ|;($Nyp@o z)&%95qC8fi_M0OB9S1xKzYgj@G6z5J(;9PcVGnot26$&At=q(KhtBFnOs1}qg!HBl zqYB|ZK?Qw2BRNE8jK2y2uUL^Av{4{9r}$I%Il~2wIZf+8f%<^a%JNJwEic7KKa{VaM^2k(GS&WR(aG*QSsW;*j_8wa76%_Zq(Ftuch@>(7si<2G68Jz_ zee_kZ|CykF;*}>G?cFGQW;<*JSmJdt*OEP7L*P`?B#Yc(hMMiG#!g zhu{pSG~whm(x*kzaUt8}Y5BnkMHwrO{?tl2_>NoK!b`ZUaGo{yMaL@cr4FE2(FU+g zpt`5pv&sJNx3J3cb(riSV24GOCZH?}yB_>7u4qeUm{(eO!j1o^mru2%SHMR6_P};Scb9cL zKTd6=ajPBlCw{*W3fLkqRgw(+ag;XAK`S89jzyQCXut8mJx0(qw!HX-d8dOqqs6u} zEmUccn^FzjE=y^E(pc3Gm1LFj-M z1`R26Fwui%mn3m-TX>?2tF;EIiXD}5tiuv*2{{CMFW)5arN~u3KZFd86o4lrI6ALK zlH@fJjHE=mzyr0^*t+~JQ;0xpHsNOyPci@2DiOF6;XPbUwjkj{#T{|Vd@a8sRS+w# za1ux(X2cN%50rNsH+lKAyE`f>Vh`Ec31D-AIiswn^YA#gyetO( zoT%u@5m6gol1`xM2ZG5jNFXJ1%)fZ%f_83WYDdXQJ>+tuToRY{yBo=T}ZG=KsnhmZ@qhkw1o0oS&)8Wv${8lV(dBnohP!QKB ztzh-|fi(t`R`P%aX)H2Q$hZ*V{j{-#Lba9er$rD(j>bL3g|nTW@kxC$=>}guy5Dy% z)P>0V+hmUTJ{=TjBqLG;n(m)Ew5hWC9o%b*|1j*$W}$D70_I2?20YFvQY3ms(ge&C zH$N3%_H}Uh&co5ONG@cqAArh^xiEL2{W|*-!K*0105C9QW4sOk000000Rf+EL?Qpg z?bK8ts|kAA@>i0r`=kqy?JnPRPz_r8Q$L&#n#f{J3cM{O>n2^~UWuF>A`>=IWCg@` zSFvY{YHWvaTlg_RiSX=U*Ka4msEblZw-5mn^j?;3Oov{ST?~Wlp5otnGgX_|5{S5O zZqa4tbX|Wrpg>hAzuwTdb!})*a|FUC>F`_X#~e5D5)kl@O5YQ+y*-V0)EsI2?Lbk2 zxMe{oNV{~*sB|^VY?d3*sW6U^f&Et8Xf*y0)zd%JMK62Ak&Z~m+_cj+)R+dEF*kc3L`z$uKG)i&o5_BUYY3u9PW1ZhdD7$@=0@!J%;o1S;? 
zvYCBk*~-v$tQ=vka_*I6f6*<>Icn)%1 zI+xH9N4!>`=9qcF7}JZY0LnEgOoOPeb7Y4$P%s_HH^8Wu&e7?$TTnTswT#0&uV~sI zD86!oUa?Jy{5b;8w;mJg)i4!nd`uX71tZSx{MQnVklC0UEyqeJ*1uuL?4)fxlPR`$ zdK56Wq^|OlmvhsImgCOTb(BoKg5KrwDzq;#!>AEt|2zW$2Bs^jrsngDAF0`={Leb$(C@tE%N(Qm$Pj_}IA z)fw0kU$z5S)8d<{;PRFIQgeNHD|WQ6t#I^sqVw+b~UrTBN+Htl(AKtA{;#9?z;f&B>uF9LfEGb&tc;Gz$r!-$%^*il?yo?Y6mJre|Z zCwV6u%Qb8dvfCs-ACpDDxjP<(2H;foX8JtQ>2j{jt&B= zgWm4AkHN5ny(@L6Y`nqMT*Uq+vkZ=8^jnS^GvGRzsYVt?EDZP{?xDLDj}4osv%P`& zkKBogA#L#Y_A`6n#=W~AX4v0C5d4=FJzVM$zEhVWXjy+=8uo+NO(J7 zW^4bIm%KmuJRKFx!Mk1|68$U;5N%Sl{w@v_eA z_{}hIDlDCS^-Zhoz{O!P7jQpGn0`XnW3_x*;N_Zribe9JIYdE=# zsY)669UvQvv}<+H+e(9|OelNSJ>7ztXDCM68g*Si)L(&69>N)deyCR~A6h=O;}~}a zxHHGSaq`?HAYSQ04Jr1u%VNhW-JtBVZyz;4vUv=w!j_;`KwQzxR3lK&om=e+`PtUa zzLjp>(YhEYv~d9kB9Qja5VtXM9?Pv%yC^Y=P3e~sKUlI0XXe$9y*h-NrL0DYZdUYV z+_1bm7E(OfOD9~JG+GhDUQJ4inXF*H~Ws(8S4-(DOn~- zKhZ~oZq0-hJ7OiNn80Nigr~8tt7v;z2$vpTt<83j__vWUx=C7$Vsn!Vx$zf8j^kTD zm)OJgi>Dn$zm*S33n{J~3kFwL^<|rdxpIBayC}wty z1%3wsm^6y|jnt1MWRe9nU?fmE-Gd|Vjp5)8g~GM`a%6uJ5^dZbEC`d8{QRMs9c5Fy zT96E*QB#UQVrFJjd0&uulMRMVZ*@VQp|Qw};g&Y8p~785hn6|RmBP%w|9>wz+eXH# z{xTI|AA|<0oC<7QpY^?eYZc!bb^>q;Lq3>H^-|dAD^LYpJT4MUGccM2*eV#UOaWyQrR%<4aZEL>GPW+>7f?Dz&fG1v<*&<*&l zM-zs;I`p!@9Am@R9(UD1_Ev16^*VK>{943~B@j_ZhiTt$^3p5(R}mRF=(n(-Q*)TW znQO`3ob){*lk{8UIWG=02a>U4umjpGj*T<)ZylwBC0P4;FWf7WRZ^Q4W8J=c&M78jgR|^HFx+hSj^p}V^j$~ge7p;O`AiJQur}NC6Dnb^8`xB@+ z6+io~FeAg)sg-?lSTn$ zWUZary#MtmlhG5-0&k&$hH}IOO9XYTJCLC(PZ# zB^Ze_()1puF5G$JVimt)a&55gI1Q ziej?R|BXtn>Bxsn+=~__2^Cfw9?2UG~R zr22oe$tsMU-uk(>AxX0(AuupxV?s;-000000YRH^G^D{D|2{dgQZ?H!*qAzEdjCIz z5^360S)kmV)?JB@;gH^-Eg+(V%vspwh5xJqxb`ev@&9lXI@bXdnD0^J5^ zDUxGHjfe6V%b(ji5Y?6A&Um|XU}Gy zdKs74J1bhmkpv7c3EyI9isue%*(WD$o|rUz&<0iDzWx}r0@2X-LHDMf%-P)A^Dvm# z4PVQfWZ3XXk@Q(B0cFA}tqS6%?p&QboR}R)K$Vyq;SS7{NbREqxXBSyBS_>pHLjp$ zsk1_TY}PIlatp4q@KCb@0yxer`T5;g2DG+B^HVG zyDwuPVQ<5tSXJbf`oHQ)J`(r1Rv;$~)ukY?C1t!pK|gGYhG`L<_@41c>^WT@xYvxE zb|bRsZOkP5oKaE!B|8jI;*5Zf_q=WorW7U0-K$Q74*{Cr`)&}4#Y*EedAEl<*pTs; z<>W{#TR@h3;0bHW+{x(vmhGwtHzg*VMkuVm{cUH7vpCrgGuk7%zb50wTPxCi zS>&cAeyTx|g9g-aLPso5RUFRgVX;ins4~=lGyo}@I{@R>Y;S*_gG3|e8p3DBiKL-7 z%TO+T2Y+9F(*tuN@9{i$PI}_zqIaeq;@;d!P3%DrC21D;w%^PXs5y_9-S8 zE$f9D_jQ0PePQxnnt`SGp71RH-VD7_1v3o|(}CjSZ5DUvg5k6`Q-9YQjzqi{N(NOQ zqY5e1QN(?H=&_)x$SjZuD~5Nmu>Nt{BY#8_*H<=DW%s?8*qj1C=%QT1$Ug2vAAdu{PA_AN6GXY zJiqRP;Hs`%Mi?vz4Q+$IhBayn z>lf5=ssGp$(RAcq1a_+xDvkZ>6b#HZ*A@XCAET@YGjG}*nej{8Tz<$ypg8~>QqW-} zJqev)7XDep^zF0+aosxUKT=;Z&mVpBKuuuW6Im0v{_FSl& zO~O=erKo6X_?>Xh{{~<><|2Nl8!F%#?`sX<8!=}+0HJ9FLh*Ynf3F^MahTnUp0#0Z zd9E)SYC8TLfSX={W*u)Ni}MKct3a04VJ<~~63ptsJfo_)aStHN~5vL#>ni zS~n-$y=MtLg~3ilCx-EP@tZHY8ad&5-WV9IdlNKn6L!07#3=?%v0GS9ezo~zo;uAf zkb=Q})Llha<%b@Ry~(7Z7amT>)X?p3`*V41avcYY7??x4&(lsK5j9-KrL_K6TpB9u^Di`b~*2;QUOewucnx-|%;HT|4Y~fy`Ttp1@oLzz4_(r@Wee7DU`U$XY2ZvrWfv=$`^rAE z5-YWbJ$MTJ>a!Ss3XrQ@xP0`6Y;!avwFB`TNJU>_4GLRppiI&2iHRYoy_IaI>H6;K z^ze#ChNH35TVFi9naTuJ0JBDyavgvU!|OZAC>x3@nTD|IIGD z+45`UOfq~K^IjviLO70l;vK0CL^CeJ;K1vT=mUZkQ1i{e@JYwzvZ`Y2tOTjz4g!`x zqw~>}Fw$x?3!UMr0w@ZMD;wUF(zGIxJ?9N_b9G#3aDBA2P5NX8&7q#52yH^?O0)VI znHNn;F$pT_PI5U0o&Iv)>q-#XbQ~eDVBb|B){LY_=U`M`JctGxwQJYT+xXZ;X?ef# zlgTr%DX*%nei^+t9RoSWBM{V-_;NmS()1Pm)25XHzX7K@*VBF0JW^!khb?Sf;xtLR zdYT0gzwGOaA4D|s*sIb5Fi?U+Bz3nRxY%hiW3%B-yl8m15nRx?WQR0J zgu1O~ZqyVT#60#jwDeK~R*M?_nU48cF8{@?*%=+@$Z5iL}4-)oRB z(sr$nBjGh-6gswE?*Y_=di!x}!?SYe#GO@ggZ=HfVOi|TB&Ydr{SzAJ=jU}0Xb3{c zSj9xc-@12~47hOlg?|;YfH;=uB!@Ie?tNhDs$e5bJntcn#Jo?IZ?S(SJ$842jO+03 z1hS2eMQJB=o1dc07khc@l?G z8)K#ZjUn@N8nnLRB~aq+6vRS!^*4w`kiPIj-?q_re%EIocCQ68aFPM|uKGV%UsROu 

-## Citation
-
-
-
-```BibTeX
-@inproceedings{gu2018ava,
-  title={Ava: A video dataset of spatio-temporally localized atomic visual actions},
-  author={Gu, Chunhui and Sun, Chen and Ross, David A and Vondrick, Carl and Pantofaru, Caroline and Li, Yeqing and Vijayanarasimhan, Sudheendra and Toderici, George and Ricco, Susanna and Sukthankar, Rahul and others},
-  booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition},
-  pages={6047--6056},
-  year={2018}
-}
-```
-
-
-
-```BibTeX
-@article{duan2020omni,
-  title={Omni-sourced Webly-supervised Learning for Video Recognition},
-  author={Duan, Haodong and Zhao, Yue and Xiong, Yuanjun and Liu, Wentao and Lin, Dahua},
-  journal={arXiv preprint arXiv:2003.13042},
-  year={2020}
-}
-```
-
 ```BibTeX
@@ -53,7 +32,7 @@ AVA, with its realistic scene and action complexity, exposes the intrinsic diffi
 }
 ```

-## Model Zoo
+## Results and Models

 ### AVA2.1

@@ -141,3 +120,26 @@ python tools/test.py configs/detection/ava/slowonly_kinetics_pretrained_r50_8x8x
 ```

 For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset) .
+
+## Citation
+
+
+
+```BibTeX
+@inproceedings{gu2018ava,
+  title={Ava: A video dataset of spatio-temporally localized atomic visual actions},
+  author={Gu, Chunhui and Sun, Chen and Ross, David A and Vondrick, Carl and Pantofaru, Caroline and Li, Yeqing and Vijayanarasimhan, Sudheendra and Toderici, George and Ricco, Susanna and Sukthankar, Rahul and others},
+  booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition},
+  pages={6047--6056},
+  year={2018}
+}
+```
+
+```BibTeX
+@article{duan2020omni,
+  title={Omni-sourced Webly-supervised Learning for Video Recognition},
+  author={Duan, Haodong and Zhao, Yue and Xiong, Yuanjun and Liu, Wentao and Lin, Dahua},
+  journal={arXiv preprint arXiv:2003.13042},
+  year={2020}
+}
+```
diff --git a/configs/detection/lfb/README.md b/configs/detection/lfb/README.md
index 2bd9a2a233..8e125d7593 100644
--- a/configs/detection/lfb/README.md
+++ b/configs/detection/lfb/README.md
@@ -1,5 +1,9 @@
 # LFB

+[Long-term feature banks for detailed video understanding](https://openaccess.thecvf.com/content_CVPR_2019/html/Wu_Long-Term_Feature_Banks_for_Detailed_Video_Understanding_CVPR_2019_paper.html)
+
+
+
 ## Abstract

@@ -11,33 +15,7 @@ To understand the world, we humans constantly need to relate the present to the
-## Citation
-
-
-
-```BibTeX
-@inproceedings{gu2018ava,
-  title={Ava: A video dataset of spatio-temporally localized atomic visual actions},
-  author={Gu, Chunhui and Sun, Chen and Ross, David A and Vondrick, Carl and Pantofaru, Caroline and Li, Yeqing and Vijayanarasimhan, Sudheendra and Toderici, George and Ricco, Susanna and Sukthankar, Rahul and others},
-  booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition},
-  pages={6047--6056},
-  year={2018}
-}
-```
-
-
-
-```BibTeX
-@inproceedings{wu2019long,
-  title={Long-term feature banks for detailed video understanding},
-  author={Wu, Chao-Yuan and Feichtenhofer, Christoph and Fan, Haoqi and He, Kaiming and Krahenbuhl, Philipp and Girshick, Ross},
-  booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition},
-  pages={284--293},
-  year={2019}
-}
-```
-
-## Model Zoo
+## Results and Models

 ### AVA2.1

@@ -124,3 +102,27 @@ python tools/test.py configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r
 ```

 For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset).
+
+## Citation
+
+
+
+```BibTeX
+@inproceedings{gu2018ava,
+  title={Ava: A video dataset of spatio-temporally localized atomic visual actions},
+  author={Gu, Chunhui and Sun, Chen and Ross, David A and Vondrick, Carl and Pantofaru, Caroline and Li, Yeqing and Vijayanarasimhan, Sudheendra and Toderici, George and Ricco, Susanna and Sukthankar, Rahul and others},
+  booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition},
+  pages={6047--6056},
+  year={2018}
+}
+```
+
+```BibTeX
+@inproceedings{wu2019long,
+  title={Long-term feature banks for detailed video understanding},
+  author={Wu, Chao-Yuan and Feichtenhofer, Christoph and Fan, Haoqi and He, Kaiming and Krahenbuhl, Philipp and Girshick, Ross},
+  booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition},
+  pages={284--293},
+  year={2019}
+}
+```
diff --git a/configs/localization/bmn/README.md b/configs/localization/bmn/README.md
index 43147c2109..16cdbbe568 100644
--- a/configs/localization/bmn/README.md
+++ b/configs/localization/bmn/README.md
@@ -1,5 +1,9 @@
 # BMN

+[Bmn: Boundary-matching network for temporal action proposal generation](https://openaccess.thecvf.com/content_ICCV_2019/html/Lin_BMN_Boundary-Matching_Network_for_Temporal_Action_Proposal_Generation_ICCV_2019_paper.html)
+
+
+
 ## Abstract

@@ -11,33 +15,7 @@ Temporal action proposal generation is an challenging and promising task which a

-## Citation
-
-
-
-```BibTeX
-@inproceedings{lin2019bmn,
-  title={Bmn: Boundary-matching network for temporal action proposal generation},
-  author={Lin, Tianwei and Liu, Xiao and Li, Xin and Ding, Errui and Wen, Shilei},
-  booktitle={Proceedings of the IEEE International Conference on Computer Vision},
-  pages={3889--3898},
-  year={2019}
-}
-```
-
-
-
-```BibTeX
-@article{zhao2017cuhk,
-  title={Cuhk \& ethz \& siat submission to activitynet challenge 2017},
-  author={Zhao, Y and Zhang, B and Wu, Z and Yang, S and Zhou, L and Yan, S and Wang, L and Xiong, Y and Lin, D and Qiao, Y and others},
-  journal={arXiv preprint arXiv:1710.08011},
-  volume={8},
-  year={2017}
-}
-```
-
-## Model Zoo
+## Results and Models

 ### ActivityNet feature

@@ -110,3 +88,27 @@ python tools/analysis/report_map.py --proposal path/to/proposal_file
 :::

 For more details and optional arguments infos, you can refer to **Test a dataset** part in
 [getting_started](/docs/getting_started.md#test-a-dataset) .
+
+## Citation
+
+```BibTeX
+@inproceedings{lin2019bmn,
+  title={Bmn: Boundary-matching network for temporal action proposal generation},
+  author={Lin, Tianwei and Liu, Xiao and Li, Xin and Ding, Errui and Wen, Shilei},
+  booktitle={Proceedings of the IEEE International Conference on Computer Vision},
+  pages={3889--3898},
+  year={2019}
+}
+```
+
+
+
+```BibTeX
+@article{zhao2017cuhk,
+  title={Cuhk \& ethz \& siat submission to activitynet challenge 2017},
+  author={Zhao, Y and Zhang, B and Wu, Z and Yang, S and Zhou, L and Yan, S and Wang, L and Xiong, Y and Lin, D and Qiao, Y and others},
+  journal={arXiv preprint arXiv:1710.08011},
+  volume={8},
+  year={2017}
+}
+```
diff --git a/configs/localization/bsn/README.md b/configs/localization/bsn/README.md
index d15b6361c7..1ec09467ab 100644
--- a/configs/localization/bsn/README.md
+++ b/configs/localization/bsn/README.md
@@ -1,5 +1,9 @@
 # BSN

+[Bsn: Boundary sensitive network for temporal action proposal generation](https://openaccess.thecvf.com/content_ECCV_2018/html/Tianwei_Lin_BSN_Boundary_Sensitive_ECCV_2018_paper.html)
+
+
+
 ## Abstract

@@ -11,21 +15,7 @@ Temporal action proposal generation is an important yet challenging problem, sin

-## Citation
-
-
-
-```BibTeX
-@inproceedings{lin2018bsn,
-  title={Bsn: Boundary sensitive network for temporal action proposal generation},
-  author={Lin, Tianwei and Zhao, Xu and Su, Haisheng and Wang, Chongjing and Yang, Ming},
-  booktitle={Proceedings of the European Conference on Computer Vision (ECCV)},
-  pages={3--19},
-  year={2018}
-}
-```
-
-## Model Zoo
+## Results and Models

 ### ActivityNet feature

@@ -168,3 +158,15 @@ Examples:
 :::

 For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset).
+
+## Citation
+
+```BibTeX
+@inproceedings{lin2018bsn,
+  title={Bsn: Boundary sensitive network for temporal action proposal generation},
+  author={Lin, Tianwei and Zhao, Xu and Su, Haisheng and Wang, Chongjing and Yang, Ming},
+  booktitle={Proceedings of the European Conference on Computer Vision (ECCV)},
+  pages={3--19},
+  year={2018}
+}
+```
diff --git a/configs/localization/ssn/README.md b/configs/localization/ssn/README.md
index c5e5dc09fa..d28d5568b7 100644
--- a/configs/localization/ssn/README.md
+++ b/configs/localization/ssn/README.md
@@ -1,5 +1,9 @@
 # SSN

+[Temporal Action Detection With Structured Segment Networks](https://openaccess.thecvf.com/content_iccv_2017/html/Zhao_Temporal_Action_Detection_ICCV_2017_paper.html)
+
+
+
 ## Abstract

@@ -11,21 +15,7 @@ Detecting actions in untrimmed videos is an important yet challenging task. In t
-## Citation
-
-
-
-```BibTeX
-@InProceedings{Zhao_2017_ICCV,
-author = {Zhao, Yue and Xiong, Yuanjun and Wang, Limin and Wu, Zhirong and Tang, Xiaoou and Lin, Dahua},
-title = {Temporal Action Detection With Structured Segment Networks},
-booktitle = {Proceedings of the IEEE International Conference on Computer Vision (ICCV)},
-month = {Oct},
-year = {2017}
-}
-```
-
-## Model Zoo
+## Results and Models

 | config | gpus | backbone | pretrain | mAP@0.3 | mAP@0.4 | mAP@0.5 | reference mAP@0.3 | reference mAP@0.4 | reference mAP@0.5 | gpu_mem(M) | ckpt | log | json | reference ckpt | reference json
 |:-:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:-:|:-:|:-:|:-:|---|:--:|:--:|
@@ -74,3 +64,15 @@ python tools/test.py configs/localization/ssn/ssn_r50_450e_thumos14_rgb_test.py
 ```

 For more details and optional arguments infos, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset).
+
+## Citation
+
+```BibTeX
+@InProceedings{Zhao_2017_ICCV,
+author = {Zhao, Yue and Xiong, Yuanjun and Wang, Limin and Wu, Zhirong and Tang, Xiaoou and Lin, Dahua},
+title = {Temporal Action Detection With Structured Segment Networks},
+booktitle = {Proceedings of the IEEE International Conference on Computer Vision (ICCV)},
+month = {Oct},
+year = {2017}
+}
+```
diff --git a/configs/recognition/c3d/README.md b/configs/recognition/c3d/README.md
index 067097fdbe..801dc0930b 100644
--- a/configs/recognition/c3d/README.md
+++ b/configs/recognition/c3d/README.md
@@ -1,5 +1,9 @@
 # C3D

+[Learning Spatiotemporal Features with 3D Convolutional Networks](https://openaccess.thecvf.com/content_iccv_2015/html/Tran_Learning_Spatiotemporal_Features_ICCV_2015_paper.html)
+
+
+
 ## Abstract

@@ -11,22 +15,7 @@ We propose a simple, yet effective approach for spatiotemporal feature learning

-## Citation
-
-
-
-```BibTeX
-@ARTICLE{2014arXiv1412.0767T,
-author = {Tran, Du and Bourdev, Lubomir and Fergus, Rob and Torresani, Lorenzo and Paluri, Manohar},
-title = {Learning Spatiotemporal Features with 3D Convolutional Networks},
-keywords = {Computer Science - Computer Vision and Pattern Recognition},
-year = 2014,
-month = dec,
-eid = {arXiv:1412.0767}
-}
-```
-
-## Model Zoo
+## Results and Models

 ### UCF-101

@@ -80,3 +69,18 @@ python tools/test.py configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb.
 ```

 For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset).
+
+## Citation
+
+
+
+```BibTeX
+@ARTICLE{2014arXiv1412.0767T,
+author = {Tran, Du and Bourdev, Lubomir and Fergus, Rob and Torresani, Lorenzo and Paluri, Manohar},
+title = {Learning Spatiotemporal Features with 3D Convolutional Networks},
+keywords = {Computer Science - Computer Vision and Pattern Recognition},
+year = 2014,
+month = dec,
+eid = {arXiv:1412.0767}
+}
+```
diff --git a/configs/recognition/csn/README.md b/configs/recognition/csn/README.md
index 3a48f6bbda..a5bd924c89 100644
--- a/configs/recognition/csn/README.md
+++ b/configs/recognition/csn/README.md
@@ -1,5 +1,9 @@
 # CSN

+[Video Classification With Channel-Separated Convolutional Networks](https://openaccess.thecvf.com/content_ICCV_2019/html/Tran_Video_Classification_With_Channel-Separated_Convolutional_Networks_ICCV_2019_paper.html)
+
+
+
 ## Abstract

@@ -11,34 +15,7 @@ Group convolution has been shown to offer great computational savings in various

-## Citation
-
-
-
-```BibTeX
-@inproceedings{inproceedings,
-author = {Wang, Heng and Feiszli, Matt and Torresani, Lorenzo},
-year = {2019},
-month = {10},
-pages = {5551-5560},
-title = {Video Classification With Channel-Separated Convolutional Networks},
-doi = {10.1109/ICCV.2019.00565}
-}
-```
-
-
-
-```BibTeX
-@inproceedings{ghadiyaram2019large,
-  title={Large-scale weakly-supervised pre-training for video action recognition},
-  author={Ghadiyaram, Deepti and Tran, Du and Mahajan, Dhruv},
-  booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition},
-  pages={12046--12055},
-  year={2019}
-}
-```
-
-## Model Zoo
+## Results and Models

 ### Kinetics-400

@@ -103,3 +80,28 @@ python tools/test.py configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_
 ```

 For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset).
+
+## Citation
+
+```BibTeX
+@inproceedings{inproceedings,
+author = {Wang, Heng and Feiszli, Matt and Torresani, Lorenzo},
+year = {2019},
+month = {10},
+pages = {5551-5560},
+title = {Video Classification With Channel-Separated Convolutional Networks},
+doi = {10.1109/ICCV.2019.00565}
+}
+```
+
+
+
+```BibTeX
+@inproceedings{ghadiyaram2019large,
+  title={Large-scale weakly-supervised pre-training for video action recognition},
+  author={Ghadiyaram, Deepti and Tran, Du and Mahajan, Dhruv},
+  booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition},
+  pages={12046--12055},
+  year={2019}
+}
+```
diff --git a/configs/recognition/i3d/README.md b/configs/recognition/i3d/README.md
index 5a2bfd7a33..02a97b8dc9 100644
--- a/configs/recognition/i3d/README.md
+++ b/configs/recognition/i3d/README.md
@@ -1,5 +1,11 @@
 # I3D

+[Quo Vadis, Action Recognition? A New Model and the Kinetics Dataset](https://openaccess.thecvf.com/content_cvpr_2017/html/Carreira_Quo_Vadis_Action_CVPR_2017_paper.html)
+
+[Non-local Neural Networks](https://openaccess.thecvf.com/content_cvpr_2018/html/Wang_Non-Local_Neural_Networks_CVPR_2018_paper.html)
+
+
+
 ## Abstract

@@ -11,33 +17,7 @@ The paucity of videos in current action classification datasets (UCF-101 and HMD

-## Citation
-
-
-
-```BibTeX
-@inproceedings{inproceedings,
-  author = {Carreira, J. and Zisserman, Andrew},
-  year = {2017},
-  month = {07},
-  pages = {4724-4733},
-  title = {Quo Vadis, Action Recognition? A New Model and the Kinetics Dataset},
A New Model and the Kinetics Dataset}, - doi = {10.1109/CVPR.2017.502} -} -``` - - - -```BibTeX -@article{NonLocal2018, - author = {Xiaolong Wang and Ross Girshick and Abhinav Gupta and Kaiming He}, - title = {Non-local Neural Networks}, - journal = {CVPR}, - year = {2018} -} -``` - -## Model Zoo +## Results and Models ### Kinetics-400 @@ -101,3 +81,27 @@ python tools/test.py configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb ``` For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). + +## Citation + +```BibTeX +@inproceedings{inproceedings, + author = {Carreira, J. and Zisserman, Andrew}, + year = {2017}, + month = {07}, + pages = {4724-4733}, + title = {Quo Vadis, Action Recognition? A New Model and the Kinetics Dataset}, + doi = {10.1109/CVPR.2017.502} +} +``` + + + +```BibTeX +@article{NonLocal2018, + author = {Xiaolong Wang and Ross Girshick and Abhinav Gupta and Kaiming He}, + title = {Non-local Neural Networks}, + journal = {CVPR}, + year = {2018} +} +``` diff --git a/configs/recognition/omnisource/README.md b/configs/recognition/omnisource/README.md index f354c0401f..6f90a181c0 100644 --- a/configs/recognition/omnisource/README.md +++ b/configs/recognition/omnisource/README.md @@ -1,8 +1,8 @@ # Omni-sourced Webly-supervised Learning for Video Recognition -[Haodong Duan](https://github.com/kennymckormick), [Yue Zhao](https://github.com/zhaoyue-zephyrus), [Yuanjun Xiong](https://github.com/yjxiong), Wentao Liu, [Dahua Lin](https://github.com/lindahua) +[Omni-sourced Webly-supervised Learning for Video Recognition](https://arxiv.org/abs/2003.13042) -In ECCV, 2020. [Paper](https://arxiv.org/abs/2003.13042), [Dataset](https://docs.google.com/forms/d/e/1FAIpQLSd8_GlmHzG8FcDbW-OEu__G7qLgOSYZpH-i5vYVJcu7wcb_TQ/viewform?usp=sf_link) +[Dataset](https://docs.google.com/forms/d/e/1FAIpQLSd8_GlmHzG8FcDbW-OEu__G7qLgOSYZpH-i5vYVJcu7wcb_TQ/viewform?usp=sf_link) ## Abstract @@ -15,20 +15,7 @@ We introduce OmniSource, a novel framework for leveraging web data to train vide -## Citation - - - -```BibTeX -@article{duan2020omni, - title={Omni-sourced Webly-supervised Learning for Video Recognition}, - author={Duan, Haodong and Zhao, Yue and Xiong, Yuanjun and Liu, Wentao and Lin, Dahua}, - journal={arXiv preprint arXiv:2003.13042}, - year={2020} -} -``` - -## Model Zoo +## Results and Models ### Kinetics-400 Model Release @@ -77,3 +64,16 @@ We also list the benchmark in the original paper which run on Kinetics-400 for c | :--------------------: | :---------: | :---------: | :----------: | :---------: | :---------: | :---------: | | TSN-3seg-ResNet50 | 70.6 / 89.4 | 71.5 / 89.5 | 72.0 / 90.0 | 72.0 / 90.3 | 71.7 / 89.6 | 73.6 / 91.0 | | SlowOnly-4x16-ResNet50 | 73.8 / 90.9 | 74.5 / 91.4 | 75.2 / 91.6 | 75.2 / 91.7 | 74.5 / 91.1 | 76.6 / 92.5 | + +## Citation + + + +```BibTeX +@article{duan2020omni, + title={Omni-sourced Webly-supervised Learning for Video Recognition}, + author={Duan, Haodong and Zhao, Yue and Xiong, Yuanjun and Liu, Wentao and Lin, Dahua}, + journal={arXiv preprint arXiv:2003.13042}, + year={2020} +} +``` diff --git a/configs/recognition/r2plus1d/README.md b/configs/recognition/r2plus1d/README.md index f9cd05cca1..3236372e7e 100644 --- a/configs/recognition/r2plus1d/README.md +++ b/configs/recognition/r2plus1d/README.md @@ -1,5 +1,9 @@ # R2plus1D +[A closer look at spatiotemporal convolutions for action recognition](https://openaccess.thecvf.com/content_cvpr_2018/html/Tran_A_Closer_Look_CVPR_2018_paper.html) + 
+ + ## Abstract @@ -11,21 +15,7 @@ In this paper we discuss several forms of spatiotemporal convolutions for video -## Citation - - - -```BibTeX -@inproceedings{tran2018closer, - title={A closer look at spatiotemporal convolutions for action recognition}, - author={Tran, Du and Wang, Heng and Torresani, Lorenzo and Ray, Jamie and LeCun, Yann and Paluri, Manohar}, - booktitle={Proceedings of the IEEE conference on Computer Vision and Pattern Recognition}, - pages={6450--6459}, - year={2018} -} -``` - -## Model Zoo +## Results and Models ### Kinetics-400 @@ -83,3 +73,15 @@ python tools/test.py configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kineti ``` For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). + +## Citation + +```BibTeX +@inproceedings{tran2018closer, + title={A closer look at spatiotemporal convolutions for action recognition}, + author={Tran, Du and Wang, Heng and Torresani, Lorenzo and Ray, Jamie and LeCun, Yann and Paluri, Manohar}, + booktitle={Proceedings of the IEEE conference on Computer Vision and Pattern Recognition}, + pages={6450--6459}, + year={2018} +} +``` diff --git a/configs/recognition/slowfast/README.md b/configs/recognition/slowfast/README.md index 4bbdbd4f0c..e53273f778 100644 --- a/configs/recognition/slowfast/README.md +++ b/configs/recognition/slowfast/README.md @@ -1,5 +1,9 @@ # SlowFast +[SlowFast Networks for Video Recognition](https://openaccess.thecvf.com/content_ICCV_2019/html/Feichtenhofer_SlowFast_Networks_for_Video_Recognition_ICCV_2019_paper.html) + + + ## Abstract @@ -11,21 +15,7 @@ We present SlowFast networks for video recognition. Our model involves (i) a Slo -## Citation - - - -```BibTeX -@inproceedings{feichtenhofer2019slowfast, - title={Slowfast networks for video recognition}, - author={Feichtenhofer, Christoph and Fan, Haoqi and Malik, Jitendra and He, Kaiming}, - booktitle={Proceedings of the IEEE international conference on computer vision}, - pages={6202--6211}, - year={2019} -} -``` - -## Model Zoo +## Results and Models ### Kinetics-400 @@ -93,3 +83,15 @@ python tools/test.py configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinet ``` For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). + +## Citation + +```BibTeX +@inproceedings{feichtenhofer2019slowfast, + title={Slowfast networks for video recognition}, + author={Feichtenhofer, Christoph and Fan, Haoqi and Malik, Jitendra and He, Kaiming}, + booktitle={Proceedings of the IEEE international conference on computer vision}, + pages={6202--6211}, + year={2019} +} +``` diff --git a/configs/recognition/slowonly/README.md b/configs/recognition/slowonly/README.md index d5846782ae..5697f29322 100644 --- a/configs/recognition/slowonly/README.md +++ b/configs/recognition/slowonly/README.md @@ -1,5 +1,9 @@ # SlowOnly +[Slowfast networks for video recognition](https://openaccess.thecvf.com/content_ICCV_2019/html/Feichtenhofer_SlowFast_Networks_for_Video_Recognition_ICCV_2019_paper.html) + + + ## Abstract @@ -11,21 +15,7 @@ We present SlowFast networks for video recognition. 
Our model involves (i) a Slo -## Citation - - - -```BibTeX -@inproceedings{feichtenhofer2019slowfast, - title={Slowfast networks for video recognition}, - author={Feichtenhofer, Christoph and Fan, Haoqi and Malik, Jitendra and He, Kaiming}, - booktitle={Proceedings of the IEEE international conference on computer vision}, - pages={6202--6211}, - year={2019} -} -``` - -## Model Zoo +## Results and Models ### Kinetics-400 @@ -155,3 +145,15 @@ python tools/test.py configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinet ``` For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). + +## Citation + +```BibTeX +@inproceedings{feichtenhofer2019slowfast, + title={Slowfast networks for video recognition}, + author={Feichtenhofer, Christoph and Fan, Haoqi and Malik, Jitendra and He, Kaiming}, + booktitle={Proceedings of the IEEE international conference on computer vision}, + pages={6202--6211}, + year={2019} +} +``` diff --git a/configs/recognition/tanet/README.md b/configs/recognition/tanet/README.md index 37760e5042..56a010a100 100644 --- a/configs/recognition/tanet/README.md +++ b/configs/recognition/tanet/README.md @@ -1,5 +1,9 @@ # TANet +[TAM: Temporal Adaptive Module for Video Recognition](https://openaccess.thecvf.com/content/ICCV2021/html/Liu_TAM_Temporal_Adaptive_Module_for_Video_Recognition_ICCV_2021_paper.html) + + + ## Abstract @@ -11,20 +15,7 @@ Video data is with complex temporal dynamics due to various factors such as came -## Citation - - - -```BibTeX -@article{liu2020tam, - title={TAM: Temporal Adaptive Module for Video Recognition}, - author={Liu, Zhaoyang and Wang, Limin and Wu, Wayne and Qian, Chen and Lu, Tong}, - journal={arXiv preprint arXiv:2005.06803}, - year={2020} -} -``` - -## Model Zoo +## Results and Models ### Kinetics-400 @@ -87,3 +78,14 @@ python tools/test.py configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kineti ``` For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). + +## Citation + +```BibTeX +@article{liu2020tam, + title={TAM: Temporal Adaptive Module for Video Recognition}, + author={Liu, Zhaoyang and Wang, Limin and Wu, Wayne and Qian, Chen and Lu, Tong}, + journal={arXiv preprint arXiv:2005.06803}, + year={2020} +} +``` diff --git a/configs/recognition/timesformer/README.md b/configs/recognition/timesformer/README.md index 54b4f25443..43d9134b17 100644 --- a/configs/recognition/timesformer/README.md +++ b/configs/recognition/timesformer/README.md @@ -1,5 +1,9 @@ # TimeSformer +[Is Space-Time Attention All You Need for Video Understanding?](https://arxiv.org/abs/2102.05095) + + + ## Abstract @@ -11,22 +15,7 @@ We present a convolution-free approach to video classification built exclusively -## Citation - - - -```BibTeX -@misc{bertasius2021spacetime, - title = {Is Space-Time Attention All You Need for Video Understanding?}, - author = {Gedas Bertasius and Heng Wang and Lorenzo Torresani}, - year = {2021}, - eprint = {2102.05095}, - archivePrefix = {arXiv}, - primaryClass = {cs.CV} -} -``` - -## Model Zoo +## Results and Models ### Kinetics-400 @@ -83,3 +72,16 @@ python tools/test.py configs/recognition/timesformer/timesformer_divST_8x32x1_15 ``` For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). 
+ +## Citation + +```BibTeX +@misc{bertasius2021spacetime, + title = {Is Space-Time Attention All You Need for Video Understanding?}, + author = {Gedas Bertasius and Heng Wang and Lorenzo Torresani}, + year = {2021}, + eprint = {2102.05095}, + archivePrefix = {arXiv}, + primaryClass = {cs.CV} +} +``` diff --git a/configs/recognition/tin/README.md b/configs/recognition/tin/README.md index cf57eed749..69449d71f4 100644 --- a/configs/recognition/tin/README.md +++ b/configs/recognition/tin/README.md @@ -1,5 +1,9 @@ # TIN +[Temporal Interlacing Network](https://ojs.aaai.org/index.php/AAAI/article/view/6872) + + + ## Abstract @@ -11,20 +15,7 @@ For a long time, the vision community tries to learn the spatio-temporal represe -## Citation - - - -```BibTeX -@article{shao2020temporal, - title={Temporal Interlacing Network}, - author={Hao Shao and Shengju Qian and Yu Liu}, - year={2020}, - journal={AAAI}, -} -``` - -## Model Zoo +## Results and Models ### Something-Something V1 @@ -97,3 +88,14 @@ python tools/test.py configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py \ ``` For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). + +## Citation + +```BibTeX +@article{shao2020temporal, + title={Temporal Interlacing Network}, + author={Hao Shao and Shengju Qian and Yu Liu}, + year={2020}, + journal={AAAI}, +} +``` diff --git a/configs/recognition/tpn/README.md b/configs/recognition/tpn/README.md index 7ce9ce6f63..1e95e1fe1e 100644 --- a/configs/recognition/tpn/README.md +++ b/configs/recognition/tpn/README.md @@ -1,5 +1,9 @@ # TPN +[Temporal Pyramid Network for Action Recognition](https://openaccess.thecvf.com/content_CVPR_2020/html/Yang_Temporal_Pyramid_Network_for_Action_Recognition_CVPR_2020_paper.html) + + + ## Abstract @@ -11,20 +15,7 @@ Visual tempo characterizes the dynamics and the temporal scale of an action. Mod -## Citation - - - -```BibTeX -@inproceedings{yang2020tpn, - title={Temporal Pyramid Network for Action Recognition}, - author={Yang, Ceyuan and Xu, Yinghao and Shi, Jianping and Dai, Bo and Zhou, Bolei}, - booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition (CVPR)}, - year={2020}, -} -``` - -## Model Zoo +## Results and Models ### Kinetics-400 @@ -87,3 +78,14 @@ python tools/test.py configs/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetic ``` For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). 
+ +## Citation + +```BibTeX +@inproceedings{yang2020tpn, + title={Temporal Pyramid Network for Action Recognition}, + author={Yang, Ceyuan and Xu, Yinghao and Shi, Jianping and Dai, Bo and Zhou, Bolei}, + booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition (CVPR)}, + year={2020}, +} +``` diff --git a/configs/recognition/trn/README.md b/configs/recognition/trn/README.md index ff2f4d8785..f5ab424971 100644 --- a/configs/recognition/trn/README.md +++ b/configs/recognition/trn/README.md @@ -1,5 +1,9 @@ # TRN +[Temporal Relational Reasoning in Videos](https://openaccess.thecvf.com/content_ECCV_2018/html/Bolei_Zhou_Temporal_Relational_Reasoning_ECCV_2018_paper.html) + + + ## Abstract @@ -11,20 +15,7 @@ Temporal relational reasoning, the ability to link meaningful transformations of -## Citation - - - -```BibTeX -@article{zhou2017temporalrelation, - title = {Temporal Relational Reasoning in Videos}, - author = {Zhou, Bolei and Andonian, Alex and Oliva, Aude and Torralba, Antonio}, - journal={European Conference on Computer Vision}, - year={2018} -} -``` - -## Model Zoo +## Results and Models ### Something-Something V1 @@ -89,3 +80,14 @@ python tools/test.py configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py \ ``` For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). + +## Citation + +```BibTeX +@article{zhou2017temporalrelation, + title = {Temporal Relational Reasoning in Videos}, + author = {Zhou, Bolei and Andonian, Alex and Oliva, Aude and Torralba, Antonio}, + journal={European Conference on Computer Vision}, + year={2018} +} +``` diff --git a/configs/recognition/tsm/README.md b/configs/recognition/tsm/README.md index c3528ee5be..ca5d4ac25e 100644 --- a/configs/recognition/tsm/README.md +++ b/configs/recognition/tsm/README.md @@ -1,5 +1,9 @@ # TSM +[TSM: Temporal Shift Module for Efficient Video Understanding](https://openaccess.thecvf.com/content_ICCV_2019/html/Lin_TSM_Temporal_Shift_Module_for_Efficient_Video_Understanding_ICCV_2019_paper.html) + + + ## Abstract @@ -11,31 +15,7 @@ The explosive growth in video streaming gives rise to challenges on performing v -## Citation - - - -```BibTeX -@inproceedings{lin2019tsm, - title={TSM: Temporal Shift Module for Efficient Video Understanding}, - author={Lin, Ji and Gan, Chuang and Han, Song}, - booktitle={Proceedings of the IEEE International Conference on Computer Vision}, - year={2019} -} -``` - - - -```BibTeX -@article{NonLocal2018, - author = {Xiaolong Wang and Ross Girshick and Abhinav Gupta and Kaiming He}, - title = {Non-local Neural Networks}, - journal = {CVPR}, - year = {2018} -} -``` - -## Model Zoo +## Results and Models ### Kinetics-400 @@ -188,3 +168,25 @@ python tools/test.py configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.p ``` For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). 
+ +## Citation + +```BibTeX +@inproceedings{lin2019tsm, + title={TSM: Temporal Shift Module for Efficient Video Understanding}, + author={Lin, Ji and Gan, Chuang and Han, Song}, + booktitle={Proceedings of the IEEE International Conference on Computer Vision}, + year={2019} +} +``` + + + +```BibTeX +@article{NonLocal2018, + author = {Xiaolong Wang and Ross Girshick and Abhinav Gupta and Kaiming He}, + title = {Non-local Neural Networks}, + journal = {CVPR}, + year = {2018} +} +``` diff --git a/configs/recognition/tsn/README.md b/configs/recognition/tsn/README.md index f3f5811ef5..61656f7d71 100644 --- a/configs/recognition/tsn/README.md +++ b/configs/recognition/tsn/README.md @@ -1,5 +1,9 @@ # TSN +[Temporal segment networks: Towards good practices for deep action recognition](https://link.springer.com/chapter/10.1007/978-3-319-46484-8_2) + + + ## Abstract @@ -11,22 +15,7 @@ Deep convolutional networks have achieved great success for visual recognition i -## Citation - - - -```BibTeX -@inproceedings{wang2016temporal, - title={Temporal segment networks: Towards good practices for deep action recognition}, - author={Wang, Limin and Xiong, Yuanjun and Wang, Zhe and Qiao, Yu and Lin, Dahua and Tang, Xiaoou and Van Gool, Luc}, - booktitle={European conference on computer vision}, - pages={20--36}, - year={2016}, - organization={Springer} -} -``` - -## Model Zoo +## Results and Models ### UCF-101 @@ -243,3 +232,16 @@ python tools/test.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb. ``` For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). + +## Citation + +```BibTeX +@inproceedings{wang2016temporal, + title={Temporal segment networks: Towards good practices for deep action recognition}, + author={Wang, Limin and Xiong, Yuanjun and Wang, Zhe and Qiao, Yu and Lin, Dahua and Tang, Xiaoou and Van Gool, Luc}, + booktitle={European conference on computer vision}, + pages={20--36}, + year={2016}, + organization={Springer} +} +``` diff --git a/configs/recognition/x3d/README.md b/configs/recognition/x3d/README.md index a7a3c7e715..8b26c2bacc 100644 --- a/configs/recognition/x3d/README.md +++ b/configs/recognition/x3d/README.md @@ -1,5 +1,9 @@ # X3D +[X3D: Expanding Architectures for Efficient Video Recognition](https://openaccess.thecvf.com/content_CVPR_2020/html/Feichtenhofer_X3D_Expanding_Architectures_for_Efficient_Video_Recognition_CVPR_2020_paper.html) + + + ## Abstract @@ -11,22 +15,7 @@ This paper presents X3D, a family of efficient video networks that progressively -## Citation - - - -```BibTeX -@misc{feichtenhofer2020x3d, - title={X3D: Expanding Architectures for Efficient Video Recognition}, - author={Christoph Feichtenhofer}, - year={2020}, - eprint={2004.04730}, - archivePrefix={arXiv}, - primaryClass={cs.CV} -} -``` - -## Model Zoo +## Results and Models ### Kinetics-400 @@ -63,3 +52,16 @@ python tools/test.py configs/recognition/x3d/x3d_s_13x6x1_facebook_kinetics400_r ``` For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). 
+ +## Citation + +```BibTeX +@misc{feichtenhofer2020x3d, + title={X3D: Expanding Architectures for Efficient Video Recognition}, + author={Christoph Feichtenhofer}, + year={2020}, + eprint={2004.04730}, + archivePrefix={arXiv}, + primaryClass={cs.CV} +} +``` diff --git a/configs/recognition_audio/resnet/README.md b/configs/recognition_audio/resnet/README.md index 4c9ee539c8..40b143142e 100644 --- a/configs/recognition_audio/resnet/README.md +++ b/configs/recognition_audio/resnet/README.md @@ -1,5 +1,9 @@ # ResNet for Audio +[Audiovisual SlowFast Networks for Video Recognition](https://arxiv.org/abs/2001.08740) + + + ## Abstract @@ -12,20 +16,7 @@ tecture for integrated audiovisual perception. AVSlowFast has Slow and Fast visu -## Citation - - - -```BibTeX -@article{xiao2020audiovisual, - title={Audiovisual SlowFast Networks for Video Recognition}, - author={Xiao, Fanyi and Lee, Yong Jae and Grauman, Kristen and Malik, Jitendra and Feichtenhofer, Christoph}, - journal={arXiv preprint arXiv:2001.08740}, - year={2020} -} -``` - -## Model Zoo +## Results and Models ### Kinetics-400 @@ -92,3 +83,14 @@ python tools/analysis/report_accuracy.py --scores ${AUDIO_RESULT_PKL} ${VISUAL_R + AUDIO_RESULT_PKL: The saved output file of `tools/test.py` by the argument `--out`. + VISUAL_RESULT_PKL: The saved output file of `tools/test.py` by the argument `--out`. + +## Citation + +```BibTeX +@article{xiao2020audiovisual, + title={Audiovisual SlowFast Networks for Video Recognition}, + author={Xiao, Fanyi and Lee, Yong Jae and Grauman, Kristen and Malik, Jitendra and Feichtenhofer, Christoph}, + journal={arXiv preprint arXiv:2001.08740}, + year={2020} +} +``` diff --git a/configs/skeleton/2s-agcn/README.md b/configs/skeleton/2s-agcn/README.md index d6049c735c..2bfab3c7a7 100644 --- a/configs/skeleton/2s-agcn/README.md +++ b/configs/skeleton/2s-agcn/README.md @@ -1,5 +1,9 @@ # AGCN +[Two-Stream Adaptive Graph Convolutional Networks for Skeleton-Based Action Recognition](https://openaccess.thecvf.com/content_CVPR_2019/html/Shi_Two-Stream_Adaptive_Graph_Convolutional_Networks_for_Skeleton-Based_Action_Recognition_CVPR_2019_paper.html) + + + ## Abstract @@ -11,21 +15,7 @@ In skeleton-based action recognition, graph convolutional networks (GCNs), which -## Citation - - - -```BibTeX -@inproceedings{shi2019two, - title={Two-stream adaptive graph convolutional networks for skeleton-based action recognition}, - author={Shi, Lei and Zhang, Yifan and Cheng, Jian and Lu, Hanqing}, - booktitle={Proceedings of the IEEE/CVF conference on computer vision and pattern recognition}, - pages={12026--12035}, - year={2019} -} -``` - -## Model Zoo +## Results and Models ### NTU60_XSub @@ -85,3 +75,15 @@ python tools/test.py configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py \ ``` For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). 
+ +## Citation + +```BibTeX +@inproceedings{shi2019two, + title={Two-stream adaptive graph convolutional networks for skeleton-based action recognition}, + author={Shi, Lei and Zhang, Yifan and Cheng, Jian and Lu, Hanqing}, + booktitle={Proceedings of the IEEE/CVF conference on computer vision and pattern recognition}, + pages={12026--12035}, + year={2019} +} +``` diff --git a/configs/skeleton/posec3d/README.md b/configs/skeleton/posec3d/README.md index 3b8b686db6..60c47f2051 100644 --- a/configs/skeleton/posec3d/README.md +++ b/configs/skeleton/posec3d/README.md @@ -1,5 +1,9 @@ # PoseC3D +[Revisiting Skeleton-based Action Recognition](https://arxiv.org/abs/2104.13586) + + + ## Abstract @@ -11,21 +15,6 @@ Human skeleton, as a compact representation of human action, has received increa -## Citation - - - -```BibTeX -@misc{duan2021revisiting, - title={Revisiting Skeleton-based Action Recognition}, - author={Haodong Duan and Yue Zhao and Kai Chen and Dian Shao and Dahua Lin and Bo Dai}, - year={2021}, - eprint={2104.13586}, - archivePrefix={arXiv}, - primaryClass={cs.CV} -} -``` - @@ -60,7 +49,7 @@ Human skeleton, as a compact representation of human action, has received increa
    -## Model Zoo +## Results and Models ### FineGYM @@ -144,3 +133,16 @@ python tools/test.py configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint ``` For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). + +## Citation + +```BibTeX +@misc{duan2021revisiting, + title={Revisiting Skeleton-based Action Recognition}, + author={Haodong Duan and Yue Zhao and Kai Chen and Dian Shao and Dahua Lin and Bo Dai}, + year={2021}, + eprint={2104.13586}, + archivePrefix={arXiv}, + primaryClass={cs.CV} +} +``` diff --git a/configs/skeleton/stgcn/README.md b/configs/skeleton/stgcn/README.md index 98b95a5cf9..0f1d51c98b 100644 --- a/configs/skeleton/stgcn/README.md +++ b/configs/skeleton/stgcn/README.md @@ -1,5 +1,9 @@ # STGCN +[Spatial temporal graph convolutional networks for skeleton-based action recognition](https://jivp-eurasipjournals.springeropen.com/articles/10.1186/s13640-019-0476-x) + + + ## Abstract @@ -11,20 +15,7 @@ Dynamics of human body skeletons convey significant information for human action -## Citation - - - -```BibTeX -@inproceedings{yan2018spatial, - title={Spatial temporal graph convolutional networks for skeleton-based action recognition}, - author={Yan, Sijie and Xiong, Yuanjun and Lin, Dahua}, - booktitle={Thirty-second AAAI conference on artificial intelligence}, - year={2018} -} -``` - -## Model Zoo +## Results and Models ### NTU60_XSub @@ -79,3 +70,14 @@ python tools/test.py configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py \ ``` For more details, you can refer to **Test a dataset** part in [getting_started](/docs/getting_started.md#test-a-dataset). + +## Citation + +```BibTeX +@inproceedings{yan2018spatial, + title={Spatial temporal graph convolutional networks for skeleton-based action recognition}, + author={Yan, Sijie and Xiong, Yuanjun and Lin, Dahua}, + booktitle={Thirty-second AAAI conference on artificial intelligence}, + year={2018} +} +``` From 7c94243542985db813bb9021f97c95b59d136e52 Mon Sep 17 00:00:00 2001 From: "Michael P. Camilleri" Date: Sat, 29 Jan 2022 04:42:48 +0000 Subject: [PATCH 342/414] [Improvement] Variable fps (#1409) * Ability to set FPS for AVA Dataset * Updated docs for AVA Dataset --- mmaction/datasets/ava_dataset.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/mmaction/datasets/ava_dataset.py b/mmaction/datasets/ava_dataset.py index 312e0a299f..ec64a20c11 100644 --- a/mmaction/datasets/ava_dataset.py +++ b/mmaction/datasets/ava_dataset.py @@ -91,10 +91,9 @@ class AVADataset(BaseDataset): default value is referred from the official website. Default: 902. timestamp_end (int): The end point of included timestamps. The default value is referred from the official website. Default: 1798. + fps (int): Overrides the default FPS for the dataset. Default: 30. 
""" - _FPS = 30 - def __init__(self, ann_file, exclude_file, @@ -111,9 +110,11 @@ def __init__(self, modality='RGB', num_max_proposals=1000, timestamp_start=900, - timestamp_end=1800): + timestamp_end=1800, + fps=30): # since it inherits from `BaseDataset`, some arguments # should be assigned before performing `load_annotations()` + self._FPS = fps # Keep this as standard self.custom_classes = custom_classes if custom_classes is not None: assert num_classes == len(custom_classes) + 1 From 01bde6957af31ea04e5541004665895b983544d9 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Tue, 8 Feb 2022 13:13:50 +0800 Subject: [PATCH 343/414] [Fix] Fix bug in nondistributed multi-gpu training (#1406) * [Fix] Fix bug in nondistributed multi-gpu training * update --- tools/train.py | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/tools/train.py b/tools/train.py index d91c68e958..8c87ae650e 100644 --- a/tools/train.py +++ b/tools/train.py @@ -102,10 +102,21 @@ def main(): osp.splitext(osp.basename(args.config))[0]) if args.resume_from is not None: cfg.resume_from = args.resume_from - if args.gpu_ids is not None: - cfg.gpu_ids = args.gpu_ids - else: - cfg.gpu_ids = range(1) if args.gpus is None else range(args.gpus) + + if args.gpu_ids is not None or args.gpus is not None: + warnings.warn( + 'The Args `gpu_ids` and `gpus` are only used in non-distributed ' + 'mode and we highly encourage you to use distributed mode, i.e., ' + 'launch training with dist_train.sh. The two args will be ' + 'deperacted.') + if args.gpu_ids is not None: + warnings.warn( + 'Non-distributed training can only use 1 gpu now. We will ' + 'use the 1st one in gpu_ids. ') + cfg.gpu_ids = [args.gpu_ids[0]] + elif args.gpus is not None: + warnings.warn('Non-distributed training can only use 1 gpu now. ') + cfg.gpu_ids = range(1) # init distributed env first, since logger depends on the dist info. 
if args.launcher == 'none': From 171e360ae17343b28f014731f7bd4052d37bfbd6 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Tue, 8 Feb 2022 18:36:23 +0800 Subject: [PATCH 344/414] update isort --- .pre-commit-config.yaml | 8 ++------ demo/demo_skeleton.py | 2 +- demo/demo_video_structuralize.py | 2 +- mmaction/apis/test.py | 4 ++-- mmaction/core/evaluation/eval_hooks.py | 2 +- mmaction/datasets/pipelines/augmentations.py | 2 +- mmaction/models/backbones/resnet3d.py | 2 +- mmaction/models/recognizers/base.py | 1 + setup.cfg | 2 +- .../test_augmentations/test_pytorchvideo.py | 1 + tests/test_models/test_backbones.py | 5 +++-- .../test_models/test_common_modules/test_mobilenet_v2.py | 1 + tests/test_runtime/test_eval_hook.py | 2 +- tests/test_utils/test_onnx.py | 1 + tools/data/build_file_list.py | 1 + tools/data/denormalize_proposal_file.py | 3 +-- tools/test.py | 4 ++-- 17 files changed, 22 insertions(+), 21 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 11320fe43a..490c1a3430 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,12 +4,8 @@ repos: rev: 3.8.3 hooks: - id: flake8 - - repo: https://github.com/asottile/seed-isort-config - rev: v2.2.0 - hooks: - - id: seed-isort-config - - repo: https://github.com/timothycrosley/isort - rev: 4.3.21 + - repo: https://github.com/PyCQA/isort + rev: 5.10.1 hooks: - id: isort - repo: https://github.com/pre-commit/mirrors-yapf diff --git a/demo/demo_skeleton.py b/demo/demo_skeleton.py index 3bf14a5411..859a54e34f 100644 --- a/demo/demo_skeleton.py +++ b/demo/demo_skeleton.py @@ -15,7 +15,7 @@ try: from mmdet.apis import inference_detector, init_detector - from mmpose.apis import (init_pose_model, inference_top_down_pose_model, + from mmpose.apis import (inference_top_down_pose_model, init_pose_model, vis_pose_result) except (ImportError, ModuleNotFoundError): diff --git a/demo/demo_video_structuralize.py b/demo/demo_video_structuralize.py index 48fc8154ea..72120573a8 100644 --- a/demo/demo_video_structuralize.py +++ b/demo/demo_video_structuralize.py @@ -19,7 +19,7 @@ try: from mmdet.apis import inference_detector, init_detector - from mmpose.apis import (init_pose_model, inference_top_down_pose_model, + from mmpose.apis import (inference_top_down_pose_model, init_pose_model, vis_pose_result) except (ImportError, ModuleNotFoundError): diff --git a/mmaction/apis/test.py b/mmaction/apis/test.py index 95bff34a00..742b0e4ff7 100644 --- a/mmaction/apis/test.py +++ b/mmaction/apis/test.py @@ -12,8 +12,8 @@ from mmcv.runner import get_dist_info try: - from mmcv.engine import (single_gpu_test, multi_gpu_test, - collect_results_gpu, collect_results_cpu) + from mmcv.engine import (collect_results_cpu, collect_results_gpu, + multi_gpu_test, single_gpu_test) from_mmcv = True except (ImportError, ModuleNotFoundError): warnings.warn( diff --git a/mmaction/core/evaluation/eval_hooks.py b/mmaction/core/evaluation/eval_hooks.py index c870054e98..e125c3d2c6 100644 --- a/mmaction/core/evaluation/eval_hooks.py +++ b/mmaction/core/evaluation/eval_hooks.py @@ -9,8 +9,8 @@ from torch.utils.data import DataLoader try: - from mmcv.runner import EvalHook as BasicEvalHook from mmcv.runner import DistEvalHook as BasicDistEvalHook + from mmcv.runner import EvalHook as BasicEvalHook from_mmcv = True diff --git a/mmaction/datasets/pipelines/augmentations.py b/mmaction/datasets/pipelines/augmentations.py index 33f3b763fd..9bd5d266a1 100644 --- a/mmaction/datasets/pipelines/augmentations.py +++ 
b/mmaction/datasets/pipelines/augmentations.py @@ -102,8 +102,8 @@ class PytorchVideoTrans: def __init__(self, type, **kwargs): try: - import torch import pytorchvideo.transforms as ptv_trans + import torch except ImportError: raise RuntimeError('Install pytorchvideo to use PytorchVideoTrans') if digit_version(torch.__version__) < digit_version('1.8.0'): diff --git a/mmaction/models/backbones/resnet3d.py b/mmaction/models/backbones/resnet3d.py index 8ae00c64fa..f4ab71f9b9 100644 --- a/mmaction/models/backbones/resnet3d.py +++ b/mmaction/models/backbones/resnet3d.py @@ -13,8 +13,8 @@ from ..builder import BACKBONES try: - from mmdet.models.builder import SHARED_HEADS as MMDET_SHARED_HEADS from mmdet.models import BACKBONES as MMDET_BACKBONES + from mmdet.models.builder import SHARED_HEADS as MMDET_SHARED_HEADS mmdet_imported = True except (ImportError, ModuleNotFoundError): mmdet_imported = False diff --git a/mmaction/models/recognizers/base.py b/mmaction/models/recognizers/base.py index 25356ef7d2..a06ec10461 100644 --- a/mmaction/models/recognizers/base.py +++ b/mmaction/models/recognizers/base.py @@ -104,6 +104,7 @@ def __init__(self, self.blending = None if train_cfg is not None and 'blending' in train_cfg: from mmcv.utils import build_from_cfg + from mmaction.datasets.builder import BLENDINGS self.blending = build_from_cfg(train_cfg['blending'], BLENDINGS) diff --git a/setup.cfg b/setup.cfg index 8625daca73..ad08ec3e4c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -17,7 +17,7 @@ SPLIT_PENALTY_AFTER_OPENING_BRACKET=800 [isort] line_length = 79 multi_line_output = 0 -known_standard_library = pkg_resources,setuptools +extra_standard_library = pkg_resources,setuptools known_first_party = mmaction known_third_party = cv2,decord,einops,joblib,matplotlib,mmcv,numpy,pandas,pytest,pytorch_sphinx_theme,scipy,seaborn,titlecase,torch,webcolors no_lines_before = STDLIB,LOCALFOLDER diff --git a/tests/test_data/test_pipelines/test_augmentations/test_pytorchvideo.py b/tests/test_data/test_pipelines/test_augmentations/test_pytorchvideo.py index f8dfda0e32..61ab7d28d1 100644 --- a/tests/test_data/test_pipelines/test_augmentations/test_pytorchvideo.py +++ b/tests/test_data/test_pipelines/test_augmentations/test_pytorchvideo.py @@ -5,6 +5,7 @@ try: import torch + from mmaction.datasets.pipelines import PytorchVideoTrans pytorchvideo_ok = False if digit_version(torch.__version__) >= digit_version('1.8.0'): diff --git a/tests/test_models/test_backbones.py b/tests/test_models/test_backbones.py index 9cc7ae2884..b9f56caf47 100644 --- a/tests/test_models/test_backbones.py +++ b/tests/test_models/test_backbones.py @@ -326,10 +326,11 @@ def test_resnet_tsm_backbone(): def test_mobilenetv2_tsm_backbone(): """Test mobilenetv2_tsm backbone.""" - from mmaction.models.backbones.resnet_tsm import TemporalShift - from mmaction.models.backbones.mobilenet_v2 import InvertedResidual from mmcv.cnn import ConvModule + from mmaction.models.backbones.mobilenet_v2 import InvertedResidual + from mmaction.models.backbones.resnet_tsm import TemporalShift + input_shape = (8, 3, 64, 64) imgs = generate_backbone_demo_inputs(input_shape) diff --git a/tests/test_models/test_common_modules/test_mobilenet_v2.py b/tests/test_models/test_common_modules/test_mobilenet_v2.py index f476ef5ee3..09baee92fa 100644 --- a/tests/test_models/test_common_modules/test_mobilenet_v2.py +++ b/tests/test_models/test_common_modules/test_mobilenet_v2.py @@ -13,6 +13,7 @@ def test_mobilenetv2_backbone(): Modified from mmclassification. 
""" from torch.nn.modules import GroupNorm + from mmaction.models.backbones.mobilenet_v2 import InvertedResidual def is_norm(modules): diff --git a/tests/test_runtime/test_eval_hook.py b/tests/test_runtime/test_eval_hook.py index 3e2c323c13..8d601f247a 100644 --- a/tests/test_runtime/test_eval_hook.py +++ b/tests/test_runtime/test_eval_hook.py @@ -16,7 +16,7 @@ # TODO import eval hooks from mmcv and delete them from mmaction2 try: - from mmcv.runner import EvalHook, DistEvalHook + from mmcv.runner import DistEvalHook, EvalHook pytest.skip( 'EvalHook and DistEvalHook are supported in MMCV', allow_module_level=True) diff --git a/tests/test_utils/test_onnx.py b/tests/test_utils/test_onnx.py index 7e8168ddaa..6324ccc341 100644 --- a/tests/test_utils/test_onnx.py +++ b/tests/test_utils/test_onnx.py @@ -3,6 +3,7 @@ import tempfile import torch.nn as nn + from tools.deployment.pytorch2onnx import _convert_batchnorm, pytorch2onnx diff --git a/tools/data/build_file_list.py b/tools/data/build_file_list.py index c8438098e9..0ba15e75d0 100644 --- a/tools/data/build_file_list.py +++ b/tools/data/build_file_list.py @@ -6,6 +6,7 @@ import random from mmcv.runner import set_random_seed + from tools.data.anno_txt2json import lines2dictlist from tools.data.parse_file_list import (parse_directory, parse_diving48_splits, parse_hmdb51_split, diff --git a/tools/data/denormalize_proposal_file.py b/tools/data/denormalize_proposal_file.py index 3d7706c0ef..1e198d032d 100644 --- a/tools/data/denormalize_proposal_file.py +++ b/tools/data/denormalize_proposal_file.py @@ -2,9 +2,8 @@ import argparse import os.path as osp -from tools.data.parse_file_list import parse_directory - from mmaction.localization import load_localize_proposal_file +from tools.data.parse_file_list import parse_directory def process_norm_proposal_file(norm_proposal_file, frame_dict): diff --git a/tools/test.py b/tools/test.py index 2fadba0119..31514498cf 100644 --- a/tools/test.py +++ b/tools/test.py @@ -178,8 +178,8 @@ def inference_tensorrt(ckpt_path, distributed, data_loader, batch_size): assert not distributed, \ 'TensorRT engine inference only supports single gpu mode.' 
import tensorrt as trt - from mmcv.tensorrt.tensorrt_utils import (torch_dtype_from_trt, - torch_device_from_trt) + from mmcv.tensorrt.tensorrt_utils import (torch_device_from_trt, + torch_dtype_from_trt) # load engine with trt.Logger() as logger, trt.Runtime(logger) as runtime: From c418313cee3435855e7c4b27c5a85a513938276a Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Wed, 9 Feb 2022 22:00:40 +0800 Subject: [PATCH 345/414] [Doc] Update instructions to install mmcv-full (#1426) --- .github/workflows/build.yml | 8 ++++---- docs/install.md | 12 +++++++----- docs_zh_CN/install.md | 36 ++++++++++++++++++------------------ 3 files changed, 29 insertions(+), 27 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index e9db239da8..052baa1a84 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -89,13 +89,13 @@ jobs: torch: [1.5.0+cu101, 1.6.0+cu101, 1.7.0+cu101] include: - torch: 1.5.0+cu101 - torch_version: torch1.5.0 + torch_version: torch1.5 torchvision: 0.6.0+cu101 - torch: 1.6.0+cu101 - torch_version: torch1.6.0 + torch_version: torch1.6 torchvision: 0.7.0+cu101 - torch: 1.7.0+cu101 - torch_version: torch1.7.0 + torch_version: torch1.7 torchvision: 0.8.1+cu101 steps: - uses: actions/checkout@v2 @@ -153,7 +153,7 @@ jobs: torch: [1.9.0+cu102] include: - torch: 1.9.0+cu102 - torch_version: torch1.9.0 + torch_version: torch1.9 torchvision: 0.10.0+cu102 steps: - uses: actions/checkout@v2 diff --git a/docs/install.md b/docs/install.md index 986df4f056..c45756a0b7 100644 --- a/docs/install.md +++ b/docs/install.md @@ -99,16 +99,18 @@ MIM can automatically install OpenMMLab projects and their requirements. Or, you can install MMAction2 manually: -a. Install mmcv, we recommend you to install the pre-build mmcv as below. +a. Install mmcv-full, we recommend you to install the pre-built package as below. ```shell -pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/{cu_version}/{torch_version}/index.html +# pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/{cu_version}/{torch_version}/index.html +pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cu102/torch1.10.0/index.html ``` -Please replace ``{cu_version}`` and ``{torch_version}`` in the url to your desired one. For example, to install the latest ``mmcv-full`` with ``CUDA 11`` and ``PyTorch 1.7.0``, use the following command: +mmcv-full is only compiled on PyTorch 1.x.0 because the compatibility usually holds between 1.x.0 and 1.x.1. If your PyTorch version is 1.x.1, you can install mmcv-full compiled with PyTorch 1.x.0 and it usually works well. -```shell -pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cu110/torch1.7.0/index.html +``` +# We can ignore the micro version of PyTorch +pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cu102/torch1.10/index.html ``` See [here](https://github.com/open-mmlab/mmcv#installation) for different versions of MMCV compatible to different PyTorch and CUDA versions. 
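As a worked instance of the version-matching rule above: the find-links URL can be derived from the local PyTorch build by dropping the micro version. A minimal sketch (an editorial illustration, not part of the docs being patched), assuming PyTorch is already installed and that CUDA builds carry a `+cuXXX` suffix in `torch.__version__`:

```python
import torch  # e.g. torch.__version__ == '1.10.1+cu102'

version, _, build = torch.__version__.partition('+')
torch_tag = 'torch' + '.'.join(version.split('.')[:2])  # '1.10.1' -> 'torch1.10'
cu_tag = build if build.startswith('cu') else 'cpu'  # CPU-only wheels otherwise
print('pip install mmcv-full -f '
      f'https://download.openmmlab.com/mmcv/dist/{cu_tag}/{torch_tag}/index.html')
```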
diff --git a/docs_zh_CN/install.md b/docs_zh_CN/install.md index 154f760c7b..69d88db831 100644 --- a/docs_zh_CN/install.md +++ b/docs_zh_CN/install.md @@ -4,14 +4,15 @@ -- [安装依赖包](#安装依赖包) -- [准备环境](#准备环境) -- [MMAction2 的安装步骤](#MMAction2-的安装步骤) -- [CPU 环境下的安装步骤](#CPU-环境下的安装步骤) -- [利用 Docker 镜像安装 MMAction2](#利用-Docker-镜像安装-MMAction2) -- [源码安装 MMAction2](#源码安装-MMAction2) -- [在多个 MMAction2 版本下进行开发](#在多个-MMAction2-版本下进行开发) -- [安装验证](#安装验证) +- [安装](#安装) + - [安装依赖包](#安装依赖包) + - [准备环境](#准备环境) + - [MMAction2 的安装步骤](#mmaction2-的安装步骤) + - [CPU 环境下的安装步骤](#cpu-环境下的安装步骤) + - [利用 Docker 镜像安装 MMAction2](#利用-docker-镜像安装-mmaction2) + - [源码安装 MMAction2](#源码安装-mmaction2) + - [在多个 MMAction2 版本下进行开发](#在多个-mmaction2-版本下进行开发) + - [安装验证](#安装验证) @@ -30,10 +31,8 @@ - [PyTurboJPEG](https://github.com/lilohuang/PyTurboJPEG) (可选项):`pip install PyTurboJPEG`。 - [denseflow](https://github.com/open-mmlab/denseflow) (可选项):可参考 [这里](https://github.com/innerlee/setup) 获取简便安装步骤。 - [moviepy](https://zulko.github.io/moviepy/) (可选项):`pip install moviepy`. 官方安装步骤可参考 [这里](https://zulko.github.io/moviepy/install.html)。**特别地**,如果安装过程碰到 [这个问题](https://github.com/Zulko/moviepy/issues/693),可参考: - 1. 对于 Windows 用户, [ImageMagick](https://www.imagemagick.org/script/index.php) 将不会被 MoviePy 自动检测到, - 用户需要对 `moviepy/config_defaults.py` 文件进行修改,以提供 ImageMagick 的二进制文件(即,`magick`)的路径,如 `IMAGEMAGICK_BINARY = "C:\\Program Files\\ImageMagick_VERSION\\magick.exe"` - 2. 对于 Linux 用户, 如果 [ImageMagick](https://www.imagemagick.org/script/index.php) 没有被 `moviepy` 检测到,用于需要对 `/etc/ImageMagick-6/policy.xml` 文件进行修改,把文件中的 - `` 代码行修改为 ``。 + 1. 对于 Windows 用户, [ImageMagick](https://www.imagemagick.org/script/index.php) 将不会被 MoviePy 自动检测到,用户需要对 `moviepy/config_defaults.py` 文件进行修改,以提供 ImageMagick 的二进制文件(即,`magick`)的路径,如 `IMAGEMAGICK_BINARY = "C:\\Program Files\\ImageMagick_VERSION\\magick.exe"` + 2. 对于 Linux 用户, 如果 [ImageMagick](https://www.imagemagick.org/script/index.php) 没有被 `moviepy` 检测到,用于需要对 `/etc/ImageMagick-6/policy.xml` 文件进行修改,把文件中的 `` 代码行修改为 ``。 - [Pillow-SIMD](https://docs.fast.ai/performance.html#pillow-simd) (可选项):可使用如下脚本进行安装: ```shell @@ -94,17 +93,18 @@ MIM 可以自动安装 OpenMMLab 项目及其依赖。 或者,用户也可以通过以下步骤手动安装 MMAction2。 -a. 安装 mmcv。MMAction2 推荐用户使用如下的命令安装预编译好的 mmcv。 +a. 
安装 mmcv-full,我们推荐您安装以下预构建包: ```shell -pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/{cu_version}/{torch_version}/index.html +# pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/{cu_version}/{torch_version}/index.html +pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cu102/torch1.10.0/index.html ``` -其中,命令里 url 的 ``{cu_version}`` 和 ``{torch_version}`` 变量需由用户进行指定。 -例如,如果想要安装 ``CUDA 11`` 和 ``PyTorch 1.7.0`` 下的最新版 ``mmcv-full``,可使用以下命令: +PyTorch 在 1.x.0 和 1.x.1 之间通常是兼容的,故 mmcv-full 只提供 1.x.0 的编译包。如果你的 PyTorch 版本是 1.x.1,你可以放心地安装在 1.x.0 版本编译的 mmcv-full。 -```shell -pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cu110/torch1.7.0/index.html +``` +# 我们可以忽略 PyTorch 的小版本号 +pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cu102/torch1.10/index.html ``` 可查阅 [这里](https://github.com/open-mmlab/mmcv#installation) 以参考不同版本的 MMCV 所兼容的 PyTorch 和 CUDA 版本。 From 93e001f4898eccf45103c3eb2ee6d01f8220aa67 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Fri, 11 Feb 2022 15:46:39 +0800 Subject: [PATCH 346/414] [Fix] Set test_mode for AVA configs (#1432) --- ..._acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py | 3 ++- ...st_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.py | 3 ++- ...wfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py | 3 ++- .../ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py | 3 ++- ...inetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py | 3 ++- .../ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py | 3 ++- ...wfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py | 3 ++- ...amma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py | 3 ++- ...l_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py | 3 ++- .../ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb.py | 2 +- .../ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py | 3 ++- ...inetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py | 3 ++- .../slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb.py | 3 ++- .../slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb.py | 2 +- .../slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb.py | 2 +- .../slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb.py | 3 ++- ..._avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py | 3 ++- ..._max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py | 3 ++- ...b_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py | 3 ++- configs/detection/lfb/lfb_slowonly_r50_ava_infer.py | 3 ++- 20 files changed, 37 insertions(+), 20 deletions(-) diff --git a/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py b/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py index 442165082f..d42ef11efa 100644 --- a/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py +++ b/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py @@ -101,7 +101,8 @@ ] # The testing is w/o. 
any cropping / flipping val_pipeline = [ - dict(type='SampleAVAFrames', clip_len=32, frame_interval=2), + dict( + type='SampleAVAFrames', clip_len=32, frame_interval=2, test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.py b/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.py index d199598628..4d069cbb2c 100644 --- a/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.py +++ b/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.py @@ -101,7 +101,8 @@ ] # The testing is w/o. any cropping / flipping val_pipeline = [ - dict(type='SampleAVAFrames', clip_len=32, frame_interval=2), + dict( + type='SampleAVAFrames', clip_len=32, frame_interval=2, test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py b/configs/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py index 27b5637276..a180bb9173 100644 --- a/configs/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py +++ b/configs/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py @@ -101,7 +101,8 @@ ] # The testing is w/o. any cropping / flipping val_pipeline = [ - dict(type='SampleAVAFrames', clip_len=32, frame_interval=2), + dict( + type='SampleAVAFrames', clip_len=32, frame_interval=2, test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py b/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py index 3f1fadc720..f649374a0e 100644 --- a/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py +++ b/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py @@ -100,7 +100,8 @@ ] # The testing is w/o. any cropping / flipping val_pipeline = [ - dict(type='SampleAVAFrames', clip_len=32, frame_interval=2), + dict( + type='SampleAVAFrames', clip_len=32, frame_interval=2, test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py b/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py index 713136ca3e..413065cb84 100644 --- a/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py +++ b/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py @@ -106,7 +106,8 @@ ] # The testing is w/o. 
any cropping / flipping val_pipeline = [ - dict(type='SampleAVAFrames', clip_len=32, frame_interval=2), + dict( + type='SampleAVAFrames', clip_len=32, frame_interval=2, test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py b/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py index 89e83a0b8b..7c3826d8bb 100644 --- a/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py +++ b/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py @@ -101,7 +101,8 @@ ] # The testing is w/o. any cropping / flipping val_pipeline = [ - dict(type='SampleAVAFrames', clip_len=32, frame_interval=2), + dict( + type='SampleAVAFrames', clip_len=32, frame_interval=2, test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py b/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py index 1b02c1a205..9fa024f296 100644 --- a/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py +++ b/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py @@ -99,7 +99,8 @@ ] # The testing is w/o. any cropping / flipping val_pipeline = [ - dict(type='SampleAVAFrames', clip_len=32, frame_interval=2), + dict( + type='SampleAVAFrames', clip_len=32, frame_interval=2, test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py b/configs/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py index 5c167e9bee..49641d1f0d 100644 --- a/configs/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py +++ b/configs/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py @@ -100,7 +100,8 @@ ] # The testing is w/o. any cropping / flipping val_pipeline = [ - dict(type='SampleAVAFrames', clip_len=32, frame_interval=2), + dict( + type='SampleAVAFrames', clip_len=32, frame_interval=2, test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py b/configs/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py index 4bea67b696..a4979d9ba1 100644 --- a/configs/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py +++ b/configs/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py @@ -100,7 +100,8 @@ ] # The testing is w/o. 
any cropping / flipping val_pipeline = [ - dict(type='SampleAVAFrames', clip_len=32, frame_interval=2), + dict( + type='SampleAVAFrames', clip_len=32, frame_interval=2, test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb.py b/configs/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb.py index 4967ea3679..ecc89f7ab0 100644 --- a/configs/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb.py +++ b/configs/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb.py @@ -84,7 +84,7 @@ ] # The testing is w/o. any cropping / flipping val_pipeline = [ - dict(type='SampleAVAFrames', clip_len=8, frame_interval=8), + dict(type='SampleAVAFrames', clip_len=8, frame_interval=8, test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py b/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py index d42c6b67c0..54df99e59c 100644 --- a/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py +++ b/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py @@ -84,7 +84,8 @@ ] # The testing is w/o. any cropping / flipping val_pipeline = [ - dict(type='SampleAVAFrames', clip_len=4, frame_interval=16), + dict( + type='SampleAVAFrames', clip_len=4, frame_interval=16, test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py b/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py index 0e6ff25105..30d9ba82dd 100644 --- a/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py +++ b/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py @@ -91,7 +91,8 @@ ] # The testing is w/o. any cropping / flipping val_pipeline = [ - dict(type='SampleAVAFrames', clip_len=4, frame_interval=16), + dict( + type='SampleAVAFrames', clip_len=4, frame_interval=16, test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb.py b/configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb.py index c18273bbd4..e0a055108e 100644 --- a/configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb.py +++ b/configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb.py @@ -42,7 +42,8 @@ ] # The testing is w/o. 
any cropping / flipping val_pipeline = [ - dict(type='SampleAVAFrames', clip_len=4, frame_interval=16), + dict( + type='SampleAVAFrames', clip_len=4, frame_interval=16, test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb.py b/configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb.py index bd05e864cc..105b832045 100644 --- a/configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb.py +++ b/configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb.py @@ -42,7 +42,7 @@ ] # The testing is w/o. any cropping / flipping val_pipeline = [ - dict(type='SampleAVAFrames', clip_len=8, frame_interval=8), + dict(type='SampleAVAFrames', clip_len=8, frame_interval=8, test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb.py b/configs/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb.py index 3c5adc3e77..23f3aaf5db 100644 --- a/configs/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb.py +++ b/configs/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb.py @@ -84,7 +84,7 @@ ] # The testing is w/o. any cropping / flipping val_pipeline = [ - dict(type='SampleAVAFrames', clip_len=8, frame_interval=8), + dict(type='SampleAVAFrames', clip_len=8, frame_interval=8, test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb.py b/configs/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb.py index 4aa7e72ef6..067e174559 100644 --- a/configs/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb.py +++ b/configs/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb.py @@ -84,7 +84,8 @@ ] # The testing is w/o. any cropping / flipping val_pipeline = [ - dict(type='SampleAVAFrames', clip_len=4, frame_interval=16), + dict( + type='SampleAVAFrames', clip_len=4, frame_interval=16, test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py b/configs/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py index 09f5ba43c5..6ba6a8fc07 100644 --- a/configs/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py +++ b/configs/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py @@ -63,7 +63,8 @@ ] # The testing is w/o. 
any cropping / flipping val_pipeline = [ - dict(type='SampleAVAFrames', clip_len=4, frame_interval=16), + dict( + type='SampleAVAFrames', clip_len=4, frame_interval=16, test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py b/configs/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py index 8e7434c2fe..6c4dc19d0a 100644 --- a/configs/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py +++ b/configs/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py @@ -63,7 +63,8 @@ ] # The testing is w/o. any cropping / flipping val_pipeline = [ - dict(type='SampleAVAFrames', clip_len=4, frame_interval=16), + dict( + type='SampleAVAFrames', clip_len=4, frame_interval=16, test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py b/configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py index f2d11ff3a5..bdd90ce6e0 100644 --- a/configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py +++ b/configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py @@ -73,7 +73,8 @@ ] # The testing is w/o. any cropping / flipping val_pipeline = [ - dict(type='SampleAVAFrames', clip_len=4, frame_interval=16), + dict( + type='SampleAVAFrames', clip_len=4, frame_interval=16, test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='Normalize', **img_norm_cfg), diff --git a/configs/detection/lfb/lfb_slowonly_r50_ava_infer.py b/configs/detection/lfb/lfb_slowonly_r50_ava_infer.py index f98d7bd67d..568f0765bd 100644 --- a/configs/detection/lfb/lfb_slowonly_r50_ava_infer.py +++ b/configs/detection/lfb/lfb_slowonly_r50_ava_infer.py @@ -32,7 +32,8 @@ mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) infer_pipeline = [ - dict(type='SampleAVAFrames', clip_len=4, frame_interval=16), + dict( + type='SampleAVAFrames', clip_len=4, frame_interval=16, test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='Normalize', **img_norm_cfg), From f577456d4d19bd715256e8e2eb9b24a905808e4e Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Tue, 15 Feb 2022 14:49:25 +0800 Subject: [PATCH 347/414] [Docs] Add shortcut (#1433) * add shortcut * add shortcut * add shortcut * Update README.md * Update README_zh-CN.md Co-authored-by: Haodong Duan --- README.md | 8 +++++++- README_zh-CN.md | 9 ++++++++- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 378b859d91..7dd18f13be 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,13 @@ -
     
    + + [📘Documentation](https://mmaction2.readthedocs.io/en/latest/) | + [🛠️Installation](https://mmaction2.readthedocs.io/en/latest/install.html) | + [👀Model Zoo](https://mmaction2.readthedocs.io/en/latest/modelzoo.html) | + [🆕Update News](https://mmaction2.readthedocs.io/en/latest/changelog.html) | + [🚀Ongoing Projects](https://github.com/open-mmlab/mmaction2/projects) | + [🤔Reporting Issues](https://github.com/open-mmlab/mmaction2/issues/new/choose) ## Introduction diff --git a/README_zh-CN.md b/README_zh-CN.md index 9dfa0455ab..687284345e 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -16,7 +16,14 @@ -
     
    + + [📘文档](https://mmaction2.readthedocs.io/en/latest/) | + [🛠️安装指南](https://mmaction2.readthedocs.io/en/latest/install.html) | + [👀模型库](https://mmaction2.readthedocs.io/en/latest/modelzoo.html) | + [🆕更新](https://mmaction2.readthedocs.io/en/latest/changelog.html) | + [🚀进行中项目](https://github.com/open-mmlab/mmaction2/projects) | + [🤔问题反馈](https://github.com/open-mmlab/mmaction2/issues/new/choose) + ## 简介 From 32f56fd97098dd3002638e87ce596ddc2c76ac42 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Tue, 15 Feb 2022 16:23:46 +0800 Subject: [PATCH 348/414] [Pre-commit] Check copyright (#1447) --- .pre-commit-config.yaml | 12 ++++++------ mmaction/core/evaluation/ava_utils.py | 1 + 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 490c1a3430..a765292df3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -40,9 +40,9 @@ repos: hooks: - id: codespell args: ["--skip", "*.ipynb,tools/data/hvu/label_map.json", "-L", "te,nd,thre,Gool,gool"] - # - repo: https://github.com/open-mmlab/pre-commit-hooks - # rev: v0.1.0 # Use the ref you want to point at - # hooks: - # - id: check-algo-readme - # - id: check-copyright - # args: ["mmaction", "tools", "tests"] # these directories will be checked + - repo: https://github.com/open-mmlab/pre-commit-hooks + rev: v0.2.0 # Use the ref you want to point at + hooks: + - id: check-algo-readme + - id: check-copyright + args: ["mmaction"] # these directories will be checked diff --git a/mmaction/core/evaluation/ava_utils.py b/mmaction/core/evaluation/ava_utils.py index 2075d4d57a..ab11669b64 100644 --- a/mmaction/core/evaluation/ava_utils.py +++ b/mmaction/core/evaluation/ava_utils.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. # This piece of code is directly adapted from ActivityNet official repo # https://github.com/activitynet/ActivityNet/blob/master/ # Evaluation/get_ava_performance.py. Some unused codes are removed. From 982216cff43456637185c35de5ce3b8da12f4950 Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Tue, 15 Feb 2022 16:48:21 +0800 Subject: [PATCH 349/414] [Docs] Model zoo update (#1439) * master * master 0721 * add README * 1231 bump_version * 0207 slowfast steplr * 0207 * 0214 modify readme * 0214 modify readme --- README.md | 1 + README_zh-CN.md | 1 + 2 files changed, 2 insertions(+) diff --git a/README.md b/README.md index 7dd18f13be..869be9c1a4 100644 --- a/README.md +++ b/README.md @@ -154,6 +154,7 @@ A Colab tutorial is also provided. 
You may preview the notebook [here](demo/mmac ST-GCN (AAAI'2018) + 2s-AGCN (CVPR'2019) PoseC3D (ArXiv'2021) diff --git a/README_zh-CN.md b/README_zh-CN.md index 687284345e..042982653b 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -153,6 +153,7 @@ MMAction2 也提供了相应的中文 Colab 教程,可以点击 [这里](https ST-GCN (AAAI'2018) + 2s-AGCN (CVPR'2019) PoseC3D (ArXiv'2021) From 69a12b47dd56e37e12d2ea7f842d216835cce717 Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Wed, 16 Feb 2022 14:11:59 +0800 Subject: [PATCH 350/414] [ModelZoo] Slowfast steplr (#1421) * master * master 0721 * add README * 1231 bump_version * 0207 slowfast steplr * 0207 slowfast steplr * 0207 slowfast steplr * 0208 modify readme * 0211 add prebn steplr config * 0211 add prebn steplr config * 0211 add prebn steplr config * 0211 add prebn steplr config * 0211 add prebn steplr config * 0211 add prebn steplr config * 0214 add prebn steplr result * update links Co-authored-by: Haodong Duan --- configs/recognition/slowfast/README.md | 2 + configs/recognition/slowfast/README_zh-CN.md | 2 + configs/recognition/slowfast/metafile.yml | 46 +++++++++++++++++++ ...n_r50_8x8x1_256e_kinetics400_rgb_steplr.py | 15 ++++++ ...t_r50_8x8x1_256e_kinetics400_rgb_steplr.py | 13 ++++++ .../models/backbones/resnet3d_slowfast.py | 10 ++-- 6 files changed, 84 insertions(+), 4 deletions(-) create mode 100644 configs/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.py create mode 100644 configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.py diff --git a/configs/recognition/slowfast/README.md b/configs/recognition/slowfast/README.md index e53273f778..476f681077 100644 --- a/configs/recognition/slowfast/README.md +++ b/configs/recognition/slowfast/README.md @@ -26,6 +26,8 @@ We present SlowFast networks for video recognition. 
Our model involves (i) a Slo |[slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 320|8x2| ResNet50|None |76.0|92.54|1.6 ((32+4)x10x3 frames)|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_256e_kinetics400_rgb_20210722-04e43ed4.pth)| [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log.json)| |[slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 320|8x2| ResNet50|None |76.34|92.67|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb_20210722-bb725050.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log.json)| |[slowfast_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |short-side 320|8x3| ResNet50 |None|76.94|92.8|1.3 ((32+8)x10x3 frames)|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/slowfast_r50_8x8x1_256e_kinetics400_rgb_20200716-73547d2b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log.json)| +|[slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.py) |short-side 320|8x4| ResNet50 |None|76.34|92.61||9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr-43988bac.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.json)| +|[slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr](/configs/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.py) |short-side 320|8x4| ResNet50 |None|76.58|92.85||9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr-28474e54.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.json)|
|[slowfast_r101_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 256|8x1| ResNet101 + ResNet50 |None|76.69|93.07||16628| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/slowfast_r101_4x16x1_256e_kinetics400_rgb_20210218-d8b58813.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log.json)| |[slowfast_r101_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb.py) |short-side 256|8x4| ResNet101 |None|77.90|93.51||25994| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/slowfast_r101_8x8x1_256e_kinetics400_rgb_20210218-0dd54025.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log.json)| |[slowfast_r152_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r152_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 256|8x1| ResNet152 + ResNet50 |None|77.13|93.20||10077| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/slowfast_r152_4x16x1_256e_kinetics400_rgb_20210122-bdeb6b87.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log.json)| diff --git a/configs/recognition/slowfast/README_zh-CN.md b/configs/recognition/slowfast/README_zh-CN.md index 7605871d2d..a01a5cb59f 100644 --- a/configs/recognition/slowfast/README_zh-CN.md +++ b/configs/recognition/slowfast/README_zh-CN.md @@ -25,6 +25,8 @@ |[slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) |短边320|8x2| ResNet50|None |76.0|92.54|1.6 ((32+4)x10x3 frames)|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_256e_kinetics400_rgb_20210722-04e43ed4.pth)| [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log.json)| |[slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb.py) |短边320|8x2| ResNet50|None |76.34|92.67|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb_20210722-bb725050.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log.json)| 
|[slowfast_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |短边320|8x3| ResNet50 |None|76.94|92.8|1.3 ((32+8)x10x3 frames)|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/slowfast_r50_8x8x1_256e_kinetics400_rgb_20200716-73547d2b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log.json)| +|[slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.py) |短边320|8x4| ResNet50 |None|76.34|92.61|1.3 ((32+8)x10x3 frames)|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr-43988bac.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.json)| +|[slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr](/configs/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.py) |短边320|8x4| ResNet50 |None|76.58|92.85|1.3 ((32+8)x10x3 frames)|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr-28474e54.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.json)| |[slowfast_r101_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py) |短边256|8x1| ResNet101 + ResNet50 |None|76.69|93.07||16628| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/slowfast_r101_4x16x1_256e_kinetics400_rgb_20210218-d8b58813.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log.json)| |[slowfast_r101_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb.py) |短边256|8x4| ResNet101 |None|77.90|93.51||25994| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/slowfast_r101_8x8x1_256e_kinetics400_rgb_20210218-0dd54025.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log.json)| |[slowfast_r152_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r152_r50_4x16x1_256e_kinetics400_rgb.py) |短边256|8x1| ResNet152 + ResNet50 |None|77.13|93.20||10077|
[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/slowfast_r152_4x16x1_256e_kinetics400_rgb_20210122-bdeb6b87.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log.json)| diff --git a/configs/recognition/slowfast/metafile.yml b/configs/recognition/slowfast/metafile.yml index e9dc05a94c..0345aeedf8 100644 --- a/configs/recognition/slowfast/metafile.yml +++ b/configs/recognition/slowfast/metafile.yml @@ -97,6 +97,52 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log.json Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/slowfast_r50_8x8x1_256e_kinetics400_rgb_20200716-73547d2b.pth +- Config: configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.py + In Collection: SlowFast + Metadata: + Architecture: ResNet50 + Batch Size: 8 + Epochs: 256 + FLOPs: 66222034944 + Parameters: 34565560 + Pretrained: None + Resolution: short-side 320 + Training Data: Kinetics-400 + Training Resources: 32 GPUs + Modality: RGB + Name: slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr + Results: + - Dataset: Kinetics-400 + Metrics: + Top 1 Accuracy: 76.34 + Top 5 Accuracy: 92.61 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr-43988bac.pth +- Config: configs/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.py + In Collection: SlowFast + Metadata: + Architecture: ResNet50 + Batch Size: 8 + Epochs: 256 + FLOPs: 66222034944 + Parameters: 34565560 + Pretrained: None + Resolution: short-side 320 + Training Data: Kinetics-400 + Training Resources: 32 GPUs + Modality: RGB + Name: slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr + Results: + - Dataset: Kinetics-400 + Metrics: + Top 1 Accuracy: 76.58 + Top 5 Accuracy: 92.85 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr-28474e54.pth - Config: configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py In Collection: SlowFast Metadata: diff --git a/configs/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.py 
b/configs/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.py new file mode 100644 index 0000000000..392990c7fd --- /dev/null +++ b/configs/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.py @@ -0,0 +1,15 @@ +_base_ = ['./slowfast_r50_8x8x1_256e_kinetics400_rgb.py'] + +model = dict(backbone=dict(slow_pathway=dict(lateral_norm=True))) + +lr_config = dict( + policy='step', + min_lr=0, + warmup='linear', + warmup_by_epoch=True, + warmup_iters=34, + step=[94, 154, 196]) + +precise_bn = dict(num_iters=200, interval=5) + +work_dir = './work_dirs/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr' diff --git a/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.py b/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.py new file mode 100644 index 0000000000..284e107050 --- /dev/null +++ b/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.py @@ -0,0 +1,13 @@ +_base_ = ['./slowfast_r50_8x8x1_256e_kinetics400_rgb.py'] + +model = dict(backbone=dict(slow_pathway=dict(lateral_norm=True))) + +lr_config = dict( + policy='step', + min_lr=0, + warmup='linear', + warmup_by_epoch=True, + warmup_iters=34, + step=[94, 154, 196]) + +work_dir = './work_dirs/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr' diff --git a/mmaction/models/backbones/resnet3d_slowfast.py b/mmaction/models/backbones/resnet3d_slowfast.py index 0b70f4ac19..31da6fde07 100644 --- a/mmaction/models/backbones/resnet3d_slowfast.py +++ b/mmaction/models/backbones/resnet3d_slowfast.py @@ -39,11 +39,13 @@ class ResNet3dPathway(ResNet3d): def __init__(self, *args, lateral=False, + lateral_norm=False, speed_ratio=8, channel_ratio=8, fusion_kernel=5, **kwargs): self.lateral = lateral + self.lateral_norm = lateral_norm self.speed_ratio = speed_ratio self.channel_ratio = channel_ratio self.fusion_kernel = fusion_kernel @@ -61,8 +63,8 @@ def __init__(self, padding=((fusion_kernel - 1) // 2, 0, 0), bias=False, conv_cfg=self.conv_cfg, - norm_cfg=None, - act_cfg=None) + norm_cfg=self.norm_cfg if self.lateral_norm else None, + act_cfg=self.act_cfg if self.lateral_norm else None) self.lateral_connections = [] for i in range(len(self.stage_blocks)): @@ -82,8 +84,8 @@ def __init__(self, padding=((fusion_kernel - 1) // 2, 0, 0), bias=False, conv_cfg=self.conv_cfg, - norm_cfg=None, - act_cfg=None)) + norm_cfg=self.norm_cfg if self.lateral_norm else None, + act_cfg=self.act_cfg if self.lateral_norm else None)) self.lateral_connections.append(lateral_name) def make_res_layer(self, From a94382c67c58ec9aa161a6181ea148edcc3f89ef Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Fri, 18 Feb 2022 13:26:05 +0800 Subject: [PATCH 351/414] [CI] Support Windows CI (#1448) * windows CI * update * update * update CI * fix windows CI * fix CI * fix CI * update * update * update dependencies * update --- .github/workflows/build.yml | 66 ++++++++++++++++--- mmaction/core/evaluation/accuracy.py | 4 ++ requirements.txt | 1 - requirements/build.txt | 7 +- requirements/optional.txt | 2 - requirements/runtime.txt | 7 -- setup.py | 3 +- .../test_loadings/test_decode.py | 58 ++++++++-------- tests/test_metrics/test_accuracy.py | 4 +- tests/test_models/test_localizers/test_bmn.py | 4 ++ tests/test_models/test_localizers/test_pem.py | 4 ++ tests/test_models/test_localizers/test_ssn.py | 3 + tests/test_models/test_localizers/test_tem.py | 4 ++ 13 files changed, 114 insertions(+), 53 deletions(-) delete mode 100644 requirements/runtime.txt diff --git 
a/.github/workflows/build.yml b/.github/workflows/build.yml index 052baa1a84..958f5c2f88 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -2,18 +2,24 @@ name: build on: push: - paths: - - '!demo/**' - - '!docker/**' - - '!tools/**' + paths-ignore: + - ".github/**.md" + - "demo/**" + - "docker/**" + - "tools/**" + - "README.md" + - "README_zh-CN.md" pull_request: - paths: - - '!demo/**' - - '!docker/**' - - '!tools/**' - - '!docs/**' - - '!docs_zh_CN/**' + paths-ignore: + - ".github/**.md" + - "demo/**" + - "docker/**" + - "docs/**" + - "docs_zh-CN/**" + - "tools/**" + - "README.md" + - "README_zh-CN.md" concurrency: group: ${{ github.workflow }}-${{ github.ref }} @@ -192,3 +198,43 @@ jobs: coverage run --branch --source mmaction -m pytest tests/ coverage xml coverage report -m + + test_windows: + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [windows-2022] + python: [3.8] + platform: [cpu] + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + - name: Upgrade pip + run: pip install pip --upgrade --user + - name: Install librosa and soundfile + run: python -m pip install librosa soundfile + - name: Install lmdb + run: python -m pip install lmdb + - name: Install PyTorch + # As a complement to Linux CI, we test on PyTorch LTS version + run: pip install torch==1.8.2+${{ matrix.platform }} torchvision==0.9.2+${{ matrix.platform }} -f https://download.pytorch.org/whl/lts/1.8/torch_lts.html + - name: Install MMCV + run: pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cpu/torch1.8/index.html --only-binary mmcv-full + - name: Install mmaction dependencies + run: | + python -V + python -m pip install -q git+https://github.com/open-mmlab/mmdetection/ + python -m pip install -q git+https://github.com/open-mmlab/mmclassification/ + python -m pip install -r requirements.txt + python -c 'import mmcv; print(mmcv.__version__)' + - name: Install PytorchVideo + run: python -m pip install pytorchvideo + - name: Show pip list + run: pip list + - name: Build and install + run: pip install -e . 
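The Windows job above deliberately avoids compiling anything: it pins the PyTorch 1.8.2 LTS CPU wheels and pulls a prebuilt `mmcv-full` binary from the `cpu/torch1.8` index. A small post-install sanity check in the spirit of the workflow's own `python -c 'import mmcv; ...'` step; a minimal sketch, assuming the versions pinned above:

```python
# Sanity-check the pinned CPU-only environment from the Windows CI job.
import mmcv
import torch

assert torch.__version__.startswith('1.8.2+cpu'), torch.__version__
assert not torch.cuda.is_available()  # the windows-2022 runner has no GPU
print('mmcv', mmcv.__version__)  # prebuilt wheel, no local compilation
```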
+ - name: Run unittests + run: coverage run --branch --source mmaction -m pytest tests -sv diff --git a/mmaction/core/evaluation/accuracy.py b/mmaction/core/evaluation/accuracy.py index 7193a7b8c1..08cb4b49b9 100644 --- a/mmaction/core/evaluation/accuracy.py +++ b/mmaction/core/evaluation/accuracy.py @@ -22,6 +22,8 @@ def confusion_matrix(y_pred, y_real, normalize=None): if isinstance(y_pred, list): y_pred = np.array(y_pred) + if y_pred.dtype == np.int32: + y_pred = y_pred.astype(np.int64) if not isinstance(y_pred, np.ndarray): raise TypeError( f'y_pred must be list or np.ndarray, but got {type(y_pred)}') @@ -31,6 +33,8 @@ if isinstance(y_real, list): y_real = np.array(y_real) + if y_real.dtype == np.int32: + y_real = y_real.astype(np.int64) if not isinstance(y_real, np.ndarray): raise TypeError( f'y_real must be list or np.ndarray, but got {type(y_real)}') diff --git a/requirements.txt b/requirements.txt index 6981bd7233..3f6205f8dc 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,3 @@ -r requirements/build.txt -r requirements/optional.txt --r requirements/runtime.txt -r requirements/tests.txt diff --git a/requirements/build.txt b/requirements/build.txt index 71888da188..9bbe532c19 100644 --- a/requirements/build.txt +++ b/requirements/build.txt @@ -1,3 +1,8 @@ -# These must be installed before building mmaction2 +decord >= 0.4.1 +einops +matplotlib numpy +opencv-contrib-python +Pillow +scipy torch>=1.3 diff --git a/requirements/optional.txt b/requirements/optional.txt index 93af2f7a08..b34450bb70 100644 --- a/requirements/optional.txt +++ b/requirements/optional.txt @@ -1,6 +1,4 @@ av -decord >= 0.4.1 -einops imgaug librosa lmdb diff --git a/requirements/runtime.txt b/requirements/runtime.txt deleted file mode 100644 index 9ab91d82ec..0000000000 --- a/requirements/runtime.txt +++ /dev/null @@ -1,7 +0,0 @@ -decord -einops -matplotlib -numpy -opencv-contrib-python -Pillow -scipy diff --git a/setup.py b/setup.py index 318f11fc79..ce4f1be67f 100644 --- a/setup.py +++ b/setup.py @@ -186,11 +186,10 @@ ], url='https://github.com/open-mmlab/mmaction2', license='Apache License 2.0', - install_requires=parse_requirements('requirements/runtime.txt'), + install_requires=parse_requirements('requirements/build.txt'), extras_require={ 'all': parse_requirements('requirements.txt'), 'tests': parse_requirements('requirements/tests.txt'), - 'build': parse_requirements('requirements/build.txt'), 'optional': parse_requirements('requirements/optional.txt'), }, zip_safe=False) diff --git a/tests/test_data/test_pipelines/test_loadings/test_decode.py b/tests/test_data/test_pipelines/test_loadings/test_decode.py index b3444beed1..aca0943d24 100644 --- a/tests/test_data/test_pipelines/test_loadings/test_decode.py +++ b/tests/test_data/test_pipelines/test_loadings/test_decode.py @@ -1,5 +1,6 @@ # Copyright (c) OpenMMLab. All rights reserved.
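The `confusion_matrix` hunk above exists because NumPy's default integer dtype is platform-dependent: arrays built from Python ints are `int64` on Linux but `int32` on Windows, which the new Windows CI exposed. A minimal sketch of the same normalization as a standalone helper (`as_int64` is illustrative, not a library function):

```python
import numpy as np


def as_int64(labels):
    """Coerce int32 label arrays to int64, mirroring the guard above."""
    labels = np.asarray(labels)
    if labels.dtype == np.int32:
        labels = labels.astype(np.int64)
    return labels


# On Windows, np.array([0, 2, 1, 3]) defaults to int32; on Linux, int64.
y_pred = as_int64([0, 2, 1, 3])
y_real = as_int64(np.array([0, 2, 2, 3], dtype=np.int32))
assert y_pred.dtype == y_real.dtype == np.int64
```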
import copy +import platform import numpy as np from mmcv.utils import assert_dict_has_keys @@ -408,34 +409,35 @@ def test_rawframe_decode(self): 240, 320) assert results['original_shape'] == (240, 320) - # test frame selector in turbojpeg decoding backend - # when start_index = 0 - inputs = copy.deepcopy(self.frame_results) - inputs['frame_inds'] = np.arange(0, self.total_frames, 5) - # since the test images start with index 1, we plus 1 to frame_inds - # in order to pass the CI - inputs['frame_inds'] = inputs['frame_inds'] + 1 - frame_selector = RawFrameDecode( - io_backend='disk', decoding_backend='turbojpeg') - results = frame_selector(inputs) - assert assert_dict_has_keys(results, target_keys) - assert np.shape(results['imgs']) == (len(inputs['frame_inds']), 240, - 320, 3) - assert results['original_shape'] == (240, 320) - - # test frame selector in turbojpeg decoding backend - inputs = copy.deepcopy(self.frame_results) - inputs['frame_inds'] = np.arange(1, self.total_frames, 5) - frame_selector = RawFrameDecode( - io_backend='disk', decoding_backend='turbojpeg') - results = frame_selector(inputs) - assert assert_dict_has_keys(results, target_keys) - assert np.shape(results['imgs']) == (len(inputs['frame_inds']), 240, - 320, 3) - assert results['original_shape'] == (240, 320) - assert repr(frame_selector) == (f'{frame_selector.__class__.__name__}(' - f'io_backend=disk, ' - f'decoding_backend=turbojpeg)') + if platform.system() != 'Windows': + # test frame selector in turbojpeg decoding backend + # when start_index = 0 + inputs = copy.deepcopy(self.frame_results) + inputs['frame_inds'] = np.arange(0, self.total_frames, 5) + # since the test images start with index 1, we plus 1 to frame_inds + # in order to pass the CI + inputs['frame_inds'] = inputs['frame_inds'] + 1 + frame_selector = RawFrameDecode( + io_backend='disk', decoding_backend='turbojpeg') + results = frame_selector(inputs) + assert assert_dict_has_keys(results, target_keys) + assert np.shape(results['imgs']) == (len(inputs['frame_inds']), + 240, 320, 3) + assert results['original_shape'] == (240, 320) + + # test frame selector in turbojpeg decoding backend + inputs = copy.deepcopy(self.frame_results) + inputs['frame_inds'] = np.arange(1, self.total_frames, 5) + frame_selector = RawFrameDecode( + io_backend='disk', decoding_backend='turbojpeg') + results = frame_selector(inputs) + assert assert_dict_has_keys(results, target_keys) + assert np.shape(results['imgs']) == (len(inputs['frame_inds']), + 240, 320, 3) + assert results['original_shape'] == (240, 320) + assert repr(frame_selector) == ( + f'{frame_selector.__class__.__name__}(io_backend=disk, ' + f'decoding_backend=turbojpeg)') def test_audio_decode_init(self): target_keys = ['audios', 'length', 'sample_rate'] diff --git a/tests/test_metrics/test_accuracy.py b/tests/test_metrics/test_accuracy.py index 6769c2e6bc..e2ac82cbda 100644 --- a/tests/test_metrics/test_accuracy.py +++ b/tests/test_metrics/test_accuracy.py @@ -320,7 +320,7 @@ def test_top_k_accurate_classes(): np.array([0.25, 0.1, 0.3, 0.35]), # 3 np.array([0.2, 0.15, 0.3, 0.35]), # 3 ] - label = np.array([3, 2, 2, 1, 3, 3]) + label = np.array([3, 2, 2, 1, 3, 3], dtype=np.int64) with pytest.raises(AssertionError): top_k_classes(scores, label, 1, mode='wrong') @@ -333,7 +333,7 @@ def test_top_k_accurate_classes(): assert results_top1 == [(3, 1.)] assert results_top3 == [(3, 1.), (2, 0.5), (1, 0.0)] - label = np.array([3, 2, 1, 1, 3, 0]) + label = np.array([3, 2, 1, 1, 3, 0], dtype=np.int64) results_top1 = 
top_k_classes(scores, label, 1, mode='inaccurate') results_top3 = top_k_classes(scores, label, 3, mode='inaccurate') assert len(results_top1) == 1 diff --git a/tests/test_models/test_localizers/test_bmn.py b/tests/test_models/test_localizers/test_bmn.py index d97efd35c6..41e4de4d49 100644 --- a/tests/test_models/test_localizers/test_bmn.py +++ b/tests/test_models/test_localizers/test_bmn.py @@ -1,11 +1,15 @@ # Copyright (c) OpenMMLab. All rights reserved. +import platform + import numpy as np +import pytest import torch from mmaction.models import build_localizer from ..base import get_localizer_cfg +@pytest.mark.skipif(platform.system() == 'Windows', reason='Windows mem limit') def test_bmn(): model_cfg = get_localizer_cfg( 'bmn/bmn_400x100_2x8_9e_activitynet_feature.py') diff --git a/tests/test_models/test_localizers/test_pem.py b/tests/test_models/test_localizers/test_pem.py index f1f4a6f97b..c0e5ff7750 100644 --- a/tests/test_models/test_localizers/test_pem.py +++ b/tests/test_models/test_localizers/test_pem.py @@ -1,10 +1,14 @@ # Copyright (c) OpenMMLab. All rights reserved. +import platform + +import pytest import torch from mmaction.models import build_localizer from ..base import get_localizer_cfg +@pytest.mark.skipif(platform.system() == 'Windows', reason='Windows mem limit') def test_pem(): model_cfg = get_localizer_cfg( 'bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py') diff --git a/tests/test_models/test_localizers/test_ssn.py b/tests/test_models/test_localizers/test_ssn.py index 1e67455a32..f1de07462a 100644 --- a/tests/test_models/test_localizers/test_ssn.py +++ b/tests/test_models/test_localizers/test_ssn.py @@ -1,5 +1,6 @@ # Copyright (c) OpenMMLab. All rights reserved. import copy +import platform import mmcv import pytest @@ -8,6 +9,7 @@ from mmaction.models import build_localizer +@pytest.mark.skipif(platform.system() == 'Windows', reason='Windows mem limit') def test_ssn_train(): train_cfg = mmcv.ConfigDict( dict( @@ -105,6 +107,7 @@ def test_ssn_train(): assert isinstance(losses, dict) +@pytest.mark.skipif(platform.system() == 'Windows', reason='Windows mem limit') def test_ssn_test(): test_cfg = mmcv.ConfigDict( dict( diff --git a/tests/test_models/test_localizers/test_tem.py b/tests/test_models/test_localizers/test_tem.py index 179362e190..ce19d385cb 100644 --- a/tests/test_models/test_localizers/test_tem.py +++ b/tests/test_models/test_localizers/test_tem.py @@ -1,10 +1,14 @@ # Copyright (c) OpenMMLab. All rights reserved. 
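The hunks above add the same guard to every localizer test (`test_bmn`, `test_pem`, `test_ssn`, and, just below, `test_tem`), since building these models exceeds the memory budget of the Windows runners. A minimal, self-contained sketch of the pattern, with a placeholder test body:

```python
import platform

import pytest


@pytest.mark.skipif(platform.system() == 'Windows', reason='Windows mem limit')
def test_memory_hungry_model():
    # Placeholder: the real tests construct full BMN/PEM/SSN/TEM models.
    assert True
```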
+import platform + +import pytest import torch from mmaction.models import build_localizer from ..base import get_localizer_cfg +@pytest.mark.skipif(platform.system() == 'Windows', reason='Windows mem limit') def test_tem(): model_cfg = get_localizer_cfg( 'bsn/bsn_tem_400x100_1x16_20e_activitynet_feature.py') From d29bfcfce79a1731a8b1d08f6c90a0dc8c747c06 Mon Sep 17 00:00:00 2001 From: Rejnald Lleshi <46654505+rlleshi@users.noreply.github.com> Date: Fri, 18 Feb 2022 11:34:59 +0100 Subject: [PATCH 352/414] [Feature] Audio Demo (#1425) * add audio demo * add audio demo * add docs * update docs * update audio demo docs * update doc Co-authored-by: Haodong Duan --- demo/README.md | 29 ++++++++++++++++++++++ demo/demo_audio.py | 51 ++++++++++++++++++++++++++++++++++++++ docs_zh_CN/demo.md | 28 +++++++++++++++++++++ mmaction/apis/inference.py | 11 +++++++- 4 files changed, 118 insertions(+), 1 deletion(-) create mode 100644 demo/demo_audio.py diff --git a/demo/README.md b/demo/README.md index 00a25e6cfe..9f0dadc810 100644 --- a/demo/README.md +++ b/demo/README.md @@ -11,6 +11,7 @@ - [SpatioTemporal Action Detection Webcam Demo](#spatiotemporal-action-detection-webcam-demo): A demo script to implement real-time spatio-temporal action detection from a web camera. - [Skeleton-based Action Recognition Demo](#skeleton-based-action-recognition-demo): A demo script to predict the skeleton-based action recognition result using a single video. - [Video Structuralize Demo](#video-structuralize-demo): A demo script to predict the skeleton-based and rgb-based action recognition and spatio-temporal action detection result using a single video. +- [Audio Demo](#audio-demo): A demo script to predict the recognition result using a single audio file. ## Modify configs through script arguments @@ -643,3 +644,31 @@ python demo/demo_video_structuralize.py --label-map-stdet tools/data/ava/label_map.txt \ --label-map tools/data/kinetics/label_map_k400.txt ``` + +## Audio Demo + +Demo script to predict the audio-based action recognition using a single audio feature. + +The script `extract_audio.py` can be used to extract audios from videos and the script `build_audio_features.py` can be used to extract the audio features. + +```shell +python demo/demo_audio.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${AUDIO_FILE} {LABEL_FILE} [--device ${DEVICE}] +``` + +Optional arguments: + +- `DEVICE`: Type of device to run the demo. Allowed values are cuda devices like `cuda:0` or `cpu`. If not specified, it will be set to `cuda:0`. + +Examples: + +Assume that you are located at `$MMACTION2` and have already downloaded the checkpoints to the directory `checkpoints/`, +or use checkpoint url from `configs/` to directly load the corresponding checkpoint, which will be automatically saved in `$HOME/.cache/torch/checkpoints`. + +1. Recognize an audio file as input by using a tsn model on cuda by default. + + ```shell + python demo/demo_audio.py \ + configs/recognition_audio/resnet/tsn_r18_64x1x1_100e_kinetics400_audio_feature.py \ + https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/tsn_r18_64x1x1_100e_kinetics400_audio_feature_20201012-bf34df6c.pth \ + audio_feature.npy label_map_k400.txt + ``` diff --git a/demo/demo_audio.py b/demo/demo_audio.py new file mode 100644 index 0000000000..bcbde94a1e --- /dev/null +++ b/demo/demo_audio.py @@ -0,0 +1,51 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+import argparse + +import torch +from mmcv import Config, DictAction + +from mmaction.apis import inference_recognizer, init_recognizer + + +def parse_args(): + parser = argparse.ArgumentParser(description='MMAction2 demo') + parser.add_argument('config', help='test config file path') + parser.add_argument('checkpoint', help='checkpoint file/url') + parser.add_argument('audio', help='audio file') + parser.add_argument('label', help='label file') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + default={}, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. For example, ' + "'--cfg-options model.backbone.depth=18 model.backbone.with_cp=True'") + parser.add_argument( + '--device', type=str, default='cuda:0', help='CPU/CUDA device option') + args = parser.parse_args() + return args + + +def main(): + args = parse_args() + device = torch.device(args.device) + cfg = Config.fromfile(args.config) + cfg.merge_from_dict(args.cfg_options) + model = init_recognizer(cfg, args.checkpoint, device=device) + + if not args.audio.endswith('.npy'): + raise NotImplementedError('Demo works on extracted audio features') + results = inference_recognizer(model, args.audio) + + labels = open(args.label).readlines() + labels = [x.strip() for x in labels] + results = [(labels[k[0]], k[1]) for k in results] + + print('Scores:') + for result in results: + print(f'{result[0]}: ', result[1]) + + +if __name__ == '__main__': + main() diff --git a/docs_zh_CN/demo.md b/docs_zh_CN/demo.md index b10709ad38..d40d991c5d 100644 --- a/docs_zh_CN/demo.md +++ b/docs_zh_CN/demo.md @@ -12,6 +12,7 @@ - [基于网络摄像头的实时时空动作检测](#基于网络摄像头的实时时空动作检测) - [基于人体姿态预测动作标签](#基于人体姿态预测动作标签) - [视频结构化预测](#视频结构化预测) + - [基于音频的动作识别](#基于音频的动作识别) ## 预测视频的动作标签 @@ -600,3 +601,30 @@ python demo/demo_video_structuralize.py --label-map-stdet tools/data/ava/label_map.txt \ --label-map tools/data/kinetics/label_map_k400.txt ``` + +## 基于音频的动作识别 + +本脚本可用于进行基于音频特征的动作识别。 + +脚本 `extract_audio.py` 可被用于从视频中提取音频,脚本 `build_audio_features.py` 可被用于基于音频文件提取音频特征。 + +```shell +python demo/demo_audio.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${AUDIO_FILE} {LABEL_FILE} [--device ${DEVICE}] +``` + +可选参数: + +- `DEVICE`: 指定脚本运行设备,支持 cuda 设备(如 `cuda:0`)或 cpu(`cpu`),默认为 `cuda:0`。 + +示例: + +以下示例假设用户的当前目录为 $MMACTION2。 + +1. 
在 GPU 上,使用 TSN 模型进行基于音频特征的动作识别。 + + ```shell + python demo/demo_audio.py \ + configs/recognition_audio/resnet/tsn_r18_64x1x1_100e_kinetics400_audio_feature.py \ + https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/tsn_r18_64x1x1_100e_kinetics400_audio_feature_20201012-bf34df6c.pth \ + audio_feature.npy label_map_k400.txt + ``` diff --git a/mmaction/apis/inference.py b/mmaction/apis/inference.py index 24f8318b0a..8aaad8f5e2 100644 --- a/mmaction/apis/inference.py +++ b/mmaction/apis/inference.py @@ -89,7 +89,10 @@ def inference_recognizer(model, video, outputs=None, as_tensor=True, **kwargs): input_flag = 'video' elif isinstance(video, str) and osp.exists(video): if osp.isfile(video): - input_flag = 'video' + if video.endswith('.npy'): + input_flag = 'audio' + else: + input_flag = 'video' if osp.isdir(video): input_flag = 'rawframes' else: @@ -157,6 +160,12 @@ def inference_recognizer(model, video, outputs=None, as_tensor=True, **kwargs): for i in range(len(test_pipeline)): if 'Decode' in test_pipeline[i]['type']: test_pipeline[i] = dict(type='RawFrameDecode') + if input_flag == 'audio': + data = dict( + audio_path=video, + total_frames=len(np.load(video)), + start_index=cfg.data.test.get('start_index', 1), + label=-1) test_pipeline = Compose(test_pipeline) data = test_pipeline(data) From b4c50d407f2e8abdfb5fb77292367bf61277319f Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Sat, 19 Feb 2022 18:18:09 +0800 Subject: [PATCH 353/414] update README (#1460) --- README.md | 1 - README_zh-CN.md | 1 - 2 files changed, 2 deletions(-) diff --git a/README.md b/README.md index 869be9c1a4..b72b6ff7ab 100644 --- a/README.md +++ b/README.md @@ -158,7 +158,6 @@ A Colab tutorial is also provided. You may preview the notebook [here](demo/mmac PoseC3D (ArXiv'2021) - diff --git a/README_zh-CN.md b/README_zh-CN.md index 042982653b..60cebc4d55 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -157,7 +157,6 @@ MMAction2 也提供了相应的中文 Colab 教程,可以点击 [这里](https PoseC3D (ArXiv'2021) - From 40f8e583ed0a4e3d226273b39ead99f4bdeeb3ad Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Sat, 19 Feb 2022 19:08:01 +0800 Subject: [PATCH 354/414] Created using Colaboratory MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- MMAction2_Tutorial.ipynb | 1460 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 1460 insertions(+) create mode 100644 MMAction2_Tutorial.ipynb diff --git a/MMAction2_Tutorial.ipynb b/MMAction2_Tutorial.ipynb new file mode 100644 index 0000000000..e8dc94f09b --- /dev/null +++ b/MMAction2_Tutorial.ipynb @@ -0,0 +1,1460 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "view-in-github", + "colab_type": "text" + }, + "source": [ + "\"Open" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "VcjSRFELVbNk" + }, + "source": [ + "# MMAction2 Tutorial\n", + "\n", + "Welcome to MMAction2! This is the official colab tutorial for using MMAction2. In this tutorial, you will learn\n", + "- Perform inference with a MMAction2 recognizer.\n", + "- Train a new recognizer with a new dataset.\n", + "\n", + "Let's start!"
+ ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "7LqHGkGEVqpm" + }, + "source": [ + "## Install MMAction2" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "Bf8PpPXtVvmg", + "outputId": "75519a17-cc0a-491f-98a1-f287b090cf82" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "nvcc: NVIDIA (R) Cuda compiler driver\n", + "Copyright (c) 2005-2020 NVIDIA Corporation\n", + "Built on Mon_Oct_12_20:09:46_PDT_2020\n", + "Cuda compilation tools, release 11.1, V11.1.105\n", + "Build cuda_11.1.TC455_06.29190527_0\n", + "gcc (Ubuntu 7.5.0-3ubuntu1~18.04) 7.5.0\n", + "Copyright (C) 2017 Free Software Foundation, Inc.\n", + "This is free software; see the source for copying conditions. There is NO\n", + "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n", + "\n" + ] + } + ], + "source": [ + "# Check nvcc version\n", + "!nvcc -V\n", + "# Check GCC version\n", + "!gcc --version" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "5PAJ4ArzV5Ry", + "outputId": "992b30c2-8281-4198-97c8-df2a287b0ae8" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Looking in links: https://download.pytorch.org/whl/torch_stable.html\n", + "Collecting torch==1.8.0+cu101\n", + " Downloading https://download.pytorch.org/whl/cu101/torch-1.8.0%2Bcu101-cp37-cp37m-linux_x86_64.whl (763.5 MB)\n", + "\u001b[K |████████████████████████████████| 763.5 MB 15 kB/s \n", + "\u001b[?25hCollecting torchvision==0.9.0+cu101\n", + " Downloading https://download.pytorch.org/whl/cu101/torchvision-0.9.0%2Bcu101-cp37-cp37m-linux_x86_64.whl (17.3 MB)\n", + "\u001b[K |████████████████████████████████| 17.3 MB 983 kB/s \n", + "\u001b[?25hCollecting torchtext==0.9.0\n", + " Downloading torchtext-0.9.0-cp37-cp37m-manylinux1_x86_64.whl (7.1 MB)\n", + "\u001b[K |████████████████████████████████| 7.1 MB 10.9 MB/s \n", + "\u001b[?25hCollecting torchaudio==0.8.0\n", + " Downloading torchaudio-0.8.0-cp37-cp37m-manylinux1_x86_64.whl (1.9 MB)\n", + "\u001b[K |████████████████████████████████| 1.9 MB 46.6 MB/s \n", + "\u001b[?25hRequirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from torch==1.8.0+cu101) (1.21.5)\n", + "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from torch==1.8.0+cu101) (3.10.0.2)\n", + "Requirement already satisfied: pillow>=4.1.1 in /usr/local/lib/python3.7/dist-packages (from torchvision==0.9.0+cu101) (7.1.2)\n", + "Requirement already satisfied: tqdm in /usr/local/lib/python3.7/dist-packages (from torchtext==0.9.0) (4.62.3)\n", + "Requirement already satisfied: requests in /usr/local/lib/python3.7/dist-packages (from torchtext==0.9.0) (2.23.0)\n", + "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (1.24.3)\n", + "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (2.10)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (2021.10.8)\n", + "Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (3.0.4)\n", + "Installing collected packages: torch, torchvision, 
torchtext, torchaudio\n", + " Attempting uninstall: torch\n", + " Found existing installation: torch 1.10.0+cu111\n", + " Uninstalling torch-1.10.0+cu111:\n", + " Successfully uninstalled torch-1.10.0+cu111\n", + " Attempting uninstall: torchvision\n", + " Found existing installation: torchvision 0.11.1+cu111\n", + " Uninstalling torchvision-0.11.1+cu111:\n", + " Successfully uninstalled torchvision-0.11.1+cu111\n", + " Attempting uninstall: torchtext\n", + " Found existing installation: torchtext 0.11.0\n", + " Uninstalling torchtext-0.11.0:\n", + " Successfully uninstalled torchtext-0.11.0\n", + " Attempting uninstall: torchaudio\n", + " Found existing installation: torchaudio 0.10.0+cu111\n", + " Uninstalling torchaudio-0.10.0+cu111:\n", + " Successfully uninstalled torchaudio-0.10.0+cu111\n", + "Successfully installed torch-1.8.0+cu101 torchaudio-0.8.0 torchtext-0.9.0 torchvision-0.9.0+cu101\n", + "Looking in links: https://download.openmmlab.com/mmcv/dist/cu101/torch1.8.0/index.html\n", + "Collecting mmcv-full\n", + " Downloading https://download.openmmlab.com/mmcv/dist/cu101/torch1.8.0/mmcv_full-1.4.5-cp37-cp37m-manylinux1_x86_64.whl (60.7 MB)\n", + "\u001b[K |████████████████████████████████| 60.7 MB 66 kB/s \n", + "\u001b[?25hCollecting addict\n", + " Downloading addict-2.4.0-py3-none-any.whl (3.8 kB)\n", + "Requirement already satisfied: Pillow in /usr/local/lib/python3.7/dist-packages (from mmcv-full) (7.1.2)\n", + "Requirement already satisfied: pyyaml in /usr/local/lib/python3.7/dist-packages (from mmcv-full) (3.13)\n", + "Collecting yapf\n", + " Downloading yapf-0.32.0-py2.py3-none-any.whl (190 kB)\n", + "\u001b[K |████████████████████████████████| 190 kB 15.6 MB/s \n", + "\u001b[?25hRequirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from mmcv-full) (1.21.5)\n", + "Requirement already satisfied: packaging in /usr/local/lib/python3.7/dist-packages (from mmcv-full) (21.3)\n", + "Requirement already satisfied: opencv-python>=3 in /usr/local/lib/python3.7/dist-packages (from mmcv-full) (4.1.2.30)\n", + "Requirement already satisfied: pyparsing!=3.0.5,>=2.0.2 in /usr/local/lib/python3.7/dist-packages (from packaging->mmcv-full) (3.0.7)\n", + "Installing collected packages: yapf, addict, mmcv-full\n", + "Successfully installed addict-2.4.0 mmcv-full-1.4.5 yapf-0.32.0\n", + "Cloning into 'mmaction2'...\n", + "remote: Enumerating objects: 15036, done.\u001b[K\n", + "remote: Counting objects: 100% (233/233), done.\u001b[K\n", + "remote: Compressing objects: 100% (192/192), done.\u001b[K\n", + "remote: Total 15036 (delta 86), reused 72 (delta 41), pack-reused 14803\u001b[K\n", + "Receiving objects: 100% (15036/15036), 49.25 MiB | 25.23 MiB/s, done.\n", + "Resolving deltas: 100% (10608/10608), done.\n", + "/content/mmaction2\n", + "Obtaining file:///content/mmaction2\n", + "Collecting decord>=0.4.1\n", + " Downloading decord-0.6.0-py3-none-manylinux2010_x86_64.whl (13.6 MB)\n", + "\u001b[K |████████████████████████████████| 13.6 MB 10.2 MB/s \n", + "\u001b[?25hCollecting einops\n", + " Downloading einops-0.4.0-py3-none-any.whl (28 kB)\n", + "Requirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.21.0) (3.2.2)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.21.0) (1.21.5)\n", + "Requirement already satisfied: opencv-contrib-python in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.21.0) (4.1.2.30)\n", + "Requirement already satisfied: Pillow in 
/usr/local/lib/python3.7/dist-packages (from mmaction2==0.21.0) (7.1.2)\n", + "Requirement already satisfied: scipy in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.21.0) (1.4.1)\n", + "Requirement already satisfied: torch>=1.3 in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.21.0) (1.8.0+cu101)\n", + "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from torch>=1.3->mmaction2==0.21.0) (3.10.0.2)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.21.0) (1.3.2)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.21.0) (2.8.2)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.21.0) (3.0.7)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.21.0) (0.11.0)\n", + "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.7/dist-packages (from python-dateutil>=2.1->matplotlib->mmaction2==0.21.0) (1.15.0)\n", + "Installing collected packages: einops, decord, mmaction2\n", + " Running setup.py develop for mmaction2\n", + "Successfully installed decord-0.6.0 einops-0.4.0 mmaction2-0.21.0\n", + "Collecting av\n", + " Downloading av-8.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (36.1 MB)\n", + "\u001b[K |████████████████████████████████| 36.1 MB 298 kB/s \n", + "\u001b[?25hRequirement already satisfied: imgaug in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 2)) (0.2.9)\n", + "Requirement already satisfied: librosa in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 3)) (0.8.1)\n", + "Requirement already satisfied: lmdb in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 4)) (0.99)\n", + "Requirement already satisfied: moviepy in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 5)) (0.2.3.5)\n", + "Collecting onnx\n", + " Downloading onnx-1.11.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl (12.8 MB)\n", + "\u001b[K |████████████████████████████████| 12.8 MB 52.3 MB/s \n", + "\u001b[?25hCollecting onnxruntime\n", + " Downloading onnxruntime-1.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (4.9 MB)\n", + "\u001b[K |████████████████████████████████| 4.9 MB 51.6 MB/s \n", + "\u001b[?25hCollecting pims\n", + " Downloading PIMS-0.5.tar.gz (85 kB)\n", + "\u001b[K |████████████████████████████████| 85 kB 5.2 MB/s \n", + "\u001b[?25hCollecting PyTurboJPEG\n", + " Downloading PyTurboJPEG-1.6.5.tar.gz (11 kB)\n", + "Collecting timm\n", + " Downloading timm-0.5.4-py3-none-any.whl (431 kB)\n", + "\u001b[K |████████████████████████████████| 431 kB 64.7 MB/s \n", + "\u001b[?25hRequirement already satisfied: Pillow in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 2)) (7.1.2)\n", + "Requirement already satisfied: numpy>=1.15.0 in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 2)) (1.21.5)\n", + "Requirement already satisfied: scikit-image>=0.11.0 in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 2)) (0.18.3)\n", + "Requirement already satisfied: imageio in /usr/local/lib/python3.7/dist-packages (from imgaug->-r 
requirements/optional.txt (line 2)) (2.4.1)\n", + "Requirement already satisfied: opencv-python in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 2)) (4.1.2.30)\n", + "Requirement already satisfied: Shapely in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 2)) (1.8.0)\n", + "Requirement already satisfied: scipy in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 2)) (1.4.1)\n", + "Requirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 2)) (3.2.2)\n", + "Requirement already satisfied: six in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 2)) (1.15.0)\n", + "Requirement already satisfied: PyWavelets>=1.1.1 in /usr/local/lib/python3.7/dist-packages (from scikit-image>=0.11.0->imgaug->-r requirements/optional.txt (line 2)) (1.2.0)\n", + "Requirement already satisfied: tifffile>=2019.7.26 in /usr/local/lib/python3.7/dist-packages (from scikit-image>=0.11.0->imgaug->-r requirements/optional.txt (line 2)) (2021.11.2)\n", + "Requirement already satisfied: networkx>=2.0 in /usr/local/lib/python3.7/dist-packages (from scikit-image>=0.11.0->imgaug->-r requirements/optional.txt (line 2)) (2.6.3)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 2)) (1.3.2)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 2)) (0.11.0)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 2)) (2.8.2)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 2)) (3.0.7)\n", + "Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 3)) (21.3)\n", + "Requirement already satisfied: numba>=0.43.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 3)) (0.51.2)\n", + "Requirement already satisfied: resampy>=0.2.2 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 3)) (0.2.2)\n", + "Requirement already satisfied: decorator>=3.0.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 3)) (4.4.2)\n", + "Requirement already satisfied: soundfile>=0.10.2 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 3)) (0.10.3.post1)\n", + "Requirement already satisfied: scikit-learn!=0.19.0,>=0.14.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 3)) (1.0.2)\n", + "Requirement already satisfied: joblib>=0.14 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 3)) (1.1.0)\n", + "Requirement already satisfied: pooch>=1.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 3)) (1.6.0)\n", + "Requirement already satisfied: audioread>=2.0.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 3)) (2.1.9)\n", + "Requirement already satisfied: setuptools in 
/usr/local/lib/python3.7/dist-packages (from numba>=0.43.0->librosa->-r requirements/optional.txt (line 3)) (57.4.0)\n", + "Requirement already satisfied: llvmlite<0.35,>=0.34.0.dev0 in /usr/local/lib/python3.7/dist-packages (from numba>=0.43.0->librosa->-r requirements/optional.txt (line 3)) (0.34.0)\n", + "Requirement already satisfied: requests>=2.19.0 in /usr/local/lib/python3.7/dist-packages (from pooch>=1.0->librosa->-r requirements/optional.txt (line 3)) (2.23.0)\n", + "Requirement already satisfied: appdirs>=1.3.0 in /usr/local/lib/python3.7/dist-packages (from pooch>=1.0->librosa->-r requirements/optional.txt (line 3)) (1.4.4)\n", + "Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests>=2.19.0->pooch>=1.0->librosa->-r requirements/optional.txt (line 3)) (3.0.4)\n", + "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests>=2.19.0->pooch>=1.0->librosa->-r requirements/optional.txt (line 3)) (1.24.3)\n", + "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests>=2.19.0->pooch>=1.0->librosa->-r requirements/optional.txt (line 3)) (2.10)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests>=2.19.0->pooch>=1.0->librosa->-r requirements/optional.txt (line 3)) (2021.10.8)\n", + "Requirement already satisfied: threadpoolctl>=2.0.0 in /usr/local/lib/python3.7/dist-packages (from scikit-learn!=0.19.0,>=0.14.0->librosa->-r requirements/optional.txt (line 3)) (3.1.0)\n", + "Requirement already satisfied: cffi>=1.0 in /usr/local/lib/python3.7/dist-packages (from soundfile>=0.10.2->librosa->-r requirements/optional.txt (line 3)) (1.15.0)\n", + "Requirement already satisfied: pycparser in /usr/local/lib/python3.7/dist-packages (from cffi>=1.0->soundfile>=0.10.2->librosa->-r requirements/optional.txt (line 3)) (2.21)\n", + "Requirement already satisfied: tqdm<5.0,>=4.11.2 in /usr/local/lib/python3.7/dist-packages (from moviepy->-r requirements/optional.txt (line 5)) (4.62.3)\n", + "Requirement already satisfied: typing-extensions>=3.6.2.1 in /usr/local/lib/python3.7/dist-packages (from onnx->-r requirements/optional.txt (line 6)) (3.10.0.2)\n", + "Requirement already satisfied: protobuf>=3.12.2 in /usr/local/lib/python3.7/dist-packages (from onnx->-r requirements/optional.txt (line 6)) (3.17.3)\n", + "Requirement already satisfied: flatbuffers in /usr/local/lib/python3.7/dist-packages (from onnxruntime->-r requirements/optional.txt (line 7)) (2.0)\n", + "Collecting slicerator>=0.9.8\n", + " Downloading slicerator-1.0.0-py3-none-any.whl (9.3 kB)\n", + "Requirement already satisfied: torch>=1.4 in /usr/local/lib/python3.7/dist-packages (from timm->-r requirements/optional.txt (line 10)) (1.8.0+cu101)\n", + "Requirement already satisfied: torchvision in /usr/local/lib/python3.7/dist-packages (from timm->-r requirements/optional.txt (line 10)) (0.9.0+cu101)\n", + "Building wheels for collected packages: pims, PyTurboJPEG\n", + " Building wheel for pims (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for pims: filename=PIMS-0.5-py3-none-any.whl size=84325 sha256=acdeb0697c66e2b9cc49a549f9a3c67a35b36642e6724eeac9795e25e6d9de47\n", + " Stored in directory: /root/.cache/pip/wheels/75/02/a9/86571c38081ba4c1832eb95430b5d588dfa15a738e2a603737\n", + " Building wheel for PyTurboJPEG (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n", + " Created wheel for PyTurboJPEG: filename=PyTurboJPEG-1.6.5-py3-none-any.whl size=12160 sha256=b5fffd01e16b4d2a1d2f4e1cd976501c1e3ea1b3872f91bf595f6c025735a4e0\n", + " Stored in directory: /root/.cache/pip/wheels/1b/6a/97/17286b24cd97dda462b5a886107f8663f1ccc7705f148b3850\n", + "Successfully built pims PyTurboJPEG\n", + "Installing collected packages: slicerator, timm, PyTurboJPEG, pims, onnxruntime, onnx, av\n", + "Successfully installed PyTurboJPEG-1.6.5 av-8.1.0 onnx-1.11.0 onnxruntime-1.10.0 pims-0.5 slicerator-1.0.0 timm-0.5.4\n" + ] + } + ], + "source": [ + "# install dependencies: (use cu101 because colab has CUDA 10.1)\n", + "!pip install -U torch==1.8.0+cu101 torchvision==0.9.0+cu101 torchtext==0.9.0 torchaudio==0.8.0 -f https://download.pytorch.org/whl/torch_stable.html\n", + "\n", + "# install mmcv-full thus we could use CUDA operators\n", + "!pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cu101/torch1.8.0/index.html\n", + "\n", + "# Install mmaction2\n", + "!rm -rf mmaction2\n", + "!git clone https://github.com/open-mmlab/mmaction2.git\n", + "%cd mmaction2\n", + "\n", + "!pip install -e .\n", + "\n", + "# Install some optional requirements\n", + "!pip install -r requirements/optional.txt" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "No_zZAFpWC-a", + "outputId": "1f5dd76e-7749-4fc3-ee97-83c5e1700f29" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "1.8.0+cu101 True\n", + "0.21.0\n", + "10.1\n", + "GCC 7.3\n" + ] + } + ], + "source": [ + "# Check Pytorch installation\n", + "import torch, torchvision\n", + "print(torch.__version__, torch.cuda.is_available())\n", + "\n", + "# Check MMAction2 installation\n", + "import mmaction\n", + "print(mmaction.__version__)\n", + "\n", + "# Check MMCV installation\n", + "from mmcv.ops import get_compiling_cuda_version, get_compiler_version\n", + "print(get_compiling_cuda_version())\n", + "print(get_compiler_version())" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "pXf7oV5DWdab" + }, + "source": [ + "## Perform inference with a MMAction2 recognizer\n", + "MMAction2 already provides high level APIs to do inference and training." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "64CW6d_AaT-Q", + "outputId": "d08bfb9b-ab1e-451b-d3b2-89023a59766b" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "--2021-07-11 12:44:00-- https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth\n", + "Resolving download.openmmlab.com (download.openmmlab.com)... 47.88.36.78\n", + "Connecting to download.openmmlab.com (download.openmmlab.com)|47.88.36.78|:443... connected.\n", + "HTTP request sent, awaiting response... 
200 OK\n", + "Length: 97579339 (93M) [application/octet-stream]\n", + "Saving to: ‘checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth’\n", + "\n", + "checkpoints/tsn_r50 100%[===================>] 93.06M 11.4MB/s in 8.1s \n", + "\n", + "2021-07-11 12:44:09 (11.4 MB/s) - ‘checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth’ saved [97579339/97579339]\n", + "\n" + ] + } + ], + "source": [ + "!mkdir checkpoints\n", + "!wget -c https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \\\n", + " -O checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "HNZB7NoSabzj", + "outputId": "b2f9bd71-1490-44d3-81c6-5037d804f0b1" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Use load_from_local loader\n" + ] + } + ], + "source": [ + "from mmaction.apis import inference_recognizer, init_recognizer\n", + "\n", + "# Choose a config and initialize the recognizer\n", + "config = 'configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py'\n", + "# Set up a checkpoint file to load\n", + "checkpoint = 'checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n", + "# Initialize the recognizer\n", + "model = init_recognizer(config, checkpoint, device='cuda:0')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "rEMsBnpHapAn" + }, + "outputs": [], + "source": [ + "# Use the recognizer to do inference\n", + "video = 'demo/demo.mp4'\n", + "label = 'tools/data/kinetics/label_map_k400.txt'\n", + "results = inference_recognizer(model, video)\n", + "\n", + "labels = open(label).readlines()\n", + "labels = [x.strip() for x in labels]\n", + "results = [(labels[k[0]], k[1]) for k in results]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "NIyJXqfWathq", + "outputId": "ca24528b-f99d-414a-fa50-456f6068b463" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "arm wrestling: 29.616438\n", + "rock scissors paper: 10.754841\n", + "shaking hands: 9.908401\n", + "clapping: 9.189913\n", + "massaging feet: 8.305307\n" + ] + } + ], + "source": [ + "# Let's show the results\n", + "for result in results:\n", + " print(f'{result[0]}: ', result[1])" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "QuZG8kZ2fJ5d" + }, + "source": [ + "## Train a recognizer on a customized dataset\n", + "\n", + "To train a new recognizer, there are usually three things to do:\n", + "1. Support a new dataset\n", + "2. Modify the config\n", + "3. Train a new recognizer" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "neEFyxChfgiJ" + }, + "source": [ + "### Support a new dataset\n", + "\n", + "In this tutorial, we give an example of converting the data into the format of existing datasets. Other methods and more advanced usages can be found in the [doc](/docs/tutorials/new_dataset.md).\n", + "\n", + "Firstly, let's download a tiny dataset obtained from [Kinetics-400](https://deepmind.com/research/open-source/open-source-datasets/kinetics/). We select 30 videos with their labels as the training set and 10 videos with their labels as the test set."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "gjsUj9JzgUlJ", + "outputId": "61c4704d-db81-4ca5-ed16-e2454dbdfe8e" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "rm: cannot remove 'kinetics400_tiny.zip*': No such file or directory\n", + "--2021-07-11 12:44:29-- https://download.openmmlab.com/mmaction/kinetics400_tiny.zip\n", + "Resolving download.openmmlab.com (download.openmmlab.com)... 47.88.36.78\n", + "Connecting to download.openmmlab.com (download.openmmlab.com)|47.88.36.78|:443... connected.\n", + "HTTP request sent, awaiting response... 200 OK\n", + "Length: 18308682 (17M) [application/zip]\n", + "Saving to: ‘kinetics400_tiny.zip’\n", + "\n", + "kinetics400_tiny.zi 100%[===================>] 17.46M 10.7MB/s in 1.6s \n", + "\n", + "2021-07-11 12:44:31 (10.7 MB/s) - ‘kinetics400_tiny.zip’ saved [18308682/18308682]\n", + "\n" + ] + } + ], + "source": [ + "# download, decompress the data\n", + "!rm kinetics400_tiny.zip*\n", + "!rm -rf kinetics400_tiny\n", + "!wget https://download.openmmlab.com/mmaction/kinetics400_tiny.zip\n", + "!unzip kinetics400_tiny.zip > /dev/null" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "AbZ-o7V6hNw4", + "outputId": "b091909c-def2-49b5-88c2-01b00802b162" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Reading package lists...\n", + "Building dependency tree...\n", + "Reading state information...\n", + "The following NEW packages will be installed:\n", + " tree\n", + "0 upgraded, 1 newly installed, 0 to remove and 39 not upgraded.\n", + "Need to get 40.7 kB of archives.\n", + "After this operation, 105 kB of additional disk space will be used.\n", + "Get:1 http://archive.ubuntu.com/ubuntu bionic/universe amd64 tree amd64 1.7.0-5 [40.7 kB]\n", + "Fetched 40.7 kB in 0s (88.7 kB/s)\n", + "Selecting previously unselected package tree.\n", + "(Reading database ... 
160815 files and directories currently installed.)\n", + "Preparing to unpack .../tree_1.7.0-5_amd64.deb ...\n", + "Unpacking tree (1.7.0-5) ...\n", + "Setting up tree (1.7.0-5) ...\n", + "Processing triggers for man-db (2.8.3-2ubuntu0.1) ...\n", + "kinetics400_tiny\n", + "├── kinetics_tiny_train_video.txt\n", + "├── kinetics_tiny_val_video.txt\n", + "├── train\n", + "│   ├── 27_CSXByd3s.mp4\n", + "│   ├── 34XczvTaRiI.mp4\n", + "│   ├── A-wiliK50Zw.mp4\n", + "│   ├── D32_1gwq35E.mp4\n", + "│   ├── D92m0HsHjcQ.mp4\n", + "│   ├── DbX8mPslRXg.mp4\n", + "│   ├── FMlSTTpN3VY.mp4\n", + "│   ├── h10B9SVE-nk.mp4\n", + "│   ├── h2YqqUhnR34.mp4\n", + "│   ├── iRuyZSKhHRg.mp4\n", + "│   ├── IyfILH9lBRo.mp4\n", + "│   ├── kFC3KY2bOP8.mp4\n", + "│   ├── LvcFDgCAXQs.mp4\n", + "│   ├── O46YA8tI530.mp4\n", + "│   ├── oMrZaozOvdQ.mp4\n", + "│   ├── oXy-e_P_cAI.mp4\n", + "│   ├── P5M-hAts7MQ.mp4\n", + "│   ├── phDqGd0NKoo.mp4\n", + "│   ├── PnOe3GZRVX8.mp4\n", + "│   ├── R8HXQkdgKWA.mp4\n", + "│   ├── RqnKtCEoEcA.mp4\n", + "│   ├── soEcZZsBmDs.mp4\n", + "│   ├── TkkZPZHbAKA.mp4\n", + "│   ├── T_TMNGzVrDk.mp4\n", + "│   ├── WaS0qwP46Us.mp4\n", + "│   ├── Wh_YPQdH1Zg.mp4\n", + "│   ├── WWP5HZJsg-o.mp4\n", + "│   ├── xGY2dP0YUjA.mp4\n", + "│   ├── yLC9CtWU5ws.mp4\n", + "│   └── ZQV4U2KQ370.mp4\n", + "└── val\n", + " ├── 0pVGiAU6XEA.mp4\n", + " ├── AQrbRSnRt8M.mp4\n", + " ├── b6Q_b7vgc7Q.mp4\n", + " ├── ddvJ6-faICE.mp4\n", + " ├── IcLztCtvhb8.mp4\n", + " ├── ik4BW3-SCts.mp4\n", + " ├── jqRrH30V0k4.mp4\n", + " ├── SU_x2LQqSLs.mp4\n", + " ├── u4Rm6srmIS8.mp4\n", + " └── y5Iu7XkTqV0.mp4\n", + "\n", + "2 directories, 42 files\n" + ] + } + ], + "source": [ + "# Check the directory structure of the tiny data\n", + "\n", + "# Install tree first\n", + "!apt-get -q install tree\n", + "!tree kinetics400_tiny" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "fTdi6dI0hY3g", + "outputId": "ffda0997-8d77-431a-d66e-2f273e80c756" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "D32_1gwq35E.mp4 0\n", + "iRuyZSKhHRg.mp4 1\n", + "oXy-e_P_cAI.mp4 0\n", + "34XczvTaRiI.mp4 1\n", + "h2YqqUhnR34.mp4 0\n", + "O46YA8tI530.mp4 0\n", + "kFC3KY2bOP8.mp4 1\n", + "WWP5HZJsg-o.mp4 1\n", + "phDqGd0NKoo.mp4 1\n", + "yLC9CtWU5ws.mp4 0\n", + "27_CSXByd3s.mp4 1\n", + "IyfILH9lBRo.mp4 1\n", + "T_TMNGzVrDk.mp4 1\n", + "TkkZPZHbAKA.mp4 0\n", + "PnOe3GZRVX8.mp4 1\n", + "soEcZZsBmDs.mp4 1\n", + "FMlSTTpN3VY.mp4 1\n", + "WaS0qwP46Us.mp4 0\n", + "A-wiliK50Zw.mp4 1\n", + "oMrZaozOvdQ.mp4 1\n", + "ZQV4U2KQ370.mp4 0\n", + "DbX8mPslRXg.mp4 1\n", + "h10B9SVE-nk.mp4 1\n", + "P5M-hAts7MQ.mp4 0\n", + "R8HXQkdgKWA.mp4 0\n", + "D92m0HsHjcQ.mp4 0\n", + "RqnKtCEoEcA.mp4 0\n", + "LvcFDgCAXQs.mp4 0\n", + "xGY2dP0YUjA.mp4 0\n", + "Wh_YPQdH1Zg.mp4 0\n" + ] + } + ], + "source": [ + "# After downloading the data, we need to check the annotation format\n", + "!cat kinetics400_tiny/kinetics_tiny_train_video.txt" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "0bq0mxmEi29H" + }, + "source": [ + "According to the format defined in [`VideoDataset`](./datasets/video_dataset.py), each line indicates a sample video with its filepath and label, separated by a whitespace.\n",
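+ "\n", + "For example, if your own videos were grouped into one folder per class, you could generate such a file list with a few lines of Python. The snippet below is only a minimal sketch: the `my_videos/` layout, the `class_to_label` mapping and the output filename are all hypothetical.\n", + "\n", + "```python\n", + "import os\n", + "\n", + "# Hypothetical layout: my_videos/<class_name>/<video>.mp4\n", + "class_to_label = {'climbing': 0, 'diving': 1}\n", + "\n", + "with open('my_train_list.txt', 'w') as f:\n", + "    for cls, label in class_to_label.items():\n", + "        for video in os.listdir(os.path.join('my_videos', cls)):\n", + "            # one '<path relative to data_prefix> <label>' pair per line\n", + "            f.write(f'{cls}/{video} {label}\\n')\n", + "```"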
+ ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "Ht_DGJA9jQar" + }, + "source": [ + "### Modify the config\n", + "\n", + "In the next step, we need to modify the config for the training.\n", + "To accelerate the process, we finetune a recognizer using a pre-trained recognizer." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "LjCcmCKOjktc" + }, + "outputs": [], + "source": [ + "from mmcv import Config\n", + "cfg = Config.fromfile('./configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py')" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "tc8YhFFGjp3e" + }, + "source": [ + "Given a config that trains a TSN model on kinetics400-full dataset, we need to modify some values to use it for training TSN on Kinetics400-tiny dataset.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "tlhu9byjjt-K", + "outputId": "3b9a3c49-ace0-41d3-dd15-d6c8579755f8" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Config:\n", + "model = dict(\n", + " type='Recognizer2D',\n", + " backbone=dict(\n", + " type='ResNet',\n", + " pretrained='torchvision://resnet50',\n", + " depth=50,\n", + " norm_eval=False),\n", + " cls_head=dict(\n", + " type='TSNHead',\n", + " num_classes=2,\n", + " in_channels=2048,\n", + " spatial_type='avg',\n", + " consensus=dict(type='AvgConsensus', dim=1),\n", + " dropout_ratio=0.4,\n", + " init_std=0.01),\n", + " train_cfg=None,\n", + " test_cfg=dict(average_clips=None))\n", + "optimizer = dict(type='SGD', lr=7.8125e-05, momentum=0.9, weight_decay=0.0001)\n", + "optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2))\n", + "lr_config = dict(policy='step', step=[40, 80])\n", + "total_epochs = 10\n", + "checkpoint_config = dict(interval=5)\n", + "log_config = dict(interval=5, hooks=[dict(type='TextLoggerHook')])\n", + "dist_params = dict(backend='nccl')\n", + "log_level = 'INFO'\n", + "load_from = './checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n", + "resume_from = None\n", + "workflow = [('train', 1)]\n", + "dataset_type = 'VideoDataset'\n", + "data_root = 'kinetics400_tiny/train/'\n", + "data_root_val = 'kinetics400_tiny/val/'\n", + "ann_file_train = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n", + "ann_file_val = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", + "ann_file_test = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", + "img_norm_cfg = dict(\n", + " mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False)\n", + "train_pipeline = [\n", + " dict(type='DecordInit'),\n", + " dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8),\n", + " dict(type='DecordDecode'),\n", + " dict(\n", + " type='MultiScaleCrop',\n", + " input_size=224,\n", + " scales=(1, 0.875, 0.75, 0.66),\n", + " random_crop=False,\n", + " max_wh_scale_gap=1),\n", + " dict(type='Resize', scale=(224, 224), keep_ratio=False),\n", + " dict(type='Flip', flip_ratio=0.5),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[123.675, 116.28, 103.53],\n", + " std=[58.395, 57.12, 57.375],\n", + " to_bgr=False),\n", + " dict(type='FormatShape', input_format='NCHW'),\n", + " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", + " dict(type='ToTensor', keys=['imgs', 'label'])\n", + "]\n", + "val_pipeline = [\n", + " dict(type='DecordInit'),\n", + " dict(\n", + " type='SampleFrames',\n", + " clip_len=1,\n", + " frame_interval=1,\n", 
+ " num_clips=8,\n", + " test_mode=True),\n", + " dict(type='DecordDecode'),\n", + " dict(type='Resize', scale=(-1, 256)),\n", + " dict(type='CenterCrop', crop_size=224),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[123.675, 116.28, 103.53],\n", + " std=[58.395, 57.12, 57.375],\n", + " to_bgr=False),\n", + " dict(type='FormatShape', input_format='NCHW'),\n", + " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", + " dict(type='ToTensor', keys=['imgs'])\n", + "]\n", + "test_pipeline = [\n", + " dict(type='DecordInit'),\n", + " dict(\n", + " type='SampleFrames',\n", + " clip_len=1,\n", + " frame_interval=1,\n", + " num_clips=25,\n", + " test_mode=True),\n", + " dict(type='DecordDecode'),\n", + " dict(type='Resize', scale=(-1, 256)),\n", + " dict(type='ThreeCrop', crop_size=256),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[123.675, 116.28, 103.53],\n", + " std=[58.395, 57.12, 57.375],\n", + " to_bgr=False),\n", + " dict(type='FormatShape', input_format='NCHW'),\n", + " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", + " dict(type='ToTensor', keys=['imgs'])\n", + "]\n", + "data = dict(\n", + " videos_per_gpu=2,\n", + " workers_per_gpu=2,\n", + " train=dict(\n", + " type='VideoDataset',\n", + " ann_file='kinetics400_tiny/kinetics_tiny_train_video.txt',\n", + " data_prefix='kinetics400_tiny/train/',\n", + " pipeline=[\n", + " dict(type='DecordInit'),\n", + " dict(\n", + " type='SampleFrames', clip_len=1, frame_interval=1,\n", + " num_clips=8),\n", + " dict(type='DecordDecode'),\n", + " dict(\n", + " type='MultiScaleCrop',\n", + " input_size=224,\n", + " scales=(1, 0.875, 0.75, 0.66),\n", + " random_crop=False,\n", + " max_wh_scale_gap=1),\n", + " dict(type='Resize', scale=(224, 224), keep_ratio=False),\n", + " dict(type='Flip', flip_ratio=0.5),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[123.675, 116.28, 103.53],\n", + " std=[58.395, 57.12, 57.375],\n", + " to_bgr=False),\n", + " dict(type='FormatShape', input_format='NCHW'),\n", + " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", + " dict(type='ToTensor', keys=['imgs', 'label'])\n", + " ]),\n", + " val=dict(\n", + " type='VideoDataset',\n", + " ann_file='kinetics400_tiny/kinetics_tiny_val_video.txt',\n", + " data_prefix='kinetics400_tiny/val/',\n", + " pipeline=[\n", + " dict(type='DecordInit'),\n", + " dict(\n", + " type='SampleFrames',\n", + " clip_len=1,\n", + " frame_interval=1,\n", + " num_clips=8,\n", + " test_mode=True),\n", + " dict(type='DecordDecode'),\n", + " dict(type='Resize', scale=(-1, 256)),\n", + " dict(type='CenterCrop', crop_size=224),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[123.675, 116.28, 103.53],\n", + " std=[58.395, 57.12, 57.375],\n", + " to_bgr=False),\n", + " dict(type='FormatShape', input_format='NCHW'),\n", + " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", + " dict(type='ToTensor', keys=['imgs'])\n", + " ]),\n", + " test=dict(\n", + " type='VideoDataset',\n", + " ann_file='kinetics400_tiny/kinetics_tiny_val_video.txt',\n", + " data_prefix='kinetics400_tiny/val/',\n", + " pipeline=[\n", + " dict(type='DecordInit'),\n", + " dict(\n", + " type='SampleFrames',\n", + " clip_len=1,\n", + " frame_interval=1,\n", + " num_clips=25,\n", + " test_mode=True),\n", + " dict(type='DecordDecode'),\n", + " dict(type='Resize', scale=(-1, 256)),\n", + " dict(type='ThreeCrop', crop_size=256),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[123.675, 116.28, 103.53],\n", + " std=[58.395, 57.12, 57.375],\n", + " 
to_bgr=False),\n", + " dict(type='FormatShape', input_format='NCHW'),\n", + " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", + " dict(type='ToTensor', keys=['imgs'])\n", + " ]))\n", + "evaluation = dict(\n", + " interval=5,\n", + " metrics=['top_k_accuracy', 'mean_class_accuracy'],\n", + " save_best='auto')\n", + "work_dir = './tutorial_exps'\n", + "omnisource = False\n", + "seed = 0\n", + "gpu_ids = range(0, 1)\n", + "\n" + ] + } + ], + "source": [ + "from mmcv.runner import set_random_seed\n", + "\n", + "# Modify dataset type and path\n", + "cfg.dataset_type = 'VideoDataset'\n", + "cfg.data_root = 'kinetics400_tiny/train/'\n", + "cfg.data_root_val = 'kinetics400_tiny/val/'\n", + "cfg.ann_file_train = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n", + "cfg.ann_file_val = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", + "cfg.ann_file_test = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", + "\n", + "cfg.data.test.type = 'VideoDataset'\n", + "cfg.data.test.ann_file = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", + "cfg.data.test.data_prefix = 'kinetics400_tiny/val/'\n", + "\n", + "cfg.data.train.type = 'VideoDataset'\n", + "cfg.data.train.ann_file = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n", + "cfg.data.train.data_prefix = 'kinetics400_tiny/train/'\n", + "\n", + "cfg.data.val.type = 'VideoDataset'\n", + "cfg.data.val.ann_file = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", + "cfg.data.val.data_prefix = 'kinetics400_tiny/val/'\n", + "\n", + "# This flag determines whether to use omnisource training\n", + "cfg.setdefault('omnisource', False)\n", + "# Modify the number of classes of the model in cls_head\n", + "cfg.model.cls_head.num_classes = 2\n", + "# We can use the pre-trained TSN model\n", + "cfg.load_from = './checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n", + "\n", + "# Set up working dir to save files and logs.\n", + "cfg.work_dir = './tutorial_exps'\n", + "\n", + "# The original learning rate (LR) is set for 8-GPU training with 32 videos per GPU.\n", + "# Here we train on a single GPU with a 16x smaller batch size, so we linearly\n", + "# scale the LR down by 8 x 16 = 128 in total (0.01 / 128 = 7.8125e-05).\n", + "cfg.data.videos_per_gpu = cfg.data.videos_per_gpu // 16\n", + "cfg.optimizer.lr = cfg.optimizer.lr / 8 / 16\n", + "cfg.total_epochs = 10\n", + "\n", + "# We can set the checkpoint saving interval to reduce the storage cost\n", + "cfg.checkpoint_config.interval = 5\n", + "# We can set the log print interval to reduce the frequency of logging\n", + "cfg.log_config.interval = 5\n", + "\n", + "# Set seed to make the results more reproducible\n", + "cfg.seed = 0\n", + "set_random_seed(0, deterministic=False)\n", + "cfg.gpu_ids = range(1)\n", + "\n", + "# Save the best checkpoint according to the evaluation results\n", + "cfg.evaluation.save_best = 'auto'\n", + "\n", + "\n", + "# We can initialize the logger for training and have a look\n", + "# at the final config used for training\n", + "print(f'Config:\\n{cfg.pretty_text}')\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "tES-qnZ3k38Z" + }, + "source": [ + "### Train a new recognizer\n", + "\n", + "Finally, let's initialize the dataset and recognizer, then train a new recognizer!\n",
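+ "\n", + "`train_model` builds the data loaders, the optimizer and all training hooks from the config, so the cell below stays short. If a run gets interrupted, you could also resume from one of the periodically saved checkpoints instead of starting over. A minimal sketch, assuming the 5-epoch checkpoint interval set above has already produced `epoch_5.pth` in the work dir:\n", + "\n", + "```python\n", + "# Hypothetical: resume an interrupted run from the epoch-5 checkpoint\n", + "cfg.resume_from = './tutorial_exps/epoch_5.pth'\n", + "```"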
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "dDBWkdDRk6oz", + "outputId": "a85d80d7-b3c4-43f1-d49a-057e8036807f" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Use load_from_torchvision loader\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2021-07-11 13:00:46,931 - mmaction - INFO - These parameters in pretrained checkpoint are not loaded: {'fc.bias', 'fc.weight'}\n", + "/usr/local/lib/python3.7/dist-packages/torch/utils/data/dataloader.py:477: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n", + " cpuset_checked))\n", + "2021-07-11 13:00:46,980 - mmaction - INFO - load checkpoint from ./checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth\n", + "2021-07-11 13:00:46,981 - mmaction - INFO - Use load_from_local loader\n", + "2021-07-11 13:00:47,071 - mmaction - WARNING - The model and loaded state dict do not match exactly\n", + "\n", + "size mismatch for cls_head.fc_cls.weight: copying a param with shape torch.Size([400, 2048]) from checkpoint, the shape in current model is torch.Size([2, 2048]).\n", + "size mismatch for cls_head.fc_cls.bias: copying a param with shape torch.Size([400]) from checkpoint, the shape in current model is torch.Size([2]).\n", + "2021-07-11 13:00:47,074 - mmaction - INFO - Start running, host: root@b465112b4add, work_dir: /content/mmaction2/tutorial_exps\n", + "2021-07-11 13:00:47,078 - mmaction - INFO - Hooks will be executed in the following order:\n", + "before_run:\n", + "(VERY_HIGH ) StepLrUpdaterHook \n", + "(NORMAL ) CheckpointHook \n", + "(NORMAL ) EvalHook \n", + "(VERY_LOW ) TextLoggerHook \n", + " -------------------- \n", + "before_train_epoch:\n", + "(VERY_HIGH ) StepLrUpdaterHook \n", + "(NORMAL ) EvalHook \n", + "(LOW ) IterTimerHook \n", + "(VERY_LOW ) TextLoggerHook \n", + " -------------------- \n", + "before_train_iter:\n", + "(VERY_HIGH ) StepLrUpdaterHook \n", + "(NORMAL ) EvalHook \n", + "(LOW ) IterTimerHook \n", + " -------------------- \n", + "after_train_iter:\n", + "(ABOVE_NORMAL) OptimizerHook \n", + "(NORMAL ) CheckpointHook \n", + "(NORMAL ) EvalHook \n", + "(LOW ) IterTimerHook \n", + "(VERY_LOW ) TextLoggerHook \n", + " -------------------- \n", + "after_train_epoch:\n", + "(NORMAL ) CheckpointHook \n", + "(NORMAL ) EvalHook \n", + "(VERY_LOW ) TextLoggerHook \n", + " -------------------- \n", + "before_val_epoch:\n", + "(LOW ) IterTimerHook \n", + "(VERY_LOW ) TextLoggerHook \n", + " -------------------- \n", + "before_val_iter:\n", + "(LOW ) IterTimerHook \n", + " -------------------- \n", + "after_val_iter:\n", + "(LOW ) IterTimerHook \n", + " -------------------- \n", + "after_val_epoch:\n", + "(VERY_LOW ) TextLoggerHook \n", + " -------------------- \n", + "2021-07-11 13:00:47,081 - mmaction - INFO - workflow: [('train', 1)], max: 10 epochs\n", + "/usr/local/lib/python3.7/dist-packages/mmcv/runner/hooks/evaluation.py:190: UserWarning: runner.meta is None. Creating an empty one.\n", + " warnings.warn('runner.meta is None. 
Creating an empty one.')\n", + "2021-07-11 13:00:51,802 - mmaction - INFO - Epoch [1][5/15]\tlr: 7.813e-05, eta: 0:02:16, time: 0.942, data_time: 0.730, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7604, loss: 0.7604, grad_norm: 14.8813\n", + "2021-07-11 13:00:52,884 - mmaction - INFO - Epoch [1][10/15]\tlr: 7.813e-05, eta: 0:01:21, time: 0.217, data_time: 0.028, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6282, loss: 0.6282, grad_norm: 10.1834\n", + "2021-07-11 13:00:53,706 - mmaction - INFO - Epoch [1][15/15]\tlr: 7.813e-05, eta: 0:00:59, time: 0.164, data_time: 0.001, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7165, loss: 0.7165, grad_norm: 10.8534\n", + "2021-07-11 13:00:57,724 - mmaction - INFO - Epoch [2][5/15]\tlr: 7.813e-05, eta: 0:01:09, time: 0.802, data_time: 0.596, memory: 2918, top1_acc: 0.3000, top5_acc: 1.0000, loss_cls: 0.7001, loss: 0.7001, grad_norm: 11.4311\n", + "2021-07-11 13:00:59,219 - mmaction - INFO - Epoch [2][10/15]\tlr: 7.813e-05, eta: 0:01:00, time: 0.296, data_time: 0.108, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6916, loss: 0.6916, grad_norm: 12.7101\n", + "2021-07-11 13:01:00,040 - mmaction - INFO - Epoch [2][15/15]\tlr: 7.813e-05, eta: 0:00:51, time: 0.167, data_time: 0.004, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6567, loss: 0.6567, grad_norm: 8.8837\n", + "2021-07-11 13:01:04,152 - mmaction - INFO - Epoch [3][5/15]\tlr: 7.813e-05, eta: 0:00:56, time: 0.820, data_time: 0.618, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6320, loss: 0.6320, grad_norm: 11.4025\n", + "2021-07-11 13:01:05,526 - mmaction - INFO - Epoch [3][10/15]\tlr: 7.813e-05, eta: 0:00:50, time: 0.276, data_time: 0.075, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6542, loss: 0.6542, grad_norm: 10.6429\n", + "2021-07-11 13:01:06,350 - mmaction - INFO - Epoch [3][15/15]\tlr: 7.813e-05, eta: 0:00:44, time: 0.165, data_time: 0.001, memory: 2918, top1_acc: 0.2000, top5_acc: 1.0000, loss_cls: 0.7661, loss: 0.7661, grad_norm: 12.8421\n", + "2021-07-11 13:01:10,771 - mmaction - INFO - Epoch [4][5/15]\tlr: 7.813e-05, eta: 0:00:47, time: 0.883, data_time: 0.676, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6410, loss: 0.6410, grad_norm: 10.6697\n", + "2021-07-11 13:01:11,776 - mmaction - INFO - Epoch [4][10/15]\tlr: 7.813e-05, eta: 0:00:42, time: 0.201, data_time: 0.011, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6949, loss: 0.6949, grad_norm: 10.5467\n", + "2021-07-11 13:01:12,729 - mmaction - INFO - Epoch [4][15/15]\tlr: 7.813e-05, eta: 0:00:38, time: 0.190, data_time: 0.026, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6290, loss: 0.6290, grad_norm: 11.2779\n", + "2021-07-11 13:01:16,816 - mmaction - INFO - Epoch [5][5/15]\tlr: 7.813e-05, eta: 0:00:38, time: 0.817, data_time: 0.608, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6011, loss: 0.6011, grad_norm: 9.1335\n", + "2021-07-11 13:01:18,176 - mmaction - INFO - Epoch [5][10/15]\tlr: 7.813e-05, eta: 0:00:35, time: 0.272, data_time: 0.080, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6652, loss: 0.6652, grad_norm: 11.0616\n", + "2021-07-11 13:01:19,119 - mmaction - INFO - Epoch [5][15/15]\tlr: 7.813e-05, eta: 0:00:32, time: 0.188, data_time: 0.017, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6440, loss: 0.6440, grad_norm: 11.6473\n", + "2021-07-11 13:01:19,120 - mmaction - INFO - Saving checkpoint at 5 epochs\n" + 
] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 4.9 task/s, elapsed: 2s, ETA: 0s" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2021-07-11 13:01:21,673 - mmaction - INFO - Evaluating top_k_accuracy ...\n", + "2021-07-11 13:01:21,677 - mmaction - INFO - \n", + "top1_acc\t0.7000\n", + "top5_acc\t1.0000\n", + "2021-07-11 13:01:21,679 - mmaction - INFO - Evaluating mean_class_accuracy ...\n", + "2021-07-11 13:01:21,682 - mmaction - INFO - \n", + "mean_acc\t0.7000\n", + "2021-07-11 13:01:22,264 - mmaction - INFO - Now best checkpoint is saved as best_top1_acc_epoch_5.pth.\n", + "2021-07-11 13:01:22,267 - mmaction - INFO - Best top1_acc is 0.7000 at 5 epoch.\n", + "2021-07-11 13:01:22,271 - mmaction - INFO - Epoch(val) [5][5]\ttop1_acc: 0.7000, top5_acc: 1.0000, mean_class_accuracy: 0.7000\n", + "2021-07-11 13:01:26,623 - mmaction - INFO - Epoch [6][5/15]\tlr: 7.813e-05, eta: 0:00:31, time: 0.868, data_time: 0.656, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6753, loss: 0.6753, grad_norm: 11.8640\n", + "2021-07-11 13:01:27,597 - mmaction - INFO - Epoch [6][10/15]\tlr: 7.813e-05, eta: 0:00:28, time: 0.195, data_time: 0.003, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6715, loss: 0.6715, grad_norm: 11.3347\n", + "2021-07-11 13:01:28,736 - mmaction - INFO - Epoch [6][15/15]\tlr: 7.813e-05, eta: 0:00:25, time: 0.228, data_time: 0.063, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5769, loss: 0.5769, grad_norm: 9.2541\n", + "2021-07-11 13:01:32,860 - mmaction - INFO - Epoch [7][5/15]\tlr: 7.813e-05, eta: 0:00:24, time: 0.822, data_time: 0.620, memory: 2918, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.5379, loss: 0.5379, grad_norm: 8.0147\n", + "2021-07-11 13:01:34,340 - mmaction - INFO - Epoch [7][10/15]\tlr: 7.813e-05, eta: 0:00:22, time: 0.298, data_time: 0.109, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6187, loss: 0.6187, grad_norm: 11.5244\n", + "2021-07-11 13:01:35,165 - mmaction - INFO - Epoch [7][15/15]\tlr: 7.813e-05, eta: 0:00:19, time: 0.165, data_time: 0.002, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7063, loss: 0.7063, grad_norm: 12.4979\n", + "2021-07-11 13:01:39,435 - mmaction - INFO - Epoch [8][5/15]\tlr: 7.813e-05, eta: 0:00:17, time: 0.853, data_time: 0.641, memory: 2918, top1_acc: 1.0000, top5_acc: 1.0000, loss_cls: 0.5369, loss: 0.5369, grad_norm: 8.6545\n", + "2021-07-11 13:01:40,808 - mmaction - INFO - Epoch [8][10/15]\tlr: 7.813e-05, eta: 0:00:15, time: 0.275, data_time: 0.086, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6407, loss: 0.6407, grad_norm: 12.5537\n", + "2021-07-11 13:01:41,627 - mmaction - INFO - Epoch [8][15/15]\tlr: 7.813e-05, eta: 0:00:12, time: 0.164, data_time: 0.001, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6073, loss: 0.6073, grad_norm: 11.4028\n", + "2021-07-11 13:01:45,651 - mmaction - INFO - Epoch [9][5/15]\tlr: 7.813e-05, eta: 0:00:11, time: 0.803, data_time: 0.591, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5596, loss: 0.5596, grad_norm: 10.0821\n", + "2021-07-11 13:01:46,891 - mmaction - INFO - Epoch [9][10/15]\tlr: 7.813e-05, eta: 0:00:08, time: 0.248, data_time: 0.044, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6470, loss: 0.6470, grad_norm: 11.8979\n", + "2021-07-11 13:01:47,944 - mmaction - INFO - Epoch [9][15/15]\tlr: 7.813e-05, eta: 0:00:06, time: 0.211, data_time: 0.041, memory: 
2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6657, loss: 0.6657, grad_norm: 12.0643\n", + "2021-07-11 13:01:52,200 - mmaction - INFO - Epoch [10][5/15]\tlr: 7.813e-05, eta: 0:00:04, time: 0.849, data_time: 0.648, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6310, loss: 0.6310, grad_norm: 11.5690\n", + "2021-07-11 13:01:53,707 - mmaction - INFO - Epoch [10][10/15]\tlr: 7.813e-05, eta: 0:00:02, time: 0.303, data_time: 0.119, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5178, loss: 0.5178, grad_norm: 9.3324\n", + "2021-07-11 13:01:54,520 - mmaction - INFO - Epoch [10][15/15]\tlr: 7.813e-05, eta: 0:00:00, time: 0.162, data_time: 0.001, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6919, loss: 0.6919, grad_norm: 12.6688\n", + "2021-07-11 13:01:54,522 - mmaction - INFO - Saving checkpoint at 10 epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 5.9 task/s, elapsed: 2s, ETA: 0s" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2021-07-11 13:01:56,741 - mmaction - INFO - Evaluating top_k_accuracy ...\n", + "2021-07-11 13:01:56,743 - mmaction - INFO - \n", + "top1_acc\t1.0000\n", + "top5_acc\t1.0000\n", + "2021-07-11 13:01:56,749 - mmaction - INFO - Evaluating mean_class_accuracy ...\n", + "2021-07-11 13:01:56,750 - mmaction - INFO - \n", + "mean_acc\t1.0000\n", + "2021-07-11 13:01:57,267 - mmaction - INFO - Now best checkpoint is saved as best_top1_acc_epoch_10.pth.\n", + "2021-07-11 13:01:57,269 - mmaction - INFO - Best top1_acc is 1.0000 at 10 epoch.\n", + "2021-07-11 13:01:57,270 - mmaction - INFO - Epoch(val) [10][5]\ttop1_acc: 1.0000, top5_acc: 1.0000, mean_class_accuracy: 1.0000\n" + ] + } + ], + "source": [ + "import os.path as osp\n", + "\n", + "from mmaction.datasets import build_dataset\n", + "from mmaction.models import build_model\n", + "from mmaction.apis import train_model\n", + "\n", + "import mmcv\n", + "\n", + "# Build the dataset\n", + "datasets = [build_dataset(cfg.data.train)]\n", + "\n", + "# Build the recognizer\n", + "model = build_model(cfg.model, train_cfg=cfg.get('train_cfg'), test_cfg=cfg.get('test_cfg'))\n", + "\n", + "# Create work_dir\n", + "mmcv.mkdir_or_exist(osp.abspath(cfg.work_dir))\n", + "train_model(model, datasets, cfg, distributed=False, validate=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "zdSd7oTLlxIf" + }, + "source": [ + "### Understand the log\n", + "From the log, we can get a basic understanding of the training process and know how well the recognizer is trained.\n", + "\n", + "Firstly, the ResNet-50 backbone pre-trained on ImageNet is loaded; this is a common practice, since training from scratch is much more costly. The log shows that all the weights of the ResNet-50 backbone are loaded except `fc.bias` and `fc.weight`.\n", + "\n", + "Second, since the dataset we are using is small, we load a pre-trained TSN model and finetune it for action recognition.\n", + "The original TSN is trained on the full Kinetics-400 dataset, which contains 400 classes, while the Kinetics-400 Tiny dataset only has 2 classes. Therefore, the last FC layer of the pre-trained TSN has a different weight shape and is not loaded.\n", + "\n", + "Third, after training, the recognizer is evaluated with the default evaluation. The results show that the recognizer achieves 100% top1 accuracy and 100% top5 accuracy on the val dataset.\n", + "\n", + "Not bad!\n",
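+ "\n", + "As a reminder of what these metrics mean: a prediction counts as top-k correct when the ground-truth label is among the k highest-scoring classes, so with only 2 classes the top-5 accuracy is trivially 100%. A minimal sketch with made-up scores:\n", + "\n", + "```python\n", + "import numpy as np\n", + "\n", + "scores = np.array([[0.2, 0.8], [0.9, 0.1]])  # made-up per-class scores\n", + "labels = np.array([1, 0])                    # ground-truth labels\n", + "top1_acc = (scores.argmax(axis=1) == labels).mean()\n", + "print(top1_acc)  # 1.0\n", + "```"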
+ ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "ryVoSfZVmogw" + }, + "source": [ + "## Test the trained recognizer\n", + "\n", + "After finetuning the recognizer, let's check the prediction results!" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "eyY3hCMwyTct", + "outputId": "ea54ff0a-4299-4e93-c1ca-4fe597e7516b" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[ ] 0/10, elapsed: 0s, ETA:" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/usr/local/lib/python3.7/dist-packages/torch/utils/data/dataloader.py:477: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n", + " cpuset_checked))\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 2.2 task/s, elapsed: 5s, ETA: 0s\n", + "Evaluating top_k_accuracy ...\n", + "\n", + "top1_acc\t1.0000\n", + "top5_acc\t1.0000\n", + "\n", + "Evaluating mean_class_accuracy ...\n", + "\n", + "mean_acc\t1.0000\n", + "top1_acc: 1.0000\n", + "top5_acc: 1.0000\n", + "mean_class_accuracy: 1.0000\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/content/mmaction2/mmaction/datasets/base.py:166: UserWarning: Option arguments for metrics has been changed to `metric_options`, See 'https://github.com/open-mmlab/mmaction2/pull/286' for more details\n", + " 'Option arguments for metrics has been changed to '\n" + ] + } + ], + "source": [ + "from mmaction.apis import single_gpu_test\n", + "from mmaction.datasets import build_dataloader\n", + "from mmcv.parallel import MMDataParallel\n", + "\n", + "# Build a test dataloader\n", + "dataset = build_dataset(cfg.data.test, dict(test_mode=True))\n", + "data_loader = build_dataloader(\n", + " dataset,\n", + " videos_per_gpu=1,\n", + " workers_per_gpu=cfg.data.workers_per_gpu,\n", + " dist=False,\n", + " shuffle=False)\n", + "model = MMDataParallel(model, device_ids=[0])\n", + "outputs = single_gpu_test(model, data_loader)\n", + "\n", + "eval_config = cfg.evaluation\n", + "eval_config.pop('interval')\n", + "eval_res = dataset.evaluate(outputs, **eval_config)\n", + "for name, val in eval_res.items():\n", + " print(f'{name}: {val:.04f}')" + ] + }, + { + "cell_type": "markdown", + "source": [ + "## Perform Spatio-Temporal Detection\n", + "The spatio-temporal detection demo relies on a human detector from MMDetection to localize people before classifying their actions, so here we first install MMDetection."
+ ], + "metadata": { + "id": "jZ4t44nWmZDM" + } + }, + { + "cell_type": "code", + "source": [ + "# Git clone mmdetection repo\n", + "%cd ..\n", + "!git clone https://github.com/open-mmlab/mmdetection.git\n", + "%cd mmdetection\n", + "\n", + "# install mmdet\n", + "!pip install -e .\n", + "%cd ../mmaction2" + ], + "metadata": { + "id": "w1p0_g76nHOQ", + "outputId": "b30a6be3-c457-452e-c789-7083117c5011", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "execution_count": 3, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "/content\n", + "Cloning into 'mmdetection'...\n", + "remote: Enumerating objects: 23137, done.\u001b[K\n", + "remote: Total 23137 (delta 0), reused 0 (delta 0), pack-reused 23137\u001b[K\n", + "Receiving objects: 100% (23137/23137), 25.88 MiB | 25.75 MiB/s, done.\n", + "Resolving deltas: 100% (16198/16198), done.\n", + "/content/mmdetection\n", + "Obtaining file:///content/mmdetection\n", + "Requirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from mmdet==2.21.0) (3.2.2)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from mmdet==2.21.0) (1.21.5)\n", + "Requirement already satisfied: pycocotools in /usr/local/lib/python3.7/dist-packages (from mmdet==2.21.0) (2.0.4)\n", + "Requirement already satisfied: six in /usr/local/lib/python3.7/dist-packages (from mmdet==2.21.0) (1.15.0)\n", + "Collecting terminaltables\n", + " Downloading terminaltables-3.1.10-py2.py3-none-any.whl (15 kB)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmdet==2.21.0) (0.11.0)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmdet==2.21.0) (2.8.2)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmdet==2.21.0) (1.3.2)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmdet==2.21.0) (3.0.7)\n", + "Installing collected packages: terminaltables, mmdet\n", + " Running setup.py develop for mmdet\n", + "Successfully installed mmdet-2.21.0 terminaltables-3.1.10\n", + "/content/mmaction2\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "Download a video to `demo` directory in MMAction2." + ], + "metadata": { + "id": "vlOQsH8OnVKn" + } + }, + { + "cell_type": "code", + "source": [ + "!wget https://download.openmmlab.com/mmaction/dataset/sample/1j20qq1JyX4.mp4 -O demo/1j20qq1JyX4.mp4" + ], + "metadata": { + "id": "QaW3jg5Enish", + "outputId": "c70cde3a-b337-41d0-cb08-82dfc746d9ef", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "execution_count": 4, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "--2022-02-19 11:02:59-- https://download.openmmlab.com/mmaction/dataset/sample/1j20qq1JyX4.mp4\n", + "Resolving download.openmmlab.com (download.openmmlab.com)... 47.254.186.233\n", + "Connecting to download.openmmlab.com (download.openmmlab.com)|47.254.186.233|:443... connected.\n", + "HTTP request sent, awaiting response... 
200 OK\n", + "Length: 4864186 (4.6M) [video/mp4]\n", + "Saving to: ‘demo/1j20qq1JyX4.mp4’\n", + "\n", + "demo/1j20qq1JyX4.mp 100%[===================>] 4.64M 3.78MB/s in 1.2s \n", + "\n", + "2022-02-19 11:03:01 (3.78 MB/s) - ‘demo/1j20qq1JyX4.mp4’ saved [4864186/4864186]\n", + "\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "Run spatio-temporal demo." + ], + "metadata": { + "id": "LYGxdu8Vnoah" + } + }, + { + "cell_type": "code", + "source": [ + "!python demo/demo_spatiotemporal_det.py --video demo/1j20qq1JyX4.mp4" + ], + "metadata": { + "id": "LPLiaHaYnrb7", + "outputId": "8a8f8a16-ad7b-4559-c19c-c8264533bff3", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "execution_count": 5, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Imageio: 'ffmpeg-linux64-v3.3.1' was not found on your computer; downloading it now.\n", + "Try 1. Download from https://github.com/imageio/imageio-binaries/raw/master/ffmpeg/ffmpeg-linux64-v3.3.1 (43.8 MB)\n", + "Downloading: 8192/45929032 bytes (0.0%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b3883008/45929032 bytes (8.5%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b7995392/45929032 bytes (17.4%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b11796480/45929032 bytes (25.7%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b16072704/45929032 bytes (35.0%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b20152320/45929032 bytes (43.9%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b24305664/45929032 bytes (52.9%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b28319744/45929032 bytes (61.7%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b32440320/45929032 bytes (70.6%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b36634624/45929032 bytes (79.8%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b40886272/45929032 bytes (89.0%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b45146112/45929032 bytes (98.3%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b45929032/45929032 bytes (100.0%)\n", + " Done\n", + "File saved as /root/.imageio/ffmpeg/ffmpeg-linux64-v3.3.1.\n", + "load checkpoint from http path: http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth\n", + "Downloading: \"http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth\" to /root/.cache/torch/hub/checkpoints/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth\n", + "100% 160M/160M [00:21<00:00, 7.77MB/s]\n", + "Performing Human Detection for each frame\n", + "[>>] 217/217, 8.6 task/s, elapsed: 25s, ETA: 0sload checkpoint from http path: https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201217-16378594.pth\n", + "Downloading: \"https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201217-16378594.pth\" to /root/.cache/torch/hub/checkpoints/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201217-16378594.pth\n", + "100% 228M/228M [00:31<00:00, 7.55MB/s]\n", + "Performing SpatioTemporal Action Detection for each clip\n", + "[> ] 167/217, 7.7 
task/s, elapsed: 22s, ETA: 7sPerforming visualization\n", + "[MoviePy] >>>> Building video demo/stdet_demo.mp4\n", + "[MoviePy] Writing video demo/stdet_demo.mp4\n", + "100% 434/434 [00:12<00:00, 36.07it/s]\n", + "[MoviePy] Done.\n", + "[MoviePy] >>>> Video ready: demo/stdet_demo.mp4 \n", + "\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "# Check the video\n", + "from IPython.display import HTML\n", + "from base64 import b64encode\n", + "mp4 = open('demo/stdet_demo.mp4','rb').read()\n", + "data_url = \"data:video/mp4;base64,\" + b64encode(mp4).decode()\n", + "HTML(\"\"\"\n", + "\n", + "\"\"\" % data_url)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 341 + }, + "id": "-0atQCzBo9-C", + "outputId": "b6bb3a67-669c-45d0-cdf4-25b6210362d0" + }, + "execution_count": 6, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/html": [ + "\n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "execution_count": 6 + } + ] + } + ], + "metadata": { + "accelerator": "GPU", + "colab": { + "collapsed_sections": [], + "name": "MMAction2 Tutorial.ipynb", + "provenance": [], + "toc_visible": true, + "include_colab_link": true + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file From 36d74518943e0510f0f5a6dbf46f13dd84964fed Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Sat, 19 Feb 2022 19:09:31 +0800 Subject: [PATCH 355/414] update colab --- MMAction2_Tutorial.ipynb | 1 + 1 file changed, 1 insertion(+) diff --git a/MMAction2_Tutorial.ipynb b/MMAction2_Tutorial.ipynb index e8dc94f09b..eddea09098 100644 --- a/MMAction2_Tutorial.ipynb +++ b/MMAction2_Tutorial.ipynb @@ -21,6 +21,7 @@ "Welcome to MMAction2! This is the official colab tutorial for using MMAction2. In this tutorial, you will learn\n", "- Perform inference with a MMAction2 recognizer.\n", "- Train a new recognizer with a new dataset.\n", + "- Perform spatio-temporal detection.\n", "\n", "Let's start!" ] From cd47f08308332854631d149712249398fb7734b2 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Sat, 19 Feb 2022 19:13:28 +0800 Subject: [PATCH 356/414] colab --- demo/mmaction2_tutorial.ipynb | 2622 ++++++++++++++++++--------------- 1 file changed, 1418 insertions(+), 1204 deletions(-) diff --git a/demo/mmaction2_tutorial.ipynb b/demo/mmaction2_tutorial.ipynb index 14441ab79b..ff7964f6d1 100644 --- a/demo/mmaction2_tutorial.ipynb +++ b/demo/mmaction2_tutorial.ipynb @@ -1,1247 +1,1461 @@ { - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "colab_type": "text", - "id": "view-in-github" - }, - "source": [ - "\"Open" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "VcjSRFELVbNk" - }, - "source": [ - "# MMAction2 Tutorial\n", - "\n", - "Welcome to MMAction2! This is the official colab tutorial for using MMAction2. In this tutorial, you will learn\n", - "- Perform inference with a MMAction2 recognizer.\n", - "- Train a new recognizer with a new dataset.\n", - "\n", - "Let's start!" 
- ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "7LqHGkGEVqpm" - }, - "source": [ - "## Install MMAction2" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "view-in-github", + "colab_type": "text" + }, + "source": [ + "\"Open" + ] }, - "id": "Bf8PpPXtVvmg", - "outputId": "f262f3c6-a9dd-48c7-8f7e-081fd3e12ba8" - }, - "outputs": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - "nvcc: NVIDIA (R) Cuda compiler driver\n", - "Copyright (c) 2005-2020 NVIDIA Corporation\n", - "Built on Wed_Jul_22_19:09:09_PDT_2020\n", - "Cuda compilation tools, release 11.0, V11.0.221\n", - "Build cuda_11.0_bu.TC445_37.28845127_0\n", - "gcc (Ubuntu 7.5.0-3ubuntu1~18.04) 7.5.0\n", - "Copyright (C) 2017 Free Software Foundation, Inc.\n", - "This is free software; see the source for copying conditions. There is NO\n", - "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n", - "\n" - ] - } - ], - "source": [ - "# Check nvcc version\n", - "!nvcc -V\n", - "# Check GCC version\n", - "!gcc --version" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" + "cell_type": "markdown", + "metadata": { + "id": "VcjSRFELVbNk" + }, + "source": [ + "# MMAction2 Tutorial\n", + "\n", + "Welcome to MMAction2! This is the official colab tutorial for using MMAction2. In this tutorial, you will learn\n", + "- Perform inference with a MMAction2 recognizer.\n", + "- Train a new recognizer with a new dataset.\n", + "- Perform spatio-temporal detection.\n", + "\n", + "Let's start!" + ] }, - "id": "5PAJ4ArzV5Ry", - "outputId": "b68c4528-1a83-469f-8920-040ae373fc7c" - }, - "outputs": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - "Looking in links: https://download.pytorch.org/whl/torch_stable.html\n", - "Collecting torch==1.8.0+cu101\n", - "\u001b[?25l Downloading https://download.pytorch.org/whl/cu101/torch-1.8.0%2Bcu101-cp37-cp37m-linux_x86_64.whl (763.5MB)\n", - "\u001b[K |████████████████████████████████| 763.5MB 23kB/s \n", - "\u001b[?25hCollecting torchvision==0.9.0+cu101\n", - "\u001b[?25l Downloading https://download.pytorch.org/whl/cu101/torchvision-0.9.0%2Bcu101-cp37-cp37m-linux_x86_64.whl (17.3MB)\n", - "\u001b[K |████████████████████████████████| 17.3MB 188kB/s \n", - "\u001b[?25hCollecting torchtext==0.9.0\n", - "\u001b[?25l Downloading https://files.pythonhosted.org/packages/36/50/84184d6230686e230c464f0dd4ff32eada2756b4a0b9cefec68b88d1d580/torchtext-0.9.0-cp37-cp37m-manylinux1_x86_64.whl (7.1MB)\n", - "\u001b[K |████████████████████████████████| 7.1MB 8.0MB/s \n", - "\u001b[?25hRequirement already satisfied, skipping upgrade: numpy in /usr/local/lib/python3.7/dist-packages (from torch==1.8.0+cu101) (1.19.5)\n", - "Requirement already satisfied, skipping upgrade: typing-extensions in /usr/local/lib/python3.7/dist-packages (from torch==1.8.0+cu101) (3.7.4.3)\n", - "Requirement already satisfied, skipping upgrade: pillow>=4.1.1 in /usr/local/lib/python3.7/dist-packages (from torchvision==0.9.0+cu101) (7.1.2)\n", - "Requirement already satisfied, skipping upgrade: tqdm in /usr/local/lib/python3.7/dist-packages (from torchtext==0.9.0) (4.41.1)\n", - "Requirement already satisfied, skipping upgrade: requests in /usr/local/lib/python3.7/dist-packages (from torchtext==0.9.0) (2.23.0)\n", - "Requirement already satisfied, skipping upgrade: 
idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (2.10)\n", - "Requirement already satisfied, skipping upgrade: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (1.24.3)\n", - "Requirement already satisfied, skipping upgrade: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (3.0.4)\n", - "Requirement already satisfied, skipping upgrade: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (2021.5.30)\n", - "Installing collected packages: torch, torchvision, torchtext\n", - " Found existing installation: torch 1.9.0+cu102\n", - " Uninstalling torch-1.9.0+cu102:\n", - " Successfully uninstalled torch-1.9.0+cu102\n", - " Found existing installation: torchvision 0.10.0+cu102\n", - " Uninstalling torchvision-0.10.0+cu102:\n", - " Successfully uninstalled torchvision-0.10.0+cu102\n", - " Found existing installation: torchtext 0.10.0\n", - " Uninstalling torchtext-0.10.0:\n", - " Successfully uninstalled torchtext-0.10.0\n", - "Successfully installed torch-1.8.0+cu101 torchtext-0.9.0 torchvision-0.9.0+cu101\n", - "Looking in links: https://download.openmmlab.com/mmcv/dist/cu101/torch1.8.0/index.html\n", - "Collecting mmcv-full==1.3.9\n", - "\u001b[?25l Downloading https://download.openmmlab.com/mmcv/dist/cu101/torch1.8.0/mmcv_full-1.3.9-cp37-cp37m-manylinux1_x86_64.whl (31.4MB)\n", - "\u001b[K |████████████████████████████████| 31.4MB 94kB/s \n", - "\u001b[?25hRequirement already satisfied: pyyaml in /usr/local/lib/python3.7/dist-packages (from mmcv-full==1.3.9) (3.13)\n", - "Requirement already satisfied: opencv-python>=3 in /usr/local/lib/python3.7/dist-packages (from mmcv-full==1.3.9) (4.1.2.30)\n", - "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from mmcv-full==1.3.9) (1.19.5)\n", - "Requirement already satisfied: Pillow in /usr/local/lib/python3.7/dist-packages (from mmcv-full==1.3.9) (7.1.2)\n", - "Collecting addict\n", - " Downloading https://files.pythonhosted.org/packages/6a/00/b08f23b7d7e1e14ce01419a467b583edbb93c6cdb8654e54a9cc579cd61f/addict-2.4.0-py3-none-any.whl\n", - "Collecting yapf\n", - "\u001b[?25l Downloading https://files.pythonhosted.org/packages/5f/0d/8814e79eb865eab42d95023b58b650d01dec6f8ea87fc9260978b1bf2167/yapf-0.31.0-py2.py3-none-any.whl (185kB)\n", - "\u001b[K |████████████████████████████████| 194kB 8.8MB/s \n", - "\u001b[?25hInstalling collected packages: addict, yapf, mmcv-full\n", - "Successfully installed addict-2.4.0 mmcv-full-1.3.9 yapf-0.31.0\n", - "Cloning into 'mmaction2'...\n", - "remote: Enumerating objects: 12544, done.\u001b[K\n", - "remote: Counting objects: 100% (677/677), done.\u001b[K\n", - "remote: Compressing objects: 100% (330/330), done.\u001b[K\n", - "remote: Total 12544 (delta 432), reused 510 (delta 344), pack-reused 11867\u001b[K\n", - "Receiving objects: 100% (12544/12544), 42.42 MiB | 30.27 MiB/s, done.\n", - "Resolving deltas: 100% (8980/8980), done.\n", - "/content/mmaction2\n", - "Obtaining file:///content/mmaction2\n", - "Requirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.16.0) (3.2.2)\n", - "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.16.0) (1.19.5)\n", - "Requirement already satisfied: opencv-contrib-python in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.16.0) (4.1.2.30)\n", - "Requirement 
already satisfied: Pillow in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.16.0) (7.1.2)\n", - "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.16.0) (2.8.1)\n", - "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.16.0) (0.10.0)\n", - "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.16.0) (1.3.1)\n", - "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.16.0) (2.4.7)\n", - "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.7/dist-packages (from python-dateutil>=2.1->matplotlib->mmaction2==0.16.0) (1.15.0)\n", - "Installing collected packages: mmaction2\n", - " Running setup.py develop for mmaction2\n", - "Successfully installed mmaction2\n", - "Collecting av\n", - "\u001b[?25l Downloading https://files.pythonhosted.org/packages/66/ff/bacde7314c646a2bd2f240034809a10cc3f8b096751284d0828640fff3dd/av-8.0.3-cp37-cp37m-manylinux2010_x86_64.whl (37.2MB)\n", - "\u001b[K |████████████████████████████████| 37.2MB 76kB/s \n", - "\u001b[?25hCollecting decord>=0.4.1\n", - "\u001b[?25l Downloading https://files.pythonhosted.org/packages/11/79/936af42edf90a7bd4e41a6cac89c913d4b47fa48a26b042d5129a9242ee3/decord-0.6.0-py3-none-manylinux2010_x86_64.whl (13.6MB)\n", - "\u001b[K |████████████████████████████████| 13.6MB 231kB/s \n", - "\u001b[?25hCollecting einops\n", - " Downloading https://files.pythonhosted.org/packages/5d/a0/9935e030634bf60ecd572c775f64ace82ceddf2f504a5fd3902438f07090/einops-0.3.0-py2.py3-none-any.whl\n", - "Requirement already satisfied: imgaug in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 4)) (0.2.9)\n", - "Requirement already satisfied: librosa in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 5)) (0.8.1)\n", - "Requirement already satisfied: lmdb in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 6)) (0.99)\n", - "Requirement already satisfied: moviepy in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 7)) (0.2.3.5)\n", - "Collecting onnx\n", - "\u001b[?25l Downloading https://files.pythonhosted.org/packages/3f/9b/54c950d3256e27f970a83cd0504efb183a24312702deed0179453316dbd0/onnx-1.9.0-cp37-cp37m-manylinux2010_x86_64.whl (12.2MB)\n", - "\u001b[K |████████████████████████████████| 12.2MB 36.2MB/s \n", - "\u001b[?25hCollecting onnxruntime\n", - "\u001b[?25l Downloading https://files.pythonhosted.org/packages/c9/35/80ab6f444a83c708817e011e9cd4708c816591cc85aff830dff525a34992/onnxruntime-1.8.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (4.5MB)\n", - "\u001b[K |████████████████████████████████| 4.5MB 29.5MB/s \n", - "\u001b[?25hCollecting pims\n", - "\u001b[?25l Downloading https://files.pythonhosted.org/packages/d5/47/82e0ac31e01a271e5a06362fbf03769e9081956f6772f91d98b32899d743/PIMS-0.5.tar.gz (85kB)\n", - "\u001b[K |████████████████████████████████| 92kB 13.1MB/s \n", - "\u001b[?25hCollecting PyTurboJPEG\n", - " Downloading https://files.pythonhosted.org/packages/f9/7b/7621780391ed7a33acec8e803068d7291d940fbbad1ffc8909e94e844477/PyTurboJPEG-1.5.1.tar.gz\n", - "Collecting timm\n", - "\u001b[?25l Downloading 
https://files.pythonhosted.org/packages/90/fc/606bc5cf46acac3aa9bd179b3954433c026aaf88ea98d6b19f5d14c336da/timm-0.4.12-py3-none-any.whl (376kB)\n", - "\u001b[K |████████████████████████████████| 378kB 43.1MB/s \n", - "\u001b[?25hRequirement already satisfied: numpy>=1.14.0 in /usr/local/lib/python3.7/dist-packages (from decord>=0.4.1->-r requirements/optional.txt (line 2)) (1.19.5)\n", - "Requirement already satisfied: Pillow in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (7.1.2)\n", - "Requirement already satisfied: scikit-image>=0.11.0 in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (0.16.2)\n", - "Requirement already satisfied: six in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (1.15.0)\n", - "Requirement already satisfied: imageio in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (2.4.1)\n", - "Requirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (3.2.2)\n", - "Requirement already satisfied: opencv-python in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (4.1.2.30)\n", - "Requirement already satisfied: Shapely in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (1.7.1)\n", - "Requirement already satisfied: scipy in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 4)) (1.4.1)\n", - "Requirement already satisfied: resampy>=0.2.2 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (0.2.2)\n", - "Requirement already satisfied: pooch>=1.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (1.4.0)\n", - "Requirement already satisfied: numba>=0.43.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (0.51.2)\n", - "Requirement already satisfied: audioread>=2.0.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (2.1.9)\n", - "Requirement already satisfied: soundfile>=0.10.2 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (0.10.3.post1)\n", - "Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (20.9)\n", - "Requirement already satisfied: joblib>=0.14 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (1.0.1)\n", - "Requirement already satisfied: decorator>=3.0.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (4.4.2)\n", - "Requirement already satisfied: scikit-learn!=0.19.0,>=0.14.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 5)) (0.22.2.post1)\n", - "Requirement already satisfied: tqdm<5.0,>=4.11.2 in /usr/local/lib/python3.7/dist-packages (from moviepy->-r requirements/optional.txt (line 7)) (4.41.1)\n", - "Requirement already satisfied: protobuf in /usr/local/lib/python3.7/dist-packages (from onnx->-r requirements/optional.txt (line 8)) (3.17.3)\n", - "Requirement already satisfied: typing-extensions>=3.6.2.1 in /usr/local/lib/python3.7/dist-packages (from onnx->-r requirements/optional.txt (line 8)) (3.7.4.3)\n", - "Requirement already 
satisfied: flatbuffers in /usr/local/lib/python3.7/dist-packages (from onnxruntime->-r requirements/optional.txt (line 9)) (1.12)\n", - "Collecting slicerator>=0.9.8\n", - " Downloading https://files.pythonhosted.org/packages/75/ae/fe46f5371105508a209fe6162e7e7b11db531a79d2eabcd24566b8b1f534/slicerator-1.0.0-py3-none-any.whl\n", - "Requirement already satisfied: torchvision in /usr/local/lib/python3.7/dist-packages (from timm->-r requirements/optional.txt (line 12)) (0.9.0+cu101)\n", - "Requirement already satisfied: torch>=1.4 in /usr/local/lib/python3.7/dist-packages (from timm->-r requirements/optional.txt (line 12)) (1.8.0+cu101)\n", - "Requirement already satisfied: networkx>=2.0 in /usr/local/lib/python3.7/dist-packages (from scikit-image>=0.11.0->imgaug->-r requirements/optional.txt (line 4)) (2.5.1)\n", - "Requirement already satisfied: PyWavelets>=0.4.0 in /usr/local/lib/python3.7/dist-packages (from scikit-image>=0.11.0->imgaug->-r requirements/optional.txt (line 4)) (1.1.1)\n", - "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 4)) (0.10.0)\n", - "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 4)) (2.8.1)\n", - "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 4)) (2.4.7)\n", - "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 4)) (1.3.1)\n", - "Requirement already satisfied: requests in /usr/local/lib/python3.7/dist-packages (from pooch>=1.0->librosa->-r requirements/optional.txt (line 5)) (2.23.0)\n", - "Requirement already satisfied: appdirs in /usr/local/lib/python3.7/dist-packages (from pooch>=1.0->librosa->-r requirements/optional.txt (line 5)) (1.4.4)\n", - "Requirement already satisfied: llvmlite<0.35,>=0.34.0.dev0 in /usr/local/lib/python3.7/dist-packages (from numba>=0.43.0->librosa->-r requirements/optional.txt (line 5)) (0.34.0)\n", - "Requirement already satisfied: setuptools in /usr/local/lib/python3.7/dist-packages (from numba>=0.43.0->librosa->-r requirements/optional.txt (line 5)) (57.0.0)\n", - "Requirement already satisfied: cffi>=1.0 in /usr/local/lib/python3.7/dist-packages (from soundfile>=0.10.2->librosa->-r requirements/optional.txt (line 5)) (1.14.5)\n", - "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests->pooch>=1.0->librosa->-r requirements/optional.txt (line 5)) (2.10)\n", - "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests->pooch>=1.0->librosa->-r requirements/optional.txt (line 5)) (2021.5.30)\n", - "Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests->pooch>=1.0->librosa->-r requirements/optional.txt (line 5)) (3.0.4)\n", - "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests->pooch>=1.0->librosa->-r requirements/optional.txt (line 5)) (1.24.3)\n", - "Requirement already satisfied: pycparser in /usr/local/lib/python3.7/dist-packages (from cffi>=1.0->soundfile>=0.10.2->librosa->-r requirements/optional.txt (line 5)) (2.20)\n", - "Building wheels for collected packages: pims, 
PyTurboJPEG\n", - " Building wheel for pims (setup.py) ... \u001b[?25l\u001b[?25hdone\n", - " Created wheel for pims: filename=PIMS-0.5-cp37-none-any.whl size=84328 sha256=436632b7a982144fd933f01d12e38a419eb8a636f2d6dd4bd4a43680734979e2\n", - " Stored in directory: /root/.cache/pip/wheels/0e/0a/14/4c33a4cc1b9158e57329a38e8e3e03901ed24060eb322d5462\n", - " Building wheel for PyTurboJPEG (setup.py) ... \u001b[?25l\u001b[?25hdone\n", - " Created wheel for PyTurboJPEG: filename=PyTurboJPEG-1.5.1-cp37-none-any.whl size=7979 sha256=755337aaa622b48be036eca6d743e99bf4528fc6c64e810da11a71236a78bcca\n", - " Stored in directory: /root/.cache/pip/wheels/19/cb/78/5725c881ee618936d956bf0ecd4272cb0f701cb898f44575ca\n", - "Successfully built pims PyTurboJPEG\n", - "Installing collected packages: av, decord, einops, onnx, onnxruntime, slicerator, pims, PyTurboJPEG, timm\n", - "Successfully installed PyTurboJPEG-1.5.1 av-8.0.3 decord-0.6.0 einops-0.3.0 onnx-1.9.0 onnxruntime-1.8.1 pims-0.5 slicerator-1.0.0 timm-0.4.12\n" - ] - } - ], - "source": [ - "# install dependencies: (use cu101 because colab has CUDA 10.1)\n", - "!pip install -U torch==1.8.0+cu101 torchvision==0.9.0+cu101 torchtext==0.9.0 -f https://download.pytorch.org/whl/torch_stable.html\n", - "\n", - "# install mmcv-full thus we could use CUDA operators\n", - "!pip install mmcv-full==1.3.9 -f https://download.openmmlab.com/mmcv/dist/cu101/torch1.8.0/index.html\n", - "\n", - "# Install mmaction2\n", - "!rm -rf mmaction2\n", - "!git clone https://github.com/open-mmlab/mmaction2.git\n", - "%cd mmaction2\n", - "\n", - "!pip install -e .\n", - "\n", - "# Install some optional requirements\n", - "!pip install -r requirements/optional.txt" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" + "cell_type": "markdown", + "metadata": { + "id": "7LqHGkGEVqpm" + }, + "source": [ + "## Install MMAction2" + ] }, - "id": "No_zZAFpWC-a", - "outputId": "7e95038a-6f79-410b-adf6-0148bf8cc2fc" - }, - "outputs": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - "1.8.0+cu101 True\n", - "0.16.0\n", - "10.1\n", - "GCC 7.3\n" - ] - } - ], - "source": [ - "# Check Pytorch installation\n", - "import torch, torchvision\n", - "print(torch.__version__, torch.cuda.is_available())\n", - "\n", - "# Check MMAction2 installation\n", - "import mmaction\n", - "print(mmaction.__version__)\n", - "\n", - "# Check MMCV installation\n", - "from mmcv.ops import get_compiling_cuda_version, get_compiler_version\n", - "print(get_compiling_cuda_version())\n", - "print(get_compiler_version())" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "pXf7oV5DWdab" - }, - "source": [ - "## Perform inference with a MMAction2 recognizer\n", - "MMAction2 already provides high level APIs to do inference and training." 
- ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "Bf8PpPXtVvmg", + "outputId": "75519a17-cc0a-491f-98a1-f287b090cf82" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "nvcc: NVIDIA (R) Cuda compiler driver\n", + "Copyright (c) 2005-2020 NVIDIA Corporation\n", + "Built on Mon_Oct_12_20:09:46_PDT_2020\n", + "Cuda compilation tools, release 11.1, V11.1.105\n", + "Build cuda_11.1.TC455_06.29190527_0\n", + "gcc (Ubuntu 7.5.0-3ubuntu1~18.04) 7.5.0\n", + "Copyright (C) 2017 Free Software Foundation, Inc.\n", + "This is free software; see the source for copying conditions. There is NO\n", + "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n", + "\n" + ] + } + ], + "source": [ + "# Check nvcc version\n", + "!nvcc -V\n", + "# Check GCC version\n", + "!gcc --version" + ] }, - "id": "64CW6d_AaT-Q", - "outputId": "d08bfb9b-ab1e-451b-d3b2-89023a59766b" - }, - "outputs": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - "--2021-07-11 12:44:00-- https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth\n", - "Resolving download.openmmlab.com (download.openmmlab.com)... 47.88.36.78\n", - "Connecting to download.openmmlab.com (download.openmmlab.com)|47.88.36.78|:443... connected.\n", - "HTTP request sent, awaiting response... 200 OK\n", - "Length: 97579339 (93M) [application/octet-stream]\n", - "Saving to: ‘checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth’\n", - "\n", - "checkpoints/tsn_r50 100%[===================>] 93.06M 11.4MB/s in 8.1s \n", - "\n", - "2021-07-11 12:44:09 (11.4 MB/s) - ‘checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth’ saved [97579339/97579339]\n", - "\n" - ] - } - ], - "source": [ - "!mkdir checkpoints\n", - "!wget -c https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \\\n", - " -O checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" + "cell_type": "code", + "execution_count": 2, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "5PAJ4ArzV5Ry", + "outputId": "992b30c2-8281-4198-97c8-df2a287b0ae8" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Looking in links: https://download.pytorch.org/whl/torch_stable.html\n", + "Collecting torch==1.8.0+cu101\n", + " Downloading https://download.pytorch.org/whl/cu101/torch-1.8.0%2Bcu101-cp37-cp37m-linux_x86_64.whl (763.5 MB)\n", + "\u001b[K |████████████████████████████████| 763.5 MB 15 kB/s \n", + "\u001b[?25hCollecting torchvision==0.9.0+cu101\n", + " Downloading https://download.pytorch.org/whl/cu101/torchvision-0.9.0%2Bcu101-cp37-cp37m-linux_x86_64.whl (17.3 MB)\n", + "\u001b[K |████████████████████████████████| 17.3 MB 983 kB/s \n", + "\u001b[?25hCollecting torchtext==0.9.0\n", + " Downloading torchtext-0.9.0-cp37-cp37m-manylinux1_x86_64.whl (7.1 MB)\n", + "\u001b[K |████████████████████████████████| 7.1 MB 10.9 MB/s \n", + "\u001b[?25hCollecting torchaudio==0.8.0\n", + " Downloading 
torchaudio-0.8.0-cp37-cp37m-manylinux1_x86_64.whl (1.9 MB)\n", + "\u001b[K |████████████████████████████████| 1.9 MB 46.6 MB/s \n", + "\u001b[?25hRequirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from torch==1.8.0+cu101) (1.21.5)\n", + "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from torch==1.8.0+cu101) (3.10.0.2)\n", + "Requirement already satisfied: pillow>=4.1.1 in /usr/local/lib/python3.7/dist-packages (from torchvision==0.9.0+cu101) (7.1.2)\n", + "Requirement already satisfied: tqdm in /usr/local/lib/python3.7/dist-packages (from torchtext==0.9.0) (4.62.3)\n", + "Requirement already satisfied: requests in /usr/local/lib/python3.7/dist-packages (from torchtext==0.9.0) (2.23.0)\n", + "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (1.24.3)\n", + "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (2.10)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (2021.10.8)\n", + "Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (3.0.4)\n", + "Installing collected packages: torch, torchvision, torchtext, torchaudio\n", + " Attempting uninstall: torch\n", + " Found existing installation: torch 1.10.0+cu111\n", + " Uninstalling torch-1.10.0+cu111:\n", + " Successfully uninstalled torch-1.10.0+cu111\n", + " Attempting uninstall: torchvision\n", + " Found existing installation: torchvision 0.11.1+cu111\n", + " Uninstalling torchvision-0.11.1+cu111:\n", + " Successfully uninstalled torchvision-0.11.1+cu111\n", + " Attempting uninstall: torchtext\n", + " Found existing installation: torchtext 0.11.0\n", + " Uninstalling torchtext-0.11.0:\n", + " Successfully uninstalled torchtext-0.11.0\n", + " Attempting uninstall: torchaudio\n", + " Found existing installation: torchaudio 0.10.0+cu111\n", + " Uninstalling torchaudio-0.10.0+cu111:\n", + " Successfully uninstalled torchaudio-0.10.0+cu111\n", + "Successfully installed torch-1.8.0+cu101 torchaudio-0.8.0 torchtext-0.9.0 torchvision-0.9.0+cu101\n", + "Looking in links: https://download.openmmlab.com/mmcv/dist/cu101/torch1.8.0/index.html\n", + "Collecting mmcv-full\n", + " Downloading https://download.openmmlab.com/mmcv/dist/cu101/torch1.8.0/mmcv_full-1.4.5-cp37-cp37m-manylinux1_x86_64.whl (60.7 MB)\n", + "\u001b[K |████████████████████████████████| 60.7 MB 66 kB/s \n", + "\u001b[?25hCollecting addict\n", + " Downloading addict-2.4.0-py3-none-any.whl (3.8 kB)\n", + "Requirement already satisfied: Pillow in /usr/local/lib/python3.7/dist-packages (from mmcv-full) (7.1.2)\n", + "Requirement already satisfied: pyyaml in /usr/local/lib/python3.7/dist-packages (from mmcv-full) (3.13)\n", + "Collecting yapf\n", + " Downloading yapf-0.32.0-py2.py3-none-any.whl (190 kB)\n", + "\u001b[K |████████████████████████████████| 190 kB 15.6 MB/s \n", + "\u001b[?25hRequirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from mmcv-full) (1.21.5)\n", + "Requirement already satisfied: packaging in /usr/local/lib/python3.7/dist-packages (from mmcv-full) (21.3)\n", + "Requirement already satisfied: opencv-python>=3 in /usr/local/lib/python3.7/dist-packages (from mmcv-full) (4.1.2.30)\n", + "Requirement already satisfied: pyparsing!=3.0.5,>=2.0.2 in 
/usr/local/lib/python3.7/dist-packages (from packaging->mmcv-full) (3.0.7)\n", + "Installing collected packages: yapf, addict, mmcv-full\n", + "Successfully installed addict-2.4.0 mmcv-full-1.4.5 yapf-0.32.0\n", + "Cloning into 'mmaction2'...\n", + "remote: Enumerating objects: 15036, done.\u001b[K\n", + "remote: Counting objects: 100% (233/233), done.\u001b[K\n", + "remote: Compressing objects: 100% (192/192), done.\u001b[K\n", + "remote: Total 15036 (delta 86), reused 72 (delta 41), pack-reused 14803\u001b[K\n", + "Receiving objects: 100% (15036/15036), 49.25 MiB | 25.23 MiB/s, done.\n", + "Resolving deltas: 100% (10608/10608), done.\n", + "/content/mmaction2\n", + "Obtaining file:///content/mmaction2\n", + "Collecting decord>=0.4.1\n", + " Downloading decord-0.6.0-py3-none-manylinux2010_x86_64.whl (13.6 MB)\n", + "\u001b[K |████████████████████████████████| 13.6 MB 10.2 MB/s \n", + "\u001b[?25hCollecting einops\n", + " Downloading einops-0.4.0-py3-none-any.whl (28 kB)\n", + "Requirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.21.0) (3.2.2)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.21.0) (1.21.5)\n", + "Requirement already satisfied: opencv-contrib-python in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.21.0) (4.1.2.30)\n", + "Requirement already satisfied: Pillow in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.21.0) (7.1.2)\n", + "Requirement already satisfied: scipy in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.21.0) (1.4.1)\n", + "Requirement already satisfied: torch>=1.3 in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.21.0) (1.8.0+cu101)\n", + "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from torch>=1.3->mmaction2==0.21.0) (3.10.0.2)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.21.0) (1.3.2)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.21.0) (2.8.2)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.21.0) (3.0.7)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.21.0) (0.11.0)\n", + "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.7/dist-packages (from python-dateutil>=2.1->matplotlib->mmaction2==0.21.0) (1.15.0)\n", + "Installing collected packages: einops, decord, mmaction2\n", + " Running setup.py develop for mmaction2\n", + "Successfully installed decord-0.6.0 einops-0.4.0 mmaction2-0.21.0\n", + "Collecting av\n", + " Downloading av-8.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (36.1 MB)\n", + "\u001b[K |████████████████████████████████| 36.1 MB 298 kB/s \n", + "\u001b[?25hRequirement already satisfied: imgaug in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 2)) (0.2.9)\n", + "Requirement already satisfied: librosa in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 3)) (0.8.1)\n", + "Requirement already satisfied: lmdb in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 4)) (0.99)\n", + "Requirement already satisfied: moviepy in /usr/local/lib/python3.7/dist-packages (from -r 
requirements/optional.txt (line 5)) (0.2.3.5)\n", + "Collecting onnx\n", + " Downloading onnx-1.11.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl (12.8 MB)\n", + "\u001b[K |████████████████████████████████| 12.8 MB 52.3 MB/s \n", + "\u001b[?25hCollecting onnxruntime\n", + " Downloading onnxruntime-1.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (4.9 MB)\n", + "\u001b[K |████████████████████████████████| 4.9 MB 51.6 MB/s \n", + "\u001b[?25hCollecting pims\n", + " Downloading PIMS-0.5.tar.gz (85 kB)\n", + "\u001b[K |████████████████████████████████| 85 kB 5.2 MB/s \n", + "\u001b[?25hCollecting PyTurboJPEG\n", + " Downloading PyTurboJPEG-1.6.5.tar.gz (11 kB)\n", + "Collecting timm\n", + " Downloading timm-0.5.4-py3-none-any.whl (431 kB)\n", + "\u001b[K |████████████████████████████████| 431 kB 64.7 MB/s \n", + "\u001b[?25hRequirement already satisfied: Pillow in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 2)) (7.1.2)\n", + "Requirement already satisfied: numpy>=1.15.0 in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 2)) (1.21.5)\n", + "Requirement already satisfied: scikit-image>=0.11.0 in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 2)) (0.18.3)\n", + "Requirement already satisfied: imageio in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 2)) (2.4.1)\n", + "Requirement already satisfied: opencv-python in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 2)) (4.1.2.30)\n", + "Requirement already satisfied: Shapely in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 2)) (1.8.0)\n", + "Requirement already satisfied: scipy in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 2)) (1.4.1)\n", + "Requirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 2)) (3.2.2)\n", + "Requirement already satisfied: six in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 2)) (1.15.0)\n", + "Requirement already satisfied: PyWavelets>=1.1.1 in /usr/local/lib/python3.7/dist-packages (from scikit-image>=0.11.0->imgaug->-r requirements/optional.txt (line 2)) (1.2.0)\n", + "Requirement already satisfied: tifffile>=2019.7.26 in /usr/local/lib/python3.7/dist-packages (from scikit-image>=0.11.0->imgaug->-r requirements/optional.txt (line 2)) (2021.11.2)\n", + "Requirement already satisfied: networkx>=2.0 in /usr/local/lib/python3.7/dist-packages (from scikit-image>=0.11.0->imgaug->-r requirements/optional.txt (line 2)) (2.6.3)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 2)) (1.3.2)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 2)) (0.11.0)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 2)) (2.8.2)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 2)) (3.0.7)\n", + "Requirement already satisfied: packaging>=20.0 in 
/usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 3)) (21.3)\n", + "Requirement already satisfied: numba>=0.43.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 3)) (0.51.2)\n", + "Requirement already satisfied: resampy>=0.2.2 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 3)) (0.2.2)\n", + "Requirement already satisfied: decorator>=3.0.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 3)) (4.4.2)\n", + "Requirement already satisfied: soundfile>=0.10.2 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 3)) (0.10.3.post1)\n", + "Requirement already satisfied: scikit-learn!=0.19.0,>=0.14.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 3)) (1.0.2)\n", + "Requirement already satisfied: joblib>=0.14 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 3)) (1.1.0)\n", + "Requirement already satisfied: pooch>=1.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 3)) (1.6.0)\n", + "Requirement already satisfied: audioread>=2.0.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 3)) (2.1.9)\n", + "Requirement already satisfied: setuptools in /usr/local/lib/python3.7/dist-packages (from numba>=0.43.0->librosa->-r requirements/optional.txt (line 3)) (57.4.0)\n", + "Requirement already satisfied: llvmlite<0.35,>=0.34.0.dev0 in /usr/local/lib/python3.7/dist-packages (from numba>=0.43.0->librosa->-r requirements/optional.txt (line 3)) (0.34.0)\n", + "Requirement already satisfied: requests>=2.19.0 in /usr/local/lib/python3.7/dist-packages (from pooch>=1.0->librosa->-r requirements/optional.txt (line 3)) (2.23.0)\n", + "Requirement already satisfied: appdirs>=1.3.0 in /usr/local/lib/python3.7/dist-packages (from pooch>=1.0->librosa->-r requirements/optional.txt (line 3)) (1.4.4)\n", + "Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests>=2.19.0->pooch>=1.0->librosa->-r requirements/optional.txt (line 3)) (3.0.4)\n", + "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests>=2.19.0->pooch>=1.0->librosa->-r requirements/optional.txt (line 3)) (1.24.3)\n", + "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests>=2.19.0->pooch>=1.0->librosa->-r requirements/optional.txt (line 3)) (2.10)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests>=2.19.0->pooch>=1.0->librosa->-r requirements/optional.txt (line 3)) (2021.10.8)\n", + "Requirement already satisfied: threadpoolctl>=2.0.0 in /usr/local/lib/python3.7/dist-packages (from scikit-learn!=0.19.0,>=0.14.0->librosa->-r requirements/optional.txt (line 3)) (3.1.0)\n", + "Requirement already satisfied: cffi>=1.0 in /usr/local/lib/python3.7/dist-packages (from soundfile>=0.10.2->librosa->-r requirements/optional.txt (line 3)) (1.15.0)\n", + "Requirement already satisfied: pycparser in /usr/local/lib/python3.7/dist-packages (from cffi>=1.0->soundfile>=0.10.2->librosa->-r requirements/optional.txt (line 3)) (2.21)\n", + "Requirement already satisfied: tqdm<5.0,>=4.11.2 in /usr/local/lib/python3.7/dist-packages (from moviepy->-r requirements/optional.txt (line 
5)) (4.62.3)\n", + "Requirement already satisfied: typing-extensions>=3.6.2.1 in /usr/local/lib/python3.7/dist-packages (from onnx->-r requirements/optional.txt (line 6)) (3.10.0.2)\n", + "Requirement already satisfied: protobuf>=3.12.2 in /usr/local/lib/python3.7/dist-packages (from onnx->-r requirements/optional.txt (line 6)) (3.17.3)\n", + "Requirement already satisfied: flatbuffers in /usr/local/lib/python3.7/dist-packages (from onnxruntime->-r requirements/optional.txt (line 7)) (2.0)\n", + "Collecting slicerator>=0.9.8\n", + " Downloading slicerator-1.0.0-py3-none-any.whl (9.3 kB)\n", + "Requirement already satisfied: torch>=1.4 in /usr/local/lib/python3.7/dist-packages (from timm->-r requirements/optional.txt (line 10)) (1.8.0+cu101)\n", + "Requirement already satisfied: torchvision in /usr/local/lib/python3.7/dist-packages (from timm->-r requirements/optional.txt (line 10)) (0.9.0+cu101)\n", + "Building wheels for collected packages: pims, PyTurboJPEG\n", + " Building wheel for pims (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for pims: filename=PIMS-0.5-py3-none-any.whl size=84325 sha256=acdeb0697c66e2b9cc49a549f9a3c67a35b36642e6724eeac9795e25e6d9de47\n", + " Stored in directory: /root/.cache/pip/wheels/75/02/a9/86571c38081ba4c1832eb95430b5d588dfa15a738e2a603737\n", + " Building wheel for PyTurboJPEG (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for PyTurboJPEG: filename=PyTurboJPEG-1.6.5-py3-none-any.whl size=12160 sha256=b5fffd01e16b4d2a1d2f4e1cd976501c1e3ea1b3872f91bf595f6c025735a4e0\n", + " Stored in directory: /root/.cache/pip/wheels/1b/6a/97/17286b24cd97dda462b5a886107f8663f1ccc7705f148b3850\n", + "Successfully built pims PyTurboJPEG\n", + "Installing collected packages: slicerator, timm, PyTurboJPEG, pims, onnxruntime, onnx, av\n", + "Successfully installed PyTurboJPEG-1.6.5 av-8.1.0 onnx-1.11.0 onnxruntime-1.10.0 pims-0.5 slicerator-1.0.0 timm-0.5.4\n" + ] + } + ], + "source": [ + "# install dependencies: (use cu101 because colab has CUDA 10.1)\n", + "!pip install -U torch==1.8.0+cu101 torchvision==0.9.0+cu101 torchtext==0.9.0 torchaudio==0.8.0 -f https://download.pytorch.org/whl/torch_stable.html\n", + "\n", + "# install mmcv-full thus we could use CUDA operators\n", + "!pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cu101/torch1.8.0/index.html\n", + "\n", + "# Install mmaction2\n", + "!rm -rf mmaction2\n", + "!git clone https://github.com/open-mmlab/mmaction2.git\n", + "%cd mmaction2\n", + "\n", + "!pip install -e .\n", + "\n", + "# Install some optional requirements\n", + "!pip install -r requirements/optional.txt" + ] }, - "id": "HNZB7NoSabzj", - "outputId": "b2f9bd71-1490-44d3-81c6-5037d804f0b1" - }, - "outputs": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - "Use load_from_local loader\n" - ] - } - ], - "source": [ - "from mmaction.apis import inference_recognizer, init_recognizer\n", - "\n", - "# Choose to use a config and initialize the recognizer\n", - "config = 'configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py'\n", - "# Setup a checkpoint file to load\n", - "checkpoint = 'checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n", - "# Initialize the recognizer\n", - "model = init_recognizer(config, checkpoint, device='cuda:0')" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": { - "id": "rEMsBnpHapAn" - }, - "outputs": [], - "source": [ - "# Use the recognizer to do inference\n", - "video = 'demo/demo.mp4'\n", - "label = 
'tools/data/kinetics/label_map_k400.txt'\n", -    "results = inference_recognizer(model, video)\n", -    "\n", -    "labels = open(label).readlines()\n", -    "labels = [x.strip() for x in labels]\n", -    "results = [(labels[k[0]], k[1]) for k in results]" -   ] -  }, -  { -   "cell_type": "code", -   "execution_count": 7, -   "metadata": { -    "colab": { -     "base_uri": "https://localhost:8080/" -    }, -    "id": "NIyJXqfWathq", -    "outputId": "ca24528b-f99d-414a-fa50-456f6068b463" -   }, -   "outputs": [ -    { -     "name": "stdout", -     "output_type": "stream", -     "text": [ -      "arm wrestling: 29.616438\n", -      "rock scissors paper: 10.754841\n", -      "shaking hands: 9.908401\n", -      "clapping: 9.189913\n", -      "massaging feet: 8.305307\n" -     ] -    } -   ], -   "source": [ -    "# Let's show the results\n", -    "for result in results:\n", -    "    print(f'{result[0]}: ', result[1])" -   ] -  }, -  { -   "cell_type": "markdown", -   "metadata": { -    "id": "QuZG8kZ2fJ5d" -   }, -   "source": [ -    "## Train a recognizer on customized dataset\n", -    "\n", -    "To train a new recognizer, there are usually three things to do:\n", -    "1. Support a new dataset\n", -    "2. Modify the config\n", -    "3. Train a new recognizer" -   ] -  }, -  { -   "cell_type": "markdown", -   "metadata": { -    "id": "neEFyxChfgiJ" -   }, -   "source": [ -    "### Support a new dataset\n", -    "\n", -    "In this tutorial, we give an example of how to convert the data into the format of existing datasets. Other methods and more advanced usages can be found in the [doc](/docs/tutorials/new_dataset.md).\n", -    "\n", -    "Firstly, let's download a tiny dataset obtained from [Kinetics-400](https://deepmind.com/research/open-source/open-source-datasets/kinetics/). We select 30 videos with their labels as train dataset and 10 videos with their labels as test dataset." -   ] -  }, -  { -   "cell_type": "code", -   "execution_count": 8, -   "metadata": { -    "colab": { -     "base_uri": "https://localhost:8080/" -    }, -    "id": "gjsUj9JzgUlJ", -    "outputId": "61c4704d-db81-4ca5-ed16-e2454dbdfe8e" -   }, -   "outputs": [ -    { -     "name": "stdout", -     "output_type": "stream", -     "text": [ -      "rm: cannot remove 'kinetics400_tiny.zip*': No such file or directory\n", -      "--2021-07-11 12:44:29--  https://download.openmmlab.com/mmaction/kinetics400_tiny.zip\n", -      "Resolving download.openmmlab.com (download.openmmlab.com)... 47.88.36.78\n", -      "Connecting to download.openmmlab.com (download.openmmlab.com)|47.88.36.78|:443... connected.\n", -      "HTTP request sent, awaiting response... 
200 OK\n", - "Length: 18308682 (17M) [application/zip]\n", - "Saving to: ‘kinetics400_tiny.zip’\n", - "\n", - "kinetics400_tiny.zi 100%[===================>] 17.46M 10.7MB/s in 1.6s \n", - "\n", - "2021-07-11 12:44:31 (10.7 MB/s) - ‘kinetics400_tiny.zip’ saved [18308682/18308682]\n", - "\n" - ] - } - ], - "source": [ - "# download, decompress the data\n", - "!rm kinetics400_tiny.zip*\n", - "!rm -rf kinetics400_tiny\n", - "!wget https://download.openmmlab.com/mmaction/kinetics400_tiny.zip\n", - "!unzip kinetics400_tiny.zip > /dev/null" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "64CW6d_AaT-Q", + "outputId": "d08bfb9b-ab1e-451b-d3b2-89023a59766b" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "--2021-07-11 12:44:00-- https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth\n", + "Resolving download.openmmlab.com (download.openmmlab.com)... 47.88.36.78\n", + "Connecting to download.openmmlab.com (download.openmmlab.com)|47.88.36.78|:443... connected.\n", + "HTTP request sent, awaiting response... 200 OK\n", + "Length: 97579339 (93M) [application/octet-stream]\n", + "Saving to: ‘checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth’\n", + "\n", + "checkpoints/tsn_r50 100%[===================>] 93.06M 11.4MB/s in 8.1s \n", + "\n", + "2021-07-11 12:44:09 (11.4 MB/s) - ‘checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth’ saved [97579339/97579339]\n", + "\n" + ] + } + ], + "source": [ + "!mkdir checkpoints\n", + "!wget -c https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \\\n", + " -O checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth" + ] }, - "id": "AbZ-o7V6hNw4", - "outputId": "b091909c-def2-49b5-88c2-01b00802b162" - }, - "outputs": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - "Reading package lists...\n", - "Building dependency tree...\n", - "Reading state information...\n", - "The following NEW packages will be installed:\n", - " tree\n", - "0 upgraded, 1 newly installed, 0 to remove and 39 not upgraded.\n", - "Need to get 40.7 kB of archives.\n", - "After this operation, 105 kB of additional disk space will be used.\n", - "Get:1 http://archive.ubuntu.com/ubuntu bionic/universe amd64 tree amd64 1.7.0-5 [40.7 kB]\n", - "Fetched 40.7 kB in 0s (88.7 kB/s)\n", - "Selecting previously unselected package tree.\n", - "(Reading database ... 
160815 files and directories currently installed.)\n", - "Preparing to unpack .../tree_1.7.0-5_amd64.deb ...\n", - "Unpacking tree (1.7.0-5) ...\n", - "Setting up tree (1.7.0-5) ...\n", - "Processing triggers for man-db (2.8.3-2ubuntu0.1) ...\n", - "kinetics400_tiny\n", - "├── kinetics_tiny_train_video.txt\n", - "├── kinetics_tiny_val_video.txt\n", - "├── train\n", - "│   ├── 27_CSXByd3s.mp4\n", - "│   ├── 34XczvTaRiI.mp4\n", - "│   ├── A-wiliK50Zw.mp4\n", - "│   ├── D32_1gwq35E.mp4\n", - "│   ├── D92m0HsHjcQ.mp4\n", - "│   ├── DbX8mPslRXg.mp4\n", - "│   ├── FMlSTTpN3VY.mp4\n", - "│   ├── h10B9SVE-nk.mp4\n", - "│   ├── h2YqqUhnR34.mp4\n", - "│   ├── iRuyZSKhHRg.mp4\n", - "│   ├── IyfILH9lBRo.mp4\n", - "│   ├── kFC3KY2bOP8.mp4\n", - "│   ├── LvcFDgCAXQs.mp4\n", - "│   ├── O46YA8tI530.mp4\n", - "│   ├── oMrZaozOvdQ.mp4\n", - "│   ├── oXy-e_P_cAI.mp4\n", - "│   ├── P5M-hAts7MQ.mp4\n", - "│   ├── phDqGd0NKoo.mp4\n", - "│   ├── PnOe3GZRVX8.mp4\n", - "│   ├── R8HXQkdgKWA.mp4\n", - "│   ├── RqnKtCEoEcA.mp4\n", - "│   ├── soEcZZsBmDs.mp4\n", - "│   ├── TkkZPZHbAKA.mp4\n", - "│   ├── T_TMNGzVrDk.mp4\n", - "│   ├── WaS0qwP46Us.mp4\n", - "│   ├── Wh_YPQdH1Zg.mp4\n", - "│   ├── WWP5HZJsg-o.mp4\n", - "│   ├── xGY2dP0YUjA.mp4\n", - "│   ├── yLC9CtWU5ws.mp4\n", - "│   └── ZQV4U2KQ370.mp4\n", - "└── val\n", - " ├── 0pVGiAU6XEA.mp4\n", - " ├── AQrbRSnRt8M.mp4\n", - " ├── b6Q_b7vgc7Q.mp4\n", - " ├── ddvJ6-faICE.mp4\n", - " ├── IcLztCtvhb8.mp4\n", - " ├── ik4BW3-SCts.mp4\n", - " ├── jqRrH30V0k4.mp4\n", - " ├── SU_x2LQqSLs.mp4\n", - " ├── u4Rm6srmIS8.mp4\n", - " └── y5Iu7XkTqV0.mp4\n", - "\n", - "2 directories, 42 files\n" - ] - } - ], - "source": [ - "# Check the directory structure of the tiny data\n", - "\n", - "# Install tree first\n", - "!apt-get -q install tree\n", - "!tree kinetics400_tiny" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "HNZB7NoSabzj", + "outputId": "b2f9bd71-1490-44d3-81c6-5037d804f0b1" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Use load_from_local loader\n" + ] + } + ], + "source": [ + "from mmaction.apis import inference_recognizer, init_recognizer\n", + "\n", + "# Choose to use a config and initialize the recognizer\n", + "config = 'configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py'\n", + "# Setup a checkpoint file to load\n", + "checkpoint = 'checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n", + "# Initialize the recognizer\n", + "model = init_recognizer(config, checkpoint, device='cuda:0')" + ] }, - "id": "fTdi6dI0hY3g", - "outputId": "ffda0997-8d77-431a-d66e-2f273e80c756" - }, - "outputs": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - "D32_1gwq35E.mp4 0\n", - "iRuyZSKhHRg.mp4 1\n", - "oXy-e_P_cAI.mp4 0\n", - "34XczvTaRiI.mp4 1\n", - "h2YqqUhnR34.mp4 0\n", - "O46YA8tI530.mp4 0\n", - "kFC3KY2bOP8.mp4 1\n", - "WWP5HZJsg-o.mp4 1\n", - "phDqGd0NKoo.mp4 1\n", - "yLC9CtWU5ws.mp4 0\n", - "27_CSXByd3s.mp4 1\n", - "IyfILH9lBRo.mp4 1\n", - "T_TMNGzVrDk.mp4 1\n", - "TkkZPZHbAKA.mp4 0\n", - "PnOe3GZRVX8.mp4 1\n", - "soEcZZsBmDs.mp4 1\n", - "FMlSTTpN3VY.mp4 1\n", - "WaS0qwP46Us.mp4 0\n", - "A-wiliK50Zw.mp4 1\n", - "oMrZaozOvdQ.mp4 1\n", - "ZQV4U2KQ370.mp4 0\n", - "DbX8mPslRXg.mp4 1\n", - "h10B9SVE-nk.mp4 1\n", - "P5M-hAts7MQ.mp4 0\n", - "R8HXQkdgKWA.mp4 0\n", - 
"D92m0HsHjcQ.mp4 0\n", - "RqnKtCEoEcA.mp4 0\n", - "LvcFDgCAXQs.mp4 0\n", - "xGY2dP0YUjA.mp4 0\n", - "Wh_YPQdH1Zg.mp4 0\n" - ] - } - ], - "source": [ - "# After downloading the data, we need to check the annotation format\n", - "!cat kinetics400_tiny/kinetics_tiny_train_video.txt" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "0bq0mxmEi29H" - }, - "source": [ - "According to the format defined in [`VideoDataset`](./datasets/video_dataset.py), each line indicates a sample video with the filepath and label, which are split with a whitespace." - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "Ht_DGJA9jQar" - }, - "source": [ - "### Modify the config\n", - "\n", - "In the next step, we need to modify the config for the training.\n", - "To accelerate the process, we finetune a recognizer using a pre-trained recognizer." - ] - }, - { - "cell_type": "code", - "execution_count": 27, - "metadata": { - "id": "LjCcmCKOjktc" - }, - "outputs": [], - "source": [ - "from mmcv import Config\n", - "cfg = Config.fromfile('./configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py')" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "tc8YhFFGjp3e" - }, - "source": [ - "Given a config that trains a TSN model on kinetics400-full dataset, we need to modify some values to use it for training TSN on Kinetics400-tiny dataset.\n" - ] - }, - { - "cell_type": "code", - "execution_count": 28, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "rEMsBnpHapAn" + }, + "outputs": [], + "source": [ + "# Use the recognizer to do inference\n", + "video = 'demo/demo.mp4'\n", + "label = 'tools/data/kinetics/label_map_k400.txt'\n", + "results = inference_recognizer(model, video)\n", + "\n", + "labels = open(label).readlines()\n", + "labels = [x.strip() for x in labels]\n", + "results = [(labels[k[0]], k[1]) for k in results]" + ] }, - "id": "tlhu9byjjt-K", - "outputId": "3b9a3c49-ace0-41d3-dd15-d6c8579755f8" - }, - "outputs": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - "Config:\n", - "model = dict(\n", - " type='Recognizer2D',\n", - " backbone=dict(\n", - " type='ResNet',\n", - " pretrained='torchvision://resnet50',\n", - " depth=50,\n", - " norm_eval=False),\n", - " cls_head=dict(\n", - " type='TSNHead',\n", - " num_classes=2,\n", - " in_channels=2048,\n", - " spatial_type='avg',\n", - " consensus=dict(type='AvgConsensus', dim=1),\n", - " dropout_ratio=0.4,\n", - " init_std=0.01),\n", - " train_cfg=None,\n", - " test_cfg=dict(average_clips=None))\n", - "optimizer = dict(type='SGD', lr=7.8125e-05, momentum=0.9, weight_decay=0.0001)\n", - "optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2))\n", - "lr_config = dict(policy='step', step=[40, 80])\n", - "total_epochs = 10\n", - "checkpoint_config = dict(interval=5)\n", - "log_config = dict(interval=5, hooks=[dict(type='TextLoggerHook')])\n", - "dist_params = dict(backend='nccl')\n", - "log_level = 'INFO'\n", - "load_from = './checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n", - "resume_from = None\n", - "workflow = [('train', 1)]\n", - "dataset_type = 'VideoDataset'\n", - "data_root = 'kinetics400_tiny/train/'\n", - "data_root_val = 'kinetics400_tiny/val/'\n", - "ann_file_train = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n", - "ann_file_val = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", - "ann_file_test = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", - 
"img_norm_cfg = dict(\n", - " mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False)\n", - "train_pipeline = [\n", - " dict(type='DecordInit'),\n", - " dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8),\n", - " dict(type='DecordDecode'),\n", - " dict(\n", - " type='MultiScaleCrop',\n", - " input_size=224,\n", - " scales=(1, 0.875, 0.75, 0.66),\n", - " random_crop=False,\n", - " max_wh_scale_gap=1),\n", - " dict(type='Resize', scale=(224, 224), keep_ratio=False),\n", - " dict(type='Flip', flip_ratio=0.5),\n", - " dict(\n", - " type='Normalize',\n", - " mean=[123.675, 116.28, 103.53],\n", - " std=[58.395, 57.12, 57.375],\n", - " to_bgr=False),\n", - " dict(type='FormatShape', input_format='NCHW'),\n", - " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", - " dict(type='ToTensor', keys=['imgs', 'label'])\n", - "]\n", - "val_pipeline = [\n", - " dict(type='DecordInit'),\n", - " dict(\n", - " type='SampleFrames',\n", - " clip_len=1,\n", - " frame_interval=1,\n", - " num_clips=8,\n", - " test_mode=True),\n", - " dict(type='DecordDecode'),\n", - " dict(type='Resize', scale=(-1, 256)),\n", - " dict(type='CenterCrop', crop_size=224),\n", - " dict(\n", - " type='Normalize',\n", - " mean=[123.675, 116.28, 103.53],\n", - " std=[58.395, 57.12, 57.375],\n", - " to_bgr=False),\n", - " dict(type='FormatShape', input_format='NCHW'),\n", - " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", - " dict(type='ToTensor', keys=['imgs'])\n", - "]\n", - "test_pipeline = [\n", - " dict(type='DecordInit'),\n", - " dict(\n", - " type='SampleFrames',\n", - " clip_len=1,\n", - " frame_interval=1,\n", - " num_clips=25,\n", - " test_mode=True),\n", - " dict(type='DecordDecode'),\n", - " dict(type='Resize', scale=(-1, 256)),\n", - " dict(type='ThreeCrop', crop_size=256),\n", - " dict(\n", - " type='Normalize',\n", - " mean=[123.675, 116.28, 103.53],\n", - " std=[58.395, 57.12, 57.375],\n", - " to_bgr=False),\n", - " dict(type='FormatShape', input_format='NCHW'),\n", - " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", - " dict(type='ToTensor', keys=['imgs'])\n", - "]\n", - "data = dict(\n", - " videos_per_gpu=2,\n", - " workers_per_gpu=2,\n", - " train=dict(\n", - " type='VideoDataset',\n", - " ann_file='kinetics400_tiny/kinetics_tiny_train_video.txt',\n", - " data_prefix='kinetics400_tiny/train/',\n", - " pipeline=[\n", - " dict(type='DecordInit'),\n", - " dict(\n", - " type='SampleFrames', clip_len=1, frame_interval=1,\n", - " num_clips=8),\n", - " dict(type='DecordDecode'),\n", - " dict(\n", - " type='MultiScaleCrop',\n", - " input_size=224,\n", - " scales=(1, 0.875, 0.75, 0.66),\n", - " random_crop=False,\n", - " max_wh_scale_gap=1),\n", - " dict(type='Resize', scale=(224, 224), keep_ratio=False),\n", - " dict(type='Flip', flip_ratio=0.5),\n", - " dict(\n", - " type='Normalize',\n", - " mean=[123.675, 116.28, 103.53],\n", - " std=[58.395, 57.12, 57.375],\n", - " to_bgr=False),\n", - " dict(type='FormatShape', input_format='NCHW'),\n", - " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", - " dict(type='ToTensor', keys=['imgs', 'label'])\n", - " ]),\n", - " val=dict(\n", - " type='VideoDataset',\n", - " ann_file='kinetics400_tiny/kinetics_tiny_val_video.txt',\n", - " data_prefix='kinetics400_tiny/val/',\n", - " pipeline=[\n", - " dict(type='DecordInit'),\n", - " dict(\n", - " type='SampleFrames',\n", - " clip_len=1,\n", - " frame_interval=1,\n", - " num_clips=8,\n", - " test_mode=True),\n", - " dict(type='DecordDecode'),\n", 
- "            dict(type='Resize', scale=(-1, 256)),\n", -    "            dict(type='CenterCrop', crop_size=224),\n", -    "            dict(\n", -    "                type='Normalize',\n", -    "                mean=[123.675, 116.28, 103.53],\n", -    "                std=[58.395, 57.12, 57.375],\n", -    "                to_bgr=False),\n", -    "            dict(type='FormatShape', input_format='NCHW'),\n", -    "            dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", -    "            dict(type='ToTensor', keys=['imgs'])\n", -    "        ]),\n", -    "    test=dict(\n", -    "        type='VideoDataset',\n", -    "        ann_file='kinetics400_tiny/kinetics_tiny_val_video.txt',\n", -    "        data_prefix='kinetics400_tiny/val/',\n", -    "        pipeline=[\n", -    "            dict(type='DecordInit'),\n", -    "            dict(\n", -    "                type='SampleFrames',\n", -    "                clip_len=1,\n", -    "                frame_interval=1,\n", -    "                num_clips=25,\n", -    "                test_mode=True),\n", -    "            dict(type='DecordDecode'),\n", -    "            dict(type='Resize', scale=(-1, 256)),\n", -    "            dict(type='ThreeCrop', crop_size=256),\n", -    "            dict(\n", -    "                type='Normalize',\n", -    "                mean=[123.675, 116.28, 103.53],\n", -    "                std=[58.395, 57.12, 57.375],\n", -    "                to_bgr=False),\n", -    "            dict(type='FormatShape', input_format='NCHW'),\n", -    "            dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", -    "            dict(type='ToTensor', keys=['imgs'])\n", -    "        ]))\n", -    "evaluation = dict(\n", -    "    interval=5,\n", -    "    metrics=['top_k_accuracy', 'mean_class_accuracy'],\n", -    "    save_best='auto')\n", -    "work_dir = './tutorial_exps'\n", -    "omnisource = False\n", -    "seed = 0\n", -    "gpu_ids = range(0, 1)\n", -    "\n" -     ] -    } -   ], -   "source": [ -    "from mmcv.runner import set_random_seed\n", -    "\n", -    "# Modify dataset type and path\n", -    "cfg.dataset_type = 'VideoDataset'\n", -    "cfg.data_root = 'kinetics400_tiny/train/'\n", -    "cfg.data_root_val = 'kinetics400_tiny/val/'\n", -    "cfg.ann_file_train = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n", -    "cfg.ann_file_val = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", -    "cfg.ann_file_test = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", -    "\n", -    "cfg.data.test.type = 'VideoDataset'\n", -    "cfg.data.test.ann_file = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", -    "cfg.data.test.data_prefix = 'kinetics400_tiny/val/'\n", -    "\n", -    "cfg.data.train.type = 'VideoDataset'\n", -    "cfg.data.train.ann_file = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n", -    "cfg.data.train.data_prefix = 'kinetics400_tiny/train/'\n", -    "\n", -    "cfg.data.val.type = 'VideoDataset'\n", -    "cfg.data.val.ann_file = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", -    "cfg.data.val.data_prefix = 'kinetics400_tiny/val/'\n", -    "\n", -    "# The flag is used to determine whether it is omnisource training\n", -    "cfg.setdefault('omnisource', False)\n", -    "# Modify num classes of the model in cls_head\n", -    "cfg.model.cls_head.num_classes = 2\n", -    "# We can use the pre-trained TSN model\n", -    "cfg.load_from = './checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n", -    "\n", -    "# Set up working dir to save files and logs.\n", -    "cfg.work_dir = './tutorial_exps'\n", -    "\n", -    "# The original learning rate (LR) is set for 8-GPU training.\n", -    "# We divide it by 8 since we only use one GPU.\n", -    "cfg.data.videos_per_gpu = cfg.data.videos_per_gpu // 16\n", -    "cfg.optimizer.lr = cfg.optimizer.lr / 8 / 16\n", -    "cfg.total_epochs = 10\n", -    "\n", -    "# We can set the checkpoint saving interval to reduce the storage cost\n", -    "cfg.checkpoint_config.interval = 5\n", -    "# We can set the log print interval to reduce the number of times logs are printed\n", -    "cfg.log_config.interval = 5\n", -    "\n", -    "# Set seed so that the results are more reproducible\n", 
- "cfg.seed = 0\n", -    "set_random_seed(0, deterministic=False)\n", -    "cfg.gpu_ids = range(1)\n", -    "\n", -    "# Save the best\n", -    "cfg.evaluation.save_best='auto'\n", -    "\n", -    "\n", -    "# We can initialize the logger for training and have a look\n", -    "# at the final config used for training\n", -    "print(f'Config:\\n{cfg.pretty_text}')\n" -   ] -  }, -  { -   "cell_type": "markdown", -   "metadata": { -    "id": "tES-qnZ3k38Z" -   }, -   "source": [ -    "### Train a new recognizer\n", -    "\n", -    "Finally, let's initialize the dataset and recognizer, then train a new recognizer!" -   ] -  }, -  { -   "cell_type": "code", -   "execution_count": 29, -   "metadata": { -    "colab": { -     "base_uri": "https://localhost:8080/" -    }, -    "id": "dDBWkdDRk6oz", -    "outputId": "a85d80d7-b3c4-43f1-d49a-057e8036807f" -   }, -   "outputs": [ -    { -     "name": "stdout", -     "output_type": "stream", -     "text": [ -      "Use load_from_torchvision loader\n" -     ] -    }, -    { -     "name": "stderr", -     "output_type": "stream", -     "text": [ -      "2021-07-11 13:00:46,931 - mmaction - INFO - These parameters in pretrained checkpoint are not loaded: {'fc.bias', 'fc.weight'}\n", -      "/usr/local/lib/python3.7/dist-packages/torch/utils/data/dataloader.py:477: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. 
Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n", - " cpuset_checked))\n", - "2021-07-11 13:00:46,980 - mmaction - INFO - load checkpoint from ./checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth\n", - "2021-07-11 13:00:46,981 - mmaction - INFO - Use load_from_local loader\n", - "2021-07-11 13:00:47,071 - mmaction - WARNING - The model and loaded state dict do not match exactly\n", - "\n", - "size mismatch for cls_head.fc_cls.weight: copying a param with shape torch.Size([400, 2048]) from checkpoint, the shape in current model is torch.Size([2, 2048]).\n", - "size mismatch for cls_head.fc_cls.bias: copying a param with shape torch.Size([400]) from checkpoint, the shape in current model is torch.Size([2]).\n", - "2021-07-11 13:00:47,074 - mmaction - INFO - Start running, host: root@b465112b4add, work_dir: /content/mmaction2/tutorial_exps\n", - "2021-07-11 13:00:47,078 - mmaction - INFO - Hooks will be executed in the following order:\n", - "before_run:\n", - "(VERY_HIGH ) StepLrUpdaterHook \n", - "(NORMAL ) CheckpointHook \n", - "(NORMAL ) EvalHook \n", - "(VERY_LOW ) TextLoggerHook \n", - " -------------------- \n", - "before_train_epoch:\n", - "(VERY_HIGH ) StepLrUpdaterHook \n", - "(NORMAL ) EvalHook \n", - "(LOW ) IterTimerHook \n", - "(VERY_LOW ) TextLoggerHook \n", - " -------------------- \n", - "before_train_iter:\n", - "(VERY_HIGH ) StepLrUpdaterHook \n", - "(NORMAL ) EvalHook \n", - "(LOW ) IterTimerHook \n", - " -------------------- \n", - "after_train_iter:\n", - "(ABOVE_NORMAL) OptimizerHook \n", - "(NORMAL ) CheckpointHook \n", - "(NORMAL ) EvalHook \n", - "(LOW ) IterTimerHook \n", - "(VERY_LOW ) TextLoggerHook \n", - " -------------------- \n", - "after_train_epoch:\n", - "(NORMAL ) CheckpointHook \n", - "(NORMAL ) EvalHook \n", - "(VERY_LOW ) TextLoggerHook \n", - " -------------------- \n", - "before_val_epoch:\n", - "(LOW ) IterTimerHook \n", - "(VERY_LOW ) TextLoggerHook \n", - " -------------------- \n", - "before_val_iter:\n", - "(LOW ) IterTimerHook \n", - " -------------------- \n", - "after_val_iter:\n", - "(LOW ) IterTimerHook \n", - " -------------------- \n", - "after_val_epoch:\n", - "(VERY_LOW ) TextLoggerHook \n", - " -------------------- \n", - "2021-07-11 13:00:47,081 - mmaction - INFO - workflow: [('train', 1)], max: 10 epochs\n", - "/usr/local/lib/python3.7/dist-packages/mmcv/runner/hooks/evaluation.py:190: UserWarning: runner.meta is None. Creating an empty one.\n", - " warnings.warn('runner.meta is None. 
Creating an empty one.')\n", - "2021-07-11 13:00:51,802 - mmaction - INFO - Epoch [1][5/15]\tlr: 7.813e-05, eta: 0:02:16, time: 0.942, data_time: 0.730, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7604, loss: 0.7604, grad_norm: 14.8813\n", - "2021-07-11 13:00:52,884 - mmaction - INFO - Epoch [1][10/15]\tlr: 7.813e-05, eta: 0:01:21, time: 0.217, data_time: 0.028, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6282, loss: 0.6282, grad_norm: 10.1834\n", - "2021-07-11 13:00:53,706 - mmaction - INFO - Epoch [1][15/15]\tlr: 7.813e-05, eta: 0:00:59, time: 0.164, data_time: 0.001, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7165, loss: 0.7165, grad_norm: 10.8534\n", - "2021-07-11 13:00:57,724 - mmaction - INFO - Epoch [2][5/15]\tlr: 7.813e-05, eta: 0:01:09, time: 0.802, data_time: 0.596, memory: 2918, top1_acc: 0.3000, top5_acc: 1.0000, loss_cls: 0.7001, loss: 0.7001, grad_norm: 11.4311\n", - "2021-07-11 13:00:59,219 - mmaction - INFO - Epoch [2][10/15]\tlr: 7.813e-05, eta: 0:01:00, time: 0.296, data_time: 0.108, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6916, loss: 0.6916, grad_norm: 12.7101\n", - "2021-07-11 13:01:00,040 - mmaction - INFO - Epoch [2][15/15]\tlr: 7.813e-05, eta: 0:00:51, time: 0.167, data_time: 0.004, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6567, loss: 0.6567, grad_norm: 8.8837\n", - "2021-07-11 13:01:04,152 - mmaction - INFO - Epoch [3][5/15]\tlr: 7.813e-05, eta: 0:00:56, time: 0.820, data_time: 0.618, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6320, loss: 0.6320, grad_norm: 11.4025\n", - "2021-07-11 13:01:05,526 - mmaction - INFO - Epoch [3][10/15]\tlr: 7.813e-05, eta: 0:00:50, time: 0.276, data_time: 0.075, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6542, loss: 0.6542, grad_norm: 10.6429\n", - "2021-07-11 13:01:06,350 - mmaction - INFO - Epoch [3][15/15]\tlr: 7.813e-05, eta: 0:00:44, time: 0.165, data_time: 0.001, memory: 2918, top1_acc: 0.2000, top5_acc: 1.0000, loss_cls: 0.7661, loss: 0.7661, grad_norm: 12.8421\n", - "2021-07-11 13:01:10,771 - mmaction - INFO - Epoch [4][5/15]\tlr: 7.813e-05, eta: 0:00:47, time: 0.883, data_time: 0.676, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6410, loss: 0.6410, grad_norm: 10.6697\n", - "2021-07-11 13:01:11,776 - mmaction - INFO - Epoch [4][10/15]\tlr: 7.813e-05, eta: 0:00:42, time: 0.201, data_time: 0.011, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6949, loss: 0.6949, grad_norm: 10.5467\n", - "2021-07-11 13:01:12,729 - mmaction - INFO - Epoch [4][15/15]\tlr: 7.813e-05, eta: 0:00:38, time: 0.190, data_time: 0.026, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6290, loss: 0.6290, grad_norm: 11.2779\n", - "2021-07-11 13:01:16,816 - mmaction - INFO - Epoch [5][5/15]\tlr: 7.813e-05, eta: 0:00:38, time: 0.817, data_time: 0.608, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6011, loss: 0.6011, grad_norm: 9.1335\n", - "2021-07-11 13:01:18,176 - mmaction - INFO - Epoch [5][10/15]\tlr: 7.813e-05, eta: 0:00:35, time: 0.272, data_time: 0.080, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6652, loss: 0.6652, grad_norm: 11.0616\n", - "2021-07-11 13:01:19,119 - mmaction - INFO - Epoch [5][15/15]\tlr: 7.813e-05, eta: 0:00:32, time: 0.188, data_time: 0.017, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6440, loss: 0.6440, grad_norm: 11.6473\n", - "2021-07-11 13:01:19,120 - mmaction - INFO - Saving checkpoint at 5 epochs\n" - 
] + "cell_type": "markdown", + "metadata": { + "id": "neEFyxChfgiJ" + }, + "source": [ + "### Support a new dataset\n", + "\n", + "In this tutorial, we gives an example to convert the data into the format of existing datasets. Other methods and more advanced usages can be found in the [doc](/docs/tutorials/new_dataset.md)\n", + "\n", + "Firstly, let's download a tiny dataset obtained from [Kinetics-400](https://deepmind.com/research/open-source/open-source-datasets/kinetics/). We select 30 videos with their labels as train dataset and 10 videos with their labels as test dataset." + ] }, { - "name": "stdout", - "output_type": "stream", - "text": [ - "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 4.9 task/s, elapsed: 2s, ETA: 0s" - ] + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "gjsUj9JzgUlJ", + "outputId": "61c4704d-db81-4ca5-ed16-e2454dbdfe8e" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "rm: cannot remove 'kinetics400_tiny.zip*': No such file or directory\n", + "--2021-07-11 12:44:29-- https://download.openmmlab.com/mmaction/kinetics400_tiny.zip\n", + "Resolving download.openmmlab.com (download.openmmlab.com)... 47.88.36.78\n", + "Connecting to download.openmmlab.com (download.openmmlab.com)|47.88.36.78|:443... connected.\n", + "HTTP request sent, awaiting response... 200 OK\n", + "Length: 18308682 (17M) [application/zip]\n", + "Saving to: ‘kinetics400_tiny.zip’\n", + "\n", + "kinetics400_tiny.zi 100%[===================>] 17.46M 10.7MB/s in 1.6s \n", + "\n", + "2021-07-11 12:44:31 (10.7 MB/s) - ‘kinetics400_tiny.zip’ saved [18308682/18308682]\n", + "\n" + ] + } + ], + "source": [ + "# download, decompress the data\n", + "!rm kinetics400_tiny.zip*\n", + "!rm -rf kinetics400_tiny\n", + "!wget https://download.openmmlab.com/mmaction/kinetics400_tiny.zip\n", + "!unzip kinetics400_tiny.zip > /dev/null" + ] }, { - "name": "stderr", - "output_type": "stream", - "text": [ - "2021-07-11 13:01:21,673 - mmaction - INFO - Evaluating top_k_accuracy ...\n", - "2021-07-11 13:01:21,677 - mmaction - INFO - \n", - "top1_acc\t0.7000\n", - "top5_acc\t1.0000\n", - "2021-07-11 13:01:21,679 - mmaction - INFO - Evaluating mean_class_accuracy ...\n", - "2021-07-11 13:01:21,682 - mmaction - INFO - \n", - "mean_acc\t0.7000\n", - "2021-07-11 13:01:22,264 - mmaction - INFO - Now best checkpoint is saved as best_top1_acc_epoch_5.pth.\n", - "2021-07-11 13:01:22,267 - mmaction - INFO - Best top1_acc is 0.7000 at 5 epoch.\n", - "2021-07-11 13:01:22,271 - mmaction - INFO - Epoch(val) [5][5]\ttop1_acc: 0.7000, top5_acc: 1.0000, mean_class_accuracy: 0.7000\n", - "2021-07-11 13:01:26,623 - mmaction - INFO - Epoch [6][5/15]\tlr: 7.813e-05, eta: 0:00:31, time: 0.868, data_time: 0.656, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6753, loss: 0.6753, grad_norm: 11.8640\n", - "2021-07-11 13:01:27,597 - mmaction - INFO - Epoch [6][10/15]\tlr: 7.813e-05, eta: 0:00:28, time: 0.195, data_time: 0.003, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6715, loss: 0.6715, grad_norm: 11.3347\n", - "2021-07-11 13:01:28,736 - mmaction - INFO - Epoch [6][15/15]\tlr: 7.813e-05, eta: 0:00:25, time: 0.228, data_time: 0.063, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5769, loss: 0.5769, grad_norm: 9.2541\n", - "2021-07-11 13:01:32,860 - mmaction - INFO - Epoch [7][5/15]\tlr: 7.813e-05, eta: 0:00:24, time: 0.822, data_time: 0.620, memory: 2918, top1_acc: 0.9000, top5_acc: 
1.0000, loss_cls: 0.5379, loss: 0.5379, grad_norm: 8.0147\n", - "2021-07-11 13:01:34,340 - mmaction - INFO - Epoch [7][10/15]\tlr: 7.813e-05, eta: 0:00:22, time: 0.298, data_time: 0.109, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6187, loss: 0.6187, grad_norm: 11.5244\n", - "2021-07-11 13:01:35,165 - mmaction - INFO - Epoch [7][15/15]\tlr: 7.813e-05, eta: 0:00:19, time: 0.165, data_time: 0.002, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7063, loss: 0.7063, grad_norm: 12.4979\n", - "2021-07-11 13:01:39,435 - mmaction - INFO - Epoch [8][5/15]\tlr: 7.813e-05, eta: 0:00:17, time: 0.853, data_time: 0.641, memory: 2918, top1_acc: 1.0000, top5_acc: 1.0000, loss_cls: 0.5369, loss: 0.5369, grad_norm: 8.6545\n", - "2021-07-11 13:01:40,808 - mmaction - INFO - Epoch [8][10/15]\tlr: 7.813e-05, eta: 0:00:15, time: 0.275, data_time: 0.086, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6407, loss: 0.6407, grad_norm: 12.5537\n", - "2021-07-11 13:01:41,627 - mmaction - INFO - Epoch [8][15/15]\tlr: 7.813e-05, eta: 0:00:12, time: 0.164, data_time: 0.001, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6073, loss: 0.6073, grad_norm: 11.4028\n", - "2021-07-11 13:01:45,651 - mmaction - INFO - Epoch [9][5/15]\tlr: 7.813e-05, eta: 0:00:11, time: 0.803, data_time: 0.591, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5596, loss: 0.5596, grad_norm: 10.0821\n", - "2021-07-11 13:01:46,891 - mmaction - INFO - Epoch [9][10/15]\tlr: 7.813e-05, eta: 0:00:08, time: 0.248, data_time: 0.044, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6470, loss: 0.6470, grad_norm: 11.8979\n", - "2021-07-11 13:01:47,944 - mmaction - INFO - Epoch [9][15/15]\tlr: 7.813e-05, eta: 0:00:06, time: 0.211, data_time: 0.041, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6657, loss: 0.6657, grad_norm: 12.0643\n", - "2021-07-11 13:01:52,200 - mmaction - INFO - Epoch [10][5/15]\tlr: 7.813e-05, eta: 0:00:04, time: 0.849, data_time: 0.648, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6310, loss: 0.6310, grad_norm: 11.5690\n", - "2021-07-11 13:01:53,707 - mmaction - INFO - Epoch [10][10/15]\tlr: 7.813e-05, eta: 0:00:02, time: 0.303, data_time: 0.119, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5178, loss: 0.5178, grad_norm: 9.3324\n", - "2021-07-11 13:01:54,520 - mmaction - INFO - Epoch [10][15/15]\tlr: 7.813e-05, eta: 0:00:00, time: 0.162, data_time: 0.001, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6919, loss: 0.6919, grad_norm: 12.6688\n", - "2021-07-11 13:01:54,522 - mmaction - INFO - Saving checkpoint at 10 epochs\n" - ] + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "AbZ-o7V6hNw4", + "outputId": "b091909c-def2-49b5-88c2-01b00802b162" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Reading package lists...\n", + "Building dependency tree...\n", + "Reading state information...\n", + "The following NEW packages will be installed:\n", + " tree\n", + "0 upgraded, 1 newly installed, 0 to remove and 39 not upgraded.\n", + "Need to get 40.7 kB of archives.\n", + "After this operation, 105 kB of additional disk space will be used.\n", + "Get:1 http://archive.ubuntu.com/ubuntu bionic/universe amd64 tree amd64 1.7.0-5 [40.7 kB]\n", + "Fetched 40.7 kB in 0s (88.7 kB/s)\n", + "Selecting previously unselected package tree.\n", + "(Reading database ... 
160815 files and directories currently installed.)\n", + "Preparing to unpack .../tree_1.7.0-5_amd64.deb ...\n", + "Unpacking tree (1.7.0-5) ...\n", + "Setting up tree (1.7.0-5) ...\n", + "Processing triggers for man-db (2.8.3-2ubuntu0.1) ...\n", + "kinetics400_tiny\n", + "├── kinetics_tiny_train_video.txt\n", + "├── kinetics_tiny_val_video.txt\n", + "├── train\n", + "│   ├── 27_CSXByd3s.mp4\n", + "│   ├── 34XczvTaRiI.mp4\n", + "│   ├── A-wiliK50Zw.mp4\n", + "│   ├── D32_1gwq35E.mp4\n", + "│   ├── D92m0HsHjcQ.mp4\n", + "│   ├── DbX8mPslRXg.mp4\n", + "│   ├── FMlSTTpN3VY.mp4\n", + "│   ├── h10B9SVE-nk.mp4\n", + "│   ├── h2YqqUhnR34.mp4\n", + "│   ├── iRuyZSKhHRg.mp4\n", + "│   ├── IyfILH9lBRo.mp4\n", + "│   ├── kFC3KY2bOP8.mp4\n", + "│   ├── LvcFDgCAXQs.mp4\n", + "│   ├── O46YA8tI530.mp4\n", + "│   ├── oMrZaozOvdQ.mp4\n", + "│   ├── oXy-e_P_cAI.mp4\n", + "│   ├── P5M-hAts7MQ.mp4\n", + "│   ├── phDqGd0NKoo.mp4\n", + "│   ├── PnOe3GZRVX8.mp4\n", + "│   ├── R8HXQkdgKWA.mp4\n", + "│   ├── RqnKtCEoEcA.mp4\n", + "│   ├── soEcZZsBmDs.mp4\n", + "│   ├── TkkZPZHbAKA.mp4\n", + "│   ├── T_TMNGzVrDk.mp4\n", + "│   ├── WaS0qwP46Us.mp4\n", + "│   ├── Wh_YPQdH1Zg.mp4\n", + "│   ├── WWP5HZJsg-o.mp4\n", + "│   ├── xGY2dP0YUjA.mp4\n", + "│   ├── yLC9CtWU5ws.mp4\n", + "│   └── ZQV4U2KQ370.mp4\n", + "└── val\n", + " ├── 0pVGiAU6XEA.mp4\n", + " ├── AQrbRSnRt8M.mp4\n", + " ├── b6Q_b7vgc7Q.mp4\n", + " ├── ddvJ6-faICE.mp4\n", + " ├── IcLztCtvhb8.mp4\n", + " ├── ik4BW3-SCts.mp4\n", + " ├── jqRrH30V0k4.mp4\n", + " ├── SU_x2LQqSLs.mp4\n", + " ├── u4Rm6srmIS8.mp4\n", + " └── y5Iu7XkTqV0.mp4\n", + "\n", + "2 directories, 42 files\n" + ] + } + ], + "source": [ + "# Check the directory structure of the tiny data\n", + "\n", + "# Install tree first\n", + "!apt-get -q install tree\n", + "!tree kinetics400_tiny" + ] }, { - "name": "stdout", - "output_type": "stream", - "text": [ - "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 5.9 task/s, elapsed: 2s, ETA: 0s" - ] + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "fTdi6dI0hY3g", + "outputId": "ffda0997-8d77-431a-d66e-2f273e80c756" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "D32_1gwq35E.mp4 0\n", + "iRuyZSKhHRg.mp4 1\n", + "oXy-e_P_cAI.mp4 0\n", + "34XczvTaRiI.mp4 1\n", + "h2YqqUhnR34.mp4 0\n", + "O46YA8tI530.mp4 0\n", + "kFC3KY2bOP8.mp4 1\n", + "WWP5HZJsg-o.mp4 1\n", + "phDqGd0NKoo.mp4 1\n", + "yLC9CtWU5ws.mp4 0\n", + "27_CSXByd3s.mp4 1\n", + "IyfILH9lBRo.mp4 1\n", + "T_TMNGzVrDk.mp4 1\n", + "TkkZPZHbAKA.mp4 0\n", + "PnOe3GZRVX8.mp4 1\n", + "soEcZZsBmDs.mp4 1\n", + "FMlSTTpN3VY.mp4 1\n", + "WaS0qwP46Us.mp4 0\n", + "A-wiliK50Zw.mp4 1\n", + "oMrZaozOvdQ.mp4 1\n", + "ZQV4U2KQ370.mp4 0\n", + "DbX8mPslRXg.mp4 1\n", + "h10B9SVE-nk.mp4 1\n", + "P5M-hAts7MQ.mp4 0\n", + "R8HXQkdgKWA.mp4 0\n", + "D92m0HsHjcQ.mp4 0\n", + "RqnKtCEoEcA.mp4 0\n", + "LvcFDgCAXQs.mp4 0\n", + "xGY2dP0YUjA.mp4 0\n", + "Wh_YPQdH1Zg.mp4 0\n" + ] + } + ], + "source": [ + "# After downloading the data, we need to check the annotation format\n", + "!cat kinetics400_tiny/kinetics_tiny_train_video.txt" + ] }, { - "name": "stderr", - "output_type": "stream", - "text": [ - "2021-07-11 13:01:56,741 - mmaction - INFO - Evaluating top_k_accuracy ...\n", - "2021-07-11 13:01:56,743 - mmaction - INFO - \n", - "top1_acc\t1.0000\n", - "top5_acc\t1.0000\n", - "2021-07-11 13:01:56,749 - mmaction - INFO - Evaluating mean_class_accuracy ...\n", - "2021-07-11 13:01:56,750 - mmaction - INFO - \n", - 
"mean_acc\t1.0000\n", - "2021-07-11 13:01:57,267 - mmaction - INFO - Now best checkpoint is saved as best_top1_acc_epoch_10.pth.\n", - "2021-07-11 13:01:57,269 - mmaction - INFO - Best top1_acc is 1.0000 at 10 epoch.\n", - "2021-07-11 13:01:57,270 - mmaction - INFO - Epoch(val) [10][5]\ttop1_acc: 1.0000, top5_acc: 1.0000, mean_class_accuracy: 1.0000\n" - ] - } - ], - "source": [ - "import os.path as osp\n", - "\n", - "from mmaction.datasets import build_dataset\n", - "from mmaction.models import build_model\n", - "from mmaction.apis import train_model\n", - "\n", - "import mmcv\n", - "\n", - "# Build the dataset\n", - "datasets = [build_dataset(cfg.data.train)]\n", - "\n", - "# Build the recognizer\n", - "model = build_model(cfg.model, train_cfg=cfg.get('train_cfg'), test_cfg=cfg.get('test_cfg'))\n", - "\n", - "# Create work_dir\n", - "mmcv.mkdir_or_exist(osp.abspath(cfg.work_dir))\n", - "train_model(model, datasets, cfg, distributed=False, validate=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "zdSd7oTLlxIf" - }, - "source": [ - "### Understand the log\n", - "From the log, we can have a basic understanding the training process and know how well the recognizer is trained.\n", - "\n", - "Firstly, the ResNet-50 backbone pre-trained on ImageNet is loaded, this is a common practice since training from scratch is more cost. The log shows that all the weights of the ResNet-50 backbone are loaded except the `fc.bias` and `fc.weight`.\n", - "\n", - "Second, since the dataset we are using is small, we loaded a TSN model and finetune it for action recognition.\n", - "The original TSN is trained on original Kinetics-400 dataset which contains 400 classes but Kinetics-400 Tiny dataset only have 2 classes. Therefore, the last FC layer of the pre-trained TSN for classification has different weight shape and is not used.\n", - "\n", - "Third, after training, the recognizer is evaluated by the default evaluation. The results show that the recognizer achieves 100% top1 accuracy and 100% top5 accuracy on the val dataset,\n", - " \n", - "Not bad!" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "ryVoSfZVmogw" - }, - "source": [ - "## Test the trained recognizer\n", - "\n", - "After finetuning the recognizer, let's check the prediction results!" - ] - }, - { - "cell_type": "code", - "execution_count": 30, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" + "cell_type": "markdown", + "metadata": { + "id": "0bq0mxmEi29H" + }, + "source": [ + "According to the format defined in [`VideoDataset`](./datasets/video_dataset.py), each line indicates a sample video with the filepath and label, which are split with a whitespace." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "Ht_DGJA9jQar" + }, + "source": [ + "### Modify the config\n", + "\n", + "In the next step, we need to modify the config for the training.\n", + "To accelerate the process, we finetune a recognizer using a pre-trained recognizer." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "LjCcmCKOjktc" + }, + "outputs": [], + "source": [ + "from mmcv import Config\n", + "cfg = Config.fromfile('./configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py')" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "tc8YhFFGjp3e" + }, + "source": [ + "Given a config that trains a TSN model on kinetics400-full dataset, we need to modify some values to use it for training TSN on Kinetics400-tiny dataset.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "tlhu9byjjt-K", + "outputId": "3b9a3c49-ace0-41d3-dd15-d6c8579755f8" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Config:\n", + "model = dict(\n", + " type='Recognizer2D',\n", + " backbone=dict(\n", + " type='ResNet',\n", + " pretrained='torchvision://resnet50',\n", + " depth=50,\n", + " norm_eval=False),\n", + " cls_head=dict(\n", + " type='TSNHead',\n", + " num_classes=2,\n", + " in_channels=2048,\n", + " spatial_type='avg',\n", + " consensus=dict(type='AvgConsensus', dim=1),\n", + " dropout_ratio=0.4,\n", + " init_std=0.01),\n", + " train_cfg=None,\n", + " test_cfg=dict(average_clips=None))\n", + "optimizer = dict(type='SGD', lr=7.8125e-05, momentum=0.9, weight_decay=0.0001)\n", + "optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2))\n", + "lr_config = dict(policy='step', step=[40, 80])\n", + "total_epochs = 10\n", + "checkpoint_config = dict(interval=5)\n", + "log_config = dict(interval=5, hooks=[dict(type='TextLoggerHook')])\n", + "dist_params = dict(backend='nccl')\n", + "log_level = 'INFO'\n", + "load_from = './checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n", + "resume_from = None\n", + "workflow = [('train', 1)]\n", + "dataset_type = 'VideoDataset'\n", + "data_root = 'kinetics400_tiny/train/'\n", + "data_root_val = 'kinetics400_tiny/val/'\n", + "ann_file_train = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n", + "ann_file_val = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", + "ann_file_test = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", + "img_norm_cfg = dict(\n", + " mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False)\n", + "train_pipeline = [\n", + " dict(type='DecordInit'),\n", + " dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8),\n", + " dict(type='DecordDecode'),\n", + " dict(\n", + " type='MultiScaleCrop',\n", + " input_size=224,\n", + " scales=(1, 0.875, 0.75, 0.66),\n", + " random_crop=False,\n", + " max_wh_scale_gap=1),\n", + " dict(type='Resize', scale=(224, 224), keep_ratio=False),\n", + " dict(type='Flip', flip_ratio=0.5),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[123.675, 116.28, 103.53],\n", + " std=[58.395, 57.12, 57.375],\n", + " to_bgr=False),\n", + " dict(type='FormatShape', input_format='NCHW'),\n", + " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", + " dict(type='ToTensor', keys=['imgs', 'label'])\n", + "]\n", + "val_pipeline = [\n", + " dict(type='DecordInit'),\n", + " dict(\n", + " type='SampleFrames',\n", + " clip_len=1,\n", + " frame_interval=1,\n", + " num_clips=8,\n", + " test_mode=True),\n", + " dict(type='DecordDecode'),\n", + " dict(type='Resize', scale=(-1, 256)),\n", + " dict(type='CenterCrop', crop_size=224),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[123.675, 116.28, 103.53],\n", + " std=[58.395, 57.12, 57.375],\n", + 
" to_bgr=False),\n", + " dict(type='FormatShape', input_format='NCHW'),\n", + " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", + " dict(type='ToTensor', keys=['imgs'])\n", + "]\n", + "test_pipeline = [\n", + " dict(type='DecordInit'),\n", + " dict(\n", + " type='SampleFrames',\n", + " clip_len=1,\n", + " frame_interval=1,\n", + " num_clips=25,\n", + " test_mode=True),\n", + " dict(type='DecordDecode'),\n", + " dict(type='Resize', scale=(-1, 256)),\n", + " dict(type='ThreeCrop', crop_size=256),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[123.675, 116.28, 103.53],\n", + " std=[58.395, 57.12, 57.375],\n", + " to_bgr=False),\n", + " dict(type='FormatShape', input_format='NCHW'),\n", + " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", + " dict(type='ToTensor', keys=['imgs'])\n", + "]\n", + "data = dict(\n", + " videos_per_gpu=2,\n", + " workers_per_gpu=2,\n", + " train=dict(\n", + " type='VideoDataset',\n", + " ann_file='kinetics400_tiny/kinetics_tiny_train_video.txt',\n", + " data_prefix='kinetics400_tiny/train/',\n", + " pipeline=[\n", + " dict(type='DecordInit'),\n", + " dict(\n", + " type='SampleFrames', clip_len=1, frame_interval=1,\n", + " num_clips=8),\n", + " dict(type='DecordDecode'),\n", + " dict(\n", + " type='MultiScaleCrop',\n", + " input_size=224,\n", + " scales=(1, 0.875, 0.75, 0.66),\n", + " random_crop=False,\n", + " max_wh_scale_gap=1),\n", + " dict(type='Resize', scale=(224, 224), keep_ratio=False),\n", + " dict(type='Flip', flip_ratio=0.5),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[123.675, 116.28, 103.53],\n", + " std=[58.395, 57.12, 57.375],\n", + " to_bgr=False),\n", + " dict(type='FormatShape', input_format='NCHW'),\n", + " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", + " dict(type='ToTensor', keys=['imgs', 'label'])\n", + " ]),\n", + " val=dict(\n", + " type='VideoDataset',\n", + " ann_file='kinetics400_tiny/kinetics_tiny_val_video.txt',\n", + " data_prefix='kinetics400_tiny/val/',\n", + " pipeline=[\n", + " dict(type='DecordInit'),\n", + " dict(\n", + " type='SampleFrames',\n", + " clip_len=1,\n", + " frame_interval=1,\n", + " num_clips=8,\n", + " test_mode=True),\n", + " dict(type='DecordDecode'),\n", + " dict(type='Resize', scale=(-1, 256)),\n", + " dict(type='CenterCrop', crop_size=224),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[123.675, 116.28, 103.53],\n", + " std=[58.395, 57.12, 57.375],\n", + " to_bgr=False),\n", + " dict(type='FormatShape', input_format='NCHW'),\n", + " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", + " dict(type='ToTensor', keys=['imgs'])\n", + " ]),\n", + " test=dict(\n", + " type='VideoDataset',\n", + " ann_file='kinetics400_tiny/kinetics_tiny_val_video.txt',\n", + " data_prefix='kinetics400_tiny/val/',\n", + " pipeline=[\n", + " dict(type='DecordInit'),\n", + " dict(\n", + " type='SampleFrames',\n", + " clip_len=1,\n", + " frame_interval=1,\n", + " num_clips=25,\n", + " test_mode=True),\n", + " dict(type='DecordDecode'),\n", + " dict(type='Resize', scale=(-1, 256)),\n", + " dict(type='ThreeCrop', crop_size=256),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[123.675, 116.28, 103.53],\n", + " std=[58.395, 57.12, 57.375],\n", + " to_bgr=False),\n", + " dict(type='FormatShape', input_format='NCHW'),\n", + " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", + " dict(type='ToTensor', keys=['imgs'])\n", + " ]))\n", + "evaluation = dict(\n", + " interval=5,\n", + " metrics=['top_k_accuracy', 'mean_class_accuracy'],\n", + " 
save_best='auto')\n", + "work_dir = './tutorial_exps'\n", + "omnisource = False\n", + "seed = 0\n", + "gpu_ids = range(0, 1)\n", + "\n" + ] + } + ], + "source": [ + "from mmcv.runner import set_random_seed\n", + "\n", + "# Modify dataset type and path\n", + "cfg.dataset_type = 'VideoDataset'\n", + "cfg.data_root = 'kinetics400_tiny/train/'\n", + "cfg.data_root_val = 'kinetics400_tiny/val/'\n", + "cfg.ann_file_train = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n", + "cfg.ann_file_val = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", + "cfg.ann_file_test = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", + "\n", + "cfg.data.test.type = 'VideoDataset'\n", + "cfg.data.test.ann_file = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", + "cfg.data.test.data_prefix = 'kinetics400_tiny/val/'\n", + "\n", + "cfg.data.train.type = 'VideoDataset'\n", + "cfg.data.train.ann_file = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n", + "cfg.data.train.data_prefix = 'kinetics400_tiny/train/'\n", + "\n", + "cfg.data.val.type = 'VideoDataset'\n", + "cfg.data.val.ann_file = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", + "cfg.data.val.data_prefix = 'kinetics400_tiny/val/'\n", + "\n", + "# The flag is used to determine whether it is omnisource training\n", + "cfg.setdefault('omnisource', False)\n", + "# Modify num classes of the model in cls_head\n", + "cfg.model.cls_head.num_classes = 2\n", + "# We can use the pre-trained TSN model\n", + "cfg.load_from = './checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n", + "\n", + "# Set up working dir to save files and logs.\n", + "cfg.work_dir = './tutorial_exps'\n", + "\n", + "# The original learning rate (LR) is set for 8-GPU training.\n", + "# We divide it by 8 since we only use one GPU, and by another 16 to match the reduced batch size.\n", + "cfg.data.videos_per_gpu = cfg.data.videos_per_gpu // 16\n", + "cfg.optimizer.lr = cfg.optimizer.lr / 8 / 16\n", + "cfg.total_epochs = 10\n", + "\n", + "# We can set the checkpoint saving interval to reduce the storage cost\n", + "cfg.checkpoint_config.interval = 5\n", + "# We can set the log print interval to reduce the times of printing logs\n", + "cfg.log_config.interval = 5\n", + "\n", + "# Set seed thus the results are more reproducible\n", + "cfg.seed = 0\n", + "set_random_seed(0, deterministic=False)\n", + "cfg.gpu_ids = range(1)\n", + "\n", + "# Save the best\n", + "cfg.evaluation.save_best='auto'\n", + "\n", + "\n", + "# We can initialize the logger for training and have a look\n", + "# at the final config used for training\n", + "print(f'Config:\\n{cfg.pretty_text}')\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "tES-qnZ3k38Z" + }, + "source": [ + "### Train a new recognizer\n", + "\n", + "Finally, let's initialize the dataset and recognizer, then train a new recognizer!"
+ ] }, - "id": "eyY3hCMwyTct", - "outputId": "ea54ff0a-4299-4e93-c1ca-4fe597e7516b" - }, - "outputs": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - "[ ] 0/10, elapsed: 0s, ETA:" - ] + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "dDBWkdDRk6oz", + "outputId": "a85d80d7-b3c4-43f1-d49a-057e8036807f" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Use load_from_torchvision loader\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2021-07-11 13:00:46,931 - mmaction - INFO - These parameters in pretrained checkpoint are not loaded: {'fc.bias', 'fc.weight'}\n", + "/usr/local/lib/python3.7/dist-packages/torch/utils/data/dataloader.py:477: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n", + " cpuset_checked))\n", + "2021-07-11 13:00:46,980 - mmaction - INFO - load checkpoint from ./checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth\n", + "2021-07-11 13:00:46,981 - mmaction - INFO - Use load_from_local loader\n", + "2021-07-11 13:00:47,071 - mmaction - WARNING - The model and loaded state dict do not match exactly\n", + "\n", + "size mismatch for cls_head.fc_cls.weight: copying a param with shape torch.Size([400, 2048]) from checkpoint, the shape in current model is torch.Size([2, 2048]).\n", + "size mismatch for cls_head.fc_cls.bias: copying a param with shape torch.Size([400]) from checkpoint, the shape in current model is torch.Size([2]).\n", + "2021-07-11 13:00:47,074 - mmaction - INFO - Start running, host: root@b465112b4add, work_dir: /content/mmaction2/tutorial_exps\n", + "2021-07-11 13:00:47,078 - mmaction - INFO - Hooks will be executed in the following order:\n", + "before_run:\n", + "(VERY_HIGH ) StepLrUpdaterHook \n", + "(NORMAL ) CheckpointHook \n", + "(NORMAL ) EvalHook \n", + "(VERY_LOW ) TextLoggerHook \n", + " -------------------- \n", + "before_train_epoch:\n", + "(VERY_HIGH ) StepLrUpdaterHook \n", + "(NORMAL ) EvalHook \n", + "(LOW ) IterTimerHook \n", + "(VERY_LOW ) TextLoggerHook \n", + " -------------------- \n", + "before_train_iter:\n", + "(VERY_HIGH ) StepLrUpdaterHook \n", + "(NORMAL ) EvalHook \n", + "(LOW ) IterTimerHook \n", + " -------------------- \n", + "after_train_iter:\n", + "(ABOVE_NORMAL) OptimizerHook \n", + "(NORMAL ) CheckpointHook \n", + "(NORMAL ) EvalHook \n", + "(LOW ) IterTimerHook \n", + "(VERY_LOW ) TextLoggerHook \n", + " -------------------- \n", + "after_train_epoch:\n", + "(NORMAL ) CheckpointHook \n", + "(NORMAL ) EvalHook \n", + "(VERY_LOW ) TextLoggerHook \n", + " -------------------- \n", + "before_val_epoch:\n", + "(LOW ) IterTimerHook \n", + "(VERY_LOW ) TextLoggerHook \n", + " -------------------- \n", + "before_val_iter:\n", + "(LOW ) IterTimerHook \n", + " -------------------- \n", + "after_val_iter:\n", + "(LOW ) IterTimerHook \n", + " -------------------- \n", + "after_val_epoch:\n", + "(VERY_LOW ) TextLoggerHook \n", + " -------------------- \n", + "2021-07-11 13:00:47,081 - mmaction - INFO - workflow: [('train', 1)], max: 10 epochs\n", + "/usr/local/lib/python3.7/dist-packages/mmcv/runner/hooks/evaluation.py:190: UserWarning: runner.meta is 
None. Creating an empty one.\n", + " warnings.warn('runner.meta is None. Creating an empty one.')\n", + "2021-07-11 13:00:51,802 - mmaction - INFO - Epoch [1][5/15]\tlr: 7.813e-05, eta: 0:02:16, time: 0.942, data_time: 0.730, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7604, loss: 0.7604, grad_norm: 14.8813\n", + "2021-07-11 13:00:52,884 - mmaction - INFO - Epoch [1][10/15]\tlr: 7.813e-05, eta: 0:01:21, time: 0.217, data_time: 0.028, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6282, loss: 0.6282, grad_norm: 10.1834\n", + "2021-07-11 13:00:53,706 - mmaction - INFO - Epoch [1][15/15]\tlr: 7.813e-05, eta: 0:00:59, time: 0.164, data_time: 0.001, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7165, loss: 0.7165, grad_norm: 10.8534\n", + "2021-07-11 13:00:57,724 - mmaction - INFO - Epoch [2][5/15]\tlr: 7.813e-05, eta: 0:01:09, time: 0.802, data_time: 0.596, memory: 2918, top1_acc: 0.3000, top5_acc: 1.0000, loss_cls: 0.7001, loss: 0.7001, grad_norm: 11.4311\n", + "2021-07-11 13:00:59,219 - mmaction - INFO - Epoch [2][10/15]\tlr: 7.813e-05, eta: 0:01:00, time: 0.296, data_time: 0.108, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6916, loss: 0.6916, grad_norm: 12.7101\n", + "2021-07-11 13:01:00,040 - mmaction - INFO - Epoch [2][15/15]\tlr: 7.813e-05, eta: 0:00:51, time: 0.167, data_time: 0.004, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6567, loss: 0.6567, grad_norm: 8.8837\n", + "2021-07-11 13:01:04,152 - mmaction - INFO - Epoch [3][5/15]\tlr: 7.813e-05, eta: 0:00:56, time: 0.820, data_time: 0.618, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6320, loss: 0.6320, grad_norm: 11.4025\n", + "2021-07-11 13:01:05,526 - mmaction - INFO - Epoch [3][10/15]\tlr: 7.813e-05, eta: 0:00:50, time: 0.276, data_time: 0.075, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6542, loss: 0.6542, grad_norm: 10.6429\n", + "2021-07-11 13:01:06,350 - mmaction - INFO - Epoch [3][15/15]\tlr: 7.813e-05, eta: 0:00:44, time: 0.165, data_time: 0.001, memory: 2918, top1_acc: 0.2000, top5_acc: 1.0000, loss_cls: 0.7661, loss: 0.7661, grad_norm: 12.8421\n", + "2021-07-11 13:01:10,771 - mmaction - INFO - Epoch [4][5/15]\tlr: 7.813e-05, eta: 0:00:47, time: 0.883, data_time: 0.676, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6410, loss: 0.6410, grad_norm: 10.6697\n", + "2021-07-11 13:01:11,776 - mmaction - INFO - Epoch [4][10/15]\tlr: 7.813e-05, eta: 0:00:42, time: 0.201, data_time: 0.011, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6949, loss: 0.6949, grad_norm: 10.5467\n", + "2021-07-11 13:01:12,729 - mmaction - INFO - Epoch [4][15/15]\tlr: 7.813e-05, eta: 0:00:38, time: 0.190, data_time: 0.026, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6290, loss: 0.6290, grad_norm: 11.2779\n", + "2021-07-11 13:01:16,816 - mmaction - INFO - Epoch [5][5/15]\tlr: 7.813e-05, eta: 0:00:38, time: 0.817, data_time: 0.608, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6011, loss: 0.6011, grad_norm: 9.1335\n", + "2021-07-11 13:01:18,176 - mmaction - INFO - Epoch [5][10/15]\tlr: 7.813e-05, eta: 0:00:35, time: 0.272, data_time: 0.080, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6652, loss: 0.6652, grad_norm: 11.0616\n", + "2021-07-11 13:01:19,119 - mmaction - INFO - Epoch [5][15/15]\tlr: 7.813e-05, eta: 0:00:32, time: 0.188, data_time: 0.017, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6440, loss: 0.6440, grad_norm: 11.6473\n", + 
"2021-07-11 13:01:19,120 - mmaction - INFO - Saving checkpoint at 5 epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 4.9 task/s, elapsed: 2s, ETA: 0s" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2021-07-11 13:01:21,673 - mmaction - INFO - Evaluating top_k_accuracy ...\n", + "2021-07-11 13:01:21,677 - mmaction - INFO - \n", + "top1_acc\t0.7000\n", + "top5_acc\t1.0000\n", + "2021-07-11 13:01:21,679 - mmaction - INFO - Evaluating mean_class_accuracy ...\n", + "2021-07-11 13:01:21,682 - mmaction - INFO - \n", + "mean_acc\t0.7000\n", + "2021-07-11 13:01:22,264 - mmaction - INFO - Now best checkpoint is saved as best_top1_acc_epoch_5.pth.\n", + "2021-07-11 13:01:22,267 - mmaction - INFO - Best top1_acc is 0.7000 at 5 epoch.\n", + "2021-07-11 13:01:22,271 - mmaction - INFO - Epoch(val) [5][5]\ttop1_acc: 0.7000, top5_acc: 1.0000, mean_class_accuracy: 0.7000\n", + "2021-07-11 13:01:26,623 - mmaction - INFO - Epoch [6][5/15]\tlr: 7.813e-05, eta: 0:00:31, time: 0.868, data_time: 0.656, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6753, loss: 0.6753, grad_norm: 11.8640\n", + "2021-07-11 13:01:27,597 - mmaction - INFO - Epoch [6][10/15]\tlr: 7.813e-05, eta: 0:00:28, time: 0.195, data_time: 0.003, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6715, loss: 0.6715, grad_norm: 11.3347\n", + "2021-07-11 13:01:28,736 - mmaction - INFO - Epoch [6][15/15]\tlr: 7.813e-05, eta: 0:00:25, time: 0.228, data_time: 0.063, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5769, loss: 0.5769, grad_norm: 9.2541\n", + "2021-07-11 13:01:32,860 - mmaction - INFO - Epoch [7][5/15]\tlr: 7.813e-05, eta: 0:00:24, time: 0.822, data_time: 0.620, memory: 2918, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.5379, loss: 0.5379, grad_norm: 8.0147\n", + "2021-07-11 13:01:34,340 - mmaction - INFO - Epoch [7][10/15]\tlr: 7.813e-05, eta: 0:00:22, time: 0.298, data_time: 0.109, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6187, loss: 0.6187, grad_norm: 11.5244\n", + "2021-07-11 13:01:35,165 - mmaction - INFO - Epoch [7][15/15]\tlr: 7.813e-05, eta: 0:00:19, time: 0.165, data_time: 0.002, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7063, loss: 0.7063, grad_norm: 12.4979\n", + "2021-07-11 13:01:39,435 - mmaction - INFO - Epoch [8][5/15]\tlr: 7.813e-05, eta: 0:00:17, time: 0.853, data_time: 0.641, memory: 2918, top1_acc: 1.0000, top5_acc: 1.0000, loss_cls: 0.5369, loss: 0.5369, grad_norm: 8.6545\n", + "2021-07-11 13:01:40,808 - mmaction - INFO - Epoch [8][10/15]\tlr: 7.813e-05, eta: 0:00:15, time: 0.275, data_time: 0.086, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6407, loss: 0.6407, grad_norm: 12.5537\n", + "2021-07-11 13:01:41,627 - mmaction - INFO - Epoch [8][15/15]\tlr: 7.813e-05, eta: 0:00:12, time: 0.164, data_time: 0.001, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6073, loss: 0.6073, grad_norm: 11.4028\n", + "2021-07-11 13:01:45,651 - mmaction - INFO - Epoch [9][5/15]\tlr: 7.813e-05, eta: 0:00:11, time: 0.803, data_time: 0.591, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5596, loss: 0.5596, grad_norm: 10.0821\n", + "2021-07-11 13:01:46,891 - mmaction - INFO - Epoch [9][10/15]\tlr: 7.813e-05, eta: 0:00:08, time: 0.248, data_time: 0.044, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6470, loss: 0.6470, grad_norm: 11.8979\n", + "2021-07-11 13:01:47,944 - mmaction - INFO - Epoch 
[9][15/15]\tlr: 7.813e-05, eta: 0:00:06, time: 0.211, data_time: 0.041, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6657, loss: 0.6657, grad_norm: 12.0643\n", + "2021-07-11 13:01:52,200 - mmaction - INFO - Epoch [10][5/15]\tlr: 7.813e-05, eta: 0:00:04, time: 0.849, data_time: 0.648, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6310, loss: 0.6310, grad_norm: 11.5690\n", + "2021-07-11 13:01:53,707 - mmaction - INFO - Epoch [10][10/15]\tlr: 7.813e-05, eta: 0:00:02, time: 0.303, data_time: 0.119, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5178, loss: 0.5178, grad_norm: 9.3324\n", + "2021-07-11 13:01:54,520 - mmaction - INFO - Epoch [10][15/15]\tlr: 7.813e-05, eta: 0:00:00, time: 0.162, data_time: 0.001, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6919, loss: 0.6919, grad_norm: 12.6688\n", + "2021-07-11 13:01:54,522 - mmaction - INFO - Saving checkpoint at 10 epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 5.9 task/s, elapsed: 2s, ETA: 0s" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2021-07-11 13:01:56,741 - mmaction - INFO - Evaluating top_k_accuracy ...\n", + "2021-07-11 13:01:56,743 - mmaction - INFO - \n", + "top1_acc\t1.0000\n", + "top5_acc\t1.0000\n", + "2021-07-11 13:01:56,749 - mmaction - INFO - Evaluating mean_class_accuracy ...\n", + "2021-07-11 13:01:56,750 - mmaction - INFO - \n", + "mean_acc\t1.0000\n", + "2021-07-11 13:01:57,267 - mmaction - INFO - Now best checkpoint is saved as best_top1_acc_epoch_10.pth.\n", + "2021-07-11 13:01:57,269 - mmaction - INFO - Best top1_acc is 1.0000 at 10 epoch.\n", + "2021-07-11 13:01:57,270 - mmaction - INFO - Epoch(val) [10][5]\ttop1_acc: 1.0000, top5_acc: 1.0000, mean_class_accuracy: 1.0000\n" + ] + } + ], + "source": [ + "import os.path as osp\n", + "\n", + "from mmaction.datasets import build_dataset\n", + "from mmaction.models import build_model\n", + "from mmaction.apis import train_model\n", + "\n", + "import mmcv\n", + "\n", + "# Build the dataset\n", + "datasets = [build_dataset(cfg.data.train)]\n", + "\n", + "# Build the recognizer\n", + "model = build_model(cfg.model, train_cfg=cfg.get('train_cfg'), test_cfg=cfg.get('test_cfg'))\n", + "\n", + "# Create work_dir\n", + "mmcv.mkdir_or_exist(osp.abspath(cfg.work_dir))\n", + "train_model(model, datasets, cfg, distributed=False, validate=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "zdSd7oTLlxIf" + }, + "source": [ + "### Understand the log\n", + "From the log, we can have a basic understanding of the training process and know how well the recognizer is trained.\n", + "\n", + "Firstly, the ResNet-50 backbone pre-trained on ImageNet is loaded; this is a common practice since training from scratch is more costly.
The log shows that all the weights of the ResNet-50 backbone are loaded except the `fc.bias` and `fc.weight`.\n", + "\n", + "Second, since the dataset we are using is small, we load a TSN model and finetune it for action recognition.\n", + "The original TSN is trained on the original Kinetics-400 dataset, which contains 400 classes, but the Kinetics-400 Tiny dataset only has 2 classes. Therefore, the last FC layer of the pre-trained TSN for classification has a different weight shape and is not used.\n", + "\n", + "Third, after training, the recognizer is evaluated by the default evaluation. The results show that the recognizer achieves 100% top1 accuracy and 100% top5 accuracy on the val dataset.\n", + " \n", + "Not bad!" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "ryVoSfZVmogw" + }, + "source": [ + "## Test the trained recognizer\n", + "\n", + "After finetuning the recognizer, let's check the prediction results!" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "eyY3hCMwyTct", + "outputId": "ea54ff0a-4299-4e93-c1ca-4fe597e7516b" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[ ] 0/10, elapsed: 0s, ETA:" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/usr/local/lib/python3.7/dist-packages/torch/utils/data/dataloader.py:477: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create.
Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n", + " cpuset_checked))\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 2.2 task/s, elapsed: 5s, ETA: 0s\n", + "Evaluating top_k_accuracy ...\n", + "\n", + "top1_acc\t1.0000\n", + "top5_acc\t1.0000\n", + "\n", + "Evaluating mean_class_accuracy ...\n", + "\n", + "mean_acc\t1.0000\n", + "top1_acc: 1.0000\n", + "top5_acc: 1.0000\n", + "mean_class_accuracy: 1.0000\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/content/mmaction2/mmaction/datasets/base.py:166: UserWarning: Option arguments for metrics has been changed to `metric_options`, See 'https://github.com/open-mmlab/mmaction2/pull/286' for more details\n", + " 'Option arguments for metrics has been changed to '\n" + ] + } + ], + "source": [ + "from mmaction.apis import single_gpu_test\n", + "from mmaction.datasets import build_dataloader\n", + "from mmcv.parallel import MMDataParallel\n", + "\n", + "# Build a test dataloader\n", + "dataset = build_dataset(cfg.data.test, dict(test_mode=True))\n", + "data_loader = build_dataloader(\n", + " dataset,\n", + " videos_per_gpu=1,\n", + " workers_per_gpu=cfg.data.workers_per_gpu,\n", + " dist=False,\n", + " shuffle=False)\n", + "model = MMDataParallel(model, device_ids=[0])\n", + "outputs = single_gpu_test(model, data_loader)\n", + "\n", + "eval_config = cfg.evaluation\n", + "eval_config.pop('interval')\n", + "eval_res = dataset.evaluate(outputs, **eval_config)\n", + "for name, val in eval_res.items():\n", + " print(f'{name}: {val:.04f}')" + ] + }, + { + "cell_type": "markdown", + "source": [ + "## Perform Spatio-Temporal Detection\n", + "Here we first install MMDetection." 
+ ], + "metadata": { + "id": "jZ4t44nWmZDM" + } + }, + { + "cell_type": "code", + "source": [ + "# Git clone mmdetection repo\n", + "%cd ..\n", + "!git clone https://github.com/open-mmlab/mmdetection.git\n", + "%cd mmdetection\n", + "\n", + "# install mmdet\n", + "!pip install -e .\n", + "%cd ../mmaction2" + ], + "metadata": { + "id": "w1p0_g76nHOQ", + "outputId": "b30a6be3-c457-452e-c789-7083117c5011", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "execution_count": 3, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "/content\n", + "Cloning into 'mmdetection'...\n", + "remote: Enumerating objects: 23137, done.\u001b[K\n", + "remote: Total 23137 (delta 0), reused 0 (delta 0), pack-reused 23137\u001b[K\n", + "Receiving objects: 100% (23137/23137), 25.88 MiB | 25.75 MiB/s, done.\n", + "Resolving deltas: 100% (16198/16198), done.\n", + "/content/mmdetection\n", + "Obtaining file:///content/mmdetection\n", + "Requirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from mmdet==2.21.0) (3.2.2)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from mmdet==2.21.0) (1.21.5)\n", + "Requirement already satisfied: pycocotools in /usr/local/lib/python3.7/dist-packages (from mmdet==2.21.0) (2.0.4)\n", + "Requirement already satisfied: six in /usr/local/lib/python3.7/dist-packages (from mmdet==2.21.0) (1.15.0)\n", + "Collecting terminaltables\n", + " Downloading terminaltables-3.1.10-py2.py3-none-any.whl (15 kB)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmdet==2.21.0) (0.11.0)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmdet==2.21.0) (2.8.2)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmdet==2.21.0) (1.3.2)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmdet==2.21.0) (3.0.7)\n", + "Installing collected packages: terminaltables, mmdet\n", + " Running setup.py develop for mmdet\n", + "Successfully installed mmdet-2.21.0 terminaltables-3.1.10\n", + "/content/mmaction2\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "Download a video to `demo` directory in MMAction2." + ], + "metadata": { + "id": "vlOQsH8OnVKn" + } + }, + { + "cell_type": "code", + "source": [ + "!wget https://download.openmmlab.com/mmaction/dataset/sample/1j20qq1JyX4.mp4 -O demo/1j20qq1JyX4.mp4" + ], + "metadata": { + "id": "QaW3jg5Enish", + "outputId": "c70cde3a-b337-41d0-cb08-82dfc746d9ef", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "execution_count": 4, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "--2022-02-19 11:02:59-- https://download.openmmlab.com/mmaction/dataset/sample/1j20qq1JyX4.mp4\n", + "Resolving download.openmmlab.com (download.openmmlab.com)... 47.254.186.233\n", + "Connecting to download.openmmlab.com (download.openmmlab.com)|47.254.186.233|:443... connected.\n", + "HTTP request sent, awaiting response... 
200 OK\n", + "Length: 4864186 (4.6M) [video/mp4]\n", + "Saving to: ‘demo/1j20qq1JyX4.mp4’\n", + "\n", + "demo/1j20qq1JyX4.mp 100%[===================>] 4.64M 3.78MB/s in 1.2s \n", + "\n", + "2022-02-19 11:03:01 (3.78 MB/s) - ‘demo/1j20qq1JyX4.mp4’ saved [4864186/4864186]\n", + "\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "Run spatio-temporal demo." + ], + "metadata": { + "id": "LYGxdu8Vnoah" + } + }, + { + "cell_type": "code", + "source": [ + "!python demo/demo_spatiotemporal_det.py --video demo/1j20qq1JyX4.mp4" + ], + "metadata": { + "id": "LPLiaHaYnrb7", + "outputId": "8a8f8a16-ad7b-4559-c19c-c8264533bff3", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "execution_count": 5, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Imageio: 'ffmpeg-linux64-v3.3.1' was not found on your computer; downloading it now.\n", + "Try 1. Download from https://github.com/imageio/imageio-binaries/raw/master/ffmpeg/ffmpeg-linux64-v3.3.1 (43.8 MB)\n", + "Downloading: 8192/45929032 bytes (0.0%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b3883008/45929032 bytes (8.5%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b7995392/45929032 bytes (17.4%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b11796480/45929032 bytes (25.7%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b16072704/45929032 bytes (35.0%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b20152320/45929032 bytes (43.9%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b24305664/45929032 bytes (52.9%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b28319744/45929032 bytes (61.7%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b32440320/45929032 bytes (70.6%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b36634624/45929032 bytes (79.8%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b40886272/45929032 bytes (89.0%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b45146112/45929032 bytes (98.3%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b45929032/45929032 bytes (100.0%)\n", + " Done\n", + "File saved as /root/.imageio/ffmpeg/ffmpeg-linux64-v3.3.1.\n", + "load checkpoint from http path: http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth\n", + "Downloading: \"http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth\" to /root/.cache/torch/hub/checkpoints/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth\n", + "100% 160M/160M [00:21<00:00, 7.77MB/s]\n", + "Performing Human Detection for each frame\n", + "[>>] 217/217, 8.6 task/s, elapsed: 25s, ETA: 0sload checkpoint from http path: https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201217-16378594.pth\n", + "Downloading: \"https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201217-16378594.pth\" to /root/.cache/torch/hub/checkpoints/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201217-16378594.pth\n", + "100% 228M/228M [00:31<00:00, 7.55MB/s]\n", + "Performing SpatioTemporal Action Detection for each clip\n", + "[> ] 167/217, 7.7 
task/s, elapsed: 22s, ETA: 7sPerforming visualization\n", + "[MoviePy] >>>> Building video demo/stdet_demo.mp4\n", + "[MoviePy] Writing video demo/stdet_demo.mp4\n", + "100% 434/434 [00:12<00:00, 36.07it/s]\n", + "[MoviePy] Done.\n", + "[MoviePy] >>>> Video ready: demo/stdet_demo.mp4 \n", + "\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "# Check the video\n", + "from IPython.display import HTML\n", + "from base64 import b64encode\n", + "mp4 = open('demo/stdet_demo.mp4','rb').read()\n", + "data_url = \"data:video/mp4;base64,\" + b64encode(mp4).decode()\n", + "HTML(\"\"\"\n", + "\n", + "\"\"\" % data_url)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 341 + }, + "id": "-0atQCzBo9-C", + "outputId": "b6bb3a67-669c-45d0-cdf4-25b6210362d0" + }, + "execution_count": 6, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/html": [ + "\n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "execution_count": 6 + } + ] + } + ], + "metadata": { + "accelerator": "GPU", + "colab": { + "collapsed_sections": [], + "name": "MMAction2 Tutorial.ipynb", + "provenance": [], + "toc_visible": true, + "include_colab_link": true + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" } - ], - "source": [ - "from mmaction.apis import single_gpu_test\n", - "from mmaction.datasets import build_dataloader\n", - "from mmcv.parallel import MMDataParallel\n", - "\n", - "# Build a test dataloader\n", - "dataset = build_dataset(cfg.data.test, dict(test_mode=True))\n", - "data_loader = build_dataloader(\n", - " dataset,\n", - " videos_per_gpu=1,\n", - " workers_per_gpu=cfg.data.workers_per_gpu,\n", - " dist=False,\n", - " shuffle=False)\n", - "model = MMDataParallel(model, device_ids=[0])\n", - "outputs = single_gpu_test(model, data_loader)\n", - "\n", - "eval_config = cfg.evaluation\n", - "eval_config.pop('interval')\n", - "eval_res = dataset.evaluate(outputs, **eval_config)\n", - "for name, val in eval_res.items():\n", - " print(f'{name}: {val:.04f}')" - ] - } - ], - "metadata": { - "accelerator": "GPU", - "colab": { - "collapsed_sections": [], - "include_colab_link": true, - "name": "MMAction2 Tutorial.ipynb", - "provenance": [], - "toc_visible": true - }, - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.4" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file From 5ab0d09a03f1572fdf5fe35042f6cc02b646ea3b Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Sat, 19 Feb 2022 19:14:16 +0800 Subject: [PATCH 357/414] Delete redundant colab file --- MMAction2_Tutorial.ipynb | 1461 -------------------------------------- 1 file changed, 1461 deletions(-) delete mode 100644 MMAction2_Tutorial.ipynb diff --git a/MMAction2_Tutorial.ipynb b/MMAction2_Tutorial.ipynb deleted file mode 100644 index eddea09098..0000000000 --- a/MMAction2_Tutorial.ipynb +++ /dev/null @@ -1,1461 +0,0 @@ 
-{ - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "view-in-github", - "colab_type": "text" - }, - "source": [ - "\"Open" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "VcjSRFELVbNk" - }, - "source": [ - "# MMAction2 Tutorial\n", - "\n", - "Welcome to MMAction2! This is the official colab tutorial for using MMAction2. In this tutorial, you will learn\n", - "- Perform inference with a MMAction2 recognizer.\n", - "- Train a new recognizer with a new dataset.\n", - "- Perform spatio-temporal detection.\n", - "\n", - "Let's start!" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "7LqHGkGEVqpm" - }, - "source": [ - "## Install MMAction2" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "Bf8PpPXtVvmg", - "outputId": "75519a17-cc0a-491f-98a1-f287b090cf82" - }, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "nvcc: NVIDIA (R) Cuda compiler driver\n", - "Copyright (c) 2005-2020 NVIDIA Corporation\n", - "Built on Mon_Oct_12_20:09:46_PDT_2020\n", - "Cuda compilation tools, release 11.1, V11.1.105\n", - "Build cuda_11.1.TC455_06.29190527_0\n", - "gcc (Ubuntu 7.5.0-3ubuntu1~18.04) 7.5.0\n", - "Copyright (C) 2017 Free Software Foundation, Inc.\n", - "This is free software; see the source for copying conditions. There is NO\n", - "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n", - "\n" - ] - } - ], - "source": [ - "# Check nvcc version\n", - "!nvcc -V\n", - "# Check GCC version\n", - "!gcc --version" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "5PAJ4ArzV5Ry", - "outputId": "992b30c2-8281-4198-97c8-df2a287b0ae8" - }, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "Looking in links: https://download.pytorch.org/whl/torch_stable.html\n", - "Collecting torch==1.8.0+cu101\n", - " Downloading https://download.pytorch.org/whl/cu101/torch-1.8.0%2Bcu101-cp37-cp37m-linux_x86_64.whl (763.5 MB)\n", - "\u001b[K |████████████████████████████████| 763.5 MB 15 kB/s \n", - "\u001b[?25hCollecting torchvision==0.9.0+cu101\n", - " Downloading https://download.pytorch.org/whl/cu101/torchvision-0.9.0%2Bcu101-cp37-cp37m-linux_x86_64.whl (17.3 MB)\n", - "\u001b[K |████████████████████████████████| 17.3 MB 983 kB/s \n", - "\u001b[?25hCollecting torchtext==0.9.0\n", - " Downloading torchtext-0.9.0-cp37-cp37m-manylinux1_x86_64.whl (7.1 MB)\n", - "\u001b[K |████████████████████████████████| 7.1 MB 10.9 MB/s \n", - "\u001b[?25hCollecting torchaudio==0.8.0\n", - " Downloading torchaudio-0.8.0-cp37-cp37m-manylinux1_x86_64.whl (1.9 MB)\n", - "\u001b[K |████████████████████████████████| 1.9 MB 46.6 MB/s \n", - "\u001b[?25hRequirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from torch==1.8.0+cu101) (1.21.5)\n", - "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from torch==1.8.0+cu101) (3.10.0.2)\n", - "Requirement already satisfied: pillow>=4.1.1 in /usr/local/lib/python3.7/dist-packages (from torchvision==0.9.0+cu101) (7.1.2)\n", - "Requirement already satisfied: tqdm in /usr/local/lib/python3.7/dist-packages (from torchtext==0.9.0) (4.62.3)\n", - "Requirement already satisfied: requests in /usr/local/lib/python3.7/dist-packages (from torchtext==0.9.0) (2.23.0)\n", - "Requirement already satisfied: 
urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (1.24.3)\n", - "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (2.10)\n", - "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (2021.10.8)\n", - "Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests->torchtext==0.9.0) (3.0.4)\n", - "Installing collected packages: torch, torchvision, torchtext, torchaudio\n", - " Attempting uninstall: torch\n", - " Found existing installation: torch 1.10.0+cu111\n", - " Uninstalling torch-1.10.0+cu111:\n", - " Successfully uninstalled torch-1.10.0+cu111\n", - " Attempting uninstall: torchvision\n", - " Found existing installation: torchvision 0.11.1+cu111\n", - " Uninstalling torchvision-0.11.1+cu111:\n", - " Successfully uninstalled torchvision-0.11.1+cu111\n", - " Attempting uninstall: torchtext\n", - " Found existing installation: torchtext 0.11.0\n", - " Uninstalling torchtext-0.11.0:\n", - " Successfully uninstalled torchtext-0.11.0\n", - " Attempting uninstall: torchaudio\n", - " Found existing installation: torchaudio 0.10.0+cu111\n", - " Uninstalling torchaudio-0.10.0+cu111:\n", - " Successfully uninstalled torchaudio-0.10.0+cu111\n", - "Successfully installed torch-1.8.0+cu101 torchaudio-0.8.0 torchtext-0.9.0 torchvision-0.9.0+cu101\n", - "Looking in links: https://download.openmmlab.com/mmcv/dist/cu101/torch1.8.0/index.html\n", - "Collecting mmcv-full\n", - " Downloading https://download.openmmlab.com/mmcv/dist/cu101/torch1.8.0/mmcv_full-1.4.5-cp37-cp37m-manylinux1_x86_64.whl (60.7 MB)\n", - "\u001b[K |████████████████████████████████| 60.7 MB 66 kB/s \n", - "\u001b[?25hCollecting addict\n", - " Downloading addict-2.4.0-py3-none-any.whl (3.8 kB)\n", - "Requirement already satisfied: Pillow in /usr/local/lib/python3.7/dist-packages (from mmcv-full) (7.1.2)\n", - "Requirement already satisfied: pyyaml in /usr/local/lib/python3.7/dist-packages (from mmcv-full) (3.13)\n", - "Collecting yapf\n", - " Downloading yapf-0.32.0-py2.py3-none-any.whl (190 kB)\n", - "\u001b[K |████████████████████████████████| 190 kB 15.6 MB/s \n", - "\u001b[?25hRequirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from mmcv-full) (1.21.5)\n", - "Requirement already satisfied: packaging in /usr/local/lib/python3.7/dist-packages (from mmcv-full) (21.3)\n", - "Requirement already satisfied: opencv-python>=3 in /usr/local/lib/python3.7/dist-packages (from mmcv-full) (4.1.2.30)\n", - "Requirement already satisfied: pyparsing!=3.0.5,>=2.0.2 in /usr/local/lib/python3.7/dist-packages (from packaging->mmcv-full) (3.0.7)\n", - "Installing collected packages: yapf, addict, mmcv-full\n", - "Successfully installed addict-2.4.0 mmcv-full-1.4.5 yapf-0.32.0\n", - "Cloning into 'mmaction2'...\n", - "remote: Enumerating objects: 15036, done.\u001b[K\n", - "remote: Counting objects: 100% (233/233), done.\u001b[K\n", - "remote: Compressing objects: 100% (192/192), done.\u001b[K\n", - "remote: Total 15036 (delta 86), reused 72 (delta 41), pack-reused 14803\u001b[K\n", - "Receiving objects: 100% (15036/15036), 49.25 MiB | 25.23 MiB/s, done.\n", - "Resolving deltas: 100% (10608/10608), done.\n", - "/content/mmaction2\n", - "Obtaining file:///content/mmaction2\n", - "Collecting decord>=0.4.1\n", - " Downloading decord-0.6.0-py3-none-manylinux2010_x86_64.whl 
(13.6 MB)\n", - "\u001b[K |████████████████████████████████| 13.6 MB 10.2 MB/s \n", - "\u001b[?25hCollecting einops\n", - " Downloading einops-0.4.0-py3-none-any.whl (28 kB)\n", - "Requirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.21.0) (3.2.2)\n", - "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.21.0) (1.21.5)\n", - "Requirement already satisfied: opencv-contrib-python in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.21.0) (4.1.2.30)\n", - "Requirement already satisfied: Pillow in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.21.0) (7.1.2)\n", - "Requirement already satisfied: scipy in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.21.0) (1.4.1)\n", - "Requirement already satisfied: torch>=1.3 in /usr/local/lib/python3.7/dist-packages (from mmaction2==0.21.0) (1.8.0+cu101)\n", - "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from torch>=1.3->mmaction2==0.21.0) (3.10.0.2)\n", - "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.21.0) (1.3.2)\n", - "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.21.0) (2.8.2)\n", - "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.21.0) (3.0.7)\n", - "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmaction2==0.21.0) (0.11.0)\n", - "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.7/dist-packages (from python-dateutil>=2.1->matplotlib->mmaction2==0.21.0) (1.15.0)\n", - "Installing collected packages: einops, decord, mmaction2\n", - " Running setup.py develop for mmaction2\n", - "Successfully installed decord-0.6.0 einops-0.4.0 mmaction2-0.21.0\n", - "Collecting av\n", - " Downloading av-8.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (36.1 MB)\n", - "\u001b[K |████████████████████████████████| 36.1 MB 298 kB/s \n", - "\u001b[?25hRequirement already satisfied: imgaug in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 2)) (0.2.9)\n", - "Requirement already satisfied: librosa in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 3)) (0.8.1)\n", - "Requirement already satisfied: lmdb in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 4)) (0.99)\n", - "Requirement already satisfied: moviepy in /usr/local/lib/python3.7/dist-packages (from -r requirements/optional.txt (line 5)) (0.2.3.5)\n", - "Collecting onnx\n", - " Downloading onnx-1.11.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl (12.8 MB)\n", - "\u001b[K |████████████████████████████████| 12.8 MB 52.3 MB/s \n", - "\u001b[?25hCollecting onnxruntime\n", - " Downloading onnxruntime-1.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (4.9 MB)\n", - "\u001b[K |████████████████████████████████| 4.9 MB 51.6 MB/s \n", - "\u001b[?25hCollecting pims\n", - " Downloading PIMS-0.5.tar.gz (85 kB)\n", - "\u001b[K |████████████████████████████████| 85 kB 5.2 MB/s \n", - "\u001b[?25hCollecting PyTurboJPEG\n", - " Downloading PyTurboJPEG-1.6.5.tar.gz (11 kB)\n", - "Collecting timm\n", - " Downloading timm-0.5.4-py3-none-any.whl (431 kB)\n", - "\u001b[K |████████████████████████████████| 431 kB 
64.7 MB/s \n", - "\u001b[?25hRequirement already satisfied: Pillow in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 2)) (7.1.2)\n", - "Requirement already satisfied: numpy>=1.15.0 in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 2)) (1.21.5)\n", - "Requirement already satisfied: scikit-image>=0.11.0 in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 2)) (0.18.3)\n", - "Requirement already satisfied: imageio in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 2)) (2.4.1)\n", - "Requirement already satisfied: opencv-python in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 2)) (4.1.2.30)\n", - "Requirement already satisfied: Shapely in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 2)) (1.8.0)\n", - "Requirement already satisfied: scipy in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 2)) (1.4.1)\n", - "Requirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 2)) (3.2.2)\n", - "Requirement already satisfied: six in /usr/local/lib/python3.7/dist-packages (from imgaug->-r requirements/optional.txt (line 2)) (1.15.0)\n", - "Requirement already satisfied: PyWavelets>=1.1.1 in /usr/local/lib/python3.7/dist-packages (from scikit-image>=0.11.0->imgaug->-r requirements/optional.txt (line 2)) (1.2.0)\n", - "Requirement already satisfied: tifffile>=2019.7.26 in /usr/local/lib/python3.7/dist-packages (from scikit-image>=0.11.0->imgaug->-r requirements/optional.txt (line 2)) (2021.11.2)\n", - "Requirement already satisfied: networkx>=2.0 in /usr/local/lib/python3.7/dist-packages (from scikit-image>=0.11.0->imgaug->-r requirements/optional.txt (line 2)) (2.6.3)\n", - "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 2)) (1.3.2)\n", - "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 2)) (0.11.0)\n", - "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 2)) (2.8.2)\n", - "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->imgaug->-r requirements/optional.txt (line 2)) (3.0.7)\n", - "Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 3)) (21.3)\n", - "Requirement already satisfied: numba>=0.43.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 3)) (0.51.2)\n", - "Requirement already satisfied: resampy>=0.2.2 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 3)) (0.2.2)\n", - "Requirement already satisfied: decorator>=3.0.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 3)) (4.4.2)\n", - "Requirement already satisfied: soundfile>=0.10.2 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 3)) (0.10.3.post1)\n", - "Requirement already satisfied: scikit-learn!=0.19.0,>=0.14.0 in /usr/local/lib/python3.7/dist-packages (from 
librosa->-r requirements/optional.txt (line 3)) (1.0.2)\n", - "Requirement already satisfied: joblib>=0.14 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 3)) (1.1.0)\n", - "Requirement already satisfied: pooch>=1.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 3)) (1.6.0)\n", - "Requirement already satisfied: audioread>=2.0.0 in /usr/local/lib/python3.7/dist-packages (from librosa->-r requirements/optional.txt (line 3)) (2.1.9)\n", - "Requirement already satisfied: setuptools in /usr/local/lib/python3.7/dist-packages (from numba>=0.43.0->librosa->-r requirements/optional.txt (line 3)) (57.4.0)\n", - "Requirement already satisfied: llvmlite<0.35,>=0.34.0.dev0 in /usr/local/lib/python3.7/dist-packages (from numba>=0.43.0->librosa->-r requirements/optional.txt (line 3)) (0.34.0)\n", - "Requirement already satisfied: requests>=2.19.0 in /usr/local/lib/python3.7/dist-packages (from pooch>=1.0->librosa->-r requirements/optional.txt (line 3)) (2.23.0)\n", - "Requirement already satisfied: appdirs>=1.3.0 in /usr/local/lib/python3.7/dist-packages (from pooch>=1.0->librosa->-r requirements/optional.txt (line 3)) (1.4.4)\n", - "Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests>=2.19.0->pooch>=1.0->librosa->-r requirements/optional.txt (line 3)) (3.0.4)\n", - "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests>=2.19.0->pooch>=1.0->librosa->-r requirements/optional.txt (line 3)) (1.24.3)\n", - "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests>=2.19.0->pooch>=1.0->librosa->-r requirements/optional.txt (line 3)) (2.10)\n", - "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests>=2.19.0->pooch>=1.0->librosa->-r requirements/optional.txt (line 3)) (2021.10.8)\n", - "Requirement already satisfied: threadpoolctl>=2.0.0 in /usr/local/lib/python3.7/dist-packages (from scikit-learn!=0.19.0,>=0.14.0->librosa->-r requirements/optional.txt (line 3)) (3.1.0)\n", - "Requirement already satisfied: cffi>=1.0 in /usr/local/lib/python3.7/dist-packages (from soundfile>=0.10.2->librosa->-r requirements/optional.txt (line 3)) (1.15.0)\n", - "Requirement already satisfied: pycparser in /usr/local/lib/python3.7/dist-packages (from cffi>=1.0->soundfile>=0.10.2->librosa->-r requirements/optional.txt (line 3)) (2.21)\n", - "Requirement already satisfied: tqdm<5.0,>=4.11.2 in /usr/local/lib/python3.7/dist-packages (from moviepy->-r requirements/optional.txt (line 5)) (4.62.3)\n", - "Requirement already satisfied: typing-extensions>=3.6.2.1 in /usr/local/lib/python3.7/dist-packages (from onnx->-r requirements/optional.txt (line 6)) (3.10.0.2)\n", - "Requirement already satisfied: protobuf>=3.12.2 in /usr/local/lib/python3.7/dist-packages (from onnx->-r requirements/optional.txt (line 6)) (3.17.3)\n", - "Requirement already satisfied: flatbuffers in /usr/local/lib/python3.7/dist-packages (from onnxruntime->-r requirements/optional.txt (line 7)) (2.0)\n", - "Collecting slicerator>=0.9.8\n", - " Downloading slicerator-1.0.0-py3-none-any.whl (9.3 kB)\n", - "Requirement already satisfied: torch>=1.4 in /usr/local/lib/python3.7/dist-packages (from timm->-r requirements/optional.txt (line 10)) (1.8.0+cu101)\n", - "Requirement already satisfied: torchvision in /usr/local/lib/python3.7/dist-packages (from timm->-r 
requirements/optional.txt (line 10)) (0.9.0+cu101)\n", - "Building wheels for collected packages: pims, PyTurboJPEG\n", - " Building wheel for pims (setup.py) ... \u001b[?25l\u001b[?25hdone\n", - " Created wheel for pims: filename=PIMS-0.5-py3-none-any.whl size=84325 sha256=acdeb0697c66e2b9cc49a549f9a3c67a35b36642e6724eeac9795e25e6d9de47\n", - " Stored in directory: /root/.cache/pip/wheels/75/02/a9/86571c38081ba4c1832eb95430b5d588dfa15a738e2a603737\n", - " Building wheel for PyTurboJPEG (setup.py) ... \u001b[?25l\u001b[?25hdone\n", - " Created wheel for PyTurboJPEG: filename=PyTurboJPEG-1.6.5-py3-none-any.whl size=12160 sha256=b5fffd01e16b4d2a1d2f4e1cd976501c1e3ea1b3872f91bf595f6c025735a4e0\n", - " Stored in directory: /root/.cache/pip/wheels/1b/6a/97/17286b24cd97dda462b5a886107f8663f1ccc7705f148b3850\n", - "Successfully built pims PyTurboJPEG\n", - "Installing collected packages: slicerator, timm, PyTurboJPEG, pims, onnxruntime, onnx, av\n", - "Successfully installed PyTurboJPEG-1.6.5 av-8.1.0 onnx-1.11.0 onnxruntime-1.10.0 pims-0.5 slicerator-1.0.0 timm-0.5.4\n" - ] - } - ], - "source": [ - "# install dependencies: (use cu101 because colab has CUDA 10.1)\n", - "!pip install -U torch==1.8.0+cu101 torchvision==0.9.0+cu101 torchtext==0.9.0 torchaudio==0.8.0 -f https://download.pytorch.org/whl/torch_stable.html\n", - "\n", - "# install mmcv-full thus we could use CUDA operators\n", - "!pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cu101/torch1.8.0/index.html\n", - "\n", - "# Install mmaction2\n", - "!rm -rf mmaction2\n", - "!git clone https://github.com/open-mmlab/mmaction2.git\n", - "%cd mmaction2\n", - "\n", - "!pip install -e .\n", - "\n", - "# Install some optional requirements\n", - "!pip install -r requirements/optional.txt" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "No_zZAFpWC-a", - "outputId": "1f5dd76e-7749-4fc3-ee97-83c5e1700f29" - }, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "1.8.0+cu101 True\n", - "0.21.0\n", - "10.1\n", - "GCC 7.3\n" - ] - } - ], - "source": [ - "# Check Pytorch installation\n", - "import torch, torchvision\n", - "print(torch.__version__, torch.cuda.is_available())\n", - "\n", - "# Check MMAction2 installation\n", - "import mmaction\n", - "print(mmaction.__version__)\n", - "\n", - "# Check MMCV installation\n", - "from mmcv.ops import get_compiling_cuda_version, get_compiler_version\n", - "print(get_compiling_cuda_version())\n", - "print(get_compiler_version())" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "pXf7oV5DWdab" - }, - "source": [ - "## Perform inference with a MMAction2 recognizer\n", - "MMAction2 already provides high level APIs to do inference and training." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "64CW6d_AaT-Q", - "outputId": "d08bfb9b-ab1e-451b-d3b2-89023a59766b" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "--2021-07-11 12:44:00-- https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth\n", - "Resolving download.openmmlab.com (download.openmmlab.com)... 47.88.36.78\n", - "Connecting to download.openmmlab.com (download.openmmlab.com)|47.88.36.78|:443... connected.\n", - "HTTP request sent, awaiting response... 
200 OK\n", - "Length: 97579339 (93M) [application/octet-stream]\n", - "Saving to: ‘checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth’\n", - "\n", - "checkpoints/tsn_r50 100%[===================>] 93.06M 11.4MB/s in 8.1s \n", - "\n", - "2021-07-11 12:44:09 (11.4 MB/s) - ‘checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth’ saved [97579339/97579339]\n", - "\n" - ] - } - ], - "source": [ - "!mkdir checkpoints\n", - "!wget -c https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \\\n", - " -O checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "HNZB7NoSabzj", - "outputId": "b2f9bd71-1490-44d3-81c6-5037d804f0b1" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Use load_from_local loader\n" - ] - } - ], - "source": [ - "from mmaction.apis import inference_recognizer, init_recognizer\n", - "\n", - "# Choose to use a config and initialize the recognizer\n", - "config = 'configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py'\n", - "# Setup a checkpoint file to load\n", - "checkpoint = 'checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n", - "# Initialize the recognizer\n", - "model = init_recognizer(config, checkpoint, device='cuda:0')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "id": "rEMsBnpHapAn" - }, - "outputs": [], - "source": [ - "# Use the recognizer to do inference\n", - "video = 'demo/demo.mp4'\n", - "label = 'tools/data/kinetics/label_map_k400.txt'\n", - "results = inference_recognizer(model, video)\n", - "\n", - "labels = open(label).readlines()\n", - "labels = [x.strip() for x in labels]\n", - "results = [(labels[k[0]], k[1]) for k in results]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "NIyJXqfWathq", - "outputId": "ca24528b-f99d-414a-fa50-456f6068b463" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "arm wrestling: 29.616438\n", - "rock scissors paper: 10.754841\n", - "shaking hands: 9.908401\n", - "clapping: 9.189913\n", - "massaging feet: 8.305307\n" - ] - } - ], - "source": [ - "# Let's show the results\n", - "for result in results:\n", - " print(f'{result[0]}: ', result[1])" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "QuZG8kZ2fJ5d" - }, - "source": [ - "## Train a recognizer on customized dataset\n", - "\n", - "To train a new recognizer, there are usually three things to do:\n", - "1. Support a new dataset\n", - "2. Modify the config\n", - "3. Train a new recognizer" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "neEFyxChfgiJ" - }, - "source": [ - "### Support a new dataset\n", - "\n", - "In this tutorial, we gives an example to convert the data into the format of existing datasets. Other methods and more advanced usages can be found in the [doc](/docs/tutorials/new_dataset.md)\n", - "\n", - "Firstly, let's download a tiny dataset obtained from [Kinetics-400](https://deepmind.com/research/open-source/open-source-datasets/kinetics/). We select 30 videos with their labels as train dataset and 10 videos with their labels as test dataset." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "gjsUj9JzgUlJ", - "outputId": "61c4704d-db81-4ca5-ed16-e2454dbdfe8e" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "rm: cannot remove 'kinetics400_tiny.zip*': No such file or directory\n", - "--2021-07-11 12:44:29-- https://download.openmmlab.com/mmaction/kinetics400_tiny.zip\n", - "Resolving download.openmmlab.com (download.openmmlab.com)... 47.88.36.78\n", - "Connecting to download.openmmlab.com (download.openmmlab.com)|47.88.36.78|:443... connected.\n", - "HTTP request sent, awaiting response... 200 OK\n", - "Length: 18308682 (17M) [application/zip]\n", - "Saving to: ‘kinetics400_tiny.zip’\n", - "\n", - "kinetics400_tiny.zi 100%[===================>] 17.46M 10.7MB/s in 1.6s \n", - "\n", - "2021-07-11 12:44:31 (10.7 MB/s) - ‘kinetics400_tiny.zip’ saved [18308682/18308682]\n", - "\n" - ] - } - ], - "source": [ - "# download, decompress the data\n", - "!rm kinetics400_tiny.zip*\n", - "!rm -rf kinetics400_tiny\n", - "!wget https://download.openmmlab.com/mmaction/kinetics400_tiny.zip\n", - "!unzip kinetics400_tiny.zip > /dev/null" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "AbZ-o7V6hNw4", - "outputId": "b091909c-def2-49b5-88c2-01b00802b162" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Reading package lists...\n", - "Building dependency tree...\n", - "Reading state information...\n", - "The following NEW packages will be installed:\n", - " tree\n", - "0 upgraded, 1 newly installed, 0 to remove and 39 not upgraded.\n", - "Need to get 40.7 kB of archives.\n", - "After this operation, 105 kB of additional disk space will be used.\n", - "Get:1 http://archive.ubuntu.com/ubuntu bionic/universe amd64 tree amd64 1.7.0-5 [40.7 kB]\n", - "Fetched 40.7 kB in 0s (88.7 kB/s)\n", - "Selecting previously unselected package tree.\n", - "(Reading database ... 
160815 files and directories currently installed.)\n",
- "Preparing to unpack .../tree_1.7.0-5_amd64.deb ...\n",
- "Unpacking tree (1.7.0-5) ...\n",
- "Setting up tree (1.7.0-5) ...\n",
- "Processing triggers for man-db (2.8.3-2ubuntu0.1) ...\n",
- "kinetics400_tiny\n",
- "├── kinetics_tiny_train_video.txt\n",
- "├── kinetics_tiny_val_video.txt\n",
- "├── train\n",
- "│   ├── 27_CSXByd3s.mp4\n",
- "│   ├── 34XczvTaRiI.mp4\n",
- "│   ├── A-wiliK50Zw.mp4\n",
- "│   ├── D32_1gwq35E.mp4\n",
- "│   ├── D92m0HsHjcQ.mp4\n",
- "│   ├── DbX8mPslRXg.mp4\n",
- "│   ├── FMlSTTpN3VY.mp4\n",
- "│   ├── h10B9SVE-nk.mp4\n",
- "│   ├── h2YqqUhnR34.mp4\n",
- "│   ├── iRuyZSKhHRg.mp4\n",
- "│   ├── IyfILH9lBRo.mp4\n",
- "│   ├── kFC3KY2bOP8.mp4\n",
- "│   ├── LvcFDgCAXQs.mp4\n",
- "│   ├── O46YA8tI530.mp4\n",
- "│   ├── oMrZaozOvdQ.mp4\n",
- "│   ├── oXy-e_P_cAI.mp4\n",
- "│   ├── P5M-hAts7MQ.mp4\n",
- "│   ├── phDqGd0NKoo.mp4\n",
- "│   ├── PnOe3GZRVX8.mp4\n",
- "│   ├── R8HXQkdgKWA.mp4\n",
- "│   ├── RqnKtCEoEcA.mp4\n",
- "│   ├── soEcZZsBmDs.mp4\n",
- "│   ├── TkkZPZHbAKA.mp4\n",
- "│   ├── T_TMNGzVrDk.mp4\n",
- "│   ├── WaS0qwP46Us.mp4\n",
- "│   ├── Wh_YPQdH1Zg.mp4\n",
- "│   ├── WWP5HZJsg-o.mp4\n",
- "│   ├── xGY2dP0YUjA.mp4\n",
- "│   ├── yLC9CtWU5ws.mp4\n",
- "│   └── ZQV4U2KQ370.mp4\n",
- "└── val\n",
- "    ├── 0pVGiAU6XEA.mp4\n",
- "    ├── AQrbRSnRt8M.mp4\n",
- "    ├── b6Q_b7vgc7Q.mp4\n",
- "    ├── ddvJ6-faICE.mp4\n",
- "    ├── IcLztCtvhb8.mp4\n",
- "    ├── ik4BW3-SCts.mp4\n",
- "    ├── jqRrH30V0k4.mp4\n",
- "    ├── SU_x2LQqSLs.mp4\n",
- "    ├── u4Rm6srmIS8.mp4\n",
- "    └── y5Iu7XkTqV0.mp4\n",
- "\n",
- "2 directories, 42 files\n"
- ]
- }
- ],
- "source": [
- "# Check the directory structure of the tiny data\n",
- "\n",
- "# Install tree first\n",
- "!apt-get -q install tree\n",
- "!tree kinetics400_tiny"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "colab": {
- "base_uri": "https://localhost:8080/"
- },
- "id": "fTdi6dI0hY3g",
- "outputId": "ffda0997-8d77-431a-d66e-2f273e80c756"
- },
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "D32_1gwq35E.mp4 0\n",
- "iRuyZSKhHRg.mp4 1\n",
- "oXy-e_P_cAI.mp4 0\n",
- "34XczvTaRiI.mp4 1\n",
- "h2YqqUhnR34.mp4 0\n",
- "O46YA8tI530.mp4 0\n",
- "kFC3KY2bOP8.mp4 1\n",
- "WWP5HZJsg-o.mp4 1\n",
- "phDqGd0NKoo.mp4 1\n",
- "yLC9CtWU5ws.mp4 0\n",
- "27_CSXByd3s.mp4 1\n",
- "IyfILH9lBRo.mp4 1\n",
- "T_TMNGzVrDk.mp4 1\n",
- "TkkZPZHbAKA.mp4 0\n",
- "PnOe3GZRVX8.mp4 1\n",
- "soEcZZsBmDs.mp4 1\n",
- "FMlSTTpN3VY.mp4 1\n",
- "WaS0qwP46Us.mp4 0\n",
- "A-wiliK50Zw.mp4 1\n",
- "oMrZaozOvdQ.mp4 1\n",
- "ZQV4U2KQ370.mp4 0\n",
- "DbX8mPslRXg.mp4 1\n",
- "h10B9SVE-nk.mp4 1\n",
- "P5M-hAts7MQ.mp4 0\n",
- "R8HXQkdgKWA.mp4 0\n",
- "D92m0HsHjcQ.mp4 0\n",
- "RqnKtCEoEcA.mp4 0\n",
- "LvcFDgCAXQs.mp4 0\n",
- "xGY2dP0YUjA.mp4 0\n",
- "Wh_YPQdH1Zg.mp4 0\n"
- ]
- }
- ],
- "source": [
- "# After downloading the data, we need to check the annotation format\n",
- "!cat kinetics400_tiny/kinetics_tiny_train_video.txt"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "id": "0bq0mxmEi29H"
- },
- "source": [
- "According to the format defined in [`VideoDataset`](./datasets/video_dataset.py), each line indicates a sample video with its filepath and label, separated by a whitespace.\n",
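- "\n",
- "As a quick sanity check (a simplified sketch of the parsing that `VideoDataset` does internally), the file can also be read by hand:\n",
- "\n",
- "```python\n",
- "# Parse each `filename label` pair from the annotation file\n",
- "with open('kinetics400_tiny/kinetics_tiny_train_video.txt') as f:\n",
- "    samples = [line.strip().rsplit(' ', 1) for line in f]\n",
- "video_infos = [dict(filename=name, label=int(label)) for name, label in samples]\n",
- "print(video_infos[0])  # {'filename': 'D32_1gwq35E.mp4', 'label': 0}\n",
- "```"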
- ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "Ht_DGJA9jQar" - }, - "source": [ - "### Modify the config\n", - "\n", - "In the next step, we need to modify the config for the training.\n", - "To accelerate the process, we finetune a recognizer using a pre-trained recognizer." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "id": "LjCcmCKOjktc" - }, - "outputs": [], - "source": [ - "from mmcv import Config\n", - "cfg = Config.fromfile('./configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py')" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "tc8YhFFGjp3e" - }, - "source": [ - "Given a config that trains a TSN model on kinetics400-full dataset, we need to modify some values to use it for training TSN on Kinetics400-tiny dataset.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "tlhu9byjjt-K", - "outputId": "3b9a3c49-ace0-41d3-dd15-d6c8579755f8" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Config:\n", - "model = dict(\n", - " type='Recognizer2D',\n", - " backbone=dict(\n", - " type='ResNet',\n", - " pretrained='torchvision://resnet50',\n", - " depth=50,\n", - " norm_eval=False),\n", - " cls_head=dict(\n", - " type='TSNHead',\n", - " num_classes=2,\n", - " in_channels=2048,\n", - " spatial_type='avg',\n", - " consensus=dict(type='AvgConsensus', dim=1),\n", - " dropout_ratio=0.4,\n", - " init_std=0.01),\n", - " train_cfg=None,\n", - " test_cfg=dict(average_clips=None))\n", - "optimizer = dict(type='SGD', lr=7.8125e-05, momentum=0.9, weight_decay=0.0001)\n", - "optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2))\n", - "lr_config = dict(policy='step', step=[40, 80])\n", - "total_epochs = 10\n", - "checkpoint_config = dict(interval=5)\n", - "log_config = dict(interval=5, hooks=[dict(type='TextLoggerHook')])\n", - "dist_params = dict(backend='nccl')\n", - "log_level = 'INFO'\n", - "load_from = './checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n", - "resume_from = None\n", - "workflow = [('train', 1)]\n", - "dataset_type = 'VideoDataset'\n", - "data_root = 'kinetics400_tiny/train/'\n", - "data_root_val = 'kinetics400_tiny/val/'\n", - "ann_file_train = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n", - "ann_file_val = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", - "ann_file_test = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n", - "img_norm_cfg = dict(\n", - " mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False)\n", - "train_pipeline = [\n", - " dict(type='DecordInit'),\n", - " dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8),\n", - " dict(type='DecordDecode'),\n", - " dict(\n", - " type='MultiScaleCrop',\n", - " input_size=224,\n", - " scales=(1, 0.875, 0.75, 0.66),\n", - " random_crop=False,\n", - " max_wh_scale_gap=1),\n", - " dict(type='Resize', scale=(224, 224), keep_ratio=False),\n", - " dict(type='Flip', flip_ratio=0.5),\n", - " dict(\n", - " type='Normalize',\n", - " mean=[123.675, 116.28, 103.53],\n", - " std=[58.395, 57.12, 57.375],\n", - " to_bgr=False),\n", - " dict(type='FormatShape', input_format='NCHW'),\n", - " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", - " dict(type='ToTensor', keys=['imgs', 'label'])\n", - "]\n", - "val_pipeline = [\n", - " dict(type='DecordInit'),\n", - " dict(\n", - " type='SampleFrames',\n", - " clip_len=1,\n", - " frame_interval=1,\n", 
- " num_clips=8,\n", - " test_mode=True),\n", - " dict(type='DecordDecode'),\n", - " dict(type='Resize', scale=(-1, 256)),\n", - " dict(type='CenterCrop', crop_size=224),\n", - " dict(\n", - " type='Normalize',\n", - " mean=[123.675, 116.28, 103.53],\n", - " std=[58.395, 57.12, 57.375],\n", - " to_bgr=False),\n", - " dict(type='FormatShape', input_format='NCHW'),\n", - " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", - " dict(type='ToTensor', keys=['imgs'])\n", - "]\n", - "test_pipeline = [\n", - " dict(type='DecordInit'),\n", - " dict(\n", - " type='SampleFrames',\n", - " clip_len=1,\n", - " frame_interval=1,\n", - " num_clips=25,\n", - " test_mode=True),\n", - " dict(type='DecordDecode'),\n", - " dict(type='Resize', scale=(-1, 256)),\n", - " dict(type='ThreeCrop', crop_size=256),\n", - " dict(\n", - " type='Normalize',\n", - " mean=[123.675, 116.28, 103.53],\n", - " std=[58.395, 57.12, 57.375],\n", - " to_bgr=False),\n", - " dict(type='FormatShape', input_format='NCHW'),\n", - " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", - " dict(type='ToTensor', keys=['imgs'])\n", - "]\n", - "data = dict(\n", - " videos_per_gpu=2,\n", - " workers_per_gpu=2,\n", - " train=dict(\n", - " type='VideoDataset',\n", - " ann_file='kinetics400_tiny/kinetics_tiny_train_video.txt',\n", - " data_prefix='kinetics400_tiny/train/',\n", - " pipeline=[\n", - " dict(type='DecordInit'),\n", - " dict(\n", - " type='SampleFrames', clip_len=1, frame_interval=1,\n", - " num_clips=8),\n", - " dict(type='DecordDecode'),\n", - " dict(\n", - " type='MultiScaleCrop',\n", - " input_size=224,\n", - " scales=(1, 0.875, 0.75, 0.66),\n", - " random_crop=False,\n", - " max_wh_scale_gap=1),\n", - " dict(type='Resize', scale=(224, 224), keep_ratio=False),\n", - " dict(type='Flip', flip_ratio=0.5),\n", - " dict(\n", - " type='Normalize',\n", - " mean=[123.675, 116.28, 103.53],\n", - " std=[58.395, 57.12, 57.375],\n", - " to_bgr=False),\n", - " dict(type='FormatShape', input_format='NCHW'),\n", - " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", - " dict(type='ToTensor', keys=['imgs', 'label'])\n", - " ]),\n", - " val=dict(\n", - " type='VideoDataset',\n", - " ann_file='kinetics400_tiny/kinetics_tiny_val_video.txt',\n", - " data_prefix='kinetics400_tiny/val/',\n", - " pipeline=[\n", - " dict(type='DecordInit'),\n", - " dict(\n", - " type='SampleFrames',\n", - " clip_len=1,\n", - " frame_interval=1,\n", - " num_clips=8,\n", - " test_mode=True),\n", - " dict(type='DecordDecode'),\n", - " dict(type='Resize', scale=(-1, 256)),\n", - " dict(type='CenterCrop', crop_size=224),\n", - " dict(\n", - " type='Normalize',\n", - " mean=[123.675, 116.28, 103.53],\n", - " std=[58.395, 57.12, 57.375],\n", - " to_bgr=False),\n", - " dict(type='FormatShape', input_format='NCHW'),\n", - " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n", - " dict(type='ToTensor', keys=['imgs'])\n", - " ]),\n", - " test=dict(\n", - " type='VideoDataset',\n", - " ann_file='kinetics400_tiny/kinetics_tiny_val_video.txt',\n", - " data_prefix='kinetics400_tiny/val/',\n", - " pipeline=[\n", - " dict(type='DecordInit'),\n", - " dict(\n", - " type='SampleFrames',\n", - " clip_len=1,\n", - " frame_interval=1,\n", - " num_clips=25,\n", - " test_mode=True),\n", - " dict(type='DecordDecode'),\n", - " dict(type='Resize', scale=(-1, 256)),\n", - " dict(type='ThreeCrop', crop_size=256),\n", - " dict(\n", - " type='Normalize',\n", - " mean=[123.675, 116.28, 103.53],\n", - " std=[58.395, 57.12, 57.375],\n", - " 
to_bgr=False),\n",
- " dict(type='FormatShape', input_format='NCHW'),\n",
- " dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),\n",
- " dict(type='ToTensor', keys=['imgs'])\n",
- " ]))\n",
- "evaluation = dict(\n",
- " interval=5,\n",
- " metrics=['top_k_accuracy', 'mean_class_accuracy'],\n",
- " save_best='auto')\n",
- "work_dir = './tutorial_exps'\n",
- "omnisource = False\n",
- "seed = 0\n",
- "gpu_ids = range(0, 1)\n",
- "\n"
- ]
- }
- ],
- "source": [
- "from mmcv.runner import set_random_seed\n",
- "\n",
- "# Modify dataset type and path\n",
- "cfg.dataset_type = 'VideoDataset'\n",
- "cfg.data_root = 'kinetics400_tiny/train/'\n",
- "cfg.data_root_val = 'kinetics400_tiny/val/'\n",
- "cfg.ann_file_train = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n",
- "cfg.ann_file_val = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n",
- "cfg.ann_file_test = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n",
- "\n",
- "cfg.data.test.type = 'VideoDataset'\n",
- "cfg.data.test.ann_file = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n",
- "cfg.data.test.data_prefix = 'kinetics400_tiny/val/'\n",
- "\n",
- "cfg.data.train.type = 'VideoDataset'\n",
- "cfg.data.train.ann_file = 'kinetics400_tiny/kinetics_tiny_train_video.txt'\n",
- "cfg.data.train.data_prefix = 'kinetics400_tiny/train/'\n",
- "\n",
- "cfg.data.val.type = 'VideoDataset'\n",
- "cfg.data.val.ann_file = 'kinetics400_tiny/kinetics_tiny_val_video.txt'\n",
- "cfg.data.val.data_prefix = 'kinetics400_tiny/val/'\n",
- "\n",
- "# The flag is used to determine whether it is omnisource training\n",
- "cfg.setdefault('omnisource', False)\n",
- "# Modify the number of classes of the model in cls_head\n",
- "cfg.model.cls_head.num_classes = 2\n",
- "# We can use the pre-trained TSN model\n",
- "cfg.load_from = './checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'\n",
- "\n",
- "# Set up working dir to save files and logs.\n",
- "cfg.work_dir = './tutorial_exps'\n",
- "\n",
- "# The original learning rate (LR) is set for 8-GPU training.\n",
- "# We divide it by 8 (the number of GPUs) and by 16 (the batch size\n",
- "# reduction below) since we only use one GPU with a smaller batch size.\n",
- "cfg.data.videos_per_gpu = cfg.data.videos_per_gpu // 16\n",
- "cfg.optimizer.lr = cfg.optimizer.lr / 8 / 16\n",
- "cfg.total_epochs = 10\n",
- "\n",
- "# We can set the checkpoint saving interval to reduce the storage cost\n",
- "cfg.checkpoint_config.interval = 5\n",
- "# We can set the log print interval to reduce the times of printing logs\n",
- "cfg.log_config.interval = 5\n",
- "\n",
- "# Set the seed so that the results are more reproducible\n",
- "cfg.seed = 0\n",
- "set_random_seed(0, deterministic=False)\n",
- "cfg.gpu_ids = range(1)\n",
- "\n",
- "# Save the best\n",
- "cfg.evaluation.save_best='auto'\n",
- "\n",
- "\n",
- "# We can initialize the logger for training and have a look\n",
- "# at the final config used for training\n",
- "print(f'Config:\\n{cfg.pretty_text}')\n"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "id": "tES-qnZ3k38Z"
- },
- "source": [
- "### Train a new recognizer\n",
- "\n",
- "Finally, let's initialize the dataset and recognizer, then train a new recognizer!\n",
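- "\n",
- "As background, `build_dataset` and `build_model` below are registry-based factories: the `type` key of a config dict selects the class, and the remaining keys are passed to its constructor. A simplified sketch (for illustration only, not extra required code):\n",
- "\n",
- "```python\n",
- "# `cfg.data.train` has type='VideoDataset', so this constructs a\n",
- "# VideoDataset from the remaining keys (ann_file, data_prefix, pipeline).\n",
- "from mmaction.datasets import build_dataset\n",
- "train_dataset = build_dataset(cfg.data.train)\n",
- "print(type(train_dataset).__name__, len(train_dataset))  # VideoDataset 30\n",
- "```"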
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "dDBWkdDRk6oz", - "outputId": "a85d80d7-b3c4-43f1-d49a-057e8036807f" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Use load_from_torchvision loader\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2021-07-11 13:00:46,931 - mmaction - INFO - These parameters in pretrained checkpoint are not loaded: {'fc.bias', 'fc.weight'}\n", - "/usr/local/lib/python3.7/dist-packages/torch/utils/data/dataloader.py:477: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n", - " cpuset_checked))\n", - "2021-07-11 13:00:46,980 - mmaction - INFO - load checkpoint from ./checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth\n", - "2021-07-11 13:00:46,981 - mmaction - INFO - Use load_from_local loader\n", - "2021-07-11 13:00:47,071 - mmaction - WARNING - The model and loaded state dict do not match exactly\n", - "\n", - "size mismatch for cls_head.fc_cls.weight: copying a param with shape torch.Size([400, 2048]) from checkpoint, the shape in current model is torch.Size([2, 2048]).\n", - "size mismatch for cls_head.fc_cls.bias: copying a param with shape torch.Size([400]) from checkpoint, the shape in current model is torch.Size([2]).\n", - "2021-07-11 13:00:47,074 - mmaction - INFO - Start running, host: root@b465112b4add, work_dir: /content/mmaction2/tutorial_exps\n", - "2021-07-11 13:00:47,078 - mmaction - INFO - Hooks will be executed in the following order:\n", - "before_run:\n", - "(VERY_HIGH ) StepLrUpdaterHook \n", - "(NORMAL ) CheckpointHook \n", - "(NORMAL ) EvalHook \n", - "(VERY_LOW ) TextLoggerHook \n", - " -------------------- \n", - "before_train_epoch:\n", - "(VERY_HIGH ) StepLrUpdaterHook \n", - "(NORMAL ) EvalHook \n", - "(LOW ) IterTimerHook \n", - "(VERY_LOW ) TextLoggerHook \n", - " -------------------- \n", - "before_train_iter:\n", - "(VERY_HIGH ) StepLrUpdaterHook \n", - "(NORMAL ) EvalHook \n", - "(LOW ) IterTimerHook \n", - " -------------------- \n", - "after_train_iter:\n", - "(ABOVE_NORMAL) OptimizerHook \n", - "(NORMAL ) CheckpointHook \n", - "(NORMAL ) EvalHook \n", - "(LOW ) IterTimerHook \n", - "(VERY_LOW ) TextLoggerHook \n", - " -------------------- \n", - "after_train_epoch:\n", - "(NORMAL ) CheckpointHook \n", - "(NORMAL ) EvalHook \n", - "(VERY_LOW ) TextLoggerHook \n", - " -------------------- \n", - "before_val_epoch:\n", - "(LOW ) IterTimerHook \n", - "(VERY_LOW ) TextLoggerHook \n", - " -------------------- \n", - "before_val_iter:\n", - "(LOW ) IterTimerHook \n", - " -------------------- \n", - "after_val_iter:\n", - "(LOW ) IterTimerHook \n", - " -------------------- \n", - "after_val_epoch:\n", - "(VERY_LOW ) TextLoggerHook \n", - " -------------------- \n", - "2021-07-11 13:00:47,081 - mmaction - INFO - workflow: [('train', 1)], max: 10 epochs\n", - "/usr/local/lib/python3.7/dist-packages/mmcv/runner/hooks/evaluation.py:190: UserWarning: runner.meta is None. Creating an empty one.\n", - " warnings.warn('runner.meta is None. 
Creating an empty one.')\n", - "2021-07-11 13:00:51,802 - mmaction - INFO - Epoch [1][5/15]\tlr: 7.813e-05, eta: 0:02:16, time: 0.942, data_time: 0.730, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7604, loss: 0.7604, grad_norm: 14.8813\n", - "2021-07-11 13:00:52,884 - mmaction - INFO - Epoch [1][10/15]\tlr: 7.813e-05, eta: 0:01:21, time: 0.217, data_time: 0.028, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6282, loss: 0.6282, grad_norm: 10.1834\n", - "2021-07-11 13:00:53,706 - mmaction - INFO - Epoch [1][15/15]\tlr: 7.813e-05, eta: 0:00:59, time: 0.164, data_time: 0.001, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7165, loss: 0.7165, grad_norm: 10.8534\n", - "2021-07-11 13:00:57,724 - mmaction - INFO - Epoch [2][5/15]\tlr: 7.813e-05, eta: 0:01:09, time: 0.802, data_time: 0.596, memory: 2918, top1_acc: 0.3000, top5_acc: 1.0000, loss_cls: 0.7001, loss: 0.7001, grad_norm: 11.4311\n", - "2021-07-11 13:00:59,219 - mmaction - INFO - Epoch [2][10/15]\tlr: 7.813e-05, eta: 0:01:00, time: 0.296, data_time: 0.108, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6916, loss: 0.6916, grad_norm: 12.7101\n", - "2021-07-11 13:01:00,040 - mmaction - INFO - Epoch [2][15/15]\tlr: 7.813e-05, eta: 0:00:51, time: 0.167, data_time: 0.004, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6567, loss: 0.6567, grad_norm: 8.8837\n", - "2021-07-11 13:01:04,152 - mmaction - INFO - Epoch [3][5/15]\tlr: 7.813e-05, eta: 0:00:56, time: 0.820, data_time: 0.618, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6320, loss: 0.6320, grad_norm: 11.4025\n", - "2021-07-11 13:01:05,526 - mmaction - INFO - Epoch [3][10/15]\tlr: 7.813e-05, eta: 0:00:50, time: 0.276, data_time: 0.075, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6542, loss: 0.6542, grad_norm: 10.6429\n", - "2021-07-11 13:01:06,350 - mmaction - INFO - Epoch [3][15/15]\tlr: 7.813e-05, eta: 0:00:44, time: 0.165, data_time: 0.001, memory: 2918, top1_acc: 0.2000, top5_acc: 1.0000, loss_cls: 0.7661, loss: 0.7661, grad_norm: 12.8421\n", - "2021-07-11 13:01:10,771 - mmaction - INFO - Epoch [4][5/15]\tlr: 7.813e-05, eta: 0:00:47, time: 0.883, data_time: 0.676, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6410, loss: 0.6410, grad_norm: 10.6697\n", - "2021-07-11 13:01:11,776 - mmaction - INFO - Epoch [4][10/15]\tlr: 7.813e-05, eta: 0:00:42, time: 0.201, data_time: 0.011, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6949, loss: 0.6949, grad_norm: 10.5467\n", - "2021-07-11 13:01:12,729 - mmaction - INFO - Epoch [4][15/15]\tlr: 7.813e-05, eta: 0:00:38, time: 0.190, data_time: 0.026, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6290, loss: 0.6290, grad_norm: 11.2779\n", - "2021-07-11 13:01:16,816 - mmaction - INFO - Epoch [5][5/15]\tlr: 7.813e-05, eta: 0:00:38, time: 0.817, data_time: 0.608, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6011, loss: 0.6011, grad_norm: 9.1335\n", - "2021-07-11 13:01:18,176 - mmaction - INFO - Epoch [5][10/15]\tlr: 7.813e-05, eta: 0:00:35, time: 0.272, data_time: 0.080, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6652, loss: 0.6652, grad_norm: 11.0616\n", - "2021-07-11 13:01:19,119 - mmaction - INFO - Epoch [5][15/15]\tlr: 7.813e-05, eta: 0:00:32, time: 0.188, data_time: 0.017, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6440, loss: 0.6440, grad_norm: 11.6473\n", - "2021-07-11 13:01:19,120 - mmaction - INFO - Saving checkpoint at 5 epochs\n" - 
] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 4.9 task/s, elapsed: 2s, ETA: 0s" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2021-07-11 13:01:21,673 - mmaction - INFO - Evaluating top_k_accuracy ...\n", - "2021-07-11 13:01:21,677 - mmaction - INFO - \n", - "top1_acc\t0.7000\n", - "top5_acc\t1.0000\n", - "2021-07-11 13:01:21,679 - mmaction - INFO - Evaluating mean_class_accuracy ...\n", - "2021-07-11 13:01:21,682 - mmaction - INFO - \n", - "mean_acc\t0.7000\n", - "2021-07-11 13:01:22,264 - mmaction - INFO - Now best checkpoint is saved as best_top1_acc_epoch_5.pth.\n", - "2021-07-11 13:01:22,267 - mmaction - INFO - Best top1_acc is 0.7000 at 5 epoch.\n", - "2021-07-11 13:01:22,271 - mmaction - INFO - Epoch(val) [5][5]\ttop1_acc: 0.7000, top5_acc: 1.0000, mean_class_accuracy: 0.7000\n", - "2021-07-11 13:01:26,623 - mmaction - INFO - Epoch [6][5/15]\tlr: 7.813e-05, eta: 0:00:31, time: 0.868, data_time: 0.656, memory: 2918, top1_acc: 0.7000, top5_acc: 1.0000, loss_cls: 0.6753, loss: 0.6753, grad_norm: 11.8640\n", - "2021-07-11 13:01:27,597 - mmaction - INFO - Epoch [6][10/15]\tlr: 7.813e-05, eta: 0:00:28, time: 0.195, data_time: 0.003, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6715, loss: 0.6715, grad_norm: 11.3347\n", - "2021-07-11 13:01:28,736 - mmaction - INFO - Epoch [6][15/15]\tlr: 7.813e-05, eta: 0:00:25, time: 0.228, data_time: 0.063, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5769, loss: 0.5769, grad_norm: 9.2541\n", - "2021-07-11 13:01:32,860 - mmaction - INFO - Epoch [7][5/15]\tlr: 7.813e-05, eta: 0:00:24, time: 0.822, data_time: 0.620, memory: 2918, top1_acc: 0.9000, top5_acc: 1.0000, loss_cls: 0.5379, loss: 0.5379, grad_norm: 8.0147\n", - "2021-07-11 13:01:34,340 - mmaction - INFO - Epoch [7][10/15]\tlr: 7.813e-05, eta: 0:00:22, time: 0.298, data_time: 0.109, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6187, loss: 0.6187, grad_norm: 11.5244\n", - "2021-07-11 13:01:35,165 - mmaction - INFO - Epoch [7][15/15]\tlr: 7.813e-05, eta: 0:00:19, time: 0.165, data_time: 0.002, memory: 2918, top1_acc: 0.4000, top5_acc: 1.0000, loss_cls: 0.7063, loss: 0.7063, grad_norm: 12.4979\n", - "2021-07-11 13:01:39,435 - mmaction - INFO - Epoch [8][5/15]\tlr: 7.813e-05, eta: 0:00:17, time: 0.853, data_time: 0.641, memory: 2918, top1_acc: 1.0000, top5_acc: 1.0000, loss_cls: 0.5369, loss: 0.5369, grad_norm: 8.6545\n", - "2021-07-11 13:01:40,808 - mmaction - INFO - Epoch [8][10/15]\tlr: 7.813e-05, eta: 0:00:15, time: 0.275, data_time: 0.086, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6407, loss: 0.6407, grad_norm: 12.5537\n", - "2021-07-11 13:01:41,627 - mmaction - INFO - Epoch [8][15/15]\tlr: 7.813e-05, eta: 0:00:12, time: 0.164, data_time: 0.001, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6073, loss: 0.6073, grad_norm: 11.4028\n", - "2021-07-11 13:01:45,651 - mmaction - INFO - Epoch [9][5/15]\tlr: 7.813e-05, eta: 0:00:11, time: 0.803, data_time: 0.591, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5596, loss: 0.5596, grad_norm: 10.0821\n", - "2021-07-11 13:01:46,891 - mmaction - INFO - Epoch [9][10/15]\tlr: 7.813e-05, eta: 0:00:08, time: 0.248, data_time: 0.044, memory: 2918, top1_acc: 0.6000, top5_acc: 1.0000, loss_cls: 0.6470, loss: 0.6470, grad_norm: 11.8979\n", - "2021-07-11 13:01:47,944 - mmaction - INFO - Epoch [9][15/15]\tlr: 7.813e-05, eta: 0:00:06, time: 0.211, data_time: 0.041, memory: 
2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6657, loss: 0.6657, grad_norm: 12.0643\n",
- "2021-07-11 13:01:52,200 - mmaction - INFO - Epoch [10][5/15]\tlr: 7.813e-05, eta: 0:00:04, time: 0.849, data_time: 0.648, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.6310, loss: 0.6310, grad_norm: 11.5690\n",
- "2021-07-11 13:01:53,707 - mmaction - INFO - Epoch [10][10/15]\tlr: 7.813e-05, eta: 0:00:02, time: 0.303, data_time: 0.119, memory: 2918, top1_acc: 0.8000, top5_acc: 1.0000, loss_cls: 0.5178, loss: 0.5178, grad_norm: 9.3324\n",
- "2021-07-11 13:01:54,520 - mmaction - INFO - Epoch [10][15/15]\tlr: 7.813e-05, eta: 0:00:00, time: 0.162, data_time: 0.001, memory: 2918, top1_acc: 0.5000, top5_acc: 1.0000, loss_cls: 0.6919, loss: 0.6919, grad_norm: 12.6688\n",
- "2021-07-11 13:01:54,522 - mmaction - INFO - Saving checkpoint at 10 epochs\n"
- ]
- },
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 5.9 task/s, elapsed: 2s, ETA: 0s"
- ]
- },
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "2021-07-11 13:01:56,741 - mmaction - INFO - Evaluating top_k_accuracy ...\n",
- "2021-07-11 13:01:56,743 - mmaction - INFO - \n",
- "top1_acc\t1.0000\n",
- "top5_acc\t1.0000\n",
- "2021-07-11 13:01:56,749 - mmaction - INFO - Evaluating mean_class_accuracy ...\n",
- "2021-07-11 13:01:56,750 - mmaction - INFO - \n",
- "mean_acc\t1.0000\n",
- "2021-07-11 13:01:57,267 - mmaction - INFO - Now best checkpoint is saved as best_top1_acc_epoch_10.pth.\n",
- "2021-07-11 13:01:57,269 - mmaction - INFO - Best top1_acc is 1.0000 at 10 epoch.\n",
- "2021-07-11 13:01:57,270 - mmaction - INFO - Epoch(val) [10][5]\ttop1_acc: 1.0000, top5_acc: 1.0000, mean_class_accuracy: 1.0000\n"
- ]
- }
- ],
- "source": [
- "import os.path as osp\n",
- "\n",
- "from mmaction.datasets import build_dataset\n",
- "from mmaction.models import build_model\n",
- "from mmaction.apis import train_model\n",
- "\n",
- "import mmcv\n",
- "\n",
- "# Build the dataset\n",
- "datasets = [build_dataset(cfg.data.train)]\n",
- "\n",
- "# Build the recognizer\n",
- "model = build_model(cfg.model, train_cfg=cfg.get('train_cfg'), test_cfg=cfg.get('test_cfg'))\n",
- "\n",
- "# Create work_dir\n",
- "mmcv.mkdir_or_exist(osp.abspath(cfg.work_dir))\n",
- "train_model(model, datasets, cfg, distributed=False, validate=True)"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "id": "zdSd7oTLlxIf"
- },
- "source": [
- "### Understand the log\n",
- "From the log, we can have a basic understanding of the training process and know how well the recognizer is trained.\n",
- "\n",
- "First, the ResNet-50 backbone pre-trained on ImageNet is loaded; this is a common practice, since training from scratch is much more costly. The log shows that all the weights of the ResNet-50 backbone are loaded except the `fc.bias` and `fc.weight`.\n",
- "\n",
- "Second, since the dataset we are using is small, we load a pre-trained TSN model and finetune it for action recognition.\n",
- "The original TSN is trained on the full Kinetics-400 dataset, which contains 400 classes, but the Kinetics-400 Tiny dataset only has 2 classes. Therefore, the last FC layer of the pre-trained TSN has a different weight shape and is not loaded.\n",
- "\n",
- "Third, after training, the recognizer is evaluated with the default evaluation. The results show that the recognizer achieves 100% top1 accuracy and 100% top5 accuracy on the val dataset.\n",
- "\n",
- "Not bad!\n",
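- "\n",
- "To see the size mismatch concretely, a minimal sketch (assuming `model` is still the recognizer built above, before any parallel wrapping) is to print the shapes in the classification head and compare them with the `[400, 2048]` shape reported in the warning:\n",
- "\n",
- "```python\n",
- "# The new head is a 2-way classifier on 2048-d features, so the 400-way\n",
- "# `fc_cls` weights from the checkpoint cannot be loaded into it.\n",
- "for name, param in model.cls_head.named_parameters():\n",
- "    print(name, tuple(param.shape))  # e.g. fc_cls.weight (2, 2048)\n",
- "```"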
- ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "ryVoSfZVmogw" - }, - "source": [ - "## Test the trained recognizer\n", - "\n", - "After finetuning the recognizer, let's check the prediction results!" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "eyY3hCMwyTct", - "outputId": "ea54ff0a-4299-4e93-c1ca-4fe597e7516b" - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[ ] 0/10, elapsed: 0s, ETA:" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/usr/local/lib/python3.7/dist-packages/torch/utils/data/dataloader.py:477: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n", - " cpuset_checked))\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 10/10, 2.2 task/s, elapsed: 5s, ETA: 0s\n", - "Evaluating top_k_accuracy ...\n", - "\n", - "top1_acc\t1.0000\n", - "top5_acc\t1.0000\n", - "\n", - "Evaluating mean_class_accuracy ...\n", - "\n", - "mean_acc\t1.0000\n", - "top1_acc: 1.0000\n", - "top5_acc: 1.0000\n", - "mean_class_accuracy: 1.0000\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/content/mmaction2/mmaction/datasets/base.py:166: UserWarning: Option arguments for metrics has been changed to `metric_options`, See 'https://github.com/open-mmlab/mmaction2/pull/286' for more details\n", - " 'Option arguments for metrics has been changed to '\n" - ] - } - ], - "source": [ - "from mmaction.apis import single_gpu_test\n", - "from mmaction.datasets import build_dataloader\n", - "from mmcv.parallel import MMDataParallel\n", - "\n", - "# Build a test dataloader\n", - "dataset = build_dataset(cfg.data.test, dict(test_mode=True))\n", - "data_loader = build_dataloader(\n", - " dataset,\n", - " videos_per_gpu=1,\n", - " workers_per_gpu=cfg.data.workers_per_gpu,\n", - " dist=False,\n", - " shuffle=False)\n", - "model = MMDataParallel(model, device_ids=[0])\n", - "outputs = single_gpu_test(model, data_loader)\n", - "\n", - "eval_config = cfg.evaluation\n", - "eval_config.pop('interval')\n", - "eval_res = dataset.evaluate(outputs, **eval_config)\n", - "for name, val in eval_res.items():\n", - " print(f'{name}: {val:.04f}')" - ] - }, - { - "cell_type": "markdown", - "source": [ - "## Perform Spatio-Temporal Detection\n", - "Here we first install MMDetection." 
- ], - "metadata": { - "id": "jZ4t44nWmZDM" - } - }, - { - "cell_type": "code", - "source": [ - "# Git clone mmdetection repo\n", - "%cd ..\n", - "!git clone https://github.com/open-mmlab/mmdetection.git\n", - "%cd mmdetection\n", - "\n", - "# install mmdet\n", - "!pip install -e .\n", - "%cd ../mmaction2" - ], - "metadata": { - "id": "w1p0_g76nHOQ", - "outputId": "b30a6be3-c457-452e-c789-7083117c5011", - "colab": { - "base_uri": "https://localhost:8080/" - } - }, - "execution_count": 3, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "/content\n", - "Cloning into 'mmdetection'...\n", - "remote: Enumerating objects: 23137, done.\n", - "remote: Total 23137 (delta 0), reused 0 (delta 0), pack-reused 23137\n", - "Receiving objects: 100% (23137/23137), 25.88 MiB | 25.75 MiB/s, done.\n", - "Resolving deltas: 100% (16198/16198), done.\n", - "/content/mmdetection\n", - "Obtaining file:///content/mmdetection\n", - "Requirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from mmdet==2.21.0) (3.2.2)\n", - "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from mmdet==2.21.0) (1.21.5)\n", - "Requirement already satisfied: pycocotools in /usr/local/lib/python3.7/dist-packages (from mmdet==2.21.0) (2.0.4)\n", - "Requirement already satisfied: six in /usr/local/lib/python3.7/dist-packages (from mmdet==2.21.0) (1.15.0)\n", - "Collecting terminaltables\n", - " Downloading terminaltables-3.1.10-py2.py3-none-any.whl (15 kB)\n", - "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmdet==2.21.0) (0.11.0)\n", - "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmdet==2.21.0) (2.8.2)\n", - "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmdet==2.21.0) (1.3.2)\n", - "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->mmdet==2.21.0) (3.0.7)\n", - "Installing collected packages: terminaltables, mmdet\n", - " Running setup.py develop for mmdet\n", - "Successfully installed mmdet-2.21.0 terminaltables-3.1.10\n", - "/content/mmaction2\n" - ] - } - ] - }, - { - "cell_type": "markdown", - "source": [ - "Download a video to the `demo` directory in MMAction2." - ], - "metadata": { - "id": "vlOQsH8OnVKn" - } - }, - { - "cell_type": "code", - "source": [ - "!wget https://download.openmmlab.com/mmaction/dataset/sample/1j20qq1JyX4.mp4 -O demo/1j20qq1JyX4.mp4" - ], - "metadata": { - "id": "QaW3jg5Enish", - "outputId": "c70cde3a-b337-41d0-cb08-82dfc746d9ef", - "colab": { - "base_uri": "https://localhost:8080/" - } - }, - "execution_count": 4, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "--2022-02-19 11:02:59-- https://download.openmmlab.com/mmaction/dataset/sample/1j20qq1JyX4.mp4\n", - "Resolving download.openmmlab.com (download.openmmlab.com)... 47.254.186.233\n", - "Connecting to download.openmmlab.com (download.openmmlab.com)|47.254.186.233|:443... connected.\n", - "HTTP request sent, awaiting response...
200 OK\n", - "Length: 4864186 (4.6M) [video/mp4]\n", - "Saving to: ‘demo/1j20qq1JyX4.mp4’\n", - "\n", - "demo/1j20qq1JyX4.mp 100%[===================>] 4.64M 3.78MB/s in 1.2s \n", - "\n", - "2022-02-19 11:03:01 (3.78 MB/s) - ‘demo/1j20qq1JyX4.mp4’ saved [4864186/4864186]\n", - "\n" - ] - } - ] - }, - { - "cell_type": "markdown", - "source": [ - "Run spatio-temporal demo." - ], - "metadata": { - "id": "LYGxdu8Vnoah" - } - }, - { - "cell_type": "code", - "source": [ - "!python demo/demo_spatiotemporal_det.py --video demo/1j20qq1JyX4.mp4" - ], - "metadata": { - "id": "LPLiaHaYnrb7", - "outputId": "8a8f8a16-ad7b-4559-c19c-c8264533bff3", - "colab": { - "base_uri": "https://localhost:8080/" - } - }, - "execution_count": 5, - "outputs": [ - { - "output_type": "stream", - "name": "stdout", - "text": [ - "Imageio: 'ffmpeg-linux64-v3.3.1' was not found on your computer; downloading it now.\n", - "Try 1. Download from https://github.com/imageio/imageio-binaries/raw/master/ffmpeg/ffmpeg-linux64-v3.3.1 (43.8 MB)\n", - "Downloading: 45929032/45929032 bytes (100.0%)\n", - " Done\n", - "File saved as /root/.imageio/ffmpeg/ffmpeg-linux64-v3.3.1.\n", - "load checkpoint from http path: http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth\n", - "Downloading: \"http://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth\" to /root/.cache/torch/hub/checkpoints/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth\n", - "100% 160M/160M [00:21<00:00, 7.77MB/s]\n", - "Performing Human Detection for each frame\n", - "[>>] 217/217, 8.6 task/s, elapsed: 25s, ETA: 0sload checkpoint from http path: https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201217-16378594.pth\n", - "Downloading: \"https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201217-16378594.pth\" to /root/.cache/torch/hub/checkpoints/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201217-16378594.pth\n", - "100% 228M/228M [00:31<00:00, 7.55MB/s]\n", - "Performing SpatioTemporal Action Detection for each clip\n", - "[> ] 167/217, 7.7
task/s, elapsed: 22s, ETA: 7sPerforming visualization\n", - "[MoviePy] >>>> Building video demo/stdet_demo.mp4\n", - "[MoviePy] Writing video demo/stdet_demo.mp4\n", - "100% 434/434 [00:12<00:00, 36.07it/s]\n", - "[MoviePy] Done.\n", - "[MoviePy] >>>> Video ready: demo/stdet_demo.mp4 \n", - "\n" - ] - } - ] - }, - { - "cell_type": "code", - "source": [ - "# Check the video\n", - "from IPython.display import HTML\n", - "from base64 import b64encode\n", - "mp4 = open('demo/stdet_demo.mp4','rb').read()\n", - "data_url = \"data:video/mp4;base64,\" + b64encode(mp4).decode()\n", - "HTML(\"\"\"\n", - "<video controls>\n", - "  <source src=\"%s\" type=\"video/mp4\">\n", - "</video>\n", - "\"\"\" % data_url)" - ], - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/", - "height": 341 - }, - "id": "-0atQCzBo9-C", - "outputId": "b6bb3a67-669c-45d0-cdf4-25b6210362d0" - }, - "execution_count": 6, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/html": [ - "\n", - "\n" - ], - "text/plain": [ - "<IPython.core.display.HTML object>" - ] - }, - "metadata": {}, - "execution_count": 6 - } - ] - } - ], - "metadata": { - "accelerator": "GPU", - "colab": { - "collapsed_sections": [], - "name": "MMAction2 Tutorial.ipynb", - "provenance": [], - "toc_visible": true, - "include_colab_link": true - }, - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.4" - } - }, - "nbformat": 4, - "nbformat_minor": 0 -} \ No newline at end of file From 2e503b4e6a2b086df04dae44a30bb0c95ab7a70a Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Sat, 19 Feb 2022 19:24:30 +0800 Subject: [PATCH 358/414] Fix lint caused by colab automatic upload (#1461) --- demo/mmaction2_tutorial.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/demo/mmaction2_tutorial.ipynb b/demo/mmaction2_tutorial.ipynb index ff7964f6d1..aadc0a6fa8 100644 --- a/demo/mmaction2_tutorial.ipynb +++ b/demo/mmaction2_tutorial.ipynb @@ -1458,4 +1458,4 @@ }, "nbformat": 4, "nbformat_minor": 0 -} \ No newline at end of file +} From 4cc6102f5bf7e17be017ee89dcb343982ad6f32a Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Sun, 20 Feb 2022 14:33:00 +0800 Subject: [PATCH 359/414] [Fix] Fix bug caused by distributed (#1459) --- mmaction/apis/train.py | 10 +++++++--- tools/train.py | 2 +- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/mmaction/apis/train.py b/mmaction/apis/train.py index e4400d67ea..80d35218ac 100644 --- a/mmaction/apis/train.py +++ b/mmaction/apis/train.py @@ -19,7 +19,7 @@ from .test import multi_gpu_test -def init_random_seed(seed=None, device='cuda'): +def init_random_seed(seed=None, device='cuda', distributed=True): """Initialize random seed. If the seed is not set, the seed will be automatically randomized, @@ -28,6 +28,8 @@ def init_random_seed(seed=None, device='cuda'): seed (int, Optional): The seed. Default to None. device (str): The device where the seed will be put on. Default to 'cuda'. + distributed (bool): Whether to use distributed training. + Default: True. Returns: int: Seed to be used.
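        Example:
            >>> # Illustrative sketch only (not part of this patch): with
            >>> # ``distributed=True`` every rank ends up with the same value,
            >>> # since rank 0 draws the seed and broadcasts it to the others.
            >>> seed = init_random_seed(None, device='cuda', distributed=True)
            >>> set_random_seed(seed, deterministic=False)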
""" @@ -48,7 +50,8 @@ def init_random_seed(seed=None, device='cuda'): else: random_num = torch.tensor(0, dtype=torch.int32, device=device) - dist.broadcast(random_num, src=0) + if distributed: + dist.broadcast(random_num, src=0) return random_num.item() @@ -203,7 +206,8 @@ def train_model(model, runner_kwargs = dict(train_ratio=train_ratio) runner.run(data_loaders, cfg.workflow, cfg.total_epochs, **runner_kwargs) - dist.barrier() + if distributed: + dist.barrier() time.sleep(5) if test['test_last'] or test['test_best']: diff --git a/tools/train.py b/tools/train.py index 8c87ae650e..84258f1c59 100644 --- a/tools/train.py +++ b/tools/train.py @@ -158,7 +158,7 @@ def main(): logger.info(f'Config: {cfg.pretty_text}') # set random seeds - seed = init_random_seed(args.seed) + seed = init_random_seed(args.seed, distributed=distributed) logger.info(f'Set random seed to {seed}, ' f'deterministic: {args.deterministic}') set_random_seed(seed, deterministic=args.deterministic) From 9bb896f3745421378d2cba3662d4ec036175a24b Mon Sep 17 00:00:00 2001 From: Jamie Date: Sun, 20 Feb 2022 14:33:31 +0800 Subject: [PATCH 360/414] [Feature] Support topk customizing in models/heads/base.py (#1452) * [Feature] Support topk customizing in models/heads/base.py * Fix lint * Fix lint * fix lint Co-authored-by: Haodong Duan --- mmaction/models/heads/base.py | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/mmaction/models/heads/base.py b/mmaction/models/heads/base.py index 2f6555c191..d89e3af312 100644 --- a/mmaction/models/heads/base.py +++ b/mmaction/models/heads/base.py @@ -42,6 +42,7 @@ class BaseHead(nn.Module, metaclass=ABCMeta): recognition task. Default: False. label_smooth_eps (float): Epsilon used in label smooth. Reference: arxiv.org/abs/1906.02629. Default: 0. + topk (int | tuple): Top-k accuracy. Default: (1, 5). """ def __init__(self, @@ -49,13 +50,20 @@ def __init__(self, in_channels, loss_cls=dict(type='CrossEntropyLoss', loss_weight=1.0), multi_class=False, - label_smooth_eps=0.0): + label_smooth_eps=0.0, + topk=(1, 5)): super().__init__() self.num_classes = num_classes self.in_channels = in_channels self.loss_cls = build_loss(loss_cls) self.multi_class = multi_class self.label_smooth_eps = label_smooth_eps + assert isinstance(topk, (int, tuple)) + if isinstance(topk, int): + topk = (topk, ) + for _topk in topk: + assert _topk > 0, 'Top-k should be larger than 0' + self.topk = topk @abstractmethod def init_weights(self): @@ -75,7 +83,7 @@ def loss(self, cls_score, labels, **kwargs): Returns: dict: A dict containing field 'loss_cls'(mandatory) - and 'top1_acc', 'top5_acc'(optional). + and 'topk_acc'(optional). """ losses = dict() if labels.shape == torch.Size([]): @@ -89,11 +97,11 @@ def loss(self, cls_score, labels, **kwargs): if not self.multi_class and cls_score.size() != labels.size(): top_k_acc = top_k_accuracy(cls_score.detach().cpu().numpy(), - labels.detach().cpu().numpy(), (1, 5)) - losses['top1_acc'] = torch.tensor( - top_k_acc[0], device=cls_score.device) - losses['top5_acc'] = torch.tensor( - top_k_acc[1], device=cls_score.device) + labels.detach().cpu().numpy(), + self.topk) + for k, a in zip(self.topk, top_k_acc): + losses[f'top{k}_acc'] = torch.tensor( + a, device=cls_score.device) elif self.multi_class and self.label_smooth_eps != 0: labels = ((1 - self.label_smooth_eps) * labels + From e9f141520bbafaf3cde3f3d35279ee4430bc4d86 Mon Sep 17 00:00:00 2001 From: "Michael P. 
Camilleri" Date: Fri, 25 Feb 2022 04:19:06 +0000 Subject: [PATCH 361/414] [Improvement] Singlelabel support (#1434) * Support for Single-Label Classification in bbox_head * Formatting * Formatting * Updated code comments * BUGS BUG Fixes: * Passing array of thresholds to bbox2result did not trigger multilabel evaluation. * BUGS, Tests BUG Fixes: * Single-label training with multiple (tentative) labels was not normalised. Tests: * Full unit testing for single-label classification * Doc Documentation: * Clarified method for enforcing single-label classification * Added note that this is supported in LFB Documentation * Features Features * Support for multi-target cross-entropy for torch < 1.10 * fix pre-commit Co-authored-by: Haodong Duan --- configs/detection/lfb/README.md | 3 + mmaction/core/bbox/transforms.py | 34 +++++-- mmaction/models/heads/bbox_head.py | 144 ++++++++++++++++++--------- requirements/optional.txt | 1 + tests/test_models/test_head.py | 63 ++++++++++++- tests/test_utils/test_bbox.py | 17 ++++ 6 files changed, 207 insertions(+), 55 deletions(-) diff --git a/configs/detection/lfb/README.md b/configs/detection/lfb/README.md index 8e125d7593..dbdab545d5 100644 --- a/configs/detection/lfb/README.md +++ b/configs/detection/lfb/README.md @@ -34,6 +34,9 @@ To understand the world, we humans constantly need to relate the present to the 3. Because the long-term features are randomly sampled in testing, the test accuracy may have some differences. 4. Before training or testing LFB, you need to infer the feature bank with [lfb_slowonly_r50_ava_infer.py](/configs/detection/lfb/lfb_slowonly_r50_ava_infer.py). For more details on inferring the feature bank, you can refer to the [Train](#Train) part. 5. You can also download the long-term feature bank from [AVA_train_val_float32_lfb](https://download.openmmlab.com/mmaction/detection/lfb/AVA_train_val_float32_lfb.rar) or [AVA_train_val_float16_lfb](https://download.openmmlab.com/mmaction/detection/lfb/AVA_train_val_float16_lfb.rar), and then put them under `lfb_prefix_path`. +6. The ROIHead now supports single-label classification (i.e. the network outputs at most + one label per actor). This can be done by (a) setting multilabel=False during training and + (b) setting test_cfg.rcnn.action_thr to a negative value (e.g. -1.0) for testing. ::: diff --git a/mmaction/core/bbox/transforms.py b/mmaction/core/bbox/transforms.py index 6d9bb4eb30..4defb1817d 100644 --- a/mmaction/core/bbox/transforms.py +++ b/mmaction/core/bbox/transforms.py @@ -5,12 +5,23 @@ def bbox2result(bboxes, labels, num_classes, thr=0.01): """Convert detection results to a list of numpy arrays. + This identifies single-label classification (as opposed to multi-label) + through the thr parameter which is set to a negative value. + + Currently, the way to set this is to set + `test_cfg.rcnn.action_thr=-1.0` + ToDo: The ideal way would be for this to be automatically set when the + model cfg uses multilabel=False, however this could be a breaking change + and is left as a future exercise. + NB - this should not interfere with the evaluation in any case. + Args: bboxes (Tensor): shape (n, 4) labels (Tensor): shape (n, #num_classes) num_classes (int): class number, including background class thr (float): The score threshold used when converting predictions to - detection results + detection results.
If a single negative value, uses single-label + classification Returns: list(ndarray): bbox results of each class """ @@ -18,19 +29,28 @@ def bbox2result(bboxes, labels, num_classes, thr=0.01): return list(np.zeros((num_classes - 1, 0, 5), dtype=np.float32)) bboxes = bboxes.cpu().numpy() - labels = labels.cpu().numpy() + scores = labels.cpu().numpy() # rename for clarification + + # Although we can handle single-label classification, we still want scores + assert scores.shape[-1] > 1 - # We only handle multilabel now - assert labels.shape[-1] > 1 + # Robustly check for multi/single-label: + if not hasattr(thr, '__len__'): + multilabel = thr >= 0 + thr = (thr, ) * num_classes + else: + multilabel = True - scores = labels # rename for clarification - thr = (thr, ) * num_classes if isinstance(thr, float) else thr + # Check Shape assert scores.shape[1] == num_classes assert len(thr) == num_classes result = [] for i in range(num_classes - 1): - where = scores[:, i + 1] > thr[i + 1] + if multilabel: + where = (scores[:, i + 1] > thr[i + 1]) + else: + where = (scores[:, 1:].argmax(axis=1) == i) result.append( np.concatenate((bboxes[where, :4], scores[where, i + 1:i + 2]), axis=1)) diff --git a/mmaction/models/heads/bbox_head.py b/mmaction/models/heads/bbox_head.py index 4d4d2d70c7..19787a5eb1 100644 --- a/mmaction/models/heads/bbox_head.py +++ b/mmaction/models/heads/bbox_head.py @@ -11,6 +11,25 @@ except (ImportError, ModuleNotFoundError): mmdet_imported = False +# Resolve cross-entropy function to support multi-target in Torch < 1.10 +# This is a very basic 'hack', with minimal functionality to support the +# procedure under prior torch versions +from packaging import version as pv + +if pv.parse(torch.__version__) < pv.parse('1.10'): + + def cross_entropy_loss(input, target, reduction='None'): + input = input.log_softmax(dim=-1) # Compute Log of Softmax + loss = -(input * target).sum(dim=-1) # Compute Loss manually + if reduction.lower() == 'mean': + return loss.mean() + elif reduction.lower() == 'sum': + return loss.sum() + else: + return loss +else: + cross_entropy_loss = F.cross_entropy + class BBoxHeadAVA(nn.Module): """Simplest RoI head, with only two fc layers for classification and @@ -33,10 +52,9 @@ class BBoxHeadAVA(nn.Module): Default: 0. dropout_before_pool (bool): Dropout Feature before spatial temporal pooling. Default: True. - topk (int or tuple[int]): Parameter for evaluating multilabel accuracy. + topk (int or tuple[int]): Parameter for evaluating Top-K accuracy. Default: (3, 5) multilabel (bool): Whether used for a multilabel task. Default: True. - (Only support multilabel == True now). """ def __init__( @@ -44,10 +62,9 @@ def __init__( temporal_pool_type='avg', spatial_pool_type='max', in_channels=2048, - # The first class is reserved, to classify bbox as pos / neg focal_gamma=0., focal_alpha=1., - num_classes=81, + num_classes=81, # First class reserved (BBox as pos/neg) dropout_ratio=0, dropout_before_pool=True, topk=(3, 5), @@ -80,13 +97,10 @@ def __init__( else: raise TypeError('topk should be int or tuple[int], ' f'but get {type(topk)}') - # Class 0 is ignored when calculaing multilabel accuracy, - # so topk cannot be equal to num_classes + # Class 0 is ignored when calculating accuracy, + # so topk cannot be equal to num_classes. 
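        # Illustrative note (assuming the AVA default num_classes=81, i.e. 80
        # action classes plus the reserved class 0): topk=(3, 5) is valid,
        # while topk=81 would fail the assertion below.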
assert all([k < num_classes for k in self.topk]) - # Handle AVA first - assert self.multilabel - in_channels = self.in_channels # Pool by default if self.temporal_pool_type == 'avg': @@ -133,38 +147,62 @@ def get_targets(sampling_results, gt_bboxes, gt_labels, rcnn_train_cfg): return cls_reg_targets @staticmethod - def recall_prec(pred_vec, target_vec): - """ + def get_recall_prec(pred_vec, target_vec): + """Computes the Recall/Precision for both multi-label and single label + scenarios. + + Note that the computation calculates the micro average. + + Note, that in both cases, the concept of correct/incorrect is the same. Args: pred_vec (tensor[N x C]): each element is either 0 or 1 - target_vec (tensor[N x C]): each element is either 0 or 1 - + target_vec (tensor[N x C]): each element is either 0 or 1 - for + single label it is expected that only one element is on (1) + although this is not enforced. """ correct = pred_vec & target_vec - # Seems torch 1.5 has no auto type conversion - recall = correct.sum(1) / target_vec.sum(1).float() + recall = correct.sum(1) / target_vec.sum(1).float() # Enforce Float prec = correct.sum(1) / (pred_vec.sum(1) + 1e-6) return recall.mean(), prec.mean() - def multi_label_accuracy(self, pred, target, thr=0.5): - pred = pred.sigmoid() - pred_vec = pred > thr - # Target is 0 or 1, so using 0.5 as the borderline is OK - target_vec = target > 0.5 - recall_thr, prec_thr = self.recall_prec(pred_vec, target_vec) + @staticmethod + def topk_to_matrix(probs, k): + """Converts top-k to binary matrix.""" + topk_labels = probs.topk(k, 1, True, True)[1] + topk_matrix = probs.new_full(probs.size(), 0, dtype=torch.bool) + for i in range(probs.shape[0]): + topk_matrix[i, topk_labels[i]] = 1 + return topk_matrix + + def topk_accuracy(self, pred, target, thr=0.5): + """Computes the Top-K Accuracies for both single and multi-label + scenarios.""" + # Define Target vector: + target_bool = target > 0.5 + + # Branch on Multilabel for computing output classification + if self.multilabel: + pred = pred.sigmoid() + else: + pred = pred.softmax(dim=1) + + # Compute at threshold (K=1 for single) + if self.multilabel: + pred_bool = pred > thr + else: + pred_bool = self.topk_to_matrix(pred, 1) + recall_thr, prec_thr = self.get_recall_prec(pred_bool, target_bool) - recalls, precs = [], [] + # Compute at various K + recalls_k, precs_k = [], [] for k in self.topk: - _, pred_label = pred.topk(k, 1, True, True) - pred_vec = pred.new_full(pred.size(), 0, dtype=torch.bool) + pred_bool = self.topk_to_matrix(pred, k) + recall, prec = self.get_recall_prec(pred_bool, target_bool) + recalls_k.append(recall) + precs_k.append(prec) - num_sample = pred.shape[0] - for i in range(num_sample): - pred_vec[i, pred_label[i]] = 1 - recall_k, prec_k = self.recall_prec(pred_vec, target_vec) - recalls.append(recall_k) - precs.append(prec_k) - return recall_thr, prec_thr, recalls, precs + # Return all + return recall_thr, prec_thr, recalls_k, precs_k def loss(self, cls_score, @@ -177,27 +215,41 @@ def loss(self, reduce=True): losses = dict() + # Only use the cls_score if cls_score is not None: - # Only use the cls_score - labels = labels[:, 1:] + labels = labels[:, 1:] # Get valid labels (ignore first one) pos_inds = torch.sum(labels, dim=-1) > 0 cls_score = cls_score[pos_inds, 1:] labels = labels[pos_inds] - bce_loss = F.binary_cross_entropy_with_logits - - loss = bce_loss(cls_score, labels, reduction='none') - pt = torch.exp(-loss) - F_loss = self.focal_alpha * (1 - pt)**self.focal_gamma * loss - 
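        # Illustrative note: pt = exp(-loss) is the probability the model
        # assigns to the correct outcome, so the (1 - pt)**focal_gamma factor
        # down-weights easy examples; with the defaults focal_gamma=0 and
        # focal_alpha=1 this reduces to plain binary cross-entropy.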
losses['loss_action_cls'] = torch.mean(F_loss) - - recall_thr, prec_thr, recall_k, prec_k = self.multi_label_accuracy( + # Compute First Recall/Precisions + # This has to be done first before normalising the label-space. + recall_thr, prec_thr, recall_k, prec_k = self.topk_accuracy( cls_score, labels, thr=0.5) losses['recall@thr=0.5'] = recall_thr losses['prec@thr=0.5'] = prec_thr for i, k in enumerate(self.topk): losses[f'recall@top{k}'] = recall_k[i] losses[f'prec@top{k}'] = prec_k[i] + + # If Single-label, need to ensure that target labels sum to 1: ie + # that they are valid probabilities. + if not self.multilabel: + labels = labels / labels.sum(dim=1, keepdim=True) + + # Select Loss function based on single/multi-label + # NB. Both losses auto-compute sigmoid/softmax on prediction + if self.multilabel: + loss_func = F.binary_cross_entropy_with_logits + else: + loss_func = cross_entropy_loss + + # Compute loss + loss = loss_func(cls_score, labels, reduction='none') + pt = torch.exp(-loss) + F_loss = self.focal_alpha * (1 - pt)**self.focal_gamma * loss + losses['loss_action_cls'] = torch.mean(F_loss) + return losses def get_det_bboxes(self, @@ -212,9 +264,15 @@ def get_det_bboxes(self, if isinstance(cls_score, list): cls_score = sum(cls_score) / float(len(cls_score)) - assert self.multilabel + # Handle Multi/Single Label + if cls_score is not None: + if self.multilabel: + scores = cls_score.sigmoid() + else: + scores = cls_score.softmax(dim=-1) + else: + scores = None - scores = cls_score.sigmoid() if cls_score is not None else None bboxes = rois[:, 1:] assert bboxes.shape[-1] == 4 diff --git a/requirements/optional.txt b/requirements/optional.txt index b34450bb70..631cfe7b86 100644 --- a/requirements/optional.txt +++ b/requirements/optional.txt @@ -5,6 +5,7 @@ lmdb moviepy onnx onnxruntime +packaging pims PyTurboJPEG timm diff --git a/tests/test_models/test_head.py b/tests/test_models/test_head.py index 7c9b42fd36..21ebf9a398 100644 --- a/tests/test_models/test_head.py +++ b/tests/test_models/test_head.py @@ -61,11 +61,6 @@ def test_bbox_head_ava(): ret, _ = bbox_head(input) assert ret.shape == (3, 4) - bbox_head = BBoxHeadAVA() - bbox_head.init_weights() - bbox_head = BBoxHeadAVA(temporal_pool_type='max', spatial_pool_type='avg') - bbox_head.init_weights() - cls_score = torch.tensor( [[0.568, -0.162, 0.273, -0.390, 0.447, 0.102, -0.409], [2.388, 0.609, 0.369, 1.630, -0.808, -0.212, 0.296], @@ -76,6 +71,32 @@ def test_bbox_head_ava(): [0., 1., 0., 0., 1., 0., 1.], [0., 0., 1., 1., 0., 0., 1.]]) label_weights = torch.tensor([1., 1., 1., 1.]) + + # Test topk_to_matrix() + assert torch.equal( + BBoxHeadAVA.topk_to_matrix(cls_score[:, 1:], 1), + torch.tensor([[0, 0, 0, 1, 0, 0], [0, 0, 1, 0, 0, 0], + [0, 0, 0, 0, 1, 0], [0, 0, 0, 1, 0, 0]], + dtype=bool)) + assert torch.equal( + BBoxHeadAVA.topk_to_matrix(cls_score[:, 1:], 2), + torch.tensor([[0, 1, 0, 1, 0, 0], [1, 0, 1, 0, 0, 0], + [0, 0, 0, 1, 1, 0], [0, 0, 0, 1, 0, 1]], + dtype=bool)) + assert torch.equal( + BBoxHeadAVA.topk_to_matrix(cls_score[:, 1:], 3), + torch.tensor([[0, 1, 0, 1, 1, 0], [1, 1, 1, 0, 0, 0], + [0, 0, 0, 1, 1, 1], [1, 0, 0, 1, 0, 1]], + dtype=bool)) + assert torch.equal( + BBoxHeadAVA.topk_to_matrix(cls_score[:, 1:], 6), + torch.ones([4, 6], dtype=bool)) + + # Test Multi-Label Loss + bbox_head = BBoxHeadAVA() # Why is this here? isn't this redundant? 
+ bbox_head.init_weights() + bbox_head = BBoxHeadAVA(temporal_pool_type='max', spatial_pool_type='avg') + bbox_head.init_weights() losses = bbox_head.loss( cls_score=cls_score, bbox_pred=None, @@ -90,6 +111,23 @@ def test_bbox_head_ava(): assert torch.isclose(losses['recall@top5'], torch.tensor(1.0)) assert torch.isclose(losses['prec@top5'], torch.tensor(0.45)) + # Test Single-Label Loss + bbox_head = BBoxHeadAVA(multilabel=False) + losses = bbox_head.loss( + cls_score=cls_score, + bbox_pred=None, + rois=None, + labels=labels, + label_weights=label_weights) + assert torch.isclose(losses['loss_action_cls'], torch.tensor(1.639561)) + assert torch.isclose(losses['recall@thr=0.5'], torch.tensor(0.25)) + assert torch.isclose(losses['prec@thr=0.5'], torch.tensor(0.25)) + assert torch.isclose(losses['recall@top3'], torch.tensor(0.75)) + assert torch.isclose(losses['prec@top3'], torch.tensor(0.5)) + assert torch.isclose(losses['recall@top5'], torch.tensor(1.0)) + assert torch.isclose(losses['prec@top5'], torch.tensor(0.45)) + + # Test ROI rois = torch.tensor([[0.0, 0.1, 0.2, 0.3, 0.4], [0.0, 0.5, 0.6, 0.7, 0.8]]) rois[1::2] *= 380 rois[2::2] *= 220 @@ -98,6 +136,7 @@ def test_bbox_head_ava(): img_shape = (320, 480) flip = True + bbox_head = BBoxHeadAVA(multilabel=True) bboxes, scores = bbox_head.get_det_bboxes( rois=rois, cls_score=cls_score, @@ -112,6 +151,20 @@ def test_bbox_head_ava(): assert torch.all( torch.isclose(scores, torch.tensor([0.73007441, 0.67436624]))) + bbox_head = BBoxHeadAVA(multilabel=False) + bboxes, scores = bbox_head.get_det_bboxes( + rois=rois, + cls_score=cls_score, + img_shape=img_shape, + flip=flip, + crop_quadruple=crop_quadruple) + assert torch.all( + torch.isclose( + bboxes, + torch.tensor([[0.89783341, 0.20043750, 0.89816672, 0.20087500], + [0.45499998, 0.69875002, 0.58166665, 0.86499995]]))) + assert torch.all(torch.isclose(scores, torch.tensor([0.56636, 0.43364]))) + def test_x3d_head(): """Test loss method, layer construction, attributes and forward function in diff --git a/tests/test_utils/test_bbox.py b/tests/test_utils/test_bbox.py index f3aba07840..41379efb3a 100644 --- a/tests/test_utils/test_bbox.py +++ b/tests/test_utils/test_bbox.py @@ -94,6 +94,7 @@ def test_bbox2result(): [0.079, 1.269, -0.263, -0.538], [-0.853, 0.391, 0.103, 0.398]]) num_classes = 4 + # Test for multi-label result = bbox2result(bboxes, labels, num_classes) assert np.all( np.isclose( @@ -116,6 +117,22 @@ def test_bbox2result(): [0.236, 0.189, 0.689, 0.74, 0.438], [0.024, 0.398, 0.776, 0.719, 0.398]]))) + # Test for single-label + result = bbox2result(bboxes, labels, num_classes, -1.0) + assert np.all( + np.isclose(result[0], np.array([[0.375, 0.371, 0.726, 0.804, 1.269]]))) + assert np.all( + np.isclose( + result[1], + np.array([[0.23, 0.215, 0.781, 0.534, 0.037], + [0.195, 0.128, 0.643, 0.944, 0.501]]))) + assert np.all( + np.isclose( + result[2], + np.array([[0.072, 0.47, 0.84, 0.898, 1.240], + [0.236, 0.189, 0.689, 0.74, 0.438], + [0.024, 0.398, 0.776, 0.719, 0.398]]))) + def test_bbox_target(): pos_bboxes = torch.tensor([[0.072, 0.47, 0.84, 0.898], From b7986c1df899ad93bcbd5601e90b8caa745a36b3 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Fri, 25 Feb 2022 13:08:51 +0800 Subject: [PATCH 362/414] [Improvement] Refine CI (#1471) * update * update --- mmaction/models/backbones/c3d.py | 5 ++++- tests/test_models/test_backbones.py | 6 +++--- tests/test_models/test_recognizers/test_recognizer3d.py | 3 ++- tests/test_runtime/test_train.py | 9 --------- 4 files changed, 9 insertions(+), 
14 deletions(-) diff --git a/mmaction/models/backbones/c3d.py b/mmaction/models/backbones/c3d.py index 5221314d0c..ad5d4aa672 100644 --- a/mmaction/models/backbones/c3d.py +++ b/mmaction/models/backbones/c3d.py @@ -25,6 +25,8 @@ class C3D(nn.Module): act_cfg (dict | None): Config dict for activation layer. If set to None, it uses ``dict(type='ReLU')`` to construct layers. Default: None. + out_dim (int): The dimension of last layer feature (after flatten). + Depends on the input shape. Default: 8192. dropout_ratio (float): Probability of dropout layer. Default: 0.5. init_std (float): Std value for Initiation of fc layers. Default: 0.01. """ @@ -35,6 +37,7 @@ def __init__(self, conv_cfg=None, norm_cfg=None, act_cfg=None, + out_dim=8192, dropout_ratio=0.5, init_std=0.005): super().__init__() @@ -76,7 +79,7 @@ def __init__(self, self.pool5 = nn.MaxPool3d( kernel_size=(2, 2, 2), stride=(2, 2, 2), padding=(0, 1, 1)) - self.fc6 = nn.Linear(8192, 4096) + self.fc6 = nn.Linear(out_dim, 4096) self.fc7 = nn.Linear(4096, 4096) self.relu = nn.ReLU() diff --git a/tests/test_models/test_backbones.py b/tests/test_models/test_backbones.py index b9f56caf47..0afd83c58d 100644 --- a/tests/test_models/test_backbones.py +++ b/tests/test_models/test_backbones.py @@ -692,18 +692,18 @@ def test_timesformer_backbone(): def test_c3d_backbone(): """Test c3d backbone.""" - input_shape = (1, 3, 16, 112, 112) + input_shape = (1, 3, 16, 24, 24) imgs = generate_backbone_demo_inputs(input_shape) # c3d inference test - c3d = C3D() + c3d = C3D(out_dim=512) c3d.init_weights() c3d.train() feat = c3d(imgs) assert feat.shape == torch.Size([1, 4096]) # c3d with bn inference test - c3d_bn = C3D(norm_cfg=dict(type='BN3d')) + c3d_bn = C3D(out_dim=512, norm_cfg=dict(type='BN3d')) c3d_bn.init_weights() c3d_bn.train() feat = c3d_bn(imgs) diff --git a/tests/test_models/test_recognizers/test_recognizer3d.py b/tests/test_models/test_recognizers/test_recognizer3d.py index 3fcdea7337..f3bf5d62e7 100644 --- a/tests/test_models/test_recognizers/test_recognizer3d.py +++ b/tests/test_models/test_recognizers/test_recognizer3d.py @@ -289,10 +289,11 @@ def test_timesformer(): def test_c3d(): config = get_recognizer_cfg('c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb.py') config.model['backbone']['pretrained'] = None + config.model['backbone']['out_dim'] = 512 recognizer = build_recognizer(config.model) - input_shape = (1, 3, 3, 16, 112, 112) + input_shape = (1, 3, 3, 16, 28, 28) demo_inputs = generate_recognizer_demo_inputs(input_shape, '3D') imgs = demo_inputs['imgs'] diff --git a/tests/test_runtime/test_train.py b/tests/test_runtime/test_train.py index 14c3db30ce..3a205dfbb4 100644 --- a/tests/test_runtime/test_train.py +++ b/tests/test_runtime/test_train.py @@ -103,15 +103,6 @@ def test_train_model(): config = Config(cfg) train_model(model, dataset, config, validate=True) - with tempfile.TemporaryDirectory() as tmpdir: - # train with Fp16OptimizerHook - cfg = copy.deepcopy(_cfg) - cfg['work_dir'] = tmpdir - cfg['fp16'] = dict(loss_scale=512.) 
- config = Config(cfg) - model.fp16_enabled = None - train_model(model, dataset, config) - with tempfile.TemporaryDirectory() as tmpdir: cfg = copy.deepcopy(_cfg) cfg['work_dir'] = tmpdir From 7ed811c1c4bc9678591b191ee3d3e1c31ac68fc1 Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Fri, 25 Feb 2022 19:56:03 +0800 Subject: [PATCH 363/414] [Docs] Structuralize readme (#1455) * master * master 0721 * add README * 1231 bump_version * 0207 slowfast steplr * 0207 * 0218 add demo on readme * 0218 add demo on readme * add gif * 0225 * modify gif size * modify gif size * modify gif size * modify gif size * modify gif size * modify gif size --- README.md | 4 ++++ README_zh-CN.md | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/README.md b/README.md index b72b6ff7ab..563930e838 100644 --- a/README.md +++ b/README.md @@ -52,6 +52,10 @@ The master branch works with **PyTorch 1.3+**.

    Skeleton-based Action Recognition Results on NTU-RGB+D-120

    + [three added lines of demo GIF markup omitted]

    Skeleton-based Spatio-Temporal Action Detection and Action Recognition Results on Kinetics-400

    + [one added line of demo GIF markup omitted]

    Spatio-Temporal Action Detection Results on AVA-2.1

    diff --git a/README_zh-CN.md b/README_zh-CN.md index 60cebc4d55..59cb27b7e6 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -52,6 +52,10 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLa

    NTURGB+D-120 上的基于人体姿态的动作识别

    + [three added lines of demo GIF markup omitted]

    Kinetics-400 上的基于 skeleton 的时空动作检测和动作识别

    + [one added line of demo GIF markup omitted]

    AVA-2.1 上的时空动作检测

    From 6f79f54a34fd53c18adf4577fc59ab934f5c7d07 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Mon, 28 Feb 2022 14:13:27 +0800 Subject: [PATCH 364/414] [Fix] Fix Focal Config --- ...gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/configs/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py b/configs/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py index 49641d1f0d..71af48e10b 100644 --- a/configs/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py +++ b/configs/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py @@ -40,6 +40,8 @@ type='BBoxHeadAVA', dropout_ratio=0.5, in_channels=2304, + focal_alpha=3.0, + focal_gamma=1.0, num_classes=81, multilabel=True)), train_cfg=dict( From 442b28153bfd5fe9ee25d4d89e3c5da995ad6bdb Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Tue, 1 Mar 2022 13:34:09 +0800 Subject: [PATCH 365/414] [Pre-commit] update pre-commit (#1474) --- .pre-commit-config.yaml | 2 +- demo/faster_rcnn_r50_fpn_2x_coco.py | 1 + demo/hrnet_w32_coco_256x192.py | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a765292df3..524e2eb267 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -45,4 +45,4 @@ repos: hooks: - id: check-algo-readme - id: check-copyright - args: ["mmaction"] # these directories will be checked + args: ["mmaction", "tests", "demo", "tools"] # these directories will be checked diff --git a/demo/faster_rcnn_r50_fpn_2x_coco.py b/demo/faster_rcnn_r50_fpn_2x_coco.py index 33fc564507..2387ce3c7f 100644 --- a/demo/faster_rcnn_r50_fpn_2x_coco.py +++ b/demo/faster_rcnn_r50_fpn_2x_coco.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. # model config model = dict( type='FasterRCNN', diff --git a/demo/hrnet_w32_coco_256x192.py b/demo/hrnet_w32_coco_256x192.py index 6ef3b6efd7..79086b9baa 100644 --- a/demo/hrnet_w32_coco_256x192.py +++ b/demo/hrnet_w32_coco_256x192.py @@ -1,3 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. log_level = 'INFO' load_from = None resume_from = None From 3860a700c8ecb251adb0886cb693dcb9840078ac Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Tue, 1 Mar 2022 16:56:21 +0800 Subject: [PATCH 366/414] [Docs] update OpenMMLab repo information (#1482) * master * master 0721 * add README * 1231 bump_version * 0207 slowfast steplr * 0207 * add rotate in readme * add rotate in readme * add rotate in readme --- README.md | 26 +++++++++++++------------- README_zh-CN.md | 30 +++++++++++++++--------------- 2 files changed, 28 insertions(+), 28 deletions(-) diff --git a/README.md b/README.md index 563930e838..30b8efebea 100644 --- a/README.md +++ b/README.md @@ -284,21 +284,21 @@ We wish that the toolbox and benchmark could serve the growing research communit ## Projects in OpenMMLab -- [MMCV](https://github.com/open-mmlab/mmcv): OpenMMLab foundational library for computer vision. -- [MIM](https://github.com/open-mmlab/mim): MIM Installs OpenMMLab Packages. -- [MMAction2](https://github.com/open-mmlab/mmaction2): OpenMMLab's next-generation video understanding toolbox and benchmark. +- [MIM](https://github.com/open-mmlab/mim): MIM installs OpenMMLab packages. 
- [MMClassification](https://github.com/open-mmlab/mmclassification): OpenMMLab image classification toolbox and benchmark. -- [MMDeploy](https://github.com/open-mmlab/mmdeploy): OpenMMLab Model Deployment Framework. - [MMDetection](https://github.com/open-mmlab/mmdetection): OpenMMLab detection toolbox and benchmark. - [MMDetection3D](https://github.com/open-mmlab/mmdetection3d): OpenMMLab's next-generation platform for general 3D object detection. -- [MMEditing](https://github.com/open-mmlab/mmediting): OpenMMLab image and video editing toolbox. -- [MMFewShot](https://github.com/open-mmlab/mmfewshot): OpenMMLab few shot learning toolbox. -- [MMFlow](https://github.com/open-mmlab/mmflow): OpenMMLab Optical Flow Toolbox and Benchmark. -- [MMGeneration](https://github.com/open-mmlab/mmgeneration): OpenMMLab image and video generative models toolbox. -- [MMHuman3D](https://github.com/open-mmlab/mmhuman3d): OpenMMLab human pose and shape estimation toolbox and benchmark. -- [MMOCR](https://github.com/open-mmlab/mmocr): A Comprehensive Toolbox for Text Detection, Recognition and Understanding. -- [MMPose](https://github.com/open-mmlab/mmpose): OpenMMLab pose estimation toolbox and benchmark. -- [MMRazor](https://github.com/open-mmlab/mmrazor): OpenMMLab Model Compression Toolbox and Benchmark. +- [MMRotate](https://github.com/open-mmlab/mmrotate): OpenMMLab rotated object detection toolbox and benchmark. - [MMSegmentation](https://github.com/open-mmlab/mmsegmentation): OpenMMLab semantic segmentation toolbox and benchmark. -- [MMSelfSup](https://github.com/open-mmlab/mmselfsup): OpenMMLab self-supervised learning Toolbox and Benchmark. +- [MMOCR](https://github.com/open-mmlab/mmocr): OpenMMLab text detection, recognition, and understanding toolbox. +- [MMPose](https://github.com/open-mmlab/mmpose): OpenMMLab pose estimation toolbox and benchmark. +- [MMHuman3D](https://github.com/open-mmlab/mmhuman3d): OpenMMLab 3D human parametric model toolbox and benchmark. +- [MMSelfSup](https://github.com/open-mmlab/mmselfsup): OpenMMLab self-supervised learning toolbox and benchmark. +- [MMRazor](https://github.com/open-mmlab/mmrazor): OpenMMLab model compression toolbox and benchmark. +- [MMFewShot](https://github.com/open-mmlab/mmfewshot): OpenMMLab fewshot learning toolbox and benchmark. +- [MMAction2](https://github.com/open-mmlab/mmaction2): OpenMMLab's next-generation action understanding toolbox and benchmark. - [MMTracking](https://github.com/open-mmlab/mmtracking): OpenMMLab video perception toolbox and benchmark. +- [MMFlow](https://github.com/open-mmlab/mmflow): OpenMMLab optical flow toolbox and benchmark. +- [MMEditing](https://github.com/open-mmlab/mmediting): OpenMMLab image and video editing toolbox. +- [MMGeneration](https://github.com/open-mmlab/mmgeneration): OpenMMLab image and video generative models toolbox. +- [MMDeploy](https://github.com/open-mmlab/mmdeploy): OpenMMLab model deployment framework. 
diff --git a/README_zh-CN.md b/README_zh-CN.md index 59cb27b7e6..b0b40ab078 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -279,24 +279,24 @@ MMAction2 是一款由不同学校和公司共同贡献的开源项目。我们 ## OpenMMLab 的其他项目 -- [MMCV](https://github.com/open-mmlab/mmcv): OpenMMLab 计算机视觉基础库 - [MIM](https://github.com/open-mmlab/mim): MIM 是 OpenMMlab 项目、算法、模型的统一入口 -- [MMAction2](https://github.com/open-mmlab/mmaction2): OpenMMLab 新一代视频理解工具箱与测试基准 -- [MMClassification](https://github.com/open-mmlab/mmclassification): OpenMMLab 图像分类工具箱与测试基准 -- [MMDeploy](https://github.com/open-mmlab/mmdeploy): OpenMMLab 模型部署框架 -- [MMDetection](https://github.com/open-mmlab/mmdetection): OpenMMLab 检测工具箱与测试基准 -- [MMDetection3D](https://github.com/open-mmlab/mmdetection3d): OpenMMLab 新一代通用3D目标检测平台 -- [MMEditing](https://github.com/open-mmlab/mmediting): OpenMMLab 图像视频编辑工具箱 -- [MMFewShot](https://github.com/open-mmlab/mmfewshot): OpenMMLab 少样本学习代码库 -- [MMFlow](https://github.com/open-mmlab/mmflow): OpenMMLab 光流估计工具箱 -- [MMGeneration](https://github.com/open-mmlab/mmgeneration): OpenMMLab 图片视频生成模型工具箱 -- [MMHuman3D](https://github.com/open-mmlab/mmhuman3d): OpenMMLab 人体姿态和形状估计工具箱 -- [MMOCR](https://github.com/open-mmlab/mmocr): OpenMMLab 全流程文字检测识别理解工具包 -- [MMPose](https://github.com/open-mmlab/mmpose): OpenMMLab 姿态估计工具箱与测试基准 -- [MMRazor](https://github.com/open-mmlab/mmrazor): OpenMMLab 模型压缩工具箱与测试基准 -- [MMSegmentation](https://github.com/open-mmlab/mmsegmentation): OpenMMLab 语义分割工具箱与测试基准 +- [MMClassification](https://github.com/open-mmlab/mmclassification): OpenMMLab 图像分类工具箱 +- [MMDetection](https://github.com/open-mmlab/mmdetection): OpenMMLab 目标检测工具箱 +- [MMDetection3D](https://github.com/open-mmlab/mmdetection3d): OpenMMLab 新一代通用 3D 目标检测平台 +- [MMRotate](https://github.com/open-mmlab/mmrotate): OpenMMLab 旋转框检测工具箱与测试基准 +- [MMSegmentation](https://github.com/open-mmlab/mmsegmentation): OpenMMLab 语义分割工具箱 +- [MMOCR](https://github.com/open-mmlab/mmocr): OpenMMLab 全流程文字检测识别理解工具箱 +- [MMPose](https://github.com/open-mmlab/mmpose): OpenMMLab 姿态估计工具箱 +- [MMHuman3D](https://github.com/open-mmlab/mmhuman3d): OpenMMLab 人体参数化模型工具箱与测试基准 - [MMSelfSup](https://github.com/open-mmlab/mmselfsup): OpenMMLab 自监督学习工具箱与测试基准 +- [MMRazor](https://github.com/open-mmlab/mmrazor): OpenMMLab 模型压缩工具箱与测试基准 +- [MMFewShot](https://github.com/open-mmlab/mmfewshot): OpenMMLab 少样本学习工具箱与测试基准 +- [MMAction2](https://github.com/open-mmlab/mmaction2): OpenMMLab 新一代视频理解工具箱 - [MMTracking](https://github.com/open-mmlab/mmtracking): OpenMMLab 一体化视频目标感知平台 +- [MMFlow](https://github.com/open-mmlab/mmflow): OpenMMLab 光流估计工具箱与测试基准 +- [MMEditing](https://github.com/open-mmlab/mmediting): OpenMMLab 图像视频编辑工具箱 +- [MMGeneration](https://github.com/open-mmlab/mmgeneration): OpenMMLab 图片视频生成模型工具箱 +- [MMDeploy](https://github.com/open-mmlab/mmdeploy): OpenMMLab 模型部署框架 ## 欢迎加入 OpenMMLab 社区 From 3f3ad9cae291c991b822cbc2ecfb88c1188e87c5 Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Tue, 1 Mar 2022 19:13:30 +0800 Subject: [PATCH 367/414] [Deployment] Add deprecation message for deploy tool (#1483) * master * master 0721 * add README * 1231 bump_version * 0207 slowfast steplr * 0207 * add deploy msg --- tools/deployment/pytorch2onnx.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/tools/deployment/pytorch2onnx.py b/tools/deployment/pytorch2onnx.py index 178d0e63eb..9b4cf5ca2d 100644 --- a/tools/deployment/pytorch2onnx.py +++ b/tools/deployment/pytorch2onnx.py @@ -1,5 +1,6 @@ # Copyright (c) OpenMMLab. All rights reserved. 
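# A brief key to the ANSI SGR escape codes used by the deprecation warning
# added at the bottom of this file (standard terminal codes, noted here only
# for readability): '\x1b[1m' bold, '\x1b[0m' reset, '\x1b[31m' red text,
# '\x1b[34m' blue text, '\x1b[107m' bright-white background.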
import argparse +import warnings import mmcv import numpy as np @@ -168,3 +169,15 @@ def parse_args(): show=args.show, output_file=args.output_file, verify=args.verify) + + # Following strings of text style are from colorama package + bright_style, reset_style = '\x1b[1m', '\x1b[0m' + red_text, blue_text = '\x1b[31m', '\x1b[34m' + white_background = '\x1b[107m' + + msg = white_background + bright_style + red_text + msg += 'DeprecationWarning: This tool will be deprecated in future. ' + msg += blue_text + 'Welcome to use the unified model deployment toolbox ' + msg += 'MMDeploy: https://github.com/open-mmlab/mmdeploy' + msg += reset_style + warnings.warn(msg) From 696f9aba048623139ba7f0cbd8487b1cda7740f8 Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Fri, 4 Mar 2022 18:29:49 +0800 Subject: [PATCH 368/414] [Feature] Support Multigrid algorithm (#1378) * master * master 0721 * add README * 1231 bump_version * multigrid implement 0111 * multigrid implement 0111 * multigrid implement 0111 * 0117 add longcycle * add short cycle 0126 * add short cycle 0126 * add short cycle 0126 * add short cycle 0126 * add short cycle 0126 * add short cycle 0126 * add short cycle 0126 * 0207 mofidy multigrid hook * 0207 mofidy multigrid hook * 0207 mofidy multigrid hook * 0216 multigrid * 0216 multigrid * 0216 multigrid * 0216 multigrid * 0216 multigrid * 0216 multigrid * modify muligrid lr * longshortcycle * update multigrid config * modify config * modify config * modify sampler * modify init * modify precise bn * modify precise bn * modify precise bn * modify copyright * add unittest * add docstring * modify docstring * add unittest * add readme meta * updates * updates * add path * add path --- README.md | 1 + configs/recognition/slowfast/README.md | 1 + configs/recognition/slowfast/README_zh-CN.md | 1 + configs/recognition/slowfast/metafile.yml | 23 ++ ...ultigrid_r50_8x8x1_358e_kinetics400_rgb.py | 152 +++++++++++ mmaction/apis/train.py | 29 +- mmaction/core/__init__.py | 1 + mmaction/core/lr/__init__.py | 4 + mmaction/core/lr/multigridlr.py | 41 +++ mmaction/datasets/builder.py | 29 ++ mmaction/datasets/rawframe_dataset.py | 52 +++- mmaction/models/__init__.py | 95 +++++-- mmaction/models/common/__init__.py | 3 +- mmaction/models/common/sub_batchnorm3d.py | 72 +++++ mmaction/utils/multigrid/__init__.py | 8 + .../utils/multigrid/longshortcyclehook.py | 258 ++++++++++++++++++ mmaction/utils/multigrid/short_sampler.py | 53 ++++ mmaction/utils/multigrid/subbn_aggregate.py | 22 ++ tests/test_models/test_common.py | 13 +- tests/test_utils/test_module_hooks.py | 23 ++ 20 files changed, 844 insertions(+), 37 deletions(-) create mode 100644 configs/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.py create mode 100644 mmaction/core/lr/__init__.py create mode 100644 mmaction/core/lr/multigridlr.py create mode 100644 mmaction/models/common/sub_batchnorm3d.py create mode 100644 mmaction/utils/multigrid/__init__.py create mode 100644 mmaction/utils/multigrid/longshortcyclehook.py create mode 100644 mmaction/utils/multigrid/short_sampler.py create mode 100644 mmaction/utils/multigrid/subbn_aggregate.py diff --git a/README.md b/README.md index 30b8efebea..8f3779e6ed 100644 --- a/README.md +++ b/README.md @@ -71,6 +71,7 @@ The master branch works with **PyTorch 1.3+**. ## Updates +- (2022-03-04) We support **Multigrid** on Kinetics400, achieve 76.07% Top-1 accuracy and accelerate training speed. 
- (2021-11-24) We support **2s-AGCN** on NTU60 XSub, achieve 86.06% Top-1 accuracy on joint stream and 86.89% Top-1 accuracy on bone stream respectively. - (2021-10-29) We provide a demo for skeleton-based and rgb-based spatio-temporal detection and action recognition (demo/demo_video_structuralize.py). - (2021-10-26) We train and test **ST-GCN** on NTU60 with 3D keypoint annotations, achieve 84.61% Top-1 accuracy (higher than 81.5% in the [paper](https://www.aaai.org/ocs/index.php/AAAI/AAAI18/paper/viewPaper/17135)). diff --git a/configs/recognition/slowfast/README.md b/configs/recognition/slowfast/README.md index 476f681077..ef3344550b 100644 --- a/configs/recognition/slowfast/README.md +++ b/configs/recognition/slowfast/README.md @@ -27,6 +27,7 @@ We present SlowFast networks for video recognition. Our model involves (i) a Slo |[slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 320|8x2| ResNet50|None |76.34|92.67|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb_20210722-bb725050.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log.json)| |[slowfast_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |short-side 320|8x3| ResNet50 |None|76.94|92.8|1.3 ((32+8)x10x3 frames)|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/slowfast_r50_8x8x1_256e_kinetics400_rgb_20200716-73547d2b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log.json)| |[slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.py) |short-side 320|8x4| ResNet50 |None|76.34|92.61||9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr-43988bac.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.json)| +|[slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.py) |short-side 320|8x3| ResNet50 |None|76.07|92.21|x|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb-f82bd304.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.log)| 
[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.json)| |[slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr](/configs/recognition/slowfast/slowfast_perbn_r50_8x8x1_256e_kinetics400_rgb_steplr.py) |short-side 320|8x4| ResNet50 |None|76.58|92.85||9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr-28474e54.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.json)| |[slowfast_r101_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 256|8x1| ResNet101 + ResNet50 |None|76.69|93.07||16628| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/slowfast_r101_4x16x1_256e_kinetics400_rgb_20210218-d8b58813.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log.json)| |[slowfast_r101_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb.py) |short-side 256|8x4| ResNet101 |None|77.90|93.51||25994| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/slowfast_r101_8x8x1_256e_kinetics400_rgb_20210218-0dd54025.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log.json)| diff --git a/configs/recognition/slowfast/README_zh-CN.md b/configs/recognition/slowfast/README_zh-CN.md index a01a5cb59f..bdd0190412 100644 --- a/configs/recognition/slowfast/README_zh-CN.md +++ b/configs/recognition/slowfast/README_zh-CN.md @@ -26,6 +26,7 @@ |[slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb.py) |短边320|8x2| ResNet50|None |76.34|92.67|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb_20210722-bb725050.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log.json)| |[slowfast_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |短边320|8x3| ResNet50 |None|76.94|92.8|1.3 ((32+8)x10x3 frames)|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/slowfast_r50_8x8x1_256e_kinetics400_rgb_20200716-73547d2b.pth) | 
[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log.json)| |[slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |短边320|8x4| ResNet50 |None|76.34|92.61|1.3 ((32+8)x10x3 frames)|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr-43988bac.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.json)| +|[slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.py) |短边320|8x3| ResNet50 |None|76.07|92.21|x|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb-f82bd304.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.json)| |[slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr](/configs/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb.py) |短边320|8x4| ResNet50 |None|76.58|92.85|1.3 ((32+8)x10x3 frames)|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr-28474e54.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.json)| |[slowfast_r101_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py) |短边256|8x1| ResNet101 + ResNet50 |None|76.69|93.07||16628| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/slowfast_r101_4x16x1_256e_kinetics400_rgb_20210218-d8b58813.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log.json)| |[slowfast_r101_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb.py) |短边256|8x4| ResNet101 |None|77.90|93.51||25994| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/slowfast_r101_8x8x1_256e_kinetics400_rgb_20210218-0dd54025.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log)| 
[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log.json)| diff --git a/configs/recognition/slowfast/metafile.yml b/configs/recognition/slowfast/metafile.yml index 0345aeedf8..89e84ca64c 100644 --- a/configs/recognition/slowfast/metafile.yml +++ b/configs/recognition/slowfast/metafile.yml @@ -120,6 +120,29 @@ Models: Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.json Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.log Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr-43988bac.pth +- Config: configs/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.py + In Collection: SlowFast + Metadata: + Architecture: ResNet50 + Batch Size: 8 + Epochs: 256 + FLOPs: 66222034944 + Parameters: 34565560 + Pretrained: None + Resolution: short-side 320 + Training Data: Kinetics-400 + Training Resources: 24 GPUs + Modality: RGB + Name: slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb + Results: + - Dataset: Kinetics-400 + Metrics: + Top 1 Accuracy: 76.07 + Top 5 Accuracy: 92.21 + Task: Action Recognition + Training Json Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.json + Training Log: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.log + Weights: https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb-f82bd304.pth - Config: configs/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.py In Collection: SlowFast Metadata: diff --git a/configs/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.py b/configs/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.py new file mode 100644 index 0000000000..310cce0d30 --- /dev/null +++ b/configs/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.py @@ -0,0 +1,152 @@ +model = dict( + type='Recognizer3D', + backbone=dict( + type='ResNet3dSlowFast', + pretrained=None, + resample_rate=4, # tau + speed_ratio=4, # alpha + channel_ratio=8, # beta_inv + slow_pathway=dict( + type='resnet3d', + depth=50, + pretrained=None, + lateral=True, + fusion_kernel=7, + conv1_kernel=(1, 7, 7), + dilations=(1, 1, 1, 1), + conv1_stride_t=1, + pool1_stride_t=1, + inflate=(0, 0, 1, 1), + norm_eval=False), + fast_pathway=dict( + type='resnet3d', + depth=50, + pretrained=None, + lateral=False, + base_channels=8, + conv1_kernel=(5, 7, 7), + conv1_stride_t=1, + pool1_stride_t=1, + norm_eval=False)), + cls_head=dict( + type='SlowFastHead', + in_channels=2304, # 2048+256 + num_classes=400, + spatial_type='avg', + dropout_ratio=0.5), + # model training and testing settings + train_cfg=None, + test_cfg=dict(average_clips='prob')) + +train_cfg = None +test_cfg = dict(average_clips='prob') + +dataset_type = 'RawframeDataset' +data_root = 'data/kinetics400/rawframes_train' +data_root_val = 'data/kinetics400/rawframes_val' +ann_file_train = 
'data/kinetics400/kinetics400_train_list_rawframes.txt' +ann_file_val = 'data/kinetics400/kinetics400_val_list_rawframes.txt' +ann_file_test = 'data/kinetics400/kinetics400_val_list_rawframes.txt' + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) + +train_pipeline = [ + dict(type='SampleFrames', clip_len=32, frame_interval=2, num_clips=1), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='RandomResizedCrop'), + dict(type='Resize', scale=(224, 224), keep_ratio=False), + dict(type='Flip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) +] +val_pipeline = [ + dict( + type='SampleFrames', + clip_len=32, + frame_interval=2, + num_clips=1, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='CenterCrop', crop_size=224), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +test_pipeline = [ + dict( + type='SampleFrames', + clip_len=32, + frame_interval=2, + num_clips=10, + test_mode=True), + dict(type='RawFrameDecode'), + dict(type='Resize', scale=(-1, 256)), + dict(type='ThreeCrop', crop_size=256), + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs']) +] +data = dict( + videos_per_gpu=8, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + pipeline=test_pipeline)) +# optimizer +optimizer = dict( + type='SGD', lr=0.1, momentum=0.9, weight_decay=0.0001) # 16gpu 0.1->0.2 +optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2)) +lr_config = dict(policy='step', step=[94, 154, 196]) + +total_epochs = 239 + +evaluation = dict( + interval=3, metrics=['top_k_accuracy', 'mean_class_accuracy']) +log_config = dict( + interval=20, + hooks=[ + dict(type='TextLoggerHook'), + # dict(type='TensorboardLoggerHook'), + ]) +dist_params = dict(backend='nccl') +log_level = 'INFO' + +checkpoint_config = dict(interval=3) +workflow = [('train', 1)] + +find_unused_parameters = False + +multigrid = dict( + long_cycle=True, + short_cycle=True, + epoch_factor=1.5, + long_cycle_factors=[[0.25, 0.7071], [0.5, 0.7071], [0.5, 1], [1, 1]], + short_cycle_factors=[0.5, 0.7071], + default_s=(224, 224), +) + +precise_bn = dict(num_iters=200, interval=3) + +load_from = None +resume_from = None + +work_dir = './work_dirs/slowfast_r50_3d_8x8x1_256e_kinetics400_rgb' diff --git a/mmaction/apis/train.py b/mmaction/apis/train.py index 80d35218ac..71feec5661 100644 --- a/mmaction/apis/train.py +++ b/mmaction/apis/train.py @@ -157,11 +157,21 @@ def train_model(model, runner.register_training_hooks(cfg.lr_config, optimizer_config, cfg.checkpoint_config, cfg.log_config, cfg.get('momentum_config', None)) - if distributed: - if cfg.omnisource: - runner.register_hook(OmniSourceDistSamplerSeedHook()) - else: - runner.register_hook(DistSamplerSeedHook()) + + # multigrid setting + 
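+    # The whole multigrid path is cfg-driven: when the config carries no
+    # `multigrid` key (see the `multigrid = dict(...)` block in the config
+    # above for the expected fields), `cfg.get` returns None and neither of
+    # the two hooks below is registered, so ordinary training is unaffected.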
multigrid_cfg = cfg.get('multigrid', None) + if multigrid_cfg is not None: + from mmaction.utils.multigrid import LongShortCycleHook + multigrid_scheduler = LongShortCycleHook(cfg) + runner.register_hook(multigrid_scheduler) + logger.info('Finish register multigrid hook') + + # subbn3d aggregation is HIGH, as it should be done before + # saving and evaluation + from mmaction.utils.multigrid import SubBatchNorm3dAggregationHook + subbn3d_aggre_hook = SubBatchNorm3dAggregationHook() + runner.register_hook(subbn3d_aggre_hook, priority='VERY_HIGH') + logger.info('Finish register subbn3daggre hook') # precise bn setting if cfg.get('precise_bn', False): @@ -177,7 +187,14 @@ def train_model(model, **dataloader_setting) precise_bn_hook = PreciseBNHook(data_loader_precise_bn, **cfg.get('precise_bn')) - runner.register_hook(precise_bn_hook) + runner.register_hook(precise_bn_hook, priority='HIGHEST') + logger.info('Finish register precisebn hook') + + if distributed: + if cfg.omnisource: + runner.register_hook(OmniSourceDistSamplerSeedHook()) + else: + runner.register_hook(DistSamplerSeedHook()) if validate: eval_cfg = cfg.get('evaluation', {}) diff --git a/mmaction/core/__init__.py b/mmaction/core/__init__.py index a86055476a..e4d85a9b82 100644 --- a/mmaction/core/__init__.py +++ b/mmaction/core/__init__.py @@ -2,6 +2,7 @@ from .bbox import * # noqa: F401, F403 from .evaluation import * # noqa: F401, F403 from .hooks import * # noqa: F401, F403 +from .lr import * # noqa: F401, F403 from .optimizer import * # noqa: F401, F403 from .runner import * # noqa: F401, F403 from .scheduler import * # noqa: F401, F403 diff --git a/mmaction/core/lr/__init__.py b/mmaction/core/lr/__init__.py new file mode 100644 index 0000000000..056c2933eb --- /dev/null +++ b/mmaction/core/lr/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .multigridlr import RelativeStepLrUpdaterHook + +__all__ = ['RelativeStepLrUpdaterHook'] diff --git a/mmaction/core/lr/multigridlr.py b/mmaction/core/lr/multigridlr.py new file mode 100644 index 0000000000..1a98b68dec --- /dev/null +++ b/mmaction/core/lr/multigridlr.py @@ -0,0 +1,41 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from mmcv.runner.hooks.hook import HOOKS +from mmcv.runner.hooks.lr_updater import LrUpdaterHook + + +@HOOKS.register_module() +class RelativeStepLrUpdaterHook(LrUpdaterHook): + """RelativeStepLrUpdaterHook. + Args: + runner (:obj:`mmcv.Runner`): The runner instance used. + steps (list[int]): The list of epochs at which decrease + the learning rate. + **kwargs (dict): Same as that of mmcv. 
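+        lrs (list[float]): The learning rate used in each step, so
+            ``len(lrs)`` must equal ``len(steps)``.
+        warmup_epochs (int): The number of epochs of linear warmup from
+            ``warmuplr_start`` towards ``lrs[0]``. Default: 34.
+        warmuplr_start (float): The learning rate the warmup starts from.
+            Default: 0.01.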
+ """ + + def __init__(self, + runner, + steps, + lrs, + warmup_epochs=34, + warmuplr_start=0.01, + **kwargs): + super().__init__(**kwargs) + assert len(steps) == (len(lrs)) + self.steps = steps + self.lrs = lrs + self.warmup_epochs = warmup_epochs + self.warmuplr_start = warmuplr_start + self.warmuplr_end = self.lrs[0] + super().before_run(runner) + + def get_lr(self, runner, base_lr): + """Similar to that of mmcv.""" + progress = runner.epoch if self.by_epoch else runner.iter + if progress <= self.warmup_epochs: + alpha = (self.warmuplr_end - + self.warmuplr_start) / self.warmup_epochs + return progress * alpha + self.warmuplr_start + for i in range(len(self.steps)): + if progress < self.steps[i]: + return self.lrs[i] diff --git a/mmaction/datasets/builder.py b/mmaction/datasets/builder.py index fb2e3e9859..0b9816b6ce 100644 --- a/mmaction/datasets/builder.py +++ b/mmaction/datasets/builder.py @@ -10,6 +10,7 @@ from mmcv.utils import Registry, build_from_cfg, digit_version from torch.utils.data import DataLoader +from ..utils.multigrid import ShortCycleSampler from .samplers import ClassSpecificDistributedSampler, DistributedSampler if platform.system() != 'Windows': @@ -86,6 +87,10 @@ def build_dataloader(dataset, rank, world_size = get_dist_info() sample_by_class = getattr(dataset, 'sample_by_class', False) + short_cycle = kwargs.pop('short_cycle', False) + multigrid_cfg = kwargs.pop('multigrid_cfg', None) + crop_size = kwargs.pop('crop_size', 224) + if dist: if sample_by_class: dynamic_length = getattr(dataset, 'dynamic_length', True) @@ -102,7 +107,31 @@ def build_dataloader(dataset, shuffle = False batch_size = videos_per_gpu num_workers = workers_per_gpu + + if short_cycle: + batch_sampler = ShortCycleSampler(sampler, batch_size, + multigrid_cfg, crop_size) + init_fn = partial( + worker_init_fn, num_workers=num_workers, rank=rank, + seed=seed) if seed is not None else None + + if digit_version(torch.__version__) >= digit_version('1.8.0'): + kwargs['persistent_workers'] = persistent_workers + + data_loader = DataLoader( + dataset, + batch_sampler=batch_sampler, + num_workers=num_workers, + pin_memory=pin_memory, + worker_init_fn=init_fn, + **kwargs) + return data_loader + else: + if short_cycle: + raise NotImplementedError( + 'Short cycle using non-dist is not supported') + sampler = None batch_size = num_gpus * videos_per_gpu num_workers = num_gpus * workers_per_gpu diff --git a/mmaction/datasets/rawframe_dataset.py b/mmaction/datasets/rawframe_dataset.py index 060fca83e5..9359e117b7 100644 --- a/mmaction/datasets/rawframe_dataset.py +++ b/mmaction/datasets/rawframe_dataset.py @@ -4,6 +4,7 @@ import torch +from mmaction.datasets.pipelines import Resize from .base import BaseDataset from .builder import DATASETS @@ -99,7 +100,8 @@ def __init__(self, modality='RGB', sample_by_class=False, power=0., - dynamic_length=False): + dynamic_length=False, + **kwargs): self.filename_tmpl = filename_tmpl self.with_offset = with_offset super().__init__( @@ -114,6 +116,9 @@ def __init__(self, sample_by_class=sample_by_class, power=power, dynamic_length=dynamic_length) + self.short_cycle_factors = kwargs.get('short_cycle_factors', + [0.5, 0.7071]) + self.default_s = kwargs.get('default_s', (224, 224)) def load_annotations(self): """Load annotation file to get video information.""" @@ -155,18 +160,41 @@ def load_annotations(self): def prepare_train_frames(self, idx): """Prepare the frames for training given the index.""" - results = copy.deepcopy(self.video_infos[idx]) - results['filename_tmpl'] = 
self.filename_tmpl - results['modality'] = self.modality - results['start_index'] = self.start_index - # prepare tensor in getitem - if self.multi_class: - onehot = torch.zeros(self.num_classes) - onehot[results['label']] = 1. - results['label'] = onehot - - return self.pipeline(results) + def pipeline_for_a_sample(idx): + results = copy.deepcopy(self.video_infos[idx]) + results['filename_tmpl'] = self.filename_tmpl + results['modality'] = self.modality + results['start_index'] = self.start_index + + # prepare tensor in getitem + if self.multi_class: + onehot = torch.zeros(self.num_classes) + onehot[results['label']] = 1. + results['label'] = onehot + + return self.pipeline(results) + + if isinstance(idx, tuple): + index, short_cycle_idx = idx + last_resize = None + for trans in self.pipeline.transforms: + if isinstance(trans, Resize): + last_resize = trans + origin_scale = self.default_s + long_cycle_scale = last_resize.scale + + if short_cycle_idx in [0, 1]: + # 0 and 1 is hard-coded as PySlowFast + scale_ratio = self.short_cycle_factors[short_cycle_idx] + target_scale = tuple( + [int(round(scale_ratio * s)) for s in origin_scale]) + last_resize.scale = target_scale + res = pipeline_for_a_sample(index) + last_resize.scale = long_cycle_scale + return res + else: + return pipeline_for_a_sample(idx) def prepare_test_frames(self, idx): """Prepare the frames for testing given the index.""" diff --git a/mmaction/models/__init__.py b/mmaction/models/__init__.py index 8cb15bc49c..8d94e8762c 100644 --- a/mmaction/models/__init__.py +++ b/mmaction/models/__init__.py @@ -9,7 +9,8 @@ build_recognizer) from .common import (LFB, TAM, Conv2plus1d, ConvAudio, DividedSpatialAttentionWithNorm, - DividedTemporalAttentionWithNorm, FFNWithNorm) + DividedTemporalAttentionWithNorm, FFNWithNorm, + SubBatchNorm3D) from .heads import (ACRNHead, AudioTSNHead, AVARoIHead, BaseHead, BBoxHeadAVA, FBOHead, I3DHead, LFBInferHead, SlowFastHead, STGCNHead, TimeSformerHead, TPNHead, TRNHead, TSMHead, TSNHead, @@ -25,20 +26,80 @@ from .skeleton_gcn import BaseGCN, SkeletonGCN __all__ = [ - 'BACKBONES', 'HEADS', 'RECOGNIZERS', 'build_recognizer', 'build_head', - 'build_backbone', 'Recognizer2D', 'Recognizer3D', 'C3D', 'ResNet', 'STGCN', - 'ResNet3d', 'ResNet2Plus1d', 'I3DHead', 'TSNHead', 'TSMHead', 'BaseHead', - 'STGCNHead', 'BaseRecognizer', 'LOSSES', 'CrossEntropyLoss', 'NLLLoss', - 'HVULoss', 'ResNetTSM', 'ResNet3dSlowFast', 'SlowFastHead', 'Conv2plus1d', - 'ResNet3dSlowOnly', 'BCELossWithLogits', 'LOCALIZERS', 'build_localizer', - 'PEM', 'TAM', 'TEM', 'BinaryLogisticRegressionLoss', 'BMN', 'BMNLoss', - 'build_model', 'OHEMHingeLoss', 'SSNLoss', 'ResNet3dCSN', 'ResNetTIN', - 'TPN', 'TPNHead', 'build_loss', 'build_neck', 'AudioRecognizer', - 'AudioTSNHead', 'X3D', 'X3DHead', 'ResNet3dLayer', 'DETECTORS', - 'SingleRoIExtractor3D', 'BBoxHeadAVA', 'ResNetAudio', 'build_detector', - 'ConvAudio', 'AVARoIHead', 'MobileNetV2', 'MobileNetV2TSM', 'TANet', 'LFB', - 'FBOHead', 'LFBInferHead', 'TRNHead', 'NECKS', 'TimeSformer', - 'TimeSformerHead', 'DividedSpatialAttentionWithNorm', - 'DividedTemporalAttentionWithNorm', 'FFNWithNorm', 'ACRNHead', 'BaseGCN', - 'SkeletonGCN', 'CBFocalLoss' + 'BACKBONES', + 'HEADS', + 'RECOGNIZERS', + 'build_recognizer', + 'build_head', + 'build_backbone', + 'Recognizer2D', + 'Recognizer3D', + 'C3D', + 'ResNet', + 'STGCN', + 'ResNet3d', + 'ResNet2Plus1d', + 'I3DHead', + 'TSNHead', + 'TSMHead', + 'BaseHead', + 'STGCNHead', + 'BaseRecognizer', + 'LOSSES', + 'CrossEntropyLoss', + 'NLLLoss', + 'HVULoss', + 
'ResNetTSM', + 'ResNet3dSlowFast', + 'SlowFastHead', + 'Conv2plus1d', + 'ResNet3dSlowOnly', + 'BCELossWithLogits', + 'LOCALIZERS', + 'build_localizer', + 'PEM', + 'TAM', + 'TEM', + 'BinaryLogisticRegressionLoss', + 'BMN', + 'BMNLoss', + 'build_model', + 'OHEMHingeLoss', + 'SSNLoss', + 'ResNet3dCSN', + 'ResNetTIN', + 'TPN', + 'TPNHead', + 'build_loss', + 'build_neck', + 'AudioRecognizer', + 'AudioTSNHead', + 'X3D', + 'X3DHead', + 'ResNet3dLayer', + 'DETECTORS', + 'SingleRoIExtractor3D', + 'BBoxHeadAVA', + 'ResNetAudio', + 'build_detector', + 'ConvAudio', + 'AVARoIHead', + 'MobileNetV2', + 'MobileNetV2TSM', + 'TANet', + 'LFB', + 'FBOHead', + 'LFBInferHead', + 'TRNHead', + 'NECKS', + 'TimeSformer', + 'TimeSformerHead', + 'DividedSpatialAttentionWithNorm', + 'DividedTemporalAttentionWithNorm', + 'FFNWithNorm', + 'ACRNHead', + 'BaseGCN', + 'SkeletonGCN', + 'CBFocalLoss', + 'SubBatchNorm3D', ] diff --git a/mmaction/models/common/__init__.py b/mmaction/models/common/__init__.py index 7ed60859c5..3fca90af64 100644 --- a/mmaction/models/common/__init__.py +++ b/mmaction/models/common/__init__.py @@ -2,6 +2,7 @@ from .conv2plus1d import Conv2plus1d from .conv_audio import ConvAudio from .lfb import LFB +from .sub_batchnorm3d import SubBatchNorm3D from .tam import TAM from .transformer import (DividedSpatialAttentionWithNorm, DividedTemporalAttentionWithNorm, FFNWithNorm) @@ -9,5 +10,5 @@ __all__ = [ 'Conv2plus1d', 'ConvAudio', 'LFB', 'TAM', 'DividedSpatialAttentionWithNorm', 'DividedTemporalAttentionWithNorm', - 'FFNWithNorm' + 'FFNWithNorm', 'SubBatchNorm3D' ] diff --git a/mmaction/models/common/sub_batchnorm3d.py b/mmaction/models/common/sub_batchnorm3d.py new file mode 100644 index 0000000000..e7f4b890bb --- /dev/null +++ b/mmaction/models/common/sub_batchnorm3d.py @@ -0,0 +1,72 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from copy import deepcopy + +import torch +import torch.nn as nn +from mmcv.cnn import NORM_LAYERS + + +@NORM_LAYERS.register_module() +class SubBatchNorm3D(nn.Module): + """Sub BatchNorm3d. + + Args: + num_features (int): Dimensions of BatchNorm. + """ + + def __init__(self, num_features, **cfg): + super(SubBatchNorm3D, self).__init__() + + self.num_features = num_features + self.cfg_ = deepcopy(cfg) + self.num_splits = self.cfg_.pop('num_splits', 1) + self.num_features_split = self.num_features * self.num_splits + # only keep one set of affine params, not in .bn or .split_bn + self.cfg_['affine'] = False + self.bn = nn.BatchNorm3d(num_features, **self.cfg_) + self.split_bn = nn.BatchNorm3d(self.num_features_split, **self.cfg_) + self.init_weights(cfg) + + def init_weights(self, cfg): + if cfg.get('affine', True): + self.weight = torch.nn.Parameter(torch.ones(self.num_features)) + self.bias = torch.nn.Parameter(torch.zeros(self.num_features)) + self.affine = True + else: + self.affine = False + + def _get_aggregated_mean_std(self, means, stds, n): + mean = means.view(n, -1).sum(0) / n + std = stds.view(n, -1).sum(0) / n + ( + (means.view(n, -1) - mean)**2).view(n, -1).sum(0) / n + return mean.detach(), std.detach() + + def aggregate_stats(self): + """Synchronize running_mean, and running_var to self.bn. + + Call this before eval, then call model.eval(); When eval, forward + function will call self.bn instead of self.split_bn, During this time + the running_mean, and running_var of self.bn has been obtained from + self.split_bn. 
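+        In short: ``split_bn`` accumulates per-split statistics while
+        training; this method folds them into a single estimate stored in
+        ``self.bn``, which is the branch ``forward`` uses once the module
+        is switched to eval mode.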
+ """ + if self.split_bn.track_running_stats: + aggre_func = self._get_aggregated_mean_std + self.bn.running_mean.data, self.bn.running_var.data = aggre_func( + self.split_bn.running_mean, self.split_bn.running_var, + self.num_splits) + self.bn.num_batches_tracked = self.split_bn.num_batches_tracked.detach( + ) + + def forward(self, x): + if self.training: + n, c, t, h, w = x.shape + assert n % self.num_splits == 0 + x = x.view(n // self.num_splits, c * self.num_splits, t, h, w) + x = self.split_bn(x) + x = x.view(n, c, t, h, w) + else: + x = self.bn(x) + if self.affine: + x = x * self.weight.view(-1, 1, 1, 1) + x = x + self.bias.view(-1, 1, 1, 1) + return x diff --git a/mmaction/utils/multigrid/__init__.py b/mmaction/utils/multigrid/__init__.py new file mode 100644 index 0000000000..fd183a6df7 --- /dev/null +++ b/mmaction/utils/multigrid/__init__.py @@ -0,0 +1,8 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .longshortcyclehook import LongShortCycleHook +from .short_sampler import ShortCycleSampler +from .subbn_aggregate import SubBatchNorm3dAggregationHook + +__all__ = [ + 'ShortCycleSampler', 'LongShortCycleHook', 'SubBatchNorm3dAggregationHook' +] diff --git a/mmaction/utils/multigrid/longshortcyclehook.py b/mmaction/utils/multigrid/longshortcyclehook.py new file mode 100644 index 0000000000..8c27b14892 --- /dev/null +++ b/mmaction/utils/multigrid/longshortcyclehook.py @@ -0,0 +1,258 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import numpy as np +import torch +import torch.nn as nn +from mmcv.runner import Hook +from mmcv.runner.hooks.lr_updater import LrUpdaterHook, StepLrUpdaterHook +from torch.nn.modules.utils import _ntuple + +from mmaction.core.lr import RelativeStepLrUpdaterHook +from mmaction.utils import get_root_logger + + +def modify_subbn3d_num_splits(logger, module, num_splits): + """Recursively modify the number of splits of subbn3ds in module. + + Inheritates the running_mean and running_var from last subbn.bn. + Args: + logger (:obj:`logging.Logger`): The logger to log information. + module (nn.Module): The module to be modified. + num_splits (int): The targeted number of splits. + Returns: + int: The number of subbn3d modules modified. + """ + count = 0 + for child in module.children(): + from mmaction.models import SubBatchNorm3D + if isinstance(child, SubBatchNorm3D): + new_split_bn = nn.BatchNorm3d( + child.num_features * num_splits, affine=False).cuda() + new_state_dict = new_split_bn.state_dict() + + for param_name, param in child.bn.state_dict().items(): + # print('param_name', param_name, param.size()) + origin_param_shape = param.size() + new_param_shape = new_state_dict[param_name].size() + if len(origin_param_shape) == 1 and len( + new_param_shape + ) == 1 and new_param_shape[0] >= origin_param_shape[ + 0] and new_param_shape[0] % origin_param_shape[0] == 0: + # weight bias running_var running_mean + new_state_dict[param_name] = torch.cat( + [param] * + (new_param_shape[0] // origin_param_shape[0])) + else: + logger.info(f'skip {param_name}') + + child.num_splits = num_splits + new_split_bn.load_state_dict(new_state_dict) + child.split_bn = new_split_bn + count += 1 + else: + count += modify_subbn3d_num_splits(logger, child, num_splits) + return count + + +class LongShortCycleHook(Hook): + """A multigrid method for efficiently training video models. + + This hook defines multigrid training schedule and update cfg + accordingly, which is proposed in `A Multigrid Method for Efficiently + Training Video Models `_. 
+ Args: + cfg (:obj:`mmcv.ConfigDictg`): The whole config for the experiment. + """ + + def __init__(self, cfg): + self.cfg = cfg + self.multi_grid_cfg = cfg.get('multigrid', None) + self.data_cfg = cfg.get('data', None) + assert (self.multi_grid_cfg is not None and self.data_cfg is not None) + self.logger = get_root_logger() + self.logger.info(self.multi_grid_cfg) + + def before_run(self, runner): + """Called before running, change the StepLrUpdaterHook to + RelativeStepLrHook.""" + self._init_schedule(runner, self.multi_grid_cfg, self.data_cfg) + steps = [] + steps = [s[-1] for s in self.schedule] + steps.insert(-1, (steps[-2] + steps[-1]) // 2) # add finetune stage + for index, hook in enumerate(runner.hooks): + if isinstance(hook, StepLrUpdaterHook): + base_lr = hook.base_lr[0] + gamma = hook.gamma + lrs = [base_lr * gamma**s[0] * s[1][0] for s in self.schedule] + lrs = lrs[:-1] + [lrs[-2], lrs[-1] * gamma + ] # finetune-stage lrs + new_hook = RelativeStepLrUpdaterHook(runner, steps, lrs) + runner.hooks[index] = new_hook + + def before_train_epoch(self, runner): + """Before training epoch, update the runner based on long-cycle + schedule.""" + self._update_long_cycle(runner) + + def _update_long_cycle(self, runner): + """Before every epoch, check if long cycle shape should change. If it + should, change the pipelines accordingly. + + change dataloader and model's subbn3d(split_bn) + """ + base_b, base_t, base_s = self._get_schedule(runner.epoch) + + # rebuild dataset + from mmaction.datasets import build_dataset + resize_list = [] + for trans in self.cfg.data.train.pipeline: + if trans['type'] == 'SampleFrames': + curr_t = trans['clip_len'] + trans['clip_len'] = base_t + trans['frame_interval'] = (curr_t * + trans['frame_interval']) / base_t + elif trans['type'] == 'Resize': + resize_list.append(trans) + resize_list[-1]['scale'] = _ntuple(2)(base_s) + + ds = build_dataset(self.cfg.data.train) + + from mmaction.datasets import build_dataloader + + dataloader = build_dataloader( + ds, + self.data_cfg.videos_per_gpu * base_b, + self.data_cfg.workers_per_gpu, + dist=True, + num_gpus=len(self.cfg.gpu_ids), + drop_last=True, + seed=self.cfg.get('seed', None), + ) + runner.data_loader = dataloader + self.logger.info('Rebuild runner.data_loader') + + # the self._max_epochs is changed, therefore update here + runner._max_iters = runner._max_epochs * len(runner.data_loader) + + # rebuild all the sub_batch_bn layers + num_modifies = modify_subbn3d_num_splits(self.logger, runner.model, + base_b) + self.logger.info(f'{num_modifies} subbns modified to {base_b}.') + + def _get_long_cycle_schedule(self, runner, cfg): + # `schedule` is a list of [step_index, base_shape, epochs] + schedule = [] + avg_bs = [] + all_shapes = [] + self.default_size = self.default_t * self.default_s**2 + for t_factor, s_factor in cfg.long_cycle_factors: + base_t = int(round(self.default_t * t_factor)) + base_s = int(round(self.default_s * s_factor)) + if cfg.short_cycle: + # shape = [#frames, scale] + shapes = [[ + base_t, + int(round(self.default_s * cfg.short_cycle_factors[0])) + ], + [ + base_t, + int( + round(self.default_s * + cfg.short_cycle_factors[1])) + ], [base_t, base_s]] + else: + shapes = [[base_t, base_s]] + # calculate the batchsize, shape = [batchsize, #frames, scale] + shapes = [[ + int(round(self.default_size / (s[0] * s[1]**2))), s[0], s[1] + ] for s in shapes] + avg_bs.append(np.mean([s[0] for s in shapes])) + all_shapes.append(shapes) + + for hook in runner.hooks: + if isinstance(hook, LrUpdaterHook): + 
if isinstance(hook, StepLrUpdaterHook): + steps = hook.step if isinstance(hook.step, + list) else [hook.step] + steps = [0] + steps + break + else: + raise NotImplementedError( + 'Only step scheduler supports multi grid now') + else: + pass + total_iters = 0 + default_iters = steps[-1] + for step_index in range(len(steps) - 1): + # except the final step + step_epochs = steps[step_index + 1] - steps[step_index] + # number of epochs for this step + for long_cycle_index, shapes in enumerate(all_shapes): + cur_epochs = ( + step_epochs * avg_bs[long_cycle_index] / sum(avg_bs)) + cur_iters = cur_epochs / avg_bs[long_cycle_index] + total_iters += cur_iters + schedule.append((step_index, shapes[-1], cur_epochs)) + iter_saving = default_iters / total_iters + final_step_epochs = runner.max_epochs - steps[-1] + # the fine-tuning phase to have the same amount of iteration + # saving as the rest of the training + ft_epochs = final_step_epochs / iter_saving * avg_bs[-1] + # in `schedule` we ignore the shape of ShortCycle + schedule.append((step_index + 1, all_shapes[-1][-1], ft_epochs)) + + x = ( + runner.max_epochs * cfg.epoch_factor / sum(s[-1] + for s in schedule)) + runner._max_epochs = int(runner._max_epochs * cfg.epoch_factor) + final_schedule = [] + total_epochs = 0 + for s in schedule: + # extend the epochs by `factor` + epochs = s[2] * x + total_epochs += epochs + final_schedule.append((s[0], s[1], int(round(total_epochs)))) + self.logger.info(final_schedule) + return final_schedule + + def _print_schedule(self, schedule): + """logging the schedule.""" + self.logger.info('\tLongCycleId\tBase shape\tEpochs\t') + for s in schedule: + self.logger.info(f'\t{s[0]}\t{s[1]}\t{s[2]}\t') + + def _get_schedule(self, epoch): + """Returning the corresponding shape.""" + for s in self.schedule: + if epoch < s[-1]: + return s[1] + return self.schedule[-1][1] + + def _init_schedule(self, runner, multi_grid_cfg, data_cfg): + """Initialize the multi-grid shcedule. + + Args: + runner (:obj: `mmcv.Runner`): The runner within which to train. + multi_grid_cfg (:obj: `mmcv.ConfigDict`): The multi-grid config. + data_cfg (:obj: `mmcv.ConfigDict`): The data config. + """ + self.default_bs = data_cfg.videos_per_gpu + data_cfg = data_cfg.get('train', None) + final_resize_cfg = [ + aug for aug in data_cfg.pipeline if aug.type == 'Resize' + ][-1] + if isinstance(final_resize_cfg.scale, tuple): + # Assume square image + if max(final_resize_cfg.scale) == min(final_resize_cfg.scale): + self.default_s = max(final_resize_cfg.scale) + else: + raise NotImplementedError('non-square scale not considered.') + sample_frame_cfg = [ + aug for aug in data_cfg.pipeline if aug.type == 'SampleFrames' + ][0] + self.default_t = sample_frame_cfg.clip_len + + if multi_grid_cfg.long_cycle: + self.schedule = self._get_long_cycle_schedule( + runner, multi_grid_cfg) + else: + raise ValueError('There should be at least long cycle.') diff --git a/mmaction/utils/multigrid/short_sampler.py b/mmaction/utils/multigrid/short_sampler.py new file mode 100644 index 0000000000..7a30593164 --- /dev/null +++ b/mmaction/utils/multigrid/short_sampler.py @@ -0,0 +1,53 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import numpy as np +from torch.utils.data.sampler import Sampler + + +class ShortCycleSampler(Sampler): + """Extend Sampler to support "short cycle" sampling. + + See paper "A Multigrid Method for Efficiently Training Video Models", Wu et + al., 2019 (https://arxiv.org/abs/1912.00998) for details. 
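+    Each yielded batch is a list of ``(index, short_cycle_idx)`` tuples,
+    where ``short_cycle_idx`` cycles through 0, 1 and 2 and selects the
+    spatial scale the dataset uses for that batch; the batch size is
+    enlarged for the two smaller scales so that the cost per batch stays
+    roughly constant.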
+ """ + + def __init__(self, + sampler, + batch_size, + multigrid_cfg, + crop_size, + drop_last=True): + + self.sampler = sampler + self.drop_last = drop_last + + bs_factor = [ + int( + round( + (float(crop_size) / (s * multigrid_cfg.default_s[0]))**2)) + for s in multigrid_cfg.short_cycle_factors + ] + + self.batch_sizes = [ + batch_size * bs_factor[0], batch_size * bs_factor[1], batch_size + ] + + def __iter__(self): + counter = 0 + batch_size = self.batch_sizes[0] + batch = [] + for idx in self.sampler: + batch.append((idx, counter % 3)) + if len(batch) == batch_size: + yield batch + counter += 1 + batch_size = self.batch_sizes[counter % 3] + batch = [] + if len(batch) > 0 and not self.drop_last: + yield batch + + def __len__(self): + avg_batch_size = sum(self.batch_sizes) / 3.0 + if self.drop_last: + return int(np.floor(len(self.sampler) / avg_batch_size)) + else: + return int(np.ceil(len(self.sampler) / avg_batch_size)) diff --git a/mmaction/utils/multigrid/subbn_aggregate.py b/mmaction/utils/multigrid/subbn_aggregate.py new file mode 100644 index 0000000000..ce0da1f8a2 --- /dev/null +++ b/mmaction/utils/multigrid/subbn_aggregate.py @@ -0,0 +1,22 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from mmcv.runner import HOOKS, Hook + + +def aggregate_sub_bn_status(module): + from mmaction.models import SubBatchNorm3D + count = 0 + for child in module.children(): + if isinstance(child, SubBatchNorm3D): + child.aggregate_stats() + count += 1 + else: + count += aggregate_sub_bn_status(child) + return count + + +@HOOKS.register_module() +class SubBatchNorm3dAggregationHook(Hook): + """Recursively find all SubBN modules and aggregate sub-BN stats.""" + + def after_train_epoch(self, runner): + _ = aggregate_sub_bn_status(runner.model) diff --git a/tests/test_models/test_common.py b/tests/test_models/test_common.py index 0ee6e8abbe..3cd6de2f09 100644 --- a/tests/test_models/test_common.py +++ b/tests/test_models/test_common.py @@ -9,7 +9,7 @@ from mmaction.models.common import (LFB, TAM, Conv2plus1d, ConvAudio, DividedSpatialAttentionWithNorm, DividedTemporalAttentionWithNorm, - FFNWithNorm) + FFNWithNorm, SubBatchNorm3D) def test_conv2plus1d(): @@ -136,3 +136,14 @@ def test_LFB(): lmdb_map_size=1e6) lt_feat_lmdb = lfb_lmdb['video_1,930'] assert lt_feat_lmdb.shape == (3 * 30, 16) + + +def test_SubBatchNorm3D(): + _cfg = dict(num_splits=2) + num_features = 4 + sub_batchnorm_3d = SubBatchNorm3D(num_features, **_cfg) + assert sub_batchnorm_3d.bn.num_features == num_features + assert sub_batchnorm_3d.split_bn.num_features == num_features * 2 + + assert sub_batchnorm_3d.bn.affine is False + assert sub_batchnorm_3d.split_bn.affine is False diff --git a/tests/test_utils/test_module_hooks.py b/tests/test_utils/test_module_hooks.py index 6cd9fc1f3d..d77d9e94d9 100644 --- a/tests/test_utils/test_module_hooks.py +++ b/tests/test_utils/test_module_hooks.py @@ -10,6 +10,7 @@ from mmaction.models import build_recognizer from mmaction.utils import register_module_hooks from mmaction.utils.module_hooks import GPUNormalize +from mmaction.utils.multigrid import LongShortCycleHook def test_register_module_hooks(): @@ -119,3 +120,25 @@ def check_normalize(origin_imgs, result_imgs, norm_cfg): gpu_normalize_cfg['input_format'] = '_format' with pytest.raises(ValueError): gpu_normalize = GPUNormalize(**gpu_normalize_cfg) + + +def test_multigrid_hook(): + multigrid_cfg = dict(data=dict( + videos_per_gpu=8, + workers_per_gpu=4, + )) + with pytest.raises(AssertionError): + LongShortCycleHook(multigrid_cfg) + + 
multigrid_cfg = dict( + multigrid=dict( + long_cycle=True, + short_cycle=True, + epoch_factor=1.5, + long_cycle_factors=[[0.25, 0.7071], [0.5, 0.7071], [0.5, 1], + [1, 1]], + short_cycle_factors=[0.5, 0.7071], + default_s=(224, 224), + )) + with pytest.raises(AssertionError): + LongShortCycleHook(multigrid_cfg) From 70ae50c5d4718fe8754e002ba8e850a3d3011511 Mon Sep 17 00:00:00 2001 From: Jintao Lin <528557675@qq.com> Date: Mon, 7 Mar 2022 03:09:26 +0800 Subject: [PATCH 369/414] minor (#1493) --- mmaction/models/__init__.py | 92 +++++++------------------------------ 1 file changed, 16 insertions(+), 76 deletions(-) diff --git a/mmaction/models/__init__.py b/mmaction/models/__init__.py index 8d94e8762c..d3936ced2e 100644 --- a/mmaction/models/__init__.py +++ b/mmaction/models/__init__.py @@ -26,80 +26,20 @@ from .skeleton_gcn import BaseGCN, SkeletonGCN __all__ = [ - 'BACKBONES', - 'HEADS', - 'RECOGNIZERS', - 'build_recognizer', - 'build_head', - 'build_backbone', - 'Recognizer2D', - 'Recognizer3D', - 'C3D', - 'ResNet', - 'STGCN', - 'ResNet3d', - 'ResNet2Plus1d', - 'I3DHead', - 'TSNHead', - 'TSMHead', - 'BaseHead', - 'STGCNHead', - 'BaseRecognizer', - 'LOSSES', - 'CrossEntropyLoss', - 'NLLLoss', - 'HVULoss', - 'ResNetTSM', - 'ResNet3dSlowFast', - 'SlowFastHead', - 'Conv2plus1d', - 'ResNet3dSlowOnly', - 'BCELossWithLogits', - 'LOCALIZERS', - 'build_localizer', - 'PEM', - 'TAM', - 'TEM', - 'BinaryLogisticRegressionLoss', - 'BMN', - 'BMNLoss', - 'build_model', - 'OHEMHingeLoss', - 'SSNLoss', - 'ResNet3dCSN', - 'ResNetTIN', - 'TPN', - 'TPNHead', - 'build_loss', - 'build_neck', - 'AudioRecognizer', - 'AudioTSNHead', - 'X3D', - 'X3DHead', - 'ResNet3dLayer', - 'DETECTORS', - 'SingleRoIExtractor3D', - 'BBoxHeadAVA', - 'ResNetAudio', - 'build_detector', - 'ConvAudio', - 'AVARoIHead', - 'MobileNetV2', - 'MobileNetV2TSM', - 'TANet', - 'LFB', - 'FBOHead', - 'LFBInferHead', - 'TRNHead', - 'NECKS', - 'TimeSformer', - 'TimeSformerHead', - 'DividedSpatialAttentionWithNorm', - 'DividedTemporalAttentionWithNorm', - 'FFNWithNorm', - 'ACRNHead', - 'BaseGCN', - 'SkeletonGCN', - 'CBFocalLoss', - 'SubBatchNorm3D', + 'BACKBONES', 'HEADS', 'RECOGNIZERS', 'build_recognizer', 'build_head', + 'build_backbone', 'Recognizer2D', 'Recognizer3D', 'C3D', 'ResNet', 'STGCN', + 'ResNet3d', 'ResNet2Plus1d', 'I3DHead', 'TSNHead', 'TSMHead', 'BaseHead', + 'STGCNHead', 'BaseRecognizer', 'LOSSES', 'CrossEntropyLoss', 'NLLLoss', + 'HVULoss', 'ResNetTSM', 'ResNet3dSlowFast', 'SlowFastHead', 'Conv2plus1d', + 'ResNet3dSlowOnly', 'BCELossWithLogits', 'LOCALIZERS', 'build_localizer', + 'PEM', 'TAM', 'TEM', 'BinaryLogisticRegressionLoss', 'BMN', 'BMNLoss', + 'build_model', 'OHEMHingeLoss', 'SSNLoss', 'ResNet3dCSN', 'ResNetTIN', + 'TPN', 'TPNHead', 'build_loss', 'build_neck', 'AudioRecognizer', + 'AudioTSNHead', 'X3D', 'X3DHead', 'ResNet3dLayer', 'DETECTORS', + 'SingleRoIExtractor3D', 'BBoxHeadAVA', 'ResNetAudio', 'build_detector', + 'ConvAudio', 'AVARoIHead', 'MobileNetV2', 'MobileNetV2TSM', 'TANet', 'LFB', + 'FBOHead', 'LFBInferHead', 'TRNHead', 'NECKS', 'TimeSformer', + 'TimeSformerHead', 'DividedSpatialAttentionWithNorm', + 'DividedTemporalAttentionWithNorm', 'FFNWithNorm', 'ACRNHead', 'BaseGCN', + 'SkeletonGCN', 'CBFocalLoss', 'SubBatchNorm3D' ] From ccd88e598adb44b22a64c5e582873a773f22e710 Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Mon, 7 Mar 2022 12:15:14 +0800 Subject: [PATCH 370/414] [Docs] Changelog v0.22.0 (#1490) * master * master 0721 * add README * 1231 bump_version * 0207 
slowfast steplr * 0207 * add docs v0.22.0 * 0305 add changelog * 0305 add changelog * v0.22.0 --- README.md | 2 +- README_zh-CN.md | 2 +- docker/serve/Dockerfile | 2 +- docs/changelog.md | 53 +++++++++++++++++++++++++++++++++++++++++ mmaction/version.py | 2 +- 5 files changed, 57 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 8f3779e6ed..24354c7a91 100644 --- a/README.md +++ b/README.md @@ -79,7 +79,7 @@ The master branch works with **PyTorch 1.3+**. - (2021-10-25) We provide a [guide](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md) on how to train PoseC3D with custom datasets, [bit-scientist](https://github.com/bit-scientist) authored this PR! - (2021-10-16) We support **PoseC3D** on UCF101 and HMDB51, achieves 87.0% and 69.3% Top-1 accuracy with 2D skeletons only. Pre-extracted 2D skeletons are also available. -**Release**: v0.21.0 was released in 31/12/2021. Please refer to [changelog.md](docs/changelog.md) for details and release history. +**Release**: v0.22.0 was released in 05/03/2022. Please refer to [changelog.md](docs/changelog.md) for details and release history. ## Installation diff --git a/README_zh-CN.md b/README_zh-CN.md index b0b40ab078..83b4bfdce2 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -78,7 +78,7 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLa - (2021-10-25) 提供使用自定义数据集训练 PoseC3D 的 [教程](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md),此 PR 由用户 [bit-scientist](https://github.com/bit-scientist) 完成! - (2021-10-16) 在 UCF101, HMDB51 上支持 **PoseC3D**,仅用 2D 关键点就可分别达到 87.0% 和 69.3% 的识别准确率。两数据集的预提取骨架特征可以公开下载。 -v0.21.0 版本已于 2021 年 12 月 31 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史 +v0.22.0 版本已于 2022 年 3 月 5 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史 ## 安装 diff --git a/docker/serve/Dockerfile b/docker/serve/Dockerfile index 9518e0a92e..20fff4ba50 100644 --- a/docker/serve/Dockerfile +++ b/docker/serve/Dockerfile @@ -4,7 +4,7 @@ ARG CUDNN="7" FROM pytorch/pytorch:${PYTORCH}-cuda${CUDA}-cudnn${CUDNN}-devel ARG MMCV="1.3.8" -ARG MMACTION="0.21.0" +ARG MMACTION="0.22.0" ENV PYTHONUNBUFFERED TRUE diff --git a/docs/changelog.md b/docs/changelog.md index d6e7b0af2e..470fa3ff90 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -1,5 +1,58 @@ ## Changelog +### 0.22.0 (03/05/2022) + +**Highlights** + +- Support Multigrid training strategy +- Support CPU training +- Support audio demo +- Support topk customizing in models/heads/base.py + +**New Features** + +- Support Multigrid training strategy([#1378](https://github.com/open-mmlab/mmaction2/pull/1378)) +- Support STGCN in demo_skeleton.py([#1391](https://github.com/open-mmlab/mmaction2/pull/1391)) +- Support CPU training([#1407](https://github.com/open-mmlab/mmaction2/pull/1407)) +- Support audio demo([#1425](https://github.com/open-mmlab/mmaction2/pull/1425)) +- Support topk customizing in models/heads/base.py([#1452](https://github.com/open-mmlab/mmaction2/pull/1452)) + +**Documentations** + +- Add OpenMMLab platform([#1393](https://github.com/open-mmlab/mmaction2/pull/1393)) +- Update links([#1394](https://github.com/open-mmlab/mmaction2/pull/1394)) +- Update readme in configs([#1404](https://github.com/open-mmlab/mmaction2/pull/1404)) +- Update instructions to install mmcv-full([#1426](https://github.com/open-mmlab/mmaction2/pull/1426)) +- Add shortcut([#1433](https://github.com/open-mmlab/mmaction2/pull/1433)) +- Update 
modelzoo([#1439](https://github.com/open-mmlab/mmaction2/pull/1439)) +- add video_structuralize in readme([#1455](https://github.com/open-mmlab/mmaction2/pull/1455)) +- Update OpenMMLab repo information([#1482](https://github.com/open-mmlab/mmaction2/pull/1482)) + +**Bug and Typo Fixes** + +- Update train.py([#1375](https://github.com/open-mmlab/mmaction2/pull/1375)) +- Fix printout bug([#1382]((https://github.com/open-mmlab/mmaction2/pull/1382))) +- Update multi processing setting([#1395](https://github.com/open-mmlab/mmaction2/pull/1395)) +- Setup multi processing both in train and test([#1405](https://github.com/open-mmlab/mmaction2/pull/1405)) +- Fix bug in nondistributed multi-gpu training([#1406](https://github.com/open-mmlab/mmaction2/pull/1406)) +- Add variable fps in ava_dataset.py([#1409](https://github.com/open-mmlab/mmaction2/pull/1409)) +- Only support distributed training([#1414](https://github.com/open-mmlab/mmaction2/pull/1414)) +- Set test_mode for AVA configs([#1432](https://github.com/open-mmlab/mmaction2/pull/1432)) +- Support single label([#1434](https://github.com/open-mmlab/mmaction2/pull/1434)) +- Add check copyright([#1447](https://github.com/open-mmlab/mmaction2/pull/1447)) +- Support Windows CI([#1448](https://github.com/open-mmlab/mmaction2/pull/1448)) +- Fix wrong device of class_weight in models/losses/cross_entropy_loss.py([#1457](https://github.com/open-mmlab/mmaction2/pull/1457)) +- Fix bug caused by distributed([#1459](https://github.com/open-mmlab/mmaction2/pull/1459)) +- Update readme([#1460](https://github.com/open-mmlab/mmaction2/pull/1460)) +- Fix lint caused by colab automatic upload([#1461](https://github.com/open-mmlab/mmaction2/pull/1461)) +- Refine CI([#1471](https://github.com/open-mmlab/mmaction2/pull/1471)) +- Update pre-commit([#1474](https://github.com/open-mmlab/mmaction2/pull/1474)) +- Add deprecation message for deploy tool([#1483](https://github.com/open-mmlab/mmaction2/pull/1483)) + +**ModelZoo** + +- Support slowfast_steplr([#1421](https://github.com/open-mmlab/mmaction2/pull/1421)) + ### 0.21.0 (31/12/2021) **Highlights** diff --git a/mmaction/version.py b/mmaction/version.py index 19ddd59f9f..a8e7bbac41 100644 --- a/mmaction/version.py +++ b/mmaction/version.py @@ -1,6 +1,6 @@ # Copyright (c) Open-MMLab. All rights reserved. -__version__ = '0.21.0' +__version__ = '0.22.0' def parse_version_info(version_str): From 153504f46ae660714db8d18331ce594185fe53c9 Mon Sep 17 00:00:00 2001 From: gengenkai <30782254+gengenkai@users.noreply.github.com> Date: Tue, 8 Mar 2022 14:05:50 +0800 Subject: [PATCH 371/414] [Docs] Update gpus in Slowfast readme (#1497) * master * master 0721 * add README * 1231 bump_version * 0207 slowfast steplr * 0207 * modify gpus in readme * modify gpus in readme --- configs/recognition/slowfast/README.md | 2 +- configs/recognition/slowfast/README_zh-CN.md | 2 +- configs/recognition/slowfast/metafile.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/configs/recognition/slowfast/README.md b/configs/recognition/slowfast/README.md index ef3344550b..12bb22a5db 100644 --- a/configs/recognition/slowfast/README.md +++ b/configs/recognition/slowfast/README.md @@ -27,7 +27,7 @@ We present SlowFast networks for video recognition. 
Our model involves (i) a Slo |[slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 320|8x2| ResNet50|None |76.34|92.67|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb_20210722-bb725050.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log.json)| |[slowfast_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |short-side 320|8x3| ResNet50 |None|76.94|92.8|1.3 ((32+8)x10x3 frames)|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/slowfast_r50_8x8x1_256e_kinetics400_rgb_20200716-73547d2b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log.json)| |[slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.py) |short-side 320|8x4| ResNet50 |None|76.34|92.61||9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr-43988bac.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.json)| -|[slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.py) |short-side 320|8x3| ResNet50 |None|76.07|92.21|x|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb-f82bd304.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.json)| +|[slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.py) |short-side 320|8x2| ResNet50 |None|76.07|92.21|x|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb-f82bd304.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.json)| 
|[slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr](/configs/recognition/slowfast/slowfast_perbn_r50_8x8x1_256e_kinetics400_rgb_steplr.py) |short-side 320|8x4| ResNet50 |None|76.58|92.85||9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr-28474e54.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.json)| |[slowfast_r101_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 256|8x1| ResNet101 + ResNet50 |None|76.69|93.07||16628| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/slowfast_r101_4x16x1_256e_kinetics400_rgb_20210218-d8b58813.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log.json)| |[slowfast_r101_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb.py) |short-side 256|8x4| ResNet101 |None|77.90|93.51||25994| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/slowfast_r101_8x8x1_256e_kinetics400_rgb_20210218-0dd54025.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log.json)| diff --git a/configs/recognition/slowfast/README_zh-CN.md b/configs/recognition/slowfast/README_zh-CN.md index bdd0190412..7b662ffe08 100644 --- a/configs/recognition/slowfast/README_zh-CN.md +++ b/configs/recognition/slowfast/README_zh-CN.md @@ -26,7 +26,7 @@ |[slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb.py) |短边320|8x2| ResNet50|None |76.34|92.67|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb_20210722-bb725050.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log.json)| |[slowfast_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |短边320|8x3| ResNet50 |None|76.94|92.8|1.3 ((32+8)x10x3 frames)|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/slowfast_r50_8x8x1_256e_kinetics400_rgb_20200716-73547d2b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log)| 
[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log.json)|
 |[slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |短边320|8x4| ResNet50 |None|76.34|92.61|1.3 ((32+8)x10x3 frames)|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr-43988bac.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.json)|
-|[slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.py) |短边320|8x3| ResNet50 |None|76.07|92.21|x|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb-f82bd304.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.json)|
+|[slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.py) |短边320|8x2| ResNet50 |None|76.07|92.21|x|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb-f82bd304.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.json)|
 |[slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr](/configs/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb.py) |短边320|8x4| ResNet50 |None|76.58|92.85|1.3 ((32+8)x10x3 frames)|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr-28474e54.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.json)|
 |[slowfast_r101_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py) |短边256|8x1| ResNet101 + ResNet50 |None|76.69|93.07||16628| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/slowfast_r101_4x16x1_256e_kinetics400_rgb_20210218-d8b58813.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log.json)|
 |[slowfast_r101_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb.py) |短边256|8x4| ResNet101 |None|77.90|93.51||25994| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/slowfast_r101_8x8x1_256e_kinetics400_rgb_20210218-0dd54025.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log.json)|

diff --git a/configs/recognition/slowfast/metafile.yml b/configs/recognition/slowfast/metafile.yml
index 89e84ca64c..353631e385 100644
--- a/configs/recognition/slowfast/metafile.yml
+++ b/configs/recognition/slowfast/metafile.yml
@@ -131,7 +131,7 @@ Models:
     Pretrained: None
     Resolution: short-side 320
     Training Data: Kinetics-400
-    Training Resources: 24 GPUs
+    Training Resources: 16 GPUs
     Modality: RGB
   Name: slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb
   Results:

From 876330c19e5397de628a8a0d32a006a1cef39382 Mon Sep 17 00:00:00 2001
From: gengenkai <30782254+gengenkai@users.noreply.github.com>
Date: Tue, 8 Mar 2022 18:08:58 +0800
Subject: [PATCH 372/414] [Docs] Fix work_dir in multigrid config (#1498)

* master

* master 0721

* add README

* 1231 bump_version

* 0207 slowfast steplr

* 0207

* fix multigrid work_dir

* fix multigrid work_dir
---
 .../slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/configs/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.py b/configs/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.py
index 310cce0d30..0699459d2a 100644
--- a/configs/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.py
+++ b/configs/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.py
@@ -149,4 +149,4 @@
 
 load_from = None
 resume_from = None
-work_dir = './work_dirs/slowfast_r50_3d_8x8x1_256e_kinetics400_rgb'
+work_dir = './work_dirs/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb'

From 748aca2e2a43ebd23a60c1643b1ff13c5aced79b Mon Sep 17 00:00:00 2001
From: Jamie
Date: Tue, 8 Mar 2022 21:48:36 +0800
Subject: [PATCH 373/414] [Feature] Support custom_hooks in
 mmaction/apis/train.py (#1489)

Copy from mmdetection
https://github.com/open-mmlab/mmdetection/blob/98949809b7179fab9391663ee5a4ab5978425f90/mmdet/apis/train.py
---
 mmaction/apis/train.py | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/mmaction/apis/train.py b/mmaction/apis/train.py
index 71feec5661..5498a286e9 100644
--- a/mmaction/apis/train.py
+++ b/mmaction/apis/train.py
@@ -154,9 +154,13 @@ def train_model(model,
         optimizer_config = cfg.optimizer_config
 
     # register hooks
-    runner.register_training_hooks(cfg.lr_config, optimizer_config,
-                                   cfg.checkpoint_config, cfg.log_config,
-                                   cfg.get('momentum_config', None))
+    runner.register_training_hooks(
+        cfg.lr_config,
+        optimizer_config,
+        cfg.checkpoint_config,
+        cfg.log_config,
+        cfg.get('momentum_config', None),
+        custom_hooks_config=cfg.get('custom_hooks', None))
 
     # multigrid setting
     multigrid_cfg = cfg.get('multigrid', None)

From 5b94a04df931c4e89a73912a45ab1e33d2337b7a Mon Sep 17 00:00:00 2001
From: Haodong Duan
Date: Wed, 9 Mar 2022 22:32:13 +0800
Subject: [PATCH 374/414] [Improvement] Support Diff Seed (#1502)

---
 mmaction/datasets/builder.py | 1 +
 tools/train.py               | 6 ++++++
 2 files changed, 7 insertions(+)

diff --git a/mmaction/datasets/builder.py b/mmaction/datasets/builder.py
index 0b9816b6ce..8a516af542 100644
--- a/mmaction/datasets/builder.py
+++ b/mmaction/datasets/builder.py
@@ -165,3 +165,4 @@ def worker_init_fn(worker_id, num_workers, rank, seed):
     worker_seed = num_workers * rank + worker_id + seed
     np.random.seed(worker_seed)
     random.seed(worker_seed)
+    torch.manual_seed(worker_seed)
diff --git a/tools/train.py b/tools/train.py
index 84258f1c59..d404980464 100644
--- a/tools/train.py
+++ b/tools/train.py
@@ -8,6 +8,7 @@
 
 import mmcv
 import torch
+import torch.distributed as dist
 from mmcv import Config, DictAction
 from mmcv.runner import get_dist_info, init_dist, set_random_seed
 from mmcv.utils import get_git_hash
@@ -52,6 +53,10 @@ def parse_args():
         help='ids of gpus to use '
         '(only applicable to non-distributed training)')
     parser.add_argument('--seed', type=int, default=None, help='random seed')
+    parser.add_argument(
+        '--diff-seed',
+        action='store_true',
+        help='Whether or not set different seeds for different ranks')
     parser.add_argument(
         '--deterministic',
         action='store_true',
@@ -159,6 +164,7 @@ def main():
 
     # set random seeds
     seed = init_random_seed(args.seed, distributed=distributed)
+    seed = seed + dist.get_rank() if args.diff_seed else seed
     logger.info(f'Set random seed to {seed}, '
                 f'deterministic: {args.deterministic}')
     set_random_seed(seed, deterministic=args.deterministic)

From fa6eea8e028ecc26d0621f33f74a3372bee5b627 Mon Sep 17 00:00:00 2001
From: Jintao Lin <528557675@qq.com>
Date: Fri, 11 Mar 2022 13:50:28 +0800
Subject: [PATCH 375/414] [Fix] fix BC breaking (#1507)

---
 demo/demo_video_structuralize.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/demo/demo_video_structuralize.py b/demo/demo_video_structuralize.py
index 72120573a8..7fc5ee614b 100644
--- a/demo/demo_video_structuralize.py
+++ b/demo/demo_video_structuralize.py
@@ -494,8 +494,8 @@ def rgb_based_action_recognition(args):
     rgb_model.cfg = rgb_config
     rgb_model.to(args.device)
     rgb_model.eval()
-    action_results = inference_recognizer(rgb_model, args.video,
-                                          args.label_map)
+    action_results = inference_recognizer(
+        rgb_model, args.video, label_path=args.label_map)
     rgb_action_result = action_results[0][0]
     return rgb_action_result

From 4ce921bc2b5676a4a8e081f0038093dadbec3096 Mon Sep 17 00:00:00 2001
From: gengenkai <30782254+gengenkai@users.noreply.github.com>
Date: Fri, 11 Mar 2022 13:51:16 +0800
Subject: [PATCH 376/414] [Docs] Add Subbn docs (#1503)

* master

* master 0721

* add README

* 1231 bump_version

* 0207 slowfast steplr

* 0207

* add subbn docs

* add subbn docs
---
 mmaction/models/common/sub_batchnorm3d.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/mmaction/models/common/sub_batchnorm3d.py b/mmaction/models/common/sub_batchnorm3d.py
index e7f4b890bb..c020e87583 100644
--- a/mmaction/models/common/sub_batchnorm3d.py
+++ b/mmaction/models/common/sub_batchnorm3d.py
@@ -8,7 +8,10 @@
 
 @NORM_LAYERS.register_module()
 class SubBatchNorm3D(nn.Module):
-    """Sub BatchNorm3d.
+    """Sub BatchNorm3d splits the batch dimension into N splits, and run BN on
+    each of them separately (so that the stats are computed on each subset of
+    examples (1/N of batch) independently). During evaluation, it aggregates
+    the stats from all splits into one BN.
 
     Args:
         num_features (int): Dimensions of BatchNorm.
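The `SubBatchNorm3D` docstring above is easiest to digest with a toy version of the trick. The sketch below is a minimal stand-in, not MMAction2's actual implementation: it folds the split dimension into channels so each split collects independent statistics, and the aggregation is a plain average (a faithful variant would also correct the variance for differences between split means).

```python
import torch
import torch.nn as nn


class NaiveSubBatchNorm3d(nn.Module):
    """Toy sub-batch BN: run BN on each of `num_splits` batch subsets."""

    def __init__(self, num_features, num_splits=2):
        super().__init__()
        self.num_splits = num_splits
        # BN used at eval time, fed with aggregated statistics.
        self.bn = nn.BatchNorm3d(num_features, affine=False)
        # BN that sees the reshaped (split) batch during training.
        self.split_bn = nn.BatchNorm3d(num_features * num_splits, affine=False)

    def aggregate_stats(self):
        # Simplified aggregation: average running stats over the splits.
        self.bn.running_mean.copy_(
            self.split_bn.running_mean.view(self.num_splits, -1).mean(0))
        self.bn.running_var.copy_(
            self.split_bn.running_var.view(self.num_splits, -1).mean(0))

    def forward(self, x):
        if self.training:
            n, c, t, h, w = x.shape
            # Fold the splits into channels: each channel group now sees
            # only n / num_splits examples, i.e. per-split statistics.
            x = x.view(n // self.num_splits, c * self.num_splits, t, h, w)
            return self.split_bn(x).view(n, c, t, h, w)
        return self.bn(x)


layer = NaiveSubBatchNorm3d(8, num_splits=2)
x = torch.randn(4, 8, 2, 4, 4)
layer.train()
_ = layer(x)             # stats computed per split
layer.aggregate_stats()  # merge split stats before evaluation
layer.eval()
print(layer(x).shape)    # torch.Size([4, 8, 2, 4, 4])
```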
From 63554ddd1ba9ffa2aa6377af9087e95517ae5713 Mon Sep 17 00:00:00 2001
From: gengenkai <30782254+gengenkai@users.noreply.github.com>
Date: Mon, 14 Mar 2022 12:31:25 +0800
Subject: [PATCH 377/414] [Docs] Add shortcycle sampler docs (#1513)

* master

* master 0721

* add README

* 1231 bump_version

* 0207 slowfast steplr

* 0207

* add shortcycle sampler docs

* add shortcycle sampler docs

* add shortcycle sampler docs

* add shortcycle sampler docs
---
 mmaction/utils/multigrid/longshortcyclehook.py | 9 ++++-----
 mmaction/utils/multigrid/short_sampler.py      | 8 ++++++++
 2 files changed, 12 insertions(+), 5 deletions(-)

diff --git a/mmaction/utils/multigrid/longshortcyclehook.py b/mmaction/utils/multigrid/longshortcyclehook.py
index 8c27b14892..202c81045f 100644
--- a/mmaction/utils/multigrid/longshortcyclehook.py
+++ b/mmaction/utils/multigrid/longshortcyclehook.py
@@ -12,8 +12,8 @@
 
 def modify_subbn3d_num_splits(logger, module, num_splits):
     """Recursively modify the number of splits of subbn3ds in module.
-    Inheritates the running_mean and running_var from last subbn.bn.
+    Inheritates the running_mean and running_var from last subbn.bn.
+
     Args:
         logger (:obj:`logging.Logger`): The logger to log information.
         module (nn.Module): The module to be modified.
@@ -30,7 +30,6 @@ def modify_subbn3d_num_splits(logger, module, num_splits):
             new_state_dict = new_split_bn.state_dict()
 
             for param_name, param in child.bn.state_dict().items():
-                # print('param_name', param_name, param.size())
                 origin_param_shape = param.size()
                 new_param_shape = new_state_dict[param_name].size()
                 if len(origin_param_shape) == 1 and len(
@@ -59,6 +58,7 @@ class LongShortCycleHook(Hook):
     This hook defines multigrid training schedule and update cfg accordingly,
     which is proposed in `A Multigrid Method for Efficiently Training Video
    Models <https://arxiv.org/abs/1912.00998>`_.
+
     Args:
         cfg (:obj:`mmcv.ConfigDictg`): The whole config for the experiment.
     """
@@ -148,7 +148,6 @@ def _get_long_cycle_schedule(self, runner, cfg):
             base_t = int(round(self.default_t * t_factor))
             base_s = int(round(self.default_s * s_factor))
             if cfg.short_cycle:
-                # shape = [#frames, scale]
                 shapes = [[
                     base_t,
                     int(round(self.default_s * cfg.short_cycle_factors[0]))
@@ -228,11 +227,11 @@ def _get_schedule(self, epoch):
         return self.schedule[-1][1]
 
     def _init_schedule(self, runner, multi_grid_cfg, data_cfg):
-        """Initialize the multi-grid shcedule.
+        """Initialize the multigrid schedule.
 
         Args:
             runner (:obj: `mmcv.Runner`): The runner within which to train.
-            multi_grid_cfg (:obj: `mmcv.ConfigDict`): The multi-grid config.
+            multi_grid_cfg (:obj: `mmcv.ConfigDict`): The multigrid config.
             data_cfg (:obj: `mmcv.ConfigDict`): The data config.
         """
         self.default_bs = data_cfg.videos_per_gpu
diff --git a/mmaction/utils/multigrid/short_sampler.py b/mmaction/utils/multigrid/short_sampler.py
index 7a30593164..01326f85bf 100644
--- a/mmaction/utils/multigrid/short_sampler.py
+++ b/mmaction/utils/multigrid/short_sampler.py
@@ -8,6 +8,14 @@ class ShortCycleSampler(Sampler):
 
     See paper "A Multigrid Method for Efficiently Training Video Models",
     Wu et al., 2019 (https://arxiv.org/abs/1912.00998) for details.
+
+    Args:
+        sampler (:obj: `torch.Sampler`): The default sampler to be wrapped.
+        batch_size (int): The batch size before short-cycle modification.
+        multi_grid_cfg (dict): The config dict for multigrid training.
+        crop_size (int): The actual spatial scale.
+        drop_last (bool): Whether to drop the last incomplete batch in epoch.
+            Default: True.
     """
 
     def __init__(self,
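The short-cycle sampler documented in the patch above pairs smaller crops with proportionally larger batches. As a rough illustration of the bookkeeping only — `short_cycle_batch_sizes` is a hypothetical helper, not the `ShortCycleSampler` API — assuming the common multigrid factors `(0.5, 0.707)` and that per-iteration cost is kept roughly constant by holding `batch_size * crop_size**2` fixed:

```python
import itertools


def short_cycle_batch_sizes(base_batch_size, crop_size, factors=(0.5, 0.707)):
    """Return the batch size used at each step of one short cycle.

    The first steps use down-scaled crops (crop_size * factor), so the
    batch size is scaled up by 1 / factor**2; the last step uses the
    full crop with the base batch size.
    """
    sizes = []
    for factor in factors:
        small_crop = int(round(crop_size * factor))
        scale = (crop_size / small_crop) ** 2
        sizes.append(int(base_batch_size * scale))
    sizes.append(base_batch_size)
    return sizes


# One short cycle of 3 iterations, e.g. [64, 32, 16] clips per GPU:
print(short_cycle_batch_sizes(16, 224))  # -> [64, 32, 16]

# A wrapped sampler would then emit batches whose size follows this
# pattern cyclically over the epoch:
for step, bs in zip(range(6), itertools.cycle(short_cycle_batch_sizes(16, 224))):
    print(step, bs)
```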
""" def __init__(self, From b3d0b43259f18c2dea0bf30cc2768c2b7e6adb21 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Wed, 16 Mar 2022 19:51:33 +0800 Subject: [PATCH 378/414] [Doc] Update Windows Declaration (#1520) --- docs/install.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/install.md b/docs/install.md index c45756a0b7..c5db74571f 100644 --- a/docs/install.md +++ b/docs/install.md @@ -18,7 +18,7 @@ We provide some tips for MMAction2 installation in this file. ## Requirements -- Linux (Windows is not officially supported) +- Linux, Windows (We can successfully install mmaction2 on Windows and run inference, but we haven't tried training yet) - Python 3.6+ - PyTorch 1.3+ - CUDA 9.2+ (If you build PyTorch from source, CUDA 9.0 is also compatible) From d7591bf48722dcb83d8fa950d7cf33966053d43a Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Wed, 16 Mar 2022 20:06:05 +0800 Subject: [PATCH 379/414] [Improvement] Provide multi-node training & testing script (#1521) --- docs/getting_started.md | 18 +++++++++++++++--- docs_zh_CN/getting_started.md | 16 +++++++++++++++- tools/dist_test.sh | 7 +++++-- tools/dist_train.sh | 7 +++++-- 4 files changed, 40 insertions(+), 8 deletions(-) diff --git a/docs/getting_started.md b/docs/getting_started.md index 713a4adfcd..fef4b05e4a 100644 --- a/docs/getting_started.md +++ b/docs/getting_started.md @@ -412,9 +412,21 @@ GPUS=16 ./tools/slurm_train.sh dev tsn_r50_k400 configs/recognition/tsn/tsn_r50_ You can check [slurm_train.sh](/tools/slurm_train.sh) for full arguments and environment variables. -If you have just multiple machines connected with ethernet, you can refer to -pytorch [launch utility](https://pytorch.org/docs/stable/distributed.html#launch-utility). -Usually it is slow if you do not have high speed networking like InfiniBand. +If you have just multiple machines connected with ethernet, you can simply run the following commands: + +On the first machine: + +```shell +NNODES=2 NODE_RANK=0 PORT=$MASTER_PORT MASTER_ADDR=$MASTER_ADDR sh tools/dist_train.sh $CONFIG $GPUS +``` + +On the second machine: + +```shell +NNODES=2 NODE_RANK=1 PORT=$MASTER_PORT MASTER_ADDR=$MASTER_ADDR sh tools/dist_train.sh $CONFIG $GPUS +``` + +It can be extremely slow if you do not have high-speed networking like InfiniBand. 
 
 ### Launch multiple jobs on a single machine
 
diff --git a/docs_zh_CN/getting_started.md b/docs_zh_CN/getting_started.md
index c1bf43b241..9da0aa8065 100644
--- a/docs_zh_CN/getting_started.md
+++ b/docs_zh_CN/getting_started.md
@@ -396,7 +396,21 @@ GPUS=16 ./tools/slurm_train.sh dev tsn_r50_k400 configs/recognition/tsn/tsn_r50_
 
 用户可以查看 [slurm_train.sh](/tools/slurm_train.sh) 文件来检查完整的参数和环境变量。
 
-如果用户的多台机器通过 Ethernet 连接,则可以参考 pytorch [launch utility](https://pytorch.org/docs/stable/distributed.html#launch-utility)。如果用户没有高速网络,如 InfiniBand,速度将会非常慢。
+如果您想使用由 ethernet 连接起来的多台机器, 您可以使用以下命令:
+
+在第一台机器上:
+
+```shell
+NNODES=2 NODE_RANK=0 PORT=$MASTER_PORT MASTER_ADDR=$MASTER_ADDR sh tools/dist_train.sh $CONFIG $GPUS
+```
+
+在第二台机器上:
+
+```shell
+NNODES=2 NODE_RANK=1 PORT=$MASTER_PORT MASTER_ADDR=$MASTER_ADDR sh tools/dist_train.sh $CONFIG $GPUS
+```
+
+但是,如果您不使用高速网络连接这几台机器的话,训练将会非常慢。
 
 ### 使用单台机器启动多个任务
 
diff --git a/tools/dist_test.sh b/tools/dist_test.sh
index 88286cf08c..4e90525c09 100755
--- a/tools/dist_test.sh
+++ b/tools/dist_test.sh
@@ -1,5 +1,8 @@
 #!/usr/bin/env bash
 
+NNODES=${NNODES:-1}
+NODE_RANK=${NODE_RANK:-0}
+MASTER_ADDR=${MASTER_ADDR:-"127.0.0.1"}
 CONFIG=$1
 CHECKPOINT=$2
 GPUS=$3
@@ -7,5 +10,5 @@
 PORT=${PORT:-29500}
 
 PYTHONPATH="$(dirname $0)/..":$PYTHONPATH \
 # Arguments starting from the forth one are captured by ${@:4}
-python -m torch.distributed.launch --nproc_per_node=$GPUS --master_port=$PORT \
-    $(dirname "$0")/test.py $CONFIG $CHECKPOINT --launcher pytorch ${@:4}
+python -m torch.distributed.launch --nnodes=$NNODES --node_rank=$NODE_RANK --master_addr=$MASTER_ADDR \
+    --nproc_per_node=$GPUS --master_port=$PORT $(dirname "$0")/test.py $CONFIG $CHECKPOINT --launcher pytorch ${@:4}
diff --git a/tools/dist_train.sh b/tools/dist_train.sh
index 47ce8ea3de..8944199038 100755
--- a/tools/dist_train.sh
+++ b/tools/dist_train.sh
@@ -1,10 +1,13 @@
 #!/usr/bin/env bash
 
+NNODES=${NNODES:-1}
+NODE_RANK=${NODE_RANK:-0}
+MASTER_ADDR=${MASTER_ADDR:-"127.0.0.1"}
 CONFIG=$1
 GPUS=$2
 PORT=${PORT:-29500}
 
 PYTHONPATH="$(dirname $0)/..":$PYTHONPATH \
-python -m torch.distributed.launch --nproc_per_node=$GPUS --master_port=$PORT \
-    $(dirname "$0")/train.py $CONFIG --launcher pytorch ${@:3}
+python -m torch.distributed.launch --nnodes=$NNODES --node_rank=$NODE_RANK --master_addr=$MASTER_ADDR \
+    --nproc_per_node=$GPUS --master_port=$PORT $(dirname "$0")/train.py $CONFIG --launcher pytorch ${@:3}
 # Any arguments from the third one are captured by ${@:3}
From 2068e1e7e42d7ea431fd5bd8ba789d104a959a6e Mon Sep 17 00:00:00 2001
From: Haodong Duan
Date: Wed, 16 Mar 2022 20:16:23 +0800
Subject: [PATCH 380/414] [Update] Update colab tutorial install cmds (#1522)

---
 demo/mmaction2_tutorial.ipynb       | 136 ++++++++++++++--------------
 demo/mmaction2_tutorial_zh-CN.ipynb |  22 ++++-
 2 files changed, 86 insertions(+), 72 deletions(-)

diff --git a/demo/mmaction2_tutorial.ipynb b/demo/mmaction2_tutorial.ipynb
index aadc0a6fa8..1996a2384a 100644
--- a/demo/mmaction2_tutorial.ipynb
+++ b/demo/mmaction2_tutorial.ipynb
@@ -3,8 +3,8 @@
   {
    "cell_type": "markdown",
    "metadata": {
-    "id": "view-in-github",
-    "colab_type": "text"
+    "colab_type": "text",
+    "id": "view-in-github"
    },
    "source": [
     "\"Open"
@@ -47,8 +47,8 @@
    },
    "outputs": [
     {
-     "output_type": "stream",
      "name": "stdout",
+     "output_type": "stream",
      "text": [
       "nvcc: NVIDIA (R) Cuda compiler driver\n",
       "Copyright (c) 2005-2020 NVIDIA Corporation\n",
@@ -82,8 +82,8 @@
    },
    "outputs": [
     {
-     "output_type": "stream",
      "name": "stdout",
+     "output_type": "stream",
      "text": [
       "Looking in links: https://download.pytorch.org/whl/torch_stable.html\n",
       "Collecting torch==1.8.0+cu101\n",
@@ -250,11 +250,11 @@
    }
   ],
   "source": [
-   "# install dependencies: (use cu101 because colab has CUDA 10.1)\n",
-   "!pip install -U torch==1.8.0+cu101 torchvision==0.9.0+cu101 torchtext==0.9.0 torchaudio==0.8.0 -f https://download.pytorch.org/whl/torch_stable.html\n",
+   "# install dependencies: (use cu111 because colab has CUDA 11.1)\n",
+   "!pip install torch==1.9.0+cu111 torchvision==0.10.0+cu111 -f https://download.pytorch.org/whl/torch_stable.html\n",
    "\n",
    "# install mmcv-full thus we could use CUDA operators\n",
-   "!pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cu101/torch1.8.0/index.html\n",
+   "!pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cu111/torch1.9.0/index.html\n",
    "\n",
    "# Install mmaction2\n",
    "!rm -rf mmaction2\n",
@@ -279,8 +279,8 @@
    },
    "outputs": [
     {
-     "output_type": "stream",
      "name": "stdout",
+     "output_type": "stream",
      "text": [
       "1.8.0+cu101 True\n",
       "0.21.0\n",
@@ -1234,38 +1234,28 @@
   },
   {
    "cell_type": "markdown",
+   "metadata": {
+    "id": "jZ4t44nWmZDM"
+   },
    "source": [
     "## Perform Spatio-Temporal Detection\n",
     "Here we first install MMDetection."
-   ],
-   "metadata": {
-    "id": "jZ4t44nWmZDM"
-   }
+   ]
  },
 {
   "cell_type": "code",
-  "source": [
-   "# Git clone mmdetection repo\n",
-   "%cd ..\n",
-   "!git clone https://github.com/open-mmlab/mmdetection.git\n",
-   "%cd mmdetection\n",
-   "\n",
-   "# install mmdet\n",
-   "!pip install -e .\n",
-   "%cd ../mmaction2"
-  ],
+  "execution_count": 3,
   "metadata": {
-   "id": "w1p0_g76nHOQ",
-   "outputId": "b30a6be3-c457-452e-c789-7083117c5011",
    "colab": {
     "base_uri": "https://localhost:8080/"
-   }
+   },
+   "id": "w1p0_g76nHOQ",
+   "outputId": "b30a6be3-c457-452e-c789-7083117c5011"
  },
-  "execution_count": 3,
  "outputs": [
   {
-    "output_type": "stream",
    "name": "stdout",
+    "output_type": "stream",
    "text": [
     "/content\n",
     "Cloning into 'mmdetection'...\n",
     "/content/mmaction2\n"
    ]
   }
+ ],
+ "source": [
+  "# Git clone mmdetection repo\n",
+  "%cd ..\n",
+  "!git clone https://github.com/open-mmlab/mmdetection.git\n",
+  "%cd mmdetection\n",
+  "\n",
+  "# install mmdet\n",
+  "!pip install -e .\n",
+  "%cd ../mmaction2"
  ]
 },
 {
  "cell_type": "markdown",
- "source": [
-  "Download a video to `demo` directory in MMAction2."
- ],
  "metadata": {
   "id": "vlOQsH8OnVKn"
- }
+ },
+ "source": [
+  "Download a video to `demo` directory in MMAction2."
+ ]
 },
 {
  "cell_type": "code",
- "source": [
-  "!wget https://download.openmmlab.com/mmaction/dataset/sample/1j20qq1JyX4.mp4 -O demo/1j20qq1JyX4.mp4"
- ],
+ "execution_count": 4,
 "metadata": {
-  "id": "QaW3jg5Enish",
-  "outputId": "c70cde3a-b337-41d0-cb08-82dfc746d9ef",
  "colab": {
   "base_uri": "https://localhost:8080/"
- }
+ },
+ "id": "QaW3jg5Enish",
+ "outputId": "c70cde3a-b337-41d0-cb08-82dfc746d9ef"
 },
- "execution_count": 4,
 "outputs": [
  {
-   "output_type": "stream",
   "name": "stdout",
+   "output_type": "stream",
   "text": [
    "--2022-02-19 11:02:59--  https://download.openmmlab.com/mmaction/dataset/sample/1j20qq1JyX4.mp4\n",
    "Resolving download.openmmlab.com (download.openmmlab.com)... 47.254.186.233\n",
    "\n"
   ]
  }
+ ],
+ "source": [
+  "!wget https://download.openmmlab.com/mmaction/dataset/sample/1j20qq1JyX4.mp4 -O demo/1j20qq1JyX4.mp4"
 ]
 },
 {
 "cell_type": "markdown",
- "source": [
-  "Run spatio-temporal demo."
- ],
 "metadata": {
  "id": "LYGxdu8Vnoah"
- }
+ },
+ "source": [
+  "Run spatio-temporal demo."
+ ]
 },
 {
 "cell_type": "code",
- "source": [
-  "!python demo/demo_spatiotemporal_det.py --video demo/1j20qq1JyX4.mp4"
- ],
+ "execution_count": 5,
 "metadata": {
-  "id": "LPLiaHaYnrb7",
-  "outputId": "8a8f8a16-ad7b-4559-c19c-c8264533bff3",
  "colab": {
   "base_uri": "https://localhost:8080/"
- }
+ },
+ "id": "LPLiaHaYnrb7",
+ "outputId": "8a8f8a16-ad7b-4559-c19c-c8264533bff3"
 },
- "execution_count": 5,
 "outputs": [
  {
-   "output_type": "stream",
   "name": "stdout",
+   "output_type": "stream",
   "text": [
    "Imageio: 'ffmpeg-linux64-v3.3.1' was not found on your computer; downloading it now.\n",
    "Try 1. Download from https://github.com/imageio/imageio-binaries/raw/master/ffmpeg/ffmpeg-linux64-v3.3.1 (43.8 MB)\n",
    "\n"
   ]
  }
+ ],
+ "source": [
+  "!python demo/demo_spatiotemporal_det.py --video demo/1j20qq1JyX4.mp4"
 ]
 },
 {
 "cell_type": "code",
- "source": [
-  "# Check the video\n",
-  "from IPython.display import HTML\n",
-  "from base64 import b64encode\n",
-  "mp4 = open('demo/stdet_demo.mp4','rb').read()\n",
-  "data_url = \"data:video/mp4;base64,\" + b64encode(mp4).decode()\n",
-  "HTML(\"\"\"\n",
-  "\n",
-  "\"\"\" % data_url)"
- ],
+ "execution_count": 6,
 "metadata": {
  "colab": {
   "base_uri": "https://localhost:8080/",
  },
  "id": "-0atQCzBo9-C",
  "outputId": "b6bb3a67-669c-45d0-cdf4-25b6210362d0"
 },
- "execution_count": 6,
 "outputs": [
  {
-   "output_type": "execute_result",
   "data": {
    "text/html": [
     "\n",
     "\n",
     ""
    ]
   },
+   "execution_count": 6,
   "metadata": {},
-   "execution_count": 6
+   "output_type": "execute_result"
  }
+ ],
+ "source": [
+  "# Check the video\n",
+  "from IPython.display import HTML\n",
+  "from base64 import b64encode\n",
+  "mp4 = open('demo/stdet_demo.mp4','rb').read()\n",
+  "data_url = \"data:video/mp4;base64,\" + b64encode(mp4).decode()\n",
+  "HTML(\"\"\"\n",
+  "\n",
+  "\"\"\" % data_url)"
 ]
 }
 ],
 "metadata": {
 "accelerator": "GPU",
 "colab": {
  "collapsed_sections": [],
+ "include_colab_link": true,
  "name": "MMAction2 Tutorial.ipynb",
  "provenance": [],
- "toc_visible": true,
- "include_colab_link": true
+ "toc_visible": true
 },
 "kernelspec": {
  "display_name": "Python 3",
diff --git a/demo/mmaction2_tutorial_zh-CN.ipynb b/demo/mmaction2_tutorial_zh-CN.ipynb
index 28940ce931..57bad5e85a 100644
--- a/demo/mmaction2_tutorial_zh-CN.ipynb
+++ b/demo/mmaction2_tutorial_zh-CN.ipynb
@@ -55,8 +55,9 @@
    }
   ],
   "source": [
-   "# 查看环境中pytorch版本以便mmcv对应版本下载\n",
-   "!pip list | grep torch"
+   "# 检查 nvcc,gcc 版本\n",
+   "!nvcc -V\n",
+   "!gcc --version"
   ]
@@ -93,8 +94,21 @@
    }
   ],
   "source": [
-   "# 安装mmcv-full,注意需要对应pytorch1.8和cuda10.1版本\n",
-   "!pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cu101/torch1.8.0/index.html"
+   "# 安装 torch 及 torchvision\n",
+   "!pip install torch==1.9.0+cu111 torchvision==0.10.0+cu111 -f https://download.pytorch.org/whl/torch_stable.html\n",
+   "\n",
+   "# 安装 mmcv-full\n",
+   "!pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cu111/torch1.9.0/index.html\n",
+   "\n",
+   "# 安装 mmaction2\n",
+   "!rm -rf mmaction2\n",
+   "!git clone https://github.com/open-mmlab/mmaction2.git\n",
+   "%cd mmaction2\n",
+   "\n",
+   "!pip install -e .\n",
+   "\n",
+   "# 安装其他可选依赖库\n",
+   "!pip install -r requirements/optional.txt"
   ]

From 88ffae3a5099d7eb3a4c8c21cfbdaec9e03ca7b7 Mon Sep 17 00:00:00 2001
From: Jintao Lin <528557675@qq.com>
Date: Mon, 21 Mar 2022 23:25:38 +0800
Subject: [PATCH 381/414] [Fix] fix num_iters_per_epoch (#1530)

---
 tools/analysis/analyze_logs.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tools/analysis/analyze_logs.py b/tools/analysis/analyze_logs.py
index d0e1a02608..4d2ca5b018 100644
--- a/tools/analysis/analyze_logs.py
+++ b/tools/analysis/analyze_logs.py
@@ -55,11 +55,11 @@ def plot_curve(log_dicts, args):
                     f'{args.json_logs[i]} does not contain metric {metric}')
             xs = []
             ys = []
-            num_iters_per_epoch = log_dict[epochs[0]]['iter'][-1]
             for epoch in epochs:
                 iters = log_dict[epoch]['iter']
                 if log_dict[epoch]['mode'][-1] == 'val':
                     iters = iters[:-1]
+                num_iters_per_epoch = iters[-1]
                 xs.append(np.array(iters) + (epoch - 1) * num_iters_per_epoch)
                 ys.append(np.array(log_dict[epoch][metric][:len(iters)]))
             xs = np.concatenate(xs)
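The one-line fix above recomputes the epoch length inside the loop, so logs whose epochs contain different numbers of iterations no longer reuse epoch 1's length. A toy reproduction with hypothetical log data (only the `iter`/`mode` keys from the diff are assumed):

```python
import numpy as np

log_dict = {
    1: {'iter': [100, 200, 300], 'mode': ['train', 'train', 'train']},
    2: {'iter': [100, 200], 'mode': ['train', 'train']},  # shorter epoch
}

xs = []
for epoch in sorted(log_dict):
    iters = log_dict[epoch]['iter']
    # Fixed behaviour: each epoch uses its own last iteration count.
    # (The old code always used epoch 1's count, here 300, which would
    # have placed epoch 2 at [400, 500] instead of [300, 400].)
    num_iters_per_epoch = iters[-1]
    xs.append(np.array(iters) + (epoch - 1) * num_iters_per_epoch)
print(np.concatenate(xs))  # [100 200 300 300 400]
```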
+ """ + if seed is None: + seed = np.random.randint(2**31) + assert isinstance(seed, int) + + rank, world_size = get_dist_info() + + if world_size == 1: + return seed + + if rank == 0: + random_num = torch.tensor(seed, dtype=torch.int32, device=device) + else: + random_num = torch.tensor(0, dtype=torch.int32, device=device) + dist.broadcast(random_num, src=0) + return random_num.item() diff --git a/mmaction/datasets/samplers/distributed_sampler.py b/mmaction/datasets/samplers/distributed_sampler.py index 15b2c9d248..1d54079de0 100644 --- a/mmaction/datasets/samplers/distributed_sampler.py +++ b/mmaction/datasets/samplers/distributed_sampler.py @@ -5,6 +5,8 @@ import torch from torch.utils.data import DistributedSampler as _DistributedSampler +from mmaction.core import sync_random_seed + class DistributedSampler(_DistributedSampler): """DistributedSampler inheriting from @@ -23,7 +25,12 @@ def __init__(self, super().__init__( dataset, num_replicas=num_replicas, rank=rank, shuffle=shuffle) # for the compatibility from PyTorch 1.3+ - self.seed = seed if seed is not None else 0 + # In distributed sampling, different ranks should sample non-overlapped + # data in the dataset. Therefore, this function is used to make sure + # that each rank shuffles the data indices in the same order based + # on the same seed. Then different ranks could use different indices + # to select non-overlapped data from the same data list. + self.seed = sync_random_seed(seed) if seed is not None else 0 def __iter__(self): # deterministically shuffle based on epoch From dc9b3a18a58e9d5907ca18b12349367f1a3efe8b Mon Sep 17 00:00:00 2001 From: StephenFang <48404864+Stephenfang51@users.noreply.github.com> Date: Tue, 29 Mar 2022 21:08:56 +0800 Subject: [PATCH 383/414] [Fix] update the link for the article (#1544) --- configs/skeleton/stgcn/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/configs/skeleton/stgcn/README.md b/configs/skeleton/stgcn/README.md index 0f1d51c98b..0b2c3f481c 100644 --- a/configs/skeleton/stgcn/README.md +++ b/configs/skeleton/stgcn/README.md @@ -1,6 +1,6 @@ # STGCN -[Spatial temporal graph convolutional networks for skeleton-based action recognition](https://jivp-eurasipjournals.springeropen.com/articles/10.1186/s13640-019-0476-x) +[Spatial temporal graph convolutional networks for skeleton-based action recognition](https://ojs.aaai.org/index.php/AAAI/article/view/12328) From 0d37a3a135091c06cd2d33a078b1eaad8d325e72 Mon Sep 17 00:00:00 2001 From: CCODING Date: Tue, 29 Mar 2022 21:10:03 +0800 Subject: [PATCH 384/414] [Fix] fix cd wrong dir error (#1545) --- tools/data/kinetics/rename_classnames.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/data/kinetics/rename_classnames.sh b/tools/data/kinetics/rename_classnames.sh index e36f847bba..a2b7a1b405 100644 --- a/tools/data/kinetics/rename_classnames.sh +++ b/tools/data/kinetics/rename_classnames.sh @@ -26,4 +26,4 @@ ls ./videos_val | while read class; do \ fi done -cd ../../tools/data/${DATASET}/ +cd ../../tools/data/kinetics/ From ee932ba41dabde480469dcfc52d91d22d20b8dda Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Wed, 30 Mar 2022 15:51:10 +0800 Subject: [PATCH 385/414] [Improvement] Update error log (#1546) * update error log * fix bug --- demo/demo_skeleton.py | 31 +++++----------- demo/demo_spatiotemporal_det.py | 13 ++----- demo/demo_video_structuralize.py | 34 ++++++----------- demo/webcam_demo_spatiotemporal_det.py | 13 ++----- .../bbox/assigners/max_iou_assigner_ava.py | 11 ++++-- 
From ee932ba41dabde480469dcfc52d91d22d20b8dda Mon Sep 17 00:00:00 2001
From: Haodong Duan
Date: Wed, 30 Mar 2022 15:51:10 +0800
Subject: [PATCH 385/414] [Improvement] Update error log (#1546)

* update error log

* fix bug
---
 demo/demo_skeleton.py                         | 31 +++++-----------
 demo/demo_spatiotemporal_det.py               | 13 ++-----
 demo/demo_video_structuralize.py              | 34 ++++++-----------
 demo/webcam_demo_spatiotemporal_det.py        | 13 ++-----
 .../bbox/assigners/max_iou_assigner_ava.py    | 11 ++++--
 mmaction/models/backbones/resnet_tin.py       | 16 ++++----
 mmaction/models/builder.py                    |  8 ++--
 mmaction/models/heads/roi_head.py             | 11 ++++--
 .../roi_extractors/single_straight3d.py       | 22 ++++-------
 mmaction/utils/__init__.py                    |  4 +-
 mmaction/utils/decorators.py                  | 37 -------------------
 tests/test_utils/test_bbox.py                 | 20 ++++------
 tests/test_utils/test_decorator.py            | 33 -----------------
 tools/data/skeleton/ntu_pose_extraction.py    | 15 +++++---
 14 files changed, 77 insertions(+), 191 deletions(-)
 delete mode 100644 mmaction/utils/decorators.py
 delete mode 100644 tests/test_utils/test_decorator.py

diff --git a/demo/demo_skeleton.py b/demo/demo_skeleton.py
index 859a54e34f..f74f593f3d 100644
--- a/demo/demo_skeleton.py
+++ b/demo/demo_skeleton.py
@@ -11,34 +11,21 @@
 from mmcv import DictAction
 
 from mmaction.apis import inference_recognizer, init_recognizer
-from mmaction.utils import import_module_error_func
 
 try:
     from mmdet.apis import inference_detector, init_detector
+except (ImportError, ModuleNotFoundError):
+    raise ImportError('Failed to import `inference_detector` and '
+                      '`init_detector` from `mmdet.apis`. These apis are '
+                      'required in this demo! ')
+
+try:
     from mmpose.apis import (inference_top_down_pose_model, init_pose_model,
                              vis_pose_result)
 except (ImportError, ModuleNotFoundError):
-
-    @import_module_error_func('mmdet')
-    def inference_detector(*args, **kwargs):
-        pass
-
-    @import_module_error_func('mmdet')
-    def init_detector(*args, **kwargs):
-        pass
-
-    @import_module_error_func('mmpose')
-    def init_pose_model(*args, **kwargs):
-        pass
-
-    @import_module_error_func('mmpose')
-    def inference_top_down_pose_model(*args, **kwargs):
-        pass
-
-    @import_module_error_func('mmpose')
-    def vis_pose_result(*args, **kwargs):
-        pass
-
+    raise ImportError('Failed to import `inference_top_down_pose_model`, '
+                      '`init_pose_model`, and `vis_pose_result` from '
+                      '`mmpose.apis`. These apis are required in this demo! ')
 
 try:
     import moviepy.editor as mpy
diff --git a/demo/demo_spatiotemporal_det.py b/demo/demo_spatiotemporal_det.py
index 38bb533237..78dd7bcaa9 100644
--- a/demo/demo_spatiotemporal_det.py
+++ b/demo/demo_spatiotemporal_det.py
@@ -13,20 +13,13 @@
 from mmcv.runner import load_checkpoint
 
 from mmaction.models import build_detector
-from mmaction.utils import import_module_error_func
 
 try:
     from mmdet.apis import inference_detector, init_detector
 except (ImportError, ModuleNotFoundError):
-
-    @import_module_error_func('mmdet')
-    def inference_detector(*args, **kwargs):
-        pass
-
-    @import_module_error_func('mmdet')
-    def init_detector(*args, **kwargs):
-        pass
-
+    raise ImportError('Failed to import `inference_detector` and '
+                      '`init_detector` from `mmdet.apis`. These apis are '
+                      'required in this demo! ')
 
 try:
     import moviepy.editor as mpy
diff --git a/demo/demo_video_structuralize.py b/demo/demo_video_structuralize.py
index 7fc5ee614b..a0cfc93ad7 100644
--- a/demo/demo_video_structuralize.py
+++ b/demo/demo_video_structuralize.py
@@ -4,6 +4,7 @@
 import os
 import os.path as osp
 import shutil
+import warnings
 
 import cv2
 import mmcv
@@ -15,35 +16,22 @@
 from mmaction.apis import inference_recognizer
 from mmaction.datasets.pipelines import Compose
 from mmaction.models import build_detector, build_model, build_recognizer
-from mmaction.utils import import_module_error_func
 
 try:
     from mmdet.apis import inference_detector, init_detector
+except (ImportError, ModuleNotFoundError):
+    warnings.warn('Failed to import `inference_detector` and `init_detector` '
+                  'from `mmdet.apis`. These apis are required in '
+                  'skeleton-based applications! ')
+
+try:
     from mmpose.apis import (inference_top_down_pose_model, init_pose_model,
                              vis_pose_result)
-
 except (ImportError, ModuleNotFoundError):
-
-    @import_module_error_func('mmdet')
-    def inference_detector(*args, **kwargs):
-        pass
-
-    @import_module_error_func('mmdet')
-    def init_detector(*args, **kwargs):
-        pass
-
-    @import_module_error_func('mmpose')
-    def init_pose_model(*args, **kwargs):
-        pass
-
-    @import_module_error_func('mmpose')
-    def inference_top_down_pose_model(*args, **kwargs):
-        pass
-
-    @import_module_error_func('mmpose')
-    def vis_pose_result(*args, **kwargs):
-        pass
-
+    warnings.warn('Failed to import `inference_top_down_pose_model`, '
+                  '`init_pose_model`, and `vis_pose_result` from '
+                  '`mmpose.apis`. These apis are required in skeleton-based '
+                  'applications! ')
 
 try:
     import moviepy.editor as mpy
diff --git a/demo/webcam_demo_spatiotemporal_det.py b/demo/webcam_demo_spatiotemporal_det.py
index 0f473578db..fd02cbdb8b 100644
--- a/demo/webcam_demo_spatiotemporal_det.py
+++ b/demo/webcam_demo_spatiotemporal_det.py
@@ -21,20 +21,13 @@
 from mmcv.runner import load_checkpoint
 
 from mmaction.models import build_detector
-from mmaction.utils import import_module_error_func
 
 try:
     from mmdet.apis import inference_detector, init_detector
 except (ImportError, ModuleNotFoundError):
-
-    @import_module_error_func('mmdet')
-    def inference_detector(*args, **kwargs):
-        pass
-
-    @import_module_error_func('mmdet')
-    def init_detector(*args, **kwargs):
-        pass
-
+    raise ImportError('Failed to import `inference_detector` and '
+                      '`init_detector` from `mmdet.apis`. These apis are '
+                      'required in this demo! ')
 
 logging.basicConfig(level=logging.DEBUG)
 logger = logging.getLogger(__name__)
diff --git a/mmaction/core/bbox/assigners/max_iou_assigner_ava.py b/mmaction/core/bbox/assigners/max_iou_assigner_ava.py
index a7c50e962d..3f5439bbbe 100644
--- a/mmaction/core/bbox/assigners/max_iou_assigner_ava.py
+++ b/mmaction/core/bbox/assigners/max_iou_assigner_ava.py
@@ -1,8 +1,6 @@
 # Copyright (c) OpenMMLab. All rights reserved.
 import torch
 
-from mmaction.utils import import_module_error_class
-
 try:
     from mmdet.core.bbox import AssignResult, MaxIoUAssigner
     from mmdet.core.bbox.builder import BBOX_ASSIGNERS
@@ -134,6 +132,11 @@ def assign_wrt_overlaps(self, overlaps, gt_labels=None):
 else:
     # define an empty class, so that can be imported
 
-    @import_module_error_class('mmdet')
     class MaxIoUAssignerAVA:
-        pass
+
+        def __init__(self, *args, **kwargs):
+            raise ImportError(
+                'Failed to import `AssignResult`, `MaxIoUAssigner` from '
+                '`mmdet.core.bbox` or failed to import `BBOX_ASSIGNERS` from '
+                '`mmdet.core.bbox.builder`. The class `MaxIoUAssignerAVA` is '
+                'invalid. ')
diff --git a/mmaction/models/backbones/resnet_tin.py b/mmaction/models/backbones/resnet_tin.py
index dfada1614d..f5c8307c8d 100644
--- a/mmaction/models/backbones/resnet_tin.py
+++ b/mmaction/models/backbones/resnet_tin.py
@@ -2,18 +2,9 @@
 import torch
 import torch.nn as nn
 
-from mmaction.utils import import_module_error_func
 from ..builder import BACKBONES
 from .resnet_tsm import ResNetTSM
 
-try:
-    from mmcv.ops import tin_shift
-except (ImportError, ModuleNotFoundError):
-
-    @import_module_error_func('mmcv-full')
-    def tin_shift(*args, **kwargs):
-        pass
-
 
 def linear_sampler(data, offset):
     """Differentiable Temporal-wise Frame Sampling, which is essentially a
@@ -38,6 +29,13 @@ def linear_sampler(data, offset):
 
     # data, data0, data1: [N, num_segments, C, H * W]
     data = data.view(n, t, c, h * w).contiguous()
+
+    try:
+        from mmcv.ops import tin_shift
+    except (ImportError, ModuleNotFoundError):
+        raise ImportError('Failed to import `tin_shift` from `mmcv.ops`. You '
+                          'will be unable to use TIN. ')
+
     data0 = tin_shift(data, offset0)
     data1 = tin_shift(data, offset1)
diff --git a/mmaction/models/builder.py b/mmaction/models/builder.py
index 08e06b1bcc..86a5cef146 100644
--- a/mmaction/models/builder.py
+++ b/mmaction/models/builder.py
@@ -4,8 +4,6 @@
 from mmcv.cnn import MODELS as MMCV_MODELS
 from mmcv.utils import Registry
 
-from mmaction.utils import import_module_error_func
-
 MODELS = Registry('models', parent=MMCV_MODELS)
 BACKBONES = MODELS
 NECKS = MODELS
@@ -20,9 +18,11 @@
     # Define an empty registry and building func, so that can import
     DETECTORS = MODELS
 
-    @import_module_error_func('mmdet')
     def build_detector(cfg, train_cfg, test_cfg):
-        pass
+        warnings.warn(
+            'Failed to import `DETECTORS`, `build_detector` from '
+            '`mmdet.models.builder`. You will be unable to register or build '
+            'a spatio-temporal detection model. ')
 
 
 def build_backbone(cfg):
diff --git a/mmaction/models/heads/roi_head.py b/mmaction/models/heads/roi_head.py
index 2969d6fb40..2a06a25867 100644
--- a/mmaction/models/heads/roi_head.py
+++ b/mmaction/models/heads/roi_head.py
@@ -2,7 +2,6 @@
 import numpy as np
 
 from mmaction.core.bbox import bbox2result
-from mmaction.utils import import_module_error_class
 
 try:
     from mmdet.core.bbox import bbox2roi
@@ -118,6 +117,12 @@ def simple_test_bboxes(self,
         return det_bboxes, det_labels
 else:
     # Just define an empty class, so that __init__ can import it.
-    @import_module_error_class('mmdet')
     class AVARoIHead:
-        pass
+
+        def __init__(self, *args, **kwargs):
+            raise ImportError(
+                'Failed to import `bbox2roi` from `mmdet.core.bbox`, '
+                'or failed to import `HEADS` from `mmdet.models`, '
+                'or failed to import `StandardRoIHead` from '
+                '`mmdet.models.roi_heads`. You will be unable to use '
+                '`AVARoIHead`. ')
diff --git a/mmaction/models/roi_extractors/single_straight3d.py b/mmaction/models/roi_extractors/single_straight3d.py
index a06f5ec0ed..fb0c1542db 100644
--- a/mmaction/models/roi_extractors/single_straight3d.py
+++ b/mmaction/models/roi_extractors/single_straight3d.py
@@ -3,21 +3,6 @@
 import torch.nn as nn
 import torch.nn.functional as F
 
-from mmaction.utils import import_module_error_class
-
-try:
-    from mmcv.ops import RoIAlign, RoIPool
-except (ImportError, ModuleNotFoundError):
-
-    @import_module_error_class('mmcv-full')
-    class RoIAlign(nn.Module):
-        pass
-
-    @import_module_error_class('mmcv-full')
-    class RoIPool(nn.Module):
-        pass
-
-
 try:
     from mmdet.models import ROI_EXTRACTORS
     mmdet_imported = True
@@ -75,6 +60,13 @@ def __init__(self,
 
         self.with_global = with_global
 
+        try:
+            from mmcv.ops import RoIAlign, RoIPool
+        except (ImportError, ModuleNotFoundError):
+            raise ImportError('Failed to import `RoIAlign` and `RoIPool` from '
+                              '`mmcv.ops`. The two modules will be used in '
+                              '`SingleRoIExtractor3D`! ')
+
         if self.roi_layer_type == 'RoIPool':
             self.roi_layer = RoIPool(self.output_size, self.spatial_scale)
         else:
diff --git a/mmaction/utils/__init__.py b/mmaction/utils/__init__.py
index f6d43efc41..393a1d1325 100644
--- a/mmaction/utils/__init__.py
+++ b/mmaction/utils/__init__.py
@@ -1,6 +1,5 @@
 # Copyright (c) OpenMMLab. All rights reserved.
 from .collect_env import collect_env
-from .decorators import import_module_error_class, import_module_error_func
 from .gradcam_utils import GradCAM
 from .logger import get_root_logger
 from .misc import get_random_string, get_shm_dir, get_thread_id
@@ -10,7 +9,6 @@
 
 __all__ = [
     'get_root_logger', 'collect_env', 'get_random_string', 'get_thread_id',
-    'get_shm_dir', 'GradCAM', 'PreciseBNHook', 'import_module_error_class',
-    'import_module_error_func', 'register_module_hooks',
+    'get_shm_dir', 'GradCAM', 'PreciseBNHook', 'register_module_hooks',
     'setup_multi_processes'
 ]
diff --git a/mmaction/utils/decorators.py b/mmaction/utils/decorators.py
deleted file mode 100644
index 9da6b48719..0000000000
--- a/mmaction/utils/decorators.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright (c) OpenMMLab. All rights reserved.
-from types import MethodType
-
-
-def import_module_error_func(module_name):
-    """When a function is imported incorrectly due to a missing module, raise
-    an import error when the function is called."""
-
-    def decorate(func):
-
-        def new_func(*args, **kwargs):
-            raise ImportError(
-                f'Please install {module_name} to use {func.__name__}. '
-                'For OpenMMLAB codebases, you may need to install mmcv-full '
-                'first before you install the particular codebase. ')
-
-        return new_func
-
-    return decorate
-
-
-def import_module_error_class(module_name):
-    """When a class is imported incorrectly due to a missing module, raise an
-    import error when the class is instantiated."""
-
-    def decorate(cls):
-
-        def import_error_init(*args, **kwargs):
-            raise ImportError(
-                f'Please install {module_name} to use {cls.__name__}. '
-                'For OpenMMLAB codebases, you may need to install mmcv-full '
-                'first before you install the particular codebase. ')
-
-        cls.__init__ = MethodType(import_error_init, cls)
-        return cls
-
-    return decorate
diff --git a/tests/test_utils/test_bbox.py b/tests/test_utils/test_bbox.py
index 41379efb3a..8f5e0ab7dd 100644
--- a/tests/test_utils/test_bbox.py
+++ b/tests/test_utils/test_bbox.py
@@ -7,22 +7,16 @@
 
 from mmaction.core.bbox import bbox2result, bbox_target
 from mmaction.datasets import AVADataset
-from mmaction.utils import import_module_error_func
-
-try:
-    from mmdet.core.bbox import build_assigner, build_sampler
-except (ImportError, ModuleNotFoundError):
-
-    @import_module_error_func('mmdet')
-    def build_assigner(*args, **kwargs):
-        pass
-
-    @import_module_error_func('mmdet')
-    def build_sampler(*args, **kwargs):
-        pass
 
 
 def test_assigner_sampler():
+    try:
+        from mmdet.core.bbox import build_assigner, build_sampler
+    except (ImportError, ModuleNotFoundError):
+        raise ImportError(
+            'Failed to import `build_assigner` and `build_sampler` '
+            'from `mmdet.core.bbox`. The two APIs are required for '
+            'the testing in `test_bbox.py`! ')
     data_prefix = osp.normpath(
         osp.join(osp.dirname(__file__), '../data/eval_detection'))
     ann_file = osp.join(data_prefix, 'gt.csv')
diff --git a/tests/test_utils/test_decorator.py b/tests/test_utils/test_decorator.py
deleted file mode 100644
index b962bb1214..0000000000
--- a/tests/test_utils/test_decorator.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright (c) OpenMMLab. All rights reserved.
-import pytest
-
-from mmaction.utils import import_module_error_class, import_module_error_func
-
-
-def test_import_module_error_class():
-
-    @import_module_error_class('mmdet')
-    class ExampleClass:
-        pass
-
-    with pytest.raises(ImportError):
-        ExampleClass()
-
-    @import_module_error_class('mmdet')
-    class ExampleClass:
-
-        def __init__(self, a, b=3):
-            self.c = a + b
-
-    with pytest.raises(ImportError):
-        ExampleClass(4)
-
-
-def test_import_module_error_func():
-
-    @import_module_error_func('_add')
-    def ExampleFunc(a, b):
-        return a + b
-
-    with pytest.raises(ImportError):
-        ExampleFunc(3, 4)
diff --git a/tools/data/skeleton/ntu_pose_extraction.py b/tools/data/skeleton/ntu_pose_extraction.py
index 42556bfdc8..7ce37cedb2 100644
--- a/tools/data/skeleton/ntu_pose_extraction.py
+++ b/tools/data/skeleton/ntu_pose_extraction.py
@@ -6,7 +6,6 @@
 import random as rd
 import shutil
 import string
-import warnings
 from collections import defaultdict
 
 import cv2
@@ -15,11 +14,17 @@
 
 try:
     from mmdet.apis import inference_detector, init_detector
+except (ImportError, ModuleNotFoundError):
+    raise ImportError('Failed to import `inference_detector` and '
+                      '`init_detector` from `mmdet.apis`. These apis are '
+                      'required in this script! ')
+
+try:
     from mmpose.apis import inference_top_down_pose_model, init_pose_model
-except ImportError:
-    warnings.warn(
-        'Please install MMDet and MMPose for NTURGB+D pose extraction.'
-    )  # noqa: E501
+except (ImportError, ModuleNotFoundError):
+    raise ImportError('Failed to import `inference_top_down_pose_model` and '
+                      '`init_pose_model` from `mmpose.apis`. These apis are '
+                      'required in this script! ')
 
 mmdet_root = ''
 mmpose_root = ''
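The change repeated across patch 385 — import an optional dependency eagerly and raise a message that names both the missing API and where it is needed — can be factored out. A minimal sketch under that assumption; the `require` helper below is hypothetical and not part of MMAction2:

```python
import importlib


def require(module_path, names, purpose):
    """Import `names` from `module_path` or raise a descriptive error."""
    try:
        module = importlib.import_module(module_path)
        return [getattr(module, name) for name in names]
    except (ImportError, ModuleNotFoundError, AttributeError):
        raise ImportError(
            f'Failed to import {names} from `{module_path}`. '
            f'These apis are required {purpose}!')


# Example: fail fast with a clear message if mmdet is missing.
inference_detector, init_detector = require(
    'mmdet.apis', ['inference_detector', 'init_detector'], 'in this demo')
```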
From cf69cad01cc4271daa65779651cfa4f539bc157f Mon Sep 17 00:00:00 2001
From: Haodong Duan
Date: Wed, 30 Mar 2022 16:06:43 +0800
Subject: [PATCH 386/414] [Update] update arg names (#1548)

---
 docs/getting_started.md               | 4 ++--
 docs_zh_CN/getting_started.md         | 4 ++--
 tools/misc/clip_feature_extraction.py | 2 +-
 tools/test.py                         | 2 +-
 tools/train.py                        | 2 +-
 5 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/docs/getting_started.md b/docs/getting_started.md
index fef4b05e4a..66a7e46663 100644
--- a/docs/getting_started.md
+++ b/docs/getting_started.md
@@ -72,12 +72,12 @@ You can use the following commands to test a dataset.
 # single-gpu testing
 python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] \
     [--gpu-collect] [--tmpdir ${TMPDIR}] [--options ${OPTIONS}] [--average-clips ${AVG_TYPE}] \
-    [--launcher ${JOB_LAUNCHER}] [--local_rank ${LOCAL_RANK}] [--onnx] [--tensorrt]
+    [--launcher ${JOB_LAUNCHER}] [--local-rank ${LOCAL_RANK}] [--onnx] [--tensorrt]
 
 # multi-gpu testing
 ./tools/dist_test.sh ${CONFIG_FILE} ${CHECKPOINT_FILE} ${GPU_NUM} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] \
     [--gpu-collect] [--tmpdir ${TMPDIR}] [--options ${OPTIONS}] [--average-clips ${AVG_TYPE}] \
-    [--launcher ${JOB_LAUNCHER}] [--local_rank ${LOCAL_RANK}]
+    [--launcher ${JOB_LAUNCHER}] [--local-rank ${LOCAL_RANK}]
 ```
diff --git a/docs_zh_CN/getting_started.md b/docs_zh_CN/getting_started.md
index 9da0aa8065..577e0cad88 100644
--- a/docs_zh_CN/getting_started.md
+++ b/docs_zh_CN/getting_started.md
@@ -71,12 +71,12 @@ MMAction2 支持仅使用 CPU 进行测试。然而,这样做的速度**非常
 # 单 GPU 测试
 python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] \
     [--gpu-collect] [--tmpdir ${TMPDIR}] [--options ${OPTIONS}] [--average-clips ${AVG_TYPE}] \
-    [--launcher ${JOB_LAUNCHER}] [--local_rank ${LOCAL_RANK}] [--onnx] [--tensorrt]
+    [--launcher ${JOB_LAUNCHER}] [--local-rank ${LOCAL_RANK}] [--onnx] [--tensorrt]
 
 # 多 GPU 测试
 ./tools/dist_test.sh ${CONFIG_FILE} ${CHECKPOINT_FILE} ${GPU_NUM} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] \
     [--gpu-collect] [--tmpdir ${TMPDIR}] [--options ${OPTIONS}] [--average-clips ${AVG_TYPE}] \
-    [--launcher ${JOB_LAUNCHER}] [--local_rank ${LOCAL_RANK}]
+    [--launcher ${JOB_LAUNCHER}] [--local-rank ${LOCAL_RANK}]
 ```
diff --git a/tools/misc/clip_feature_extraction.py b/tools/misc/clip_feature_extraction.py
index 1829bf9b5c..a6e37b3c3a 100644
--- a/tools/misc/clip_feature_extraction.py
+++ b/tools/misc/clip_feature_extraction.py
@@ -59,7 +59,7 @@ def parse_args():
         choices=['none', 'pytorch', 'slurm', 'mpi'],
         default='none',
         help='job launcher')
-    parser.add_argument('--local_rank', type=int, default=0)
+    parser.add_argument('--local-rank', type=int, default=0)
     args = parser.parse_args()
     if 'LOCAL_RANK' not in os.environ:
         os.environ['LOCAL_RANK'] = str(args.local_rank)
diff --git a/tools/test.py b/tools/test.py
index 31514498cf..9152c90917 100644
--- a/tools/test.py
+++ b/tools/test.py
@@ -89,7 +89,7 @@ def parse_args():
         choices=['none', 'pytorch', 'slurm', 'mpi'],
         default='none',
         help='job launcher')
-    parser.add_argument('--local_rank', type=int, default=0)
+    parser.add_argument('--local-rank', type=int, default=0)
     parser.add_argument(
         '--onnx',
         action='store_true',
diff --git a/tools/train.py b/tools/train.py
index d404980464..6461e7030a 100644
--- a/tools/train.py
+++ b/tools/train.py
@@ -74,7 +74,7 @@ def parse_args():
         choices=['none', 'pytorch', 'slurm', 'mpi'],
         default='none',
         help='job launcher')
-    parser.add_argument('--local_rank', type=int, default=0)
+    parser.add_argument('--local-rank', type=int, default=0)
     args = parser.parse_args()
     if 'LOCAL_RANK' not in os.environ:
         os.environ['LOCAL_RANK'] = str(args.local_rank)

From 1cb074857a63e840fe1e1228150a2a58e3f00161 Mon Sep 17 00:00:00 2001
From: Haodong Duan
Date: Wed, 30 Mar 2022 16:17:50 +0800
Subject: [PATCH 387/414] [Update] Update install commands (#1549)

---
 README.md       | 16 +++++++++++++++-
 README_zh-CN.md | 15 ++++++++++++++-
 2 files changed, 29 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index 24354c7a91..82fc792379 100644
--- a/README.md
+++ b/README.md
@@ -83,7 +83,21 @@ The master branch works with **PyTorch 1.3+**.
 
 ## Installation
 
-Please refer to [install.md](docs/install.md) for installation.
+MMAction2 depends on [PyTorch](https://pytorch.org/)], [MMCV](https://github.com/open-mmlab/mmcv), [MMDetection](https://github.com/open-mmlab/mmdetection) (optional), and [MMPose](https://github.com/open-mmlab/mmdetection)(optional).
+Below are quick steps for installation.
+Please refer to [install.md](docs/install.md) for more detailed instruction.
+
+```shell
+conda create -n open-mmlab python=3.8 pytorch=1.10 cudatoolkit=11.3 torchvision -c pytorch -y
+conda activate open-mmlab
+pip3 install openmim
+mim install mmcv-full
+mim install mmdet  # optional
+mim install mmpose  # optional
+git clone https://github.com/open-mmlab/mmaction2.git
+cd mmaction2
+pip3 install -e .
+```
 
 ## Get Started
diff --git a/README_zh-CN.md b/README_zh-CN.md
index 83b4bfdce2..243eddc52f 100644
--- a/README_zh-CN.md
+++ b/README_zh-CN.md
@@ -82,7 +82,20 @@ v0.22.0 版本已于 2022 年 3 月 5 日发布,可通过查阅 [更新日志]
 
 ## 安装
 
-请参考 [安装指南](/docs_zh_CN/install.md) 进行安装
+MMAction2 依赖 [PyTorch](https://pytorch.org/)], [MMCV](https://github.com/open-mmlab/mmcv), [MMDetection](https://github.com/open-mmlab/mmdetection)(可选), [MMPose](https://github.com/open-mmlab/mmpose)(可选),以下是安装的简要步骤。
+更详细的安装指南请参考 [install.md](docs_zh_CN/install.md)。
+
+```shell
+conda create -n open-mmlab python=3.8 pytorch=1.10 cudatoolkit=11.3 torchvision -c pytorch -y
+conda activate open-mmlab
+pip3 install openmim
+mim install mmcv-full
+mim install mmdet  # 可选
+mim install mmpose  # 可选
+git clone https://github.com/open-mmlab/mmaction2.git
+cd mmaction2
+pip3 install -e .
+```
 
 ## 教程

From 6017beaec7fad8ee20e889d1211bb8b30f2646dc Mon Sep 17 00:00:00 2001
From: gengenkai <30782254+gengenkai@users.noreply.github.com>
Date: Fri, 1 Apr 2022 20:05:56 +0800
Subject: [PATCH 388/414] [Docs] Update changelog v0.23.0 (#1555)

* master

* master 0721

* add README

* 1231 bump_version

* 0207 slowfast steplr

* 0207

* v0.23.0 changelog
---
 docs/changelog.md | 34 ++++++++++++++++++++++++++++++++++
 1 file changed, 34 insertions(+)

diff --git a/docs/changelog.md b/docs/changelog.md
index 470fa3ff90..5acdf78ae2 100644
--- a/docs/changelog.md
+++ b/docs/changelog.md
@@ -1,5 +1,39 @@
 ## Changelog
 
+### 0.23.0 (04/01/2022)
+
+**Highlights**
+
+- Support different seeds
+- Provide multi-node training & testing script
+- Update error log
+
+**New Features**
+
+- Support different seeds([#1502](https://github.com/open-mmlab/mmaction2/pull/1502))
+- Provide multi-node training & testing script([#1521](https://github.com/open-mmlab/mmaction2/pull/1521))
+- Update error log([#1546](https://github.com/open-mmlab/mmaction2/pull/1546))
+
+**Documentations**
+
+- Update gpus in Slowfast readme([#1497](https://github.com/open-mmlab/mmaction2/pull/1497))
+- Fix work_dir in multigrid config([#1498](https://github.com/open-mmlab/mmaction2/pull/1498))
+- Add sub bn docs([#1503](https://github.com/open-mmlab/mmaction2/pull/1503))
+- Add shortcycle sampler docs([#1513](https://github.com/open-mmlab/mmaction2/pull/1513))
+- Update Windows Declaration([#1520](https://github.com/open-mmlab/mmaction2/pull/1520))
+- Update the link for ST-GCN([#1544](https://github.com/open-mmlab/mmaction2/pull/1544))
+- Update install commands([#1549](https://github.com/open-mmlab/mmaction2/pull/1549))
+
+**Bug and Typo Fixes**
+
+- Update colab tutorial install cmds([#1522](https://github.com/open-mmlab/mmaction2/pull/1522))
+- Fix num_iters_per_epoch in analyze_logs.py([#1530](https://github.com/open-mmlab/mmaction2/pull/1530))
+- Fix distributed_sampler([#1532](https://github.com/open-mmlab/mmaction2/pull/1532))
+- Fix cd dir error([#1545](https://github.com/open-mmlab/mmaction2/pull/1545))
+- Update arg names([#1548](https://github.com/open-mmlab/mmaction2/pull/1548))
+
+**ModelZoo**
+
 ### 0.22.0 (03/05/2022)
 
 **Highlights**

From f5b7a97afc90d1b002654d036c042ca483009e85 Mon Sep 17 00:00:00 2001
From: gengenkai <30782254+gengenkai@users.noreply.github.com>
Date: Fri, 1 Apr 2022 20:06:31 +0800
Subject: [PATCH 389/414] [Release] Bump Version to 0.23.0 (#1556)

* master

* master 0721

* add README

* 1231 bump_version

* 0207 slowfast steplr

* 0207

* version 0.23.0

* version 0.23.0
---
 README.md               | 2 +-
 README_zh-CN.md         | 2 +-
 docker/serve/Dockerfile | 2 +-
 mmaction/version.py     | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index 82fc792379..dbbf6e13f6 100644
--- a/README.md
+++ b/README.md
@@ -79,7 +79,7 @@ The master branch works with **PyTorch 1.3+**.
 - (2021-10-25) We provide a [guide](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md) on how to train PoseC3D with custom datasets, [bit-scientist](https://github.com/bit-scientist) authored this PR!
 - (2021-10-16) We support **PoseC3D** on UCF101 and HMDB51, achieves 87.0% and 69.3% Top-1 accuracy with 2D skeletons only. Pre-extracted 2D skeletons are also available.
 
-**Release**: v0.22.0 was released in 05/03/2022. Please refer to [changelog.md](docs/changelog.md) for details and release history.
+**Release**: v0.23.0 was released in 01/04/2022. Please refer to [changelog.md](docs/changelog.md) for details and release history.
 
 ## Installation
diff --git a/README_zh-CN.md b/README_zh-CN.md
index 243eddc52f..25ddcf831c 100644
--- a/README_zh-CN.md
+++ b/README_zh-CN.md
@@ -78,7 +78,7 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLa
 - (2021-10-25) 提供使用自定义数据集训练 PoseC3D 的 [教程](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md),此 PR 由用户 [bit-scientist](https://github.com/bit-scientist) 完成!
 - (2021-10-16) 在 UCF101, HMDB51 上支持 **PoseC3D**,仅用 2D 关键点就可分别达到 87.0% 和 69.3% 的识别准确率。两数据集的预提取骨架特征可以公开下载。
 
-v0.22.0 版本已于 2022 年 3 月 5 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史
+v0.23.0 版本已于 2022 年 4 月 1 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史
 
 ## 安装
diff --git a/docker/serve/Dockerfile b/docker/serve/Dockerfile
index 20fff4ba50..e5e51618e3 100644
--- a/docker/serve/Dockerfile
+++ b/docker/serve/Dockerfile
@@ -4,7 +4,7 @@ ARG CUDNN="7"
 FROM pytorch/pytorch:${PYTORCH}-cuda${CUDA}-cudnn${CUDNN}-devel
 
 ARG MMCV="1.3.8"
-ARG MMACTION="0.22.0"
+ARG MMACTION="0.23.0"
 
 ENV PYTHONUNBUFFERED TRUE
diff --git a/mmaction/version.py b/mmaction/version.py
index a8e7bbac41..85c6bf45fd 100644
--- a/mmaction/version.py
+++ b/mmaction/version.py
@@ -1,6 +1,6 @@
 # Copyright (c) Open-MMLab. All rights reserved.
 
-__version__ = '0.22.0'
+__version__ = '0.23.0'
 
 
 def parse_version_info(version_str):
From 6ea00d23d66fd491bbf19317b4677bf6f396d26f Mon Sep 17 00:00:00 2001
From: Haodong Duan
Date: Sat, 2 Apr 2022 17:39:22 +0800
Subject: [PATCH 390/414] [Fix] Fix local rank (#1558)

* update

* update
---
 docs/getting_started.md               | 4 ++--
 docs_zh_CN/getting_started.md         | 4 ++--
 tools/misc/clip_feature_extraction.py | 2 +-
 tools/test.py                         | 2 +-
 tools/train.py                        | 2 +-
 5 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/docs/getting_started.md b/docs/getting_started.md
index 66a7e46663..fef4b05e4a 100644
--- a/docs/getting_started.md
+++ b/docs/getting_started.md
@@ -72,12 +72,12 @@ You can use the following commands to test a dataset.
 # single-gpu testing
 python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] \
     [--gpu-collect] [--tmpdir ${TMPDIR}] [--options ${OPTIONS}] [--average-clips ${AVG_TYPE}] \
-    [--launcher ${JOB_LAUNCHER}] [--local-rank ${LOCAL_RANK}] [--onnx] [--tensorrt]
+    [--launcher ${JOB_LAUNCHER}] [--local_rank ${LOCAL_RANK}] [--onnx] [--tensorrt]
 
 # multi-gpu testing
 ./tools/dist_test.sh ${CONFIG_FILE} ${CHECKPOINT_FILE} ${GPU_NUM} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] \
     [--gpu-collect] [--tmpdir ${TMPDIR}] [--options ${OPTIONS}] [--average-clips ${AVG_TYPE}] \
-    [--launcher ${JOB_LAUNCHER}] [--local-rank ${LOCAL_RANK}]
+    [--launcher ${JOB_LAUNCHER}] [--local_rank ${LOCAL_RANK}]
 ```
diff --git a/docs_zh_CN/getting_started.md b/docs_zh_CN/getting_started.md
index 577e0cad88..9da0aa8065 100644
--- a/docs_zh_CN/getting_started.md
+++ b/docs_zh_CN/getting_started.md
@@ -71,12 +71,12 @@ MMAction2 支持仅使用 CPU 进行测试。然而,这样做的速度**非常
 # 单 GPU 测试
 python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] \
     [--gpu-collect] [--tmpdir ${TMPDIR}] [--options ${OPTIONS}] [--average-clips ${AVG_TYPE}] \
-    [--launcher ${JOB_LAUNCHER}] [--local-rank ${LOCAL_RANK}] [--onnx] [--tensorrt]
+    [--launcher ${JOB_LAUNCHER}] [--local_rank ${LOCAL_RANK}] [--onnx] [--tensorrt]
 
 # 多 GPU 测试
 ./tools/dist_test.sh ${CONFIG_FILE} ${CHECKPOINT_FILE} ${GPU_NUM} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] \
     [--gpu-collect] [--tmpdir ${TMPDIR}] [--options ${OPTIONS}] [--average-clips ${AVG_TYPE}] \
-    [--launcher ${JOB_LAUNCHER}] [--local-rank ${LOCAL_RANK}]
+    [--launcher ${JOB_LAUNCHER}] [--local_rank ${LOCAL_RANK}]
 ```
diff --git a/tools/misc/clip_feature_extraction.py b/tools/misc/clip_feature_extraction.py
index a6e37b3c3a..1829bf9b5c 100644
--- a/tools/misc/clip_feature_extraction.py
+++ b/tools/misc/clip_feature_extraction.py
@@ -59,7 +59,7 @@ def parse_args():
         choices=['none', 'pytorch', 'slurm', 'mpi'],
         default='none',
         help='job launcher')
-    parser.add_argument('--local-rank', type=int, default=0)
+    parser.add_argument('--local_rank', type=int, default=0)
     args = parser.parse_args()
     if 'LOCAL_RANK' not in os.environ:
         os.environ['LOCAL_RANK'] = str(args.local_rank)
diff --git a/tools/test.py b/tools/test.py
index 9152c90917..31514498cf 100644
--- a/tools/test.py
+++ b/tools/test.py
@@ -89,7 +89,7 @@ def parse_args():
         choices=['none', 'pytorch', 'slurm', 'mpi'],
         default='none',
         help='job launcher')
-    parser.add_argument('--local-rank', type=int, default=0)
+    parser.add_argument('--local_rank', type=int, default=0)
     parser.add_argument(
         '--onnx',
        action='store_true',
diff --git a/tools/train.py b/tools/train.py
index 6461e7030a..d404980464 100644
--- a/tools/train.py
+++ b/tools/train.py
@@ -74,7 +74,7 @@ def parse_args():
         choices=['none', 'pytorch', 'slurm', 'mpi'],
         default='none',
         help='job launcher')
-    parser.add_argument('--local-rank', type=int, default=0)
+    parser.add_argument('--local_rank', type=int, default=0)
     args = parser.parse_args()
     if 'LOCAL_RANK' not in os.environ:
         os.environ['LOCAL_RANK'] = str(args.local_rank)

From fbb86849a48cfc8e49ecb72dbd77d992c8518506 Mon Sep 17 00:00:00 2001
From: Rejnald Lleshi <46654505+rlleshi@users.noreply.github.com>
Date: Thu, 7 Apr 2022 08:47:23 +0200
Subject: [PATCH 391/414] [Improvement] Add kwargs to STGCNHead (#1553)

---
 mmaction/models/heads/stgcn_head.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/mmaction/models/heads/stgcn_head.py b/mmaction/models/heads/stgcn_head.py
index 065552cf29..1961b46421 100644
--- a/mmaction/models/heads/stgcn_head.py
+++ b/mmaction/models/heads/stgcn_head.py
@@ -18,6 +18,8 @@ class STGCNHead(BaseHead):
         spatial_type (str): Pooling type in spatial dimension. Default: 'avg'.
         num_person (int): Number of person. Default: 2.
         init_std (float): Std value for Initiation. Default: 0.01.
+        kwargs (dict, optional): Any keyword argument to be used to initialize
+            the head.
     """
 
     def __init__(self,
@@ -26,8 +28,9 @@ def __init__(self,
                  loss_cls=dict(type='CrossEntropyLoss'),
                  spatial_type='avg',
                  num_person=2,
-                 init_std=0.01):
-        super().__init__(num_classes, in_channels, loss_cls)
+                 init_std=0.01,
+                 **kwargs):
+        super().__init__(num_classes, in_channels, loss_cls, **kwargs)
 
         self.spatial_type = spatial_type
         self.in_channels = in_channels
From: "S. Amin" Date: Fri, 15 Apr 2022 04:19:50 -0400 Subject: [PATCH 395/414] [Doc] Update Flow Extraction Document (#1541) --- tools/data/ucf101/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/data/ucf101/README.md b/tools/data/ucf101/README.md index abac25f0c7..9abaff1b90 100644 --- a/tools/data/ucf101/README.md +++ b/tools/data/ucf101/README.md @@ -67,7 +67,7 @@ If you didn't install denseflow, you can still extract RGB frames using OpenCV b bash extract_rgb_frames_opencv.sh ``` -If both are required, run the following script to extract frames using "tvl1" algorithm. +If Optical Flow is also required, run the following script to extract flow using "tvl1" algorithm. ```shell bash extract_frames.sh From 660ec046039dc9a39d98e8bbbc6d260cbb7a9957 Mon Sep 17 00:00:00 2001 From: wxDai Date: Sat, 16 Apr 2022 14:08:46 +0800 Subject: [PATCH 396/414] [Docs] Fix docs_zh_CN/demo.md & docs_zh_CN/getting_started.md (#1587) * Update getting_started.md * Update getting_started.md * Update demo.md --- docs_zh_CN/demo.md | 4 ++-- docs_zh_CN/getting_started.md | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/docs_zh_CN/demo.md b/docs_zh_CN/demo.md index d40d991c5d..95969ae385 100644 --- a/docs_zh_CN/demo.md +++ b/docs_zh_CN/demo.md @@ -20,7 +20,7 @@ MMAction2 提供如下脚本以预测视频的动作标签。为得到 [0, 1] ```shell python demo/demo.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${VIDEO_FILE} {LABEL_FILE} [--use-frames] \ - [--device ${DEVICE_TYPE}] [--fps {FPS}] [--font-size {FONT_SIZE}] [--font-color {FONT_COLOR}] \ + [--device ${DEVICE_TYPE}] [--fps {FPS}] [--font-scale {FONT_SCALE}] [--font-color {FONT_COLOR}] \ [--target-resolution ${TARGET_RESOLUTION}] [--resize-algorithm {RESIZE_ALGORITHM}] [--out-filename {OUT_FILE}] ``` @@ -29,7 +29,7 @@ python demo/demo.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${VIDEO_FILE} {LABEL_FILE} - `--use-frames`: 如指定,代表使用帧目录作为输入;否则代表使用视频作为输入。 - `DEVICE_TYPE`: 指定脚本运行设备,支持 cuda 设备(如 `cuda:0`)或 cpu(`cpu`)。默认为 `cuda:0`。 - `FPS`: 使用帧目录作为输入时,代表输入的帧率。默认为 30。 -- `FONT_SIZE`: 输出视频上的字体大小。默认为 20。 +- `FONT_SCALE`: 输出视频上的字体缩放比例。默认为 0.5。 - `FONT_COLOR`: 输出视频上的字体颜色,默认为白色( `white`)。 - `TARGET_RESOLUTION`: 输出视频的分辨率,如未指定,使用输入视频的分辨率。 - `RESIZE_ALGORITHM`: 缩放视频时使用的插值方法,默认为 `bicubic`。 diff --git a/docs_zh_CN/getting_started.md b/docs_zh_CN/getting_started.md index 9da0aa8065..6c53c74ac2 100644 --- a/docs_zh_CN/getting_started.md +++ b/docs_zh_CN/getting_started.md @@ -151,7 +151,7 @@ model = init_recognizer(config_file, checkpoint_file, device=device) # 测试单个视频并显示其结果 video = 'demo/demo.mp4' labels = 'tools/data/kinetics/label_map_k400.txt' -results = inference_recognizer(model, video, labels) +results = inference_recognizer(model, video) # 显示结果 labels = open('tools/data/kinetics/label_map_k400.txt').readlines() @@ -179,12 +179,12 @@ device = 'cuda:0' # or 'cpu' device = torch.device(device) # 根据配置文件和检查点来建立模型 -model = init_recognizer(config_file, checkpoint_file, device=device, use_frames=True) +model = init_recognizer(config_file, checkpoint_file, device=device) # 测试单个视频的帧文件夹并显示其结果 video = 'SOME_DIR_PATH/' labels = 'tools/data/kinetics/label_map_k400.txt' -results = inference_recognizer(model, video, labels, use_frames=True) +results = inference_recognizer(model, video) # 显示结果 labels = open('tools/data/kinetics/label_map_k400.txt').readlines() @@ -217,7 +217,7 @@ model = init_recognizer(config_file, checkpoint_file, device=device) # 测试单个视频的 url 并显示其结果 video = 'https://www.learningcontainer.com/wp-content/uploads/2020/05/sample-mp4-file.mp4' labels = 
'tools/data/kinetics/label_map_k400.txt' -results = inference_recognizer(model, video, labels) +results = inference_recognizer(model, video) # 显示结果 labels = open('tools/data/kinetics/label_map_k400.txt').readlines() From 255bbc08634c21e6400af7b9d1a470b52285ebcd Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Wed, 20 Apr 2022 19:32:44 +0800 Subject: [PATCH 397/414] [Doc] Remove recommonmark (#1595) --- requirements/docs.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/requirements/docs.txt b/requirements/docs.txt index 52af18d4fd..96a033138d 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -3,7 +3,6 @@ einops myst-parser opencv-python -e git+https://github.com/gaotongxiao/pytorch_sphinx_theme.git#egg=pytorch_sphinx_theme -recommonmark scipy sphinx==4.0.2 sphinx_copybutton From c87482f6b53e839bc00506d474b38f797db0fd8f Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Sat, 23 Apr 2022 22:12:40 +0800 Subject: [PATCH 398/414] [Fix] Fix issue #1601 (#1603) --- mmaction/apis/inference.py | 1 + 1 file changed, 1 insertion(+) diff --git a/mmaction/apis/inference.py b/mmaction/apis/inference.py index 8aaad8f5e2..f303d20ed8 100644 --- a/mmaction/apis/inference.py +++ b/mmaction/apis/inference.py @@ -122,6 +122,7 @@ def inference_recognizer(model, video, outputs=None, as_tensor=True, **kwargs): for i in range(len(test_pipeline)): if 'Decode' in test_pipeline[i]['type']: test_pipeline[i] = dict(type='ArrayDecode') + test_pipeline = [x for x in test_pipeline if 'Init' not in x['type']] if input_flag == 'video': data = dict(filename=video, label=-1, start_index=0, modality='RGB') if 'Init' not in test_pipeline[0]['type']: From 0bb5d92d9afec630f2cc36de18bf592959050ac9 Mon Sep 17 00:00:00 2001 From: makecent <42603768+makecent@users.noreply.github.com> Date: Wed, 27 Apr 2022 13:00:47 +0800 Subject: [PATCH 399/414] [Fix] Fix the log error when IterBasedRunner is used (#1606) --- mmaction/apis/train.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmaction/apis/train.py b/mmaction/apis/train.py index 5498a286e9..ed93287b54 100644 --- a/mmaction/apis/train.py +++ b/mmaction/apis/train.py @@ -216,7 +216,7 @@ def train_model(model, val_dataloader = build_dataloader(val_dataset, **dataloader_setting) eval_hook = DistEvalHook(val_dataloader, **eval_cfg) if distributed \ else EvalHook(val_dataloader, **eval_cfg) - runner.register_hook(eval_hook) + runner.register_hook(eval_hook, priority='LOW') if cfg.resume_from: runner.resume(cfg.resume_from) From 1af84d024420c2887208e48b031aa6519a0fdd18 Mon Sep 17 00:00:00 2001 From: Rejnald Lleshi <46654505+rlleshi@users.noreply.github.com> Date: Wed, 27 Apr 2022 07:10:09 +0200 Subject: [PATCH 400/414] [Improvement] Add openpose 25 joints to graph (#1578) * add openpose 25 joints to graph * refactor name --- mmaction/datasets/pipelines/formatting.py | 8 +++--- mmaction/datasets/pipelines/pose_loading.py | 6 ++--- mmaction/models/skeleton_gcn/utils/graph.py | 25 ++++++++++++++----- .../test_loadings/test_pose_loading.py | 6 ++--- tests/test_models/test_backbones.py | 12 ++++----- 5 files changed, 35 insertions(+), 22 deletions(-) diff --git a/mmaction/datasets/pipelines/formatting.py b/mmaction/datasets/pipelines/formatting.py index 528aa8c5b3..4b1fbc3f7f 100644 --- a/mmaction/datasets/pipelines/formatting.py +++ b/mmaction/datasets/pipelines/formatting.py @@ -382,13 +382,13 @@ class JointToBone: added or modified keys are "keypoint". 
Args: - dataset (str): Define the type of dataset: 'nturgb+d', 'openpose', + dataset (str): Define the type of dataset: 'nturgb+d', 'openpose-18', 'coco'. Default: 'nturgb+d'. """ def __init__(self, dataset='nturgb+d'): self.dataset = dataset - if self.dataset not in ['nturgb+d', 'openpose', 'coco']: + if self.dataset not in ['nturgb+d', 'openpose-18', 'coco']: raise ValueError( f'The dataset type {self.dataset} is not supported') if self.dataset == 'nturgb+d': @@ -397,7 +397,7 @@ def __init__(self, dataset='nturgb+d'): (12, 0), (13, 12), (14, 13), (15, 14), (16, 0), (17, 16), (18, 17), (19, 18), (21, 22), (20, 20), (22, 7), (23, 24), (24, 11)] - elif self.dataset == 'openpose': + elif self.dataset == 'openpose-18': self.pairs = ((0, 0), (1, 0), (2, 1), (3, 2), (4, 3), (5, 1), (6, 5), (7, 6), (8, 2), (9, 8), (10, 9), (11, 5), (12, 11), (13, 12), (14, 0), (15, 0), (16, 14), (17, @@ -421,7 +421,7 @@ def __call__(self, results): assert C in [2, 3] for v1, v2 in self.pairs: bone[..., v1, :] = keypoint[..., v1, :] - keypoint[..., v2, :] - if C == 3 and self.dataset in ['openpose', 'coco']: + if C == 3 and self.dataset in ['openpose-18', 'coco']: score = (keypoint[..., v1, 2] + keypoint[..., v2, 2]) / 2 bone[..., v1, 2] = score diff --git a/mmaction/datasets/pipelines/pose_loading.py b/mmaction/datasets/pipelines/pose_loading.py index e7c713e100..51a210da28 100644 --- a/mmaction/datasets/pipelines/pose_loading.py +++ b/mmaction/datasets/pipelines/pose_loading.py @@ -214,7 +214,7 @@ class LoadKineticsPose: joint. Persons with low confidence scores are dropped (if exceed max_person). Default: dict(face=1, torso=2, limb=3). source (str): The sources of the keypoints used. Choices are 'mmpose' - and 'openpose'. Default: 'mmpose'. + and 'openpose-18'. Default: 'mmpose'. kwargs (dict, optional): Arguments for FileClient. """ @@ -232,7 +232,7 @@ def __init__(self, self.keypoint_weight = cp.deepcopy(keypoint_weight) self.source = source - if source == 'openpose': + if source == 'openpose-18': self.kpsubset = dict( face=[0, 14, 15, 16, 17], torso=[1, 2, 8, 5, 11], @@ -292,7 +292,7 @@ def mapinds(inds): results['total_frames'] = total_frames h, w = results['img_shape'] - if self.source == 'openpose': + if self.source == 'openpose-18': kps[:, :, 0] *= w kps[:, :, 1] *= h diff --git a/mmaction/models/skeleton_gcn/utils/graph.py b/mmaction/models/skeleton_gcn/utils/graph.py index 9b7a54b2f4..e0fce39cb1 100644 --- a/mmaction/models/skeleton_gcn/utils/graph.py +++ b/mmaction/models/skeleton_gcn/utils/graph.py @@ -42,8 +42,7 @@ class Graph: Args: layout (str): must be one of the following candidates - - openpose: Is consists of 18 joints. For more information, please - refer to + - openpose: 18 or 25 joints. For more information, please refer to: https://github.com/CMU-Perceptual-Computing-Lab/openpose#output - ntu-rgb+d: Is consists of 25 joints. 
For more information, please refer to https://github.com/shahroudy/NTURGB-D @@ -62,14 +61,16 @@ class Graph: """ def __init__(self, - layout='openpose', + layout='openpose-18', strategy='uniform', max_hop=1, dilation=1): self.max_hop = max_hop self.dilation = dilation - assert layout in ['openpose', 'ntu-rgb+d', 'ntu_edge', 'coco'] + assert layout in [ + 'openpose-18', 'openpose-25', 'ntu-rgb+d', 'ntu_edge', 'coco' + ] assert strategy in ['uniform', 'distance', 'spatial', 'agcn'] self.get_edge(layout) self.hop_dis = get_hop_distance( @@ -82,7 +83,7 @@ def __str__(self): def get_edge(self, layout): """This method returns the edge pairs of the layout.""" - if layout == 'openpose': + if layout == 'openpose-18': self.num_node = 18 self_link = [(i, i) for i in range(self.num_node)] neighbor_link = [(4, 3), (3, 2), (7, 6), (6, 5), @@ -91,6 +92,18 @@ def get_edge(self, layout): (17, 15), (16, 14)] self.edge = self_link + neighbor_link self.center = 1 + elif layout == 'openpose-25': + self.num_node = 25 + self_link = [(i, i) for i in range(self.num_node)] + neighbor_link = [(4, 3), (3, 2), (7, 6), (6, 5), (23, 22), + (22, 11), (24, 11), (11, 10), (10, 9), (9, 8), + (20, 19), (19, 14), (21, 14), (14, 13), (13, 12), + (12, 8), (8, 1), (5, 1), (2, 1), (0, 1), (15, 0), + (16, 0), (17, 15), (18, 16)] + self.self_link = self_link + self.neighbor_link = neighbor_link + self.edge = self_link + neighbor_link + self.center = 1 elif layout == 'ntu-rgb+d': self.num_node = 25 self_link = [(i, i) for i in range(self.num_node)] @@ -126,7 +139,7 @@ def get_edge(self, layout): self.edge = self_link + neighbor_link self.center = 0 else: - raise ValueError('Do Not Exist This Layout.') + raise ValueError(f'{layout} is not supported.') def get_adjacency(self, strategy): """This method returns the adjacency matrix according to strategy.""" diff --git a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py index 7b8119b00e..055f4e6725 100644 --- a/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py +++ b/tests/test_data/test_pipelines/test_loadings/test_pose_loading.py @@ -155,14 +155,14 @@ def get_mode(arr): LoadKineticsPose(squeeze=True, max_person=100, source='xxx') load_kinetics_pose = LoadKineticsPose( - squeeze=True, max_person=100, source='openpose') + squeeze=True, max_person=100, source='openpose-18') assert str(load_kinetics_pose) == ( 'LoadKineticsPose(io_backend=disk, ' 'squeeze=True, max_person=100, ' "keypoint_weight={'face': 1, " "'torso': 2, 'limb': 3}, " - 'source=openpose, kwargs={})') + 'source=openpose-18, kwargs={})') return_results = load_kinetics_pose(inp) assert return_results['keypoint'].shape[:-1] == \ return_results['keypoint_score'].shape @@ -175,7 +175,7 @@ def get_mode(arr): inp = cp.deepcopy(results) load_kinetics_pose = LoadKineticsPose( - squeeze=False, max_person=100, source='openpose') + squeeze=False, max_person=100, source='openpose-18') return_results = load_kinetics_pose(inp) assert return_results['keypoint'].shape[:-1] == \ return_results['keypoint_score'].shape diff --git a/tests/test_models/test_backbones.py b/tests/test_models/test_backbones.py index 0afd83c58d..1933b98182 100644 --- a/tests/test_models/test_backbones.py +++ b/tests/test_models/test_backbones.py @@ -787,14 +787,14 @@ def test_stgcn_backbone(): feat = stgcn(skeletons) assert feat.shape == torch.Size([2, 256, 75, 17]) - # test openpose layout, spatial strategy + # test openpose-18 layout, spatial strategy input_shape = 
(1, 3, 300, 18, 2) skeletons = generate_backbone_demo_inputs(input_shape) stgcn = STGCN( in_channels=3, edge_importance_weighting=True, - graph_cfg=dict(layout='openpose', strategy='spatial')) + graph_cfg=dict(layout='openpose-18', strategy='spatial')) stgcn.init_weights() stgcn.train() feat = stgcn(skeletons) @@ -839,14 +839,14 @@ def test_stgcn_backbone(): feat = stgcn(skeletons) assert feat.shape == torch.Size([2, 256, 75, 17]) - # test openpose layout, uniform strategy + # test openpose-18 layout, uniform strategy input_shape = (1, 3, 300, 18, 2) skeletons = generate_backbone_demo_inputs(input_shape) stgcn = STGCN( in_channels=3, edge_importance_weighting=True, - graph_cfg=dict(layout='openpose', strategy='uniform')) + graph_cfg=dict(layout='openpose-18', strategy='uniform')) stgcn.init_weights() stgcn.train() feat = stgcn(skeletons) @@ -891,14 +891,14 @@ def test_stgcn_backbone(): feat = stgcn(skeletons) assert feat.shape == torch.Size([2, 256, 75, 17]) - # test openpose layout, distance strategy + # test openpose-18 layout, distance strategy input_shape = (1, 3, 300, 18, 2) skeletons = generate_backbone_demo_inputs(input_shape) stgcn = STGCN( in_channels=3, edge_importance_weighting=True, - graph_cfg=dict(layout='openpose', strategy='distance')) + graph_cfg=dict(layout='openpose-18', strategy='distance')) stgcn.init_weights() stgcn.train() feat = stgcn(skeletons) From 5e853b1029d30e76786ebd92997e4f13e3e2cc03 Mon Sep 17 00:00:00 2001 From: zgplvyou <18756963918@163.com> Date: Fri, 29 Apr 2022 12:11:34 +0800 Subject: [PATCH 401/414] [Feature] Support MLU Device on MMaction2 (#1608) --- mmaction/apis/train.py | 22 ++++--- mmaction/core/dist_utils.py | 4 +- mmaction/utils/__init__.py | 3 +- mmaction/utils/distribution_env.py | 94 ++++++++++++++++++++++++++++++ tools/test.py | 17 +++--- 5 files changed, 122 insertions(+), 18 deletions(-) create mode 100644 mmaction/utils/distribution_env.py diff --git a/mmaction/apis/train.py b/mmaction/apis/train.py index ed93287b54..b0c7e06a7f 100644 --- a/mmaction/apis/train.py +++ b/mmaction/apis/train.py @@ -7,7 +7,6 @@ import numpy as np import torch import torch.distributed as dist -from mmcv.parallel import MMDataParallel, MMDistributedDataParallel from mmcv.runner import (DistSamplerSeedHook, EpochBasedRunner, OptimizerHook, build_optimizer, get_dist_info) from mmcv.runner.hooks import Fp16OptimizerHook @@ -15,11 +14,12 @@ from ..core import (DistEvalHook, EvalHook, OmniSourceDistSamplerSeedHook, OmniSourceRunner) from ..datasets import build_dataloader, build_dataset -from ..utils import PreciseBNHook, get_root_logger +from ..utils import (PreciseBNHook, build_ddp, build_dp, default_device, + get_root_logger) from .test import multi_gpu_test -def init_random_seed(seed=None, device='cuda', distributed=True): +def init_random_seed(seed=None, device=default_device, distributed=True): """Initialize random seed. 
If the seed is not set, the seed will be automatically randomized, @@ -122,13 +122,17 @@ def train_model(model, find_unused_parameters = cfg.get('find_unused_parameters', False) # Sets the `find_unused_parameters` parameter in # torch.nn.parallel.DistributedDataParallel - model = MMDistributedDataParallel( - model.cuda(), - device_ids=[torch.cuda.current_device()], - broadcast_buffers=False, - find_unused_parameters=find_unused_parameters) + + model = build_ddp( + model, + default_device, + default_args=dict( + device_ids=[int(os.environ['LOCAL_RANK'])], + broadcast_buffers=False, + find_unused_parameters=find_unused_parameters)) else: - model = MMDataParallel(model, device_ids=cfg.gpu_ids) + model = build_dp( + model, default_device, default_args=dict(device_ids=cfg.gpu_ids)) # build runner optimizer = build_optimizer(model, cfg.optimizer) diff --git a/mmaction/core/dist_utils.py b/mmaction/core/dist_utils.py index 32f57b6245..cae452d9bd 100644 --- a/mmaction/core/dist_utils.py +++ b/mmaction/core/dist_utils.py @@ -4,8 +4,10 @@ import torch.distributed as dist from mmcv.runner import get_dist_info +from ..utils import default_device -def sync_random_seed(seed=None, device='cuda'): + +def sync_random_seed(seed=None, device=default_device): """Make sure different ranks share the same seed. All workers must call this function, otherwise it will deadlock. This method is generally used in `DistributedSampler`, because the seed should be identical across all diff --git a/mmaction/utils/__init__.py b/mmaction/utils/__init__.py index 393a1d1325..a1bbbb761a 100644 --- a/mmaction/utils/__init__.py +++ b/mmaction/utils/__init__.py @@ -1,5 +1,6 @@ # Copyright (c) OpenMMLab. All rights reserved. from .collect_env import collect_env +from .distribution_env import build_ddp, build_dp, default_device from .gradcam_utils import GradCAM from .logger import get_root_logger from .misc import get_random_string, get_shm_dir, get_thread_id @@ -10,5 +11,5 @@ __all__ = [ 'get_root_logger', 'collect_env', 'get_random_string', 'get_thread_id', 'get_shm_dir', 'GradCAM', 'PreciseBNHook', 'register_module_hooks', - 'setup_multi_processes' + 'setup_multi_processes', 'build_ddp', 'build_dp', 'default_device' ] diff --git a/mmaction/utils/distribution_env.py b/mmaction/utils/distribution_env.py new file mode 100644 index 0000000000..6e241e032e --- /dev/null +++ b/mmaction/utils/distribution_env.py @@ -0,0 +1,94 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmcv.parallel import MMDataParallel, MMDistributedDataParallel + +dp_factory = {'cuda': MMDataParallel, 'cpu': MMDataParallel} + +ddp_factory = {'cuda': MMDistributedDataParallel} + + +def build_dp(model, device='cuda', default_args=None): + """build DataParallel module by device type. + + if device is cuda, return a MMDataParallel model; if device is mlu, + return a MLUDataParallel model. + Args: + model(nn.Module): model to be parallelized. + device(str): device type, cuda, cpu or mlu. Defaults to cuda. + default_args: dict type, include the following parameters. + device_ids(int): device ids of modules to be scattered to. + Defaults to None when GPU or MLU is not available. + Returns: + model(nn.Module): the model to be parallelized. 
+    """
+
+    if device == 'cuda':
+        model = model.cuda()
+    elif device == 'mlu':
+        from mmcv.device.mlu import MLUDataParallel
+        dp_factory['mlu'] = MLUDataParallel
+        model = model.mlu()
+
+    return dp_factory[device](model, **default_args)
+
+
+def build_ddp(model, device='cuda', default_args=None):
+    """Build DistributedDataParallel module by device type.
+    If device is cuda, return a MMDistributedDataParallel model;
+    if device is mlu, return a MLUDistributedDataParallel model.
+    Args:
+        model(:class:`nn.Module`): module to be parallelized.
+        device(str): device type, mlu or cuda.
+        default_args: dict type, include the following parameters.
+            device_ids(int): which represents the only device where the input
+                module corresponding to this process resides. Defaults to None.
+            broadcast_buffers(bool): Flag that enables syncing (broadcasting)
+                buffers of the module at beginning of the forward function.
+                Defaults to True.
+            find_unused_parameters(bool): Traverse the autograd graph of all
+                tensors contained in the return value of the wrapped module's
+                ``forward`` function.
+                Parameters that don't receive gradients as part of this graph
+                are preemptively marked as being ready to be reduced. Note that
+                all ``forward`` outputs that are derived from module parameters
+                must participate in calculating loss and later the gradient
+                computation. If they don't, this wrapper will hang waiting
+                for autograd to produce gradients for those parameters. Any
+                outputs derived from module parameters that are otherwise
+                unused can be detached from the autograd graph using
+                ``torch.Tensor.detach``. Defaults to False.
+    Returns:
+        model(nn.Module): the parallelized module.
+    References:
+        .. [1] https://pytorch.org/docs/stable/generated/torch.nn.parallel.
+            DistributedDataParallel.html
+    """
+
+    assert device in ['cuda', 'mlu'
+                      ], 'Only available for cuda or mlu devices currently.'
+ if device == 'cuda': + model = model.cuda() + elif device == 'mlu': + from mmcv.device.mlu import MLUDistributedDataParallel + ddp_factory['mlu'] = MLUDistributedDataParallel + model = model.mlu() + + return ddp_factory[device](model, **default_args) + + +def is_mlu_available(): + """Returns a bool indicating if MLU is currently available.""" + return hasattr(torch, 'is_mlu_available') and torch.is_mlu_available() + + +def get_device(): + """Returns an available device, cpu, cuda or mlu.""" + is_device_available = { + 'cuda': torch.cuda.is_available(), + 'mlu': is_mlu_available() + } + device_list = [k for k, v in is_device_available.items() if v] + return device_list[0] if len(device_list) == 1 else 'cpu' + + +default_device = get_device() diff --git a/tools/test.py b/tools/test.py index 31514498cf..6b52e9fd1f 100644 --- a/tools/test.py +++ b/tools/test.py @@ -9,13 +9,13 @@ from mmcv import Config, DictAction from mmcv.cnn import fuse_conv_bn from mmcv.fileio.io import file_handlers -from mmcv.parallel import MMDataParallel, MMDistributedDataParallel from mmcv.runner import get_dist_info, init_dist, load_checkpoint from mmcv.runner.fp16_utils import wrap_fp16_model from mmaction.datasets import build_dataloader, build_dataset from mmaction.models import build_model -from mmaction.utils import register_module_hooks, setup_multi_processes +from mmaction.utils import (build_ddp, build_dp, default_device, + register_module_hooks, setup_multi_processes) # TODO import test functions from mmcv and delete them from mmaction2 try: @@ -157,13 +157,16 @@ def inference_pytorch(args, cfg, distributed, data_loader): model = fuse_conv_bn(model) if not distributed: - model = MMDataParallel(model, device_ids=[0]) + model = build_dp( + model, default_device, default_args=dict(device_ids=cfg.gpu_ids)) outputs = single_gpu_test(model, data_loader) else: - model = MMDistributedDataParallel( - model.cuda(), - device_ids=[torch.cuda.current_device()], - broadcast_buffers=False) + model = build_ddp( + model, + default_device, + default_args=dict( + device_ids=[int(os.environ['LOCAL_RANK'])], + broadcast_buffers=False)) outputs = multi_gpu_test(model, data_loader, args.tmpdir, args.gpu_collect) From 26a105b32666d0ea6e9e33d7dbef8affa936f099 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Thu, 5 May 2022 11:45:16 +0800 Subject: [PATCH 402/414] [Release] v0.24.0 release (#1619) --- README.md | 2 +- README_zh-CN.md | 2 +- docker/serve/Dockerfile | 2 +- docs/changelog.md | 22 ++++++++++++++++++++++ mmaction/version.py | 2 +- 5 files changed, 26 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 0c6ad8e594..042384ada0 100644 --- a/README.md +++ b/README.md @@ -79,7 +79,7 @@ The master branch works with **PyTorch 1.3+**. - (2021-10-25) We provide a [guide](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md) on how to train PoseC3D with custom datasets, [bit-scientist](https://github.com/bit-scientist) authored this PR! - (2021-10-16) We support **PoseC3D** on UCF101 and HMDB51, achieves 87.0% and 69.3% Top-1 accuracy with 2D skeletons only. Pre-extracted 2D skeletons are also available. -**Release**: v0.23.0 was released in 01/04/2022. Please refer to [changelog.md](docs/changelog.md) for details and release history. +**Release**: v0.24.0 was released in 05/05/2022. Please refer to [changelog.md](docs/changelog.md) for details and release history. 
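To make the MLU support from patch 401 above concrete, here is a short usage sketch of the new device helpers. It assumes this revision of mmaction2, plus an mmcv build with MLU support when a Cambricon MLU is present; the `device_ids` and `local_rank` values are illustrative.

```python
# Sketch of the dispatch helpers added in mmaction/utils/distribution_env.py;
# the wrapped-model calls are commented out because they need a real nn.Module.
from mmaction.utils import build_dp, build_ddp, default_device

# `default_device` is computed once at import time: 'cuda' or 'mlu' when
# exactly one of the two is available, and 'cpu' otherwise.
print(default_device)

# Single-process data parallel; dp_factory covers 'cuda', 'cpu' and 'mlu':
# model = build_dp(model, default_device, default_args=dict(device_ids=[0]))

# Distributed training/testing; build_ddp asserts the device is 'cuda' or 'mlu':
# model = build_ddp(
#     model,
#     default_device,
#     default_args=dict(device_ids=[local_rank], broadcast_buffers=False))
```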
## Installation diff --git a/README_zh-CN.md b/README_zh-CN.md index 6c3b0f20d5..27e4e35898 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -78,7 +78,7 @@ MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLa - (2021-10-25) 提供使用自定义数据集训练 PoseC3D 的 [教程](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md),此 PR 由用户 [bit-scientist](https://github.com/bit-scientist) 完成! - (2021-10-16) 在 UCF101, HMDB51 上支持 **PoseC3D**,仅用 2D 关键点就可分别达到 87.0% 和 69.3% 的识别准确率。两数据集的预提取骨架特征可以公开下载。 -v0.23.0 版本已于 2022 年 4 月 1 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史 +v0.24.0 版本已于 2022 年 5 月 5 日发布,可通过查阅 [更新日志](/docs/changelog.md) 了解更多细节以及发布历史 ## 安装 diff --git a/docker/serve/Dockerfile b/docker/serve/Dockerfile index e5e51618e3..8ea55de3f6 100644 --- a/docker/serve/Dockerfile +++ b/docker/serve/Dockerfile @@ -4,7 +4,7 @@ ARG CUDNN="7" FROM pytorch/pytorch:${PYTORCH}-cuda${CUDA}-cudnn${CUDNN}-devel ARG MMCV="1.3.8" -ARG MMACTION="0.23.0" +ARG MMACTION="0.24.0" ENV PYTHONUNBUFFERED TRUE diff --git a/docs/changelog.md b/docs/changelog.md index 5acdf78ae2..0c51d891e1 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -1,5 +1,27 @@ ## Changelog +### 0.24.0 (05/05/2022) + +**Highlights** + +- Support different seeds + +**New Features** + +- Add lateral norm in multigrid config ([#1567](https://github.com/open-mmlab/mmaction2/pull/1567)) +- Add openpose 25 joints in graph config ([#1578](https://github.com/open-mmlab/mmaction2/pull/1578)) +- Support MLU Backend ([#1608](https://github.com/open-mmlab/mmaction2/pull/1608)) + +**Bug and Typo Fixes** + +- Fix local_rank ([#1558](https://github.com/open-mmlab/mmaction2/pull/1558)) +- Fix install typo ([#1571](https://github.com/open-mmlab/mmaction2/pull/1571)) +- Fix the inference API doc ([#1580](https://github.com/open-mmlab/mmaction2/pull/1580)) +- Fix zh-CN demo.md and getting_started.md ([#1587](https://github.com/open-mmlab/mmaction2/pull/1587)) +- Remove Recommonmark ([#1595](https://github.com/open-mmlab/mmaction2/pull/1595)) +- Fix inference with ndarray ([#1603](https://github.com/open-mmlab/mmaction2/pull/1603)) +- Fix the log error when `IterBasedRunner` is used ([#1606](https://github.com/open-mmlab/mmaction2/pull/1606)) + ### 0.23.0 (04/01/2022) **Highlights** diff --git a/mmaction/version.py b/mmaction/version.py index 85c6bf45fd..1099c151d1 100644 --- a/mmaction/version.py +++ b/mmaction/version.py @@ -1,6 +1,6 @@ # Copyright (c) Open-MMLab. All rights reserved. -__version__ = '0.23.0' +__version__ = '0.24.0' def parse_version_info(version_str): From 9d4c890b19953f812a0d0801d0c417a9e180b83a Mon Sep 17 00:00:00 2001 From: bob Date: Mon, 9 May 2022 13:09:34 +0800 Subject: [PATCH 403/414] [Doc] fix url in config tutorial when talking about Config File Structure (#1622) --- docs/tutorials/1_config.md | 2 +- docs_zh_CN/tutorials/1_config.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/tutorials/1_config.md b/docs/tutorials/1_config.md index f31ac47682..bad5d56eee 100644 --- a/docs/tutorials/1_config.md +++ b/docs/tutorials/1_config.md @@ -51,7 +51,7 @@ For example, if some modification is made base on TSN, users may first inherit t If you are building an entirely new method that does not share the structure with any of the existing methods, you may create a folder under `configs/TASK`. -Please refer to [mmcv](https://mmcv.readthedocs.io/en/latest/utils.html#config) for detailed documentation. 
+Please refer to [mmcv](https://mmcv.readthedocs.io/en/latest/understand_mmcv/config.html) for detailed documentation. ## Config File Naming Convention diff --git a/docs_zh_CN/tutorials/1_config.md b/docs_zh_CN/tutorials/1_config.md index d3f5ae9ad0..3d7d44294f 100644 --- a/docs_zh_CN/tutorials/1_config.md +++ b/docs_zh_CN/tutorials/1_config.md @@ -53,7 +53,7 @@ MMAction2 提供的所有配置文件都放置在 `$MMAction2/configs` 文件夹 如果用户想实现一个独立于任何一个现有的方法结构的新方法,则需要像 `configs/recognition`, `configs/detection` 等一样,在 `configs/TASK` 中建立新的文件夹。 -更多详细内容,请参考 [mmcv](https://mmcv.readthedocs.io/en/latest/utils.html#config)。 +更多详细内容,请参考 [mmcv](https://mmcv.readthedocs.io/en/latest/understand_mmcv/config.html)。 ## 配置文件命名规则 From 18a7b574691bc3d09d60a37045c2af2ed2dbbc5b Mon Sep 17 00:00:00 2001 From: Rejnald Lleshi <46654505+rlleshi@users.noreply.github.com> Date: Tue, 10 May 2022 10:40:12 +0200 Subject: [PATCH 404/414] [Fix] Add dropout layer to AGCN (#1611) --- mmaction/models/backbones/agcn.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mmaction/models/backbones/agcn.py b/mmaction/models/backbones/agcn.py index 7c4127c14e..b3932de9fe 100644 --- a/mmaction/models/backbones/agcn.py +++ b/mmaction/models/backbones/agcn.py @@ -91,7 +91,8 @@ def __init__(self, in_channels, out_channels, kernel_size[1], adj_len=adj_len) self.tcn = nn.Sequential( nn.Conv2d(out_channels, out_channels, (kernel_size[0], 1), - (stride, 1), padding), nn.BatchNorm2d(out_channels)) + (stride, 1), padding), nn.BatchNorm2d(out_channels), + nn.Dropout(dropout, inplace=True)) # tcn init for m in self.tcn.modules(): From 62d8b42cc8b8c69b10d77e0970a7bf6c892085df Mon Sep 17 00:00:00 2001 From: bob Date: Wed, 11 May 2022 21:47:27 +0800 Subject: [PATCH 405/414] [Fix] fix config_file url of extract audio module in data_preparation tutorial in both en and zh_CN (#1627) --- docs/data_preparation.md | 2 +- docs_zh_CN/data_preparation.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/data_preparation.md b/docs/data_preparation.md index cf1b127d14..0e8dde9bf0 100644 --- a/docs/data_preparation.md +++ b/docs/data_preparation.md @@ -137,7 +137,7 @@ python tools/data/extract_audio.py ${ROOT} ${DST_ROOT} [--ext ${EXT}] [--num-wor - `EXT`: Extension of the video files. e.g., `.mp4`. - `N_WORKERS`: Number of processes to be used. -After extracting audios, you are free to decode and generate the spectrogram on-the-fly such as [this](/configs/audio_recognition/tsn_r50_64x1x1_kinetics400_audio.py). As for the annotations, you can directly use those of the rawframes as long as you keep the relative position of audio files same as the rawframes directory. However, extracting spectrogram on-the-fly is slow and bad for prototype iteration. Therefore, we also provide a script (and many useful tools to play with) for you to generation spectrogram off-line. +After extracting audios, you are free to decode and generate the spectrogram on-the-fly such as [this](/configs/recognition_audio/resnet/tsn_r50_64x1x1_100e_kinetics400_audio.py). As for the annotations, you can directly use those of the rawframes as long as you keep the relative position of audio files same as the rawframes directory. However, extracting spectrogram on-the-fly is slow and bad for prototype iteration. Therefore, we also provide a script (and many useful tools to play with) for you to generation spectrogram off-line. 
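The paragraph above points at a config that decodes audio and builds the spectrogram on the fly. A sketch of what such a pipeline looks like, assuming the audio pipeline ops shipped with this release; the clip length and amplification ratio are illustrative:

```python
# On-the-fly audio pipeline sketch: decode the raw waveform per sampled clip,
# then turn it into a mel spectrogram inside the data pipeline. Convenient,
# but slower than training from pre-extracted features, as noted above.
train_pipeline = [
    dict(type='AudioDecodeInit'),  # open the audio file lazily
    dict(type='SampleFrames', clip_len=64, frame_interval=1, num_clips=1),
    dict(type='AudioDecode'),  # decode only the sampled span
    dict(type='AudioAmplify', ratio=1.5),  # simple volume augmentation
    dict(type='MelSpectrogram'),  # waveform -> mel spectrogram
    dict(type='FormatAudioShape', input_format='NCTF'),
    dict(type='Collect', keys=['audios', 'label'], meta_keys=[]),
    dict(type='ToTensor', keys=['audios']),
]
```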
```shell cd $MMACTION2 diff --git a/docs_zh_CN/data_preparation.md b/docs_zh_CN/data_preparation.md index 9fa2fd47d8..d6ad734580 100644 --- a/docs_zh_CN/data_preparation.md +++ b/docs_zh_CN/data_preparation.md @@ -138,7 +138,7 @@ python tools/data/extract_audio.py ${ROOT} ${DST_ROOT} [--ext ${EXT}] [--num-wor - `EXT`: 视频的后缀名,如 `.mp4`。 - `N_WORKERS`: 使用的进程数量。 -成功提取出音频后,用户可参照 [配置文件](/configs/audio_recognition/tsn_r50_64x1x1_kinetics400_audio.py) 在线解码并生成梅尔频谱。如果音频文件的目录结构与帧文件夹一致,用户可以直接使用帧数据所用的标注文件作为音频数据的标注文件。在线解码的缺陷在于速度较慢,因此,MMAction2 也提供如下脚本用于离线地生成梅尔频谱。 +成功提取出音频后,用户可参照 [配置文件](/configs/recognition_audio/resnet/tsn_r50_64x1x1_100e_kinetics400_audio.py) 在线解码并生成梅尔频谱。如果音频文件的目录结构与帧文件夹一致,用户可以直接使用帧数据所用的标注文件作为音频数据的标注文件。在线解码的缺陷在于速度较慢,因此,MMAction2 也提供如下脚本用于离线地生成梅尔频谱。 ```shell cd $MMACTION2 From eb7e1084389d47eff30ad43e86542c89f98d5b8a Mon Sep 17 00:00:00 2001 From: bob Date: Sun, 15 May 2022 21:58:08 +0800 Subject: [PATCH 406/414] [Fix] Fix issues in audio module (#1630) * Fix issues in audio module * fix yapf Co-authored-by: Haodong Duan --- docs/data_preparation.md | 4 ++-- docs_zh_CN/data_preparation.md | 4 ++-- tools/data/build_audio_features.py | 5 +++-- tools/data/extract_audio.py | 4 ++-- 4 files changed, 9 insertions(+), 8 deletions(-) diff --git a/docs/data_preparation.md b/docs/data_preparation.md index 0e8dde9bf0..84788dcf2c 100644 --- a/docs/data_preparation.md +++ b/docs/data_preparation.md @@ -134,7 +134,7 @@ python tools/data/extract_audio.py ${ROOT} ${DST_ROOT} [--ext ${EXT}] [--num-wor - `ROOT`: The root directory of the videos. - `DST_ROOT`: The destination root directory of the audios. -- `EXT`: Extension of the video files. e.g., `.mp4`. +- `EXT`: Extension of the video files. e.g., `mp4`. - `N_WORKERS`: Number of processes to be used. After extracting audios, you are free to decode and generate the spectrogram on-the-fly such as [this](/configs/recognition_audio/resnet/tsn_r50_64x1x1_100e_kinetics400_audio.py). As for the annotations, you can directly use those of the rawframes as long as you keep the relative position of audio files same as the rawframes directory. However, extracting spectrogram on-the-fly is slow and bad for prototype iteration. Therefore, we also provide a script (and many useful tools to play with) for you to generation spectrogram off-line. @@ -147,7 +147,7 @@ python tools/data/build_audio_features.py ${AUDIO_HOME_PATH} ${SPECTROGRAM_SAVE_ - `AUDIO_HOME_PATH`: The root directory of the audio files. - `SPECTROGRAM_SAVE_PATH`: The destination root directory of the audio features. -- `EXT`: Extension of the audio files. e.g., `.m4a`. +- `EXT`: Extension of the audio files. e.g., `m4a`. - `N_WORKERS`: Number of processes to be used. - `PART`: Determines how many parts to be splited and which part to run. e.g., `2/5` means splitting all files into 5-fold and executing the 2nd part. This is useful if you have several machines. 
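For the extraction step documented above, the heavy lifting is one ffmpeg invocation per video. A minimal sketch of that call (the real `tools/data/extract_audio.py` fans this out over `--num-workers` processes; the paths below are illustrative):

```python
# Per-video audio extraction, mirroring what the helper script runs.
import os
import os.path as osp

def extract_audio_wav(video_path, dst_dir):
    """Dump the audio track of one video to <dst_dir>/<video_id>.wav."""
    video_id = osp.splitext(osp.basename(video_path))[0]
    # -map 0:a keeps only the audio streams; -y overwrites existing output
    cmd = f'ffmpeg -i {video_path} -map 0:a -y {osp.join(dst_dir, video_id)}.wav'
    os.system(cmd)

extract_audio_wav('data/kinetics400/videos_train/abseiling/xyz.mp4',
                  'data/kinetics400/audios')
```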
diff --git a/docs_zh_CN/data_preparation.md b/docs_zh_CN/data_preparation.md index d6ad734580..021fe4bc1b 100644 --- a/docs_zh_CN/data_preparation.md +++ b/docs_zh_CN/data_preparation.md @@ -135,7 +135,7 @@ python tools/data/extract_audio.py ${ROOT} ${DST_ROOT} [--ext ${EXT}] [--num-wor - `ROOT`: 视频的根目录。 - `DST_ROOT`: 存放生成音频的根目录。 -- `EXT`: 视频的后缀名,如 `.mp4`。 +- `EXT`: 视频的后缀名,如 `mp4`。 - `N_WORKERS`: 使用的进程数量。 成功提取出音频后,用户可参照 [配置文件](/configs/recognition_audio/resnet/tsn_r50_64x1x1_100e_kinetics400_audio.py) 在线解码并生成梅尔频谱。如果音频文件的目录结构与帧文件夹一致,用户可以直接使用帧数据所用的标注文件作为音频数据的标注文件。在线解码的缺陷在于速度较慢,因此,MMAction2 也提供如下脚本用于离线地生成梅尔频谱。 @@ -148,7 +148,7 @@ python tools/data/build_audio_features.py ${AUDIO_HOME_PATH} ${SPECTROGRAM_SAVE_ - `AUDIO_HOME_PATH`: 音频文件的根目录。 - `SPECTROGRAM_SAVE_PATH`: 存放生成音频特征的根目录。 -- `EXT`: 音频的后缀名,如 `.m4a`。 +- `EXT`: 音频的后缀名,如 `m4a`。 - `N_WORKERS`: 使用的进程数量。 - `PART`: 将完整的解码任务分为几部分并执行其中一份。如 `2/5` 表示将所有待解码数据分成 5 份,并对其中的第 2 份进行解码。这一选项在用户有多台机器时发挥作用。 diff --git a/tools/data/build_audio_features.py b/tools/data/build_audio_features.py index 05f5978083..f143427c50 100644 --- a/tools/data/build_audio_features.py +++ b/tools/data/build_audio_features.py @@ -290,7 +290,7 @@ def extract_audio_feature(wav_path, audio_tools, mel_out_dir): parser.add_argument('audio_home_path', type=str) parser.add_argument('spectrogram_save_path', type=str) parser.add_argument('--level', type=int, default=1) - parser.add_argument('--ext', default='.m4a') + parser.add_argument('--ext', default='m4a') parser.add_argument('--num-workers', type=int, default=4) parser.add_argument('--part', type=str, default='1/1') args = parser.parse_args() @@ -298,7 +298,8 @@ def extract_audio_feature(wav_path, audio_tools, mel_out_dir): mmcv.mkdir_or_exist(args.spectrogram_save_path) files = glob.glob( - osp.join(args.audio_home_path, '*/' * args.level, '*' + args.ext)) + # osp.join(args.audio_home_path, '*/' * args.level, '*' + args.ext) + args.audio_home_path + '/*' * args.level + '.' 
+ args.ext) print(f'found {len(files)} files.') files = sorted(files) if args.part is not None: diff --git a/tools/data/extract_audio.py b/tools/data/extract_audio.py index 6f56de2691..46d2367593 100644 --- a/tools/data/extract_audio.py +++ b/tools/data/extract_audio.py @@ -18,7 +18,7 @@ def extract_audio_wav(line): try: if osp.exists(f'{dst_dir}/{video_id}.wav'): return - cmd = f'ffmpeg -i ./{line} -map 0:a -y {dst_dir}/{video_id}.wav' + cmd = f'ffmpeg -i {line} -map 0:a -y {dst_dir}/{video_id}.wav' os.popen(cmd) except BaseException: with open('extract_wav_err_file.txt', 'a+') as f: @@ -38,7 +38,7 @@ def parse_args(): choices=['avi', 'mp4', 'webm'], help='video file extensions') parser.add_argument( - '--num-worker', type=int, default=8, help='number of workers') + '--num-workers', type=int, default=8, help='number of workers') args = parser.parse_args() return args From 43b2282f3d2a66bd1aa1554dfdf4fdb379edeb0f Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Wed, 18 May 2022 20:31:59 +0800 Subject: [PATCH 407/414] [Pre-commit] use mdformat (#1636) * use mdformat * update mmcv version --- .github/CODE_OF_CONDUCT.md | 30 +- .github/ISSUE_TEMPLATE/error-report.md | 5 +- .github/ISSUE_TEMPLATE/feature_request.md | 5 +- .github/ISSUE_TEMPLATE/general_questions.md | 1 - .../reimplementation_questions.md | 5 +- .github/workflows/lint.yml | 4 - .pre-commit-config.yaml | 18 +- README.md | 19 +- README_zh-CN.md | 18 +- configs/detection/acrn/README.md | 9 +- configs/detection/acrn/README_zh-CN.md | 16 +- configs/detection/ava/README.md | 43 +- configs/detection/ava/README_zh-CN.md | 48 +- configs/detection/lfb/README.md | 11 +- configs/detection/lfb/README_zh-CN.md | 16 +- configs/localization/bmn/README.md | 21 +- configs/localization/bmn/README_zh-CN.md | 24 +- configs/localization/bsn/README.md | 107 +- configs/localization/bsn/README_zh-CN.md | 110 +- configs/localization/ssn/README.md | 7 +- configs/localization/ssn/README_zh-CN.md | 10 +- configs/recognition/c3d/README.md | 7 +- configs/recognition/c3d/README_zh-CN.md | 10 +- configs/recognition/csn/README.md | 23 +- configs/recognition/csn/README_zh-CN.md | 26 +- configs/recognition/i3d/README.md | 25 +- configs/recognition/i3d/README_zh-CN.md | 28 +- configs/recognition/omnisource/README.md | 43 +- .../recognition/omnisource/README_zh-CN.md | 52 +- configs/recognition/r2plus1d/README.md | 13 +- configs/recognition/r2plus1d/README_zh-CN.md | 16 +- configs/recognition/slowfast/README.md | 33 +- configs/recognition/slowfast/README_zh-CN.md | 36 +- configs/recognition/slowonly/README.md | 89 +- configs/recognition/slowonly/README_zh-CN.md | 102 +- configs/recognition/tanet/README.md | 17 +- configs/recognition/tanet/README_zh-CN.md | 18 +- configs/recognition/timesformer/README.md | 11 +- .../recognition/timesformer/README_zh-CN.md | 14 +- configs/recognition/tin/README.md | 19 +- configs/recognition/tin/README_zh-CN.md | 22 +- configs/recognition/tpn/README.md | 15 +- configs/recognition/tpn/README_zh-CN.md | 14 +- configs/recognition/trn/README.md | 13 +- configs/recognition/trn/README_zh-CN.md | 16 +- configs/recognition/tsm/README.md | 101 +- configs/recognition/tsm/README_zh-CN.md | 116 +- configs/recognition/tsn/README.md | 183 +-- configs/recognition/tsn/README_zh-CN.md | 192 +-- configs/recognition/x3d/README.md | 11 +- configs/recognition/x3d/README_zh-CN.md | 12 +- configs/recognition_audio/resnet/README.md | 15 +- .../recognition_audio/resnet/README_zh-CN.md | 16 +- configs/skeleton/2s-agcn/README.md | 9 +- 
configs/skeleton/2s-agcn/README_zh-CN.md | 12 +- configs/skeleton/posec3d/README.md | 39 +- configs/skeleton/posec3d/README_zh-CN.md | 50 +- .../posec3d/custom_dataset_training.md | 45 +- configs/skeleton/stgcn/README.md | 21 +- configs/skeleton/stgcn/README_zh-CN.md | 24 +- demo/README.md | 238 ++-- docs/benchmark.md | 38 +- docs/changelog.md | 6 +- docs/faq.md | 101 +- docs/getting_started.md | 112 +- docs/install.md | 4 +- docs/projects.md | 12 +- docs/supported_datasets.md | 6 +- docs/tutorials/1_config.md | 1132 ++++++++-------- docs/tutorials/2_finetune.md | 2 +- docs/tutorials/4_data_pipeline.md | 60 +- docs/tutorials/5_new_modules.md | 102 +- docs/tutorials/7_customize_runtime.md | 78 +- docs/useful_tools.md | 60 +- docs_zh_CN/benchmark.md | 38 +- docs_zh_CN/data_preparation.md | 14 +- docs_zh_CN/demo.md | 218 ++-- docs_zh_CN/faq.md | 91 +- docs_zh_CN/feature_extraction.md | 2 +- docs_zh_CN/getting_started.md | 144 +-- docs_zh_CN/install.md | 18 +- docs_zh_CN/supported_datasets.md | 6 +- docs_zh_CN/tutorials/1_config.md | 1136 ++++++++--------- docs_zh_CN/tutorials/2_finetune.md | 10 +- docs_zh_CN/tutorials/3_new_dataset.md | 10 +- docs_zh_CN/tutorials/4_data_pipeline.md | 72 +- docs_zh_CN/tutorials/5_new_modules.md | 116 +- docs_zh_CN/tutorials/6_export_model.md | 10 +- docs_zh_CN/tutorials/7_customize_runtime.md | 114 +- docs_zh_CN/useful_tools.md | 76 +- mmaction/__init__.py | 2 +- tools/data/ava/AVA_annotation_explained.md | 4 +- tools/data/hvu/README.md | 4 +- tools/data/jhmdb/README.md | 4 +- tools/data/jhmdb/README_zh-CN.md | 4 +- tools/data/kinetics/README.md | 8 +- tools/data/kinetics/README_zh-CN.md | 6 +- tools/data/omnisource/README_zh-CN.md | 16 +- tools/data/ucf101_24/README.md | 4 +- tools/data/ucf101_24/README_zh-CN.md | 4 +- 100 files changed, 3074 insertions(+), 3043 deletions(-) diff --git a/.github/CODE_OF_CONDUCT.md b/.github/CODE_OF_CONDUCT.md index efd4305798..92afad1c5a 100644 --- a/.github/CODE_OF_CONDUCT.md +++ b/.github/CODE_OF_CONDUCT.md @@ -14,22 +14,22 @@ appearance, race, religion, or sexual identity and orientation. 
Examples of behavior that contributes to creating a positive environment include: -* Using welcoming and inclusive language -* Being respectful of differing viewpoints and experiences -* Gracefully accepting constructive criticism -* Focusing on what is best for the community -* Showing empathy towards other community members +- Using welcoming and inclusive language +- Being respectful of differing viewpoints and experiences +- Gracefully accepting constructive criticism +- Focusing on what is best for the community +- Showing empathy towards other community members Examples of unacceptable behavior by participants include: -* The use of sexualized language or imagery and unwelcome sexual attention or - advances -* Trolling, insulting/derogatory comments, and personal or political attacks -* Public or private harassment -* Publishing others' private information, such as a physical or electronic - address, without explicit permission -* Other conduct which could reasonably be considered inappropriate in a - professional setting +- The use of sexualized language or imagery and unwelcome sexual attention or + advances +- Trolling, insulting/derogatory comments, and personal or political attacks +- Public or private harassment +- Publishing others' private information, such as a physical or electronic + address, without explicit permission +- Other conduct which could reasonably be considered inappropriate in a + professional setting ## Our Responsibilities @@ -70,7 +70,7 @@ members of the project's leadership. This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html -[homepage]: https://www.contributor-covenant.org - For answers to common questions about this code of conduct, see https://www.contributor-covenant.org/faq + +[homepage]: https://www.contributor-covenant.org diff --git a/.github/ISSUE_TEMPLATE/error-report.md b/.github/ISSUE_TEMPLATE/error-report.md index bac7c835fa..cab4b1b580 100644 --- a/.github/ISSUE_TEMPLATE/error-report.md +++ b/.github/ISSUE_TEMPLATE/error-report.md @@ -4,7 +4,6 @@ about: Create a report to help us improve title: '' labels: '' assignees: '' - --- Thanks for your error report and we appreciate it a lot. @@ -34,8 +33,8 @@ A placeholder for the command. 1. Please run `PYTHONPATH=${PWD}:$PYTHONPATH python mmaction/utils/collect_env.py` to collect necessary environment information and paste it here. 2. You may add addition that may be helpful for locating the problem, such as - - How you installed PyTorch [e.g., pip, conda, source] - - Other environment variables that may be related (such as `$PATH`, `$LD_LIBRARY_PATH`, `$PYTHONPATH`, etc.) + - How you installed PyTorch \[e.g., pip, conda, source\] + - Other environment variables that may be related (such as `$PATH`, `$LD_LIBRARY_PATH`, `$PYTHONPATH`, etc.) **Error traceback** diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index f2c6a9ab56..9b5bc40864 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -4,7 +4,6 @@ about: Suggest an idea for this project title: '' labels: '' assignees: '' - --- Thanks for your feature request and we will review and plan for it when necessary. @@ -15,8 +14,8 @@ If you feel we have help you, give us a STAR! :satisfied: **Motivation** A clear and concise description of the motivation of the feature. -Ex1. It is inconvenient when [....]. -Ex2. 
There is a recent paper [....], which is very helpful for [....]. +Ex1. It is inconvenient when \[....\]. +Ex2. There is a recent paper \[....\], which is very helpful for \[....\]. **Related resources** diff --git a/.github/ISSUE_TEMPLATE/general_questions.md b/.github/ISSUE_TEMPLATE/general_questions.md index b5ffabd97e..5aa583cb1c 100644 --- a/.github/ISSUE_TEMPLATE/general_questions.md +++ b/.github/ISSUE_TEMPLATE/general_questions.md @@ -4,7 +4,6 @@ about: Ask general questions to get help title: '' labels: '' assignees: '' - --- Before raising a question, you may need to check the following listed items. diff --git a/.github/ISSUE_TEMPLATE/reimplementation_questions.md b/.github/ISSUE_TEMPLATE/reimplementation_questions.md index 4b0d78d91f..babbaeb8b7 100644 --- a/.github/ISSUE_TEMPLATE/reimplementation_questions.md +++ b/.github/ISSUE_TEMPLATE/reimplementation_questions.md @@ -2,9 +2,8 @@ name: Reimplementation Questions about: Ask about questions during model reimplementation title: '' -labels: 'reimplementation' +labels: reimplementation assignees: '' - --- If you feel we have help you, give us a STAR! :satisfied: @@ -54,7 +53,7 @@ A placeholder for the config. 1. Please run `PYTHONPATH=${PWD}:$PYTHONPATH python mmaction/utils/collect_env.py` to collect necessary environment information and paste it here. 2. You may add addition that may be helpful for locating the problem, such as - 1. How you installed PyTorch [e.g., pip, conda, source] + 1. How you installed PyTorch \[e.g., pip, conda, source\] 2. Other environment variables that may be related (such as `$PATH`, `$LD_LIBRARY_PATH`, `$PYTHONPATH`, etc.) **Results** diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index a306b42760..68b58a2b21 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -17,10 +17,6 @@ jobs: python-version: 3.7 - name: Install pre-commit hook run: | - # markdownlint requires ruby >= 2.7 - sudo apt-add-repository ppa:brightbox/ruby-ng -y - sudo apt-get update - sudo apt-get install -y ruby2.7 pip install pre-commit pre-commit install - name: Linting diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 524e2eb267..9583e29727 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ exclude: ^tests/data/ repos: - - repo: https://gitlab.com/pycqa/flake8.git + - repo: https://github.com/PyCQA/flake8 rev: 3.8.3 hooks: - id: flake8 @@ -25,11 +25,15 @@ repos: args: ["--remove"] - id: mixed-line-ending args: ["--fix=lf"] - - repo: https://github.com/markdownlint/markdownlint - rev: v0.11.0 - hooks: - - id: markdownlint - args: [ "-r", "~MD002,~MD013,~MD024,~MD029,~MD033,~MD034,~MD036" ] + - repo: https://github.com/executablebooks/mdformat + rev: 0.7.14 + hooks: + - id: mdformat + args: ["--number"] + additional_dependencies: + - mdformat-gfm + - mdformat_frontmatter + - linkify-it-py - repo: https://github.com/myint/docformatter rev: v1.3.1 hooks: @@ -39,7 +43,7 @@ repos: rev: v2.1.0 hooks: - id: codespell - args: ["--skip", "*.ipynb,tools/data/hvu/label_map.json", "-L", "te,nd,thre,Gool,gool"] + args: ["--skip", "*.ipynb,tools/data/hvu/label_map.json,docs_zh_CN/*", "-L", "te,nd,thre,Gool,gool"] - repo: https://github.com/open-mmlab/pre-commit-hooks rev: v0.2.0 # Use the ref you want to point at hooks: diff --git a/README.md b/README.md index 042384ada0..01d4459282 100644 --- a/README.md +++ b/README.md @@ -17,12 +17,13 @@
    - [📘Documentation](https://mmaction2.readthedocs.io/en/latest/) | - [🛠️Installation](https://mmaction2.readthedocs.io/en/latest/install.html) | - [👀Model Zoo](https://mmaction2.readthedocs.io/en/latest/modelzoo.html) | - [🆕Update News](https://mmaction2.readthedocs.io/en/latest/changelog.html) | - [🚀Ongoing Projects](https://github.com/open-mmlab/mmaction2/projects) | - [🤔Reporting Issues](https://github.com/open-mmlab/mmaction2/issues/new/choose) +[📘Documentation](https://mmaction2.readthedocs.io/en/latest/) | +[🛠️Installation](https://mmaction2.readthedocs.io/en/latest/install.html) | +[👀Model Zoo](https://mmaction2.readthedocs.io/en/latest/modelzoo.html) | +[🆕Update News](https://mmaction2.readthedocs.io/en/latest/changelog.html) | +[🚀Ongoing Projects](https://github.com/open-mmlab/mmaction2/projects) | +[🤔Reporting Issues](https://github.com/open-mmlab/mmaction2/issues/new/choose) + ## Introduction @@ -264,9 +265,9 @@ Please refer to [FAQ](docs/faq.md) for frequently asked questions. Currently, there are many research works and projects built on MMAction2 by users from community, such as: -- Video Swin Transformer. [[paper]](https://arxiv.org/abs/2106.13230)[[github]](https://github.com/SwinTransformer/Video-Swin-Transformer) -- Evidential Deep Learning for Open Set Action Recognition, ICCV 2021 **Oral**. [[paper]](https://arxiv.org/abs/2107.10161)[[github]](https://github.com/Cogito2012/DEAR) -- Rethinking Self-supervised Correspondence Learning: A Video Frame-level Similarity Perspective, ICCV 2021 **Oral**. [[paper]](https://arxiv.org/abs/2103.17263)[[github]](https://github.com/xvjiarui/VFS) +- Video Swin Transformer. [\[paper\]](https://arxiv.org/abs/2106.13230)[\[github\]](https://github.com/SwinTransformer/Video-Swin-Transformer) +- Evidential Deep Learning for Open Set Action Recognition, ICCV 2021 **Oral**. [\[paper\]](https://arxiv.org/abs/2107.10161)[\[github\]](https://github.com/Cogito2012/DEAR) +- Rethinking Self-supervised Correspondence Learning: A Video Frame-level Similarity Perspective, ICCV 2021 **Oral**. [\[paper\]](https://arxiv.org/abs/2103.17263)[\[github\]](https://github.com/xvjiarui/VFS) etc., check [projects.md](docs/projects.md) to see all related projects. diff --git a/README_zh-CN.md b/README_zh-CN.md index 27e4e35898..f6b5fb4514 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -17,12 +17,12 @@ - [📘文档](https://mmaction2.readthedocs.io/en/latest/) | - [🛠️安装指南](https://mmaction2.readthedocs.io/en/latest/install.html) | - [👀模型库](https://mmaction2.readthedocs.io/en/latest/modelzoo.html) | - [🆕更新](https://mmaction2.readthedocs.io/en/latest/changelog.html) | - [🚀进行中项目](https://github.com/open-mmlab/mmaction2/projects) | - [🤔问题反馈](https://github.com/open-mmlab/mmaction2/issues/new/choose) +[📘文档](https://mmaction2.readthedocs.io/en/latest/) | +[🛠️安装指南](https://mmaction2.readthedocs.io/en/latest/install.html) | +[👀模型库](https://mmaction2.readthedocs.io/en/latest/modelzoo.html) | +[🆕更新](https://mmaction2.readthedocs.io/en/latest/changelog.html) | +[🚀进行中项目](https://github.com/open-mmlab/mmaction2/projects) | +[🤔问题反馈](https://github.com/open-mmlab/mmaction2/issues/new/choose) @@ -258,9 +258,9 @@ MMAction2 将跟进学界的最新进展,并支持更多算法和框架。如 目前有许多研究工作或工程项目基于 MMAction2 搭建,例如: -- Evidential Deep Learning for Open Set Action Recognition, ICCV 2021 **Oral**. [[论文]](https://arxiv.org/abs/2107.10161)[[代码]](https://github.com/Cogito2012/DEAR) -- Rethinking Self-supervised Correspondence Learning: A Video Frame-level Similarity Perspective, ICCV 2021 **Oral**. 
[[论文]](https://arxiv.org/abs/2103.17263)[[代码]](https://github.com/xvjiarui/VFS) -- Video Swin Transformer. [[论文]](https://arxiv.org/abs/2106.13230)[[代码]](https://github.com/SwinTransformer/Video-Swin-Transformer) +- Evidential Deep Learning for Open Set Action Recognition, ICCV 2021 **Oral**. [\[论文\]](https://arxiv.org/abs/2107.10161)[\[代码\]](https://github.com/Cogito2012/DEAR) +- Rethinking Self-supervised Correspondence Learning: A Video Frame-level Similarity Perspective, ICCV 2021 **Oral**. [\[论文\]](https://arxiv.org/abs/2103.17263)[\[代码\]](https://github.com/xvjiarui/VFS) +- Video Swin Transformer. [\[论文\]](https://arxiv.org/abs/2106.13230)[\[代码\]](https://github.com/SwinTransformer/Video-Swin-Transformer) 更多详情可见 [相关工作](docs/projects.md) diff --git a/configs/detection/acrn/README.md b/configs/detection/acrn/README.md index cd9ff26f5c..18574fcb7b 100644 --- a/configs/detection/acrn/README.md +++ b/configs/detection/acrn/README.md @@ -11,6 +11,7 @@ Current state-of-the-art approaches for spatio-temporal action localization rely on detections at the frame level and model temporal context with 3D ConvNets. Here, we go one step further and model spatio-temporal relations to capture the interactions between human actors, relevant objects and scene elements essential to differentiate similar human actions. Our approach is weakly supervised and mines the relevant elements automatically with an actor-centric relational network (ACRN). ACRN computes and accumulates pair-wise relation information from actor and global scene features, and generates relation features for action classification. It is implemented as neural networks and can be trained jointly with an existing action detection system. We show that ACRN outperforms alternative approaches which capture relation information, and that the proposed framework improves upon the state-of-the-art performance on JHMDB and AVA. A visualization of the learned relation features confirms that our approach is able to attend to the relevant relations for each action. +
    @@ -19,14 +20,14 @@ Current state-of-the-art approaches for spatio-temporal action localization rely ### AVA2.1 -| Model | Modality | Pretrained | Backbone | Input | gpus | mAP | log | json | ckpt | -| :----------------------------------------------------------: | :------: | :----------: | :------: | :---: | :--: | :--: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| Model | Modality | Pretrained | Backbone | Input | gpus | mAP | log | json | ckpt | +| :---------------------------------------------------------------------------------------------------------------------------------------------------------: | :------: | :----------: | :------: | :---: | :--: | :--: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | | [slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb](/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 27.1 | [log](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb-49b07bf2.pth) | ### AVA2.2 -| Model | Modality | Pretrained | Backbone | Input | gpus | mAP | log | json | ckpt | -| :----------------------------------------------------------: | :------: | :----------: | :------: | :---: | :--: | :--: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| Model | Modality | Pretrained | Backbone | Input | gpus | mAP | log | json | ckpt | +| :-------------------------------------------------------------------------------------------------------------------------------------------------------------: | :------: | :----------: | :------: | :---: | :--: | :--: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | | 
[slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb](/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 27.8 | [log](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-2be32625.pth) | :::{note} diff --git a/configs/detection/acrn/README_zh-CN.md b/configs/detection/acrn/README_zh-CN.md index 3ec59cc495..c4ab0f1019 100644 --- a/configs/detection/acrn/README_zh-CN.md +++ b/configs/detection/acrn/README_zh-CN.md @@ -30,15 +30,15 @@ ### AVA2.1 -| 配置文件 | 模态 | 预训练 | 主干网络 | 输入 | GPU 数量 | mAP | log | json | ckpt | -| :----------------------------------------------------------: | :------: | :----------: | :------: | :---: | :--: | :--: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb](/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 27.1 | [log](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb-49b07bf2.pth) | +| 配置文件 | 模态 | 预训练 | 主干网络 | 输入 | GPU 数量 | mAP | log | json | ckpt | +| :---------------------------------------------------------------------------------------------------------------------------------------------------------: | :-: | :----------: | :------: | :--: | :----: | :--: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb](/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 27.1 | [log](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.log) | 
[json](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava_rgb-49b07bf2.pth) | ### AVA2.2 -| 配置文件 | 模态 | 预训练 | 主干网络 | 输入 | GPU 数量 | mAP | log | json | ckpt | -| :----------------------------------------------------------: | :------: | :----------: | :------: | :---: | :--: | :--: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb](/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 27.8 | [log](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-2be32625.pth) | +| 配置文件 | 模态 | 预训练 | 主干网络 | 输入 | GPU 数量 | mAP | log | json | ckpt | +| :-------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-: | :----------: | :------: | :--: | :----: | :--: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb](/configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 27.8 | [log](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-2be32625.pth) | - 注: @@ -62,7 +62,7 @@ python tools/train.py ${CONFIG_FILE} [optional arguments] python tools/train.py configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py --validate ``` -更多训练细节,可参考 
[基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E8%AE%AD%E7%BB%83%E9%85%8D%E7%BD%AE) 中的 **训练配置** 部分。 ## 如何测试 @@ -78,4 +78,4 @@ python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] python tools/test.py configs/detection/acrn/slowfast_acrn_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py checkpoints/SOME_CHECKPOINT.pth --eval mAP --out results.csv ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86) 中的 **测试某个数据集** 部分。 diff --git a/configs/detection/ava/README.md b/configs/detection/ava/README.md index befcf88b0f..f46a3961bf 100644 --- a/configs/detection/ava/README.md +++ b/configs/detection/ava/README.md @@ -16,6 +16,7 @@ This paper introduces a video dataset of spatio-temporally localized Atomic Visu AVA, with its realistic scene and action complexity, exposes the intrinsic difficulty of action recognition. To benchmark this, we present a novel approach for action localization that builds upon the current state-of-the-art methods, and demonstrates better performance on JHMDB and UCF101-24 categories. While setting a new state of the art on existing datasets, the overall results on AVA are low at 15.6% mAP, underscoring the need for developing new approaches for video understanding. +
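A side note on the anchor rewrites in the Chinese README hunks above, where `#训练配置` becomes `#%E8%AE%AD%E7%BB%83%E9%85%8D%E7%BD%AE` and `#测试某个数据集` becomes `#%E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86`: the opaque fragments are simply the UTF-8 percent-encoding of the original headings, presumably applied so the links keep working on renderers that reject raw non-ASCII URL fragments. The encoding is reproducible with the Python standard library; the snippet below is an illustration, not part of the patch.

```python
from urllib.parse import quote

# Percent-encode the Chinese heading anchors rewritten in the diffs above.
print(quote('训练配置'))      # %E8%AE%AD%E7%BB%83%E9%85%8D%E7%BD%AE
print(quote('测试某个数据集'))  # %E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86
```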
    @@ -36,24 +37,24 @@ AVA, with its realistic scene and action complexity, exposes the intrinsic diffi ### AVA2.1 -| Model | Modality | Pretrained | Backbone | Input | gpus | Resolution | mAP | log | json | ckpt | -| :----------------------------------------------------------: | :------: | :----------: | :-------: | :---: | :--: | :------------: | :--: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 4x16 | 8 | short-side 256 | 20.1 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201127.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201127.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217-40061d5f.pth) | -| [slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb.py) | RGB | OmniSource | ResNet50 | 4x16 | 8 | short-side 256 | 21.8 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb_20201127.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb_20201127.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb_20201217-0c6d2e98.pth) | -| [slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb](/configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 4x16 | 8 | short-side 256 | 21.75 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb/20210316_122517.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb/20210316_122517.log.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb_20210316-959829ec.pth) | -| [slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb](/configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 8x8 | 8x2 | short-side 256 | 23.79 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb/20210316_122517.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb/20210316_122517.log.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb_20210316-5742e4dd.pth) | -| 
[slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb](/configs/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet101 | 8x8 | 8x2 | short-side 256 | 24.6 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb_20201127.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb_20201127.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb_20201217-1c9b4117.pth) | -| [slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb](/configs/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb.py) | RGB | OmniSource | ResNet101 | 8x8 | 8x2 | short-side 256 | 25.9 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201127.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201127.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201217-16378594.pth) | -| [slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8x2 | short-side 256 | 24.4 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217-6e7c704d.pth) | -| [slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8x2 | short-side 256 | 25.4 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201222.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201222.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201222-f4d209c9.pth) | -| [slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb](/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8x2 | short-side 256 | 25.5 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217.log) | 
[json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217-ae225e97.pth) | +| Model | Modality | Pretrained | Backbone | Input | gpus | Resolution | mAP | log | json | ckpt | +| :--------------------------------------------------------------------------------------------------------------------------------------------------: | :------: | :----------: | :-------: | :---: | :--: | :------------: | :---: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 4x16 | 8 | short-side 256 | 20.1 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201127.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201127.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217-40061d5f.pth) | +| [slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb.py) | RGB | OmniSource | ResNet50 | 4x16 | 8 | short-side 256 | 21.8 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb_20201127.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb_20201127.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb_20201217-0c6d2e98.pth) | +| [slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb](/configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 4x16 | 8 | short-side 256 | 21.75 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb/20210316_122517.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb/20210316_122517.log.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb_20210316-959829ec.pth) | +| 
[slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb](/configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 8x8 | 8x2 | short-side 256 | 23.79 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb/20210316_122517.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb/20210316_122517.log.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb_20210316-5742e4dd.pth) | +| [slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb](/configs/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet101 | 8x8 | 8x2 | short-side 256 | 24.6 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb_20201127.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb_20201127.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb_20201217-1c9b4117.pth) | +| [slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb](/configs/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb.py) | RGB | OmniSource | ResNet101 | 8x8 | 8x2 | short-side 256 | 25.9 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201127.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201127.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201217-16378594.pth) | +| [slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8x2 | short-side 256 | 24.4 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217-6e7c704d.pth) | +| [slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8x2 | short-side 256 | 25.4 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201222.log) | 
[json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201222.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201222-f4d209c9.pth) | +| [slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb](/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8x2 | short-side 256 | 25.5 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217-ae225e97.pth) | ### AVA2.2 -| Model | Modality | Pretrained | Backbone | Input | gpus | mAP | log | json | ckpt | -| :----------------------------------------------------------: | :------: | :----------: | :------: | :---: | :--: | :--: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb](/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 26.1 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-b987b516.pth) | -| [slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb](/configs/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 26.4 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-874e0845.pth) | +| Model | Modality | Pretrained | Backbone | Input | gpus | mAP | log | json | ckpt | +| :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :------: | 
:----------: | :------: | :---: | :--: | :--: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb](/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 26.1 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-b987b516.pth) | +| [slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb](/configs/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 26.4 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-874e0845.pth) | | [slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb](/configs/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 26.8 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-345618cd.pth) | :::{note} @@ -98,12 +99,12 @@ Three 
steps to train custom classes: Take `slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb` as an example, training custom classes with AP in range `(0.1, 0.3)`, aka `[3, 6, 10, 27, 29, 38, 41, 48, 51, 53, 54, 59, 61, 64, 70, 72]`. Please note that, the previously mentioned AP is calculated by original ckpt, which is trained by all 80 classes. The results are listed as follows. -|training classes|mAP(custom classes)|config|log|json|ckpt| -|:-:|:-:|:-:|:-:|:-:|:-:| -|All 80 classes|0.1948|[slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py)|[log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201127.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201127.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217-40061d5f.pth) | -|custom classes|0.3311|[slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py)| [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes-4ab80419.pth) | -|All 80 classes|0.1864|[slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py](/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py)| [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217-6e7c704d.pth) | -|custom classes|0.3785|[slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes](/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py)| [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes_20210305.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes_20210305.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes_20210305-c6225546.pth) | +| training classes | mAP(custom 
classes) | config | log | json | ckpt | +| :--------------: | :-----------------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| All 80 classes | 0.1948 | [slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201127.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201127.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217-40061d5f.pth) | +| custom classes | 0.3311 | [slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py) | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes-4ab80419.pth) | +| All 80 classes | 0.1864 | [slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py](/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217-6e7c704d.pth) | +| custom classes | 0.3785 | [slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes](/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py) | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes_20210305.log) | 
[json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes_20210305.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes_20210305-c6225546.pth) | ## Test diff --git a/configs/detection/ava/README_zh-CN.md b/configs/detection/ava/README_zh-CN.md index 6cd82f4a3e..863d1fe849 100644 --- a/configs/detection/ava/README_zh-CN.md +++ b/configs/detection/ava/README_zh-CN.md @@ -45,25 +45,25 @@ ### AVA2.1 -| 配置文件 | 模态 | 预训练 | 主干网络 | 输入 | GPU 数量 | 分辨率 | mAP | log | json | ckpt | -| :----------------------------------------------------------: | :------: | :----------: | :-------: | :---: | :--: | :------------: | :--: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 4x16 | 8 | 短边 256 | 20.1 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201127.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201127.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217-40061d5f.pth) | -| [slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb.py) | RGB | OmniSource | ResNet50 | 4x16 | 8 | 短边 256 | 21.8 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb_20201127.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb_20201127.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb_20201217-0c6d2e98.pth) | -| [slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb](/configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 4x16 | 8 | 短边 256 | 21.75 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb/20210316_122517.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb/20210316_122517.log.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb_20210316-959829ec.pth) | -| [slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb](/configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 8x8 | 8x2 | 短边 256 | 23.79 | 
[log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb/20210316_122517.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb/20210316_122517.log.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb_20210316-5742e4dd.pth) | -| [slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb](/configs/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet101 | 8x8 | 8x2 | 短边 256 | 24.6 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb_20201127.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb_20201127.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb_20201217-1c9b4117.pth) | -| [slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb](/configs/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb.py) | RGB | OmniSource | ResNet101 | 8x8 | 8x2 | 短边 256 | 25.9 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201127.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201127.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201217-16378594.pth) | -| [slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8x2 | 短边 256 | 24.4 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217-6e7c704d.pth) | -| [slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8x2 | 短边 256 | 25.4 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201222.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201222.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201222-f4d209c9.pth) 
| -| [slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb](/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8x2 | 短边 256 | 25.5 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217-ae225e97.pth) | +| 配置文件 | 模态 | 预训练 | 主干网络 | 输入 | GPU 数量 | 分辨率 | mAP | log | json | ckpt | +| :--------------------------------------------------------------------------------------------------------------------------------------------------: | :-: | :----------: | :-------: | :--: | :----: | :----: | :---: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 4x16 | 8 | 短边 256 | 20.1 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201127.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201127.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217-40061d5f.pth) | +| [slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb.py) | RGB | OmniSource | ResNet50 | 4x16 | 8 | 短边 256 | 21.8 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb_20201127.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb_20201127.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_omnisource_pretrained_r50_4x16x1_20e_ava_rgb_20201217-0c6d2e98.pth) | +| [slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb](/configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 4x16 | 8 | 短边 256 | 21.75 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb/20210316_122517.log) | 
[json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb/20210316_122517.log.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb/slowonly_nl_kinetics_pretrained_r50_4x16x1_10e_ava_rgb_20210316-959829ec.pth) | +| [slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb](/configs/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 8x8 | 8x2 | 短边 256 | 23.79 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb/20210316_122517.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb/20210316_122517.log.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb/slowonly_nl_kinetics_pretrained_r50_8x8x1_10e_ava_rgb_20210316-5742e4dd.pth) | +| [slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb](/configs/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet101 | 8x8 | 8x2 | 短边 256 | 24.6 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb_20201127.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb_20201127.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_kinetics_pretrained_r101_8x8x1_20e_ava_rgb_20201217-1c9b4117.pth) | +| [slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb](/configs/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb.py) | RGB | OmniSource | ResNet101 | 8x8 | 8x2 | 短边 256 | 25.9 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201127.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201127.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb/slowonly_omnisource_pretrained_r101_8x8x1_20e_ava_rgb_20201217-16378594.pth) | +| [slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8x2 | 短边 256 | 24.4 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217-6e7c704d.pth) | +| [slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8x2 | 短边 256 | 25.4 | 
[log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201222.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201222.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_context_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201222-f4d209c9.pth) | +| [slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb](/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8x2 | 短边 256 | 25.5 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_8x8x1_20e_ava_rgb_20201217-ae225e97.pth) | ### AVA2.2 -| 配置文件 | 模态 | 预训练 | 主干网络 | 输入 | GPU 数量 | mAP | log | json | ckpt | -| :----------------------------------------------------------: | :--: | :----------: | :------: | :--: | :------: | :--: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb](/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 26.1 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-b987b516.pth) | -| [slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb](/configs/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 26.4 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-874e0845.pth) | -| 
[slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb](/configs/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 26.8 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-345618cd.pth) | +| 配置文件 | 模态 | 预训练 | 主干网络 | 输入 | GPU 数量 | mAP | log | json | ckpt | +| :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-: | :----------: | :------: | :--: | :----: | :--: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb](/configs/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 26.1 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-b987b516.pth) | +| [slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb](/configs/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 26.4 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log) | 
[json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-874e0845.pth) | +| [slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb](/configs/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.py) | RGB | Kinetics-400 | ResNet50 | 32x2 | 8 | 26.8 | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb/slowfast_temporal_max_focal_alpha3_gamma1_kinetics_pretrained_r50_8x8x1_cosine_10e_ava22_rgb-345618cd.pth) | 注: @@ -88,7 +88,7 @@ python tools/train.py ${CONFIG_FILE} [optional arguments] python tools/train.py configs/detection/ava/slowonly_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py --validate ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E8%AE%AD%E7%BB%83%E9%85%8D%E7%BD%AE) 中的 **训练配置** 部分。 ### 训练 AVA 数据集中的自定义类别 @@ -105,12 +105,12 @@ python tools/train.py configs/detection/ava/slowonly_kinetics_pretrained_r50_8x8 以 `slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb` 为例,这一配置文件训练所有 AP 在 `(0.1, 0.3)` 间的类别(这里的 AP 为 AVA 80 类训出模型的表现),即 `[3, 6, 10, 27, 29, 38, 41, 48, 51, 53, 54, 59, 61, 64, 70, 72]`。下表列出了自定义类别训练的模型精度: -|训练类别|mAP (自定义类别)|配置文件|log|json|ckpt| -|:-:|:-:|:-:|:-:|:-:|:-:| -|全部 80 类|0.1948|[slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py)|[log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201127.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201127.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217-40061d5f.pth) | -|自定义类别|0.3311|[slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py)| [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.json) | 
[ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes-4ab80419.pth) | -|全部 80 类|0.1864|[slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py)| [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217-6e7c704d.pth) | -|自定义类别|0.3785|[slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes](/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py)| [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes_20210305.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes_20210305.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes_20210305-c6225546.pth) | +| 训练类别 | mAP (自定义类别) | 配置文件 | log | json | ckpt | +| :-----: | :---------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| 全部 80 类 | 0.1948 | [slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | [log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201127.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201127.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217-40061d5f.pth) | +| 自定义类别 | 0.3311 | [slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py) | 
[log](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes-4ab80419.pth) | +| 全部 80 类 | 0.1864 | [slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb](/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_20201217-6e7c704d.pth) | +| 自定义类别 | 0.3785 | [slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes](/configs/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes.py) | [log](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes_20210305.log) | [json](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes_20210305.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/ava/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes/slowfast_kinetics_pretrained_r50_4x16x1_20e_ava_rgb_custom_classes_20210305-c6225546.pth) | ## 如何测试 @@ -126,4 +126,4 @@ python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] python tools/test.py configs/detection/ava/slowonly_kinetics_pretrained_r50_8x8x1_20e_ava_rgb.py checkpoints/SOME_CHECKPOINT.pth --eval mAP --out results.csv ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86) 中的 **测试某个数据集** 部分。 diff --git a/configs/detection/lfb/README.md b/configs/detection/lfb/README.md index dbdab545d5..0658acc9de 100644 --- a/configs/detection/lfb/README.md +++ b/configs/detection/lfb/README.md @@ -11,6 +11,7 @@ To understand the world, we humans constantly need to relate the present to the past, and put events in context. In this paper, we enable existing video models to do the same. We propose a long-term feature bank---supportive information extracted over the entire span of a video---to augment state-of-the-art video models that otherwise would only view short clips of 2-5 seconds. Our experiments demonstrate that augmenting 3D convolutional networks with a long-term feature bank yields state-of-the-art results on three challenging video datasets: AVA, EPIC-Kitchens, and Charades. +
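The abstract above describes the long-term feature bank only in prose. As a quick orientation, the snippet below is a minimal sketch of that idea under illustrative names and shapes, not the MMAction2 implementation: the feature of the current short clip is augmented with features pooled from a long temporal window cached over the whole video, mirroring the "avg" fusion variant benchmarked below.

```python
# Minimal sketch of a long-term feature bank (illustrative only, not the
# MMAction2 implementation): the current clip's feature is concatenated
# with features average-pooled from a long window of cached clip features.
import numpy as np

def lfb_augment(bank, center, window=30):
    """bank: (T, C) per-clip features cached for one video.
    center: index of the clip being classified right now.
    Returns the clip feature concatenated with its long-term context."""
    lo, hi = max(0, center - window), min(len(bank), center + window + 1)
    long_term = bank[lo:hi].mean(axis=0)               # pooled context, (C,)
    return np.concatenate([bank[center], long_term])   # fused feature, (2C,)

bank = np.random.rand(120, 256)                        # 120 cached clips
print(lfb_augment(bank, center=60).shape)              # (512,)
```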
    @@ -19,11 +20,11 @@ To understand the world, we humans constantly need to relate the present to the ### AVA2.1 -| Model | Modality | Pretrained | Backbone | Input | gpus | Resolution | mAP | log | json | ckpt | -| :----------------------------------------------------------: | :------: | :----------: | :-------: | :---: | :--: | :------------: | :--: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py](/configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | [slowonly_r50_4x16x1](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | 4x16 | 8 | short-side 256 | 24.11 | [log](https://download.openmmlab.com/mmaction/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210224_125052.log) | [json](https://download.openmmlab.com/mmaction/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210224_125052.log.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb_20210224-2ae136d9.pth) | -| [lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py](/configs/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | [slowonly_r50_4x16x1](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | 4x16 | 8 | short-side 256 | 20.17 | [log](https://download.openmmlab.com/mmaction/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log) | [json](https://download.openmmlab.com/mmaction/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb_20210301-19c330b7.pth) | -| [lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py](/configs/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | [slowonly_r50_4x16x1](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | 4x16 | 8 | short-side 256 | 22.15 | [log](https://download.openmmlab.com/mmaction/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log) | [json](https://download.openmmlab.com/mmaction/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb_20210301-37efcd15.pth) | +| Model | Modality | Pretrained | Backbone | Input | gpus | Resolution | mAP | log | json | ckpt | +| :-----------------------------------------------------------------------------------------------------------------------------------------------------: | :------: | :----------: | :--------------------------------------------------------------------------------------------------: | :---: | :--: | :------------: | :---: | :------------------------------------------------------------------------------------------------------------------------------------------: | 
:------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py](/configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | [slowonly_r50_4x16x1](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | 4x16 | 8 | short-side 256 | 24.11 | [log](https://download.openmmlab.com/mmaction/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210224_125052.log) | [json](https://download.openmmlab.com/mmaction/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210224_125052.log.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb_20210224-2ae136d9.pth) | +| [lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py](/configs/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | [slowonly_r50_4x16x1](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | 4x16 | 8 | short-side 256 | 20.17 | [log](https://download.openmmlab.com/mmaction/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log) | [json](https://download.openmmlab.com/mmaction/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb_20210301-19c330b7.pth) | +| [lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py](/configs/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | [slowonly_r50_4x16x1](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | 4x16 | 8 | short-side 256 | 22.15 | [log](https://download.openmmlab.com/mmaction/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log) | [json](https://download.openmmlab.com/mmaction/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb_20210301-37efcd15.pth) | :::{note} diff --git a/configs/detection/lfb/README_zh-CN.md b/configs/detection/lfb/README_zh-CN.md index 4c90a66bd5..dbf367f85c 100644 --- a/configs/detection/lfb/README_zh-CN.md +++ b/configs/detection/lfb/README_zh-CN.md @@ -18,11 +18,11 @@ ### AVA2.1 -| 配置文件 | 模态 | 预训练 | 主干网络 | 输入 | GPU 数量 | 分辨率 | 平均精度 | log | json | ckpt | -| :----------------------------------------------------------: | :------: | :----------: | :-------: | :---: | :--: | :------------: | :--: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| 
[lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py](/configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | [slowonly_r50_4x16x1](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | 4x16 | 8 | 短边 256 | 24.11 | [log](https://download.openmmlab.com/mmaction/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210224_125052.log) | [json](https://download.openmmlab.com/mmaction/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210224_125052.log.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb_20210224-2ae136d9.pth) | -| [lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py](/configs/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | [slowonly_r50_4x16x1](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | 4x16 | 8 | 短边 256 | 20.17 | [log](https://download.openmmlab.com/mmaction/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log) | [json](https://download.openmmlab.com/mmaction/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb_20210301-19c330b7.pth) | -| [lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py](/configs/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | [slowonly_r50_4x16x1](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | 4x16 | 8 | 短边 256 | 22.15 | [log](https://download.openmmlab.com/mmaction/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log) | [json](https://download.openmmlab.com/mmaction/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb_20210301-37efcd15.pth) | +| 配置文件 | 模态 | 预训练 | 主干网络 | 输入 | GPU 数量 | 分辨率 | 平均精度 | log | json | ckpt | +| :-----------------------------------------------------------------------------------------------------------------------------------------------------: | :-: | :----------: | :--------------------------------------------------------------------------------------------------: | :--: | :----: | :----: | :---: | :------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py](/configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | 
[slowonly_r50_4x16x1](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | 4x16 | 8 | 短边 256 | 24.11 | [log](https://download.openmmlab.com/mmaction/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210224_125052.log) | [json](https://download.openmmlab.com/mmaction/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210224_125052.log.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/lfb_nl_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb_20210224-2ae136d9.pth) | +| [lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py](/configs/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | [slowonly_r50_4x16x1](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | 4x16 | 8 | 短边 256 | 20.17 | [log](https://download.openmmlab.com/mmaction/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log) | [json](https://download.openmmlab.com/mmaction/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/lfb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/lfb_avg_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb_20210301-19c330b7.pth) | +| [lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py](/configs/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb.py) | RGB | Kinetics-400 | [slowonly_r50_4x16x1](/configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py) | 4x16 | 8 | 短边 256 | 22.15 | [log](https://download.openmmlab.com/mmaction/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log) | [json](https://download.openmmlab.com/mmaction/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/20210301_124812.log.json) | [ckpt](https://download.openmmlab.com/mmaction/detection/lfb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb/lfb_max_kinetics_pretrained_slowonly_r50_4x16x1_20e_ava_rgb_20210301-37efcd15.pth) | - 注: @@ -31,7 +31,7 @@ 如,lr=0.01 对应 4 GPUs x 2 video/gpu,以及 lr=0.08 对应 16 GPUs x 4 video/gpu。 2. 本 LFB 模型暂没有使用原论文中的 `I3D-R50-NL` 作为主干网络,而是用 `slowonly_r50_4x16x1` 替代,但取得了同样的提升效果:(本模型:20.1 -> 24.11 而原论文模型:22.1 -> 25.8)。 3. 因为测试时,长时特征是被随机采样的,所以测试精度可能有一些偏差。 -4. 在训练或测试 LFB 之前,用户需要使用配置文件特征库 [lfb_slowonly_r50_ava_infer.py](/configs/detection/lfb/lfb_slowonly_r50_ava_infer.py) 来推导长时特征库。有关推导长时特征库的更多细节,请参照[训练部分](#训练)。 +4. 在训练或测试 LFB 之前,用户需要使用配置文件特征库 [lfb_slowonly_r50_ava_infer.py](/configs/detection/lfb/lfb_slowonly_r50_ava_infer.py) 来推导长时特征库。有关推导长时特征库的更多细节,请参照[训练部分](#%E8%AE%AD%E7%BB%83)。 5. 
用户也可以直接从 [AVA_train_val_float32_lfb](https://download.openmmlab.com/mmaction/detection/lfb/AVA_train_val_float32_lfb.rar) 或者 [AVA_train_val_float16_lfb](https://download.openmmlab.com/mmaction/detection/lfb/AVA_train_val_float16_lfb.rar) 下载 float32 或 float16 的长时特征库,并把它们放在 `lfb_prefix_path` 上。

## 训练

@@ -75,7 +75,7 @@ python tools/train.py configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_
    --validate --seed 0 --deterministic
 ```

-更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。
+更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E8%AE%AD%E7%BB%83%E9%85%8D%E7%BD%AE) 中的 **训练配置** 部分。

## 测试

@@ -100,4 +100,4 @@ python tools/test.py configs/detection/lfb/lfb_nl_kinetics_pretrained_slowonly_r
    checkpoints/SOME_CHECKPOINT.pth --eval mAP --out results.csv
 ```

-更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。
+更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86) 中的 **测试某个数据集** 部分。
diff --git a/configs/localization/bmn/README.md b/configs/localization/bmn/README.md
index 16cdbbe568..ccf07450a0 100644
--- a/configs/localization/bmn/README.md
+++ b/configs/localization/bmn/README.md
@@ -11,6 +11,7 @@

 Temporal action proposal generation is a challenging and promising task which aims to locate temporal regions in real-world videos where an action or event may occur. Current bottom-up proposal generation methods can generate proposals with precise boundaries, but cannot efficiently generate adequately reliable confidence scores for retrieving proposals. To address these difficulties, we introduce the Boundary-Matching (BM) mechanism to evaluate confidence scores of densely distributed proposals, which denotes a proposal as a matching pair of starting and ending boundaries and combines all densely distributed BM pairs into the BM confidence map. Based on the BM mechanism, we propose an effective, efficient and end-to-end proposal generation method, named Boundary-Matching Network (BMN), which generates proposals with precise temporal boundaries as well as reliable confidence scores simultaneously. The two branches of BMN are jointly trained in a unified framework. We conduct experiments on two challenging datasets: THUMOS-14 and ActivityNet-1.3, where BMN shows significant performance improvement with remarkable efficiency and generalizability. Further, combined with an existing action classifier, BMN can achieve state-of-the-art temporal action detection performance.
+
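Since the hunks that follow mostly reflow the results tables, a compact illustration of the Boundary-Matching mechanism itself may help. The sketch below is a toy under assumed names, not the BMN code in this repository: every (duration, start) pair indexes one densely enumerated proposal in a 2D confidence map, and boundary probabilities stand in for the confidence branch that BMN actually learns.

```python
# Toy Boundary-Matching confidence map (illustrative, not BMN's code):
# bm_map[d, s] scores the proposal starting at snippet s and lasting
# d + 1 snippets, so all densely distributed proposals live in one 2D map.
import numpy as np

T = 100                                      # number of temporal snippets
rng = np.random.default_rng(0)
start_prob, end_prob = rng.random(T), rng.random(T)

bm_map = np.zeros((T, T))                    # rows: duration - 1, cols: start
for s in range(T):
    for d in range(T - s):
        bm_map[d, s] = start_prob[s] * end_prob[s + d]

d, s = np.unravel_index(bm_map.argmax(), bm_map.shape)
print(f"highest-scoring proposal covers snippets [{s}, {s + d}]")
```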
    @@ -19,12 +20,12 @@ Temporal action proposal generation is an challenging and promising task which a ### ActivityNet feature -|config |feature | gpus | AR@100| AUC | AP@0.5 | AP@0.75 | AP@0.95 | mAP | gpu_mem(M) | iter time(s) | ckpt | log| json| -|:-:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:-:|---|:-:|:-:|---| -|[bmn_400x100_9e_2x8_activitynet_feature](/configs/localization/bmn/bmn_400x100_2x8_9e_activitynet_feature.py) |cuhk_mean_100 |2|75.28|67.22|42.47|31.31|9.92|30.34|5420|3.27|[ckpt](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_9e_activitynet_feature/bmn_400x100_9e_activitynet_feature_20200619-42a3b111.pth)| [log](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_9e_activitynet_feature/bmn_400x100_9e_activitynet_feature.log)| [json](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_9e_activitynet_feature/bmn_400x100_9e_activitynet_feature.log.json)| -| |mmaction_video |2|75.43|67.22|42.62|31.56|10.86|30.77|5420|3.27|[ckpt](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_video/bmn_400x100_2x8_9e_mmaction_video_20200809-c9fd14d2.pth)| [log](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_video/bmn_400x100_2x8_9e_mmaction_video_20200809.log) | [json](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_video/bmn_400x100_2x8_9e_mmaction_video_20200809.json) | -| |mmaction_clip |2|75.35|67.38|43.08|32.19|10.73|31.15|5420|3.27|[ckpt](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809-10d803ce.pth)| [log](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809.log) | [json](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809.json) | -| [BMN-official](https://github.com/JJBOY/BMN-Boundary-Matching-Network) (for reference)* |cuhk_mean_100 |-|75.27|67.49|42.22|30.98|9.22|30.00|-|-|-| - | - | +| config | feature | gpus | AR@100 | AUC | AP@0.5 | AP@0.75 | AP@0.95 | mAP | gpu_mem(M) | iter time(s) | ckpt | log | json | +| :-----------------------------------------------------------------------------------------------------------: | :------------: | :--: | :----: | :---: | :----: | :-----: | :-----: | :---: | :--------: | ------------ | :----------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------------------: | -------------------------------------------------------------------------------------------------------------------------------------------------- | +| [bmn_400x100_9e_2x8_activitynet_feature](/configs/localization/bmn/bmn_400x100_2x8_9e_activitynet_feature.py) | cuhk_mean_100 | 2 | 75.28 | 67.22 | 42.47 | 31.31 | 9.92 | 30.34 | 5420 | 3.27 | [ckpt](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_9e_activitynet_feature/bmn_400x100_9e_activitynet_feature_20200619-42a3b111.pth) | [log](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_9e_activitynet_feature/bmn_400x100_9e_activitynet_feature.log) | 
[json](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_9e_activitynet_feature/bmn_400x100_9e_activitynet_feature.log.json) | +| | mmaction_video | 2 | 75.43 | 67.22 | 42.62 | 31.56 | 10.86 | 30.77 | 5420 | 3.27 | [ckpt](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_video/bmn_400x100_2x8_9e_mmaction_video_20200809-c9fd14d2.pth) | [log](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_video/bmn_400x100_2x8_9e_mmaction_video_20200809.log) | [json](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_video/bmn_400x100_2x8_9e_mmaction_video_20200809.json) | +| | mmaction_clip | 2 | 75.35 | 67.38 | 43.08 | 32.19 | 10.73 | 31.15 | 5420 | 3.27 | [ckpt](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809-10d803ce.pth) | [log](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809.log) | [json](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809.json) | +| [BMN-official](https://github.com/JJBOY/BMN-Boundary-Matching-Network) (for reference)\* | cuhk_mean_100 | - | 75.27 | 67.49 | 42.22 | 30.98 | 9.22 | 30.00 | - | - | - | - | - | :::{note} @@ -36,7 +37,7 @@ Temporal action proposal generation is an challenging and promising task which a ::: -*We train BMN with the [official repo](https://github.com/JJBOY/BMN-Boundary-Matching-Network), evaluate its proposal generation and action detection performance with [anet_cuhk_2017](https://download.openmmlab.com/mmaction/localization/cuhk_anet17_pred.json) for label assigning. +\*We train BMN with the [official repo](https://github.com/JJBOY/BMN-Boundary-Matching-Network), evaluate its proposal generation and action detection performance with [anet_cuhk_2017](https://download.openmmlab.com/mmaction/localization/cuhk_anet17_pred.json) for label assigning. For more details on data preparation, you can refer to ActivityNet feature in [Data Preparation](/docs/data_preparation.md). @@ -81,9 +82,9 @@ python tools/analysis/report_map.py --proposal path/to/proposal_file 1. (Optional) You can use the following command to generate a formatted proposal file, which will be fed into the action classifier (Currently supports SSN and P-GCN, not including TSN, I3D etc.) to get the classification result of proposals. 
- ```shell - python tools/data/activitynet/convert_proposal_format.py - ``` + ```shell + python tools/data/activitynet/convert_proposal_format.py + ``` ::: diff --git a/configs/localization/bmn/README_zh-CN.md b/configs/localization/bmn/README_zh-CN.md index 3778f390fa..fc88d8977e 100644 --- a/configs/localization/bmn/README_zh-CN.md +++ b/configs/localization/bmn/README_zh-CN.md @@ -30,12 +30,12 @@ ### ActivityNet feature -|配置文件 |特征 | GPU 数量 | AR@100| AUC | AP@0.5 | AP@0.75 | AP@0.95 | mAP | GPU 显存占用 (M) | 推理时间 (s) | ckpt | log| json| -|:-:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:-:|---|:-:|:-:|---| -|[bmn_400x100_9e_2x8_activitynet_feature](/configs/localization/bmn/bmn_400x100_2x8_9e_activitynet_feature.py) |cuhk_mean_100 |2|75.28|67.22|42.47|31.31|9.92|30.34|5420|3.27|[ckpt](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_9e_activitynet_feature/bmn_400x100_9e_activitynet_feature_20200619-42a3b111.pth)| [log](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_9e_activitynet_feature/bmn_400x100_9e_activitynet_feature.log)| [json](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_9e_activitynet_feature/bmn_400x100_9e_activitynet_feature.log.json)| -| |mmaction_video |2|75.43|67.22|42.62|31.56|10.86|30.77|5420|3.27|[ckpt](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_video/bmn_400x100_2x8_9e_mmaction_video_20200809-c9fd14d2.pth)| [log](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_video/bmn_400x100_2x8_9e_mmaction_video_20200809.log) | [json](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_video/bmn_400x100_2x8_9e_mmaction_video_20200809.json) | -| |mmaction_clip |2|75.35|67.38|43.08|32.19|10.73|31.15|5420|3.27|[ckpt](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809-10d803ce.pth)| [log](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809.log) | [json](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809.json) | -| [BMN-official](https://github.com/JJBOY/BMN-Boundary-Matching-Network) (for reference)* |cuhk_mean_100 |-|75.27|67.49|42.22|30.98|9.22|30.00|-|-|-| - | - | +| 配置文件 | 特征 | GPU 数量 | AR@100 | AUC | AP@0.5 | AP@0.75 | AP@0.95 | mAP | GPU 显存占用 (M) | 推理时间 (s) | ckpt | log | json | +| :-----------------------------------------------------------------------------------------------------------: | :------------: | :----: | :----: | :---: | :----: | :-----: | :-----: | :---: | :----------: | -------- | :----------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------------------: | -------------------------------------------------------------------------------------------------------------------------------------------------- | +| [bmn_400x100_9e_2x8_activitynet_feature](/configs/localization/bmn/bmn_400x100_2x8_9e_activitynet_feature.py) | cuhk_mean_100 | 2 | 75.28 | 67.22 | 42.47 | 31.31 | 9.92 | 30.34 | 5420 | 3.27 | 
[ckpt](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_9e_activitynet_feature/bmn_400x100_9e_activitynet_feature_20200619-42a3b111.pth) | [log](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_9e_activitynet_feature/bmn_400x100_9e_activitynet_feature.log) | [json](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_9e_activitynet_feature/bmn_400x100_9e_activitynet_feature.log.json) | +| | mmaction_video | 2 | 75.43 | 67.22 | 42.62 | 31.56 | 10.86 | 30.77 | 5420 | 3.27 | [ckpt](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_video/bmn_400x100_2x8_9e_mmaction_video_20200809-c9fd14d2.pth) | [log](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_video/bmn_400x100_2x8_9e_mmaction_video_20200809.log) | [json](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_video/bmn_400x100_2x8_9e_mmaction_video_20200809.json) | +| | mmaction_clip | 2 | 75.35 | 67.38 | 43.08 | 32.19 | 10.73 | 31.15 | 5420 | 3.27 | [ckpt](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809-10d803ce.pth) | [log](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809.log) | [json](https://download.openmmlab.com/mmaction/localization/bmn/bmn_400x100_2x8_9e_mmaction_clip/bmn_400x100_2x8_9e_mmaction_clip_20200809.json) | +| [BMN-official](https://github.com/JJBOY/BMN-Boundary-Matching-Network) (for reference)\* | cuhk_mean_100 | - | 75.27 | 67.49 | 42.22 | 30.98 | 9.22 | 30.00 | - | - | - | - | - | - 注: @@ -46,7 +46,7 @@ `mmaction_video` 和 `mmaction_clip` 分布表示所使用的特征为利用 MMAction 抽取的,视频级别 ActivityNet 预训练模型的特征;视频片段级别 ActivityNet 预训练模型的特征。 3. MMAction2 使用 ActivityNet2017 未剪辑视频分类赛道上 [anet_cuhk_2017](https://download.openmmlab.com/mmaction/localization/cuhk_anet17_pred.json) 所提交的结果来为每个视频的时序动作候选指定标签,以用于 BMN 模型评估。 -*MMAction2 在 [原始代码库](https://github.com/JJBOY/BMN-Boundary-Matching-Network) 上训练 BMN,并且在 [anet_cuhk_2017](https://download.openmmlab.com/mmaction/localization/cuhk_anet17_pred.json) 的对应标签上评估时序动作候选生成和时序检测的结果。 +\*MMAction2 在 [原始代码库](https://github.com/JJBOY/BMN-Boundary-Matching-Network) 上训练 BMN,并且在 [anet_cuhk_2017](https://download.openmmlab.com/mmaction/localization/cuhk_anet17_pred.json) 的对应标签上评估时序动作候选生成和时序检测的结果。 对于数据集准备的细节,用户可参考 [数据集准备文档](/docs_zh_CN/data_preparation.md) 中的 ActivityNet 特征部分。 @@ -64,7 +64,7 @@ python tools/train.py ${CONFIG_FILE} [optional arguments] python tools/train.py configs/localization/bmn/bmn_400x100_2x8_9e_activitynet_feature.py ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E8%AE%AD%E7%BB%83%E9%85%8D%E7%BD%AE) 中的 **训练配置** 部分。 ## 如何测试 @@ -91,8 +91,8 @@ python tools/analysis/report_map.py --proposal path/to/proposal_file 1. 
(可选项) 用户可以使用以下指令生成格式化的时序动作候选文件,该文件可被送入动作识别器中(目前只支持 SSN 和 P-GCN,不包括 TSN, I3D 等),以获得时序动作候选的分类结果。

- ```shell
- python tools/data/activitynet/convert_proposal_format.py
- ```
+ ```shell
+ python tools/data/activitynet/convert_proposal_format.py
+ ```

-更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。
+更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86) 中的 **测试某个数据集** 部分。
diff --git a/configs/localization/bsn/README.md b/configs/localization/bsn/README.md
index 1ec09467ab..c307cb15d8 100644
--- a/configs/localization/bsn/README.md
+++ b/configs/localization/bsn/README.md
@@ -11,6 +11,7 @@

 Temporal action proposal generation is an important yet challenging problem, since temporal proposals with rich action content are indispensable for analysing real-world videos with long duration and a high proportion of irrelevant content. This problem requires methods that not only generate proposals with precise temporal boundaries, but also retrieve proposals that cover ground-truth action instances with high recall and high overlap using relatively fewer proposals. To address these difficulties, we introduce an effective proposal generation method, named Boundary-Sensitive Network (BSN), which adopts a "local to global" fashion. Locally, BSN first locates temporal boundaries with high probabilities, then directly combines these boundaries as proposals. Globally, with the Boundary-Sensitive Proposal feature, BSN retrieves proposals by evaluating the confidence of whether a proposal contains an action within its region. We conduct experiments on two challenging datasets: ActivityNet-1.3 and THUMOS14, where BSN outperforms other state-of-the-art temporal action proposal generation methods with high recall and high temporal precision. Finally, further experiments demonstrate that by combining existing action classifiers, our method significantly improves the state-of-the-art temporal action detection performance.
+
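To make the "local to global" wording above concrete, here is a rough sketch under assumed names, not the code under configs/localization/bsn: keep temporal positions whose start/end probability is high or peaks locally, then pair every start with each later end to form candidate proposals, which the PEM stage would then rank.

```python
# Rough sketch of "local to global" proposal generation (illustrative only):
# select boundary positions that exceed a threshold or are local maxima,
# then combine every start with every later end into a candidate proposal.
import numpy as np

def boundary_peaks(prob, thr=0.7):
    """Indices where prob exceeds thr or is a strict local maximum."""
    keep = []
    for t, p in enumerate(prob):
        left = prob[t - 1] if t > 0 else -1.0
        right = prob[t + 1] if t + 1 < len(prob) else -1.0
        if p > thr or (p > left and p > right):
            keep.append(t)
    return keep

rng = np.random.default_rng(0)
start_prob, end_prob = rng.random(100), rng.random(100)
proposals = [(s, e) for s in boundary_peaks(start_prob)
             for e in boundary_peaks(end_prob) if e > s]
print(len(proposals), "candidate proposals before PEM ranking")
```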
    @@ -19,11 +20,11 @@ Temporal action proposal generation is an important yet challenging problem, sin ### ActivityNet feature -|config |feature | gpus| pretrain | AR@100| AUC | gpu_mem(M) | iter time(s) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:-:| -|bsn_400x100_1x16_20e_activitynet_feature |cuhk_mean_100 |1| None |74.66|66.45|41(TEM)+25(PEM)|0.074(TEM)+0.036(PEM)|[ckpt_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature/bsn_tem_400x100_1x16_20e_activitynet_feature_20200619-cd6accc3.pth) [ckpt_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature/bsn_pem_400x100_1x16_20e_activitynet_feature_20210203-1c27763d.pth)| [log_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature/bsn_tem_400x100_1x16_20e_activitynet_feature.log) [log_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature/bsn_pem_400x100_1x16_20e_activitynet_feature.log)| [json_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature/bsn_tem_400x100_1x16_20e_activitynet_feature.log.json) [json_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature/bsn_pem_400x100_1x16_20e_activitynet_feature.log.json)| -| |mmaction_video |1| None |74.93|66.74|41(TEM)+25(PEM)|0.074(TEM)+0.036(PEM)|[ckpt_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_video/bsn_tem_400x100_1x16_20e_mmaction_video_20200809-ad6ec626.pth) [ckpt_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_video/bsn_pem_400x100_1x16_20e_mmaction_video_20200809-aa861b26.pth)| [log_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_video/bsn_tem_400x100_1x16_20e_mmaction_video_20200809.log) [log_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_video/bsn_pem_400x100_1x16_20e_mmaction_video_20200809.log) | [json_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_video/bsn_tem_400x100_1x16_20e_mmaction_video_20200809.json) [json_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_video/bsn_pem_400x100_1x16_20e_mmaction_video_20200809.json) | -| |mmaction_clip |1| None |75.19|66.81|41(TEM)+25(PEM)|0.074(TEM)+0.036(PEM)|[ckpt_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_clip/bsn_tem_400x100_1x16_20e_mmaction_clip_20200809-0a563554.pth) [ckpt_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_clip/bsn_pem_400x100_1x16_20e_mmaction_clip_20200809-e32f61e6.pth)| [log_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_clip/bsn_tem_400x100_1x16_20e_mmaction_clip_20200809.log) [log_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_clip/bsn_pem_400x100_1x16_20e_mmaction_clip_20200809.log) | [json_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_clip/bsn_tem_400x100_1x16_20e_mmaction_clip_20200809.json) 
[json_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_clip/bsn_pem_400x100_1x16_20e_mmaction_clip_20200809.json) | +| config | feature | gpus | pretrain | AR@100 | AUC | gpu_mem(M) | iter time(s) | ckpt | log | json | +| :--------------------------------------- | :------------: | :--: | :------: | :----: | :---: | :-------------: | :-------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| bsn_400x100_1x16_20e_activitynet_feature | cuhk_mean_100 | 1 | None | 74.66 | 66.45 | 41(TEM)+25(PEM) | 0.074(TEM)+0.036(PEM) | [ckpt_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature/bsn_tem_400x100_1x16_20e_activitynet_feature_20200619-cd6accc3.pth) [ckpt_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature/bsn_pem_400x100_1x16_20e_activitynet_feature_20210203-1c27763d.pth) | [log_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature/bsn_tem_400x100_1x16_20e_activitynet_feature.log) [log_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature/bsn_pem_400x100_1x16_20e_activitynet_feature.log) | [json_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature/bsn_tem_400x100_1x16_20e_activitynet_feature.log.json) [json_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature/bsn_pem_400x100_1x16_20e_activitynet_feature.log.json) | +| | mmaction_video | 1 | None | 74.93 | 66.74 | 41(TEM)+25(PEM) | 0.074(TEM)+0.036(PEM) | [ckpt_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_video/bsn_tem_400x100_1x16_20e_mmaction_video_20200809-ad6ec626.pth) [ckpt_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_video/bsn_pem_400x100_1x16_20e_mmaction_video_20200809-aa861b26.pth) | [log_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_video/bsn_tem_400x100_1x16_20e_mmaction_video_20200809.log) [log_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_video/bsn_pem_400x100_1x16_20e_mmaction_video_20200809.log) | [json_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_video/bsn_tem_400x100_1x16_20e_mmaction_video_20200809.json) 
[json_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_video/bsn_pem_400x100_1x16_20e_mmaction_video_20200809.json) | +| | mmaction_clip | 1 | None | 75.19 | 66.81 | 41(TEM)+25(PEM) | 0.074(TEM)+0.036(PEM) | [ckpt_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_clip/bsn_tem_400x100_1x16_20e_mmaction_clip_20200809-0a563554.pth) [ckpt_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_clip/bsn_pem_400x100_1x16_20e_mmaction_clip_20200809-e32f61e6.pth) | [log_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_clip/bsn_tem_400x100_1x16_20e_mmaction_clip_20200809.log) [log_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_clip/bsn_pem_400x100_1x16_20e_mmaction_clip_20200809.log) | [json_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_clip/bsn_tem_400x100_1x16_20e_mmaction_clip_20200809.json) [json_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_clip/bsn_pem_400x100_1x16_20e_mmaction_clip_20200809.json) | :::{note} @@ -48,15 +49,15 @@ Examples: 1. train BSN(TEM) on ActivityNet features dataset. - ```shell - python tools/train.py configs/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature.py - ``` + ```shell + python tools/train.py configs/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature.py + ``` 2. train BSN(PEM) on PGM results. - ```shell - python tools/train.py configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py - ``` + ```shell + python tools/train.py configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py + ``` For more details and optional arguments infos, you can refer to **Training setting** part in [getting_started](/docs/getting_started.md#training-setting). @@ -66,43 +67,43 @@ You can use the following commands to inference a model. 1. For TEM Inference - ```shell - # Note: This could not be evaluated. - python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] - ``` + ```shell + # Note: This could not be evaluated. + python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] + ``` 2. For PGM Inference - ```shell - python tools/misc/bsn_proposal_generation.py ${CONFIG_FILE} [--mode ${MODE}] - ``` + ```shell + python tools/misc/bsn_proposal_generation.py ${CONFIG_FILE} [--mode ${MODE}] + ``` 3. For PEM Inference - ```shell - python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] - ``` + ```shell + python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] + ``` Examples: 1. Inference BSN(TEM) with pretrained model. - ```shell - python tools/test.py configs/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature.py checkpoints/SOME_CHECKPOINT.pth - ``` + ```shell + python tools/test.py configs/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature.py checkpoints/SOME_CHECKPOINT.pth + ``` 2. Inference BSN(PGM) with pretrained model. - ```shell - python tools/misc/bsn_proposal_generation.py configs/localization/bsn/bsn_pgm_400x100_activitynet_feature.py --mode train - ``` + ```shell + python tools/misc/bsn_proposal_generation.py configs/localization/bsn/bsn_pgm_400x100_activitynet_feature.py --mode train + ``` 3. Inference BSN(PEM) with evaluation metric 'AR@AN' and output the results. 
- ```shell
- # Note: If evaluated, then please make sure the annotation file for test data contains groundtruth.
- python tools/test.py configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py checkpoints/SOME_CHECKPOINT.pth --eval AR@AN --out results.json
- ```
+ ```shell
+ # Note: If evaluated, then please make sure the annotation file for test data contains groundtruth.
+ python tools/test.py configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py checkpoints/SOME_CHECKPOINT.pth --eval AR@AN --out results.json
+ ```

## Test

@@ -110,50 +111,50 @@ You can use the following commands to test a model.

1. TEM

- ```shell
- # Note: This could not be evaluated.
- python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments]
- ```
+ ```shell
+ # Note: This could not be evaluated.
+ python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments]
+ ```

2. PGM

- ```shell
- python tools/misc/bsn_proposal_generation.py ${CONFIG_FILE} [--mode ${MODE}]
- ```
+ ```shell
+ python tools/misc/bsn_proposal_generation.py ${CONFIG_FILE} [--mode ${MODE}]
+ ```

3. PEM

- ```shell
- python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments]
- ```
+ ```shell
+ python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments]
+ ```

Examples:

1. Test a TEM model on ActivityNet dataset.

- ```shell
- python tools/test.py configs/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature.py checkpoints/SOME_CHECKPOINT.pth
- ```
+ ```shell
+ python tools/test.py configs/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature.py checkpoints/SOME_CHECKPOINT.pth
+ ```

2. Test a PGM model on ActivityNet dataset.

- ```shell
- python tools/misc/bsn_proposal_generation.py configs/localization/bsn/bsn_pgm_400x100_activitynet_feature.py --mode test
- ```
+ ```shell
+ python tools/misc/bsn_proposal_generation.py configs/localization/bsn/bsn_pgm_400x100_activitynet_feature.py --mode test
+ ```

3. Test a PEM model with evaluation metric 'AR@AN' and output the results.

- ```shell
- python tools/test.py configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py checkpoints/SOME_CHECKPOINT.pth --eval AR@AN --out results.json
- ```
+ ```shell
+ python tools/test.py configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py checkpoints/SOME_CHECKPOINT.pth --eval AR@AN --out results.json
+ ```

:::{note}

1. (Optional) You can use the following command to generate a formatted proposal file, which will be fed into the action classifier (Currently supports only SSN and P-GCN, not including TSN, I3D etc.) to get the classification result of proposals.
- ```shell - python tools/data/activitynet/convert_proposal_format.py - ``` + ```shell + python tools/data/activitynet/convert_proposal_format.py + ``` ::: diff --git a/configs/localization/bsn/README_zh-CN.md b/configs/localization/bsn/README_zh-CN.md index 6d0ddfc2df..e980c9875b 100644 --- a/configs/localization/bsn/README_zh-CN.md +++ b/configs/localization/bsn/README_zh-CN.md @@ -18,11 +18,11 @@ ### ActivityNet feature -|配置文件 |特征 | GPU 数量| 预训练 | AR@100| AUC | GPU 显存占用 (M) | 迭代时间 (s) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:-:| -|bsn_400x100_1x16_20e_activitynet_feature |cuhk_mean_100 |1| None |74.66|66.45|41(TEM)+25(PEM)|0.074(TEM)+0.036(PEM)|[ckpt_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature/bsn_tem_400x100_1x16_20e_activitynet_feature_20200619-cd6accc3.pth) [ckpt_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature/bsn_pem_400x100_1x16_20e_activitynet_feature_20210203-1c27763d.pth)| [log_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature/bsn_tem_400x100_1x16_20e_activitynet_feature.log) [log_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature/bsn_pem_400x100_1x16_20e_activitynet_feature.log)| [json_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature/bsn_tem_400x100_1x16_20e_activitynet_feature.log.json) [json_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature/bsn_pem_400x100_1x16_20e_activitynet_feature.log.json)| -| |mmaction_video |1| None |74.93|66.74|41(TEM)+25(PEM)|0.074(TEM)+0.036(PEM)|[ckpt_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_video/bsn_tem_400x100_1x16_20e_mmaction_video_20200809-ad6ec626.pth) [ckpt_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_video/bsn_pem_400x100_1x16_20e_mmaction_video_20200809-aa861b26.pth)| [log_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_video/bsn_tem_400x100_1x16_20e_mmaction_video_20200809.log) [log_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_video/bsn_pem_400x100_1x16_20e_mmaction_video_20200809.log) | [json_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_video/bsn_tem_400x100_1x16_20e_mmaction_video_20200809.json) [json_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_video/bsn_pem_400x100_1x16_20e_mmaction_video_20200809.json) | -| |mmaction_clip |1| None |75.19|66.81|41(TEM)+25(PEM)|0.074(TEM)+0.036(PEM)|[ckpt_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_clip/bsn_tem_400x100_1x16_20e_mmaction_clip_20200809-0a563554.pth) [ckpt_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_clip/bsn_pem_400x100_1x16_20e_mmaction_clip_20200809-e32f61e6.pth)| [log_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_clip/bsn_tem_400x100_1x16_20e_mmaction_clip_20200809.log) [log_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_clip/bsn_pem_400x100_1x16_20e_mmaction_clip_20200809.log) | 
[json_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_clip/bsn_tem_400x100_1x16_20e_mmaction_clip_20200809.json) [json_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_clip/bsn_pem_400x100_1x16_20e_mmaction_clip_20200809.json) | +| 配置文件 | 特征 | GPU 数量 | 预训练 | AR@100 | AUC | GPU 显存占用 (M) | 迭代时间 (s) | ckpt | log | json | +| :--------------------------------------- | :------------: | :----: | :--: | :----: | :---: | :-------------: | :-------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| bsn_400x100_1x16_20e_activitynet_feature | cuhk_mean_100 | 1 | None | 74.66 | 66.45 | 41(TEM)+25(PEM) | 0.074(TEM)+0.036(PEM) | [ckpt_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature/bsn_tem_400x100_1x16_20e_activitynet_feature_20200619-cd6accc3.pth) [ckpt_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature/bsn_pem_400x100_1x16_20e_activitynet_feature_20210203-1c27763d.pth) | [log_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature/bsn_tem_400x100_1x16_20e_activitynet_feature.log) [log_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature/bsn_pem_400x100_1x16_20e_activitynet_feature.log) | [json_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature/bsn_tem_400x100_1x16_20e_activitynet_feature.log.json) [json_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature/bsn_pem_400x100_1x16_20e_activitynet_feature.log.json) | +| | mmaction_video | 1 | None | 74.93 | 66.74 | 41(TEM)+25(PEM) | 0.074(TEM)+0.036(PEM) | [ckpt_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_video/bsn_tem_400x100_1x16_20e_mmaction_video_20200809-ad6ec626.pth) [ckpt_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_video/bsn_pem_400x100_1x16_20e_mmaction_video_20200809-aa861b26.pth) | [log_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_video/bsn_tem_400x100_1x16_20e_mmaction_video_20200809.log) [log_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_video/bsn_pem_400x100_1x16_20e_mmaction_video_20200809.log) | 
[json_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_video/bsn_tem_400x100_1x16_20e_mmaction_video_20200809.json) [json_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_video/bsn_pem_400x100_1x16_20e_mmaction_video_20200809.json) | +| | mmaction_clip | 1 | None | 75.19 | 66.81 | 41(TEM)+25(PEM) | 0.074(TEM)+0.036(PEM) | [ckpt_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_clip/bsn_tem_400x100_1x16_20e_mmaction_clip_20200809-0a563554.pth) [ckpt_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_clip/bsn_pem_400x100_1x16_20e_mmaction_clip_20200809-e32f61e6.pth) | [log_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_clip/bsn_tem_400x100_1x16_20e_mmaction_clip_20200809.log) [log_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_clip/bsn_pem_400x100_1x16_20e_mmaction_clip_20200809.log) | [json_tem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_tem_400x100_1x16_20e_mmaction_clip/bsn_tem_400x100_1x16_20e_mmaction_clip_20200809.json) [json_pem](https://download.openmmlab.com/mmaction/localization/bsn/bsn_pem_400x100_1x16_20e_mmaction_clip/bsn_pem_400x100_1x16_20e_mmaction_clip_20200809.json) | 注: @@ -46,17 +46,17 @@ python tools/train.py ${CONFIG_FILE} [optional arguments] 1. 在 ActivityNet 特征上训练 BSN(TEM) 模型。 - ```shell - python tools/train.py configs/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature.py - ``` + ```shell + python tools/train.py configs/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature.py + ``` 2. 基于 PGM 的结果训练 BSN(PEM)。 - ```shell - python tools/train.py configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py - ``` + ```shell + python tools/train.py configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py + ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E8%AE%AD%E7%BB%83%E9%85%8D%E7%BD%AE) 中的 **训练配置** 部分。 ## 如何进行推理 @@ -64,43 +64,43 @@ python tools/train.py ${CONFIG_FILE} [optional arguments] 1. 推理 TEM 模型。 - ```shell - # Note: This could not be evaluated. - python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] - ``` + ```shell + # Note: This could not be evaluated. + python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] + ``` 2. 推理 PGM 模型 - ```shell - python tools/misc/bsn_proposal_generation.py ${CONFIG_FILE} [--mode ${MODE}] - ``` + ```shell + python tools/misc/bsn_proposal_generation.py ${CONFIG_FILE} [--mode ${MODE}] + ``` 3. 推理 PEM 模型 - ```shell - python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] - ``` + ```shell + python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] + ``` 例如 1. 利用预训练模型进行 BSN(TEM) 模型的推理。 - ```shell - python tools/test.py configs/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature.py checkpoints/SOME_CHECKPOINT.pth - ``` + ```shell + python tools/test.py configs/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature.py checkpoints/SOME_CHECKPOINT.pth + ``` 2. 
利用预训练模型进行 BSN(PGM) 模型的推理 - ```shell - python tools/misc/bsn_proposal_generation.py configs/localization/bsn/bsn_pgm_400x100_activitynet_feature.py --mode train - ``` + ```shell + python tools/misc/bsn_proposal_generation.py configs/localization/bsn/bsn_pgm_400x100_activitynet_feature.py --mode train + ``` 3. 推理 BSN(PEM) 模型,并计算 'AR@AN' 指标,输出结果文件。 - ```shell - # 注:如果需要进行指标验证,需确测试数据的保标注文件包含真实标签 - python tools/test.py configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py checkpoints/SOME_CHECKPOINT.pth --eval AR@AN --out results.json - ``` + ```shell + # 注:如果需要进行指标验证,需确保测试数据的标注文件包含真实标签 + python tools/test.py configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py checkpoints/SOME_CHECKPOINT.pth --eval AR@AN --out results.json + ``` ## 如何测试 @@ -108,49 +108,49 @@ 1. TEM - ```shell - # 注:该命令无法进行指标验证 - python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] - ``` + ```shell + # 注:该命令无法进行指标验证 + python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] + ``` 2. PGM - ```shell - python tools/misc/bsn_proposal_generation.py ${CONFIG_FILE} [--mode ${MODE}] - ``` + ```shell + python tools/misc/bsn_proposal_generation.py ${CONFIG_FILE} [--mode ${MODE}] + ``` 3. PEM - ```shell - python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] - ``` + ```shell + python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] + ``` 例如: 1. 在 ActivityNet 数据集上测试 TEM 模型。 - ```shell - python tools/test.py configs/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature.py checkpoints/SOME_CHECKPOINT.pth - ``` + ```shell + python tools/test.py configs/localization/bsn/bsn_tem_400x100_1x16_20e_activitynet_feature.py checkpoints/SOME_CHECKPOINT.pth + ``` 2. 在 ActivityNet 数据集上测试 PGM 模型。 - ```shell - python tools/misc/bsn_proposal_generation.py configs/localization/bsn/bsn_pgm_400x100_activitynet_feature.py --mode test - ``` + ```shell + python tools/misc/bsn_proposal_generation.py configs/localization/bsn/bsn_pgm_400x100_activitynet_feature.py --mode test - ``` + 3. 测试 PEM 模型,并计算 'AR@AN' 指标,输出结果文件。 - ```shell - python tools/test.py configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py checkpoints/SOME_CHECKPOINT.pth --eval AR@AN --out results.json - ``` + ```shell + python tools/test.py configs/localization/bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py checkpoints/SOME_CHECKPOINT.pth --eval AR@AN --out results.json + ``` 注: 1. (可选项) 用户可以使用以下指令生成格式化的时序动作候选文件,该文件可被送入动作识别器中(目前只支持 SSN 和 P-GCN,不包括 TSN, I3D 等),以获得时序动作候选的分类结果。 - ```shell - python tools/data/activitynet/convert_proposal_format.py - ``` + ```shell + python tools/data/activitynet/convert_proposal_format.py + ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86) 中的 **测试某个数据集** 部分。 diff --git a/configs/localization/ssn/README.md index d28d5568b7..7eb73213c4 100644 --- a/configs/localization/ssn/README.md +++ b/configs/localization/ssn/README.md @@ -11,15 +11,16 @@ Detecting actions in untrimmed videos is an important yet challenging task. In this paper, we present the structured segment network (SSN), a novel framework which models the temporal structure of each action instance via a structured temporal pyramid.
On top of the pyramid, we further introduce a decomposed discriminative model comprising two classifiers, respectively for classifying actions and determining completeness. This allows the framework to effectively distinguish positive proposals from background or incomplete ones, thus leading to both accurate recognition and localization. These components are integrated into a unified network that can be efficiently trained in an end-to-end fashion. Additionally, a simple yet effective temporal action proposal scheme, dubbed temporal actionness grouping (TAG) is devised to generate high quality action proposals. On two challenging benchmarks, THUMOS14 and ActivityNet, our method remarkably outperforms previous state-of-the-art methods, demonstrating superior accuracy and strong adaptivity in handling actions with various temporal structures. +
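To make the structured temporal pyramid concrete, the snippet below pools snippet-level features over the starting, course and ending stages of an augmented proposal, with an extra two-part pyramid on the course stage. This is a minimal sketch only: the `stpp_pool` helper, the 1:2:1 stage split and the feature sizes are assumptions for illustration, not the STPP implementation shipped with this config.

```python
import torch


def stpp_pool(feats, ratios=(1, 2, 1)):
    """Toy structured temporal pyramid pooling for one proposal.

    `feats` holds (T, C) snippet-level features covering the augmented
    proposal; `ratios` gives the assumed relative lengths of the
    starting / course / ending stages.
    """
    t, total = feats.size(0), sum(ratios)
    s_end = t * ratios[0] // total
    c_end = s_end + t * ratios[1] // total
    starting, course, ending = feats[:s_end], feats[s_end:c_end], feats[c_end:]

    half = max(course.size(0) // 2, 1)
    parts = [
        starting.mean(0),       # level-1 pool over the starting stage
        course.mean(0),         # level-1 pool over the course stage
        course[:half].mean(0),  # level-2 pyramid on the course stage
        course[half:].mean(0),
        ending.mean(0),         # level-1 pool over the ending stage
    ]
    return torch.cat(parts)     # stage-structured proposal representation


feats = torch.randn(24, 1024)   # 24 snippets with 1024-d features (toy sizes)
print(stpp_pool(feats).shape)   # torch.Size([5120])
```

The decomposed completeness classifier mentioned in the abstract consumes exactly this kind of stage-aware representation, which is what lets it separate complete proposals from ones missing a starting or ending stage.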
    ## Results and Models -| config | gpus | backbone | pretrain | mAP@0.3 | mAP@0.4 | mAP@0.5 | reference mAP@0.3 | reference mAP@0.4 | reference mAP@0.5 | gpu_mem(M) | ckpt | log | json | reference ckpt | reference json -|:-:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:-:|:-:|:-:|:-:|---|:--:|:--:| -|[ssn_r50_450e_thumos14_rgb](/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py) |8| ResNet50 | ImageNet |29.37|22.15|15.69|[27.61](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)|[21.28](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)|[14.57](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)|6352|[ckpt](https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/ssn_r50_450e_thumos14_rgb_20201012-1920ab16.pth)| [log](https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/20201005_144656.log)| [json](https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/20201005_144656.log.json)| [ckpt](https://download.openmmlab.com/mmaction/localization/ssn/mmaction_reference/ssn_r50_450e_thumos14_rgb_ref/ssn_r50_450e_thumos14_rgb_ref_20201014-b6f48f68.pth)| [json](https://download.openmmlab.com/mmaction/localization/ssn/mmaction_reference/ssn_r50_450e_thumos14_rgb_ref/20201008_103258.log.json)| +| config | gpus | backbone | pretrain | mAP@0.3 | mAP@0.4 | mAP@0.5 | reference mAP@0.3 | reference mAP@0.4 | reference mAP@0.5 | gpu_mem(M) | ckpt | log | json | reference ckpt | reference json | +| :---------------------------------------------------------------------------------------: | :--: | :------: | :------: | :-----: | :-----: | :-----: | :---------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------: | :--------: | :----------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------: | ------------------------------------------------------------------------------------------------------------------- | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------------: | +| [ssn_r50_450e_thumos14_rgb](/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py) | 8 | ResNet50 | ImageNet | 29.37 | 22.15 | 15.69 | [27.61](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started) | [21.28](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started) | [14.57](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started) | 6352 | [ckpt](https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/ssn_r50_450e_thumos14_rgb_20201012-1920ab16.pth) | [log](https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/20201005_144656.log) | 
[json](https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/20201005_144656.log.json) | [ckpt](https://download.openmmlab.com/mmaction/localization/ssn/mmaction_reference/ssn_r50_450e_thumos14_rgb_ref/ssn_r50_450e_thumos14_rgb_ref_20201014-b6f48f68.pth) | [json](https://download.openmmlab.com/mmaction/localization/ssn/mmaction_reference/ssn_r50_450e_thumos14_rgb_ref/20201008_103258.log.json) | :::{note} diff --git a/configs/localization/ssn/README_zh-CN.md b/configs/localization/ssn/README_zh-CN.md index d1ec5bbcee..34a133cef7 100644 --- a/configs/localization/ssn/README_zh-CN.md +++ b/configs/localization/ssn/README_zh-CN.md @@ -16,9 +16,9 @@ year = {2017} ## 模型库 -| 配置文件 | GPU 数量 | 主干网络 | 预训练 | mAP@0.3 | mAP@0.4 | mAP@0.5 | 参考代码的 mAP@0.3 | 参考代码的 mAP@0.4 | 参考代码的 mAP@0.5 | GPU 显存占用 (M) | ckpt | log | json | 参考代码的 ckpt | 参考代码的 json | -|:-:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:-:|:-:|:-:|:-:|---|:--:|:--:| -|[ssn_r50_450e_thumos14_rgb](/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py) |8| ResNet50 | ImageNet |29.37|22.15|15.69|[27.61](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)|[21.28](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)|[14.57](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started)|6352|[ckpt](https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/ssn_r50_450e_thumos14_rgb_20201012-1920ab16.pth)| [log](https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/20201005_144656.log)| [json](https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/20201005_144656.log.json)| [ckpt](https://download.openmmlab.com/mmaction/localization/ssn/mmaction_reference/ssn_r50_450e_thumos14_rgb_ref/ssn_r50_450e_thumos14_rgb_ref_20201014-b6f48f68.pth)| [json](https://download.openmmlab.com/mmaction/localization/ssn/mmaction_reference/ssn_r50_450e_thumos14_rgb_ref/20201008_103258.log.json)| +| 配置文件 | GPU 数量 | 主干网络 | 预训练 | mAP@0.3 | mAP@0.4 | mAP@0.5 | 参考代码的 mAP@0.3 | 参考代码的 mAP@0.4 | 参考代码的 mAP@0.5 | GPU 显存占用 (M) | ckpt | log | json | 参考代码的 ckpt | 参考代码的 json | +| :---------------------------------------------------------------------------------------: | :----: | :------: | :------: | :-----: | :-----: | :-----: | :---------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------: | :----------: | :----------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------: | ------------------------------------------------------------------------------------------------------------------- | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------------: | +| [ssn_r50_450e_thumos14_rgb](/configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py) | 8 | ResNet50 | 
ImageNet | 29.37 | 22.15 | 15.69 | [27.61](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started) | [21.28](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started) | [14.57](https://github.com/open-mmlab/mmaction/tree/c7e3b7c11fb94131be9b48a8e3d510589addc3ce#Get%20started) | 6352 | [ckpt](https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/ssn_r50_450e_thumos14_rgb_20201012-1920ab16.pth) | [log](https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/20201005_144656.log) | [json](https://download.openmmlab.com/mmaction/localization/ssn/ssn_r50_450e_thumos14_rgb/20201005_144656.log.json) | [ckpt](https://download.openmmlab.com/mmaction/localization/ssn/mmaction_reference/ssn_r50_450e_thumos14_rgb_ref/ssn_r50_450e_thumos14_rgb_ref_20201014-b6f48f68.pth) | [json](https://download.openmmlab.com/mmaction/localization/ssn/mmaction_reference/ssn_r50_450e_thumos14_rgb_ref/20201008_103258.log.json) | 注: @@ -43,7 +43,7 @@ python tools/train.py ${CONFIG_FILE} [optional arguments] python tools/train.py configs/localization/ssn/ssn_r50_450e_thumos14_rgb_train.py ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E8%AE%AD%E7%BB%83%E9%85%8D%E7%BD%AE) 中的 **训练配置** 部分。 ## 如何测试 @@ -60,4 +60,4 @@ python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [optional arguments] python tools/test.py configs/localization/ssn/ssn_r50_450e_thumos14_rgb_test.py checkpoints/SOME_CHECKPOINT.pth --eval mAP ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/c3d/README.md b/configs/recognition/c3d/README.md index 801dc0930b..859890c11d 100644 --- a/configs/recognition/c3d/README.md +++ b/configs/recognition/c3d/README.md @@ -11,6 +11,7 @@ We propose a simple, yet effective approach for spatiotemporal feature learning using deep 3-dimensional convolutional networks (3D ConvNets) trained on a large scale supervised video dataset. Our findings are three-fold: 1) 3D ConvNets are more suitable for spatiotemporal feature learning compared to 2D ConvNets; 2) A homogeneous architecture with small 3x3x3 convolution kernels in all layers is among the best performing architectures for 3D ConvNets; and 3) Our learned features, namely C3D (Convolutional 3D), with a simple linear classifier outperform state-of-the-art methods on 4 different benchmarks and are comparable with current best methods on the other 2 benchmarks. In addition, the features are compact: achieving 52.8% accuracy on UCF101 dataset with only 10 dimensions and also very efficient to compute due to the fast inference of ConvNets. Finally, they are conceptually very simple and easy to train and use. +
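The homogeneous 3x3x3 design highlighted in finding 2) is easy to picture in code. Below is a hedged PyTorch sketch of a single C3D-style conv/pool stage; the channel count and pooling shape are illustrative assumptions, not the exact architecture behind this config.

```python
import torch
import torch.nn as nn

# One homogeneous C3D-style stage: the conv uses a 3x3x3 kernel over
# (T, H, W); the first pooling layer keeps time and halves space.
block = nn.Sequential(
    nn.Conv3d(3, 64, kernel_size=3, padding=1),
    nn.ReLU(inplace=True),
    nn.MaxPool3d(kernel_size=(1, 2, 2)),
)

clip = torch.randn(1, 3, 16, 112, 112)  # (N, C, T, H, W): one 16-frame clip
print(block(clip).shape)                # torch.Size([1, 64, 16, 56, 56])
```

Stacking such stages, with later pooling layers that also downsample time, yields the familiar C3D trunk.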
    @@ -19,9 +20,9 @@ We propose a simple, yet effective approach for spatiotemporal feature learning ### UCF-101 -| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | testing protocol| inference_time(video/s) | gpu_mem(M) | ckpt | log | json | -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[c3d_sports1m_16x1x1_45e_ucf101_rgb.py](/configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb.py)|128x171|8| c3d | sports1m | 83.27 | 95.90 | 10 clips x 1 crop | x | 6053 | [ckpt](https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/c3d_sports1m_16x1x1_45e_ucf101_rgb_20201021-26655025.pth)|[log](https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/20201021_140429.log)|[json](https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/20201021_140429.log.json)| +| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | testing protocol | inference_time(video/s) | gpu_mem(M) | ckpt | log | json | +| :------------------------------------------------------------------------------------------------------ | :--------: | :--: | :------: | :------: | :------: | :------: | :---------------: | :---------------------: | :--------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------: | +| [c3d_sports1m_16x1x1_45e_ucf101_rgb.py](/configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb.py) | 128x171 | 8 | c3d | sports1m | 83.27 | 95.90 | 10 clips x 1 crop | x | 6053 | [ckpt](https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/c3d_sports1m_16x1x1_45e_ucf101_rgb_20201021-26655025.pth) | [log](https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/20201021_140429.log) | [json](https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/20201021_140429.log.json) | :::{note} diff --git a/configs/recognition/c3d/README_zh-CN.md b/configs/recognition/c3d/README_zh-CN.md index c4f02c16f2..4b475f573c 100644 --- a/configs/recognition/c3d/README_zh-CN.md +++ b/configs/recognition/c3d/README_zh-CN.md @@ -19,9 +19,9 @@ eid = {arXiv:1412.0767} ### UCF-101 -| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 测试方案| 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log | json | -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[c3d_sports1m_16x1x1_45e_ucf101_rgb.py](/configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb.py)|128x171|8| c3d | sports1m | 83.27 | 95.90 | 10 clips x 1 crop | x | 6053 | [ckpt](https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/c3d_sports1m_16x1x1_45e_ucf101_rgb_20201021-26655025.pth)|[log](https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/20201021_140429.log)|[json](https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/20201021_140429.log.json)| +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 测试方案 | 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log | json | +| 
:------------------------------------------------------------------------------------------------------ | :-----: | :----: | :--: | :------: | :------: | :------: | :---------------: | :------------: | :----------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------: | +| [c3d_sports1m_16x1x1_45e_ucf101_rgb.py](/configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb.py) | 128x171 | 8 | c3d | sports1m | 83.27 | 95.90 | 10 clips x 1 crop | x | 6053 | [ckpt](https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/c3d_sports1m_16x1x1_45e_ucf101_rgb_20201021-26655025.pth) | [log](https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/20201021_140429.log) | [json](https://download.openmmlab.com/mmaction/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb/20201021_140429.log.json) | 注: @@ -49,7 +49,7 @@ python tools/train.py configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E8%AE%AD%E7%BB%83%E9%85%8D%E7%BD%AE) 中的 **训练配置** 部分。 ## 如何测试 @@ -66,4 +66,4 @@ python tools/test.py configs/recognition/c3d/c3d_sports1m_16x1x1_45e_ucf101_rgb. checkpoints/SOME_CHECKPOINT.pth --eval top_k_accuracy ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/csn/README.md b/configs/recognition/csn/README.md index a5bd924c89..5fa387e5e5 100644 --- a/configs/recognition/csn/README.md +++ b/configs/recognition/csn/README.md @@ -11,6 +11,7 @@ Group convolution has been shown to offer great computational savings in various 2D convolutional architectures for image classification. It is natural to ask: 1) if group convolution can help to alleviate the high computational cost of video classification networks; 2) what factors matter the most in 3D group convolutional networks; and 3) what are good computation/accuracy trade-offs with 3D group convolutional networks. This paper studies the effects of different design choices in 3D group convolutional networks for video classification. We empirically demonstrate that the amount of channel interactions plays an important role in the accuracy of 3D group convolutional networks. Our experiments suggest two main findings. First, it is a good practice to factorize 3D convolutions by separating channel interactions and spatiotemporal interactions as this leads to improved accuracy and lower computational cost. Second, 3D channel-separated convolutions provide a form of regularization, yielding lower training accuracy but higher test accuracy compared to 3D convolutions. These two empirical findings lead us to design an architecture -- Channel-Separated Convolutional Network (CSN) -- which is simple, efficient, yet accurate. On Sports1M, Kinetics, and Something-Something, our CSNs are comparable with or better than the state-of-the-art while being 2-3 times more efficient. +
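The channel separation described above can be sketched as factorizing a dense 3x3x3 convolution into a pointwise 1x1x1 convolution (channel interactions) followed by a depthwise 3x3x3 convolution (spatiotemporal interactions), the ip-CSN form. A minimal sketch, assuming equal input and output channels:

```python
import torch
import torch.nn as nn

in_c = out_c = 64

# Dense 3D convolution: channel and space-time interactions in one kernel.
dense = nn.Conv3d(in_c, out_c, kernel_size=3, padding=1)

# Channel-separated factorization (ip-CSN style): a 1x1x1 conv mixes
# channels, then a depthwise 3x3x3 conv mixes space and time.
separated = nn.Sequential(
    nn.Conv3d(in_c, out_c, kernel_size=1),
    nn.Conv3d(out_c, out_c, kernel_size=3, padding=1, groups=out_c),
)


def n_params(m):
    return sum(p.numel() for p in m.parameters())


x = torch.randn(1, in_c, 8, 56, 56)
print(dense(x).shape == separated(x).shape)  # True: output shapes match
print(n_params(dense), n_params(separated))  # 110656 vs 5952 parameters
```

The large drop in parameters per block is also where the abstract locates the regularization effect: restricting channel interactions lowers training accuracy but can raise test accuracy.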
    @@ -19,17 +20,17 @@ Group convolution has been shown to offer great computational savings in various ### Kinetics-400 -|config | resolution | gpus | backbone |pretrain| top1 acc| top5 acc | inference_time(video/s) | gpu_mem(M)| ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb](/configs/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb.py)|short-side 320|x| ResNet50 | None | 73.6 | 91.3 | x | x | [ckpt](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb_20210618-4e29e2e8.pth) | [log](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb/20210618_182414.log) | [json](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb/20210618_182414.log.json) | -|[ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py)|short-side 320|x| ResNet50 | IG65M | 79.0 | 94.2 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_ig65m_pretrained_r50_32x2x1_58e_kinetics400_rgb_20210617-86d33018.pth) | x | x | -|[ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb](/configs/recognition/csn/ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py)|short-side 320|x| ResNet152 | None | 76.5 | 92.1 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_from_scratch_r152_32x2x1_180e_kinetics400_rgb_20210617-5c933ae1.pth) | x | x | -|[ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py)|short-side 320|x| ResNet152 | Sports1M | 78.2 | 93.0 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_sports1m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-b9b10241.pth) | x | x | -|[ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py](/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py)|short-side 320|8x4| ResNet152 | IG65M|82.76/82.6|95.68/95.3|x|8516|[ckpt](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb_20200812-9037a758.pth)/[infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-e63ee1bd.pth)|[log](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log)|[json](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log.json)| -|[ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb](/configs/recognition/csn/ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py)|short-side 320|x| ResNet152 | None | 77.8 | 92.8 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_from_scratch_r152_32x2x1_180e_kinetics400_rgb_20210617-d565828d.pth) | x | x | -|[ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py)|short-side 320|x| ResNet152 | Sports1M | 78.8 | 93.5 | x 
| x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_sports1m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-3367437a.pth) | x | x | -|[ipcsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ipcsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py)|short-side 320|x| ResNet152 | IG65M | 82.5 | 95.3 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-c3be9793.pth) | x | x | -|[ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py](/configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py)|short-side 320|8x4| ResNet152 | IG65M|80.14|94.93|x|8517|[ckpt](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20200803-fc66ce8d.pth)|[log](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/20200728_031952.log)|[json](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/20200728_031952.log.json)| +| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | inference_time(video/s) | gpu_mem(M) | ckpt | log | json | +| :------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :------------: | :--: | :-------: | :------: | :--------: | :--------: | :---------------------: | :--------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb](/configs/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb.py) | short-side 320 | x | ResNet50 | None | 73.6 | 91.3 | x | x | [ckpt](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb_20210618-4e29e2e8.pth) | [log](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb/20210618_182414.log) | [json](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb/20210618_182414.log.json) | +| [ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py) | short-side 320 | x | ResNet50 | IG65M | 79.0 | 94.2 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_ig65m_pretrained_r50_32x2x1_58e_kinetics400_rgb_20210617-86d33018.pth) | x | x | +| [ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb](/configs/recognition/csn/ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py) | short-side 320 | x | ResNet152 | None | 76.5 | 92.1 | x | 
x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_from_scratch_r152_32x2x1_180e_kinetics400_rgb_20210617-5c933ae1.pth) | x | x | +| [ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py) | short-side 320 | x | ResNet152 | Sports1M | 78.2 | 93.0 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_sports1m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-b9b10241.pth) | x | x | +| [ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py](/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py) | short-side 320 | 8x4 | ResNet152 | IG65M | 82.76/82.6 | 95.68/95.3 | x | 8516 | [ckpt](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb_20200812-9037a758.pth)/[infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-e63ee1bd.pth) | [log](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log) | [json](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log.json) | +| [ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb](/configs/recognition/csn/ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py) | short-side 320 | x | ResNet152 | None | 77.8 | 92.8 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_from_scratch_r152_32x2x1_180e_kinetics400_rgb_20210617-d565828d.pth) | x | x | +| [ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py) | short-side 320 | x | ResNet152 | Sports1M | 78.8 | 93.5 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_sports1m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-3367437a.pth) | x | x | +| [ipcsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ipcsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py) | short-side 320 | x | ResNet152 | IG65M | 82.5 | 95.3 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-c3be9793.pth) | x | x | +| [ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py](/configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py) | short-side 320 | 8x4 | ResNet152 | IG65M | 80.14 | 94.93 | x | 8517 | [ckpt](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20200803-fc66ce8d.pth) | [log](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/20200728_031952.log) | [json](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/20200728_031952.log.json) | :::{note} diff --git a/configs/recognition/csn/README_zh-CN.md b/configs/recognition/csn/README_zh-CN.md index 06a28cd5c5..c7553fc01e 100644 --- a/configs/recognition/csn/README_zh-CN.md +++ 
b/configs/recognition/csn/README_zh-CN.md @@ -31,17 +31,17 @@ doi = {10.1109/ICCV.2019.00565} ### Kinetics-400 -|配置文件 | 分辨率 | GPU 数量 | 主干网络 |预训练| top1 准确率| top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M)| ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb](/configs/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb.py)|短边 320|x| ResNet50 | None | 73.6 | 91.3 | x | x | [ckpt](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb_20210618-4e29e2e8.pth) | [log](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb/20210618_182414.log) | [json](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb/20210618_182414.log.json) | -|[ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py)|短边 320|x| ResNet50 | IG65M | 79.0 | 94.2 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_ig65m_pretrained_r50_32x2x1_58e_kinetics400_rgb_20210617-86d33018.pth) | x | x | -|[ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb](/configs/recognition/csn/ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py)|短边 320|x| ResNet152 | None | 76.5 | 92.1 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_from_scratch_r152_32x2x1_180e_kinetics400_rgb_20210617-5c933ae1.pth) | x | x | -|[ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py)|短边 320|x| ResNet152 | Sports1M | 78.2 | 93.0 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_sports1m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-b9b10241.pth) | x | x | -|[ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py](/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py)|短边 320|8x4| ResNet152 | IG65M|82.76/82.6|95.68/95.3|x|8516|[ckpt](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb_20200812-9037a758.pth)/[infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-e63ee1bd.pth)|[log](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log)|[json](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log.json)| -|[ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb](/configs/recognition/csn/ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py)|短边 320|x| ResNet152 | None | 77.8 | 92.8 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_from_scratch_r152_32x2x1_180e_kinetics400_rgb_20210617-d565828d.pth) | x | x | -|[ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py)|短边 320|x| ResNet152 | Sports1M | 78.8 | 93.5 | x | x | 
[infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_sports1m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-3367437a.pth) | x | x | -|[ipcsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ipcsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py)|短边 320|x| ResNet152 | IG65M | 82.5 | 95.3 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-c3be9793.pth) | x | x | -|[ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py](/configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py)|短边 320|8x4| ResNet152 | IG65M|80.14|94.93|x|8517|[ckpt](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20200803-fc66ce8d.pth)|[log](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/20200728_031952.log)|[json](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/20200728_031952.log.json)| +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log | json | +| :------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :----: | :----: | :-------: | :------: | :--------: | :--------: | :------------: | :----------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb](/configs/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb.py) | 短边 320 | x | ResNet50 | None | 73.6 | 91.3 | x | x | [ckpt](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb_20210618-4e29e2e8.pth) | [log](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb/20210618_182414.log) | [json](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_bnfrozen_r50_32x2x1_180e_kinetics400_rgb/20210618_182414.log.json) | +| [ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r50_32x2x1_58e_kinetics400_rgb.py) | 短边 320 | x | ResNet50 | IG65M | 79.0 | 94.2 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_ig65m_pretrained_r50_32x2x1_58e_kinetics400_rgb_20210617-86d33018.pth) | x | x | +| [ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb](/configs/recognition/csn/ircsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py) | 短边 320 | x | ResNet152 | None | 76.5 | 92.1 | x | x | 
[infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_from_scratch_r152_32x2x1_180e_kinetics400_rgb_20210617-5c933ae1.pth) | x | x | +| [ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ircsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py) | 短边 320 | x | ResNet152 | Sports1M | 78.2 | 93.0 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_sports1m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-b9b10241.pth) | x | x | +| [ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py](/configs/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py) | 短边 320 | 8x4 | ResNet152 | IG65M | 82.76/82.6 | 95.68/95.3 | x | 8516 | [ckpt](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb_20200812-9037a758.pth)/[infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-e63ee1bd.pth) | [log](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log) | [json](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb/20200809_053132.log.json) | +| [ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb](/configs/recognition/csn/ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb.py) | 短边 320 | x | ResNet152 | None | 77.8 | 92.8 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_from_scratch_r152_32x2x1_180e_kinetics400_rgb_20210617-d565828d.pth) | x | x | +| [ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ipcsn_sports1m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py) | 短边 320 | x | ResNet152 | Sports1M | 78.8 | 93.5 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_sports1m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-3367437a.pth) | x | x | +| [ipcsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb](/configs/recognition/csn/ipcsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py) | 短边 320 | x | ResNet152 | IG65M | 82.5 | 95.3 | x | x | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/csn/vmz/vmz_ipcsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20210617-c3be9793.pth) | x | x | +| [ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py](/configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb.py) | 短边 320 | 8x4 | ResNet152 | IG65M | 80.14 | 94.93 | x | 8517 | [ckpt](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb_20200803-fc66ce8d.pth) | [log](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/20200728_031952.log) | [json](https://download.openmmlab.com/mmaction/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_58e_kinetics400_rgb/20200728_031952.log.json) | 注: @@ -71,7 +71,7 @@ python tools/train.py configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1 --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 
[基础教程](/docs_zh_CN/getting_started.md#%E8%AE%AD%E7%BB%83%E9%85%8D%E7%BD%AE) 中的 **训练配置** 部分。 ## 如何测试 @@ -89,4 +89,4 @@ python tools/test.py configs/recognition/csn/ircsn_ig65m_pretrained_r152_32x2x1_ --out result.json --average-clips prob ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/i3d/README.md b/configs/recognition/i3d/README.md index 02a97b8dc9..37fee079c2 100644 --- a/configs/recognition/i3d/README.md +++ b/configs/recognition/i3d/README.md @@ -13,6 +13,7 @@ The paucity of videos in current action classification datasets (UCF-101 and HMDB-51) has made it difficult to identify good video architectures, as most methods obtain similar performance on existing small-scale benchmarks. This paper re-evaluates state-of-the-art architectures in light of the new Kinetics Human Action Video dataset. Kinetics has two orders of magnitude more data, with 400 human action classes and over 400 clips per class, and is collected from realistic, challenging YouTube videos. We provide an analysis on how current architectures fare on the task of action classification on this dataset and how much performance improves on the smaller benchmark datasets after pre-training on Kinetics. We also introduce a new Two-Stream Inflated 3D ConvNet (I3D) that is based on 2D ConvNet inflation: filters and pooling kernels of very deep image classification ConvNets are expanded into 3D, making it possible to learn seamless spatio-temporal feature extractors from video while leveraging successful ImageNet architecture designs and even their parameters. We show that, after pre-training on Kinetics, I3D models considerably improve upon the state-of-the-art in action classification, reaching 80.9% on HMDB-51 and 98.0% on UCF-101. +
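The inflation trick in the abstract can be sketched directly: each pretrained 2D kernel is repeated along a new temporal axis and divided by the temporal size, so a video of identical frames initially reproduces the 2D network's activations. The helper below, `inflate_conv2d`, is a name made up for this sketch rather than an API of this repository.

```python
import torch
import torch.nn as nn


def inflate_conv2d(conv2d: nn.Conv2d, time_dim: int = 3) -> nn.Conv3d:
    """Inflate a pretrained 2D conv into a 3D conv (I3D-style sketch)."""
    conv3d = nn.Conv3d(
        conv2d.in_channels,
        conv2d.out_channels,
        kernel_size=(time_dim, *conv2d.kernel_size),
        stride=(1, *conv2d.stride),
        padding=(time_dim // 2, *conv2d.padding),
        bias=conv2d.bias is not None)
    # Repeat the 2D kernel along the new temporal axis and rescale by
    # 1/time_dim so static inputs keep the same activation scale.
    weight = conv2d.weight.data.unsqueeze(2).repeat(1, 1, time_dim, 1, 1)
    conv3d.weight.data.copy_(weight / time_dim)
    if conv2d.bias is not None:
        conv3d.bias.data.copy_(conv2d.bias.data)
    return conv3d


conv2d = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3)
conv3d = inflate_conv2d(conv2d)
print(conv3d.weight.shape)  # torch.Size([64, 3, 3, 7, 7])
```

Applying the same transform to every convolution (and inflating the pooling kernels accordingly) is, roughly, how a 2D ImageNet backbone becomes a 3D one that inherits its pretraining.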
    @@ -21,18 +22,18 @@ The paucity of videos in current action classification datasets (UCF-101 and HMD ### Kinetics-400 -|config | resolution | gpus | backbone |pretrain| top1 acc| top5 acc | inference_time(video/s) | gpu_mem(M)| ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[i3d_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py) |340x256|8| ResNet50|ImageNet |72.68|90.78|1.7 (320x3 frames)| 5170|[ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/i3d_r50_32x2x1_100e_kinetics400_rgb_20200614-c25ef9a4.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/20200614_060456.log)| [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/20200614_060456.log.json)| -|[i3d_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py) |short-side 256|8| ResNet50|ImageNet | 73.27|90.92|x|5170|[ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth)|[log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/20200725_031555.log)|[json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/20200725_031555.log.json)| -|[i3d_r50_video_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb.py)|short-side 256p|8| ResNet50 |ImageNet|72.85 |90.75 |x|5170|[ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/i3d_r50_video_32x2x1_100e_kinetics400_rgb_20200826-e31c6f52.pth)|[log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/20200706_143014.log)|[json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/20200706_143014.log.json)| -|[i3d_r50_dense_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb.py) |340x256|8x2| ResNet50| ImageNet|72.77|90.57|1.7 (320x3 frames)| 5170| [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/i3d_r50_dense_32x2x1_100e_kinetics400_rgb_20200616-2bbb4361.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/20200616_230011.log)| [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/20200616_230011.log.json)| -|[i3d_r50_dense_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet|73.48|91.00|x| 5170| [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb_20200725-24eb54cc.pth)|[log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/20200725_031604.log)|[json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/20200725_031604.log.json)| -|[i3d_r50_lazy_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_lazy_32x2x1_100e_kinetics400_rgb.py) |340x256|8| ResNet50 |ImageNet|72.32|90.72|1.8 (320x3 frames)| 5170| 
[ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/i3d_r50_fast_32x2x1_100e_kinetics400_rgb_20200612-000e4d2a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/20200612_233836.log)| [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/20200612_233836.log.json)| -|[i3d_r50_lazy_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_lazy_32x2x1_100e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet|73.24|90.99|x| 5170| [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb_20200817-4e90d1d5.pth)| [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/20200725_031457.log) | [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/20200725_031457.log.json) | -|[i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb.py)|short-side 256p|8x4| ResNet50 |ImageNet|74.71|91.81|x|6438|[ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb_20200813-6e6aef1b.pth)|[log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034054.log)|[json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034054.log.json)| -|[i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb.py)|short-side 256p|8x4| ResNet50 |ImageNet|73.37|91.26|x|4944|[ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb_20200815-17f84aa2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034909.log)|[json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034909.log.json)| -|[i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb.py)|short-side 256p|8x4| ResNet50 |ImageNet|73.92|91.59|x|4832|[ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb_20200814-7c30d5bb.pth)|[log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/20200814_044208.log)|[json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/20200814_044208.log.json)| +| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | inference_time(video/s) | gpu_mem(M) | ckpt | log | json | +| :----------------------------------------------------------------------------------------------------------------------------------------------- | :-------------: | :--: | :------: | :------: | :------: | :------: | :---------------------: | :--------: | 
:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------: | +| [i3d_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py) | 340x256 | 8 | ResNet50 | ImageNet | 72.68 | 90.78 | 1.7 (320x3 frames) | 5170 | [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/i3d_r50_32x2x1_100e_kinetics400_rgb_20200614-c25ef9a4.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/20200614_060456.log) | [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/20200614_060456.log.json) | +| [i3d_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py) | short-side 256 | 8 | ResNet50 | ImageNet | 73.27 | 90.92 | x | 5170 | [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/20200725_031555.log) | [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/20200725_031555.log.json) | +| [i3d_r50_video_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb.py) | short-side 256p | 8 | ResNet50 | ImageNet | 72.85 | 90.75 | x | 5170 | [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/i3d_r50_video_32x2x1_100e_kinetics400_rgb_20200826-e31c6f52.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/20200706_143014.log) | [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/20200706_143014.log.json) | +| [i3d_r50_dense_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb.py) | 340x256 | 8x2 | ResNet50 | ImageNet | 72.77 | 90.57 | 1.7 (320x3 frames) | 5170 | [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/i3d_r50_dense_32x2x1_100e_kinetics400_rgb_20200616-2bbb4361.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/20200616_230011.log) | [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/20200616_230011.log.json) | +| [i3d_r50_dense_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb.py) | short-side 256 | 8 | ResNet50 | ImageNet | 73.48 | 91.00 | x | 5170 | [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb_20200725-24eb54cc.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/20200725_031604.log) | 
[json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/20200725_031604.log.json) | +| [i3d_r50_lazy_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_lazy_32x2x1_100e_kinetics400_rgb.py) | 340x256 | 8 | ResNet50 | ImageNet | 72.32 | 90.72 | 1.8 (320x3 frames) | 5170 | [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/i3d_r50_fast_32x2x1_100e_kinetics400_rgb_20200612-000e4d2a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/20200612_233836.log) | [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/20200612_233836.log.json) | +| [i3d_r50_lazy_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_lazy_32x2x1_100e_kinetics400_rgb.py) | short-side 256 | 8 | ResNet50 | ImageNet | 73.24 | 90.99 | x | 5170 | [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb_20200817-4e90d1d5.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/20200725_031457.log) | [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/20200725_031457.log.json) | +| [i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb.py) | short-side 256p | 8x4 | ResNet50 | ImageNet | 74.71 | 91.81 | x | 6438 | [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb_20200813-6e6aef1b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034054.log) | [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034054.log.json) | +| [i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb.py) | short-side 256p | 8x4 | ResNet50 | ImageNet | 73.37 | 91.26 | x | 4944 | [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb_20200815-17f84aa2.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034909.log) | [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034909.log.json) | +| [i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb.py) | short-side 256p | 8x4 | ResNet50 | ImageNet | 73.92 | 91.59 | x | 4832 | [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb_20200814-7c30d5bb.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/20200814_044208.log) | [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/20200814_044208.log.json) | :::{note} diff --git a/configs/recognition/i3d/README_zh-CN.md 
b/configs/recognition/i3d/README_zh-CN.md index ac10732615..cd29f40278 100644 --- a/configs/recognition/i3d/README_zh-CN.md +++ b/configs/recognition/i3d/README_zh-CN.md @@ -30,18 +30,18 @@ ### Kinetics-400 -|配置文件 | 分辨率 | GPU 数量 | 主干网络 |预训练| top1 准确率| top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M)| ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[i3d_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py) |340x256|8| ResNet50|ImageNet |72.68|90.78|1.7 (320x3 frames)| 5170|[ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/i3d_r50_32x2x1_100e_kinetics400_rgb_20200614-c25ef9a4.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/20200614_060456.log)| [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/20200614_060456.log.json)| -|[i3d_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py) |短边 256|8| ResNet50|ImageNet | 73.27|90.92|x|5170|[ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth)|[log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/20200725_031555.log)|[json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/20200725_031555.log.json)| -|[i3d_r50_video_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb.py)|短边 256p|8| ResNet50 |ImageNet|72.85 |90.75 |x|5170|[ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/i3d_r50_video_32x2x1_100e_kinetics400_rgb_20200826-e31c6f52.pth)|[log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/20200706_143014.log)|[json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/20200706_143014.log.json)| -|[i3d_r50_dense_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb.py) |340x256|8x2| ResNet50| ImageNet|72.77|90.57|1.7 (320x3 frames)| 5170| [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/i3d_r50_dense_32x2x1_100e_kinetics400_rgb_20200616-2bbb4361.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/20200616_230011.log)| [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/20200616_230011.log.json)| -|[i3d_r50_dense_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb.py) |短边 256|8| ResNet50| ImageNet|73.48|91.00|x| 5170| [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb_20200725-24eb54cc.pth)|[log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/20200725_031604.log)|[json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/20200725_031604.log.json)| -|[i3d_r50_lazy_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_lazy_32x2x1_100e_kinetics400_rgb.py) |340x256|8| ResNet50 |ImageNet|72.32|90.72|1.8 (320x3 frames)| 5170| 
[ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/i3d_r50_fast_32x2x1_100e_kinetics400_rgb_20200612-000e4d2a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/20200612_233836.log)| [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/20200612_233836.log.json)| -|[i3d_r50_lazy_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_lazy_32x2x1_100e_kinetics400_rgb.py) |短边 256|8| ResNet50| ImageNet|73.24|90.99|x| 5170| [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb_20200817-4e90d1d5.pth)| [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/20200725_031457.log) | [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/20200725_031457.log.json) | -|[i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb.py)|短边 256p|8x4| ResNet50 |ImageNet|74.71|91.81|x|6438|[ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb_20200813-6e6aef1b.pth)|[log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034054.log)|[json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034054.log.json)| -|[i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb.py)|短边 256p|8x4| ResNet50 |ImageNet|73.37|91.26|x|4944|[ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb_20200815-17f84aa2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034909.log)|[json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034909.log.json)| -|[i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb.py)|短边 256p|8x4| ResNet50 |ImageNet|73.92|91.59|x|4832|[ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb_20200814-7c30d5bb.pth)|[log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/20200814_044208.log)|[json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/20200814_044208.log.json)| +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log | json | +| :----------------------------------------------------------------------------------------------------------------------------------------------- | :-----: | :----: | :------: | :------: | :------: | :------: | :----------------: | :----------: | 
:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------: | +| [i3d_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py) | 340x256 | 8 | ResNet50 | ImageNet | 72.68 | 90.78 | 1.7 (320x3 frames) | 5170 | [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/i3d_r50_32x2x1_100e_kinetics400_rgb_20200614-c25ef9a4.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/20200614_060456.log) | [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb/20200614_060456.log.json) | +| [i3d_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py) | 短边 256 | 8 | ResNet50 | ImageNet | 73.27 | 90.92 | x | 5170 | [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/20200725_031555.log) | [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_256p_32x2x1_100e_kinetics400_rgb/20200725_031555.log.json) | +| [i3d_r50_video_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb.py) | 短边 256p | 8 | ResNet50 | ImageNet | 72.85 | 90.75 | x | 5170 | [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/i3d_r50_video_32x2x1_100e_kinetics400_rgb_20200826-e31c6f52.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/20200706_143014.log) | [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_video_32x2x1_100e_kinetics400_rgb/20200706_143014.log.json) | +| [i3d_r50_dense_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb.py) | 340x256 | 8x2 | ResNet50 | ImageNet | 72.77 | 90.57 | 1.7 (320x3 frames) | 5170 | [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/i3d_r50_dense_32x2x1_100e_kinetics400_rgb_20200616-2bbb4361.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/20200616_230011.log) | [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb/20200616_230011.log.json) | +| [i3d_r50_dense_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_dense_32x2x1_100e_kinetics400_rgb.py) | 短边 256 | 8 | ResNet50 | ImageNet | 73.48 | 91.00 | x | 5170 | [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb_20200725-24eb54cc.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/20200725_031604.log) | 
[json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_dense_256p_32x2x1_100e_kinetics400_rgb/20200725_031604.log.json) | +| [i3d_r50_lazy_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_lazy_32x2x1_100e_kinetics400_rgb.py) | 340x256 | 8 | ResNet50 | ImageNet | 72.32 | 90.72 | 1.8 (320x3 frames) | 5170 | [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/i3d_r50_fast_32x2x1_100e_kinetics400_rgb_20200612-000e4d2a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/20200612_233836.log) | [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_32x2x1_100e_kinetics400_rgb/20200612_233836.log.json) | +| [i3d_r50_lazy_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_r50_lazy_32x2x1_100e_kinetics400_rgb.py) | 短边 256 | 8 | ResNet50 | ImageNet | 73.24 | 90.99 | x | 5170 | [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb_20200817-4e90d1d5.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/20200725_031457.log) | [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_r50_fast_256p_32x2x1_100e_kinetics400_rgb/20200725_031457.log.json) | +| [i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb.py) | 短边 256p | 8x4 | ResNet50 | ImageNet | 74.71 | 91.81 | x | 6438 | [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb_20200813-6e6aef1b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034054.log) | [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_embedded_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034054.log.json) | +| [i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb.py) | 短边 256p | 8x4 | ResNet50 | ImageNet | 73.37 | 91.26 | x | 4944 | [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb_20200815-17f84aa2.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034909.log) | [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_gaussian_r50_32x2x1_100e_kinetics400_rgb/20200813_034909.log.json) | +| [i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb](/configs/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb.py) | 短边 256p | 8x4 | ResNet50 | ImageNet | 73.92 | 91.59 | x | 4832 | [ckpt](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb_20200814-7c30d5bb.pth) | [log](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/20200814_044208.log) | [json](https://download.openmmlab.com/mmaction/recognition/i3d/i3d_nl_dot_product_r50_32x2x1_100e_kinetics400_rgb/20200814_044208.log.json) | 注: @@ -70,7 +70,7 @@ python tools/train.py configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rg --validate --seed 0 
--deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E8%AE%AD%E7%BB%83%E9%85%8D%E7%BD%AE) 中的 **训练配置** 部分。 ## 如何测试 @@ -88,4 +88,4 @@ python tools/test.py configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb --out result.json --average-clips prob ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/omnisource/README.md b/configs/recognition/omnisource/README.md index 6f90a181c0..daeda15424 100644 --- a/configs/recognition/omnisource/README.md +++ b/configs/recognition/omnisource/README.md @@ -11,6 +11,7 @@ We introduce OmniSource, a novel framework for leveraging web data to train video recognition models. OmniSource overcomes the barriers between data formats, such as images, short videos, and long untrimmed videos for webly-supervised learning. First, data samples with multiple formats, curated by task-specific data collection and automatically filtered by a teacher model, are transformed into a unified form. Then a joint-training strategy is proposed to deal with the domain gaps between multiple data sources and formats in webly-supervised learning. Several good practices, including data balancing, resampling, and cross-dataset mixup are adopted in joint training. Experiments show that by utilizing data from multiple sources and formats, OmniSource is more data-efficient in training. With only 3.5M images and 800K minutes videos crawled from the internet without human labeling (less than 2% of prior works), our models learned with OmniSource improve Top-1 accuracy of 2D- and 3D-ConvNet baseline models by 3.0% and 3.9%, respectively, on the Kinetics-400 benchmark. With OmniSource, we establish new records with different pretraining strategies for video recognition. Our best models achieve 80.4%, 80.5%, and 83.6 Top-1 accuracies on the Kinetics-400 benchmark respectively for training-from-scratch, ImageNet pre-training and IG-65M pre-training. +
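Of the joint-training practices named in this abstract, cross-dataset mixup is the easiest to sketch in isolation. The snippet below is a rough illustration rather than MMAction2's actual implementation, assuming PyTorch; `cross_dataset_mixup` and its argument names are invented for this note, and the Beta(alpha, alpha) mixing weight follows the standard mixup recipe. A target-dataset clip and a teacher-labeled web clip are blended pixel-wise, and their soft labels are blended with the same weight.

```python
import torch
import torch.nn.functional as F


def cross_dataset_mixup(clip_a, label_a, clip_b, label_b, alpha=0.2):
    """Blend two clips (and their soft labels) that share a label space."""
    lam = torch.distributions.Beta(alpha, alpha).sample().item()
    mixed_clip = lam * clip_a + (1.0 - lam) * clip_b
    # Soft labels: the training loss must accept label distributions.
    mixed_label = lam * label_a + (1.0 - lam) * label_b
    return mixed_clip, mixed_label


# Example with dummy 8-frame RGB clips and a 400-class label space.
clip_a, clip_b = torch.randn(2, 3, 8, 224, 224).unbind(0)
label_a = F.one_hot(torch.tensor(3), num_classes=400).float()
label_b = F.one_hot(torch.tensor(7), num_classes=400).float()
clip, label = cross_dataset_mixup(clip_a, label_a, clip_b, label_b)
```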
    @@ -21,12 +22,12 @@ We introduce OmniSource, a novel framework for leveraging web data to train vide We currently released 4 models trained with OmniSource framework, including both 2D and 3D architectures. We compare the performance of models trained with or without OmniSource in the following table. -| Model | Modality | Pretrained | Backbone | Input | Resolution | Top-1 (Baseline / OmniSource (Delta)) | Top-5 (Baseline / OmniSource (Delta))) | Download | -| :------: | :------: | :--------: | :-------: | :---: | :------------: | :-----------------------------------: | :------------------------------------: | :----------------------------------------------------------: | -| TSN | RGB | ImageNet | ResNet50 | 3seg | 340x256 | 70.6 / 73.6 (+ 3.0) | 89.4 / 91.0 (+ 1.6) | [Baseline](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_imagenet_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-54192355.pth) | -| TSN | RGB | IG-1B | ResNet50 | 3seg | short-side 320 | 73.1 / 75.7 (+ 2.6) | 90.4 / 91.9 (+ 1.5) | [Baseline](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_without_omni_1x1x3_kinetics400_rgb_20200926-c133dd49.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-2863fed0.pth) | +| Model | Modality | Pretrained | Backbone | Input | Resolution | Top-1 (Baseline / OmniSource (Delta)) | Top-5 (Baseline / OmniSource (Delta))) | Download | +| :------: | :------: | :--------: | :-------: | :---: | :------------: | :-----------------------------------: | :------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| TSN | RGB | ImageNet | ResNet50 | 3seg | 340x256 | 70.6 / 73.6 (+ 3.0) | 89.4 / 91.0 (+ 1.6) | [Baseline](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_imagenet_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-54192355.pth) | +| TSN | RGB | IG-1B | ResNet50 | 3seg | short-side 320 | 73.1 / 75.7 (+ 2.6) | 90.4 / 91.9 (+ 1.5) | [Baseline](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_without_omni_1x1x3_kinetics400_rgb_20200926-c133dd49.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-2863fed0.pth) | | SlowOnly | RGB | Scratch | ResNet50 | 4x16 | short-side 320 | 72.9 / 76.8 (+ 3.9) | 90.9 / 92.5 (+ 1.6) | [Baseline](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r50_omni_4x16x1_kinetics400_rgb_20200926-51b1f7ea.pth) | -| SlowOnly | RGB | Scratch | ResNet101 | 8x8 | short-side 320 | 76.5 / 80.4 (+ 3.9) | 92.7 / 94.4 (+ 1.7) | 
[Baseline](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_without_omni_8x8x1_kinetics400_rgb_20200926-0c730aef.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_omni_8x8x1_kinetics400_rgb_20200926-b5dbb701.pth) | +| SlowOnly | RGB | Scratch | ResNet101 | 8x8 | short-side 320 | 76.5 / 80.4 (+ 3.9) | 92.7 / 94.4 (+ 1.7) | [Baseline](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_without_omni_8x8x1_kinetics400_rgb_20200926-0c730aef.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_omni_8x8x1_kinetics400_rgb_20200926-b5dbb701.pth) | 1. The validation set of Kinetics400 we used consists of 19796 videos. These videos are available at [Kinetics400-Validation](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB). The corresponding [data list](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (each line is of the format 'video_id, num_frames, label_index') and the [label map](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) are also available. @@ -38,32 +39,32 @@ We benchmark the OmniSource framework on the released subset, results are listed ### TSN-8seg-ResNet50 -| Model | Modality | Pretrained | Backbone | Input | Resolution | top1 acc | top5 acc | ckpt | json | log | -| :----------------------------------------------------------: | -------- | ---------- | -------- | ----- | -------------- | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [tsn_r50_1x1x8_100e_minikinetics_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 77.4 | 93.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030-b4eaf92b.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030.log) | +| Model | Modality | Pretrained | Backbone | Input | Resolution | top1 acc | top5 acc | ckpt | json | log | +| :-------------------------------------------------------------------------------------------------------------------------------------------------------------------: | -------- | ---------- | -------- | ----- | -------------- | :------: | :------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| 
[tsn_r50_1x1x8_100e_minikinetics_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 77.4 | 93.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030-b4eaf92b.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030.log) | | [tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 78.0 | 93.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030-23966b4b.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030.log) | -| [tsn_r50_1x1x8_100e_minikinetics_webimage_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 78.6 | 93.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030-66f5e046.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030.log) | -| [tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 80.6 | 95.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030-011f984d.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030.log) | +| [tsn_r50_1x1x8_100e_minikinetics_webimage_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 78.6 | 93.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030-66f5e046.pth) | 
[json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030.log) | +| [tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 80.6 | 95.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030-011f984d.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030.log) | | [tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 78.6 | 93.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030-59f5d064.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030.log) | -| [tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 81.3 | 94.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030-0f56ef51.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030.log) | +| [tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 81.3 | 94.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030-0f56ef51.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030.log) | ### SlowOnly-8x8-ResNet50 -| Model | Modality | Pretrained | Backbone | 
Input | Resolution | top1 acc | top5 acc | ckpt | json | log | -| :----------------------------------------------------------: | -------- | ---------- | -------- | ----- | -------------- | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [slowonly_r50_8x8x1_256e_minikinetics_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 78.6 | 93.9 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030-168eb098.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030.log) | +| Model | Modality | Pretrained | Backbone | Input | Resolution | top1 acc | top5 acc | ckpt | json | log | +| :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | -------- | ---------- | -------- | ----- | -------------- | :------: | :------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowonly_r50_8x8x1_256e_minikinetics_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 78.6 | 93.9 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030-168eb098.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030.log) | | [slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 80.8 | 95.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030-7da6dfc3.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030.json) | 
[log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030.log) | -| [slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 81.3 | 95.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030-c36616e9.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030.log) | -| [slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 82.4 | 95.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030-e2890e8d.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030.log) | +| [slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 81.3 | 95.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030-c36616e9.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030.log) | +| [slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 82.4 | 95.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030-e2890e8d.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030.log) | | 
[slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 80.3 | 94.5 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030-62974bac.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030.log) | -| [slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 82.9 | 95.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030-284cfd3b.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030.log) | +| [slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 82.9 | 95.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030-284cfd3b.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030.log) | We also list the benchmark in the original paper which was run on Kinetics-400 for comparison: -| Model | Baseline | +GG-img | +[GG-IG]-img | +IG-vid | +KRaw | OmniSource | -| :--------------------: | :---------: | :---------: | :----------: | :---------: | :---------: | :---------: | -| TSN-3seg-ResNet50 | 70.6 / 89.4 | 71.5 / 89.5 | 72.0 / 90.0 | 72.0 / 90.3 | 71.7 / 89.6 | 73.6 / 91.0 | -| SlowOnly-4x16-ResNet50 | 73.8 / 90.9 | 74.5 / 91.4 | 75.2 / 91.6 | 75.2 / 91.7 | 74.5 / 91.1 | 76.6 / 92.5 | +| Model | Baseline | +GG-img | +\[GG-IG\]-img | +IG-vid | +KRaw | OmniSource | +| :--------------------: | :---------: | :---------: | :------------: | :---------: | :---------: | :---------: | +| TSN-3seg-ResNet50 | 70.6 / 89.4 | 71.5 / 89.5 | 72.0 / 90.0 | 72.0 / 90.3 | 71.7 / 89.6 | 73.6 / 91.0 | +| SlowOnly-4x16-ResNet50 | 73.8 / 90.9 | 74.5 / 91.4 | 75.2 / 91.6 | 75.2 / 91.7 | 74.5 / 91.1 | 76.6 / 92.5 | ## Citation diff --git a/configs/recognition/omnisource/README_zh-CN.md index a330faeb38..29e3c60c6b 100644 ---
a/configs/recognition/omnisource/README_zh-CN.md +++ b/configs/recognition/omnisource/README_zh-CN.md @@ -12,12 +12,12 @@ In ECCV, 2020. [Paper](https://arxiv.org/abs/2003.13042), [Dataset](https://docs MMAction2 当前公开了 4 个 OmniSource 框架训练的模型,包含 2D 架构与 3D 架构。下表比较了使用或不适用 OmniSource 框架训练得的模型在 Kinetics-400 上的精度: -| 模型 | 模态 | 预训练 | 主干网络 | 输入 | 分辨率 | Top-1 准确率(Baseline / OmniSource (Delta)) | Top-5 准确率(Baseline / OmniSource (Delta))) | 模型下载链接 | -| :------: | :--: | :------: | :-------: | :--: | :------------: | :-----------------------------------------: | :------------------------------------------: | :----------------------------------------------------------: | -| TSN | RGB | ImageNet | ResNet50 | 3seg | 340x256 | 70.6 / 73.6 (+ 3.0) | 89.4 / 91.0 (+ 1.6) | [Baseline](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_imagenet_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-54192355.pth) | -| TSN | RGB | IG-1B | ResNet50 | 3seg | short-side 320 | 73.1 / 75.7 (+ 2.6) | 90.4 / 91.9 (+ 1.5) | [Baseline](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_without_omni_1x1x3_kinetics400_rgb_20200926-c133dd49.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-2863fed0.pth) | -| SlowOnly | RGB | None | ResNet50 | 4x16 | short-side 320 | 72.9 / 76.8 (+ 3.9) | 90.9 / 92.5 (+ 1.6) | [Baseline](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r50_omni_4x16x1_kinetics400_rgb_20200926-51b1f7ea.pth) | -| SlowOnly | RGB | None | ResNet101 | 8x8 | short-side 320 | 76.5 / 80.4 (+ 3.9) | 92.7 / 94.4 (+ 1.7) | [Baseline](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_without_omni_8x8x1_kinetics400_rgb_20200926-0c730aef.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_omni_8x8x1_kinetics400_rgb_20200926-b5dbb701.pth) | +| 模型 | 模态 | 预训练 | 主干网络 | 输入 | 分辨率 | Top-1 准确率(Baseline / OmniSource (Delta)) | Top-5 准确率(Baseline / OmniSource (Delta))) | 模型下载链接 | +| :------: | :-: | :------: | :-------: | :--: | :------------: | :--------------------------------------: | :---------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| TSN | RGB | ImageNet | ResNet50 | 3seg | 340x256 | 70.6 / 73.6 (+ 3.0) | 89.4 / 91.0 (+ 1.6) | [Baseline](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_imagenet_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-54192355.pth) | +| TSN | RGB | IG-1B | ResNet50 | 3seg | short-side 320 | 73.1 / 75.7 (+ 2.6) | 90.4 / 91.9 (+ 1.5) | 
[Baseline](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_without_omni_1x1x3_kinetics400_rgb_20200926-c133dd49.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-2863fed0.pth) | +| SlowOnly | RGB | None | ResNet50 | 4x16 | short-side 320 | 72.9 / 76.8 (+ 3.9) | 90.9 / 92.5 (+ 1.6) | [Baseline](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r50_omni_4x16x1_kinetics400_rgb_20200926-51b1f7ea.pth) | +| SlowOnly | RGB | None | ResNet101 | 8x8 | short-side 320 | 76.5 / 80.4 (+ 3.9) | 92.7 / 94.4 (+ 1.7) | [Baseline](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_without_omni_8x8x1_kinetics400_rgb_20200926-0c730aef.pth) / [OmniSource](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_omni_8x8x1_kinetics400_rgb_20200926-b5dbb701.pth) | 1. 我们使用的 Kinetics400 验证集包含 19796 个视频,用户可以从 [验证集视频](https://mycuhk-my.sharepoint.com/:u:/g/personal/1155136485_link_cuhk_edu_hk/EbXw2WX94J1Hunyt3MWNDJUBz-nHvQYhO9pvKqm6g39PMA?e=a9QldB) 下载这些视频。同时也提供了对应的 [数据列表](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_val_list.txt) (每行格式为:视频 ID,视频帧数目,类别序号)以及 [标签映射](https://download.openmmlab.com/mmaction/dataset/k400_val/kinetics_class2ind.txt) (类别序号到类别名称)。 @@ -29,32 +29,32 @@ MMAction2 在公开的数据集上进行了 OmniSource 框架的基准测试, ### TSN-8seg-ResNet50 -| 模型 | 模态 | 预训练 | 主干网络 | 输入 | 分辨率 | Top-1 准确率 | Top-5 准确率 | ckpt | json | log | -| :----------------------------------------------------------: | :--: | :------: | :------: | :--: | :------------: | :----------: | :----------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [tsn_r50_1x1x8_100e_minikinetics_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 77.4 | 93.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030-b4eaf92b.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030.log) | -| [tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 78.0 | 93.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030-23966b4b.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030.json) | 
[log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030.log) | -| [tsn_r50_1x1x8_100e_minikinetics_webimage_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 78.6 | 93.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030-66f5e046.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030.log) | -| [tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 80.6 | 95.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030-011f984d.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030.log) | -| [tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 78.6 | 93.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030-59f5d064.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030.log) | -| [tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 81.3 | 94.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030-0f56ef51.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030.log) | +| 模型 | 模态 | 预训练 | 主干网络 | 输入 | 分辨率 | Top-1 准确率 | Top-5 准确率 | ckpt | json | log | +| :-------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-: | 
:------: | :------: | :--: | :------------: | :-------: | :-------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r50_1x1x8_100e_minikinetics_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 77.4 | 93.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030-b4eaf92b.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/baseline/tsn_r50_1x1x8_100e_minikinetics_rgb_20201030.log) | +| [tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 78.0 | 93.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030-23966b4b.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/googleimage/tsn_r50_1x1x8_100e_minikinetics_googleimage_rgb_20201030.log) | +| [tsn_r50_1x1x8_100e_minikinetics_webimage_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 78.6 | 93.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030-66f5e046.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/webimage/tsn_r50_1x1x8_100e_minikinetics_webimage_rgb_20201030.log) | +| [tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 80.6 | 95.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030-011f984d.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030.json) | 
[log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/insvideo/tsn_r50_1x1x8_100e_minikinetics_insvideo_rgb_20201030.log) | +| [tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 78.6 | 93.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030-59f5d064.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/kineticsraw/tsn_r50_1x1x8_100e_minikinetics_kineticsraw_rgb_20201030.log) | +| [tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb](/configs/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb.py) | RGB | ImageNet | ResNet50 | 3seg | short-side 320 | 81.3 | 94.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030-0f56ef51.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/tsn_r50_1x1x8_100e_minikinetics_rgb/omnisource/tsn_r50_1x1x8_100e_minikinetics_omnisource_rgb_20201030.log) | ### SlowOnly-8x8-ResNet50 -| 模型 | 模态 | 预训练 | 主干网络 | 输入 | 分辨率 | Top-1 准确率 | Top-5 准确率 | ckpt | json | log | -| :----------------------------------------------------------: | :--: | :----: | :------: | :--: | :------------: | :----------: | :----------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [slowonly_r50_8x8x1_256e_minikinetics_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 78.6 | 93.9 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030-168eb098.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030.log) | -| [slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 80.8 | 95.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030-7da6dfc3.pth) | 
[json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030.log) | -| [slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 81.3 | 95.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030-c36616e9.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030.log) | -| [slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 82.4 | 95.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030-e2890e8d.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030.log) | -| [slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 80.3 | 94.5 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030-62974bac.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030.log) | -| [slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 82.9 | 95.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030-284cfd3b.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030.json) | 
[log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030.log) | +| 模型 | 模态 | 预训练 | 主干网络 | 输入 | 分辨率 | Top-1 准确率 | Top-5 准确率 | ckpt | json | log | +| :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-: | :--: | :------: | :-: | :------------: | :-------: | :-------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowonly_r50_8x8x1_256e_minikinetics_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 78.6 | 93.9 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030-168eb098.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/baseline/slowonly_r50_8x8x1_256e_minikinetics_rgb_20201030.log) | +| [slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 80.8 | 95.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030-7da6dfc3.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/googleimage/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb_20201030.log) | +| [slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 81.3 | 95.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030-c36616e9.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/webimage/slowonly_r50_8x8x1_256e_minikinetics_webimage_rgb_20201030.log) | +| 
[slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 82.4 | 95.6 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030-e2890e8d.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/insvideo/slowonly_r50_8x8x1_256e_minikinetics_insvideo_rgb_20201030.log) | +| [slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 80.3 | 94.5 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030-62974bac.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/kineticsraw/slowonly_r50_8x8x1_256e_minikinetics_kineticsraw_rgb_20201030.log) | +| [slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb](/configs/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics/slowonly_r50_8x8x1_256e_minikinetics_googleimage_rgb.py) | RGB | None | ResNet50 | 8x8 | short-side 320 | 82.9 | 95.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030-284cfd3b.pth) | [json](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030.json) | [log](https://download.openmmlab.com/mmaction/recognition/omnisource/slowonly_r50_8x8x1_256e_minikinetics_rgb/omnisource/slowonly_r50_8x8x1_256e_minikinetics_omnisource_rgb_20201030.log) | 下表列出了原论文中在 Kinetics-400 上进行基准测试的结果供参考: -| Model | Baseline | +GG-img | +[GG-IG]-img | +IG-vid | +KRaw | OmniSource | -| :--------------------: | :---------: | :---------: | :----------: | :---------: | :---------: | :---------: | -| TSN-3seg-ResNet50 | 70.6 / 89.4 | 71.5 / 89.5 | 72.0 / 90.0 | 72.0 / 90.3 | 71.7 / 89.6 | 73.6 / 91.0 | -| SlowOnly-4x16-ResNet50 | 73.8 / 90.9 | 74.5 / 91.4 | 75.2 / 91.6 | 75.2 / 91.7 | 74.5 / 91.1 | 76.6 / 92.5 | +| Model | Baseline | +GG-img | +\[GG-IG\]-img | +IG-vid | +KRaw | OmniSource | +| :--------------------: | :---------: | :---------: | :------------: | :---------: | :---------: | :---------: | +| TSN-3seg-ResNet50 | 70.6 / 89.4 | 71.5 / 89.5 | 72.0 / 90.0 | 72.0 / 90.3 | 71.7 / 89.6 | 73.6 / 91.0 | +| SlowOnly-4x16-ResNet50 | 73.8 / 90.9 | 74.5 / 91.4 | 75.2 / 91.6 | 75.2 / 91.7 | 74.5 / 91.1 | 76.6 / 92.5 | ## 注: diff --git a/configs/recognition/r2plus1d/README.md b/configs/recognition/r2plus1d/README.md index 3236372e7e..e22f36a605 100644 --- a/configs/recognition/r2plus1d/README.md +++ b/configs/recognition/r2plus1d/README.md @@ 
-11,6 +11,7 @@ In this paper we discuss several forms of spatiotemporal convolutions for video analysis and study their effects on action recognition. Our motivation stems from the observation that 2D CNNs applied to individual frames of the video have remained solid performers in action recognition. In this work we empirically demonstrate the accuracy advantages of 3D CNNs over 2D CNNs within the framework of residual learning. Furthermore, we show that factorizing the 3D convolutional filters into separate spatial and temporal components yields significant advantages in accuracy. Our empirical study leads to the design of a new spatiotemporal convolutional block "R(2+1)D" which gives rise to CNNs that achieve results comparable or superior to the state-of-the-art on Sports-1M, Kinetics, UCF101 and HMDB51. +
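The abstract quoted in the hunk above describes the key design: a full t×d×d 3D convolution is factorized into a 1×d×d spatial convolution followed by a t×1×1 temporal convolution, with the intermediate width chosen so that the factorized block has roughly the same parameter count as the original 3D convolution. The following is a minimal stand-alone sketch of that decomposition — it is not the MMAction2 implementation; the class name is ours, and the ReLU placement and width formula follow our reading of the paper.

```python
# Minimal sketch (illustrative only) of the (2+1)D factorization described
# in the R(2+1)D abstract: replace a t x d x d 3D conv with a 1 x d x d
# spatial conv, a ReLU, and a t x 1 x 1 temporal conv.
import torch
import torch.nn as nn


class Conv2Plus1d(nn.Module):

    def __init__(self, in_channels, out_channels, t=3, d=3):
        super().__init__()
        # Intermediate width that keeps the parameter count of the two
        # factorized convs close to that of the full 3D convolution
        # (assumption: this matches the heuristic given in the paper).
        mid_channels = (t * d * d * in_channels * out_channels) // (
            d * d * in_channels + t * out_channels)
        self.spatial = nn.Conv3d(
            in_channels, mid_channels, kernel_size=(1, d, d),
            padding=(0, d // 2, d // 2), bias=False)
        self.relu = nn.ReLU(inplace=True)
        self.temporal = nn.Conv3d(
            mid_channels, out_channels, kernel_size=(t, 1, 1),
            padding=(t // 2, 0, 0), bias=False)

    def forward(self, x):
        # x: (batch, channels, time, height, width)
        return self.temporal(self.relu(self.spatial(x)))


# Example: a 2-clip batch of 8 frames at 56x56 with 64 channels.
clip = torch.randn(2, 64, 8, 56, 56)
print(Conv2Plus1d(64, 128)(clip).shape)  # torch.Size([2, 128, 8, 56, 56])
```

The extra nonlinearity between the spatial and temporal convolutions is one of the reasons the paper cites for the accuracy gains over a single 3D convolution of the same size.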
    @@ -19,12 +20,12 @@ In this paper we discuss several forms of spatiotemporal convolutions for video ### Kinetics-400 -|config | resolution | gpus | backbone | pretrain| top1 acc| top5 acc | inference_time(video/s) | gpu_mem(M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[r2plus1d_r34_8x8x1_180e_kinetics400_rgb](/configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py) | short-side 256|8x4| ResNet34|None |67.30|87.65|x|5019|[ckpt](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb_20200729-aa94765e.pth)|[log](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb/20200728_021421.log)|[json](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb/20200728_021421.log.json)| -|[r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb](/configs/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb.py) | short-side 256|8| ResNet34|None |67.3|87.8|x|5019|[ckpt](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb_20200826-ab35a529.pth)|[log](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb/20200724_201360.log.json)|[json](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb/20200724_201360.log)| -|[r2plus1d_r34_8x8x1_180e_kinetics400_rgb](/configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py) | short-side 320|8x2| ResNet34|None |68.68|88.36|1.6 (80x3 frames)|5019|[ckpt](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_8x8x1_180e_kinetics400_rgb_20200618-3fce5629.pth)| [log](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r21d_8x8.log)| [json](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_8x8_69.58_88.36.log.json)| -|[r2plus1d_r34_32x2x1_180e_kinetics400_rgb](/configs/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb.py) |short-side 320|8x2| ResNet34|None |74.60|91.59|0.5 (320x3 frames)|12975| [ckpt](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r2plus1d_r34_32x2x1_180e_kinetics400_rgb_20200618-63462eb3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r21d_32x2.log)| [json](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r2plus1d_r34_32x2_74.6_91.6.log.json)| +| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | inference_time(video/s) | gpu_mem(M) | ckpt | log | json | +| :------------------------------------------------------------------------------------------------------------------------------ | :------------: | :--: | :------: | :------: | :------: | :------: | :---------------------: | :--------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------------: | 
:------------------------------------------------------------------------------------------------------------------------------------------------: | +| [r2plus1d_r34_8x8x1_180e_kinetics400_rgb](/configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py) | short-side 256 | 8x4 | ResNet34 | None | 67.30 | 87.65 | x | 5019 | [ckpt](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb_20200729-aa94765e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb/20200728_021421.log) | [json](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb/20200728_021421.log.json) | +| [r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb](/configs/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb.py) | short-side 256 | 8 | ResNet34 | None | 67.3 | 87.8 | x | 5019 | [ckpt](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb_20200826-ab35a529.pth) | [log](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb/20200724_201360.log.json) | [json](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb/20200724_201360.log) | +| [r2plus1d_r34_8x8x1_180e_kinetics400_rgb](/configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py) | short-side 320 | 8x2 | ResNet34 | None | 68.68 | 88.36 | 1.6 (80x3 frames) | 5019 | [ckpt](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_8x8x1_180e_kinetics400_rgb_20200618-3fce5629.pth) | [log](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r21d_8x8.log) | [json](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_8x8_69.58_88.36.log.json) | +| [r2plus1d_r34_32x2x1_180e_kinetics400_rgb](/configs/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb.py) | short-side 320 | 8x2 | ResNet34 | None | 74.60 | 91.59 | 0.5 (320x3 frames) | 12975 | [ckpt](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r2plus1d_r34_32x2x1_180e_kinetics400_rgb_20200618-63462eb3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r21d_32x2.log) | [json](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r2plus1d_r34_32x2_74.6_91.6.log.json) | :::{note} diff --git a/configs/recognition/r2plus1d/README_zh-CN.md b/configs/recognition/r2plus1d/README_zh-CN.md index d720508dc0..da2bd23f4a 100644 --- a/configs/recognition/r2plus1d/README_zh-CN.md +++ b/configs/recognition/r2plus1d/README_zh-CN.md @@ -18,12 +18,12 @@ ### Kinetics-400 -|配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练| top1 准确率| top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[r2plus1d_r34_8x8x1_180e_kinetics400_rgb](/configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py) | 短边 256|8x4| ResNet34|None 
|67.30|87.65|x|5019|[ckpt](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb_20200729-aa94765e.pth)|[log](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb/20200728_021421.log)|[json](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb/20200728_021421.log.json)| -|[r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb](/configs/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb.py) | 短边 256|8| ResNet34|None |67.3|87.8|x|5019|[ckpt](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb_20200826-ab35a529.pth)|[log](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb/20200724_201360.log.json)|[json](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb/20200724_201360.log)| -|[r2plus1d_r34_8x8x1_180e_kinetics400_rgb](/configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py) | 短边 320|8x2| ResNet34|None |68.68|88.36|1.6 (80x3 frames)|5019|[ckpt](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_8x8x1_180e_kinetics400_rgb_20200618-3fce5629.pth)| [log](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r21d_8x8.log)| [json](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_8x8_69.58_88.36.log.json)| -|[r2plus1d_r34_32x2x1_180e_kinetics400_rgb](/configs/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb.py) |短边 320|8x2| ResNet34|None |74.60|91.59|0.5 (320x3 frames)|12975| [ckpt](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r2plus1d_r34_32x2x1_180e_kinetics400_rgb_20200618-63462eb3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r21d_32x2.log)| [json](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r2plus1d_r34_32x2_74.6_91.6.log.json)| +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log | json | +| :------------------------------------------------------------------------------------------------------------------------------ | :----: | :----: | :------: | :--: | :------: | :------: | :----------------: | :----------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------: | +| [r2plus1d_r34_8x8x1_180e_kinetics400_rgb](/configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py) | 短边 256 | 8x4 | ResNet34 | None | 67.30 | 87.65 | x | 5019 | [ckpt](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb_20200729-aa94765e.pth) | 
[log](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb/20200728_021421.log) | [json](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_256p_8x8x1_180e_kinetics400_rgb/20200728_021421.log.json) | +| [r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb](/configs/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb.py) | 短边 256 | 8 | ResNet34 | None | 67.3 | 87.8 | x | 5019 | [ckpt](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb_20200826-ab35a529.pth) | [log](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb/20200724_201360.log.json) | [json](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb/20200724_201360.log) | +| [r2plus1d_r34_8x8x1_180e_kinetics400_rgb](/configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb.py) | 短边 320 | 8x2 | ResNet34 | None | 68.68 | 88.36 | 1.6 (80x3 frames) | 5019 | [ckpt](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_8x8x1_180e_kinetics400_rgb_20200618-3fce5629.pth) | [log](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r21d_8x8.log) | [json](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinetics400_rgb/r2plus1d_r34_8x8_69.58_88.36.log.json) | +| [r2plus1d_r34_32x2x1_180e_kinetics400_rgb](/configs/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb.py) | 短边 320 | 8x2 | ResNet34 | None | 74.60 | 91.59 | 0.5 (320x3 frames) | 12975 | [ckpt](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r2plus1d_r34_32x2x1_180e_kinetics400_rgb_20200618-63462eb3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r21d_32x2.log) | [json](https://download.openmmlab.com/mmaction/recognition/r2plus1d/r2plus1d_r34_32x2x1_180e_kinetics400_rgb/r2plus1d_r34_32x2_74.6_91.6.log.json) | 注: @@ -52,7 +52,7 @@ python tools/train.py configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kinet --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E8%AE%AD%E7%BB%83%E9%85%8D%E7%BD%AE) 中的 **训练配置** 部分。 ## 如何测试 @@ -70,4 +70,4 @@ python tools/test.py configs/recognition/r2plus1d/r2plus1d_r34_8x8x1_180e_kineti --out result.json --average-clips=prob ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/slowfast/README.md b/configs/recognition/slowfast/README.md index 12bb22a5db..35fbc3f319 100644 --- a/configs/recognition/slowfast/README.md +++ b/configs/recognition/slowfast/README.md @@ -11,6 +11,7 @@ We present SlowFast networks for video recognition. Our model involves (i) a Slow pathway, operating at low frame rate, to capture spatial semantics, and (ii) a Fast pathway, operating at high frame rate, to capture motion at fine temporal resolution. The Fast pathway can be made very lightweight by reducing its channel capacity, yet can learn useful temporal information for video recognition. 
Our models achieve strong performance for both action classification and detection in video, and large improvements are pin-pointed as contributions by our SlowFast concept. We report state-of-the-art accuracy on major video recognition benchmarks, Kinetics, Charades and AVA. +
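Before the benchmark tables, a rough sketch may help make the two-pathway idea from the abstract concrete: the Slow pathway processes a temporally strided subset of frames with full channel capacity, while the Fast pathway processes every frame with a fraction `beta` of the channels. This is a loose illustration, not the MMAction2 SlowFast model — all class, module, and parameter names here are our own, the backbones are stand-in single convolutions, and the lateral connections the full model fuses the pathways with are omitted.

```python
# Illustrative two-pathway sketch (assumptions noted above, not the real
# SlowFast network): Slow sees every alpha-th frame at full width, Fast
# sees all frames at beta * width channels.
import torch
import torch.nn as nn


class TwoPathwaySketch(nn.Module):

    def __init__(self, num_classes=400, alpha=8, beta=1 / 8, width=64):
        super().__init__()
        self.alpha = alpha  # temporal stride ratio between the pathways
        fast_width = int(width * beta)  # lightweight Fast pathway
        # Stand-ins for the 3D ResNet stages of each pathway.
        self.slow = nn.Conv3d(3, width, kernel_size=(1, 7, 7),
                              stride=(1, 2, 2), padding=(0, 3, 3))
        self.fast = nn.Conv3d(3, fast_width, kernel_size=(5, 7, 7),
                              stride=(1, 2, 2), padding=(2, 3, 3))
        self.pool = nn.AdaptiveAvgPool3d(1)
        self.fc = nn.Linear(width + fast_width, num_classes)

    def forward(self, frames):
        # frames: (batch, 3, time, height, width); the Slow pathway only
        # looks at every alpha-th frame, the Fast pathway at all of them.
        slow_feat = self.pool(self.slow(frames[:, :, ::self.alpha]))
        fast_feat = self.pool(self.fast(frames))
        feat = torch.cat([slow_feat, fast_feat], dim=1).flatten(1)
        return self.fc(feat)


# A 32-frame clip: Slow sees 4 frames, Fast sees all 32.
scores = TwoPathwaySketch()(torch.randn(2, 3, 32, 224, 224))
print(scores.shape)  # torch.Size([2, 400])
```

The channel reduction is what keeps the high-frame-rate pathway cheap: at `beta = 1/8` the Fast pathway adds only a small fraction of the total compute despite seeing `alpha` times more frames.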
    @@ -19,25 +20,25 @@ We present SlowFast networks for video recognition. Our model involves (i) a Slo ### Kinetics-400 -|config | resolution | gpus | backbone |pretrain| top1 acc| top5 acc | inference_time(video/s) | gpu_mem(M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 256|8x4| ResNet50|None |74.75|91.73|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb_20200728-145f1097.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log.json)| -|[slowfast_r50_video_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py) |short-side 256|8| ResNet50|None |73.95|91.50|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/slowfast_r50_video_4x16x1_256e_kinetics400_rgb_20200826-f85b90c5.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log.json)| -|[slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 320|8x2| ResNet50|None |76.0|92.54|1.6 ((32+4)x10x3 frames)|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_256e_kinetics400_rgb_20210722-04e43ed4.pth)| [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log.json)| -|[slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 320|8x2| ResNet50|None |76.34|92.67|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb_20210722-bb725050.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log.json)| -|[slowfast_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |short-side 320|8x3| ResNet50 |None|76.94|92.8|1.3 ((32+8)x10x3 frames)|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/slowfast_r50_8x8x1_256e_kinetics400_rgb_20200716-73547d2b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log)| 
[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log.json)| -|[slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.py) |short-side 320|8x4| ResNet50 |None|76.34|92.61||9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr-43988bac.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.json)| -|[slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.py) |short-side 320|8x2| ResNet50 |None|76.07|92.21|x|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb-f82bd304.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.json)| -|[slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr](/configs/recognition/slowfast/slowfast_perbn_r50_8x8x1_256e_kinetics400_rgb_steplr.py) |short-side 320|8x4| ResNet50 |None|76.58|92.85||9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr-28474e54.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.json)| -|[slowfast_r101_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 256|8x1| ResNet101 + ResNet50 |None|76.69|93.07||16628| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/slowfast_r101_4x16x1_256e_kinetics400_rgb_20210218-d8b58813.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log.json)| -|[slowfast_r101_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb.py) |short-side 256|8x4| ResNet101 |None|77.90|93.51||25994| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/slowfast_r101_8x8x1_256e_kinetics400_rgb_20210218-0dd54025.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log)| 
[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log.json)| -|[slowfast_r152_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r152_r50_4x16x1_256e_kinetics400_rgb.py) |short-side 256|8x1| ResNet152 + ResNet50 |None|77.13|93.20||10077| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/slowfast_r152_4x16x1_256e_kinetics400_rgb_20210122-bdeb6b87.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log.json)| +| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | inference_time(video/s) | gpu_mem(M) | ckpt | log | json | +| :-------------------------------------------------------------------------------------------------------------------------------------------- | :------------: | :--: | :------------------: | :------: | :------: | :------: | :----------------------: | :--------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) | short-side 256 | 8x4 | ResNet50 | None | 74.75 | 91.73 | x | 6203 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb_20200728-145f1097.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log.json) | +| [slowfast_r50_video_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py) | short-side 256 | 8 | ResNet50 | None | 73.95 | 91.50 | x | 6203 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/slowfast_r50_video_4x16x1_256e_kinetics400_rgb_20200826-f85b90c5.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log.json) | +| [slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) | short-side 320 | 8x2 | ResNet50 | None | 76.0 | 92.54 | 1.6 ((32+4)x10x3 frames) | 6203 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_256e_kinetics400_rgb_20210722-04e43ed4.pth) | 
[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log.json) | +| [slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb.py) | short-side 320 | 8x2 | ResNet50 | None | 76.34 | 92.67 | x | 6203 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb_20210722-bb725050.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log.json) | +| [slowfast_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) | short-side 320 | 8x3 | ResNet50 | None | 76.94 | 92.8 | 1.3 ((32+8)x10x3 frames) | 9062 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/slowfast_r50_8x8x1_256e_kinetics400_rgb_20200716-73547d2b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log.json) | +| [slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.py) | short-side 320 | 8x4 | ResNet50 | None | 76.34 | 92.61 | | 9062 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr-43988bac.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.json) | +| [slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.py) | short-side 320 | 8x2 | ResNet50 | None | 76.07 | 92.21 | x | 9062 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb-f82bd304.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.json) | +| [slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr](/configs/recognition/slowfast/slowfast_perbn_r50_8x8x1_256e_kinetics400_rgb_steplr.py) | short-side 320 | 8x4 | ResNet50 | None | 76.58 | 92.85 | | 9062 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr-28474e54.pth) | 
[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.json) | +| [slowfast_r101_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py) | short-side 256 | 8x1 | ResNet101 + ResNet50 | None | 76.69 | 93.07 | | 16628 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/slowfast_r101_4x16x1_256e_kinetics400_rgb_20210218-d8b58813.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log.json) | +| [slowfast_r101_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb.py) | short-side 256 | 8x4 | ResNet101 | None | 77.90 | 93.51 | | 25994 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/slowfast_r101_8x8x1_256e_kinetics400_rgb_20210218-0dd54025.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log.json) | +| [slowfast_r152_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r152_r50_4x16x1_256e_kinetics400_rgb.py) | short-side 256 | 8x1 | ResNet152 + ResNet50 | None | 77.13 | 93.20 | | 10077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/slowfast_r152_4x16x1_256e_kinetics400_rgb_20210122-bdeb6b87.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log.json) | ### Something-Something V1 -|config | resolution | gpus | backbone |pretrain| top1 acc| top5 acc | inference_time(video/s) | gpu_mem(M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[slowfast_r50_16x8x1_22e_sthv1_rgb](/configs/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb.py)|height 100|8|ResNet50|Kinetics400|49.67|79.00|x|9293|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/slowfast_r50_16x8x1_22e_sthv1_rgb_20211202-aaaf9279.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/slowfast_r50_16x8x1_22e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/slowfast_r50_16x8x1_22e_sthv1_rgb.json)| +| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | inference_time(video/s) | gpu_mem(M) | ckpt | log | json | +| :------------------------------------------------------------------------------------------------------ | :--------: | :--: | :------: | :---------: | :------: | :------: | :---------------------: | :--------: | 
:------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowfast_r50_16x8x1_22e_sthv1_rgb](/configs/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb.py) | height 100 | 8 | ResNet50 | Kinetics400 | 49.67 | 79.00 | x | 9293 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/slowfast_r50_16x8x1_22e_sthv1_rgb_20211202-aaaf9279.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/slowfast_r50_16x8x1_22e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/slowfast_r50_16x8x1_22e_sthv1_rgb.json) | :::{note} diff --git a/configs/recognition/slowfast/README_zh-CN.md b/configs/recognition/slowfast/README_zh-CN.md index 7b662ffe08..b26ba710d5 100644 --- a/configs/recognition/slowfast/README_zh-CN.md +++ b/configs/recognition/slowfast/README_zh-CN.md @@ -18,25 +18,25 @@ ### Kinetics-400 -|配置文件 | 分辨率 | GPU 数量 | 主干网络 |预训练| top1 准确率| top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M)| ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) |短边256|8x4| ResNet50|None |74.75|91.73|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb_20200728-145f1097.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log.json)| -|[slowfast_r50_video_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py) |短边256|8| ResNet50|None |73.95|91.50|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/slowfast_r50_video_4x16x1_256e_kinetics400_rgb_20200826-f85b90c5.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log.json)| -|[slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) |短边320|8x2| ResNet50|None |76.0|92.54|1.6 ((32+4)x10x3 frames)|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_256e_kinetics400_rgb_20210722-04e43ed4.pth)| [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log.json)| 
-|[slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb.py) |短边320|8x2| ResNet50|None |76.34|92.67|x|6203|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb_20210722-bb725050.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log.json)| -|[slowfast_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |短边320|8x3| ResNet50 |None|76.94|92.8|1.3 ((32+8)x10x3 frames)|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/slowfast_r50_8x8x1_256e_kinetics400_rgb_20200716-73547d2b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log.json)| -|[slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) |短边320|8x4| ResNet50 |None|76.34|92.61|1.3 ((32+8)x10x3 frames)|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr-43988bac.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.json)| -|[slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.py) |短边320|8x2| ResNet50 |None|76.07|92.21|x|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb-f82bd304.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.json)| -|[slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr](/configs/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb.py) |短边320|8x4| ResNet50 |None|76.58|92.85|1.3 ((32+8)x10x3 frames)|9062| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr-28474e54.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.json)| 
-|[slowfast_r101_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py) |短边256|8x1| ResNet101 + ResNet50 |None|76.69|93.07||16628| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/slowfast_r101_4x16x1_256e_kinetics400_rgb_20210218-d8b58813.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log.json)| -|[slowfast_r101_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb.py) |短边256|8x4| ResNet101 |None|77.90|93.51||25994| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/slowfast_r101_8x8x1_256e_kinetics400_rgb_20210218-0dd54025.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log.json)| -|[slowfast_r152_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r152_r50_4x16x1_256e_kinetics400_rgb.py) |短边256|8x1| ResNet152 + ResNet50 |None|77.13|93.20||10077| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/slowfast_r152_4x16x1_256e_kinetics400_rgb_20210122-bdeb6b87.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log.json)| +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log | json | +| :-------------------------------------------------------------------------------------------------------------------------------------- | :---: | :----: | :------------------: | :--: | :------: | :------: | :----------------------: | :----------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) | 短边256 | 8x4 | ResNet50 | None | 74.75 | 91.73 | x | 6203 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb_20200728-145f1097.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_256p_4x16x1_256e_kinetics400_rgb/20200731_151706.log.json) | +| 
[slowfast_r50_video_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py) | 短边256 | 8 | ResNet50 | None | 73.95 | 91.50 | x | 6203 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/slowfast_r50_video_4x16x1_256e_kinetics400_rgb_20200826-f85b90c5.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb/20200812_160237.log.json) | +| [slowfast_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb.py) | 短边320 | 8x2 | ResNet50 | None | 76.0 | 92.54 | 1.6 ((32+4)x10x3 frames) | 6203 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_256e_kinetics400_rgb_20210722-04e43ed4.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_4x16x1_256e_kinetics400_rgb/slowfast_r50_4x16x1_20210722.log.json) | +| [slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb.py) | 短边320 | 8x2 | ResNet50 | None | 76.34 | 92.67 | x | 6203 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb_20210722-bb725050.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_4x16x1_256e_kinetics400_rgb/slowfast_prebn_r50_4x16x1_20210722.log.json) | +| [slowfast_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) | 短边320 | 8x3 | ResNet50 | None | 76.94 | 92.8 | 1.3 ((32+8)x10x3 frames) | 9062 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/slowfast_r50_8x8x1_256e_kinetics400_rgb_20200716-73547d2b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb/20200716_192653.log.json) | +| [slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr](/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py) | 短边320 | 8x4 | ResNet50 | None | 76.34 | 92.61 | 1.3 ((32+8)x10x3 frames) | 9062 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr-43988bac.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_r50_8x8x1_256e_kinetics400_rgb_steplr.json) | +| [slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.py) | 短边320 | 8x2 | 
ResNet50 | None | 76.07 | 92.21 | x | 9062 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb-f82bd304.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb/slowfast_multigrid_r50_8x8x1_358e_kinetics400_rgb.json) | +| [slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr](/configs/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb.py) | 短边320 | 8x4 | ResNet50 | None | 76.58 | 92.85 | 1.3 ((32+8)x10x3 frames) | 9062 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr-28474e54.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr/slowfast_prebn_r50_8x8x1_256e_kinetics400_rgb_steplr.json) | +| [slowfast_r101_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_r50_4x16x1_256e_kinetics400_rgb.py) | 短边256 | 8x1 | ResNet101 + ResNet50 | None | 76.69 | 93.07 | | 16628 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/slowfast_r101_4x16x1_256e_kinetics400_rgb_20210218-d8b58813.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_4x16x1_256e_kinetics400_rgb/20210118_133528.log.json) | +| [slowfast_r101_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb.py) | 短边256 | 8x4 | ResNet101 | None | 77.90 | 93.51 | | 25994 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/slowfast_r101_8x8x1_256e_kinetics400_rgb_20210218-0dd54025.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r101_8x8x1_256e_kinetics400_rgb/20210218_121513.log.json) | +| [slowfast_r152_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowfast/slowfast_r152_r50_4x16x1_256e_kinetics400_rgb.py) | 短边256 | 8x1 | ResNet152 + ResNet50 | None | 77.13 | 93.20 | | 10077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/slowfast_r152_4x16x1_256e_kinetics400_rgb_20210122-bdeb6b87.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r152_4x16x1_256e_kinetics400_rgb/20210122_131321.log.json) | ### Something-Something V1 -|配置文件 | 分辨率 | GPU 数量 | 主干网络 |预训练| top1 准确率| top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| 
-|[slowfast_r50_16x8x1_22e_sthv1_rgb](/configs/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb.py)|高 100|8|ResNet50|Kinetics400|49.67|79.00|x|9293|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/slowfast_r50_16x8x1_22e_sthv1_rgb_20211202-aaaf9279.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/slowfast_r50_16x8x1_22e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/slowfast_r50_16x8x1_22e_sthv1_rgb.json)| +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log | json | +| :------------------------------------------------------------------------------------------------------ | :---: | :----: | :------: | :---------: | :------: | :------: | :------------: | :----------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowfast_r50_16x8x1_22e_sthv1_rgb](/configs/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | Kinetics400 | 49.67 | 79.00 | x | 9293 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/slowfast_r50_16x8x1_22e_sthv1_rgb_20211202-aaaf9279.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/slowfast_r50_16x8x1_22e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowfast/slowfast_r50_16x8x1_22e_sthv1_rgb/slowfast_r50_16x8x1_22e_sthv1_rgb.json) | 注: @@ -65,7 +65,7 @@ python tools/train.py configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kine --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E8%AE%AD%E7%BB%83%E9%85%8D%E7%BD%AE) 中的 **训练配置** 部分。 ## 如何测试 @@ -83,4 +83,4 @@ python tools/test.py configs/recognition/slowfast/slowfast_r50_4x16x1_256e_kinet --out result.json --average-clips=prob ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/slowonly/README.md b/configs/recognition/slowonly/README.md index 5697f29322..6e18ffc110 100644 --- a/configs/recognition/slowonly/README.md +++ b/configs/recognition/slowonly/README.md @@ -11,6 +11,7 @@ We present SlowFast networks for video recognition. Our model involves (i) a Slow pathway, operating at low frame rate, to capture spatial semantics, and (ii) a Fast pathway, operating at high frame rate, to capture motion at fine temporal resolution. The Fast pathway can be made very lightweight by reducing its channel capacity, yet can learn useful temporal information for video recognition. Our models achieve strong performance for both action classification and detection in video, and large improvements are pin-pointed as contributions by our SlowFast concept. 
We report state-of-the-art accuracy on major video recognition benchmarks, Kinetics, Charades and AVA. +
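The abstract above describes the core design: a Slow pathway that sees few frames at full channel capacity, and a Fast pathway that sees many frames with a fraction of the channels. The toy module below is only a schematic sketch of that idea, assuming a frame-rate ratio `alpha=8`, a channel ratio `beta=8`, one convolution per pathway, and late concatenation fusion; the real SlowFast networks in this repo use full 3D-ResNet pathways with lateral connections at multiple stages, so every layer and width here is illustrative.

```python
# Schematic sketch of the two-pathway idea; NOT the MMAction2 implementation.
import torch
import torch.nn as nn


class TwoPathwaySketch(nn.Module):
    """Toy SlowFast-style recognizer: heavy Slow pathway on sparsely
    sampled frames, light Fast pathway on all frames."""

    def __init__(self, num_classes=400, alpha=8, beta=8):
        super().__init__()
        self.alpha = alpha  # Fast pathway sees alpha x more frames than Slow
        # Slow pathway: wide channels, temporally thin kernel.
        self.slow = nn.Conv3d(3, 64, kernel_size=(1, 7, 7),
                              stride=(1, 2, 2), padding=(0, 3, 3))
        # Fast pathway: 1/beta of the channels, temporally wide kernel.
        self.fast = nn.Conv3d(3, 64 // beta, kernel_size=(5, 7, 7),
                              stride=(1, 2, 2), padding=(2, 3, 3))
        self.fc = nn.Linear(64 + 64 // beta, num_classes)

    def forward(self, clip):
        # clip: (N, C, T, H, W), e.g. T = 32 frames.
        slow_in = clip[:, :, ::self.alpha]  # temporal stride alpha -> T/alpha frames
        slow_feat = self.slow(slow_in).mean(dim=(2, 3, 4))   # global avg pool
        fast_feat = self.fast(clip).mean(dim=(2, 3, 4))
        # Late fusion by concatenation (the paper fuses laterally at several stages).
        return self.fc(torch.cat([slow_feat, fast_feat], dim=1))


scores = TwoPathwaySketch()(torch.randn(2, 3, 32, 224, 224))
print(scores.shape)  # torch.Size([2, 400])
```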
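Separately, the test commands quoted earlier in this patch pass `--average-clips=prob`. Conceptually, that option turns each sampled clip's class scores into probabilities and averages them over all views of the video (for example, the 10 clips x 3 crops protocol listed in the benchmark tables below). A minimal NumPy sketch of that averaging, not the actual MMAction2 code path:

```python
# What `--average-clips=prob` amounts to, conceptually (illustrative only).
import numpy as np


def average_clips_prob(clip_scores):
    """clip_scores: (num_clips, num_classes) raw logits for one video."""
    # Numerically stable softmax per clip.
    shifted = clip_scores - clip_scores.max(axis=1, keepdims=True)
    probs = np.exp(shifted)
    probs /= probs.sum(axis=1, keepdims=True)
    # Average per-clip probabilities into one video-level prediction.
    return probs.mean(axis=0)


rng = np.random.default_rng(0)
video_probs = average_clips_prob(rng.standard_normal((30, 400)))  # 10 clips x 3 crops
print(video_probs.argmax(), round(float(video_probs.sum()), 4))  # class id, ~1.0
```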
    @@ -19,84 +20,84 @@ We present SlowFast networks for video recognition. Our model involves (i) a Slo ### Kinetics-400 -|config | resolution | gpus | backbone |pretrain| top1 acc| top5 acc | inference_time(video/s) | gpu_mem(M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[slowonly_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py)|short-side 256|8x4| ResNet50 | None |72.76|90.51|x|3168|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb_20200820-bea7701f.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log.json)| -|[slowonly_r50_video_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py)|short-side 320|8x2| ResNet50 | None |72.90|90.82|x|8472|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014-c9cdc656.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014.json)| -|[slowonly_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py) |short-side 256|8x4| ResNet50 | None |74.42|91.49|x|5820|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb_20200820-75851a7d.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/20200817_003320.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/20200817_003320.log.json)| -|[slowonly_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py)|short-side 320|8x2| ResNet50 | None |73.02|90.77|4.0 (40x3 frames)|3168|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth)| [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/so_4x16.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16_73.02_90.77.log.json)| -|[slowonly_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py) |short-side 320|8x3| ResNet50 | None |74.93|91.92|2.3 (80x3 frames)|5820| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/slowonly_r50_8x8x1_256e_kinetics400_rgb_20200703-a79c555a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/so_8x8.log)| 
[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/slowonly_r50_8x8_74.93_91.92.log.json)| -|[slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb.py)|short-side 320|8x2| ResNet50 | ImageNet |73.39|91.12|x|3168|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912-1e8fc736.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912.json)| -|[slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb.py) |short-side 320|8x4| ResNet50 | ImageNet |75.55|92.04|x|5820|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912-3f9ce182.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912.json)| -|[slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb.py) | short-side 320 | 8x2 | ResNet50 | ImageNet | 74.54 | 91.73 | x | 4435 |[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb_20210308-0d6e5a69.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/20210305_152630.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/20210305_152630.log.json)| -|[slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb.py) | short-side 320 | 8x4 | ResNet50 | ImageNet | 76.07 | 92.42 | x | 8895 |[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb_20210308-e8dd9e82.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/20210308_212250.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/20210308_212250.log.json)| -|[slowonly_r50_4x16x1_256e_kinetics400_flow](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow.py)|short-side 320|8x2| ResNet50 | ImageNet |61.79|83.62|x|8450| 
[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_20200704-decb8568.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_61.8_83.6.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_61.8_83.6.log.json)| -|[slowonly_r50_8x8x1_196e_kinetics400_flow](/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow.py) |short-side 320|8x4| ResNet50 | ImageNet |65.76|86.25|x|8455| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_256e_kinetics400_flow_20200704-6b384243.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_196e_kinetics400_flow_65.8_86.3.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_196e_kinetics400_flow_65.8_86.3.log.json)| +| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | inference_time(video/s) | gpu_mem(M) | ckpt | log | json | +| :-------------------------------------------------------------------------------------------------------------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------: | :---------------------: | :--------: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowonly_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py) | short-side 256 | 8x4 | ResNet50 | None | 72.76 | 90.51 | x | 3168 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb_20200820-bea7701f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log.json) | +| [slowonly_r50_video_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py) | short-side 320 | 8x2 | ResNet50 | None | 72.90 | 90.82 | x | 8472 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014-c9cdc656.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014.log) | 
[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014.json) | +| [slowonly_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py) | short-side 256 | 8x4 | ResNet50 | None | 74.42 | 91.49 | x | 5820 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb_20200820-75851a7d.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/20200817_003320.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/20200817_003320.log.json) | +| [slowonly_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py) | short-side 320 | 8x2 | ResNet50 | None | 73.02 | 90.77 | 4.0 (40x3 frames) | 3168 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/so_4x16.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16_73.02_90.77.log.json) | +| [slowonly_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py) | short-side 320 | 8x3 | ResNet50 | None | 74.93 | 91.92 | 2.3 (80x3 frames) | 5820 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/slowonly_r50_8x8x1_256e_kinetics400_rgb_20200703-a79c555a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/so_8x8.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/slowonly_r50_8x8_74.93_91.92.log.json) | +| [slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb.py) | short-side 320 | 8x2 | ResNet50 | ImageNet | 73.39 | 91.12 | x | 3168 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912-1e8fc736.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912.json) | +| [slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb.py) | short-side 320 | 8x4 | ResNet50 | ImageNet | 75.55 | 92.04 | x | 5820 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912-3f9ce182.pth) | 
[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912.json) |
+| [slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb.py) | short-side 320 | 8x2 | ResNet50 | ImageNet | 74.54 | 91.73 | x | 4435 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb_20210308-0d6e5a69.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/20210305_152630.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/20210305_152630.log.json) |
+| [slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb.py) | short-side 320 | 8x4 | ResNet50 | ImageNet | 76.07 | 92.42 | x | 8895 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb_20210308-e8dd9e82.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/20210308_212250.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/20210308_212250.log.json) |
+| [slowonly_r50_4x16x1_256e_kinetics400_flow](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow.py) | short-side 320 | 8x2 | ResNet50 | ImageNet | 61.79 | 83.62 | x | 8450 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_20200704-decb8568.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_61.8_83.6.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_61.8_83.6.log.json) |
+| [slowonly_r50_8x8x1_196e_kinetics400_flow](/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow.py) | short-side 320 | 8x4 | ResNet50 | ImageNet | 65.76 | 86.25 | x | 8455 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_256e_kinetics400_flow_20200704-6b384243.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_196e_kinetics400_flow_65.8_86.3.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_196e_kinetics400_flow_65.8_86.3.log.json) |

### Kinetics-400 Data Benchmark

In data benchmark, we compare three different data preprocessing methods: (1) Resize video to 340x256, (2) Resize the short edge of video to 320px, (3) Resize the short
edge of video to 256px. -| config | resolution | gpus | backbone | Input | pretrain | top1 acc | top5 acc | testing protocol | ckpt | log | json | -| :----------------------------------------------------------- | :------------: | :--: | :------: | :---: | :------: | :------: | :------: | :----------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| config | resolution | gpus | backbone | Input | pretrain | top1 acc | top5 acc | testing protocol | ckpt | log | json | +| :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :------------: | :--: | :------: | :---: | :------: | :------: | :------: | :----------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | | [slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb.py) | 340x256 | 8x2 | ResNet50 | 4x16 | None | 71.61 | 90.05 | 10 clips x 3 crops | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb_20200803-dadca1a3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb_20200803.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb_20200803.json) | -| [slowonly_r50_randomresizedcrop_320p_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_320p_4x16x1_256e_kinetics400_rgb.py) | short-side 320 | 8x2 | ResNet50 | 4x16 | None | 73.02 | 90.77 | 10 clips x 3 crops | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/so_4x16.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16_73.02_90.77.log.json) | -| [slowonly_r50_randomresizedcrop_256p_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_256p_4x16x1_256e_kinetics400_rgb.py) | short-side 256 | 8x4 | ResNet50 | 4x16 | None | 72.76 | 
90.51 | 10 clips x 3 crops | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb_20200820-bea7701f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log.json) | +| [slowonly_r50_randomresizedcrop_320p_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_320p_4x16x1_256e_kinetics400_rgb.py) | short-side 320 | 8x2 | ResNet50 | 4x16 | None | 73.02 | 90.77 | 10 clips x 3 crops | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/so_4x16.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16_73.02_90.77.log.json) | +| [slowonly_r50_randomresizedcrop_256p_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_256p_4x16x1_256e_kinetics400_rgb.py) | short-side 256 | 8x4 | ResNet50 | 4x16 | None | 72.76 | 90.51 | 10 clips x 3 crops | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb_20200820-bea7701f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log.json) | ### Kinetics-400 OmniSource Experiments -| config | resolution | backbone | pretrain | w. OmniSource | top1 acc | top5 acc | ckpt | log | json | -| :----------------------------------------------------------: | :------------: | :-------: | :------: | :----------------: | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| config | resolution | backbone | pretrain | w. 
OmniSource | top1 acc | top5 acc | ckpt | log | json | +| :-------------------------------------------------------------------------------------------------------------------: | :------------: | :-------: | :------: | :----------------: | :------: | :------: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------------------: | | [slowonly_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py) | short-side 320 | ResNet50 | None | :x: | 73.0 | 90.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/so_4x16.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16_73.02_90.77.log.json) | -| x | x | ResNet50 | None | :heavy_check_mark: | 76.8 | 92.5 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r50_omni_4x16x1_kinetics400_rgb_20200926-51b1f7ea.pth) | x | x | -| [slowonly_r101_8x8x1_196e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r101_8x8x1_196e_kinetics400_rgb.py) | x | ResNet101 | None | :x: | 76.5 | 92.7 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_without_omni_8x8x1_kinetics400_rgb_20200926-0c730aef.pth) | x | x | -| x | x | ResNet101 | None | :heavy_check_mark: | 80.4 | 94.4 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_omni_8x8x1_kinetics400_rgb_20200926-b5dbb701.pth) | x | x | +| x | x | ResNet50 | None | :heavy_check_mark: | 76.8 | 92.5 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r50_omni_4x16x1_kinetics400_rgb_20200926-51b1f7ea.pth) | x | x | +| [slowonly_r101_8x8x1_196e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r101_8x8x1_196e_kinetics400_rgb.py) | x | ResNet101 | None | :x: | 76.5 | 92.7 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_without_omni_8x8x1_kinetics400_rgb_20200926-0c730aef.pth) | x | x | +| x | x | ResNet101 | None | :heavy_check_mark: | 80.4 | 94.4 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_omni_8x8x1_kinetics400_rgb_20200926-b5dbb701.pth) | x | x | ### Kinetics-600 -| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | ckpt | log | json | -| :----------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | ckpt | log | json | +| :------------------------------------------------------------------------------------------------------------------------------ | :------------: | :--: | :------: | :------: | :------: | :------: 
| :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | | [slowonly_r50_video_8x8x1_256e_kinetics600_rgb](/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb.py) | short-side 256 | 8x4 | ResNet50 | None | 77.5 | 93.7 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb/slowonly_r50_video_8x8x1_256e_kinetics600_rgb_20201015-81e5153e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb/slowonly_r50_video_8x8x1_256e_kinetics600_rgb_20201015.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb/slowonly_r50_video_8x8x1_256e_kinetics600_rgb_20201015.json) | ### Kinetics-700 -| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | ckpt | log | json | -| :----------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | ckpt | log | json | +| :------------------------------------------------------------------------------------------------------------------------------ | :------------: | :--: | :------: | :------: | :------: | :------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | | [slowonly_r50_video_8x8x1_256e_kinetics700_rgb](/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb.py) | short-side 256 | 8x4 | ResNet50 | None | 65.0 | 86.1 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb/slowonly_r50_video_8x8x1_256e_kinetics700_rgb_20201015-9250f662.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb/slowonly_r50_video_8x8x1_256e_kinetics700_rgb_20201015.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb/slowonly_r50_video_8x8x1_256e_kinetics700_rgb_20201015.json) | ### GYM99 -| config | resolution | gpus | backbone | pretrain | top1 acc | mean class acc | ckpt | log | json | -| :----------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------------: | 
:----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb.py) | short-side 256 | 8x2 | ResNet50 | ImageNet | 79.3 | 70.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111-a9c34b54.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111.json) | -| [slowonly_k400_pretrained_r50_4x16x1_120e_gym99_flow](/configs/recognition/slowonly/slowonly_k400_pretrained_r50_4x16x1_120e_gym99_flow.py) | short-side 256 | 8x2 | ResNet50 | Kinetics | 80.3 | 71.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111-66ecdb3c.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.json) | -| 1: 1 Fusion | | | | | 83.7 | 74.8 | | | | +| config | resolution | gpus | backbone | pretrain | top1 acc | mean class acc | ckpt | log | json | +| :------------------------------------------------------------------------------------------------------------------------------------------------ | :------------: | :--: | :------: | :------: | :------: | :------------: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb.py) | short-side 256 | 8x2 | ResNet50 | ImageNet | 79.3 | 70.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111-a9c34b54.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111.log) | 
[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111.json) | +| [slowonly_k400_pretrained_r50_4x16x1_120e_gym99_flow](/configs/recognition/slowonly/slowonly_k400_pretrained_r50_4x16x1_120e_gym99_flow.py) | short-side 256 | 8x2 | ResNet50 | Kinetics | 80.3 | 71.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111-66ecdb3c.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.json) | +| 1: 1 Fusion | | | | | 83.7 | 74.8 | | | | ### Jester -| config | resolution | gpus | backbone | pretrain | top1 acc | ckpt | log | json | -| :----------------------------------------------------------- | :--------: | :--: | :------: | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| config | resolution | gpus | backbone | pretrain | top1 acc | ckpt | log | json | +| :---------------------------------------------------------------------------------------------------------------------------------------------- | :--------: | :--: | :------: | :------: | :------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | | [slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 97.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb-b56a5389.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.json) | ### HMDB51 -|config | gpus | backbone | pretrain | top1 acc| top5 acc | gpu_mem(M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| 
-|[slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb.py)|8|ResNet50|ImageNet|37.52|71.50|5812|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb_20210630-16faeb6a.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb/20210605_185256.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb/20210605_185256.log.json)| -|[slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb](/configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb.py)|8|ResNet50|Kinetics400|65.95|91.05|5812|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb_20210630-cee5f725.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb/20210606_010153.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb/20210606_010153.log.json)| +| config | gpus | backbone | pretrain | top1 acc | top5 acc | gpu_mem(M) | ckpt | log | json | +| :---------------------------------------------------------------------------------------------------------------------------------------------- | :--: | :------: | :---------: | :------: | :------: | :--------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb.py) | 8 | ResNet50 | ImageNet | 37.52 | 71.50 | 5812 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb_20210630-16faeb6a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb/20210605_185256.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb/20210605_185256.log.json) | +| [slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb](/configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb.py) | 8 | ResNet50 | Kinetics400 | 65.95 | 91.05 | 5812 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb_20210630-cee5f725.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb/20210606_010153.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb/20210606_010153.log.json) | ### UCF101 -|config | gpus | backbone | pretrain | top1 
acc| top5 acc | gpu_mem(M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb.py)|8|ResNet50|ImageNet|71.35|89.35|5812|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb_20210630-181e1661.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb/20210605_213503.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb/20210605_213503.log.json)| -|[slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb](/configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb.py)|8|ResNet50|Kinetics400|92.78|99.42|5812|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb_20210630-ee8c850f.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/20210606_010231.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/20210606_010231.log.json)| +| config | gpus | backbone | pretrain | top1 acc | top5 acc | gpu_mem(M) | ckpt | log | json | +| :---------------------------------------------------------------------------------------------------------------------------------------------- | :--: | :------: | :---------: | :------: | :------: | :--------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb.py) | 8 | ResNet50 | ImageNet | 71.35 | 89.35 | 5812 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb_20210630-181e1661.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb/20210605_213503.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb/20210605_213503.log.json) | +| [slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb](/configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb.py) | 8 | ResNet50 | Kinetics400 | 92.78 | 99.42 | 5812 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb_20210630-ee8c850f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/20210606_010231.log) | 
[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/20210606_010231.log.json) | ### Something-Something V1 -|config | gpus | backbone | pretrain | top1 acc| top5 acc | gpu_mem(M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.py)|8|ResNet50|ImageNet|47.76|77.49|7759|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb_20211202-d034ff12.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.json)| +| config | gpus | backbone | pretrain | top1 acc | top5 acc | gpu_mem(M) | ckpt | log | json | +| :-------------------------------------------------------------------------------------------------------------------------------------------- | :--: | :------: | :------: | :------: | :------: | :--------: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.py) | 8 | ResNet50 | ImageNet | 47.76 | 77.49 | 7759 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb_20211202-d034ff12.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.json) | :::{note} diff --git a/configs/recognition/slowonly/README_zh-CN.md b/configs/recognition/slowonly/README_zh-CN.md index 917be85500..2deb7a4fa1 100644 --- a/configs/recognition/slowonly/README_zh-CN.md +++ b/configs/recognition/slowonly/README_zh-CN.md @@ -18,84 +18,84 @@ ### Kinetics-400 -|配置文件 | 分辨率 | GPU 数量 | 主干网络 |预训练| top1 准确率| top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M)| ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[slowonly_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py)|短边 256|8x4| ResNet50 | None 
|72.76|90.51|x|3168|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb_20200820-bea7701f.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log.json)| -|[slowonly_r50_video_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py)|短边 320|8x2| ResNet50 | None |72.90|90.82|x|8472|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014-c9cdc656.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014.json)| -|[slowonly_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py) |短边 256|8x4| ResNet50 | None |74.42|91.49|x|5820|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb_20200820-75851a7d.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/20200817_003320.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/20200817_003320.log.json)| -|[slowonly_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py)|短边 320|8x2| ResNet50 | None |73.02|90.77|4.0 (40x3 frames)|3168|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth)| [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/so_4x16.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16_73.02_90.77.log.json)| -|[slowonly_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py) |短边 320|8x3| ResNet50 | None |74.93|91.92|2.3 (80x3 frames)|5820| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/slowonly_r50_8x8x1_256e_kinetics400_rgb_20200703-a79c555a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/so_8x8.log)| [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/slowonly_r50_8x8_74.93_91.92.log.json)| -|[slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb.py)|短边 320|8x2| ResNet50 | ImageNet 
|73.39|91.12|x|3168|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912-1e8fc736.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912.json)| -|[slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb.py) |短边 320|8x4| ResNet50 | ImageNet |75.55|92.04|x|5820|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912-3f9ce182.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912.json)| -|[slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb.py) | 短边 320 | 8x2 | ResNet50 | ImageNet | 74.54 | 91.73 | x | 4435 |[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb_20210308-0d6e5a69.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/20210305_152630.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/20210305_152630.log.json)| -|[slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb.py) | 短边 320 | 8x4 | ResNet50 | ImageNet | 76.07 | 92.42 | x | 8895 |[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb_20210308-e8dd9e82.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/20210308_212250.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/20210308_212250.log.json)| -|[slowonly_r50_4x16x1_256e_kinetics400_flow](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow.py)|短边 320|8x2| ResNet50 | ImageNet |61.79|83.62|x|8450| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_20200704-decb8568.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_61.8_83.6.log) | 
[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_61.8_83.6.log.json)| -|[slowonly_r50_8x8x1_196e_kinetics400_flow](/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow.py) |短边 320|8x4| ResNet50 | ImageNet |65.76|86.25|x|8455| [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_256e_kinetics400_flow_20200704-6b384243.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_196e_kinetics400_flow_65.8_86.3.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_196e_kinetics400_flow_65.8_86.3.log.json)| +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log | json | +| :-------------------------------------------------------------------------------------------------------------------------------------------------------------- | :----: | :----: | :------: | :------: | :------: | :------: | :---------------: | :----------: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowonly_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py) | 短边 256 | 8x4 | ResNet50 | None | 72.76 | 90.51 | x | 3168 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb_20200820-bea7701f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log.json) | +| [slowonly_r50_video_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py) | 短边 320 | 8x2 | ResNet50 | None | 72.90 | 90.82 | x | 8472 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014-c9cdc656.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb/slowonly_r50_video_320p_4x16x1_256e_kinetics400_rgb_20201014.json) | +| [slowonly_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py) | 短边 256 | 8x4 | ResNet50 | None | 74.42 | 91.49 | x | 5820 | 
[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb_20200820-75851a7d.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/20200817_003320.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_8x8x1_256e_kinetics400_rgb/20200817_003320.log.json) | +| [slowonly_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py) | 短边 320 | 8x2 | ResNet50 | None | 73.02 | 90.77 | 4.0 (40x3 frames) | 3168 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/so_4x16.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16_73.02_90.77.log.json) | +| [slowonly_r50_8x8x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb.py) | 短边 320 | 8x3 | ResNet50 | None | 74.93 | 91.92 | 2.3 (80x3 frames) | 5820 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/slowonly_r50_8x8x1_256e_kinetics400_rgb_20200703-a79c555a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/so_8x8.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_rgb/slowonly_r50_8x8_74.93_91.92.log.json) | +| [slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb.py) | 短边 320 | 8x2 | ResNet50 | ImageNet | 73.39 | 91.12 | x | 3168 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912-1e8fc736.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_4x16x1_150e_kinetics400_rgb_20200912.json) | +| [slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb.py) | 短边 320 | 8x4 | ResNet50 | ImageNet | 75.55 | 92.04 | x | 5820 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912-3f9ce182.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb/slowonly_imagenet_pretrained_r50_8x8x1_150e_kinetics400_rgb_20200912.json) | +| 
[slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb.py) | 短边 320 | 8x2 | ResNet50 | ImageNet | 74.54 | 91.73 | x | 4435 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb_20210308-0d6e5a69.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/20210305_152630.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb/20210305_152630.log.json) |
+| [slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb.py) | 短边 320 | 8x4 | ResNet50 | ImageNet | 76.07 | 92.42 | x | 8895 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb_20210308-e8dd9e82.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/20210308_212250.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb/20210308_212250.log.json) |
+| [slowonly_r50_4x16x1_256e_kinetics400_flow](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow.py) | 短边 320 | 8x2 | ResNet50 | ImageNet | 61.79 | 83.62 | x | 8450 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_20200704-decb8568.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_61.8_83.6.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_flow/slowonly_r50_4x16x1_256e_kinetics400_flow_61.8_83.6.log.json) |
+| [slowonly_r50_8x8x1_196e_kinetics400_flow](/configs/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow.py) | 短边 320 | 8x4 | ResNet50 | ImageNet | 65.76 | 86.25 | x | 8455 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_256e_kinetics400_flow_20200704-6b384243.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_196e_kinetics400_flow_65.8_86.3.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_8x8x1_256e_kinetics400_flow/slowonly_r50_8x8x1_196e_kinetics400_flow_65.8_86.3.log.json) |

### Kinetics-400 数据基准测试

在数据基准测试中,比较三种不同的数据预处理方法 (1) 视频分辨率为 340x256, (2) 视频分辨率为短边 320px, (3) 视频分辨率为短边 256px.
-| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 输入 | 预训练 | top1 准确率 | top5 准确率 | 测试方案 | ckpt | log | json | -| :----------------------------------------------------------- | :------------: | :--: | :------: | :---: | :------: | :------: | :------: | :----------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb.py) | 340x256 | 8x2 | ResNet50 | 4x16 | None | 71.61 | 90.05 | 10 clips x 3 crops | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb_20200803-dadca1a3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb_20200803.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb_20200803.json) | -| [slowonly_r50_randomresizedcrop_320p_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_320p_4x16x1_256e_kinetics400_rgb.py) | 短边 320 | 8x2 | ResNet50 | 4x16 | None | 73.02 | 90.77 | 10 clips x 3 crops | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/so_4x16.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16_73.02_90.77.log.json) | -| [slowonly_r50_randomresizedcrop_256p_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_256p_4x16x1_256e_kinetics400_rgb.py) | 短边 256 | 8x4 | ResNet50 | 4x16 | None | 72.76 | 90.51 | 10 clips x 3 crops | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb_20200820-bea7701f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log.json) | +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 输入 | 预训练 | top1 准确率 | top5 准确率 | 测试方案 | ckpt | log | json | +| :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :-----: | :----: | :------: | :--: | :--: | :------: | :------: | :----------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | 
:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb.py) | 340x256 | 8x2 | ResNet50 | 4x16 | None | 71.61 | 90.05 | 10 clips x 3 crops | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb_20200803-dadca1a3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb_20200803.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb/slowonly_r50_randomresizedcrop_340x256_4x16x1_256e_kinetics400_rgb_20200803.json) | +| [slowonly_r50_randomresizedcrop_320p_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_320p_4x16x1_256e_kinetics400_rgb.py) | 短边 320 | 8x2 | ResNet50 | 4x16 | None | 73.02 | 90.77 | 10 clips x 3 crops | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/so_4x16.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16_73.02_90.77.log.json) | +| [slowonly_r50_randomresizedcrop_256p_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/data_benchmark/slowonly_r50_randomresizedcrop_256p_4x16x1_256e_kinetics400_rgb.py) | 短边 256 | 8x4 | ResNet50 | 4x16 | None | 72.76 | 90.51 | 10 clips x 3 crops | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb_20200820-bea7701f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_256p_4x16x1_256e_kinetics400_rgb/20200817_001411.log.json) | ### Kinetics-400 OmniSource Experiments -| 配置文件 | 分辨率 | 主干网络 | 预训练 | w. 
OmniSource | top1 准确率 | top5 准确率 | ckpt | log | json | -| :----------------------------------------------------------: | :------------: | :-------: | :------: | :----------------: | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [slowonly_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py) | 短边 320 | ResNet50 | None | :x: | 73.0 | 90.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/so_4x16.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16_73.02_90.77.log.json) | -| x | x | ResNet50 | None | :heavy_check_mark: | 76.8 | 92.5 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r50_omni_4x16x1_kinetics400_rgb_20200926-51b1f7ea.pth) | x | x | -| [slowonly_r101_8x8x1_196e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r101_8x8x1_196e_kinetics400_rgb.py) | x | ResNet101 | None | :x: | 76.5 | 92.7 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_without_omni_8x8x1_kinetics400_rgb_20200926-0c730aef.pth) | x | x | -| x | x | ResNet101 | None | :heavy_check_mark: | 80.4 | 94.4 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_omni_8x8x1_kinetics400_rgb_20200926-b5dbb701.pth) | x | x | +| 配置文件 | 分辨率 | 主干网络 | 预训练 | w. 
OmniSource | top1 准确率 | top5 准确率 | ckpt | log | json | +| :-------------------------------------------------------------------------------------------------------------------: | :----: | :-------: | :--: | :----------------: | :------: | :------: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowonly_r50_4x16x1_256e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb.py) | 短边 320 | ResNet50 | None | :x: | 73.0 | 90.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/so_4x16.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_4x16x1_256e_kinetics400_rgb/slowonly_r50_4x16_73.02_90.77.log.json) | +| x | x | ResNet50 | None | :heavy_check_mark: | 76.8 | 92.5 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r50_omni_4x16x1_kinetics400_rgb_20200926-51b1f7ea.pth) | x | x | +| [slowonly_r101_8x8x1_196e_kinetics400_rgb](/configs/recognition/slowonly/slowonly_r101_8x8x1_196e_kinetics400_rgb.py) | x | ResNet101 | None | :x: | 76.5 | 92.7 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_without_omni_8x8x1_kinetics400_rgb_20200926-0c730aef.pth) | x | x | +| x | x | ResNet101 | None | :heavy_check_mark: | 80.4 | 94.4 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/omni/slowonly_r101_omni_8x8x1_kinetics400_rgb_20200926-b5dbb701.pth) | x | x | ### Kinetics-600 -| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | ckpt | log | json | -| :----------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [slowonly_r50_video_8x8x1_256e_kinetics600_rgb](/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb.py) | 短边 256 | 8x4 | ResNet50 | None | 77.5 | 93.7 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb/slowonly_r50_video_8x8x1_256e_kinetics600_rgb_20201015-81e5153e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb/slowonly_r50_video_8x8x1_256e_kinetics600_rgb_20201015.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb/slowonly_r50_video_8x8x1_256e_kinetics600_rgb_20201015.json) | +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | ckpt | log | json | +| :------------------------------------------------------------------------------------------------------------------------------ | :----: | :----: | :------: | :--: | :------: | :------: | 
:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowonly_r50_video_8x8x1_256e_kinetics600_rgb](/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb.py) | 短边 256 | 8x4 | ResNet50 | None | 77.5 | 93.7 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb/slowonly_r50_video_8x8x1_256e_kinetics600_rgb_20201015-81e5153e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb/slowonly_r50_video_8x8x1_256e_kinetics600_rgb_20201015.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics600_rgb/slowonly_r50_video_8x8x1_256e_kinetics600_rgb_20201015.json) | ### Kinetics-700 -| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | ckpt | log | json | -| :----------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [slowonly_r50_video_8x8x1_256e_kinetics700_rgb](/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb.py) | 短边 256 | 8x4 | ResNet50 | None | 65.0 | 86.1 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb/slowonly_r50_video_8x8x1_256e_kinetics700_rgb_20201015-9250f662.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb/slowonly_r50_video_8x8x1_256e_kinetics700_rgb_20201015.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb/slowonly_r50_video_8x8x1_256e_kinetics700_rgb_20201015.json) | +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | ckpt | log | json | +| :------------------------------------------------------------------------------------------------------------------------------ | :----: | :----: | :------: | :--: | :------: | :------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowonly_r50_video_8x8x1_256e_kinetics700_rgb](/configs/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb.py) | 短边 256 | 8x4 | ResNet50 | None | 65.0 | 86.1 | 
[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb/slowonly_r50_video_8x8x1_256e_kinetics700_rgb_20201015-9250f662.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb/slowonly_r50_video_8x8x1_256e_kinetics700_rgb_20201015.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_r50_video_8x8x1_256e_kinetics700_rgb/slowonly_r50_video_8x8x1_256e_kinetics700_rgb_20201015.json) | ### GYM99 -| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | 类别平均准确率 | ckpt | log | json | -| :----------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb.py) | 短边 256 | 8x2 | ResNet50 | ImageNet | 79.3 | 70.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111-a9c34b54.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111.json) | -| [slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow](/configs/recognition/slowonly/slowonly_k400_pretrained_r50_4x16x1_120e_gym99_flow.py) | 短边 256 | 8x2 | ResNet50 | Kinetics | 80.3 | 71.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111-66ecdb3c.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.json) | -| 1: 1 融合 | | | | | 83.7 | 74.8 | | | | +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | 类别平均准确率 | ckpt | log | json | +| :------------------------------------------------------------------------------------------------------------------------------------------------ | :----: | :----: | :------: | :------: | :------: | :-----: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| 
[slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb.py) | 短边 256 | 8x2 | ResNet50 | ImageNet | 79.3 | 70.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111-a9c34b54.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb/slowonly_imagenet_pretrained_r50_4x16x1_120e_gym99_rgb_20201111.json) | +| [slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow](/configs/recognition/slowonly/slowonly_k400_pretrained_r50_4x16x1_120e_gym99_flow.py) | 短边 256 | 8x2 | ResNet50 | Kinetics | 80.3 | 71.0 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111-66ecdb3c.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow/slowonly_kinetics_pretrained_r50_4x16x1_120e_gym99_flow_20201111.json) | +| 1: 1 融合 | | | | | 83.7 | 74.8 | | | | ### Jester -| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | ckpt | log | json | -| :----------------------------------------------------------- | :----: | :------: | :------: | :------: | :---------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 97.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb-b56a5389.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.json) | +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | ckpt | log | json | +| :---------------------------------------------------------------------------------------------------------------------------------------------- | :---: | :----: | :------: | :------: | :------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | 
:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 97.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb-b56a5389.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb/slowonly_imagenet_pretrained_r50_8x8x1_64e_jester_rgb.json) | ### HMDB51 -|配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率| top5 准确率 | GPU 显存占用 (M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb.py)|8|ResNet50|ImageNet|37.52|71.50|5812|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb_20210630-16faeb6a.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb/20210605_185256.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb/20210605_185256.log.json)| -|[slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb](/configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb.py)|8|ResNet50|Kinetics400|65.95|91.05|5812|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb_20210630-cee5f725.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb/20210606_010153.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb/20210606_010153.log.json)| +| 配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | GPU 显存占用 (M) | ckpt | log | json | +| :---------------------------------------------------------------------------------------------------------------------------------------------- | :----: | :------: | :---------: | :------: | :------: | :----------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb.py) | 8 | ResNet50 | ImageNet | 37.52 | 71.50 | 5812 | 
[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb_20210630-16faeb6a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb/20210605_185256.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_hmdb51_rgb/20210605_185256.log.json) | +| [slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb](/configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb.py) | 8 | ResNet50 | Kinetics400 | 65.95 | 91.05 | 5812 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb_20210630-cee5f725.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb/20210606_010153.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_hmdb51_rgb/20210606_010153.log.json) | ### UCF101 -|配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率| top5 准确率 | GPU 显存占用 (M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb.py)|8|ResNet50|ImageNet|71.35|89.35|5812|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb_20210630-181e1661.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb/20210605_213503.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb/20210605_213503.log.json)| -|[slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb](/configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb.py)|8|ResNet50|Kinetics400|92.78|99.42|5812|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb_20210630-ee8c850f.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/20210606_010231.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/20210606_010231.log.json)| +| 配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | GPU 显存占用 (M) | ckpt | log | json | +| :---------------------------------------------------------------------------------------------------------------------------------------------- | :----: | :------: | :---------: | :------: | :------: | :----------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------: | +| 
[slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb.py) | 8 | ResNet50 | ImageNet | 71.35 | 89.35 | 5812 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb_20210630-181e1661.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb/20210605_213503.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_ucf101_rgb/20210605_213503.log.json) | +| [slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb](/configs/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb.py) | 8 | ResNet50 | Kinetics400 | 92.78 | 99.42 | 5812 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb_20210630-ee8c850f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/20210606_010231.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_k400_pretrained_r50_8x4x1_40e_ucf101_rgb/20210606_010231.log.json) | ### Something-Something V1 -|配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率| top5 准确率 | GPU 显存占用 (M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.py)|8|ResNet50|ImageNet|47.76|77.49|7759|[ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb_20211202-d034ff12.pth)|[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.json)| +| 配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | GPU 显存占用 (M) | ckpt | log | json | +| :-------------------------------------------------------------------------------------------------------------------------------------------- | :----: | :------: | :------: | :------: | :------: | :----------: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb](/configs/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.py) | 8 | ResNet50 | ImageNet | 47.76 | 77.49 | 7759 | [ckpt](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb_20211202-d034ff12.pth) | 
[log](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/slowonly/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb/slowonly_imagenet_pretrained_r50_8x4x1_64e_sthv1_rgb.json) | 注: @@ -124,7 +124,7 @@ python tools/train.py configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kine --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E8%AE%AD%E7%BB%83%E9%85%8D%E7%BD%AE) 中的 **训练配置** 部分。 ## 如何测试 @@ -142,4 +142,4 @@ python tools/test.py configs/recognition/slowonly/slowonly_r50_4x16x1_256e_kinet --out result.json --average-clips=prob ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/tanet/README.md b/configs/recognition/tanet/README.md index 56a010a100..25d224ea52 100644 --- a/configs/recognition/tanet/README.md +++ b/configs/recognition/tanet/README.md @@ -8,9 +8,10 @@ -Video data is with complex temporal dynamics due to various factors such as camera motion, speed variation, and different activities. To effectively capture this diverse motion pattern, this paper presents a new temporal adaptive module ({\bf TAM}) to generate video-specific temporal kernels based on its own feature map. TAM proposes a unique two-level adaptive modeling scheme by decoupling the dynamic kernel into a location sensitive importance map and a location invariant aggregation weight. The importance map is learned in a local temporal window to capture short-term information, while the aggregation weight is generated from a global view with a focus on long-term structure. TAM is a modular block and could be integrated into 2D CNNs to yield a powerful video architecture (TANet) with a very small extra computational cost. The extensive experiments on Kinetics-400 and Something-Something datasets demonstrate that our TAM outperforms other temporal modeling methods consistently, and achieves the state-of-the-art performance under the similar complexity. +Video data is with complex temporal dynamics due to various factors such as camera motion, speed variation, and different activities. To effectively capture this diverse motion pattern, this paper presents a new temporal adaptive module ({\\bf TAM}) to generate video-specific temporal kernels based on its own feature map. TAM proposes a unique two-level adaptive modeling scheme by decoupling the dynamic kernel into a location sensitive importance map and a location invariant aggregation weight. The importance map is learned in a local temporal window to capture short-term information, while the aggregation weight is generated from a global view with a focus on long-term structure. TAM is a modular block and could be integrated into 2D CNNs to yield a powerful video architecture (TANet) with a very small extra computational cost. The extensive experiments on Kinetics-400 and Something-Something datasets demonstrate that our TAM outperforms other temporal modeling methods consistently, and achieves the state-of-the-art performance under the similar complexity. +
    @@ -19,16 +20,16 @@ Video data is with complex temporal dynamics due to various factors such as came ### Kinetics-400 -|config | resolution | gpus | backbone | pretrain | top1 acc| top5 acc | reference top1 acc | reference top5 acc | inference_time(video/s) | gpu_mem(M)| ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tanet_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb.py) |short-side 320|8| TANet | ImageNet |76.28 | 92.60 |[76.22](https://github.com/liu-zhy/temporal-adaptive-module/blob/master/scripts/test_tam_kinetics_rgb_8f.sh)|[92.53](https://github.com/liu-zhy/temporal-adaptive-module/blob/master/scripts/test_tam_kinetics_rgb_8f.sh) | x | 7124 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219-032c8e94.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.log)| [json](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.json)| +| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | reference top1 acc | reference top5 acc | inference_time(video/s) | gpu_mem(M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------: | :----------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------: | :---------------------: | :--------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [tanet_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb.py) | short-side 320 | 8 | TANet | ImageNet | 76.28 | 92.60 | [76.22](https://github.com/liu-zhy/temporal-adaptive-module/blob/master/scripts/test_tam_kinetics_rgb_8f.sh) | [92.53](https://github.com/liu-zhy/temporal-adaptive-module/blob/master/scripts/test_tam_kinetics_rgb_8f.sh) | x | 7124 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219-032c8e94.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.log) | [json](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.json) | ### Something-Something V1 -|config | resolution | gpus | backbone| pretrain | top1 acc (efficient/accurate)| top5 acc (efficient/accurate)| gpu_mem(M) | ckpt | log| json| 
-|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tanet_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb.py)|height 100|8|TANet|ImageNet|47.34/49.58|75.72/77.31|7127|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/tanet_r50_1x1x8_50e_sthv1_rgb_20210630-f4a48609.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log)|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log.json)| -|[tanet_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb.py)|height 100|8|TANet|ImageNet|49.05/50.91|77.90/79.13|7127|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb_20211202-370c2128.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb.log)|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb.json)| +| config | resolution | gpus | backbone | pretrain | top1 acc (efficient/accurate) | top5 acc (efficient/accurate) | gpu_mem(M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------------- | :--------: | :--: | :------: | :------: | :---------------------------: | :---------------------------: | :--------: | :---------------------------------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------: | +| [tanet_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb.py) | height 100 | 8 | TANet | ImageNet | 47.34/49.58 | 75.72/77.31 | 7127 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/tanet_r50_1x1x8_50e_sthv1_rgb_20210630-f4a48609.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log) | [ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log.json) | +| [tanet_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb.py) | height 100 | 8 | TANet | ImageNet | 49.05/50.91 | 77.90/79.13 | 7127 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb_20211202-370c2128.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb.log) | [ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb.json) | :::{note} diff --git a/configs/recognition/tanet/README_zh-CN.md b/configs/recognition/tanet/README_zh-CN.md index 4902cf8430..144aa93152 100644 --- a/configs/recognition/tanet/README_zh-CN.md +++ b/configs/recognition/tanet/README_zh-CN.md @@ -17,16 +17,16 @@ ### Kinetics-400 -|配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M)| ckpt | log| json| 
-|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tanet_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb.py) |短边 320|8| TANet | ImageNet |76.28 | 92.60 |[76.22](https://github.com/liu-zhy/temporal-adaptive-module/blob/master/scripts/test_tam_kinetics_rgb_8f.sh)|[92.53](https://github.com/liu-zhy/temporal-adaptive-module/blob/master/scripts/test_tam_kinetics_rgb_8f.sh) | x | 7124 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219-032c8e94.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.log)| [json](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.json)| +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------------------------------------- | :----: | :----: | :---: | :------: | :------: | :------: | :----------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------: | :------------: | :----------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [tanet_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb.py) | 短边 320 | 8 | TANet | ImageNet | 76.28 | 92.60 | [76.22](https://github.com/liu-zhy/temporal-adaptive-module/blob/master/scripts/test_tam_kinetics_rgb_8f.sh) | [92.53](https://github.com/liu-zhy/temporal-adaptive-module/blob/master/scripts/test_tam_kinetics_rgb_8f.sh) | x | 7124 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219-032c8e94.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.log) | [json](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219.json) | ### Something-Something V1 -|配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 (efficient/accurate) | top5 准确率 (efficient/accurate) | GPU 显存占用 (M)| ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tanet_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb.py)|高 
100|8|TANet|ImageNet|47.34/49.58|75.72/77.31|7127|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/tanet_r50_1x1x8_50e_sthv1_rgb_20210630-f4a48609.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log)|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log.json)| -|[tanet_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb.py)|高 100|8|TANet|ImageNet|49.05/50.91|77.90/79.13|7127|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb_20211202-370c2128.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb.log)|[ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb.json)| +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 (efficient/accurate) | top5 准确率 (efficient/accurate) | GPU 显存占用 (M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------------- | :---: | :----: | :---: | :------: | :---------------------------: | :---------------------------: | :----------: | :---------------------------------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------: | +| [tanet_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | TANet | ImageNet | 47.34/49.58 | 75.72/77.31 | 7127 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/tanet_r50_1x1x8_50e_sthv1_rgb_20210630-f4a48609.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log) | [ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x8_50e_sthv1_rgb/20210606_205006.log.json) | +| [tanet_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb.py) | 高 100 | 8 | TANet | ImageNet | 49.05/50.91 | 77.90/79.13 | 7127 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb_20211202-370c2128.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb.log) | [ckpt](https://download.openmmlab.com/mmaction/recognition/tanet/tanet_r50_1x1x16_50e_sthv1_rgb/tanet_r50_1x1x16_50e_sthv1_rgb.json) | 注: @@ -56,7 +56,7 @@ python tools/train.py configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kinet --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E8%AE%AD%E7%BB%83%E9%85%8D%E7%BD%AE) 中的 **训练配置** 部分。 ## 如何测试 @@ -74,4 +74,4 @@ python tools/test.py configs/recognition/tanet/tanet_r50_dense_1x1x8_100e_kineti --out result.json ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86) 中的 
**测试某个数据集** 部分。 diff --git a/configs/recognition/timesformer/README.md b/configs/recognition/timesformer/README.md index 43d9134b17..71168eef5b 100644 --- a/configs/recognition/timesformer/README.md +++ b/configs/recognition/timesformer/README.md @@ -11,6 +11,7 @@ We present a convolution-free approach to video classification built exclusively on self-attention over space and time. Our method, named "TimeSformer," adapts the standard Transformer architecture to video by enabling spatiotemporal feature learning directly from a sequence of frame-level patches. Our experimental study compares different self-attention schemes and suggests that "divided attention," where temporal attention and spatial attention are separately applied within each block, leads to the best video classification accuracy among the design choices considered. Despite the radically new design, TimeSformer achieves state-of-the-art results on several action recognition benchmarks, including the best reported accuracy on Kinetics-400 and Kinetics-600. Finally, compared to 3D convolutional networks, our model is faster to train, it can achieve dramatically higher test efficiency (at a small drop in accuracy), and it can also be applied to much longer video clips (over one minute long). +
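The "divided attention" design singled out in this abstract is easy to state in code: attend over time at a fixed patch location, then over space within a frame. The sketch below is an illustration of that factorization under assumed names, with the class token and MLP sub-block omitted for brevity; it is not the repository's model.

```python
import torch
import torch.nn as nn


class DividedSpaceTimeBlock(nn.Module):
    """Sketch of divided space-time attention over (B, T, P, D) patch tokens."""

    def __init__(self, dim, num_heads):
        super().__init__()
        self.norm_t = nn.LayerNorm(dim)
        self.norm_s = nn.LayerNorm(dim)
        self.attn_t = nn.MultiheadAttention(dim, num_heads, batch_first=True)
        self.attn_s = nn.MultiheadAttention(dim, num_heads, batch_first=True)

    def forward(self, x):
        b, t, p, d = x.shape
        # temporal attention: length-T sequences, one per spatial location
        xt = x.permute(0, 2, 1, 3).reshape(b * p, t, d)
        h = self.norm_t(xt)
        xt = xt + self.attn_t(h, h, h, need_weights=False)[0]
        # spatial attention: length-P sequences, one per frame
        xs = xt.reshape(b, p, t, d).permute(0, 2, 1, 3).reshape(b * t, p, d)
        h = self.norm_s(xs)
        xs = xs + self.attn_s(h, h, h, need_weights=False)[0]
        return xs.reshape(b, t, p, d)


tokens = torch.randn(2, 8, 196, 768)       # 2 clips x 8 frames x 14*14 patches
print(DividedSpaceTimeBlock(768, num_heads=12)(tokens).shape)
```

Compared with joint space-time attention, each sequence here has length T or P instead of T\*P, which is why the divided variant in the tables above trains in noticeably less GPU memory than the joint one.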
    @@ -19,11 +20,11 @@ We present a convolution-free approach to video classification built exclusively ### Kinetics-400 -|config | resolution | gpus | backbone | pretrain | top1 acc| top5 acc | inference_time(video/s) | gpu_mem(M)| ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[timesformer_divST_8x32x1_15e_kinetics400_rgb](/configs/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb.py) | short-side 320 | 8 | TimeSformer | ImageNet-21K | 77.92 | 93.29 | x | 17874 | [ckpt](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb/timesformer_divST_8x32x1_15e_kinetics400_rgb-3f8e5d03.pth) | [log](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb/timesformer_divST_8x32x1_15e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb/timesformer_divST_8x32x1_15e_kinetics400_rgb.json)| -|[timesformer_jointST_8x32x1_15e_kinetics400_rgb](/configs/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb.py) | short-side 320 | 8 | TimeSformer | ImageNet-21K | 77.01 | 93.08 | x | 25658 | [ckpt](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb/timesformer_jointST_8x32x1_15e_kinetics400_rgb-0d6e3984.pth) | [log](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb/timesformer_jointST_8x32x1_15e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb/timesformer_jointST_8x32x1_15e_kinetics400_rgb.json)| -|[timesformer_sapceOnly_8x32x1_15e_kinetics400_rgb](/configs/recognition/timesformer/timesformer_sapceOnly_8x32x1_15e_kinetics400_rgb.py) | short-side 320 | 8 | TimeSformer | ImageNet-21K | 76.93 | 92.90 | x | 12750 | [ckpt](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb-0cf829cd.pth) | [log](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.json)| +| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | inference_time(video/s) | gpu_mem(M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------------------------------------------------------- | :------------: | :--: | :---------: | :----------: | :------: | :------: | :---------------------: | :--------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| 
[timesformer_divST_8x32x1_15e_kinetics400_rgb](/configs/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb.py) | short-side 320 | 8 | TimeSformer | ImageNet-21K | 77.92 | 93.29 | x | 17874 | [ckpt](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb/timesformer_divST_8x32x1_15e_kinetics400_rgb-3f8e5d03.pth) | [log](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb/timesformer_divST_8x32x1_15e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb/timesformer_divST_8x32x1_15e_kinetics400_rgb.json) | +| [timesformer_jointST_8x32x1_15e_kinetics400_rgb](/configs/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb.py) | short-side 320 | 8 | TimeSformer | ImageNet-21K | 77.01 | 93.08 | x | 25658 | [ckpt](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb/timesformer_jointST_8x32x1_15e_kinetics400_rgb-0d6e3984.pth) | [log](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb/timesformer_jointST_8x32x1_15e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb/timesformer_jointST_8x32x1_15e_kinetics400_rgb.json) | +| [timesformer_sapceOnly_8x32x1_15e_kinetics400_rgb](/configs/recognition/timesformer/timesformer_sapceOnly_8x32x1_15e_kinetics400_rgb.py) | short-side 320 | 8 | TimeSformer | ImageNet-21K | 76.93 | 92.90 | x | 12750 | [ckpt](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb-0cf829cd.pth) | [log](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.json) | :::{note} diff --git a/configs/recognition/timesformer/README_zh-CN.md b/configs/recognition/timesformer/README_zh-CN.md index c844917e01..b54a909213 100644 --- a/configs/recognition/timesformer/README_zh-CN.md +++ b/configs/recognition/timesformer/README_zh-CN.md @@ -19,11 +19,11 @@ ### Kinetics-400 -|配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M)| ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[timesformer_divST_8x32x1_15e_kinetics400_rgb](/configs/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb.py) | 短边 320 | 8 | TimeSformer | ImageNet-21K | 77.92 | 93.29 | x | 17874 | [ckpt](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb/timesformer_divST_8x32x1_15e_kinetics400_rgb-3f8e5d03.pth) | [log](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb/timesformer_divST_8x32x1_15e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb/timesformer_divST_8x32x1_15e_kinetics400_rgb.json)| -|[timesformer_jointST_8x32x1_15e_kinetics400_rgb](/configs/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb.py) | 短边 320 | 8 
| TimeSformer | ImageNet-21K | 77.01 | 93.08 | x | 25658 | [ckpt](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb/timesformer_jointST_8x32x1_15e_kinetics400_rgb-0d6e3984.pth) | [log](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb/timesformer_jointST_8x32x1_15e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb/timesformer_jointST_8x32x1_15e_kinetics400_rgb.json)| -|[timesformer_sapceOnly_8x32x1_15e_kinetics400_rgb](/configs/recognition/timesformer/timesformer_sapceOnly_8x32x1_15e_kinetics400_rgb.py) | 短边 320 | 8 | TimeSformer | ImageNet-21K | 76.93 | 92.90 | x | 12750 | [ckpt](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb-0cf829cd.pth) | [log](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.json)| +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------------------------------------------------------- | :----: | :----: | :---------: | :----------: | :------: | :------: | :------------: | :----------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [timesformer_divST_8x32x1_15e_kinetics400_rgb](/configs/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb.py) | 短边 320 | 8 | TimeSformer | ImageNet-21K | 77.92 | 93.29 | x | 17874 | [ckpt](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb/timesformer_divST_8x32x1_15e_kinetics400_rgb-3f8e5d03.pth) | [log](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb/timesformer_divST_8x32x1_15e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_divST_8x32x1_15e_kinetics400_rgb/timesformer_divST_8x32x1_15e_kinetics400_rgb.json) | +| [timesformer_jointST_8x32x1_15e_kinetics400_rgb](/configs/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb.py) | 短边 320 | 8 | TimeSformer | ImageNet-21K | 77.01 | 93.08 | x | 25658 | [ckpt](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb/timesformer_jointST_8x32x1_15e_kinetics400_rgb-0d6e3984.pth) | [log](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb/timesformer_jointST_8x32x1_15e_kinetics400_rgb.log) | 
[json](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_jointST_8x32x1_15e_kinetics400_rgb/timesformer_jointST_8x32x1_15e_kinetics400_rgb.json) | +| [timesformer_sapceOnly_8x32x1_15e_kinetics400_rgb](/configs/recognition/timesformer/timesformer_sapceOnly_8x32x1_15e_kinetics400_rgb.py) | 短边 320 | 8 | TimeSformer | ImageNet-21K | 76.93 | 92.90 | x | 12750 | [ckpt](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb-0cf829cd.pth) | [log](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/timesformer/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb/timesformer_spaceOnly_8x32x1_15e_kinetics400_rgb.json) | 注: @@ -51,7 +51,7 @@ python tools/train.py configs/recognition/timesformer/timesformer_divST_8x32x1_1 --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E8%AE%AD%E7%BB%83%E9%85%8D%E7%BD%AE) 中的 **训练配置** 部分。 ## 如何测试 @@ -69,4 +69,4 @@ python tools/test.py configs/recognition/timesformer/timesformer_divST_8x32x1_15 --out result.json ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/tin/README.md b/configs/recognition/tin/README.md index 69449d71f4..72aa519033 100644 --- a/configs/recognition/tin/README.md +++ b/configs/recognition/tin/README.md @@ -11,6 +11,7 @@ For a long time, the vision community tries to learn the spatio-temporal representation by combining convolutional neural network together with various temporal models, such as the families of Markov chain, optical flow, RNN and temporal convolution. However, these pipelines consume enormous computing resources due to the alternately learning process for spatial and temporal information. One natural question is whether we can embed the temporal information into the spatial one so the information in the two domains can be jointly learned once-only. In this work, we answer this question by presenting a simple yet powerful operator -- temporal interlacing network (TIN). Instead of learning the temporal features, TIN fuses the two kinds of information by interlacing spatial representations from the past to the future, and vice versa. A differentiable interlacing target can be learned to control the interlacing process. In this way, a heavy temporal model is replaced by a simple interlacing operator. We theoretically prove that with a learnable interlacing target, TIN performs equivalently to the regularized temporal convolution network (r-TCN), but gains 4% more accuracy with 6x less latency on 6 challenging benchmarks. These results push the state-of-the-art performances of video understanding by a considerable margin. Not surprising, the ensemble model of the proposed TIN won the 1st place in the ICCV19 - Multi Moments in Time challenge. +
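The interlacing operator described in this abstract boils down to shifting groups of channels along time by *learnable, fractional* offsets. A minimal sketch of why such a shift stays differentiable follows: blend the two neighbouring integer shifts, so gradients reach the offsets through the blending weight. The grouping scheme and names are illustrative assumptions; the actual TIN module is more elaborate (it also learns per-group blending weights).

```python
import torch
import torch.nn.functional as F


def integer_shift(x, s):
    """Shift (N, C, T) along T by integer s, zero-padding at clip borders."""
    if s == 0:
        return x
    if s > 0:
        return F.pad(x, (0, s))[..., s:]     # frame t now reads frame t + s
    return F.pad(x, (-s, 0))[..., :s]        # same, for negative s


def interlace(x, offsets):
    """Differentiable fractional shift of channel groups.

    x: (N, G, C, T) channel groups; offsets: (G,) learnable shifts in frames.
    Each group is linearly interpolated between its two neighbouring integer
    shifts, so gradients flow back into the offsets.
    """
    out = []
    for i in range(x.size(1)):
        lo = int(torch.floor(offsets[i]))
        frac = offsets[i] - lo               # the differentiable part
        g = x[:, i]
        out.append((1 - frac) * integer_shift(g, lo)
                   + frac * integer_shift(g, lo + 1))
    return torch.stack(out, dim=1)


x = torch.randn(2, 4, 16, 8)                 # 2 clips, 4 groups, 8 frames
offsets = torch.tensor([-1.0, -0.5, 0.5, 1.0], requires_grad=True)
interlace(x, offsets).sum().backward()
print(offsets.grad)                          # non-None: offsets are trainable
```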
    @@ -19,21 +20,21 @@ For a long time, the vision community tries to learn the spatio-temporal represe ### Something-Something V1 -|config | resolution | gpus | backbone| pretrain | top1 acc| top5 acc | reference top1 acc | reference top5 acc | gpu_mem(M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tin_r50_1x1x8_40e_sthv1_rgb](/configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py)|height 100|8x4| ResNet50 | ImageNet | 44.25 | 73.94 | 44.04 | 72.72 | 6181 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb/tin_r50_1x1x8_40e_sthv1_rgb_20200729-4a33db86.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb/20200729_034132.log) | [json](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb/20200729_034132.log.json) | +| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | reference top1 acc | reference top5 acc | gpu_mem(M) | ckpt | log | json | +| :------------------------------------------------------------------------------------- | :--------: | :--: | :------: | :------: | :------: | :------: | :----------------: | :----------------: | :--------: | :-------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------: | +| [tin_r50_1x1x8_40e_sthv1_rgb](/configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py) | height 100 | 8x4 | ResNet50 | ImageNet | 44.25 | 73.94 | 44.04 | 72.72 | 6181 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb/tin_r50_1x1x8_40e_sthv1_rgb_20200729-4a33db86.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb/20200729_034132.log) | [json](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb/20200729_034132.log.json) | ### Something-Something V2 -|config | resolution | gpus | backbone| pretrain | top1 acc| top5 acc | reference top1 acc | reference top5 acc | gpu_mem(M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tin_r50_1x1x8_40e_sthv2_rgb](/configs/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb.py)|height 240|8x4| ResNet50 | ImageNet | 56.70 | 83.62 | 56.48 | 83.45 | 6185 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb/tin_r50_1x1x8_40e_sthv2_rgb_20200912-b27a7337.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb/20200912_225451.log) | [json](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb/20200912_225451.log.json) | +| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | reference top1 acc | reference top5 acc | gpu_mem(M) | ckpt | log | json | +| :------------------------------------------------------------------------------------- | :--------: | :--: | :------: | :------: | :------: | :------: | :----------------: | :----------------: | :--------: | :-------------------------------------------------------------------------------------------------------------------------------------------: | 
:------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------: | +| [tin_r50_1x1x8_40e_sthv2_rgb](/configs/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb.py) | height 240 | 8x4 | ResNet50 | ImageNet | 56.70 | 83.62 | 56.48 | 83.45 | 6185 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb/tin_r50_1x1x8_40e_sthv2_rgb_20200912-b27a7337.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb/20200912_225451.log) | [json](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb/20200912_225451.log.json) | ### Kinetics-400 -|config | resolution | gpus | backbone| pretrain | top1 acc| top5 acc | gpu_mem(M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb.py)|short-side 256|8x4| ResNet50 | TSM-Kinetics400 | 70.89 | 89.89 | 6187 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb_20200810-4a146a70.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/20200809_142447.log) | [json](https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/20200809_142447.log.json) | +| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | gpu_mem(M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------------------------------------------- | :------------: | :--: | :------: | :-------------: | :------: | :------: | :--------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------: | +| [tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb.py) | short-side 256 | 8x4 | ResNet50 | TSM-Kinetics400 | 70.89 | 89.89 | 6187 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb_20200810-4a146a70.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/20200809_142447.log) | [json](https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/20200809_142447.log.json) | Here, we use `finetune` to indicate that we use [TSM model](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20200607-af7fb746.pth) trained on Kinetics-400 to finetune the TIN model on Kinetics-400. 
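For readers unfamiliar with how this is wired up: in MMCV-style configs such finetuning is typically expressed as a single `load_from` entry pointing at the donor checkpoint. A sketch under that assumption (the `_base_` path below is hypothetical; consult the actual `tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb.py` for the real settings):

```python
# Hypothetical sketch of the finetune wiring, not the actual config file.
_base_ = ['./tin_r50_1x1x8_50e_kinetics400_rgb.py']  # assumed base config

# Initialise weights from the Kinetics-400 TSM checkpoint linked above,
# then train the TIN model on Kinetics-400.
load_from = ('https://download.openmmlab.com/mmaction/recognition/tsm/'
             'tsm_r50_1x1x8_50e_kinetics400_rgb/'
             'tsm_r50_1x1x8_50e_kinetics400_rgb_20200607-af7fb746.pth')
```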
diff --git a/configs/recognition/tin/README_zh-CN.md b/configs/recognition/tin/README_zh-CN.md index 2747fa6c94..ef980a0a09 100644 --- a/configs/recognition/tin/README_zh-CN.md +++ b/configs/recognition/tin/README_zh-CN.md @@ -17,21 +17,21 @@ ### Something-Something V1 -|配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | GPU 显存占用 (M)| ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tin_r50_1x1x8_40e_sthv1_rgb](/configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py)|高 100|8x4| ResNet50 | ImageNet | 44.25 | 73.94 | 44.04 | 72.72 | 6181 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb/tin_r50_1x1x8_40e_sthv1_rgb_20200729-4a33db86.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb/20200729_034132.log) | [json](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb/20200729_034132.log.json) | +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | GPU 显存占用 (M) | ckpt | log | json | +| :------------------------------------------------------------------------------------- | :---: | :----: | :------: | :------: | :------: | :------: | :------------: | :------------: | :----------: | :-------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------: | +| [tin_r50_1x1x8_40e_sthv1_rgb](/configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py) | 高 100 | 8x4 | ResNet50 | ImageNet | 44.25 | 73.94 | 44.04 | 72.72 | 6181 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb/tin_r50_1x1x8_40e_sthv1_rgb_20200729-4a33db86.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb/20200729_034132.log) | [json](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb/20200729_034132.log.json) | ### Something-Something V2 -|配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | GPU 显存占用 (M)| ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tin_r50_1x1x8_40e_sthv2_rgb](/configs/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb.py)|高 240|8x4| ResNet50 | ImageNet | 56.70 | 83.62 | 56.48 | 83.45 | 6185 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb/tin_r50_1x1x8_40e_sthv2_rgb_20200912-b27a7337.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb/20200912_225451.log) | [json](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb/20200912_225451.log.json) | +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | GPU 显存占用 (M) | ckpt | log | json | +| :------------------------------------------------------------------------------------- | :---: | :----: | :------: | :------: | :------: | :------: | :------------: | :------------: | :----------: | :-------------------------------------------------------------------------------------------------------------------------------------------: | 
:------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------: | +| [tin_r50_1x1x8_40e_sthv2_rgb](/configs/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb.py) | 高 240 | 8x4 | ResNet50 | ImageNet | 56.70 | 83.62 | 56.48 | 83.45 | 6185 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb/tin_r50_1x1x8_40e_sthv2_rgb_20200912-b27a7337.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb/20200912_225451.log) | [json](https://download.openmmlab.com/mmaction/recognition/tin/tin_r50_1x1x8_40e_sthv2_rgb/20200912_225451.log.json) | ### Kinetics-400 -|配置文件 | 分辨率 | GPU 数量 | 主干网络| 预训练 | top1 准确率| top5 准确率 | GPU 显存占用 (M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb.py)|短边 256|8x4| ResNet50 | TSM-Kinetics400 | 70.89 | 89.89 | 6187 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb_20200810-4a146a70.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/20200809_142447.log) | [json](https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/20200809_142447.log.json) | +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | GPU 显存占用 (M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------------------------------------------- | :----: | :----: | :------: | :-------------: | :------: | :------: | :----------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------: | +| [tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb.py) | 短边 256 | 8x4 | ResNet50 | TSM-Kinetics400 | 70.89 | 89.89 | 6187 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb_20200810-4a146a70.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/20200809_142447.log) | [json](https://download.openmmlab.com/mmaction/recognition/tin/tin_tsm_finetune_r50_1x1x8_50e_kinetics400_rgb/20200809_142447.log.json) | 这里,MMAction2 使用 `finetune` 一词表示 TIN 模型使用 Kinetics400 上的 [TSM 模型](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20200607-af7fb746.pth) 进行微调。 @@ -64,7 +64,7 @@ python tools/train.py configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py \ --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E8%AE%AD%E7%BB%83%E9%85%8D%E7%BD%AE) 中的 **训练配置** 部分。 ## 如何测试 @@ -82,4 +82,4 @@ 
python tools/test.py configs/recognition/tin/tin_r50_1x1x8_40e_sthv1_rgb.py \ --out result.json ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/tpn/README.md b/configs/recognition/tpn/README.md index 1e95e1fe1e..dbb0d42e0f 100644 --- a/configs/recognition/tpn/README.md +++ b/configs/recognition/tpn/README.md @@ -11,6 +11,7 @@ Visual tempo characterizes the dynamics and the temporal scale of an action. Modeling such visual tempos of different actions facilitates their recognition. Previous works often capture the visual tempo through sampling raw videos at multiple rates and constructing an input-level frame pyramid, which usually requires a costly multi-branch network to handle. In this work we propose a generic Temporal Pyramid Network (TPN) at the feature-level, which can be flexibly integrated into 2D or 3D backbone networks in a plug-and-play manner. Two essential components of TPN, the source of features and the fusion of features, form a feature hierarchy for the backbone so that it can capture action instances at various tempos. TPN also shows consistent improvements over other challenging baselines on several action recognition datasets. Specifically, when equipped with TPN, the 3D ResNet-50 with dense sampling obtains a 2% gain on the validation set of Kinetics-400. A further analysis also reveals that TPN gains most of its improvements on action classes that have large variances in their visual tempos, validating the effectiveness of TPN. +
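Since the abstract just added stresses that this pyramid lives at the *feature* level, a toy sketch may help: project features from two backbone stages to a common width, give deeper levels a slower tempo by temporal pooling, then resample everything to a common frame rate and fuse. The class name, channel counts, and the simple additive fusion are illustrative assumptions; the actual TPN adds dedicated modulation blocks and an auxiliary head.

```python
import torch
import torch.nn as nn
import torch.nn.functional as F


class TinyTPN(nn.Module):
    """Sketch of a feature-level temporal pyramid (illustrative only)."""

    def __init__(self, in_channels=(512, 1024), mid_channels=256):
        super().__init__()
        # lateral 1x1x1 convs bring both stages to a shared channel width
        self.lateral = nn.ModuleList(
            nn.Conv3d(c, mid_channels, kernel_size=1) for c in in_channels)

    def forward(self, feats):                  # list of (N, C_i, T, H_i, W_i)
        t = feats[0].shape[2]
        fused = 0
        for i, (conv, f) in enumerate(zip(self.lateral, feats)):
            f = conv(f).mean(dim=(3, 4))       # (N, mid, T): pool out space
            if i > 0:                          # slower tempo for deeper levels
                f = F.avg_pool1d(f, kernel_size=2 ** i)
            f = F.interpolate(f, size=t, mode='nearest')
            fused = fused + f                  # fuse tempos at a common rate
        return fused.mean(dim=-1)              # (N, mid) clip-level feature


feats = [torch.randn(2, 512, 8, 14, 14), torch.randn(2, 1024, 8, 7, 7)]
print(TinyTPN()(feats).shape)                  # torch.Size([2, 256])
```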
    @@ -19,16 +20,16 @@ Visual tempo characterizes the dynamics and the temporal scale of an action. Mod ### Kinetics-400 -|config | resolution | gpus | backbone | pretrain | top1 acc| top5 acc | reference top1 acc | reference top5 acc | inference_time(video/s) | gpu_mem(M)| ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tpn_slowonly_r50_8x8x1_150e_kinetics_rgb](/configs/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb.py)|short-side 320|8x2| ResNet50 | None | 73.58 | 91.35 | x | x | x | 6916 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb-c568e7ad.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb.json) | -|[tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb](/configs/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.py)|short-side 320|8| ResNet50 | ImageNet | 76.59 | 92.72 | [75.49](https://github.com/decisionforce/TPN/blob/master/MODELZOO.md) | [92.05](https://github.com/decisionforce/TPN/blob/master/MODELZOO.md) | x | 6916 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb-44362b55.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.json) | +| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | reference top1 acc | reference top5 acc | inference_time(video/s) | gpu_mem(M) | ckpt | log | json | +| :------------------------------------------------------------------------------------------------------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------: | :-------------------------------------------------------------------: | :-------------------------------------------------------------------: | :---------------------: | :--------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [tpn_slowonly_r50_8x8x1_150e_kinetics_rgb](/configs/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb.py) | short-side 320 | 8x2 | ResNet50 | None | 73.58 | 91.35 | x | x | x | 6916 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb-c568e7ad.pth) | 
[log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb.json) | +| [tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb](/configs/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.py) | short-side 320 | 8 | ResNet50 | ImageNet | 76.59 | 92.72 | [75.49](https://github.com/decisionforce/TPN/blob/master/MODELZOO.md) | [92.05](https://github.com/decisionforce/TPN/blob/master/MODELZOO.md) | x | 6916 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb-44362b55.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.json) | ### Something-Something V1 -|config | resolution | gpus | backbone| pretrain | top1 acc| top5 acc | gpu_mem(M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tpn_tsm_r50_1x1x8_150e_sthv1_rgb](/configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py)|height 100|8x6| ResNet50 | TSM | 51.50 | 79.15 | 8828 |[ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/tpn_tsm_r50_1x1x8_150e_sthv1_rgb_20211202-c28ed83f.pth) |[log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.json)| +| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | gpu_mem(M) | ckpt | log | json | +| :----------------------------------------------------------------------------------------------- | :--------: | :--: | :------: | :------: | :------: | :------: | :--------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------: | +| [tpn_tsm_r50_1x1x8_150e_sthv1_rgb](/configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py) | height 100 | 8x6 | ResNet50 | TSM | 51.50 | 79.15 | 8828 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/tpn_tsm_r50_1x1x8_150e_sthv1_rgb_20211202-c28ed83f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.json) | :::{note} diff --git a/configs/recognition/tpn/README_zh-CN.md b/configs/recognition/tpn/README_zh-CN.md index ec66656d1d..1ca531ec1c 100644 --- a/configs/recognition/tpn/README_zh-CN.md +++ 
b/configs/recognition/tpn/README_zh-CN.md @@ -17,15 +17,15 @@ ### Kinetics-400 -|配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M)| ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tpn_slowonly_r50_8x8x1_150e_kinetics_rgb](/configs/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb.py)|短边 320|8x2| ResNet50 | None | 73.58 | 91.35 | x | x | x | 6916 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb-c568e7ad.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb.json) | -|[tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb](/configs/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.py)|短边 320|8| ResNet50 | ImageNet | 76.59 | 92.72 | [75.49](https://github.com/decisionforce/TPN/blob/master/MODELZOO.md) | [92.05](https://github.com/decisionforce/TPN/blob/master/MODELZOO.md) | x | 6916 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb-44362b55.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.json) | +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log | json | +| :------------------------------------------------------------------------------------------------------------------------------------------------------- | :----: | :----: | :------: | :------: | :------: | :------: | :-------------------------------------------------------------------: | :-------------------------------------------------------------------: | :------------: | :----------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [tpn_slowonly_r50_8x8x1_150e_kinetics_rgb](/configs/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb.py) | 短边 320 | 8x2 | ResNet50 | None | 73.58 | 91.35 | x | x | x | 6916 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb-c568e7ad.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb.log) | 
[json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb.json) | +| [tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb](/configs/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.py) | 短边 320 | 8 | ResNet50 | ImageNet | 76.59 | 92.72 | [75.49](https://github.com/decisionforce/TPN/blob/master/MODELZOO.md) | [92.05](https://github.com/decisionforce/TPN/blob/master/MODELZOO.md) | x | 6916 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb-44362b55.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb/tpn_imagenet_pretrained_slowonly_r50_8x8x1_150e_kinetics_rgb.json) | ### Something-Something V1 |配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率| top5 准确率 | GPU 显存占用 (M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| +|:\--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| |[tpn_tsm_r50_1x1x8_150e_sthv1_rgb](/configs/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.py)|height 100|8x6| ResNet50 | TSM | 51.50 | 79.15 | 8828 |[ckpt](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/tpn_tsm_r50_1x1x8_150e_sthv1_rgb_20211202-c28ed83f.pth) |[log](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/tpn/tpn_tsm_r50_1x1x8_150e_sthv1_rgb/tpn_tsm_r50_1x1x8_150e_sthv1_rgb.json)| 注: @@ -53,7 +53,7 @@ python tools/train.py configs/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kineti --work-dir work_dirs/tpn_slowonly_r50_8x8x1_150e_kinetics_rgb [--validate --seed 0 --deterministic] ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E8%AE%AD%E7%BB%83%E9%85%8D%E7%BD%AE) 中的 **训练配置** 部分。 ## 如何测试 @@ -71,4 +71,4 @@ python tools/test.py configs/recognition/tpn/tpn_slowonly_r50_8x8x1_150e_kinetic --out result.json --average-clips prob ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/trn/README.md b/configs/recognition/trn/README.md index f5ab424971..004bead94c 100644 --- a/configs/recognition/trn/README.md +++ b/configs/recognition/trn/README.md @@ -11,6 +11,7 @@ Temporal relational reasoning, the ability to link meaningful transformations of objects or entities over time, is a fundamental property of intelligent species. In this paper, we introduce an effective and interpretable network module, the Temporal Relation Network (TRN), designed to learn and reason about temporal dependencies between video frames at multiple time scales. We evaluate TRN-equipped networks on activity recognition tasks using three recent video datasets - Something-Something, Jester, and Charades - which fundamentally depend on temporal relational reasoning. 
Our results demonstrate that the proposed TRN gives convolutional neural networks a remarkable capacity to discover temporal relations in videos. Through only sparsely sampled video frames, TRN-equipped networks can accurately predict human-object interactions in the Something-Something dataset and identify various human gestures on the Jester dataset with very competitive performance. TRN-equipped networks also outperform two-stream networks and 3D convolution networks in recognizing daily activities in the Charades dataset. Further analyses show that the models learn intuitive and interpretable visual common sense knowledge in videos. +
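A toy version of the multi-scale relation module makes the idea concrete: for each scale k, sample a few *ordered* k-frame subsets, feed each subset's concatenated features to a scale-specific MLP, and sum the relation scores across subsets and scales. Names and sizes below are illustrative assumptions, not the authors' code.

```python
import itertools
import random

import torch
import torch.nn as nn


class TinyTRN(nn.Module):
    """Sketch of multi-scale temporal relations over per-frame features."""

    def __init__(self, feat_dim, num_classes, num_frames, scales=(2, 3),
                 subsets_per_scale=3):
        super().__init__()
        self.subsets, self.heads = {}, nn.ModuleDict()
        for k in scales:
            # fixed random selection of ordered k-frame index tuples
            combos = list(itertools.combinations(range(num_frames), k))
            self.subsets[k] = random.sample(combos, subsets_per_scale)
            self.heads[str(k)] = nn.Sequential(
                nn.Linear(k * feat_dim, 256), nn.ReLU(inplace=True),
                nn.Linear(256, num_classes))

    def forward(self, x):                       # x: (N, T, D) frame features
        logits = 0
        for k, combos in self.subsets.items():
            for combo in combos:                # frames kept in temporal order
                logits = logits + self.heads[str(k)](
                    x[:, list(combo)].flatten(1))
        return logits


frames = torch.randn(4, 8, 512)                 # 4 clips, 8 frames, 512-d feats
print(TinyTRN(512, num_classes=174, num_frames=8)(frames).shape)  # (4, 174)
```

Because each head only ever sees sparsely sampled ordered tuples, the module stays cheap at inference time, which is the efficiency point the abstract makes about sparse frame sampling.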
@@ -19,15 +20,15 @@ Temporal relational reasoning, the ability to link meaningful transformations of
 
 ### Something-Something V1
 
-|config | resolution | gpus | backbone| pretrain | top1 acc (efficient/accurate)| top5 acc (efficient/accurate)| gpu_mem(M) | ckpt | log| json|
-|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
-|[trn_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 31.62 / 33.88 |60.01 / 62.12| 11010 | [ckpt](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/trn_r50_1x1x8_50e_sthv1_rgb_20210401-163704a8.pth) | [log](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log)| [json](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log.json)|
+| config | resolution | gpus | backbone | pretrain | top1 acc (efficient/accurate) | top5 acc (efficient/accurate) | gpu_mem(M) | ckpt | log | json |
+| :------------------------------------------------------------------------------------- | :--------: | :--: | :------: | :------: | :---------------------------: | :---------------------------: | :--------: | :---------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------: |
+| [trn_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 31.62 / 33.88 | 60.01 / 62.12 | 11010 | [ckpt](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/trn_r50_1x1x8_50e_sthv1_rgb_20210401-163704a8.pth) | [log](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log) | [json](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log.json) |
 
 ### Something-Something V2
 
-|config | resolution | gpus | backbone| pretrain | top1 acc (efficient/accurate)| top5 acc (efficient/accurate)| gpu_mem(M) | ckpt | log| json|
-|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
-|[trn_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py) | height 256 | 8 | ResNet50 | ImageNet | 48.39 / 51.28 |76.58 / 78.65 | 11010 | [ckpt](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/trn_r50_1x1x8_50e_sthv2_rgb_20210816-7abbc4c1.pth) | [log](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210816_221356.log)| [json](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210816_221356.log.json)|
+| config | resolution | gpus | backbone | pretrain | top1 acc (efficient/accurate) | top5 acc (efficient/accurate) | gpu_mem(M) | ckpt | log | json |
+| :------------------------------------------------------------------------------------- | :--------: | :--: | :------: | :------: | :---------------------------: | :---------------------------: | :--------: | :---------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------: |
+| [trn_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py) | height 256 | 8 | ResNet50 | ImageNet | 48.39 / 51.28 | 76.58 / 78.65 | 11010 | [ckpt](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/trn_r50_1x1x8_50e_sthv2_rgb_20210816-7abbc4c1.pth) | [log](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210816_221356.log) | [json](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210816_221356.log.json) |
 
 :::{note}
diff --git a/configs/recognition/trn/README_zh-CN.md b/configs/recognition/trn/README_zh-CN.md
index d0e85f015c..7e9b698fe7 100644
--- a/configs/recognition/trn/README_zh-CN.md
+++ b/configs/recognition/trn/README_zh-CN.md
@@ -17,15 +17,15 @@
 
 ### Something-Something V1
 
-|配置文件 | 分辨率 | GPU 数量 | 主干网络| 预训练 | top1 准确率 (efficient/accurate)| top5 准确率 (efficient/accurate)| GPU 显存占用 (M)| ckpt | log| json|
-|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
-|[trn_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 31.62 / 33.88 |60.01 / 62.12| 11010 | [ckpt](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/trn_r50_1x1x8_50e_sthv1_rgb_20210401-163704a8.pth) | [log](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log)| [json](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log.json)|
+| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 (efficient/accurate) | top5 准确率 (efficient/accurate) | GPU 显存占用 (M) | ckpt | log | json |
+| :------------------------------------------------------------------------------------- | :---: | :----: | :------: | :------: | :---------------------------: | :---------------------------: | :----------: | :---------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------: |
+| [trn_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 31.62 / 33.88 | 60.01 / 62.12 | 11010 | [ckpt](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/trn_r50_1x1x8_50e_sthv1_rgb_20210401-163704a8.pth) | [log](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log) | [json](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb/20210326_103948.log.json) |
 
 ### Something-Something V2
 
-|配置文件 | 分辨率 | GPU 数量 | 主干网络| 预训练 | top1 准确率 (efficient/accurate)| top5 准确率 (efficient/accurate)| GPU 显存占用 (M)| ckpt | log| json|
-|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
-|[trn_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py) | 高 256 | 8 | ResNet50 | ImageNet | 48.39 / 51.28 |76.58 / 78.65 | 11010 | [ckpt](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/trn_r50_1x1x8_50e_sthv2_rgb_20210816-7abbc4c1.pth) | [log](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210816_221356.log)| [json](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210816_221356.log.json)|
+| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 (efficient/accurate) | top5 准确率 (efficient/accurate) | GPU 显存占用 (M) | ckpt | log | json |
+| :------------------------------------------------------------------------------------- | :---: | :----: | :------: | :------: | :---------------------------: | :---------------------------: | :----------: | :---------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------: |
+| [trn_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb.py) | 高 256 | 8 | ResNet50 | ImageNet | 48.39 / 51.28 | 76.58 / 78.65 | 11010 | [ckpt](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/trn_r50_1x1x8_50e_sthv2_rgb_20210816-7abbc4c1.pth) | [log](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210816_221356.log) | [json](https://download.openmmlab.com/mmaction/recognition/trn/trn_r50_1x1x8_50e_sthv2_rgb/20210816_221356.log.json) |
 
 注:
@@ -57,7 +57,7 @@ python tools/train.py configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py \
     --validate --seed 0 --deterministic
 ```
 
-更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。
+更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E8%AE%AD%E7%BB%83%E9%85%8D%E7%BD%AE) 中的 **训练配置** 部分。
 
 ## 如何测试
 
@@ -75,4 +75,4 @@ python tools/test.py configs/recognition/trn/trn_r50_1x1x8_50e_sthv1_rgb.py \
     --out result.json
 ```
 
-更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。
+更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86) 中的 **测试某个数据集** 部分。
diff --git a/configs/recognition/tsm/README.md b/configs/recognition/tsm/README.md
index ca5d4ac25e..35333731f4 100644
--- a/configs/recognition/tsm/README.md
+++ b/configs/recognition/tsm/README.md
@@ -11,6 +11,7 @@
 
 The explosive growth in video streaming gives rise to challenges on performing video understanding at high accuracy and low computation cost. Conventional 2D CNNs are computationally cheap but cannot capture temporal relationships; 3D CNN based methods can achieve good performance but are computationally intensive, making it expensive to deploy. In this paper, we propose a generic and effective Temporal Shift Module (TSM) that enjoys both high efficiency and high performance. Specifically, it can achieve the performance of 3D CNN but maintain 2D CNN's complexity. TSM shifts part of the channels along the temporal dimension; thus facilitate information exchanged among neighboring frames. It can be inserted into 2D CNNs to achieve temporal modeling at zero computation and zero parameters. We also extended TSM to online setting, which enables real-time low-latency online video recognition and video object detection. TSM is accurate and efficient: it ranks the first place on the Something-Something leaderboard upon publication; on Jetson Nano and Galaxy Note8, it achieves a low latency of 13ms and 35ms for online video recognition.
+
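(Editor's note: the remainder of this patch only reflows the TSM result tables, so a minimal sketch of the temporal shift described in the abstract above may help. This follows the shift pattern from the TSM paper; the function name and the `shift_div` argument are illustrative here, not MMAction2's exact API.)

```python
import torch


def temporal_shift(x: torch.Tensor, num_segments: int,
                   shift_div: int = 8) -> torch.Tensor:
    """Shift 1/shift_div of the channels one frame forward in time and
    another 1/shift_div one frame backward; leave the rest in place.

    x: (N * num_segments, C, H, W) per-frame features. The op adds zero
    parameters and zero FLOPs: it is pure memory movement.
    """
    nt, c, h, w = x.shape
    n = nt // num_segments
    x = x.view(n, num_segments, c, h, w)
    fold = c // shift_div
    out = torch.zeros_like(x)
    out[:, :-1, :fold] = x[:, 1:, :fold]                  # shift toward earlier frames
    out[:, 1:, fold:2 * fold] = x[:, :-1, fold:2 * fold]  # shift toward later frames
    out[:, :, 2 * fold:] = x[:, :, 2 * fold:]             # remaining channels untouched
    return out.view(nt, c, h, w)


# Toy usage: 2 clips of 8 segments, 64-channel 7x7 feature maps per frame.
shifted = temporal_shift(torch.randn(16, 64, 7, 7), num_segments=8)
```

Because the shifted tensor keeps the `(N * T, C, H, W)` layout, the module can be dropped in front of any 2D convolution in a ResNet block, which is how the configs below obtain temporal modeling from plain 2D backbones.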
@@ -19,79 +20,79 @@ The explosive growth in video streaming gives rise to challenges on performing v
 
 ### Kinetics-400
 
-|config | resolution | gpus | backbone | pretrain | top1 acc| top5 acc | reference top1 acc | reference top5 acc | inference_time(video/s) | gpu_mem(M)| ckpt | log| json|
-|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
-|[tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |340x256|8| ResNet50| ImageNet |70.24|89.56|[70.36](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|[89.49](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|74.0 (8x1 frames)| 7079 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20200607-af7fb746.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log.json)|
-|[tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet |70.59|89.52|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/tsm_r50_256p_1x1x8_50e_kinetics400_rgb_20200726-020785e2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log.json)|
-|[tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |short-side 320|8| ResNet50| ImageNet |70.73|89.81|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20210701-68d582b4.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20210616_021451.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20210616_021451.log.json)|
-|[tsm_r50_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb.py) |short-side 320|8| ResNet50| ImageNet |71.90|90.03|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb/tsm_r50_1x1x8_100e_kinetics400_rgb_20210701-7ff22268.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb/20210617_103543.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb/20210617_103543.log.json)|
-|[tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py](/configs/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet |70.48|89.40|x|x|x|7076|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219-bf96e6cc.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.json)|
-|[tsm_r50_video_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet |70.25|89.66|[70.36](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|[89.49](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|74.0 (8x1 frames)| 7077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_1x1x8_100e_kinetics400_rgb_20200702-a77f4328.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log.json)|
-|[tsm_r50_dense_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb.py) |short-side 320|8| ResNet50 | ImageNet|73.46|90.84|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb/tsm_r50_dense_1x1x8_50e_kinetics400_rgb_20210701-a54ff3d3.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb/20210617_103245.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb/20210617_103245.log.json)|
-|[tsm_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py) |short-side 320|8| ResNet50 | ImageNet|74.55|91.74|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/tsm_r50_dense_1x1x8_100e_kinetics400_rgb_20210701-e3e5e97f.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20210613_034931.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20210613_034931.log.json)|
-|[tsm_r50_1x1x16_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py) |340x256|8| ResNet50| ImageNet |72.09|90.37|[70.67](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_16f.sh)|[89.98](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_16f.sh)|47.0 (16x1 frames)| 10404 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/tsm_r50_340x256_1x1x16_50e_kinetics400_rgb_20201011-2f27f229.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log.json)|
-|[tsm_r50_1x1x16_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py) |short-side 256|8x4| ResNet50| ImageNet |71.89|90.73|x|x|x|10398|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/tsm_r50_256p_1x1x16_50e_kinetics400_rgb_20201010-85645c2a.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log.json)|
-|[tsm_r50_1x1x16_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb.py) |short-side 320|8| ResNet50| ImageNet |72.80|90.75|x|x|x|10398|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb/tsm_r50_1x1x16_100e_kinetics400_rgb_20210701-41ac92b9.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb/20210618_193859.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb/20210618_193859.log.json)|
-|[tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb.py)|short-side 320|8x4| ResNet50| ImageNet |72.03|90.25|71.81|90.36|x|8931|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb_20200724-f00f1336.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log.json)|
-|[tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb.py)|short-side 320|8x4| ResNet50| ImageNet |70.70|89.90|x|x|x|10125|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb_20200816-b93fd297.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log.json)|
-|[tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb.py)|short-side 320|8x4|ResNet50| ImageNet |71.60|90.34|x|x|x|8358|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb_20200724-d8ad84d2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log.json)|
-|[tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py)|short-side 320|8|MobileNetV2| ImageNet |68.46|88.64|x|x|x|3385|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/tsm_mobilenetv2_dense_320p_1x1x8_100e_kinetics400_rgb_20210202-61135809.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log.json)|
-|[tsm_mobilenetv2_dense_1x1x8_kinetics400_rgb_port](/configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py)|short-side 320|8|MobileNetV2| ImageNet |69.89|89.01|x|x|x|3385|[infer_ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_kinetics400_rgb_port_20210922-aa5cadf6.pth)|x|x|
+| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | reference top1 acc | reference top5 acc | inference_time(video/s) | gpu_mem(M) | ckpt | log | json |
+| :------------------------------------------------------------------------------------------------------------------------------------------- | :------------: | :--: | :---------: | :------: | :------: | :------: | :-----------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------: | :--------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------: |
+| [tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) | 340x256 | 8 | ResNet50 | ImageNet | 70.24 | 89.56 | [70.36](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh) | [89.49](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh) | 74.0 (8x1 frames) | 7079 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20200607-af7fb746.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log.json) |
+| [tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) | short-side 256 | 8 | ResNet50 | ImageNet | 70.59 | 89.52 | x | x | x | 7079 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/tsm_r50_256p_1x1x8_50e_kinetics400_rgb_20200726-020785e2.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log.json) |
+| [tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) | short-side 320 | 8 | ResNet50 | ImageNet | 70.73 | 89.81 | x | x | x | 7079 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20210701-68d582b4.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20210616_021451.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20210616_021451.log.json) |
+| [tsm_r50_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb.py) | short-side 320 | 8 | ResNet50 | ImageNet | 71.90 | 90.03 | x | x | x | 7079 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb/tsm_r50_1x1x8_100e_kinetics400_rgb_20210701-7ff22268.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb/20210617_103543.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb/20210617_103543.log.json) |
+| [tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py](/configs/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py) | short-side 256 | 8 | ResNet50 | ImageNet | 70.48 | 89.40 | x | x | x | 7076 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219-bf96e6cc.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.json) |
+| [tsm_r50_video_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_kinetics400_rgb.py) | short-side 256 | 8 | ResNet50 | ImageNet | 70.25 | 89.66 | [70.36](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh) | [89.49](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh) | 74.0 (8x1 frames) | 7077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_1x1x8_100e_kinetics400_rgb_20200702-a77f4328.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log.json) |
+| [tsm_r50_dense_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb.py) | short-side 320 | 8 | ResNet50 | ImageNet | 73.46 | 90.84 | x | x | x | 7079 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb/tsm_r50_dense_1x1x8_50e_kinetics400_rgb_20210701-a54ff3d3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb/20210617_103245.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb/20210617_103245.log.json) |
+| [tsm_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py) | short-side 320 | 8 | ResNet50 | ImageNet | 74.55 | 91.74 | x | x | x | 7079 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/tsm_r50_dense_1x1x8_100e_kinetics400_rgb_20210701-e3e5e97f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20210613_034931.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20210613_034931.log.json) |
+| [tsm_r50_1x1x16_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py) | 340x256 | 8 | ResNet50 | ImageNet | 72.09 | 90.37 | [70.67](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_16f.sh) | [89.98](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_16f.sh) | 47.0 (16x1 frames) | 10404 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/tsm_r50_340x256_1x1x16_50e_kinetics400_rgb_20201011-2f27f229.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log.json) |
+| [tsm_r50_1x1x16_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py) | short-side 256 | 8x4 | ResNet50 | ImageNet | 71.89 | 90.73 | x | x | x | 10398 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/tsm_r50_256p_1x1x16_50e_kinetics400_rgb_20201010-85645c2a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log.json) |
+| [tsm_r50_1x1x16_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb.py) | short-side 320 | 8 | ResNet50 | ImageNet | 72.80 | 90.75 | x | x | x | 10398 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb/tsm_r50_1x1x16_100e_kinetics400_rgb_20210701-41ac92b9.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb/20210618_193859.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb/20210618_193859.log.json) |
+| [tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb.py) | short-side 320 | 8x4 | ResNet50 | ImageNet | 72.03 | 90.25 | 71.81 | 90.36 | x | 8931 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb_20200724-f00f1336.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log.json) |
+| [tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb.py) | short-side 320 | 8x4 | ResNet50 | ImageNet | 70.70 | 89.90 | x | x | x | 10125 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb_20200816-b93fd297.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log.json) |
+| [tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb.py) | short-side 320 | 8x4 | ResNet50 | ImageNet | 71.60 | 90.34 | x | x | x | 8358 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb_20200724-d8ad84d2.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log.json) |
+| [tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py) | short-side 320 | 8 | MobileNetV2 | ImageNet | 68.46 | 88.64 | x | x | x | 3385 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/tsm_mobilenetv2_dense_320p_1x1x8_100e_kinetics400_rgb_20210202-61135809.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log.json) |
+| [tsm_mobilenetv2_dense_1x1x8_kinetics400_rgb_port](/configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py) | short-side 320 | 8 | MobileNetV2 | ImageNet | 69.89 | 89.01 | x | x | x | 3385 | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_kinetics400_rgb_port_20210922-aa5cadf6.pth) | x | x |
 
 ### Diving48
 
-|config | gpus | backbone | pretrain | top1 acc| top5 acc | gpu_mem(M) | ckpt | log| json|
-|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
-|[tsm_r50_video_1x1x8_50e_diving48_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb.py)| 8 | ResNet50 | ImageNet | 75.99 | 97.16 | 7070 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/tsm_r50_video_1x1x8_50e_diving48_rgb_20210426-aba5aa3d.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log.json)|
-|[tsm_r50_video_1x1x16_50e_diving48_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb.py)| 8 | ResNet50 | ImageNet | 81.62 | 97.66 | 7070 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/tsm_r50_video_1x1x16_50e_diving48_rgb_20210426-aa9631c0.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log.json)|
+| config | gpus | backbone | pretrain | top1 acc | top5 acc | gpu_mem(M) | ckpt | log | json |
+| :--------------------------------------------------------------------------------------------------------- | :--: | :------: | :------: | :------: | :------: | :--------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------: |
+| [tsm_r50_video_1x1x8_50e_diving48_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb.py) | 8 | ResNet50 | ImageNet | 75.99 | 97.16 | 7070 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/tsm_r50_video_1x1x8_50e_diving48_rgb_20210426-aba5aa3d.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log.json) |
+| [tsm_r50_video_1x1x16_50e_diving48_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb.py) | 8 | ResNet50 | ImageNet | 81.62 | 97.66 | 7070 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/tsm_r50_video_1x1x16_50e_diving48_rgb_20210426-aa9631c0.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log.json) |
 
 ### Something-Something V1
 
-|config | resolution | gpus | backbone| pretrain | top1 acc (efficient/accurate)| top5 acc (efficient/accurate)| reference top1 acc (efficient/accurate)| reference top5 acc (efficient/accurate)| gpu_mem(M) | ckpt | log| json|
-|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
-|[tsm_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb.py) |height 100|8| ResNet50 | ImageNet| 45.58 / 47.70|75.02 / 76.12|[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7077| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/tsm_r50_1x1x8_50e_sthv1_rgb_20210203-01dce462.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/20210203_150227.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/20210203_150227.log.json)|
-|[tsm_r50_flip_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb.py) |height 100|8| ResNet50 | ImageNet| 47.10 / 48.51|76.02 / 77.56|[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7077| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/tsm_r50_flip_1x1x8_50e_sthv1_rgb_20210203-12596f16.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/20210203_145829.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/20210203_145829.log.json)|
-|[tsm_r50_randaugment_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.py) |height 100|8| ResNet50 | ImageNet| 47.16 / 48.90|76.07 / 77.92|[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7077| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb_20210324-481268d9.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.json)|
-|[tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.py) |height 100|8| ResNet50 | ImageNet| 47.65 / 48.66 | 76.67 / 77.41 |[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7077| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb-ee93e5e3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.json) |
-|[tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.py) |height 100|8| ResNet50 | ImageNet| 46.26 / 47.68 | 75.92 / 76.49 |[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7077| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb-4f4f4740.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.json) |
-|[tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.py) |height 100|8| ResNet50 | ImageNet| 47.85 / 50.31|76.78 / 78.18|[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7077| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb_20210324-76937692.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.json)|
-|[tsm_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb.py)|height 100|8| ResNet50 | ImageNet|47.77 / 49.03|76.82 / 77.83|[47.05 / 48.61](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[76.40 / 77.96](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|10390|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb_20211202-b922e5d2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb.json)|
-|[tsm_r101_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb.py)|height 100|8| ResNet50 | ImageNet|46.09 / 48.59|75.41 / 77.10|[46.64 / 48.13](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[75.40 / 77.31](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|9800|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb_20211202-49970a5b.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb.json)|
+| config | resolution | gpus | backbone | pretrain | top1 acc (efficient/accurate) | top5 acc (efficient/accurate) | reference top1 acc (efficient/accurate) | reference top5 acc (efficient/accurate) | gpu_mem(M) | ckpt | log | json |
+| :----------------------------------------------------------------------------------------------------------------------- | :--------: | :--: | :------: | :------: | :---------------------------: | :---------------------------: | :--------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------: | :--------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------: |
+| [tsm_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 45.58 / 47.70 | 75.02 / 76.12 | [45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 7077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/tsm_r50_1x1x8_50e_sthv1_rgb_20210203-01dce462.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/20210203_150227.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/20210203_150227.log.json) |
+| [tsm_r50_flip_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 47.10 / 48.51 | 76.02 / 77.56 | [45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 7077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/tsm_r50_flip_1x1x8_50e_sthv1_rgb_20210203-12596f16.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/20210203_145829.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/20210203_145829.log.json) |
+| [tsm_r50_randaugment_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 47.16 / 48.90 | 76.07 / 77.92 | [45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 7077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb_20210324-481268d9.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.json) |
+| [tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 47.65 / 48.66 | 76.67 / 77.41 | [45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 7077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb-ee93e5e3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.json) |
+| [tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 46.26 / 47.68 | 75.92 / 76.49 | [45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 7077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb-4f4f4740.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.json) |
+| [tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 47.85 / 50.31 | 76.78 / 78.18 | [45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 7077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb_20210324-76937692.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.json) |
+| [tsm_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 47.77 / 49.03 | 76.82 / 77.83 | [47.05 / 48.61](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [76.40 / 77.96](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 10390 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb_20211202-b922e5d2.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb.json) |
+| [tsm_r101_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 46.09 / 48.59 | 75.41 / 77.10 | [46.64 / 48.13](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [75.40 / 77.31](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 9800 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb_20211202-49970a5b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb.json) |
 
 ### Something-Something V2
 
-|config | resolution | gpus | backbone | pretrain| top1 acc (efficient/accurate)| top5 acc (efficient/accurate)| reference top1 acc (efficient/accurate)| reference top5 acc (efficient/accurate)| gpu_mem(M) | ckpt | log| json|
-|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
-|[tsm_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py) |height 256|8| ResNet50| ImageNet |59.11 / 61.82|85.39 / 86.80|[xx / 61.2](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7069 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/tsm_r50_256h_1x1x8_50e_sthv2_rgb_20210816-032aa4da.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210816_224310.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210816_224310.log.json)|
-|[tsm_r50_1x1x16_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py) |height 256|8| ResNet50| ImageNet |61.06 / 63.19|86.66 / 87.93|[xx / 63.1](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 10400 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/tsm_r50_256h_1x1x16_50e_sthv2_rgb_20210331-0a45549c.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20210331_134458.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20210331_134458.log.json)|
-|[tsm_r101_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb.py) |height 256|8| ResNet101 | ImageNet|60.88 / 63.84|86.56 / 88.30|[xx / 63.3](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 9727 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/tsm_r101_256h_1x1x8_50e_sthv2_rgb_20210401-df97f3e1.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20210401_143656.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20210401_143656.log.json)|
+| config | resolution | gpus | backbone | pretrain | top1 acc (efficient/accurate) | top5 acc (efficient/accurate) | reference top1 acc (efficient/accurate) | reference top5 acc (efficient/accurate) | gpu_mem(M) | ckpt | log | json |
+| :--------------------------------------------------------------------------------------- | :--------: | :--: | :-------: | :------: | :---------------------------: | :---------------------------: | :----------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------: | :--------: | :--------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------: |
+| [tsm_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py) | height 256 | 8 | ResNet50 | ImageNet | 59.11 / 61.82 | 85.39 / 86.80 | [xx / 61.2](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 7069 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/tsm_r50_256h_1x1x8_50e_sthv2_rgb_20210816-032aa4da.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210816_224310.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210816_224310.log.json) |
+| [tsm_r50_1x1x16_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py) | height 256 | 8 | ResNet50 | ImageNet | 61.06 / 63.19 | 86.66 / 87.93 | [xx / 63.1](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 10400 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/tsm_r50_256h_1x1x16_50e_sthv2_rgb_20210331-0a45549c.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20210331_134458.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20210331_134458.log.json) |
+| [tsm_r101_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb.py) | height 256 | 8 | ResNet101 | ImageNet | 60.88 / 63.84 | 86.56 / 88.30 | [xx / 63.3](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 9727 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/tsm_r101_256h_1x1x8_50e_sthv2_rgb_20210401-df97f3e1.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20210401_143656.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20210401_143656.log.json) |
 
 ### MixUp & CutMix on Something-Something V1
 
-| config | resolution | gpus | backbone | pretrain | top1 acc (efficient/accurate) | top5 acc (efficient/accurate) | delta top1 acc (efficient/accurate) | delta top5 acc (efficient/accurate) | ckpt | log | json |
-| :----------------------------------------------------------- | :--------: | :--: | :------: | :------: | :---------------------------: | :---------------------------: | :---------------------------------: | :---------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: |
-| [tsm_r50_mixup_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 46.35 / 48.49 | 75.07 / 76.88 | +0.77 / +0.79 | +0.05 / +0.70 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb-9eca48e5.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.json) |
+| config | resolution | gpus | backbone | pretrain | top1 acc (efficient/accurate) | top5 acc (efficient/accurate) | delta top1 acc (efficient/accurate) | delta top5 acc (efficient/accurate) | ckpt | log | json |
+| :--------------------------------------------------------------------------------------------------- | :--------: | :--: | :------: | :------: | :---------------------------: | :---------------------------: | :---------------------------------: | :---------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------------: |
+| [tsm_r50_mixup_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 46.35 / 48.49 | 75.07 / 76.88 | +0.77 / +0.79 | +0.05 / +0.70 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb-9eca48e5.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.json) |
 | [tsm_r50_cutmix_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 45.92 / 47.46 | 75.23 / 76.71 | +0.34 / -0.24 | +0.21 / +0.59 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb-34934615.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.json) |
 
 ### Jester
 
-| config | resolution | gpus | backbone | pretrain | top1 acc (efficient/accurate) | ckpt | log | json |
-| ------------------------------------------------------------ | :--------: | :--: | :------: | :------: | :---------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: |
+| config | resolution | gpus | backbone | pretrain | top1 acc (efficient/accurate) | ckpt | log | json |
+| ---------------------------------------------------------------------------------------- | :--------: | :--: | :------: | :------: | :---------------------------: | :------------------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------: |
 | [tsm_r50_1x1x8_50e_jester_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 96.5 / 97.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb-c799267e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb.json) |
 
 ### HMDB51
 
-|config | gpus | backbone | pretrain | top1 acc| top5 acc | gpu_mem(M) | ckpt | log| json|
-|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
-|[tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb](/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb.py)|8|ResNet50|Kinetics400|72.68|92.03|10388|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb_20210630-10c74ee5.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/20210605_182554.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/20210605_182554.log.json)|
-|[tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb](/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb.py)|8|ResNet50|Kinetics400|74.77|93.86|10388|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb_20210630-4785548e.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/20210605_182505.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/20210605_182505.log.json)|
+| config | gpus | backbone | pretrain | top1 acc | top5 acc | gpu_mem(M) | ckpt | log | json |
+| :------------------------------------------------------------------------------------------------------------------------- | :--: | :------: | :---------: | :------: | :------: | :--------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------: |
+| [tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb](/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb.py) | 8 | ResNet50 | Kinetics400 | 72.68 | 92.03 | 10388 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb_20210630-10c74ee5.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/20210605_182554.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/20210605_182554.log.json) |
+| [tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb](/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb.py) | 8 | ResNet50 | Kinetics400 | 74.77 | 93.86 | 10388 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb_20210630-4785548e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/20210605_182505.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/20210605_182505.log.json) |
 
 ### UCF101
 
-|config | gpus | backbone | pretrain | top1 acc| top5 acc | gpu_mem(M) | ckpt | log| json|
-|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
-|[tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb](/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb.py)|8|ResNet50|Kinetics400|94.50|99.58|10389|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb_20210630-1fae312b.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/20210605_182720.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/20210605_182720.log.json)|
-|[tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb](/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb.py)|8|ResNet50|Kinetics400|94.58|99.37|10389|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb_20210630-8df9c358.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/20210605_182720.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/20210605_182720.log.json)|
+| config | gpus | backbone | pretrain | top1 acc | top5 acc | gpu_mem(M) | ckpt | log | json |
+| :------------------------------------------------------------------------------------------------------------------------- | :--: | :------: | :---------: | :------: | :------: | :--------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------: |
+| [tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb](/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb.py) | 8 | ResNet50 | Kinetics400 | 94.50 | 99.58 | 10389 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb_20210630-1fae312b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/20210605_182720.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/20210605_182720.log.json) |
+| 
[tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb](/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb.py) | 8 | ResNet50 | Kinetics400 | 94.58 | 99.37 | 10389 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb_20210630-8df9c358.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/20210605_182720.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/20210605_182720.log.json) | :::{note} diff --git a/configs/recognition/tsm/README_zh-CN.md b/configs/recognition/tsm/README_zh-CN.md index f95876fd9e..c8a64f9ca7 100644 --- a/configs/recognition/tsm/README_zh-CN.md +++ b/configs/recognition/tsm/README_zh-CN.md @@ -28,86 +28,86 @@ ### Kinetics-400 -|配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M)| ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |340x256|8| ResNet50| ImageNet |70.24|89.56|[70.36](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|[89.49](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|74.0 (8x1 frames)| 7079 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20200607-af7fb746.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log.json)| -|[tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |短边 256|8| ResNet50| ImageNet |70.59|89.52|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/tsm_r50_256p_1x1x8_50e_kinetics400_rgb_20200726-020785e2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log.json)| -|[tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |短边 320|8| ResNet50| ImageNet |70.73|89.81|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20210701-68d582b4.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20210616_021451.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20210616_021451.log.json)| -|[tsm_r50_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb.py) |短边 320|8| ResNet50| ImageNet 
-|[tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py](/configs/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py) |短边 256|8| ResNet50| ImageNet |70.48|89.40|x|x|x|7076|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219-bf96e6cc.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.json)|
-|[tsm_r50_video_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_kinetics400_rgb.py) |短边 256|8| ResNet50| ImageNet |70.25|89.66|[70.36](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|[89.49](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh)|74.0 (8x1 frames)| 7077 | [ckpt]( https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_1x1x8_100e_kinetics400_rgb_20200702-a77f4328.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log.json)|
-|[tsm_r50_dense_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb.py) |短边 320|8| ResNet50 | ImageNet|73.46|90.84|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb/tsm_r50_dense_1x1x8_50e_kinetics400_rgb_20210701-a54ff3d3.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb/20210617_103245.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb/20210617_103245.log.json)|
-|[tsm_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py) |短边 320|8| ResNet50 | ImageNet|74.55|91.74|x|x|x|7079|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/tsm_r50_dense_1x1x8_100e_kinetics400_rgb_20210701-e3e5e97f.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20210613_034931.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20210613_034931.log.json)|
-|[tsm_r50_1x1x16_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py) |340x256|8| ResNet50| ImageNet |72.09|90.37|[70.67](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_16f.sh)|[89.98](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_16f.sh)|47.0 (16x1 frames)| 10404 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/tsm_r50_340x256_1x1x16_50e_kinetics400_rgb_20201011-2f27f229.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log.json)|
-|[tsm_r50_1x1x16_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py) |短边 256|8x4| ResNet50| ImageNet |71.89|90.73|x|x|x|10398|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/tsm_r50_256p_1x1x16_50e_kinetics400_rgb_20201010-85645c2a.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log.json)|
-|[tsm_r50_1x1x16_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb.py) |短边 320|8| ResNet50| ImageNet |72.80|90.75|x|x|x|10398|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb/tsm_r50_1x1x16_100e_kinetics400_rgb_20210701-41ac92b9.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb/20210618_193859.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb/20210618_193859.log.json)|
-|[tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb.py)|短边 320|8x4| ResNet50| ImageNet |72.03|90.25|71.81|90.36|x|8931|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb_20200724-f00f1336.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log.json)|
-|[tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb.py)|短边 320|8x4| ResNet50| ImageNet |70.70|89.90|x|x|x|10125|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb_20200816-b93fd297.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log.json)|
-|[tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb.py)|短边 320|8x4|ResNet50| ImageNet |71.60|90.34|x|x|x|8358|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb_20200724-d8ad84d2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log.json)|
-|[tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py)|短边 320|8|MobileNetV2| ImageNet |68.46|88.64|x|x|x|3385|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/tsm_mobilenetv2_dense_320p_1x1x8_100e_kinetics400_rgb_20210202-61135809.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log.json)|
-|[tsm_mobilenetv2_dense_1x1x8_kinetics400_rgb_port](/configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py)|短边 320|8|MobileNetV2| ImageNet |69.89|89.01|x|x|x|3385|[infer_ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_kinetics400_rgb_port_20210922-aa5cadf6.pth)|x|x|
+| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log | json |
+| :------------------------------------------------------------------------------------------------------------------------------------------- | :-----: | :----: | :---------: | :------: | :------: | :------: | :-----------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------: | :----------------: | :----------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------: |
+| [tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) | 340x256 | 8 | ResNet50 | ImageNet | 70.24 | 89.56 | [70.36](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh) | [89.49](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh) | 74.0 (8x1 frames) | 7079 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20200607-af7fb746.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20200607_211800.log.json) |
+| [tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) | 短边 256 | 8 | ResNet50 | ImageNet | 70.59 | 89.52 | x | x | x | 7079 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/tsm_r50_256p_1x1x8_50e_kinetics400_rgb_20200726-020785e2.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x8_50e_kinetics400_rgb/20200725_031623.log.json) |
+| [tsm_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) | 短边 320 | 8 | ResNet50 | ImageNet | 70.73 | 89.81 | x | x | x | 7079 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/tsm_r50_1x1x8_50e_kinetics400_rgb_20210701-68d582b4.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20210616_021451.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb/20210616_021451.log.json) |
+| [tsm_r50_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb.py) | 短边 320 | 8 | ResNet50 | ImageNet | 71.90 | 90.03 | x | x | x | 7079 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb/tsm_r50_1x1x8_100e_kinetics400_rgb_20210701-7ff22268.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb/20210617_103543.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_100e_kinetics400_rgb/20210617_103543.log.json) |
+| [tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py](/configs/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb.py) | 短边 256 | 8 | ResNet50 | ImageNet | 70.48 | 89.40 | x | x | x | 7076 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219-bf96e6cc.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb/tsm_r50_gpu_normalize_1x1x8_50e_kinetics400_rgb_20210219.json) |
+| [tsm_r50_video_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_kinetics400_rgb.py) | 短边 256 | 8 | ResNet50 | ImageNet | 70.25 | 89.66 | [70.36](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh) | [89.49](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_8f.sh) | 74.0 (8x1 frames) | 7077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_1x1x8_100e_kinetics400_rgb_20200702-a77f4328.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_2d_1x1x8_50e_kinetics400_rgb.log.json) |
+| [tsm_r50_dense_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb.py) | 短边 320 | 8 | ResNet50 | ImageNet | 73.46 | 90.84 | x | x | x | 7079 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb/tsm_r50_dense_1x1x8_50e_kinetics400_rgb_20210701-a54ff3d3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb/20210617_103245.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_50e_kinetics400_rgb/20210617_103245.log.json) |
+| [tsm_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb.py) | 短边 320 | 8 | ResNet50 | ImageNet | 74.55 | 91.74 | x | x | x | 7079 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/tsm_r50_dense_1x1x8_100e_kinetics400_rgb_20210701-e3e5e97f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20210613_034931.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_dense_1x1x8_100e_kinetics400_rgb/20210613_034931.log.json) |
+| [tsm_r50_1x1x16_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py) | 340x256 | 8 | ResNet50 | ImageNet | 72.09 | 90.37 | [70.67](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_16f.sh) | [89.98](https://github.com/mit-han-lab/temporal-shift-module/blob/8d53d6fda40bea2f1b37a6095279c4b454d672bd/scripts/train_tsm_kinetics_rgb_16f.sh) | 47.0 (16x1 frames) | 10404 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/tsm_r50_340x256_1x1x16_50e_kinetics400_rgb_20201011-2f27f229.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb/20201011_205356.log.json) |
+| [tsm_r50_1x1x16_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_kinetics400_rgb.py) | 短边 256 | 8x4 | ResNet50 | ImageNet | 71.89 | 90.73 | x | x | x | 10398 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/tsm_r50_256p_1x1x16_50e_kinetics400_rgb_20201010-85645c2a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_256p_1x1x16_50e_kinetics400_rgb/20201010_224825.log.json) |
+| [tsm_r50_1x1x16_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb.py) | 短边 320 | 8 | ResNet50 | ImageNet | 72.80 | 90.75 | x | x | x | 10398 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb/tsm_r50_1x1x16_100e_kinetics400_rgb_20210701-41ac92b9.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb/20210618_193859.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_100e_kinetics400_rgb/20210618_193859.log.json) |
+| [tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb.py) | 短边 320 | 8x4 | ResNet50 | ImageNet | 72.03 | 90.25 | 71.81 | 90.36 | x | 8931 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb_20200724-f00f1336.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_embedded_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200724_120023.log.json) |
+| [tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb.py) | 短边 320 | 8x4 | ResNet50 | ImageNet | 70.70 | 89.90 | x | x | x | 10125 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb_20200816-b93fd297.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_gaussian_r50_1x1x8_50e_kinetics400_rgb/20200815_210253.log.json) |
+| [tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb](/configs/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb.py) | 短边 320 | 8x4 | ResNet50 | ImageNet | 71.60 | 90.34 | x | x | x | 8358 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb_20200724-d8ad84d2.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_nl_dot_product_r50_1x1x8_50e_kinetics400_rgb/20200723_220442.log.json) |
+| [tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py) | 短边 320 | 8 | MobileNetV2 | ImageNet | 68.46 | 88.64 | x | x | x | 3385 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/tsm_mobilenetv2_dense_320p_1x1x8_100e_kinetics400_rgb_20210202-61135809.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb/20210129_024936.log.json) |
+| [tsm_mobilenetv2_dense_1x1x8_kinetics400_rgb_port](/configs/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_100e_kinetics400_rgb.py) | 短边 320 | 8 | MobileNetV2 | ImageNet | 69.89 | 89.01 | x | x | x | 3385 | [infer_ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_mobilenetv2_dense_1x1x8_kinetics400_rgb_port_20210922-aa5cadf6.pth) | x | x |

### Diving48

-|配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率| top5 准确率 | GPU 显存占用 (M) | ckpt | log| json|
-|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
-|[tsm_r50_video_1x1x8_50e_diving48_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb.py)| 8 | ResNet50 | ImageNet | 75.99 | 97.16 | 7070 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/tsm_r50_video_1x1x8_50e_diving48_rgb_20210426-aba5aa3d.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log.json)|
-|[tsm_r50_video_1x1x16_50e_diving48_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb.py)| 8 | ResNet50 | ImageNet | 81.62 | 97.66 | 7070 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/tsm_r50_video_1x1x16_50e_diving48_rgb_20210426-aa9631c0.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log.json)|
+| 配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | GPU 显存占用 (M) | ckpt | log | json |
+| :--------------------------------------------------------------------------------------------------------- | :----: | :------: | :------: | :------: | :------: | :----------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------: |
+| [tsm_r50_video_1x1x8_50e_diving48_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb.py) | 8 | ResNet50 | ImageNet | 75.99 | 97.16 | 7070 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/tsm_r50_video_1x1x8_50e_diving48_rgb_20210426-aba5aa3d.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log.json) |
+| [tsm_r50_video_1x1x16_50e_diving48_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb.py) | 8 | ResNet50 | ImageNet | 81.62 | 97.66 | 7070 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/tsm_r50_video_1x1x16_50e_diving48_rgb_20210426-aa9631c0.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log.json) |

### Something-Something V1

-|配置文件 | 分辨率 | GPU 数量 | 主干网络| 预训练 | top1 准确率 (efficient/accurate)| top5 准确率 (efficient/accurate)| 参考代码的 top1 准确率 (efficient/accurate)| 参考代码的 top5 准确率 (efficient/accurate)| GPU 显存占用 (M)| ckpt | log| json|
-|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
-|[tsm_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb.py) |高 100|8| ResNet50 | ImageNet| 45.58 / 47.70|75.02 / 76.12|[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7077| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/tsm_r50_1x1x8_50e_sthv1_rgb_20210203-01dce462.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/20210203_150227.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/20210203_150227.log.json)|
-|[tsm_r50_flip_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb.py) |高 100|8| ResNet50 | ImageNet| 47.10 / 48.51|76.02 / 77.56|[45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7077| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/tsm_r50_flip_1x1x8_50e_sthv1_rgb_20210203-12596f16.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/20210203_145829.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/20210203_145829.log.json)|
-|[tsm_r50_randaugment_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.py)| 高 100 | 8 | ResNet50 | ImageNet | 47.16 / 48.90 | 76.07 / 77.92 | [45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 7077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb_20210324-481268d9.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.json) |
-| [tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 47.65 / 48.66 | 76.67 / 77.41 | [45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 7077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb-ee93e5e3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.json) |
-| [tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 46.26 / 47.68 | 75.92 / 76.49 | [45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 7077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb-4f4f4740.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.json) |
-| [tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 47.85 / 50.31 | 76.78 / 78.18 | [45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) |7077|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb_20210324-76937692.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.json)|
-|[tsm_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb.py)|高 100|8| ResNet50 | ImageNet|47.77 / 49.03|76.82 / 77.83|[47.05 / 48.61](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[76.40 / 77.96](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|10390|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb_20211202-b922e5d2.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb.json)|
-|[tsm_r101_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb.py)|高 100|8| ResNet50 | ImageNet|46.09 / 48.59|75.41 / 77.10|[46.64 / 48.13](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[75.40 / 77.31](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|9800|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb_20201010-43fedf2e.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb.json)|
+| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 (efficient/accurate) | top5 准确率 (efficient/accurate) | 参考代码的 top1 准确率 (efficient/accurate) | 参考代码的 top5 准确率 (efficient/accurate) | GPU 显存占用 (M) | ckpt | log | json |
+| :----------------------------------------------------------------------------------------------------------------------- | :---: | :----: | :------: | :------: | :---------------------------: | :---------------------------: | :--------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------: | :----------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------: |
+| [tsm_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 45.58 / 47.70 | 75.02 / 76.12 | [45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 7077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/tsm_r50_1x1x8_50e_sthv1_rgb_20210203-01dce462.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/20210203_150227.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv1_rgb/20210203_150227.log.json) |
+| [tsm_r50_flip_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 47.10 / 48.51 | 76.02 / 77.56 | [45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 7077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/tsm_r50_flip_1x1x8_50e_sthv1_rgb_20210203-12596f16.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/20210203_145829.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_1x1x8_50e_sthv1_rgb/20210203_145829.log.json) |
+| [tsm_r50_randaugment_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 47.16 / 48.90 | 76.07 / 77.92 | [45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 7077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb_20210324-481268d9.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_randaugment_1x1x8_50e_sthv1_rgb.json) |
+| [tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 47.65 / 48.66 | 76.67 / 77.41 | [45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 7077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb-ee93e5e3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_randaugment_1x1x8_50e_sthv1_rgb.json) |
+| [tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 46.26 / 47.68 | 75.92 / 76.49 | [45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 7077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb-4f4f4740.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb/tsm_r50_ptv_augmix_1x1x8_50e_sthv1_rgb.json) |
+| [tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 47.85 / 50.31 | 76.78 / 78.18 | [45.50 / 47.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [74.34 / 76.60](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 7077 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb_20210324-76937692.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb/tsm_r50_flip_randaugment_1x1x8_50e_sthv1_rgb.json) |
+| [tsm_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 47.77 / 49.03 | 76.82 / 77.83 | [47.05 / 48.61](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [76.40 / 77.96](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 10390 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb_20211202-b922e5d2.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv1_rgb/tsm_r50_1x1x16_50e_sthv1_rgb.json) |
+| [tsm_r101_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet101 | ImageNet | 46.09 / 48.59 | 75.41 / 77.10 | [46.64 / 48.13](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [75.40 / 77.31](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 9800 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb_20201010-43fedf2e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv1_rgb/tsm_r101_1x1x8_50e_sthv1_rgb.json) |

### Something-Something V2

-|配置文件 | 分辨率 | GPU 数量 | 主干网络| 预训练 | top1 准确率 (efficient/accurate)| top5 准确率 (efficient/accurate)| 参考代码的 top1 准确率 (efficient/accurate)| 参考代码的 top5 准确率 (efficient/accurate)| GPU 显存占用 (M)| ckpt | log| json|
-|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|
-|[tsm_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py) |高 256|8| ResNet50| ImageNet |59.11 / 61.82|85.39 / 86.80|[xx / 61.2](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 7069 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/tsm_r50_256h_1x1x8_50e_sthv2_rgb_20210816-032aa4da.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210816_224310.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210816_224310.log.json)|
-|[tsm_r50_1x1x16_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py) |高 256|8| ResNet50| ImageNet |61.06 / 63.19|86.66 / 87.93|[xx / 63.1](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 10400 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/tsm_r50_256h_1x1x16_50e_sthv2_rgb_20210331-0a45549c.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20210331_134458.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20210331_134458.log.json)|
-|[tsm_r101_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb.py) |高 256|8| ResNet101 | ImageNet|60.88 / 63.84|86.56 / 88.30|[xx / 63.3](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 9727 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/tsm_r101_256h_1x1x8_50e_sthv2_rgb_20210401-df97f3e1.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20210401_143656.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20210401_143656.log.json)|
+| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 (efficient/accurate) | top5 准确率 (efficient/accurate) | 参考代码的 top1 准确率 (efficient/accurate) | 参考代码的 top5 准确率 (efficient/accurate) | GPU 显存占用 (M) | ckpt | log | json |
+| :--------------------------------------------------------------------------------------- | :---: | :----: | :-------: | :------: | :---------------------------: | :---------------------------: | :----------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------: | :----------: | :--------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------: |
+| [tsm_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb.py) | 高 256 | 8 | ResNet50 | ImageNet | 59.11 / 61.82 | 85.39 / 86.80 | [xx / 61.2](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 7069 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/tsm_r50_256h_1x1x8_50e_sthv2_rgb_20210816-032aa4da.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210816_224310.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_sthv2_rgb/20210816_224310.log.json) |
+| [tsm_r50_1x1x16_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb.py) | 高 256 | 8 | ResNet50 | ImageNet | 61.06 / 63.19 | 86.66 / 87.93 | [xx / 63.1](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 10400 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/tsm_r50_256h_1x1x16_50e_sthv2_rgb_20210331-0a45549c.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20210331_134458.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x16_50e_sthv2_rgb/20210331_134458.log.json) |
+| [tsm_r101_1x1x8_50e_sthv2_rgb](/configs/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb.py) | 高 256 | 8 | ResNet101 | ImageNet | 60.88 / 63.84 | 86.56 / 88.30 | [xx / 63.3](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [xx / xx](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 9727 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/tsm_r101_256h_1x1x8_50e_sthv2_rgb_20210401-df97f3e1.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20210401_143656.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r101_1x1x8_50e_sthv2_rgb/20210401_143656.log.json) |

### Diving48

-| 配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | GPU 显存占用 (M) | ckpt | log | json |
-| :----------------------------------------------------------- | :------: | :------: | :------: | :---------: | :---------: | :--------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: |
-| [tsm_r50_video_1x1x8_50e_diving48_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb.py) | 8 | ResNet50 | ImageNet | 75.99 | 97.16 | 7070 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/tsm_r50_video_1x1x8_50e_diving48_rgb_20210426-aba5aa3d.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log.json) |
-| [tsm_r50_video_1x1x16_50e_diving48_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb.py) | 8 | ResNet50 | ImageNet | 81.62 | 97.66 | 7070 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/tsm_r50_video_1x1x16_50e_diving48_rgb_20210426-aa9631c0.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log.json) |
+| 配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | GPU 显存占用 (M) | ckpt | log | json |
+| :--------------------------------------------------------------------------------------------------------- | :----: | :------: | :------: | :------: | :------: | :----------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------: |
+| [tsm_r50_video_1x1x8_50e_diving48_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb.py) | 8 | ResNet50 | ImageNet | 75.99 | 97.16 | 7070 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/tsm_r50_video_1x1x8_50e_diving48_rgb_20210426-aba5aa3d.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_50e_diving48_rgb/20210426_012424.log.json) |
+| [tsm_r50_video_1x1x16_50e_diving48_rgb](/configs/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb.py) | 8 | ResNet50 | ImageNet | 81.62 | 97.66 | 7070 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/tsm_r50_video_1x1x16_50e_diving48_rgb_20210426-aa9631c0.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x16_50e_diving48_rgb/20210426_012823.log.json) |

### MixUp & CutMix on Something-Something V1

-| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 (efficient/accurate) | top5 准确率 (efficient/accurate) | top1 准确率变化 (efficient/accurate) | top5 准确率变化 (efficient/accurate) | ckpt | log | json |
-| :----------------------------------------------------------- | :--------: | :--: | :------: | :------: | :---------------------------: | :---------------------------: | :---------------------------------: | :---------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: |
-| [tsm_r50_mixup_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 46.35 / 48.49 | 75.07 / 76.88 | +0.77 / +0.79 | +0.05 / +0.70 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb-9eca48e5.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.json) |
-| [tsm_r50_cutmix_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 45.92 / 47.46 | 75.23 / 76.71 | +0.34 / -0.24 | +0.21 / +0.59 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb-34934615.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.json) |
+| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 (efficient/accurate) | top5 准确率 (efficient/accurate) | top1 准确率变化 (efficient/accurate) | top5 准确率变化 (efficient/accurate) | ckpt | log | json |
+| :--------------------------------------------------------------------------------------------------- | :---: | :----: | :------: | :------: | :---------------------------: | :---------------------------: | :-----------------------------: | :-----------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------------: |
+| [tsm_r50_mixup_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 46.35 / 48.49 | 75.07 / 76.88 | +0.77 / +0.79 | +0.05 / +0.70 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb-9eca48e5.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_mixup_1x1x8_50e_sthv1_rgb/tsm_r50_mixup_1x1x8_50e_sthv1_rgb.json) |
+| [tsm_r50_cutmix_1x1x8_50e_sthv1_rgb](/configs/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 45.92 / 47.46 | 75.23 / 76.71 | +0.34 / -0.24 | +0.21 / +0.59 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb-34934615.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb/tsm_r50_cutmix_1x1x8_50e_sthv1_rgb.json) |

### Jester

-| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 (efficient/accurate) | ckpt | log | json |
-| ------------------------------------------------------------ | :----: | :------: | :------: | :------: | :------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: |
-| [tsm_r50_1x1x8_50e_jester_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 96.5 / 97.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb-c799267e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb.json) |
+| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 (efficient/accurate) | ckpt | log | json |
+| ---------------------------------------------------------------------------------------- | :---: | :----: | :------: | :------: | :---------------------------: | :------------------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------: |
+| [tsm_r50_1x1x8_50e_jester_rgb](/configs/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 96.5 / 97.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb-c799267e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_1x1x8_50e_jester_rgb/tsm_r50_1x1x8_50e_jester_rgb.json) |

### HMDB51

-| 配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | GPU 显存占用 (M) | ckpt | log | json |
-| :----------------------------------------------------------- | :------: | :------: | :---------: | :---------: | :---------: | :--------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: |
-| [tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb](/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb.py) | 8 | ResNet50 | Kinetics400 | 72.68 | 92.03 | 10388 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb_20210630-10c74ee5.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/20210605_182554.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/20210605_182554.log.json) |
-| [tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb](/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb.py) | 8 | ResNet50 | Kinetics400 | 74.77 | 93.86 | 10388 | 
[ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb_20210630-4785548e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/20210605_182505.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/20210605_182505.log.json) | +| 配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | GPU 显存占用 (M) | ckpt | log | json | +| :------------------------------------------------------------------------------------------------------------------------- | :----: | :------: | :---------: | :------: | :------: | :----------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------: | +| [tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb](/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb.py) | 8 | ResNet50 | Kinetics400 | 72.68 | 92.03 | 10388 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb_20210630-10c74ee5.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/20210605_182554.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_hmdb51_rgb/20210605_182554.log.json) | +| [tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb](/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb.py) | 8 | ResNet50 | Kinetics400 | 74.77 | 93.86 | 10388 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb_20210630-4785548e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/20210605_182505.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_hmdb51_rgb/20210605_182505.log.json) | ### UCF101 -| 配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | GPU 显存占用 (M) | ckpt | log | json | -| :----------------------------------------------------------- | :------: | :------: | :---------: | :---------: | :---------: | :--------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb](/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb.py) | 8 | ResNet50 | Kinetics400 | 94.50 | 99.58 | 10389 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb_20210630-1fae312b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/20210605_182720.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/20210605_182720.log.json) | -| 
[tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb](/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb.py) | 8 | ResNet50 | Kinetics400 | 94.58 | 99.37 | 10389 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb_20210630-8df9c358.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/20210605_182720.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/20210605_182720.log.json) | +| 配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | GPU 显存占用 (M) | ckpt | log | json | +| :------------------------------------------------------------------------------------------------------------------------- | :----: | :------: | :---------: | :------: | :------: | :----------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------: | +| [tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb](/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb.py) | 8 | ResNet50 | Kinetics400 | 94.50 | 99.58 | 10389 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb_20210630-1fae312b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/20210605_182720.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x8_25e_ucf101_rgb/20210605_182720.log.json) | +| [tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb](/configs/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb.py) | 8 | ResNet50 | Kinetics400 | 94.58 | 99.37 | 10389 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb_20210630-8df9c358.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/20210605_182720.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsm/tsm_k400_pretrained_r50_1x1x16_25e_ucf101_rgb/20210605_182720.log.json) | 注: @@ -163,7 +163,7 @@ python tools/train.py configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb. 
--validate --seed 0 --deterministic
```

-更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。
+更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E8%AE%AD%E7%BB%83%E9%85%8D%E7%BD%AE) 中的 **训练配置** 部分。

## 如何测试

@@ -181,4 +181,4 @@ python tools/test.py configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.p
--out result.json
```

-更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。
+更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86) 中的 **测试某个数据集** 部分。
diff --git a/configs/recognition/tsn/README.md b/configs/recognition/tsn/README.md
index 61656f7d71..c3c01dc6ee 100644
--- a/configs/recognition/tsn/README.md
+++ b/configs/recognition/tsn/README.md
@@ -11,6 +11,7 @@

 Deep convolutional networks have achieved great success for visual recognition in still images. However, for action recognition in videos, the advantage over traditional methods is not so evident. This paper aims to discover the principles to design effective ConvNet architectures for action recognition in videos and learn these models given limited training samples. Our first contribution is temporal segment network (TSN), a novel framework for video-based action recognition, which is based on the idea of long-range temporal structure modeling. It combines a sparse temporal sampling strategy and video-level supervision to enable efficient and effective learning using the whole action video. The other contribution is our study on a series of good practices in learning ConvNets on video data with the help of temporal segment network. Our approach obtains the state-of-the-art performance on the datasets of HMDB51 (69.4%) and UCF101 (94.2%). We also visualize the learned ConvNet models, which qualitatively demonstrates the effectiveness of temporal segment network and the proposed good practices.
+
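The abstract above hinges on two mechanisms, sparse temporal sampling and segmental consensus, which a few lines of code make concrete. The following is an illustrative sketch only, not code from this repository; the function name and the choice of average consensus are assumptions for demonstration.

```python
# Illustrative sketch of TSN-style sparse sampling + segmental consensus
# (not the MMAction2 implementation).
import torch


def sample_segment_indices(num_frames, num_segments):
    # Split the video into equal segments and take one snippet per segment
    # (the center frame here; training would draw a random offset instead).
    seg_len = num_frames / num_segments
    return [int(seg_len * i + seg_len / 2) for i in range(num_segments)]


indices = sample_segment_indices(num_frames=300, num_segments=3)  # [50, 150, 250]
snippet_scores = torch.randn(3, 400)       # dummy per-snippet class scores
video_scores = snippet_scores.mean(dim=0)  # average consensus -> video-level scores
```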
    @@ -19,47 +20,47 @@ Deep convolutional networks have achieved great success for visual recognition i ### UCF-101 -|config | gpus | backbone | pretrain | top1 acc| top5 acc | gpu_mem(M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tsn_r50_1x1x3_75e_ucf101_rgb](/configs/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb.py) [1] |8| ResNet50 | ImageNet |83.03|96.78|8332| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb/tsn_r50_1x1x3_75e_ucf101_rgb_20201023-d85ab600.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb/tsn_r50_1x1x3_75e_ucf101_rgb_20201023.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb/tsn_r50_1x1x3_75e_ucf101_rgb_20201023.json) | +| config | gpus | backbone | pretrain | top1 acc | top5 acc | gpu_mem(M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------------- | :--: | :------: | :------: | :------: | :------: | :--------: | :---------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r50_1x1x3_75e_ucf101_rgb](/configs/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb.py) \[1\] | 8 | ResNet50 | ImageNet | 83.03 | 96.78 | 8332 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb/tsn_r50_1x1x3_75e_ucf101_rgb_20201023-d85ab600.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb/tsn_r50_1x1x3_75e_ucf101_rgb_20201023.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb/tsn_r50_1x1x3_75e_ucf101_rgb_20201023.json) | -[1] We report the performance on UCF-101 split1. +\[1\] We report the performance on UCF-101 split1. 
### Diving48 -|config | gpus | backbone | pretrain | top1 acc| top5 acc | gpu_mem(M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tsn_r50_video_1x1x8_100e_diving48_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb.py)|8| ResNet50 | ImageNet | 71.27 | 95.74 | 5699 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/tsn_r50_video_1x1x8_100e_diving48_rgb_20210426-6dde0185.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/20210426_014138.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/20210426_014138.log.json)| -|[tsn_r50_video_1x1x16_100e_diving48_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb.py)|8| ResNet50 | ImageNet | 76.75 | 96.95 | 5705 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/tsn_r50_video_1x1x16_100e_diving48_rgb_20210426-63c5f2f7.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/20210426_014103.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/20210426_014103.log.json)| +| config | gpus | backbone | pretrain | top1 acc | top5 acc | gpu_mem(M) | ckpt | log | json | +| :----------------------------------------------------------------------------------------------------------- | :--: | :------: | :------: | :------: | :------: | :--------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r50_video_1x1x8_100e_diving48_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb.py) | 8 | ResNet50 | ImageNet | 71.27 | 95.74 | 5699 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/tsn_r50_video_1x1x8_100e_diving48_rgb_20210426-6dde0185.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/20210426_014138.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/20210426_014138.log.json) | +| [tsn_r50_video_1x1x16_100e_diving48_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb.py) | 8 | ResNet50 | ImageNet | 76.75 | 96.95 | 5705 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/tsn_r50_video_1x1x16_100e_diving48_rgb_20210426-63c5f2f7.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/20210426_014103.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/20210426_014103.log.json) | ### HMDB51 -|config | gpus | backbone | pretrain | top1 acc| top5 acc | gpu_mem(M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb.py)|8| ResNet50 | ImageNet | 48.95| 80.19| 21535| 
[ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb_20201123-ce6c27ed.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb/20201025_231108.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb/20201025_231108.log.json) | -|[tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb.py) |8| ResNet50 | Kinetics400 | 56.08 | 84.31 | 21535| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb_20201123-7f84701b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb/20201108_190805.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb/20201108_190805.log.json) | -|[tsn_r50_1x1x8_50e_hmdb51_mit_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb.py) |8| ResNet50 | Moments | 54.25 | 83.86| 21535| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb/tsn_r50_1x1x8_50e_hmdb51_mit_rgb_20201123-01526d41.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb/20201112_170135.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb/20201112_170135.log.json) | +| config | gpus | backbone | pretrain | top1 acc | top5 acc | gpu_mem(M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------------------------------- | :--: | :------: | :---------: | :------: | :------: | :--------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb.py) | 8 | ResNet50 | ImageNet | 48.95 | 80.19 | 21535 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb_20201123-ce6c27ed.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb/20201025_231108.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb/20201025_231108.log.json) | +| [tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb.py) | 8 | ResNet50 | Kinetics400 | 56.08 | 84.31 | 21535 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb_20201123-7f84701b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb/20201108_190805.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb/20201108_190805.log.json) | +| [tsn_r50_1x1x8_50e_hmdb51_mit_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb.py) | 8 | ResNet50 
| Moments | 54.25 | 83.86 | 21535 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb/tsn_r50_1x1x8_50e_hmdb51_mit_rgb_20201123-01526d41.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb/20201112_170135.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb/20201112_170135.log.json) | ### Kinetics-400 -|config | resolution | gpus | backbone|pretrain | top1 acc| top5 acc | reference top1 acc | reference top5 acc | inference_time(video/s) | gpu_mem(M)| ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tsn_r50_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py) |340x256|8| ResNet50 | ImageNet|70.60|89.26|x|x|4.3 (25x10 frames)|8344| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log.json)| -|[tsn_r50_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py) |short-side 256|8| ResNet50 | ImageNet|70.42|89.03|x|x|x|8343|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/tsn_r50_256p_1x1x3_100e_kinetics400_rgb_20200725-22592236.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log.json)| -|[tsn_r50_dense_1x1x5_50e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb.py) |340x256|8x3| ResNet50| ImageNet |70.18|89.10|[69.15](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[88.56](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|12.7 (8x10 frames)|7028| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb/tsn_r50_dense_1x1x5_100e_kinetics400_rgb_20200627-a063165f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb/20200627_105310.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb/20200627_105310.log.json)| -|[tsn_r50_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb.py) |short-side 320|8x2| ResNet50| ImageNet |70.91|89.51|x|x|10.7 (25x3 frames)| 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_320p_1x1x3_100e_kinetics400_rgb_20200702-cc665e2a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log.json)| -|[tsn_r50_320p_1x1x3_110e_kinetics400_flow](/configs/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow.py) |short-side 320|8x2| ResNet50 | ImageNet|55.70|79.85|x|x|x| 8471 | 
[ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow/tsn_r50_320p_1x1x3_110e_kinetics400_flow_20200705-3036bab6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow/tsn_r50_f3_kinetics400_flow_shortedge_55.7_79.9.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow/tsn_r50_f3_kinetics400_flow_shortedge_55.7_79.9.log.json)| -|tsn_r50_320p_1x1x3_kinetics400_twostream [1: 1]* |x|x| ResNet50 | ImageNet|72.76|90.52| x | x | x | x | x|x|x| -|[tsn_r50_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb.py)|short-side 256|8| ResNet50| ImageNet |71.80|90.17|x|x|x|8343|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x8_100e_kinetics400_rgb/tsn_r50_256p_1x1x8_100e_kinetics400_rgb_20200817-883baf16.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x8_100e_kinetics400_rgb/20200815_173413.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x8_100e_kinetics400_rgb/20200815_173413.log.json)| -|[tsn_r50_320p_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb.py) |short-side 320|8x3| ResNet50| ImageNet |72.41|90.55|x|x|11.1 (25x3 frames)| 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb/tsn_r50_320p_1x1x8_100e_kinetics400_rgb_20200702-ef80e3d7.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb/tsn_r50_f8_kinetics400_shortedge_72.4_90.6.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb/tsn_r50_f8_kinetics400_shortedge_72.4_90.6.log.json)| -|[tsn_r50_320p_1x1x8_110e_kinetics400_flow](/configs/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow.py) |short-side 320|8x4| ResNet50 | ImageNet|57.76|80.99|x|x|x| 8473 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow/tsn_r50_320p_1x1x8_110e_kinetics400_flow_20200705-1f39486b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow/tsn_r50_f8_kinetics400_flow_shortedge_57.8_81.0.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow/tsn_r50_f8_kinetics400_flow_shortedge_57.8_81.0.log.json)| -|tsn_r50_320p_1x1x8_kinetics400_twostream [1: 1]* |x|x| ResNet50| ImageNet |74.64|91.77| x | x | x | x | x|x|x| -|[tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py) |short-side 320|8| ResNet50 | ImageNet |71.11|90.04| x | x | x | 8343 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014-5ae1ee79.pth) |[log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014.json)| -|[tsn_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb.py) |340x256|8| ResNet50 | 
ImageNet|70.77|89.3|[68.75](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[88.42](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|12.2 (8x10 frames)|8344| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_dense_1x1x8_100e_kinetics400_rgb_20200606-e925e6e3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/20200606_003901.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/20200606_003901.log.json)| -|[tsn_r50_video_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet | 71.14 | 89.63 |x|x|x|21558| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_1x1x8_100e_kinetics400_rgb_20200702-568cde33.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log.json)| -|[tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb.py) |short-side 256|8| ResNet50| ImageNet | 70.40 | 89.12 |x|x|x|21553| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb_20200703-0f19175f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_dense_100e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_dense_100e_kinetics400_rgb.log.json)| - -Here, We use [1: 1] to indicate that we combine rgb and flow score with coefficients 1: 1 to get the two-stream prediction (without applying softmax). 
+| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | reference top1 acc | reference top5 acc | inference_time(video/s) | gpu_mem(M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------: | :------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------: | :---------------------: | :--------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r50_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py) | 340x256 | 8 | ResNet50 | ImageNet | 70.60 | 89.26 | x | x | 4.3 (25x10 frames) | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log.json) | +| [tsn_r50_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py) | short-side 256 | 8 | ResNet50 | ImageNet | 70.42 | 89.03 | x | x | x | 8343 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/tsn_r50_256p_1x1x3_100e_kinetics400_rgb_20200725-22592236.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log.json) | +| [tsn_r50_dense_1x1x5_50e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb.py) | 340x256 | 8x3 | ResNet50 | ImageNet | 70.18 | 89.10 | [69.15](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [88.56](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 12.7 (8x10 frames) | 7028 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb/tsn_r50_dense_1x1x5_100e_kinetics400_rgb_20200627-a063165f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb/20200627_105310.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb/20200627_105310.log.json) | +| [tsn_r50_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | 8x2 | ResNet50 | ImageNet | 70.91 | 89.51 | x | x | 10.7 (25x3 frames) | 8344 | 
[ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_320p_1x1x3_100e_kinetics400_rgb_20200702-cc665e2a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log.json) | +| [tsn_r50_320p_1x1x3_110e_kinetics400_flow](/configs/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow.py) | short-side 320 | 8x2 | ResNet50 | ImageNet | 55.70 | 79.85 | x | x | x | 8471 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow/tsn_r50_320p_1x1x3_110e_kinetics400_flow_20200705-3036bab6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow/tsn_r50_f3_kinetics400_flow_shortedge_55.7_79.9.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow/tsn_r50_f3_kinetics400_flow_shortedge_55.7_79.9.log.json) | +| tsn_r50_320p_1x1x3_kinetics400_twostream \[1: 1\]\* | x | x | ResNet50 | ImageNet | 72.76 | 90.52 | x | x | x | x | x | x | x | +| [tsn_r50_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb.py) | short-side 256 | 8 | ResNet50 | ImageNet | 71.80 | 90.17 | x | x | x | 8343 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x8_100e_kinetics400_rgb/tsn_r50_256p_1x1x8_100e_kinetics400_rgb_20200817-883baf16.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x8_100e_kinetics400_rgb/20200815_173413.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x8_100e_kinetics400_rgb/20200815_173413.log.json) | +| [tsn_r50_320p_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb.py) | short-side 320 | 8x3 | ResNet50 | ImageNet | 72.41 | 90.55 | x | x | 11.1 (25x3 frames) | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb/tsn_r50_320p_1x1x8_100e_kinetics400_rgb_20200702-ef80e3d7.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb/tsn_r50_f8_kinetics400_shortedge_72.4_90.6.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb/tsn_r50_f8_kinetics400_shortedge_72.4_90.6.log.json) | +| [tsn_r50_320p_1x1x8_110e_kinetics400_flow](/configs/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow.py) | short-side 320 | 8x4 | ResNet50 | ImageNet | 57.76 | 80.99 | x | x | x | 8473 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow/tsn_r50_320p_1x1x8_110e_kinetics400_flow_20200705-1f39486b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow/tsn_r50_f8_kinetics400_flow_shortedge_57.8_81.0.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow/tsn_r50_f8_kinetics400_flow_shortedge_57.8_81.0.log.json) | +| tsn_r50_320p_1x1x8_kinetics400_twostream \[1: 1\]\* | x | x | ResNet50 | ImageNet | 74.64 | 91.77 | x | x | x | x | x | x | x | +| [tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py) | 
short-side 320 | 8 | ResNet50 | ImageNet | 71.11 | 90.04 | x | x | x | 8343 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014-5ae1ee79.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014.json) |
| [tsn_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb.py) | 340x256 | 8 | ResNet50 | ImageNet | 70.77 | 89.3 | [68.75](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [88.42](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 12.2 (8x10 frames) | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_dense_1x1x8_100e_kinetics400_rgb_20200606-e925e6e3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/20200606_003901.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/20200606_003901.log.json) |
| [tsn_r50_video_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py) | short-side 256 | 8 | ResNet50 | ImageNet | 71.14 | 89.63 | x | x | x | 21558 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_1x1x8_100e_kinetics400_rgb_20200702-568cde33.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log.json) |
| [tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb.py) | short-side 256 | 8 | ResNet50 | ImageNet | 70.40 | 89.12 | x | x | x | 21553 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb_20200703-0f19175f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_dense_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_dense_100e_kinetics400_rgb.log.json) |

Here, we use \[1: 1\] to indicate that we combine the RGB and flow scores with coefficients 1:1 to get the two-stream prediction (without applying softmax).
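The \[1: 1\] fusion above is plain late fusion of the two models' scores; a minimal sketch, with dummy vectors standing in for the real per-video outputs of the RGB and flow networks:

```python
# Minimal sketch of the 1:1 two-stream score fusion described above
# (dummy scores; in practice these are the saved per-video model outputs).
import numpy as np

rgb_scores = np.random.randn(400)   # pre-softmax class scores, RGB model
flow_scores = np.random.randn(400)  # pre-softmax class scores, flow model

# Combine with coefficients 1:1, without applying softmax first:
two_stream_scores = 1.0 * rgb_scores + 1.0 * flow_scores
prediction = int(two_stream_scores.argmax())
```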
### Using backbones from 3rd-party in TSN @@ -69,11 +70,11 @@ It's possible and convenient to use a 3rd-party backbone for TSN under the frame - [x] Backbones from [TorchVision](https://github.com/pytorch/vision/) - [x] Backbones from [TIMM (pytorch-image-models)](https://github.com/rwightman/pytorch-image-models) -| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | ckpt | log | json | -| :----------------------------------------------------------- | :------------: | :--: | :----------------------------------------------------------: | :------: | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | 8x2 | ResNeXt101-32x4d [[MMCls](https://github.com/open-mmlab/mmclassification/tree/master/configs/resnext)] | ImageNet | 73.43 | 91.01 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb-16a8b561.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.json) | -| [tsn_dense161_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | 8x2 | Densenet-161 [[TorchVision](https://github.com/pytorch/vision/)] | ImageNet | 72.78 | 90.75 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb-cbe85332.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.json) | -| [tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | 8 | Swin Transformer Base [[timm](https://github.com/rwightman/pytorch-image-models)] | ImageNet | 77.51 | 92.92 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb-805380f6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.json) | +| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | ckpt | log | json | +| :-------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :------------: | :--: | 
:------------------------------------------------------------------------------------------------------: | :------: | :------: | :------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: |
+| [tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | 8x2 | ResNeXt101-32x4d \[[MMCls](https://github.com/open-mmlab/mmclassification/tree/master/configs/resnext)\] | ImageNet | 73.43 | 91.01 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb-16a8b561.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.json) |
+| [tsn_dense161_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | 8x2 | Densenet-161 \[[TorchVision](https://github.com/pytorch/vision/)\] | ImageNet | 72.78 | 90.75 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb-cbe85332.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.json) |
+| [tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | 8 | Swin Transformer Base \[[timm](https://github.com/rwightman/pytorch-image-models)\] | ImageNet | 77.51 | 92.92 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb-805380f6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.json) |

1. Note that some backbones in TIMM are not supported due to multiple reasons. Please refer to [PR #880](https://github.com/open-mmlab/mmaction2/pull/880) for details.
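As a concrete illustration of the table above, the sketch below shows the config style used to plug a 3rd-party backbone into TSN. It is a minimal sketch, not a verbatim copy of the linked config files: the `_base_` path is a hypothetical placeholder and `in_channels=2208` is simply DenseNet-161's feature dimension; refer to the linked configs for the exact settings.

```python
# Minimal sketch of swapping in a 3rd-party backbone (illustrative only;
# see the config files linked in the table above for the real settings).
_base_ = ['./tsn_r50_1x1x3_100e_kinetics400_rgb.py']  # hypothetical base config

model = dict(
    # The library prefix in `type` (e.g. `torchvision.`, `timm.`, `mmcls.`)
    # selects where the backbone implementation comes from.
    backbone=dict(type='torchvision.densenet161', pretrained=True),
    # The classification head must match the new backbone's output channels.
    cls_head=dict(in_channels=2208))
```

@@ -85,94 +86,94 @@ In data benchmark, we compare:
2. Different data augmentation methods: (1) MultiScaleCrop, (2) RandomResizedCrop;
3.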
Different testing protocols: (1) 25 frames x 10 crops, (2) 25 frames x 3 crops. -| config | resolution | training augmentation | testing protocol | top1 acc | top5 acc | ckpt | log | json | -| :----------------------------------------------------------: | :------------: | :-------------------: | :--------------: | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [tsn_r50_multiscalecrop_340x256_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_340x256_1x1x3_100e_kinetics400_rgb.py) | 340x256 | MultiScaleCrop | 25x10 frames | 70.60 | 89.26 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log.json) | -| x | 340x256 | MultiScaleCrop | 25x3 frames | 70.52 | 89.39 | x | x | x | +| config | resolution | training augmentation | testing protocol | top1 acc | top5 acc | ckpt | log | json | +| :---------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :------------: | :-------------------: | :--------------: | :------: | :------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r50_multiscalecrop_340x256_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_340x256_1x1x3_100e_kinetics400_rgb.py) | 340x256 | MultiScaleCrop | 25x10 frames | 70.60 | 89.26 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log.json) | +| x | 340x256 | MultiScaleCrop | 25x3 frames | 70.52 | 89.39 | x | x | x | | [tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb.py) | 340x256 | RandomResizedCrop | 25x10 frames | 70.11 | 89.01 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb_20200725-88cb325a.pth) | 
[log](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb_20200725.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb_20200725.json) | -| x | 340x256 | RandomResizedCrop | 25x3 frames | 69.95 | 89.02 | x | x | x | -| [tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | MultiScaleCrop | 25x10 frames | 70.32 | 89.25 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb_20200725-9922802f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb_20200725.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb_20200725.json) | -| x | short-side 320 | MultiScaleCrop | 25x3 frames | 70.54 | 89.39 | x | x | x | -| [tsn_r50_randomresizedcrop_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | RandomResizedCrop | 25x10 frames | 70.44 | 89.23 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_320p_1x1x3_100e_kinetics400_rgb_20200702-cc665e2a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log.json) | -| x | short-side 320 | RandomResizedCrop | 25x3 frames | 70.91 | 89.51 | x | x | x | -| [tsn_r50_multiscalecrop_256p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_256p_1x1x3_100e_kinetics400_rgb.py) | short-side 256 | MultiScaleCrop | 25x10 frames | 70.42 | 89.03 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/tsn_r50_256p_1x1x3_100e_kinetics400_rgb_20200725-22592236.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log.json)| -| x | short-side 256 | MultiScaleCrop | 25x3 frames | 70.79 | 89.42 | x | x | x | -| [tsn_r50_randomresizedcrop_256p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_256p_1x1x3_100e_kinetics400_rgb.py) | short-side 256 | RandomResizedCrop | 25x10 frames | 69.80 | 89.06 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_randomresize_1x1x3_100e_kinetics400_rgb/tsn_r50_256p_randomresize_1x1x3_100e_kinetics400_rgb_20200817-ae7963ca.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_randomresize_1x1x3_100e_kinetics400_rgb/20200815_172601.log) | 
[json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_randomresize_1x1x3_100e_kinetics400_rgb/20200815_172601.log.json)| -| x | short-side 256 | RandomResizedCrop | 25x3 frames | 70.48 | 89.89 | x | x | x | +| x | 340x256 | RandomResizedCrop | 25x3 frames | 69.95 | 89.02 | x | x | x | +| [tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | MultiScaleCrop | 25x10 frames | 70.32 | 89.25 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb_20200725-9922802f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb_20200725.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb_20200725.json) | +| x | short-side 320 | MultiScaleCrop | 25x3 frames | 70.54 | 89.39 | x | x | x | +| [tsn_r50_randomresizedcrop_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | RandomResizedCrop | 25x10 frames | 70.44 | 89.23 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_320p_1x1x3_100e_kinetics400_rgb_20200702-cc665e2a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log.json) | +| x | short-side 320 | RandomResizedCrop | 25x3 frames | 70.91 | 89.51 | x | x | x | +| [tsn_r50_multiscalecrop_256p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_256p_1x1x3_100e_kinetics400_rgb.py) | short-side 256 | MultiScaleCrop | 25x10 frames | 70.42 | 89.03 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/tsn_r50_256p_1x1x3_100e_kinetics400_rgb_20200725-22592236.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log.json) | +| x | short-side 256 | MultiScaleCrop | 25x3 frames | 70.79 | 89.42 | x | x | x | +| [tsn_r50_randomresizedcrop_256p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_256p_1x1x3_100e_kinetics400_rgb.py) | short-side 256 | RandomResizedCrop | 25x10 frames | 69.80 | 89.06 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_randomresize_1x1x3_100e_kinetics400_rgb/tsn_r50_256p_randomresize_1x1x3_100e_kinetics400_rgb_20200817-ae7963ca.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_randomresize_1x1x3_100e_kinetics400_rgb/20200815_172601.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_randomresize_1x1x3_100e_kinetics400_rgb/20200815_172601.log.json) | +| x | short-side 256 | RandomResizedCrop | 25x3 frames | 70.48 | 89.89 | 
x | x | x | ### Kinetics-400 OmniSource Experiments -| config | resolution | backbone | pretrain | w. OmniSource | top1 acc | top5 acc | inference_time(video/s) | gpu_mem(M) | ckpt | log | json | -| :----------------------------------------------------------: | :------------: | :------: | :-------: | :----------------: | :------: | :------: | :---------------------: | :--------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [tsn_r50_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py) | 340x256 | ResNet50 | ImageNet | :x: | 70.6 | 89.3 | 4.3 (25x10 frames) | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log.json) | -| x | 340x256 | ResNet50 | ImageNet | :heavy_check_mark: | 73.6 | 91.0 | x | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_imagenet_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-54192355.pth) | x | x | -| x | short-side 320 | ResNet50 | IG-1B [1] | :x: | 73.1 | 90.4 | x | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_without_omni_1x1x3_kinetics400_rgb_20200926-c133dd49.pth) | x | x | -| x | short-side 320 | ResNet50 | IG-1B [1] | :heavy_check_mark: | 75.7 | 91.9 | x | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-2863fed0.pth) | x | x | +| config | resolution | backbone | pretrain | w. 
OmniSource | top1 acc | top5 acc | inference_time(video/s) | gpu_mem(M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------------------: | :------------: | :------: | :---------: | :----------------: | :------: | :------: | :---------------------: | :--------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r50_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py) | 340x256 | ResNet50 | ImageNet | :x: | 70.6 | 89.3 | 4.3 (25x10 frames) | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log.json) | +| x | 340x256 | ResNet50 | ImageNet | :heavy_check_mark: | 73.6 | 91.0 | x | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_imagenet_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-54192355.pth) | x | x | +| x | short-side 320 | ResNet50 | IG-1B \[1\] | :x: | 73.1 | 90.4 | x | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_without_omni_1x1x3_kinetics400_rgb_20200926-c133dd49.pth) | x | x | +| x | short-side 320 | ResNet50 | IG-1B \[1\] | :heavy_check_mark: | 75.7 | 91.9 | x | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-2863fed0.pth) | x | x | -[1] We obtain the pre-trained model from [torch-hub](https://pytorch.org/hub/facebookresearch_semi-supervised-ImageNet1K-models_resnext/), the pretrain model we used is `resnet50_swsl` +\[1\] We obtain the pre-trained model from [torch-hub](https://pytorch.org/hub/facebookresearch_semi-supervised-ImageNet1K-models_resnext/); the pretrained model we used is `resnet50_swsl` (a loading sketch is given after the HVU notes below). ### Kinetics-600 -| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | inference_time(video/s) | gpu_mem(M) | ckpt | log | json | -| :----------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------: | :---------------------: | :--------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | inference_time(video/s) | gpu_mem(M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------: | :---------------------: | :--------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------: |
:-----------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------: | | [tsn_r50_video_1x1x8_100e_kinetics600_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb.py) | short-side 256 | 8x2 | ResNet50 | ImageNet | 74.8 | 92.3 | 11.1 (25x3 frames) | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb/tsn_r50_video_1x1x8_100e_kinetics600_rgb_20201015-4db3c461.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb/tsn_r50_video_1x1x8_100e_kinetics600_rgb_20201015.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb/tsn_r50_video_1x1x8_100e_kinetics600_rgb_20201015.json) | ### Kinetics-700 -| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | inference_time(video/s) | gpu_mem(M) | ckpt | log | json | -| :----------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------: | :---------------------: | :--------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | inference_time(video/s) | gpu_mem(M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------: | :---------------------: | :--------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------: | | [tsn_r50_video_1x1x8_100e_kinetics700_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb.py) | short-side 256 | 8x2 | ResNet50 | ImageNet | 61.7 | 83.6 | 11.1 (25x3 frames) | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb/tsn_r50_video_1x1x8_100e_kinetics700_rgb_20201015-e381a6c7.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb/tsn_r50_video_1x1x8_100e_kinetics700_rgb_20201015.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb/tsn_r50_video_1x1x8_100e_kinetics700_rgb_20201015.json) | ### Something-Something V1 -|config|resolution | gpus| backbone |pretrain| top1 acc| top5 acc | reference top1 acc | reference top5 acc | gpu_mem(M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tsn_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb.py)|height 100 |8| ResNet50 | ImageNet|18.55 |44.80 
|[17.53](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[44.29](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 10978 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_r50_1x1x8_50e_sthv1_rgb_20200618-061b9195.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_sthv1.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_r50_f8_sthv1_18.1_45.0.log.json)| -|[tsn_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb.py)| height 100 |8| ResNet50| ImageNet |15.77 |39.85 |[13.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[35.58](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 5691 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/tsn_r50_1x1x16_50e_sthv1_rgb_20200614-7e2fe4f1.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/20200614_211932.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/20200614_211932.log.json)| +| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | reference top1 acc | reference top5 acc | gpu_mem(M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------- | :--------: | :--: | :------: | :------: | :------: | :------: | :------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------: | :--------: | :---------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 18.55 | 44.80 | [17.53](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [44.29](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 10978 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_r50_1x1x8_50e_sthv1_rgb_20200618-061b9195.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_sthv1.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_r50_f8_sthv1_18.1_45.0.log.json) | +| [tsn_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb.py) | height 100 | 8 | ResNet50 | ImageNet | 15.77 | 39.85 | [13.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [35.58](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 5691 | 
[ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/tsn_r50_1x1x16_50e_sthv1_rgb_20200614-7e2fe4f1.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/20200614_211932.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/20200614_211932.log.json) | ### Something-Something V2 -|config |resolution| gpus| backbone| pretrain | top1 acc| top5 acc | reference top1 acc | reference top5 acc | gpu_mem(M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tsn_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb.py)|height 256 |8| ResNet50| ImageNet |28.59 |59.56 | x | x | 10966 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/tsn_r50_1x1x8_50e_sthv2_rgb_20210816-1aafee8f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20210816_221116.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20210816_221116.log.json)| -|[tsn_r50_1x1x16_50e_sthv2_rgb](/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb.py)| height 256 |8|ResNet50| ImageNet |20.89 |49.16 | x | x |8337| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/tsn_r50_1x1x16_50e_sthv2_rgb_20210816-5d23ac6e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20210816_225256.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20210816_225256.log.json)| +| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | reference top1 acc | reference top5 acc | gpu_mem(M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------- | :--------: | :--: | :------: | :------: | :------: | :------: | :----------------: | :----------------: | :--------: | :---------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------: | +| [tsn_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb.py) | height 256 | 8 | ResNet50 | ImageNet | 28.59 | 59.56 | x | x | 10966 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/tsn_r50_1x1x8_50e_sthv2_rgb_20210816-1aafee8f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20210816_221116.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20210816_221116.log.json) | +| [tsn_r50_1x1x16_50e_sthv2_rgb](/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb.py) | height 256 | 8 | ResNet50 | ImageNet | 20.89 | 49.16 | x | x | 8337 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/tsn_r50_1x1x16_50e_sthv2_rgb_20210816-5d23ac6e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20210816_225256.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20210816_225256.log.json) | ### Moments in Time -|config |resolution| gpus| 
backbone | pretrain | top1 acc| top5 acc | gpu_mem(M)| ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tsn_r50_1x1x6_100e_mit_rgb](/configs/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb.py)|short-side 256 |8x2| ResNet50| ImageNet |26.84|51.6| 8339| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb/tsn_r50_1x1x6_100e_mit_rgb_20200618-d512ab1b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb/tsn_mit.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb/tsn_r50_f6_mit_26.8_51.6.log.json)| +| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | gpu_mem(M) | ckpt | log | json | +| :----------------------------------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------: | :--------: | :-----------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r50_1x1x6_100e_mit_rgb](/configs/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb.py) | short-side 256 | 8x2 | ResNet50 | ImageNet | 26.84 | 51.6 | 8339 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb/tsn_r50_1x1x6_100e_mit_rgb_20200618-d512ab1b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb/tsn_mit.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb/tsn_r50_f6_mit_26.8_51.6.log.json) | ### Multi-Moments in Time -|config | resolution|gpus| backbone | pretrain | mAP| gpu_mem(M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tsn_r101_1x1x5_50e_mmit_rgb](/configs/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb.py)|short-side 256 |8x2| ResNet101| ImageNet |61.09| 10467 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb/tsn_r101_1x1x5_50e_mmit_rgb_20200618-642f450d.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb/tsn_mmit.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb/tsn_r101_f6_mmit_61.1.log.json)| +| config | resolution | gpus | backbone | pretrain | mAP | gpu_mem(M) | ckpt | log | json | +| :------------------------------------------------------------------------------------- | :------------: | :--: | :-------: | :------: | :---: | :--------: | :-------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r101_1x1x5_50e_mmit_rgb](/configs/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb.py) | short-side 256 | 8x2 | ResNet101 | ImageNet | 61.09 | 10467 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb/tsn_r101_1x1x5_50e_mmit_rgb_20200618-642f450d.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb/tsn_mmit.log) | 
[json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb/tsn_r101_f6_mmit_61.1.log.json) | ### ActivityNet v1.3 -| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | gpu_mem(M) | ckpt | log | json | -| :----------------------------------------------------------- | :--------: | :--: | :------: | :---------: | :------: | :------: | :--------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [tsn_r50_320p_1x1x8_50e_activitynet_video_rgb](/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb.py) | short-side 320 | 8x1 | ResNet50 | Kinetics400 | 73.93 | 93.44 | 5692 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb_20210301-7f8da0c6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb/20210228_223327.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb/20210228_223327.log.json) | -| [tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb](/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb.py) | short-side 320 | 8x1 | ResNet50 | Kinetics400 | 76.90 | 94.47 | 5692 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb_20210301-c0f04a7e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb/20210217_181313.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb/20210217_181313.log.json) | -| [tsn_r50_320p_1x1x8_150e_activitynet_video_flow](/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow.py) | 340x256 | 8x2 | ResNet50 | Kinetics400 | 57.51 | 83.02 | 5780 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow/tsn_r50_320p_1x1x8_150e_activitynet_video_flow_20200804-13313f52.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow/tsn_r50_320p_1x1x8_150e_activitynet_video_flow_20200804.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow/tsn_r50_320p_1x1x8_150e_activitynet_video_flow_20200804.json) | -| [tsn_r50_320p_1x1x8_150e_activitynet_clip_flow](/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow.py) | 340x256 | 8x2 | ResNet50 | Kinetics400 | 59.51 | 82.69 | 5780 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow_20200804-8622cf38.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow_20200804.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow_20200804.json) | +| config | resolution | gpus | backbone | pretrain | top1 acc | top5 acc | gpu_mem(M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------------------------------------------- | 
:------------: | :--: | :------: | :---------: | :------: | :------: | :--------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r50_320p_1x1x8_50e_activitynet_video_rgb](/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb.py) | short-side 320 | 8x1 | ResNet50 | Kinetics400 | 73.93 | 93.44 | 5692 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb_20210301-7f8da0c6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb/20210228_223327.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb/20210228_223327.log.json) | +| [tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb](/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb.py) | short-side 320 | 8x1 | ResNet50 | Kinetics400 | 76.90 | 94.47 | 5692 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb_20210301-c0f04a7e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb/20210217_181313.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb/20210217_181313.log.json) | +| [tsn_r50_320p_1x1x8_150e_activitynet_video_flow](/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow.py) | 340x256 | 8x2 | ResNet50 | Kinetics400 | 57.51 | 83.02 | 5780 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow/tsn_r50_320p_1x1x8_150e_activitynet_video_flow_20200804-13313f52.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow/tsn_r50_320p_1x1x8_150e_activitynet_video_flow_20200804.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow/tsn_r50_320p_1x1x8_150e_activitynet_video_flow_20200804.json) | +| [tsn_r50_320p_1x1x8_150e_activitynet_clip_flow](/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow.py) | 340x256 | 8x2 | ResNet50 | Kinetics400 | 59.51 | 82.69 | 5780 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow_20200804-8622cf38.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow_20200804.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow_20200804.json) | ### HVU -| config[1] | tag category | resolution | gpus | backbone | pretrain | mAP | HATNet[2] | HATNet-multi[2] | ckpt | log | json | -| :----------------------------------------------------------: | :----------: | 
:------------: | :--: | :------: | :------: | :--: | :-------: | :-------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [tsn_r18_1x1x8_100e_hvu_action_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_action_rgb.py) | action | short-side 256 | 8x2 | ResNet18 | ImageNet | 57.5 | 51.8 | 53.5 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/action/tsn_r18_1x1x8_100e_hvu_action_rgb_20201027-011b282b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/action/tsn_r18_1x1x8_100e_hvu_action_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/action/tsn_r18_1x1x8_100e_hvu_action_rgb_20201027.json) | -| [tsn_r18_1x1x8_100e_hvu_scene_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_scene_rgb.py) | scene | short-side 256 | 8 | ResNet18 | ImageNet | 55.2 | 55.8 | 57.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/scene/tsn_r18_1x1x8_100e_hvu_scene_rgb_20201027-00e5748d.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/scene/tsn_r18_1x1x8_100e_hvu_scene_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/scene/tsn_r18_1x1x8_100e_hvu_scene_rgb_20201027.json) | -| [tsn_r18_1x1x8_100e_hvu_object_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_object_rgb.py) | object | short-side 256 | 8 | ResNet18 | ImageNet | 45.7 | 34.2 | 35.1 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/object/tsn_r18_1x1x8_100e_hvu_object_rgb_20201102-24a22f30.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/object/tsn_r18_1x1x8_100e_hvu_object_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/object/tsn_r18_1x1x8_100e_hvu_object_rgb_20201027.json) | -| [tsn_r18_1x1x8_100e_hvu_event_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_event_rgb.py) | event | short-side 256 | 8 | ResNet18 | ImageNet | 63.7 | 38.5 | 39.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/event/tsn_r18_1x1x8_100e_hvu_event_rgb_20201027-dea8cd71.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/event/tsn_r18_1x1x8_100e_hvu_event_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/event/tsn_r18_1x1x8_100e_hvu_event_rgb_20201027.json) | -| [tsn_r18_1x1x8_100e_hvu_concept_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_concept_rgb.py) | concept | short-side 256 | 8 | ResNet18 | ImageNet | 47.5 | 26.1 | 27.3 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/concept/tsn_r18_1x1x8_100e_hvu_concept_rgb_20201027-fc1dd8e3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/concept/tsn_r18_1x1x8_100e_hvu_concept_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/concept/tsn_r18_1x1x8_100e_hvu_concept_rgb_20201027.json) | -| [tsn_r18_1x1x8_100e_hvu_attribute_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_attribute_rgb.py) | attribute | short-side 256 | 8 | ResNet18 | ImageNet | 46.1 | 33.6 | 34.9 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/attribute/tsn_r18_1x1x8_100e_hvu_attribute_rgb_20201027-0b3b49d2.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/attribute/tsn_r18_1x1x8_100e_hvu_attribute_rgb_20201027.log) | 
[json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/attribute/tsn_r18_1x1x8_100e_hvu_attribute_rgb_20201027.json) | -| - | Overall | short-side 256 | - | ResNet18 | ImageNet | 52.6 | 40.0 | 41.3 | - | - | - | +| config\[1\] | tag category | resolution | gpus | backbone | pretrain | mAP | HATNet\[2\] | HATNet-multi\[2\] | ckpt | log | json | +| :----------------------------------------------------------------------------------------------------------: | :----------: | :------------: | :--: | :------: | :------: | :--: | :---------: | :---------------: | :--------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r18_1x1x8_100e_hvu_action_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_action_rgb.py) | action | short-side 256 | 8x2 | ResNet18 | ImageNet | 57.5 | 51.8 | 53.5 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/action/tsn_r18_1x1x8_100e_hvu_action_rgb_20201027-011b282b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/action/tsn_r18_1x1x8_100e_hvu_action_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/action/tsn_r18_1x1x8_100e_hvu_action_rgb_20201027.json) | +| [tsn_r18_1x1x8_100e_hvu_scene_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_scene_rgb.py) | scene | short-side 256 | 8 | ResNet18 | ImageNet | 55.2 | 55.8 | 57.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/scene/tsn_r18_1x1x8_100e_hvu_scene_rgb_20201027-00e5748d.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/scene/tsn_r18_1x1x8_100e_hvu_scene_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/scene/tsn_r18_1x1x8_100e_hvu_scene_rgb_20201027.json) | +| [tsn_r18_1x1x8_100e_hvu_object_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_object_rgb.py) | object | short-side 256 | 8 | ResNet18 | ImageNet | 45.7 | 34.2 | 35.1 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/object/tsn_r18_1x1x8_100e_hvu_object_rgb_20201102-24a22f30.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/object/tsn_r18_1x1x8_100e_hvu_object_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/object/tsn_r18_1x1x8_100e_hvu_object_rgb_20201027.json) | +| [tsn_r18_1x1x8_100e_hvu_event_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_event_rgb.py) | event | short-side 256 | 8 | ResNet18 | ImageNet | 63.7 | 38.5 | 39.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/event/tsn_r18_1x1x8_100e_hvu_event_rgb_20201027-dea8cd71.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/event/tsn_r18_1x1x8_100e_hvu_event_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/event/tsn_r18_1x1x8_100e_hvu_event_rgb_20201027.json) | +| [tsn_r18_1x1x8_100e_hvu_concept_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_concept_rgb.py) | concept | short-side 256 | 8 | ResNet18 | ImageNet | 47.5 | 26.1 | 27.3 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/concept/tsn_r18_1x1x8_100e_hvu_concept_rgb_20201027-fc1dd8e3.pth) | 
[log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/concept/tsn_r18_1x1x8_100e_hvu_concept_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/concept/tsn_r18_1x1x8_100e_hvu_concept_rgb_20201027.json) | +| [tsn_r18_1x1x8_100e_hvu_attribute_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_attribute_rgb.py) | attribute | short-side 256 | 8 | ResNet18 | ImageNet | 46.1 | 33.6 | 34.9 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/attribute/tsn_r18_1x1x8_100e_hvu_attribute_rgb_20201027-0b3b49d2.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/attribute/tsn_r18_1x1x8_100e_hvu_attribute_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/attribute/tsn_r18_1x1x8_100e_hvu_attribute_rgb_20201027.json) | +| - | Overall | short-side 256 | - | ResNet18 | ImageNet | 52.6 | 40.0 | 41.3 | - | - | - | -[1] For simplicity, we train a specific model for each tag category as the baselines for HVU. +\[1\] For simplicity, we train a specific model for each tag category as the baselines for HVU. -[2] The performance of HATNet and HATNet-multi are from the paper [Large Scale Holistic Video Understanding](https://pages.iai.uni-bonn.de/gall_juergen/download/HVU_eccv20.pdf). The proposed HATNet is a 2 branch Convolution Network (one 2D branch, one 3D branch) and share the same backbone(ResNet18) with us. The inputs of HATNet are 16 or 32 frames long video clips (which is much larger than us), while the input resolution is coarser (112 instead of 224). HATNet is trained on each individual task (each tag category) while HATNet-multi is trained on multiple tasks. Since there is no released codes or models for the HATNet, we just include the performance reported by the original paper. +\[2\] The performance figures of HATNet and HATNet-multi are taken from the paper [Large Scale Holistic Video Understanding](https://pages.iai.uni-bonn.de/gall_juergen/download/HVU_eccv20.pdf). The proposed HATNet is a two-branch convolutional network (one 2D branch, one 3D branch) that shares the same backbone (ResNet18) as ours. The inputs of HATNet are 16- or 32-frame video clips (much longer than ours), while the input resolution is coarser (112 instead of 224). HATNet is trained on each individual task (each tag category), while HATNet-multi is trained on multiple tasks. Since there are no released codes or models for HATNet, we simply include the performance reported by the original paper.
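For readers mapping the two testing protocols above (25 frames x 10 crops vs. 25 frames x 3 crops) onto a config, the sketch below shows how such a protocol is expressed as an mmaction2 test pipeline. The op names (`SampleFrames`, `TenCrop`, `ThreeCrop`, etc.) exist in this codebase, but the concrete values here are illustrative and not copied from any single config file.

```python
# Sketch of protocol (1): 25 frames x 10 crops. Values are illustrative.
img_norm_cfg = dict(
    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False)
test_pipeline = [
    # Evenly sample 25 single-frame clips over the whole video.
    dict(
        type='SampleFrames',
        clip_len=1,
        frame_interval=1,
        num_clips=25,
        test_mode=True),
    dict(type='RawFrameDecode'),
    dict(type='Resize', scale=(-1, 256)),
    # 10 crops per frame: 4 corners + center, each with a horizontal flip.
    dict(type='TenCrop', crop_size=224),
    # Protocol (2) swaps the line above for:
    # dict(type='ThreeCrop', crop_size=256),
    dict(type='Normalize', **img_norm_cfg),
    dict(type='FormatShape', input_format='NCHW'),
    dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
    dict(type='ToTensor', keys=['imgs'])
]
```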
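As a usage note for footnote \[1\] of the OmniSource table: the IG-1B `resnet50_swsl` weights can be fetched through torch-hub, roughly as below. This is a sketch following the hub page linked in that footnote; consult the page for the authoritative entry point.

```python
import torch

# Load the semi-weakly supervised ResNet50 used as IG-1B pretraining above.
# Repo and model names follow the linked PyTorch Hub page.
model = torch.hub.load(
    'facebookresearch/semi-supervised-ImageNet1K-models', 'resnet50_swsl')
model.eval()
```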
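The two-stream rows marked `[1: 1]*` in the Kinetics-400 tables (see also the README_zh-CN diff below) fuse the RGB and optical-flow branch scores at a 1:1 ratio, without applying softmax before fusion. A minimal NumPy sketch of this late fusion follows; the score arrays and file names are hypothetical stand-ins for dumped test scores.

```python
import numpy as np

# Hypothetical (num_videos, num_classes) raw score arrays dumped from the
# RGB branch and the optical-flow branch of TSN.
rgb_scores = np.load('rgb_scores.npy')
flow_scores = np.load('flow_scores.npy')

# 1:1 late fusion: sum the raw (pre-softmax) scores with equal weights.
fused = 1.0 * rgb_scores + 1.0 * flow_scores
top1_predictions = fused.argmax(axis=1)
```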
:::{note} diff --git a/configs/recognition/tsn/README_zh-CN.md b/configs/recognition/tsn/README_zh-CN.md index 69e95459a5..99cfcef26b 100644 --- a/configs/recognition/tsn/README_zh-CN.md +++ b/configs/recognition/tsn/README_zh-CN.md @@ -19,47 +19,47 @@ ### UCF-101 -|配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率| top5 准确率 | GPU 显存占用 (M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tsn_r50_1x1x3_75e_ucf101_rgb](/configs/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb.py) [1] |8| ResNet50 | ImageNet |83.03|96.78|8332| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb/tsn_r50_1x1x3_75e_ucf101_rgb_20201023-d85ab600.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb/tsn_r50_1x1x3_75e_ucf101_rgb_20201023.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb/tsn_r50_1x1x3_75e_ucf101_rgb_20201023.json) | +| 配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | GPU 显存占用 (M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------------- | :----: | :------: | :------: | :------: | :------: | :----------: | :---------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r50_1x1x3_75e_ucf101_rgb](/configs/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb.py) \[1\] | 8 | ResNet50 | ImageNet | 83.03 | 96.78 | 8332 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb/tsn_r50_1x1x3_75e_ucf101_rgb_20201023-d85ab600.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb/tsn_r50_1x1x3_75e_ucf101_rgb_20201023.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_75e_ucf101_rgb/tsn_r50_1x1x3_75e_ucf101_rgb_20201023.json) | -[1] 这里汇报的是 UCF-101 的 split1 部分的结果。 +\[1\] 这里汇报的是 UCF-101 的 split1 部分的结果。 ### Diving48 -|配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率| top5 准确率 | GPU 显存占用 (M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tsn_r50_video_1x1x8_100e_diving48_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb.py)|8| ResNet50 | ImageNet | 71.27 | 95.74 | 5699 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/tsn_r50_video_1x1x8_100e_diving48_rgb_20210426-6dde0185.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/20210426_014138.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/20210426_014138.log.json)| -|[tsn_r50_video_1x1x16_100e_diving48_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb.py)|8| ResNet50 | ImageNet | 76.75 | 96.95 | 5705 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/tsn_r50_video_1x1x16_100e_diving48_rgb_20210426-63c5f2f7.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/20210426_014103.log) | 
[json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/20210426_014103.log.json)| +| 配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | GPU 显存占用 (M) | ckpt | log | json | +| :----------------------------------------------------------------------------------------------------------- | :----: | :------: | :------: | :------: | :------: | :----------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r50_video_1x1x8_100e_diving48_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb.py) | 8 | ResNet50 | ImageNet | 71.27 | 95.74 | 5699 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/tsn_r50_video_1x1x8_100e_diving48_rgb_20210426-6dde0185.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/20210426_014138.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_diving48_rgb/20210426_014138.log.json) | +| [tsn_r50_video_1x1x16_100e_diving48_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb.py) | 8 | ResNet50 | ImageNet | 76.75 | 96.95 | 5705 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/tsn_r50_video_1x1x16_100e_diving48_rgb_20210426-63c5f2f7.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/20210426_014103.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x16_100e_diving48_rgb/20210426_014103.log.json) | ### HMDB51 -|配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率| top5 准确率 | GPU 显存占用 (M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb.py)|8| ResNet50 | ImageNet | 48.95| 80.19| 21535| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb_20201123-ce6c27ed.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb/20201025_231108.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb/20201025_231108.log.json) | -|[tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb.py) |8| ResNet50 | Kinetics400 | 56.08 | 84.31 | 21535| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb_20201123-7f84701b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb/20201108_190805.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb/20201108_190805.log.json) | -|[tsn_r50_1x1x8_50e_hmdb51_mit_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb.py) |8| ResNet50 | Moments | 54.25 | 83.86| 21535| 
[ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb/tsn_r50_1x1x8_50e_hmdb51_mit_rgb_20201123-01526d41.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb/20201112_170135.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb/20201112_170135.log.json) | +| 配置文件 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | GPU 显存占用 (M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------------------------------- | :----: | :------: | :---------: | :------: | :------: | :----------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb.py) | 8 | ResNet50 | ImageNet | 48.95 | 80.19 | 21535 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb_20201123-ce6c27ed.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb/20201025_231108.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_imagenet_rgb/20201025_231108.log.json) | +| [tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb.py) | 8 | ResNet50 | Kinetics400 | 56.08 | 84.31 | 21535 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb_20201123-7f84701b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb/20201108_190805.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_kinetics400_rgb/20201108_190805.log.json) | +| [tsn_r50_1x1x8_50e_hmdb51_mit_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb.py) | 8 | ResNet50 | Moments | 54.25 | 83.86 | 21535 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb/tsn_r50_1x1x8_50e_hmdb51_mit_rgb_20201123-01526d41.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb/20201112_170135.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_hmdb51_mit_rgb/20201112_170135.log.json) | ### Kinetics-400 -|配置文件 | 分辨率 | GPU 数量 | 主干网络|预训练 | top1 准确率| top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M)| ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tsn_r50_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py) |340x256|8| ResNet50 | ImageNet|70.60|89.26|x|x|4.3 (25x10 frames)|8344| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log)| 
[json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log.json)| -|[tsn_r50_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py) |短边 256|8| ResNet50 | ImageNet|70.42|89.03|x|x|x|8343|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/tsn_r50_256p_1x1x3_100e_kinetics400_rgb_20200725-22592236.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log.json)| -|[tsn_r50_dense_1x1x5_50e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb.py) |340x256|8x3| ResNet50| ImageNet |70.18|89.10|[69.15](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[88.56](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|12.7 (8x10 frames)|7028| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb/tsn_r50_dense_1x1x5_100e_kinetics400_rgb_20200627-a063165f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb/20200627_105310.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb/20200627_105310.log.json)| -|[tsn_r50_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb.py) |短边 320|8x2| ResNet50| ImageNet |70.91|89.51|x|x|10.7 (25x3 frames)| 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_320p_1x1x3_100e_kinetics400_rgb_20200702-cc665e2a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log.json)| -|[tsn_r50_320p_1x1x3_110e_kinetics400_flow](/configs/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow.py) |短边 320|8x2| ResNet50 | ImageNet|55.70|79.85|x|x|x| 8471 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow/tsn_r50_320p_1x1x3_110e_kinetics400_flow_20200705-3036bab6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow/tsn_r50_f3_kinetics400_flow_shortedge_55.7_79.9.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow/tsn_r50_f3_kinetics400_flow_shortedge_55.7_79.9.log.json)| -|tsn_r50_320p_1x1x3_kinetics400_twostream [1: 1]* |x|x| ResNet50 | ImageNet|72.76|90.52| x | x | x | x | x|x|x| -|[tsn_r50_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb.py)|短边 256|8| ResNet50| ImageNet 
|71.80|90.17|x|x|x|8343|[ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x8_100e_kinetics400_rgb/tsn_r50_256p_1x1x8_100e_kinetics400_rgb_20200817-883baf16.pth)|[log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x8_100e_kinetics400_rgb/20200815_173413.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x8_100e_kinetics400_rgb/20200815_173413.log.json)| -|[tsn_r50_320p_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb.py) |短边 320|8x3| ResNet50| ImageNet |72.41|90.55|x|x|11.1 (25x3 frames)| 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb/tsn_r50_320p_1x1x8_100e_kinetics400_rgb_20200702-ef80e3d7.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb/tsn_r50_f8_kinetics400_shortedge_72.4_90.6.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb/tsn_r50_f8_kinetics400_shortedge_72.4_90.6.log.json)| -|[tsn_r50_320p_1x1x8_110e_kinetics400_flow](/configs/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow.py) |短边 320|8x4| ResNet50 | ImageNet|57.76|80.99|x|x|x| 8473 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow/tsn_r50_320p_1x1x8_110e_kinetics400_flow_20200705-1f39486b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow/tsn_r50_f8_kinetics400_flow_shortedge_57.8_81.0.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow/tsn_r50_f8_kinetics400_flow_shortedge_57.8_81.0.log.json)| -|tsn_r50_320p_1x1x8_kinetics400_twostream [1: 1]* |x|x| ResNet50| ImageNet |74.64|91.77| x | x | x | x | x|x|x| -|[tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py) |短边 320|8| ResNet50 | ImageNet |71.11|90.04| x | x | x | 8343 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014-5ae1ee79.pth) |[log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014.log)|[json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014.json)| -|[tsn_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb.py) |340x256|8| ResNet50 | ImageNet|70.77|89.3|[68.75](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[88.42](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|12.2 (8x10 frames)|8344| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_dense_1x1x8_100e_kinetics400_rgb_20200606-e925e6e3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/20200606_003901.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/20200606_003901.log.json)| -|[tsn_r50_video_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py) 
|短边 256|8| ResNet50| ImageNet | 71.14 | 89.63 |x|x|x|21558| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_1x1x8_100e_kinetics400_rgb_20200702-568cde33.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log.json)| -|[tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb.py) |短边 256|8| ResNet50| ImageNet | 70.40 | 89.12 |x|x|x|21553| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb_20200703-0f19175f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_dense_100e_kinetics400_rgb.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_dense_100e_kinetics400_rgb.log.json)| - -这里,MMAction2 使用 [1: 1] 表示以 1: 1 的比例融合 RGB 和光流两分支的融合结果(融合前不经过 softmax) +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------------------------------------------- | :-----: | :----: | :------: | :------: | :------: | :------: | :------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------: | :----------------: | :----------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r50_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py) | 340x256 | 8 | ResNet50 | ImageNet | 70.60 | 89.26 | x | x | 4.3 (25x10 frames) | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log.json) | +| [tsn_r50_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py) | 短边 256 | 8 | ResNet50 | ImageNet | 70.42 | 89.03 | x | x | x | 8343 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/tsn_r50_256p_1x1x3_100e_kinetics400_rgb_20200725-22592236.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log) | 
[json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log.json) | +| [tsn_r50_dense_1x1x5_50e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb.py) | 340x256 | 8x3 | ResNet50 | ImageNet | 70.18 | 89.10 | [69.15](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [88.56](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 12.7 (8x10 frames) | 7028 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb/tsn_r50_dense_1x1x5_100e_kinetics400_rgb_20200627-a063165f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb/20200627_105310.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x5_100e_kinetics400_rgb/20200627_105310.log.json) | +| [tsn_r50_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb.py) | 短边 320 | 8x2 | ResNet50 | ImageNet | 70.91 | 89.51 | x | x | 10.7 (25x3 frames) | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_320p_1x1x3_100e_kinetics400_rgb_20200702-cc665e2a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log.json) | +| [tsn_r50_320p_1x1x3_110e_kinetics400_flow](/configs/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow.py) | 短边 320 | 8x2 | ResNet50 | ImageNet | 55.70 | 79.85 | x | x | x | 8471 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow/tsn_r50_320p_1x1x3_110e_kinetics400_flow_20200705-3036bab6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow/tsn_r50_f3_kinetics400_flow_shortedge_55.7_79.9.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_110e_kinetics400_flow/tsn_r50_f3_kinetics400_flow_shortedge_55.7_79.9.log.json) | +| tsn_r50_320p_1x1x3_kinetics400_twostream \[1: 1\]\* | x | x | ResNet50 | ImageNet | 72.76 | 90.52 | x | x | x | x | x | x | x | +| [tsn_r50_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb.py) | 短边 256 | 8 | ResNet50 | ImageNet | 71.80 | 90.17 | x | x | x | 8343 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x8_100e_kinetics400_rgb/tsn_r50_256p_1x1x8_100e_kinetics400_rgb_20200817-883baf16.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x8_100e_kinetics400_rgb/20200815_173413.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x8_100e_kinetics400_rgb/20200815_173413.log.json) | +| [tsn_r50_320p_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb.py) | 短边 320 | 8x3 | ResNet50 | ImageNet | 72.41 | 90.55 | x | x | 11.1 (25x3 frames) | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb/tsn_r50_320p_1x1x8_100e_kinetics400_rgb_20200702-ef80e3d7.pth) | 
[log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb/tsn_r50_f8_kinetics400_shortedge_72.4_90.6.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_100e_kinetics400_rgb/tsn_r50_f8_kinetics400_shortedge_72.4_90.6.log.json) | +| [tsn_r50_320p_1x1x8_110e_kinetics400_flow](/configs/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow.py) | 短边 320 | 8x4 | ResNet50 | ImageNet | 57.76 | 80.99 | x | x | x | 8473 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow/tsn_r50_320p_1x1x8_110e_kinetics400_flow_20200705-1f39486b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow/tsn_r50_f8_kinetics400_flow_shortedge_57.8_81.0.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_110e_kinetics400_flow/tsn_r50_f8_kinetics400_flow_shortedge_57.8_81.0.log.json) | +| tsn_r50_320p_1x1x8_kinetics400_twostream \[1: 1\]\* | x | x | ResNet50 | ImageNet | 74.64 | 91.77 | x | x | x | x | x | x | x | +| [tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py) | 短边 320 | 8 | ResNet50 | ImageNet | 71.11 | 90.04 | x | x | x | 8343 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014-5ae1ee79.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb_20201014.json) | +| [tsn_r50_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb.py) | 340x256 | 8 | ResNet50 | ImageNet | 70.77 | 89.3 | [68.75](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [88.42](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 12.2 (8x10 frames) | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_dense_1x1x8_100e_kinetics400_rgb_20200606-e925e6e3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/20200606_003901.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_dense_1x1x8_100e_kinetics400_rgb/20200606_003901.log.json) | +| [tsn_r50_video_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb.py) | 短边 256 | 8 | ResNet50 | ImageNet | 71.14 | 89.63 | x | x | x | 21558 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_1x1x8_100e_kinetics400_rgb_20200702-568cde33.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_100e_kinetics400_rgb.log.json) | +| [tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb.py) | 短边 256 | 8 | ResNet50 | ImageNet | 70.40 | 
89.12 | x | x | x | 21553 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb_20200703-0f19175f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_dense_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_dense_1x1x8_100e_kinetics400_rgb/tsn_r50_video_2d_1x1x8_dense_100e_kinetics400_rgb.log.json) | + +这里,MMAction2 使用 \[1: 1\] 表示以 1: 1 的比例融合 RGB 和光流两分支的融合结果(融合前不经过 softmax) ### 在 TSN 模型中使用第三方的主干网络 @@ -69,11 +69,11 @@ - [x] TorchVision 中的主干网络 - [x] pytorch-image-models(timm) 中的主干网络 -| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | ckpt | log | json | -| :----------------------------------------------------------: | :------------: | :--: | :----------------------------------------------------------: | :------: | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.py) | 短边 320 | 8x2 | ResNeXt101-32x4d [[MMCls](https://github.com/open-mmlab/mmclassification/tree/master/configs/resnext)] | ImageNet | 73.43 | 91.01 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb-16a8b561.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.json) | -| [tsn_dense161_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.py) | 短边 320 | 8x2 | Densenet-161 [[TorchVision](https://github.com/pytorch/vision/)] | ImageNet | 72.78 | 90.75 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb-cbe85332.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.json) | -| [tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | 8 | Swin Transformer Base [[timm](https://github.com/rwightman/pytorch-image-models)] | ImageNet | 77.51 | 92.92 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb-805380f6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.log) | 
[json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.json) | +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | ckpt | log | json | +| :-------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :------------: | :----: | :------------------------------------------------------------------------------------------------------: | :------: | :------: | :------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.py) | 短边 320 | 8x2 | ResNeXt101-32x4d \[[MMCls](https://github.com/open-mmlab/mmclassification/tree/master/configs/resnext)\] | ImageNet | 73.43 | 91.01 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb-16a8b561.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_rn101_32x4d_320p_1x1x3_100e_kinetics400_rgb.json) | +| [tsn_dense161_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.py) | 短边 320 | 8x2 | Densenet-161 \[[TorchVision](https://github.com/pytorch/vision/)\] | ImageNet | 72.78 | 90.75 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb-cbe85332.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb/tsn_dense161_320p_1x1x3_100e_kinetics400_rgb.json) | +| [tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.py) | short-side 320 | 8 | Swin Transformer Base \[[timm](https://github.com/rwightman/pytorch-image-models)\] | ImageNet | 77.51 | 92.92 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb-805380f6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.log) | 
[json](https://download.openmmlab.com/mmaction/recognition/tsn/custom_backbones/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb/tsn_swin_transformer_video_320p_1x1x3_100e_kinetics400_rgb.json) |

1. 由于多种原因,TIMM 中的一些模型未能得到支持,详情请参考 [PR #880](https://github.com/open-mmlab/mmaction2/pull/880)。

@@ -85,94 +85,94 @@
2. 不同的数据增强方法:(1) MultiScaleCrop, (2) RandomResizedCrop;
3. 不同的测试方法:(1) 25 帧 x 10 裁剪片段, (2) 25 帧 x 3 裁剪片段。

-| 配置文件 | 分辨率 | 训练时的数据增强 | 测试时的策略 | top1 准确率 | top5 准确率 | ckpt | log | json |
-| :----------------------------------------------------------: | :------------: | :-------------------: | :--------------: | :------: | :------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: |
-| [tsn_r50_multiscalecrop_340x256_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_340x256_1x1x3_100e_kinetics400_rgb.py) | 340x256 | MultiScaleCrop | 25x10 frames | 70.60 | 89.26 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log.json) |
-| x | 340x256 | MultiScaleCrop | 25x3 frames | 70.52 | 89.39 | x | x | x |
-| [tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb.py) | 340x256 | RandomResizedCrop | 25x10 frames | 70.11 | 89.01 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb_20200725-88cb325a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb_20200725.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb_20200725.json) |
-| x | 340x256 | RandomResizedCrop | 25x3 frames | 69.95 | 89.02 | x | x | x |
-| [tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb.py) | 短边 320 | MultiScaleCrop | 25x10 frames | 70.32 | 89.25 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb_20200725-9922802f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb_20200725.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb_20200725.json) |
-| x | 短边 320 | MultiScaleCrop | 25x3 frames | 70.54 | 89.39 | x | x | x |
-|
[tsn_r50_randomresizedcrop_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_320p_1x1x3_100e_kinetics400_rgb.py) | 短边 320 | RandomResizedCrop | 25x10 frames | 70.44 | 89.23 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_320p_1x1x3_100e_kinetics400_rgb_20200702-cc665e2a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log.json) | -| x | 短边 320 | RandomResizedCrop | 25x3 frames | 70.91 | 89.51 | x | x | x | -| [tsn_r50_multiscalecrop_256p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_256p_1x1x3_100e_kinetics400_rgb.py) | 短边 256 | MultiScaleCrop | 25x10 frames | 70.42 | 89.03 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/tsn_r50_256p_1x1x3_100e_kinetics400_rgb_20200725-22592236.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log.json)| -| x | 短边 256 | MultiScaleCrop | 25x3 frames | 70.79 | 89.42 | x | x | x | -| [tsn_r50_randomresizedcrop_256p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_256p_1x1x3_100e_kinetics400_rgb.py) | 短边 256 | RandomResizedCrop | 25x10 frames | 69.80 | 89.06 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_randomresize_1x1x3_100e_kinetics400_rgb/tsn_r50_256p_randomresize_1x1x3_100e_kinetics400_rgb_20200817-ae7963ca.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_randomresize_1x1x3_100e_kinetics400_rgb/20200815_172601.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_randomresize_1x1x3_100e_kinetics400_rgb/20200815_172601.log.json)| -| x | 短边 256 | RandomResizedCrop | 25x3 frames | 70.48 | 89.89 | x | x | x | +| 配置文件 | 分辨率 | 训练时的数据增强 | 测试时的策略 | top1 准确率 | top5 准确率 | ckpt | log | json | +| :---------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----: | :---------------: | :----------: | :------: | :------: | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r50_multiscalecrop_340x256_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_340x256_1x1x3_100e_kinetics400_rgb.py) | 340x256 | MultiScaleCrop | 25x10 frames | 70.60 | 89.26 | 
[ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log.json) | +| x | 340x256 | MultiScaleCrop | 25x3 frames | 70.52 | 89.39 | x | x | x | +| [tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb.py) | 340x256 | RandomResizedCrop | 25x10 frames | 70.11 | 89.01 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb_20200725-88cb325a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb_20200725.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb/tsn_r50_randomresizedcrop_340x256_1x1x3_100e_kinetics400_rgb_20200725.json) | +| x | 340x256 | RandomResizedCrop | 25x3 frames | 69.95 | 89.02 | x | x | x | +| [tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb.py) | 短边 320 | MultiScaleCrop | 25x10 frames | 70.32 | 89.25 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb_20200725-9922802f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb_20200725.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_multiscalecrop_320p_1x1x3_100e_kinetics400_rgb_20200725.json) | +| x | 短边 320 | MultiScaleCrop | 25x3 frames | 70.54 | 89.39 | x | x | x | +| [tsn_r50_randomresizedcrop_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_320p_1x1x3_100e_kinetics400_rgb.py) | 短边 320 | RandomResizedCrop | 25x10 frames | 70.44 | 89.23 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_320p_1x1x3_100e_kinetics400_rgb_20200702-cc665e2a.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x3_100e_kinetics400_rgb/tsn_r50_f3_kinetics400_shortedge_70.9_89.5.log.json) | +| x | 短边 320 | RandomResizedCrop | 25x3 frames | 70.91 | 89.51 | x | x | x | +| [tsn_r50_multiscalecrop_256p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_multiscalecrop_256p_1x1x3_100e_kinetics400_rgb.py) | 短边 256 | MultiScaleCrop | 25x10 frames | 70.42 | 89.03 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/tsn_r50_256p_1x1x3_100e_kinetics400_rgb_20200725-22592236.pth) | 
[log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_1x1x3_100e_kinetics400_rgb/20200725_031325.log.json) | +| x | 短边 256 | MultiScaleCrop | 25x3 frames | 70.79 | 89.42 | x | x | x | +| [tsn_r50_randomresizedcrop_256p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/data_benchmark/tsn_r50_randomresizedcrop_256p_1x1x3_100e_kinetics400_rgb.py) | 短边 256 | RandomResizedCrop | 25x10 frames | 69.80 | 89.06 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_randomresize_1x1x3_100e_kinetics400_rgb/tsn_r50_256p_randomresize_1x1x3_100e_kinetics400_rgb_20200817-ae7963ca.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_randomresize_1x1x3_100e_kinetics400_rgb/20200815_172601.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_256p_randomresize_1x1x3_100e_kinetics400_rgb/20200815_172601.log.json) | +| x | 短边 256 | RandomResizedCrop | 25x3 frames | 70.48 | 89.89 | x | x | x | ### Kinetics-400 OmniSource 实验 -| 配置文件 | 分辨率 | 主干网络 | 预训练 | w. OmniSource | top1 准确率 | top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log | json | -| :----------------------------------------------------------: | :------------: | :------: | :-------: | :----------------: | :------: | :------: | :---------------------: | :--------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [tsn_r50_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py) | 340x256 | ResNet50 | ImageNet | :x: | 70.6 | 89.3 | 4.3 (25x10 frames) | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log.json) | -| x | 340x256 | ResNet50 | ImageNet | :heavy_check_mark: | 73.6 | 91.0 | x | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_imagenet_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-54192355.pth) | x | x | -| x | 短边 320 | ResNet50 | IG-1B [1] | :x: | 73.1 | 90.4 | x | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_without_omni_1x1x3_kinetics400_rgb_20200926-c133dd49.pth) | x | x | -| x | 短边 320 | ResNet50 | IG-1B [1] | :heavy_check_mark: | 75.7 | 91.9 | x | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-2863fed0.pth) | x | x | +| 配置文件 | 分辨率 | 主干网络 | 预训练 | w. 
OmniSource | top1 准确率 | top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------------------: | :-----: | :------: | :---------: | :----------------: | :------: | :------: | :----------------: | :----------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r50_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py) | 340x256 | ResNet50 | ImageNet | :x: | 70.6 | 89.3 | 4.3 (25x10 frames) | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/20200614_063526.log.json) | +| x | 340x256 | ResNet50 | ImageNet | :heavy_check_mark: | 73.6 | 91.0 | x | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_imagenet_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-54192355.pth) | x | x | +| x | 短边 320 | ResNet50 | IG-1B \[1\] | :x: | 73.1 | 90.4 | x | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_without_omni_1x1x3_kinetics400_rgb_20200926-c133dd49.pth) | x | x | +| x | 短边 320 | ResNet50 | IG-1B \[1\] | :heavy_check_mark: | 75.7 | 91.9 | x | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/omni/tsn_1G1B_pretrained_r50_omni_1x1x3_kinetics400_rgb_20200926-2863fed0.pth) | x | x | -[1] MMAction2 使用 [torch-hub](https://pytorch.org/hub/facebookresearch_semi-supervised-ImageNet1K-models_resnext/) 提供的 `resnet50_swsl` 预训练模型。 +\[1\] MMAction2 使用 [torch-hub](https://pytorch.org/hub/facebookresearch_semi-supervised-ImageNet1K-models_resnext/) 提供的 `resnet50_swsl` 预训练模型。 ### Kinetics-600 -| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log | json | -| :----------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------: | :---------------------: | :--------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [tsn_r50_video_1x1x8_100e_kinetics600_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb.py) | 短边 256 | 8x2 | ResNet50 | ImageNet | 74.8 | 92.3 | 11.1 (25x3 frames) | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb/tsn_r50_video_1x1x8_100e_kinetics600_rgb_20201015-4db3c461.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb/tsn_r50_video_1x1x8_100e_kinetics600_rgb_20201015.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb/tsn_r50_video_1x1x8_100e_kinetics600_rgb_20201015.json) | +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 推理时间 (video/s) 
| GPU 显存占用 (M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------------------------------- | :----: | :----: | :------: | :------: | :------: | :------: | :----------------: | :----------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r50_video_1x1x8_100e_kinetics600_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb.py) | 短边 256 | 8x2 | ResNet50 | ImageNet | 74.8 | 92.3 | 11.1 (25x3 frames) | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb/tsn_r50_video_1x1x8_100e_kinetics600_rgb_20201015-4db3c461.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb/tsn_r50_video_1x1x8_100e_kinetics600_rgb_20201015.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics600_rgb/tsn_r50_video_1x1x8_100e_kinetics600_rgb_20201015.json) | ### Kinetics-700 -| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log | json | -| :----------------------------------------------------------- | :------------: | :--: | :------: | :------: | :------: | :------: | :---------------------: | :--------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [tsn_r50_video_1x1x8_100e_kinetics700_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb.py) | 短边 256 | 8x2 | ResNet50 | ImageNet | 61.7 | 83.6 | 11.1 (25x3 frames) | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb/tsn_r50_video_1x1x8_100e_kinetics700_rgb_20201015-e381a6c7.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb/tsn_r50_video_1x1x8_100e_kinetics700_rgb_20201015.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb/tsn_r50_video_1x1x8_100e_kinetics700_rgb_20201015.json) | +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------------------------------- | :----: | :----: | :------: | :------: | :------: | :------: | :----------------: | :----------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r50_video_1x1x8_100e_kinetics700_rgb](/configs/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb.py) | 短边 
256 | 8x2 | ResNet50 | ImageNet | 61.7 | 83.6 | 11.1 (25x3 frames) | 8344 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb/tsn_r50_video_1x1x8_100e_kinetics700_rgb_20201015-e381a6c7.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb/tsn_r50_video_1x1x8_100e_kinetics700_rgb_20201015.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_video_1x1x8_100e_kinetics700_rgb/tsn_r50_video_1x1x8_100e_kinetics700_rgb_20201015.json) | ### Something-Something V1 -|配置文件|分辨率 | GPU 数量| 主干网络 |预训练| top1 准确率| top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | GPU 显存占用 (M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tsn_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb.py)|高 100 |8| ResNet50 | ImageNet|18.55 |44.80 |[17.53](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[44.29](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 10978 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_r50_1x1x8_50e_sthv1_rgb_20200618-061b9195.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_sthv1.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_r50_f8_sthv1_18.1_45.0.log.json)| -|[tsn_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb.py)| 高 100 |8| ResNet50| ImageNet |15.77 |39.85 |[13.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)|[35.58](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training)| 5691 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/tsn_r50_1x1x16_50e_sthv1_rgb_20200614-7e2fe4f1.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/20200614_211932.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/20200614_211932.log.json)| +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | GPU 显存占用 (M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------- | :---: | :----: | :------: | :------: | :------: | :------: | :------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------: | :----------: | :---------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r50_1x1x8_50e_sthv1_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 18.55 | 44.80 | [17.53](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 
[44.29](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 10978 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_r50_1x1x8_50e_sthv1_rgb_20200618-061b9195.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_sthv1.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb/tsn_r50_f8_sthv1_18.1_45.0.log.json) | +| [tsn_r50_1x1x16_50e_sthv1_rgb](/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb.py) | 高 100 | 8 | ResNet50 | ImageNet | 15.77 | 39.85 | [13.33](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | [35.58](https://github.com/mit-han-lab/temporal-shift-module/tree/8d53d6fda40bea2f1b37a6095279c4b454d672bd#training) | 5691 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/tsn_r50_1x1x16_50e_sthv1_rgb_20200614-7e2fe4f1.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/20200614_211932.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv1_rgb/20200614_211932.log.json) | ### Something-Something V2 -|配置文件 |分辨率| GPU 数量| 主干网络| 预训练 | top1 准确率| top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | GPU 显存占用 (M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tsn_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb.py)|高 256 |8| ResNet50| ImageNet |28.59 |59.56 | x | x | 10966 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/tsn_r50_1x1x8_50e_sthv2_rgb_20210816-1aafee8f.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20210816_221116.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20210816_221116.log.json)| -|[tsn_r50_1x1x16_50e_sthv2_rgb](/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb.py)|高 256 |8|ResNet50| ImageNet |20.89 |49.16 | x | x |8337| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/tsn_r50_1x1x16_50e_sthv2_rgb_20210816-5d23ac6e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20210816_225256.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20210816_225256.log.json)| +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | 参考代码的 top1 准确率 | 参考代码的 top5 准确率 | GPU 显存占用 (M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------- | :---: | :----: | :------: | :------: | :------: | :------: | :------------: | :------------: | :----------: | :---------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------: | +| [tsn_r50_1x1x8_50e_sthv2_rgb](/configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb.py) | 高 256 | 8 | ResNet50 | ImageNet | 28.59 | 59.56 | x | x | 10966 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/tsn_r50_1x1x8_50e_sthv2_rgb_20210816-1aafee8f.pth) 
| [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20210816_221116.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x8_50e_sthv2_rgb/20210816_221116.log.json) | +| [tsn_r50_1x1x16_50e_sthv2_rgb](/configs/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb.py) | 高 256 | 8 | ResNet50 | ImageNet | 20.89 | 49.16 | x | x | 8337 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/tsn_r50_1x1x16_50e_sthv2_rgb_20210816-5d23ac6e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20210816_225256.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x16_50e_sthv2_rgb/20210816_225256.log.json) | ### Moments in Time -|配置文件 |分辨率| GPU 数量| 主干网络 | 预训练 | top1 准确率| top5 准确率 | GPU 显存占用 (M)| ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tsn_r50_1x1x6_100e_mit_rgb](/configs/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb.py)|短边 256 |8x2| ResNet50| ImageNet |26.84|51.6| 8339| [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb/tsn_r50_1x1x6_100e_mit_rgb_20200618-d512ab1b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb/tsn_mit.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb/tsn_r50_f6_mit_26.8_51.6.log.json)| +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | GPU 显存占用 (M) | ckpt | log | json | +| :----------------------------------------------------------------------------------- | :----: | :----: | :------: | :------: | :------: | :------: | :----------: | :-----------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r50_1x1x6_100e_mit_rgb](/configs/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb.py) | 短边 256 | 8x2 | ResNet50 | ImageNet | 26.84 | 51.6 | 8339 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb/tsn_r50_1x1x6_100e_mit_rgb_20200618-d512ab1b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb/tsn_mit.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x6_100e_mit_rgb/tsn_r50_f6_mit_26.8_51.6.log.json) | ### Multi-Moments in Time -|配置文件 | 分辨率|GPU 数量| 主干网络 | 预训练 | mAP| GPU 显存占用 (M) | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tsn_r101_1x1x5_50e_mmit_rgb](/configs/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb.py)|短边 256 |8x2| ResNet101| ImageNet |61.09| 10467 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb/tsn_r101_1x1x5_50e_mmit_rgb_20200618-642f450d.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb/tsn_mmit.log)| [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb/tsn_r101_f6_mmit_61.1.log.json)| +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | mAP | GPU 显存占用 (M) | ckpt | log | json | +| :------------------------------------------------------------------------------------- | :----: | :----: | :-------: | :------: | :---: | :----------: | 
:-------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r101_1x1x5_50e_mmit_rgb](/configs/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb.py) | 短边 256 | 8x2 | ResNet101 | ImageNet | 61.09 | 10467 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb/tsn_r101_1x1x5_50e_mmit_rgb_20200618-642f450d.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb/tsn_mmit.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r101_1x1x5_50e_mmit_rgb/tsn_r101_f6_mmit_61.1.log.json) | ### ActivityNet v1.3 -| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | GPU 显存占用 (M) | ckpt | log | json | -| :----------------------------------------------------------- | :--------: | :--: | :------: | :---------: | :------: | :------: | :--------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [tsn_r50_320p_1x1x8_50e_activitynet_video_rgb](/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb.py) | 短边 320 | 8x1 | ResNet50 | Kinetics400 | 73.93 | 93.44 | 5692 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb_20210301-7f8da0c6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb/20210228_223327.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb/20210228_223327.log.json) | -| [tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb](/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb.py) | 短边 320 | 8x1 | ResNet50 | Kinetics400 | 76.90 | 94.47 | 5692 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb_20210301-c0f04a7e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb/20210217_181313.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb/20210217_181313.log.json) | -| [tsn_r50_320p_1x1x8_150e_activitynet_video_flow](/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow.py) | 340x256 | 8x2 | ResNet50 | Kinetics400 | 57.51 | 83.02 | 5780 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow/tsn_r50_320p_1x1x8_150e_activitynet_video_flow_20200804-13313f52.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow/tsn_r50_320p_1x1x8_150e_activitynet_video_flow_20200804.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow/tsn_r50_320p_1x1x8_150e_activitynet_video_flow_20200804.json) | -| [tsn_r50_320p_1x1x8_150e_activitynet_clip_flow](/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow.py) | 340x256 | 8x2 | ResNet50 | Kinetics400 | 59.51 | 82.69 | 5780 | 
[ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow_20200804-8622cf38.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow_20200804.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow_20200804.json) | +| 配置文件 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | top1 准确率 | top5 准确率 | GPU 显存占用 (M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------------------------------------------- | :-----: | :----: | :------: | :---------: | :------: | :------: | :----------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r50_320p_1x1x8_50e_activitynet_video_rgb](/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb.py) | 短边 320 | 8x1 | ResNet50 | Kinetics400 | 73.93 | 93.44 | 5692 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb_20210301-7f8da0c6.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb/20210228_223327.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_video_rgb/20210228_223327.log.json) | +| [tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb](/configs/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb.py) | 短边 320 | 8x1 | ResNet50 | Kinetics400 | 76.90 | 94.47 | 5692 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb_20210301-c0f04a7e.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb/20210217_181313.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_50e_activitynet_clip_rgb/20210217_181313.log.json) | +| [tsn_r50_320p_1x1x8_150e_activitynet_video_flow](/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow.py) | 340x256 | 8x2 | ResNet50 | Kinetics400 | 57.51 | 83.02 | 5780 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow/tsn_r50_320p_1x1x8_150e_activitynet_video_flow_20200804-13313f52.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow/tsn_r50_320p_1x1x8_150e_activitynet_video_flow_20200804.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_video_flow/tsn_r50_320p_1x1x8_150e_activitynet_video_flow_20200804.json) | +| [tsn_r50_320p_1x1x8_150e_activitynet_clip_flow](/configs/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow.py) | 340x256 | 8x2 | ResNet50 | Kinetics400 | 59.51 | 82.69 | 5780 | 
[ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow_20200804-8622cf38.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow_20200804.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow/tsn_r50_320p_1x1x8_150e_activitynet_clip_flow_20200804.json) | ### HVU -| 配置文件[1] | tag 类别 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | mAP | HATNet[2] | HATNet-multi[2] | ckpt | log | json | -| :----------------------------------------------------------: | :----------: | :------------: | :--: | :------: | :------: | :--: | :-------: | :-------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [tsn_r18_1x1x8_100e_hvu_action_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_action_rgb.py) | action | 短边 256 | 8x2 | ResNet18 | ImageNet | 57.5 | 51.8 | 53.5 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/action/tsn_r18_1x1x8_100e_hvu_action_rgb_20201027-011b282b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/action/tsn_r18_1x1x8_100e_hvu_action_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/action/tsn_r18_1x1x8_100e_hvu_action_rgb_20201027.json) | -| [tsn_r18_1x1x8_100e_hvu_scene_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_scene_rgb.py) | scene | 短边 256 | 8 | ResNet18 | ImageNet | 55.2 | 55.8 | 57.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/scene/tsn_r18_1x1x8_100e_hvu_scene_rgb_20201027-00e5748d.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/scene/tsn_r18_1x1x8_100e_hvu_scene_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/scene/tsn_r18_1x1x8_100e_hvu_scene_rgb_20201027.json) | -| [tsn_r18_1x1x8_100e_hvu_object_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_object_rgb.py) | object | 短边 256 | 8 | ResNet18 | ImageNet | 45.7 | 34.2 | 35.1 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/object/tsn_r18_1x1x8_100e_hvu_object_rgb_20201102-24a22f30.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/object/tsn_r18_1x1x8_100e_hvu_object_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/object/tsn_r18_1x1x8_100e_hvu_object_rgb_20201027.json) | -| [tsn_r18_1x1x8_100e_hvu_event_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_event_rgb.py) | event | 短边 256 | 8 | ResNet18 | ImageNet | 63.7 | 38.5 | 39.8 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/event/tsn_r18_1x1x8_100e_hvu_event_rgb_20201027-dea8cd71.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/event/tsn_r18_1x1x8_100e_hvu_event_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/event/tsn_r18_1x1x8_100e_hvu_event_rgb_20201027.json) | -| [tsn_r18_1x1x8_100e_hvu_concept_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_concept_rgb.py) | concept | 短边 256 | 8 | ResNet18 | ImageNet | 47.5 | 26.1 | 27.3 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/concept/tsn_r18_1x1x8_100e_hvu_concept_rgb_20201027-fc1dd8e3.pth) | 
[log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/concept/tsn_r18_1x1x8_100e_hvu_concept_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/concept/tsn_r18_1x1x8_100e_hvu_concept_rgb_20201027.json) | -| [tsn_r18_1x1x8_100e_hvu_attribute_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_attribute_rgb.py) | attribute | 短边 256 | 8 | ResNet18 | ImageNet | 46.1 | 33.6 | 34.9 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/attribute/tsn_r18_1x1x8_100e_hvu_attribute_rgb_20201027-0b3b49d2.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/attribute/tsn_r18_1x1x8_100e_hvu_attribute_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/attribute/tsn_r18_1x1x8_100e_hvu_attribute_rgb_20201027.json) | -| - | 所有 tag | 短边 256 | - | ResNet18 | ImageNet | 52.6 | 40.0 | 41.3 | - | - | - | +| 配置文件\[1\] | tag 类别 | 分辨率 | GPU 数量 | 主干网络 | 预训练 | mAP | HATNet\[2\] | HATNet-multi\[2\] | ckpt | log | json | +| :----------------------------------------------------------------------------------------------------------: | :-------: | :----: | :----: | :------: | :------: | :--: | :---------: | :---------------: | :--------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r18_1x1x8_100e_hvu_action_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_action_rgb.py) | action | 短边 256 | 8x2 | ResNet18 | ImageNet | 57.5 | 51.8 | 53.5 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/action/tsn_r18_1x1x8_100e_hvu_action_rgb_20201027-011b282b.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/action/tsn_r18_1x1x8_100e_hvu_action_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/action/tsn_r18_1x1x8_100e_hvu_action_rgb_20201027.json) | +| [tsn_r18_1x1x8_100e_hvu_scene_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_scene_rgb.py) | scene | 短边 256 | 8 | ResNet18 | ImageNet | 55.2 | 55.8 | 57.2 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/scene/tsn_r18_1x1x8_100e_hvu_scene_rgb_20201027-00e5748d.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/scene/tsn_r18_1x1x8_100e_hvu_scene_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/scene/tsn_r18_1x1x8_100e_hvu_scene_rgb_20201027.json) | +| [tsn_r18_1x1x8_100e_hvu_object_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_object_rgb.py) | object | 短边 256 | 8 | ResNet18 | ImageNet | 45.7 | 34.2 | 35.1 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/object/tsn_r18_1x1x8_100e_hvu_object_rgb_20201102-24a22f30.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/object/tsn_r18_1x1x8_100e_hvu_object_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/object/tsn_r18_1x1x8_100e_hvu_object_rgb_20201027.json) | +| [tsn_r18_1x1x8_100e_hvu_event_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_event_rgb.py) | event | 短边 256 | 8 | ResNet18 | ImageNet | 63.7 | 38.5 | 39.8 | 
[ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/event/tsn_r18_1x1x8_100e_hvu_event_rgb_20201027-dea8cd71.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/event/tsn_r18_1x1x8_100e_hvu_event_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/event/tsn_r18_1x1x8_100e_hvu_event_rgb_20201027.json) | +| [tsn_r18_1x1x8_100e_hvu_concept_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_concept_rgb.py) | concept | 短边 256 | 8 | ResNet18 | ImageNet | 47.5 | 26.1 | 27.3 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/concept/tsn_r18_1x1x8_100e_hvu_concept_rgb_20201027-fc1dd8e3.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/concept/tsn_r18_1x1x8_100e_hvu_concept_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/concept/tsn_r18_1x1x8_100e_hvu_concept_rgb_20201027.json) | +| [tsn_r18_1x1x8_100e_hvu_attribute_rgb](/configs/recognition/tsn/hvu/tsn_r18_1x1x8_100e_hvu_attribute_rgb.py) | attribute | 短边 256 | 8 | ResNet18 | ImageNet | 46.1 | 33.6 | 34.9 | [ckpt](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/attribute/tsn_r18_1x1x8_100e_hvu_attribute_rgb_20201027-0b3b49d2.pth) | [log](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/attribute/tsn_r18_1x1x8_100e_hvu_attribute_rgb_20201027.log) | [json](https://download.openmmlab.com/mmaction/recognition/tsn/hvu/attribute/tsn_r18_1x1x8_100e_hvu_attribute_rgb_20201027.json) | +| - | 所有 tag | 短边 256 | - | ResNet18 | ImageNet | 52.6 | 40.0 | 41.3 | - | - | - | -[1] 简单起见,MMAction2 对每个 tag 类别训练特定的模型,作为 HVU 的基准模型。 +\[1\] 简单起见,MMAction2 对每个 tag 类别训练特定的模型,作为 HVU 的基准模型。 -[2] 这里 HATNet 和 HATNet-multi 的结果来自于 paper: [Large Scale Holistic Video Understanding](https://pages.iai.uni-bonn.de/gall_juergen/download/HVU_eccv20.pdf)。 +\[2\] 这里 HATNet 和 HATNet-multi 的结果来自于 paper: [Large Scale Holistic Video Understanding](https://pages.iai.uni-bonn.de/gall_juergen/download/HVU_eccv20.pdf)。 HATNet 的时序动作候选是一个双分支的卷积网络(一个 2D 分支,一个 3D 分支),并且和 MMAction2 有相同的主干网络(ResNet18)。HATNet 的输入是 16 帧或 32 帧的长视频片段(这样的片段比 MMAction2 使用的要长),同时输入分辨率更粗糙(112px 而非 224px)。 HATNet 是在每个独立的任务(对应每个 tag 类别)上进行训练的,HATNet-multi 是在多个任务上进行训练的。由于目前没有 HATNet 的开源代码和模型,这里仅汇报了原 paper 的精度。 @@ -213,7 +213,7 @@ python tools/train.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E8%AE%AD%E7%BB%83%E9%85%8D%E7%BD%AE) 中的 **训练配置** 部分。 ## 如何测试 @@ -231,4 +231,4 @@ python tools/test.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb. --out result.json ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition/x3d/README.md b/configs/recognition/x3d/README.md index 8b26c2bacc..0c835e3dee 100644 --- a/configs/recognition/x3d/README.md +++ b/configs/recognition/x3d/README.md @@ -11,6 +11,7 @@ This paper presents X3D, a family of efficient video networks that progressively expand a tiny 2D image classification architecture along multiple network axes, in space, time, width and depth. 
Inspired by feature selection methods in machine learning, a simple stepwise network expansion approach is employed that expands a single axis in each step, such that good accuracy to complexity trade-off is achieved. To expand X3D to a specific target complexity, we perform progressive forward expansion followed by backward contraction. X3D achieves state-of-the-art performance while requiring 4.8x and 5.5x fewer multiply-adds and parameters for similar accuracy as previous work. Our most surprising finding is that networks with high spatiotemporal resolution can perform well, while being extremely light in terms of network width and parameters. We report competitive accuracy at unprecedented efficiency on video classification and detection benchmarks. +
    @@ -19,12 +20,12 @@ This paper presents X3D, a family of efficient video networks that progressively ### Kinetics-400 -|config | resolution | backbone | top1 10-view | top1 30-view | reference top1 10-view | reference top1 30-view | ckpt | -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[x3d_s_13x6x1_facebook_kinetics400_rgb](/configs/recognition/x3d/x3d_s_13x6x1_facebook_kinetics400_rgb.py)|short-side 320| X3D_S | 72.7 | 73.2 | 73.1 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] | 73.5 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] | [ckpt](https://download.openmmlab.com/mmaction/recognition/x3d/facebook/x3d_s_facebook_13x6x1_kinetics400_rgb_20201027-623825a0.pth)[1] | -|[x3d_m_16x5x1_facebook_kinetics400_rgb](/configs/recognition/x3d/x3d_m_16x5x1_facebook_kinetics400_rgb.py)|short-side 320| X3D_M | 75.0 | 75.6 | 75.1 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] | 76.2 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] | [ckpt](https://download.openmmlab.com/mmaction/recognition/x3d/facebook/x3d_m_facebook_16x5x1_kinetics400_rgb_20201027-3f42382a.pth)[1] | +| config | resolution | backbone | top1 10-view | top1 30-view | reference top1 10-view | reference top1 30-view | ckpt | +| :--------------------------------------------------------------------------------------------------------- | :------------: | :------: | :----------: | :----------: | :----------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------: | +| [x3d_s_13x6x1_facebook_kinetics400_rgb](/configs/recognition/x3d/x3d_s_13x6x1_facebook_kinetics400_rgb.py) | short-side 320 | X3D_S | 72.7 | 73.2 | 73.1 \[[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)\] | 73.5 \[[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)\] | [ckpt](https://download.openmmlab.com/mmaction/recognition/x3d/facebook/x3d_s_facebook_13x6x1_kinetics400_rgb_20201027-623825a0.pth)\[1\] | +| [x3d_m_16x5x1_facebook_kinetics400_rgb](/configs/recognition/x3d/x3d_m_16x5x1_facebook_kinetics400_rgb.py) | short-side 320 | X3D_M | 75.0 | 75.6 | 75.1 \[[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)\] | 76.2 \[[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)\] | [ckpt](https://download.openmmlab.com/mmaction/recognition/x3d/facebook/x3d_m_facebook_16x5x1_kinetics400_rgb_20201027-3f42382a.pth)\[1\] | -[1] The models are ported from the repo [SlowFast](https://github.com/facebookresearch/SlowFast/) and tested on our data. Currently, we only support the testing of X3D models, training will be available soon. +\[1\] The models are ported from the repo [SlowFast](https://github.com/facebookresearch/SlowFast/) and tested on our data. Currently, we only support the testing of X3D models, training will be available soon. 
:::{note} diff --git a/configs/recognition/x3d/README_zh-CN.md b/configs/recognition/x3d/README_zh-CN.md index 3b09e5276b..60d486b35b 100644 --- a/configs/recognition/x3d/README_zh-CN.md +++ b/configs/recognition/x3d/README_zh-CN.md @@ -19,12 +19,12 @@ ### Kinetics-400 -|配置文件 | 分辨率 | 主干网络 | top1 10-view | top1 30-view | 参考代码的 top1 10-view | 参考代码的 top1 30-view | ckpt | -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[x3d_s_13x6x1_facebook_kinetics400_rgb](/configs/recognition/x3d/x3d_s_13x6x1_facebook_kinetics400_rgb.py)|短边 320| X3D_S | 72.7 | 73.2 | 73.1 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] | 73.5 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] | [ckpt](https://download.openmmlab.com/mmaction/recognition/x3d/facebook/x3d_s_facebook_13x6x1_kinetics400_rgb_20201027-623825a0.pth)[1] | -|[x3d_m_16x5x1_facebook_kinetics400_rgb](/configs/recognition/x3d/x3d_m_16x5x1_facebook_kinetics400_rgb.py)|短边 320| X3D_M | 75.0 | 75.6 | 75.1 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] | 76.2 [[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)] | [ckpt](https://download.openmmlab.com/mmaction/recognition/x3d/facebook/x3d_m_facebook_16x5x1_kinetics400_rgb_20201027-3f42382a.pth)[1] | +| 配置文件 | 分辨率 | 主干网络 | top1 10-view | top1 30-view | 参考代码的 top1 10-view | 参考代码的 top1 30-view | ckpt | +| :--------------------------------------------------------------------------------------------------------- | :----: | :---: | :----------: | :----------: | :----------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------: | +| [x3d_s_13x6x1_facebook_kinetics400_rgb](/configs/recognition/x3d/x3d_s_13x6x1_facebook_kinetics400_rgb.py) | 短边 320 | X3D_S | 72.7 | 73.2 | 73.1 \[[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)\] | 73.5 \[[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)\] | [ckpt](https://download.openmmlab.com/mmaction/recognition/x3d/facebook/x3d_s_facebook_13x6x1_kinetics400_rgb_20201027-623825a0.pth)\[1\] | +| [x3d_m_16x5x1_facebook_kinetics400_rgb](/configs/recognition/x3d/x3d_m_16x5x1_facebook_kinetics400_rgb.py) | 短边 320 | X3D_M | 75.0 | 75.6 | 75.1 \[[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)\] | 76.2 \[[SlowFast](https://github.com/facebookresearch/SlowFast/blob/master/MODEL_ZOO.md)\] | [ckpt](https://download.openmmlab.com/mmaction/recognition/x3d/facebook/x3d_m_facebook_16x5x1_kinetics400_rgb_20201027-3f42382a.pth)\[1\] | -[1] 这里的模型是从 [SlowFast](https://github.com/facebookresearch/SlowFast/) 代码库中导入并在 MMAction2 使用的数据上进行测试的。目前仅支持 X3D 模型的测试,训练部分将会在近期提供。 +\[1\] 这里的模型是从 [SlowFast](https://github.com/facebookresearch/SlowFast/) 代码库中导入并在 MMAction2 使用的数据上进行测试的。目前仅支持 X3D 模型的测试,训练部分将会在近期提供。 注: @@ -49,4 +49,4 @@ python tools/test.py configs/recognition/x3d/x3d_s_13x6x1_facebook_kinetics400_r --out result.json --average-clips prob ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86) 中的 **测试某个数据集** 部分。 diff --git a/configs/recognition_audio/resnet/README.md 
b/configs/recognition_audio/resnet/README.md
index 40b143142e..7d1526165c 100644
--- a/configs/recognition_audio/resnet/README.md
+++ b/configs/recognition_audio/resnet/README.md
@@ -12,6 +12,7 @@
 We present Audiovisual SlowFast Networks, an architecture for integrated audiovisual perception. AVSlowFast has Slow and Fast visual pathways that are deeply integrated with a Faster Audio pathway to model vision and sound in a unified representation. We fuse audio and visual features at multiple layers, enabling audio to contribute to the formation of hierarchical audiovisual concepts. To overcome training difficulties that arise from different learning dynamics for audio and visual modalities, we introduce DropPathway, which randomly drops the Audio pathway during training as an effective regularization technique. Inspired by prior studies in neuroscience, we perform hierarchical audiovisual synchronization to learn joint audiovisual features. We report state-of-the-art results on six video action classification and detection datasets, perform detailed ablation studies, and show the generalization of AVSlowFast to learn self-supervised audiovisual features. Code will be made available at: https://github.com/facebookresearch/SlowFast.
+
    @@ -20,10 +21,10 @@ tecture for integrated audiovisual perception. AVSlowFast has Slow and Fast visu ### Kinetics-400 -|config | n_fft | gpus | backbone |pretrain| top1 acc/delta| top5 acc/delta | inference_time(video/s) | gpu_mem(M)| ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tsn_r18_64x1x1_100e_kinetics400_audio_feature](/configs/recognition_audio/resnet/tsn_r18_64x1x1_100e_kinetics400_audio_feature.py)|1024|8| ResNet18 | None |19.7|35.75|x|1897|[ckpt](https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/tsn_r18_64x1x1_100e_kinetics400_audio_feature_20201012-bf34df6c.pth)|[log](https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/20201010_144630.log)|[json](https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/20201010_144630.log.json)| -|[tsn_r18_64x1x1_100e_kinetics400_audio_feature](/configs/recognition_audio/resnet/tsn_r18_64x1x1_100e_kinetics400_audio_feature.py) + [tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py)|1024|8| ResNet(18+50) | None |71.50(+0.39)|90.18(+0.14)|x|x|x|x|x| +| config | n_fft | gpus | backbone | pretrain | top1 acc/delta | top5 acc/delta | inference_time(video/s) | gpu_mem(M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :---: | :--: | :-----------: | :------: | :------------: | :------------: | :---------------------: | :--------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r18_64x1x1_100e_kinetics400_audio_feature](/configs/recognition_audio/resnet/tsn_r18_64x1x1_100e_kinetics400_audio_feature.py) | 1024 | 8 | ResNet18 | None | 19.7 | 35.75 | x | 1897 | [ckpt](https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/tsn_r18_64x1x1_100e_kinetics400_audio_feature_20201012-bf34df6c.pth) | [log](https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/20201010_144630.log) | [json](https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/20201010_144630.log.json) | +| [tsn_r18_64x1x1_100e_kinetics400_audio_feature](/configs/recognition_audio/resnet/tsn_r18_64x1x1_100e_kinetics400_audio_feature.py) + [tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py) | 1024 | 8 | ResNet(18+50) | None | 71.50(+0.39) | 90.18(+0.14) | x | x | x | x | x | :::{note} @@ -35,7 +36,7 @@ tecture for integrated audiovisual perception. 
AVSlowFast has Slow and Fast visu ::: -For more details on data preparation, you can refer to ``Prepare audio`` in [Data Preparation](/docs/data_preparation.md). +For more details on data preparation, you can refer to `Prepare audio` in [Data Preparation](/docs/data_preparation.md). ## Train @@ -81,8 +82,8 @@ For multi-modality fusion, you can use the simple [script](/tools/analysis/repor python tools/analysis/report_accuracy.py --scores ${AUDIO_RESULT_PKL} ${VISUAL_RESULT_PKL} --datalist data/kinetics400/kinetics400_val_list_rawframes.txt --coefficient 1 1 ``` -+ AUDIO_RESULT_PKL: The saved output file of `tools/test.py` by the argument `--out`. -+ VISUAL_RESULT_PKL: The saved output file of `tools/test.py` by the argument `--out`. +- AUDIO_RESULT_PKL: The saved output file of `tools/test.py` by the argument `--out`. +- VISUAL_RESULT_PKL: The saved output file of `tools/test.py` by the argument `--out`. ## Citation diff --git a/configs/recognition_audio/resnet/README_zh-CN.md b/configs/recognition_audio/resnet/README_zh-CN.md index bf1188ff46..d6b1c3f33b 100644 --- a/configs/recognition_audio/resnet/README_zh-CN.md +++ b/configs/recognition_audio/resnet/README_zh-CN.md @@ -17,10 +17,10 @@ ### Kinetics-400 -|配置文件 | n_fft | GPU 数量 | 主干网络 |预训练| top1 acc/delta| top5 acc/delta | 推理时间 (video/s) | GPU 显存占用 (M)| ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| -|[tsn_r18_64x1x1_100e_kinetics400_audio_feature](/configs/recognition_audio/resnet/tsn_r18_64x1x1_100e_kinetics400_audio_feature.py)|1024|8| ResNet18 | None |19.7|35.75|x|1897|[ckpt](https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/tsn_r18_64x1x1_100e_kinetics400_audio_feature_20201012-bf34df6c.pth)|[log](https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/20201010_144630.log)|[json](https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/20201010_144630.log.json)| -|[tsn_r18_64x1x1_100e_kinetics400_audio_feature](/configs/recognition_audio/resnet/tsn_r18_64x1x1_100e_kinetics400_audio_feature.py) + [tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py)|1024|8| ResNet(18+50) | None |71.50(+0.39)|90.18(+0.14)|x|x|x|x|x| +| 配置文件 | n_fft | GPU 数量 | 主干网络 | 预训练 | top1 acc/delta | top5 acc/delta | 推理时间 (video/s) | GPU 显存占用 (M) | ckpt | log | json | +| :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :---: | :----: | :-----------: | :--: | :------------: | :------------: | :------------: | :----------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------------------------------------------------: | +| [tsn_r18_64x1x1_100e_kinetics400_audio_feature](/configs/recognition_audio/resnet/tsn_r18_64x1x1_100e_kinetics400_audio_feature.py) | 1024 | 8 | ResNet18 | None | 19.7 | 
35.75 | x | 1897 | [ckpt](https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/tsn_r18_64x1x1_100e_kinetics400_audio_feature_20201012-bf34df6c.pth) | [log](https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/20201010_144630.log) | [json](https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/20201010_144630.log.json) | +| [tsn_r18_64x1x1_100e_kinetics400_audio_feature](/configs/recognition_audio/resnet/tsn_r18_64x1x1_100e_kinetics400_audio_feature.py) + [tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb](/configs/recognition/tsn/tsn_r50_video_320p_1x1x3_100e_kinetics400_rgb.py) | 1024 | 8 | ResNet(18+50) | None | 71.50(+0.39) | 90.18(+0.14) | x | x | x | x | x | 注: @@ -49,7 +49,7 @@ python tools/train.py configs/audio_recognition/tsn_r50_64x1x1_100e_kinetics400_ --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E8%AE%AD%E7%BB%83%E9%85%8D%E7%BD%AE) 中的 **训练配置** 部分。 ## 如何测试 @@ -67,7 +67,7 @@ python tools/test.py configs/audio_recognition/tsn_r50_64x1x1_100e_kinetics400_a --out result.json ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86) 中的 **测试某个数据集** 部分。 ## 融合 @@ -77,5 +77,5 @@ python tools/test.py configs/audio_recognition/tsn_r50_64x1x1_100e_kinetics400_a python tools/analysis/report_accuracy.py --scores ${AUDIO_RESULT_PKL} ${VISUAL_RESULT_PKL} --datalist data/kinetics400/kinetics400_val_list_rawframes.txt --coefficient 1 1 ``` -+ AUDIO_RESULT_PKL: `tools/test.py` 脚本通过 `--out` 选项存储的输出文件。 -+ VISUAL_RESULT_PKL: `tools/test.py` 脚本通过 `--out` 选项存储的输出文件。 +- AUDIO_RESULT_PKL: `tools/test.py` 脚本通过 `--out` 选项存储的输出文件。 +- VISUAL_RESULT_PKL: `tools/test.py` 脚本通过 `--out` 选项存储的输出文件。 diff --git a/configs/skeleton/2s-agcn/README.md b/configs/skeleton/2s-agcn/README.md index 2bfab3c7a7..651142af70 100644 --- a/configs/skeleton/2s-agcn/README.md +++ b/configs/skeleton/2s-agcn/README.md @@ -11,6 +11,7 @@ In skeleton-based action recognition, graph convolutional networks (GCNs), which model the human body skeletons as spatiotemporal graphs, have achieved remarkable performance. However, in existing GCN-based methods, the topology of the graph is set manually, and it is fixed over all layers and input samples. This may not be optimal for the hierarchical GCN and diverse samples in action recognition tasks. In addition, the second-order information (the lengths and directions of bones) of the skeleton data, which is naturally more informative and discriminative for action recognition, is rarely investigated in existing methods. In this work, we propose a novel two-stream adaptive graph convolutional network (2s-AGCN) for skeleton-based action recognition. The topology of the graph in our model can be either uniformly or individually learned by the BP algorithm in an end-to-end manner. This data-driven method increases the flexibility of the model for graph construction and brings more generality to adapt to various data samples. Moreover, a two-stream framework is proposed to model both the first-order and the second-order information simultaneously, which shows notable improvement for the recognition accuracy. 
Extensive experiments on the two large-scale datasets, NTU-RGBD and Kinetics-Skeleton, demonstrate that the performance of our model exceeds the state-of-the-art with a significant margin. +
    @@ -19,10 +20,10 @@ In skeleton-based action recognition, graph convolutional networks (GCNs), which ### NTU60_XSub -| config | type | gpus | backbone | Top-1 | ckpt | log | json | -| :----------------------------------------------------------- | :------------: | :---: | :----------: | :---: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [2sagcn_80e_ntu60_xsub_keypoint_3d](/configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d.py) | joint | 1 | AGCN | 86.06 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d-3bed61ba.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d.json) | -| [2sagcn_80e_ntu60_xsub_bone_3d](/configs/skeleton/ss-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py) | bone | 2 | AGCN | 86.89 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d-278b8815.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d.json) | +| config | type | gpus | backbone | Top-1 | ckpt | log | json | +| :-------------------------------------------------------------------------------------------------- | :---: | :--: | :------: | :---: | :-----------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------: | +| [2sagcn_80e_ntu60_xsub_keypoint_3d](/configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d.py) | joint | 1 | AGCN | 86.06 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d-3bed61ba.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d.json) | +| [2sagcn_80e_ntu60_xsub_bone_3d](/configs/skeleton/ss-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py) | bone | 2 | AGCN | 86.89 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d-278b8815.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d.json) | ## Train diff --git a/configs/skeleton/2s-agcn/README_zh-CN.md b/configs/skeleton/2s-agcn/README_zh-CN.md index ae7cc00a20..f32b71a092 100644 --- a/configs/skeleton/2s-agcn/README_zh-CN.md +++ b/configs/skeleton/2s-agcn/README_zh-CN.md @@ -18,10 +18,10 @@ ### NTU60_XSub 
-| 配置文件 | 数据格式 | GPU 数量 | 主干网络 | top1 准确率 | ckpt | log | json | -| :----------------------------------------------------------- | :------------: | :---: | :----------: | :---: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [2sagcn_80e_ntu60_xsub_keypoint_3d](/configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d.py) | joint | 1 | AGCN | 86.06 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d-3bed61ba.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d.json) | -| [2sagcn_80e_ntu60_xsub_bone_3d](/configs/skeleton/ss-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py) | bone | 2 | AGCN | 86.89 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d-278b8815.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d.json) | +| 配置文件 | 数据格式 | GPU 数量 | 主干网络 | top1 准确率 | ckpt | log | json | +| :-------------------------------------------------------------------------------------------------- | :---: | :----: | :--: | :------: | :-----------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------: | +| [2sagcn_80e_ntu60_xsub_keypoint_3d](/configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d.py) | joint | 1 | AGCN | 86.06 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d-3bed61ba.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_keypoint_3d/2sagcn_80e_ntu60_xsub_keypoint_3d.json) | +| [2sagcn_80e_ntu60_xsub_bone_3d](/configs/skeleton/ss-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py) | bone | 2 | AGCN | 86.89 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d-278b8815.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d/2sagcn_80e_ntu60_xsub_bone_3d.json) | ## 如何训练 @@ -47,7 +47,7 @@ python tools/train.py configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E8%AE%AD%E7%BB%83%E9%85%8D%E7%BD%AE) 中的 **训练配置** 部分。 ## 如何测试 @@ -73,4 +73,4 @@ python tools/test.py 
configs/skeleton/2s-agcn/2sagcn_80e_ntu60_xsub_bone_3d.py \ --out bone_result.pkl ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86) 中的 **测试某个数据集** 部分。 diff --git a/configs/skeleton/posec3d/README.md b/configs/skeleton/posec3d/README.md index 60c47f2051..e8837c85d6 100644 --- a/configs/skeleton/posec3d/README.md +++ b/configs/skeleton/posec3d/README.md @@ -11,6 +11,7 @@ Human skeleton, as a compact representation of human action, has received increasing attention in recent years. Many skeleton-based action recognition methods adopt graph convolutional networks (GCN) to extract features on top of human skeletons. Despite the positive results shown in previous works, GCN-based methods are subject to limitations in robustness, interoperability, and scalability. In this work, we propose PoseC3D, a new approach to skeleton-based action recognition, which relies on a 3D heatmap stack instead of a graph sequence as the base representation of human skeletons. Compared to GCN-based methods, PoseC3D is more effective in learning spatiotemporal features, more robust against pose estimation noises, and generalizes better in cross-dataset settings. Also, PoseC3D can handle multiple-person scenarios without additional computation cost, and its features can be easily integrated with other modalities at early fusion stages, which provides a great design space to further boost the performance. On four challenging datasets, PoseC3D consistently obtains superior performance, when used alone on skeletons and in combination with the RGB modality. +
    @@ -53,39 +54,39 @@ Human skeleton, as a compact representation of human action, has received increa ### FineGYM -|config |pseudo heatmap | gpus | backbone | Mean Top-1 | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:-:| -|[slowonly_r50_u48_240e_gym_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint.py) |keypoint |8 x 2| SlowOnly-R50 |93.7 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint-b07a98a0.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint.json) | -|[slowonly_r50_u48_240e_gym_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb.py) |limb |8 x 2| SlowOnly-R50 |94.0 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb-c0d7b482.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb.json) | -|Fusion | || |94.3 | | | | +| config | pseudo heatmap | gpus | backbone | Mean Top-1 | ckpt | log | json | +| :---------------------------------------------------------------------------------------------------- | :------------: | :---: | :----------: | :--------: | :-------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------: | +| [slowonly_r50_u48_240e_gym_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint.py) | keypoint | 8 x 2 | SlowOnly-R50 | 93.7 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint-b07a98a0.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint.json) | +| [slowonly_r50_u48_240e_gym_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb.py) | limb | 8 x 2 | SlowOnly-R50 | 94.0 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb-c0d7b482.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb.json) | +| Fusion | | | | 94.3 | | | | ### NTU60_XSub -| config | pseudo heatmap | gpus | backbone | Top-1 | ckpt | log | json | -| :----------------------------------------------------------- | :------------: | :---: | :----------: | :---: | :----------------------------------------------------------: | :----------------------------------------------------------: | 
:----------------------------------------------------------: | +| config | pseudo heatmap | gpus | backbone | Top-1 | ckpt | log | json | +| :------------------------------------------------------------------------------------------------------------------ | :------------: | :---: | :----------: | :---: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------: | | [slowonly_r50_u48_240e_ntu60_xsub_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint.py) | keypoint | 8 x 2 | SlowOnly-R50 | 93.7 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint-f3adabf1.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint.json) | -| [slowonly_r50_u48_240e_ntu60_xsub_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb.py) | limb | 8 x 2 | SlowOnly-R50 | 93.4 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb-1d69006a.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb.json) | -| Fusion | | | | 94.1 | | | | +| [slowonly_r50_u48_240e_ntu60_xsub_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb.py) | limb | 8 x 2 | SlowOnly-R50 | 93.4 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb-1d69006a.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb.json) | +| Fusion | | | | 94.1 | | | | ### NTU120_XSub -| config | pseudo heatmap | gpus | backbone | Top-1 | ckpt | log | json | -| :----------------------------------------------------------- | :------------: | :---: | :----------: | :---: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | +| config | pseudo heatmap | gpus | backbone | Top-1 | ckpt | log | json | +| :-------------------------------------------------------------------------------------------------------------------- | :------------: | :---: | :----------: | :---: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------: | 
:-------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------: | | [slowonly_r50_u48_240e_ntu120_xsub_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py) | keypoint | 8 x 2 | SlowOnly-R50 | 86.3 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint-6736b03f.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint.json) | -| [slowonly_r50_u48_240e_ntu120_xsub_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb.py) | limb | 8 x 2 | SlowOnly-R50 | 85.7 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb-803c2317.pth?) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.json) | -| Fusion | | | | 86.9 | | | | +| [slowonly_r50_u48_240e_ntu120_xsub_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb.py) | limb | 8 x 2 | SlowOnly-R50 | 85.7 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb-803c2317.pth?) 
| [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.json) | +| Fusion | | | | 86.9 | | | | ### UCF101 -| config | pseudo heatmap | gpus | backbone | Top-1 | ckpt | log | json | -| :----------------------------------------------------------- | :------------: | :---: | :----------: | :---: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint](/configs/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint.py) | keypoint | 8 | SlowOnly-R50 | 87.0 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint-cae8aa4a.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint.json) | +| config | pseudo heatmap | gpus | backbone | Top-1 | ckpt | log | json | +| :---------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :------------: | :--: | :----------: | :---: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint](/configs/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint.py) | keypoint | 8 | SlowOnly-R50 | 87.0 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint-cae8aa4a.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint.json) | ### HMDB51 -| config | pseudo heatmap | gpus | backbone | Top-1 | ckpt | log | json | -| :----------------------------------------------------------- | :------------: | :---: | :----------: | :---: | 
:----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint](/configs/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint.py) | keypoint | 8 | SlowOnly-R50 | 69.3 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint-76ffdd8b.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint.json) | +| config | pseudo heatmap | gpus | backbone | Top-1 | ckpt | log | json | +| :---------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :------------: | :--: | :----------: | :---: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint](/configs/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint.py) | keypoint | 8 | SlowOnly-R50 | 69.3 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint-76ffdd8b.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint.json) | :::{note} diff --git a/configs/skeleton/posec3d/README_zh-CN.md b/configs/skeleton/posec3d/README_zh-CN.md index 4c4cdf8d46..c761313a32 100644 --- a/configs/skeleton/posec3d/README_zh-CN.md +++ b/configs/skeleton/posec3d/README_zh-CN.md @@ -53,45 +53,45 @@ ### FineGYM -|配置文件 | 热图类型 | GPU 数量 | 主干网络 | Mean Top-1 | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:-:| -|[slowonly_r50_u48_240e_gym_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint.py) | 关键点 |8 x 2| SlowOnly-R50 |93.7 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint-b07a98a0.pth) | 
[log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint.json) | -|[slowonly_r50_u48_240e_gym_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb.py) | 肢体 |8 x 2| SlowOnly-R50 |94.0 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb-c0d7b482.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb.json) | -| 融合预测结果 | | | |94.3 | | | | +| 配置文件 | 热图类型 | GPU 数量 | 主干网络 | Mean Top-1 | ckpt | log | json | +| :---------------------------------------------------------------------------------------------------- | :--: | :----: | :----------: | :--------: | :-------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------: | +| [slowonly_r50_u48_240e_gym_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint.py) | 关键点 | 8 x 2 | SlowOnly-R50 | 93.7 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint-b07a98a0.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint/slowonly_r50_u48_240e_gym_keypoint.json) | +| [slowonly_r50_u48_240e_gym_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb.py) | 肢体 | 8 x 2 | SlowOnly-R50 | 94.0 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb-c0d7b482.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_gym_limb/slowonly_r50_u48_240e_gym_limb.json) | +| 融合预测结果 | | | | 94.3 | | | | ### NTU60_XSub -|配置文件 | 热图类型 | GPU 数量 | 主干网络 | Top-1 | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:-:| -| [slowonly_r50_u48_240e_ntu60_xsub_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint.py) | 关键点 | 8 x 2 | SlowOnly-R50 | 93.7 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint-f3adabf1.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint.json) | -| [slowonly_r50_u48_240e_ntu60_xsub_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb.py) | 肢体 | 8 x 2 | SlowOnly-R50 | 93.4 | 
[ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb-1d69006a.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb.json) | -| 融合预测结果 | | | | 94.1 | | | | +| 配置文件 | 热图类型 | GPU 数量 | 主干网络 | Top-1 | ckpt | log | json | +| :------------------------------------------------------------------------------------------------------------------ | :--: | :----: | :----------: | :---: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowonly_r50_u48_240e_ntu60_xsub_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint.py) | 关键点 | 8 x 2 | SlowOnly-R50 | 93.7 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint-f3adabf1.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_keypoint/slowonly_r50_u48_240e_ntu60_xsub_keypoint.json) | +| [slowonly_r50_u48_240e_ntu60_xsub_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb.py) | 肢体 | 8 x 2 | SlowOnly-R50 | 93.4 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb-1d69006a.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu60_xsub_limb/slowonly_r50_u48_240e_ntu60_xsub_limb.json) | +| 融合预测结果 | | | | 94.1 | | | | ### NTU120_XSub -|配置文件 | 热图类型 | GPU 数量 | 主干网络 | Top-1 | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:-:| -| [slowonly_r50_u48_240e_ntu120_xsub_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py) | 关键点 | 8 x 2 | SlowOnly-R50 | 86.3 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint-6736b03f.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint.json) | -| [slowonly_r50_u48_240e_ntu120_xsub_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb.py) | 肢体 | 8 x 2 | SlowOnly-R50 | 85.7 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb-803c2317.pth?) 
| [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.json) | -| 融合预测结果 | | | | 86.9 | | | | +| 配置文件 | 热图类型 | GPU 数量 | 主干网络 | Top-1 | ckpt | log | json | +| :-------------------------------------------------------------------------------------------------------------------- | :--: | :----: | :----------: | :---: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowonly_r50_u48_240e_ntu120_xsub_keypoint](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py) | 关键点 | 8 x 2 | SlowOnly-R50 | 86.3 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint-6736b03f.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint/slowonly_r50_u48_240e_ntu120_xsub_keypoint.json) | +| [slowonly_r50_u48_240e_ntu120_xsub_limb](/configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb.py) | 肢体 | 8 x 2 | SlowOnly-R50 | 85.7 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb-803c2317.pth?) 
| [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_limb/slowonly_r50_u48_240e_ntu120_xsub_limb.json) | +| 融合预测结果 | | | | 86.9 | | | | ### UCF101 -|配置文件 | 热图类型 | GPU 数量 | 主干网络 | Top-1 | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:-:| -| [slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint](/configs/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint.py) | 关键点 | 8 | SlowOnly-R50 | 87.0 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint-cae8aa4a.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint.json) | +| 配置文件 | 热图类型 | GPU 数量 | 主干网络 | Top-1 | ckpt | log | json | +| :---------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :--: | :----: | :----------: | :---: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint](/configs/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint.py) | 关键点 | 8 | SlowOnly-R50 | 87.0 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint-cae8aa4a.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_ucf101_split1_keypoint.json) | ### HMDB51 -|配置文件 | 热图类型 | GPU 数量 | 主干网络 | Top-1 | ckpt | log| json| -|:--|:--:|:--:|:--:|:--:|:--:|:--:|:-:| -| [slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint](/configs/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint.py) | 关键点 | 8 | SlowOnly-R50 | 69.3 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint-76ffdd8b.pth) | 
[log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint.json) | +| 配置文件 | 热图类型 | GPU 数量 | 主干网络 | Top-1 | ckpt | log | json | +| :---------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :--: | :----: | :----------: | :---: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| [slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint](/configs/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint.py) | 关键点 | 8 | SlowOnly-R50 | 69.3 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint-76ffdd8b.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/posec3d/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint/slowonly_kinetics400_pretrained_r50_u48_120e_hmdb51_split1_keypoint.json) | 注: 1. 这里的 **GPU 数量** 指的是得到模型权重文件对应的 GPU 个数。默认地,MMAction2 所提供的配置文件对应使用 8 块 GPU 进行训练的情况。 - 依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。 - 如,lr=0.2 对应 8 GPUs x 16 video/gpu,以及 lr=0.4 对应 16 GPUs x 16 video/gpu。 + 依据 [线性缩放规则](https://arxiv.org/abs/1706.02677),当用户使用不同数量的 GPU 或者每块 GPU 处理不同视频个数时,需要根据批大小等比例地调节学习率。 + 如,lr=0.2 对应 8 GPUs x 16 video/gpu,以及 lr=0.4 对应 16 GPUs x 16 video/gpu。 2. 
用户可以参照 [准备骨骼数据集](https://github.com/open-mmlab/mmaction2/blob/master/tools/data/skeleton/README_zh-CN.md) 来获取以上配置文件使用的骨骼标注。 ## 如何训练 @@ -112,7 +112,7 @@ python tools/train.py configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoin 有关自定义数据集上的训练,可以参考 [Custom Dataset Training](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/custom_dataset_training.md)。 -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E8%AE%AD%E7%BB%83%E9%85%8D%E7%BD%AE) 中的 **训练配置** 部分。 ## 如何测试 @@ -130,4 +130,4 @@ python tools/test.py configs/skeleton/posec3d/slowonly_r50_u48_240e_gym_keypoint --out result.pkl ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86) 中的 **测试某个数据集** 部分。 diff --git a/configs/skeleton/posec3d/custom_dataset_training.md b/configs/skeleton/posec3d/custom_dataset_training.md index c01a183750..cb5b2f647f 100644 --- a/configs/skeleton/posec3d/custom_dataset_training.md +++ b/configs/skeleton/posec3d/custom_dataset_training.md @@ -1,42 +1,41 @@ - # Custom Dataset Training with PoseC3D We provide a step-by-step tutorial on how to train your custom dataset with PoseC3D. 1. First, you should know that action recognition with PoseC3D requires skeleton information only and for that you need to prepare your custom annotation files (for training and validation). To start with, you need to replace the placeholder `mmdet_root` and `mmpose_root` in `ntu_pose_extraction.py` with your installation path. Then you need to take advantage of [ntu_pose_extraction.py](https://github.com/open-mmlab/mmaction2/blob/90fc8440961987b7fe3ee99109e2c633c4e30158/tools/data/skeleton/ntu_pose_extraction.py) as shown in [Prepare Annotations](https://github.com/open-mmlab/mmaction2/blob/master/tools/data/skeleton/README.md#prepare-annotations) to extract 2D keypoints for each video in your custom dataset. The command looks like (assuming the name of your video is `some_video_from_my_dataset.mp4`): - ```shell - # You can use the above command to generate pickle files for all of your training and validation videos. - python ntu_pose_extraction.py some_video_from_my_dataset.mp4 some_video_from_my_dataset.pkl - ``` + ```shell + # You can use the above command to generate pickle files for all of your training and validation videos. + python ntu_pose_extraction.py some_video_from_my_dataset.mp4 some_video_from_my_dataset.pkl + ``` - @kennymckormick's [note](https://github.com/open-mmlab/mmaction2/issues/1216#issuecomment-950130079): + @kennymckormick's [note](https://github.com/open-mmlab/mmaction2/issues/1216#issuecomment-950130079): - > One only thing you may need to change is that: since ntu_pose_extraction.py is developed specifically for pose extraction of NTU videos, you can skip the [ntu_det_postproc](https://github.com/open-mmlab/mmaction2/blob/90fc8440961987b7fe3ee99109e2c633c4e30158/tools/data/skeleton/ntu_pose_extraction.py#L307) step when using this script for extracting pose from your custom video datasets. 
+ > One only thing you may need to change is that: since ntu_pose_extraction.py is developed specifically for pose extraction of NTU videos, you can skip the [ntu_det_postproc](https://github.com/open-mmlab/mmaction2/blob/90fc8440961987b7fe3ee99109e2c633c4e30158/tools/data/skeleton/ntu_pose_extraction.py#L307) step when using this script for extracting pose from your custom video datasets. 2. Then, you will collect all the pickle files into one list for training (and, of course, for validation) and save them as a single file (like `custom_dataset_train.pkl` or `custom_dataset_val.pkl`). At that time, you finalize preparing annotation files for your custom dataset. 3. Next, you may use the following script (with some alterations according to your needs) for training as shown in [PoseC3D/Train](https://github.com/open-mmlab/mmaction2/blob/master/configs/skeleton/posec3d/README.md#train): `python tools/train.py configs/skeleton/posec3d/slowonly_r50_u48_240e_ntu120_xsub_keypoint.py --work-dir work_dirs/slowonly_r50_u48_240e_ntu120_xsub_keypoint --validate --test-best --gpus 2 --seed 0 --deterministic`: - - Before running the above script, you need to modify the variables to initialize with your newly made annotation files: + - Before running the above script, you need to modify the variables to initialize with your newly made annotation files: - ```python - model = dict( - ... - cls_head=dict( - ... - num_classes=4, # Your class number - ... - ), - ... - ) + ```python + model = dict( + ... + cls_head=dict( + ... + num_classes=4, # Your class number + ... + ), + ... + ) - ann_file_train = 'data/posec3d/custom_dataset_train.pkl' # Your annotation for training - ann_file_val = 'data/posec3d/custom_dataset_val.pkl' # Your annotation for validation + ann_file_train = 'data/posec3d/custom_dataset_train.pkl' # Your annotation for training + ann_file_val = 'data/posec3d/custom_dataset_val.pkl' # Your annotation for validation - load_from = 'pretrained_weight.pth' # Your can use released weights for initialization, set to None if training from scratch + load_from = 'pretrained_weight.pth' # Your can use released weights for initialization, set to None if training from scratch - # You can also alter the hyper parameters or training schedule - ``` + # You can also alter the hyper parameters or training schedule + ``` With that, your machine should start its work to let you grab a cup of coffee and watch how the training goes. diff --git a/configs/skeleton/stgcn/README.md b/configs/skeleton/stgcn/README.md index 0b2c3f481c..1b8f435d57 100644 --- a/configs/skeleton/stgcn/README.md +++ b/configs/skeleton/stgcn/README.md @@ -11,6 +11,7 @@ Dynamics of human body skeletons convey significant information for human action recognition. Conventional approaches for modeling skeletons usually rely on hand-crafted parts or traversal rules, thus resulting in limited expressive power and difficulties of generalization. In this work, we propose a novel model of dynamic skeletons called Spatial-Temporal Graph Convolutional Networks (ST-GCN), which moves beyond the limitations of previous methods by automatically learning both the spatial and temporal patterns from data. This formulation not only leads to greater expressive power but also stronger generalization capability. On two large datasets, Kinetics and NTU-RGBD, it achieves substantial improvements over mainstream methods. +
    @@ -19,19 +20,19 @@ Dynamics of human body skeletons convey significant information for human action ### NTU60_XSub -| config | keypoint | gpus | backbone | Top-1 | ckpt | log | json | -| :----------------------------------------------------------- | :------------: | :---: | :----------: | :---: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [stgcn_80e_ntu60_xsub_keypoint](/configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py) | 2d | 2 | STGCN | 86.91 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint-e7bb9653.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint.json) | -| [stgcn_80e_ntu60_xsub_keypoint_3d](/configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d.py) | 3d | 1 | STGCN | 84.61 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d-13e7ccf0.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d.json) | +| config | keypoint | gpus | backbone | Top-1 | ckpt | log | json | +| :---------------------------------------------------------------------------------------------- | :------: | :--: | :------: | :---: | :-------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------: | +| [stgcn_80e_ntu60_xsub_keypoint](/configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py) | 2d | 2 | STGCN | 86.91 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint-e7bb9653.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint.json) | +| [stgcn_80e_ntu60_xsub_keypoint_3d](/configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d.py) | 3d | 1 | STGCN | 84.61 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d-13e7ccf0.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d.json) | ### BABEL -| config | gpus | backbone | Top-1 | Mean Top-1 | Top-1 Official (AGCN) | Mean Top-1 Official (AGCN) | ckpt | log | -| ------------------------------------------------------------ | :--: | :------: | :-------: | :--------: | :-------------------: | :------------------------: | 
:----------------------------------------------------------: | :----------------------------------------------------------: | -| [stgcn_80e_babel60](/configs/skeleton/stgcn/stgcn_80e_babel60.py) | 8 | ST-GCN | **42.39** | **28.28** | 41.14 | 24.46 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel60-3d206418.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel60.log) | -| [stgcn_80e_babel60_wfl](/configs/skeleton/stgcn/stgcn_80e_babel60_wfl.py) | 8 | ST-GCN | **40.31** | 29.79 | 33.41 | **30.42** | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60_wfl/stgcn_80e_babel60_wfl-1a9102d7.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel60_wfl.log) | -| [stgcn_80e_babel120](/configs/skeleton/stgcn/stgcn_80e_babel120.py) | 8 | ST-GCN | **38.95** | **20.58** | 38.41 | 17.56 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel120/stgcn_80e_babel120-e41eb6d7.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel120.log) | -| [stgcn_80e_babel120_wfl](/configs/skeleton/stgcn/stgcn_80e_babel120_wfl.py) | 8 | ST-GCN | **33.00** | 24.33 | 27.91 | **26.17*** | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel120_wfl/stgcn_80e_babel120_wfl-3f2c100d.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel120_wfl.log) | +| config | gpus | backbone | Top-1 | Mean Top-1 | Top-1 Official (AGCN) | Mean Top-1 Official (AGCN) | ckpt | log | +| --------------------------------------------------------------------------- | :--: | :------: | :-------: | :--------: | :-------------------: | :------------------------: | :-----------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------: | +| [stgcn_80e_babel60](/configs/skeleton/stgcn/stgcn_80e_babel60.py) | 8 | ST-GCN | **42.39** | **28.28** | 41.14 | 24.46 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel60-3d206418.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel60.log) | +| [stgcn_80e_babel60_wfl](/configs/skeleton/stgcn/stgcn_80e_babel60_wfl.py) | 8 | ST-GCN | **40.31** | 29.79 | 33.41 | **30.42** | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60_wfl/stgcn_80e_babel60_wfl-1a9102d7.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel60_wfl.log) | +| [stgcn_80e_babel120](/configs/skeleton/stgcn/stgcn_80e_babel120.py) | 8 | ST-GCN | **38.95** | **20.58** | 38.41 | 17.56 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel120/stgcn_80e_babel120-e41eb6d7.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel120.log) | +| [stgcn_80e_babel120_wfl](/configs/skeleton/stgcn/stgcn_80e_babel120_wfl.py) | 8 | ST-GCN | **33.00** | 24.33 | 27.91 | **26.17**\* | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel120_wfl/stgcn_80e_babel120_wfl-3f2c100d.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel120_wfl.log) | \* The number is copied from the 
[paper](https://arxiv.org/pdf/2106.09696.pdf), the performance of the [released checkpoints](https://github.com/abhinanda-punnakkal/BABEL/tree/main/action_recognition) for BABEL-120 is inferior. diff --git a/configs/skeleton/stgcn/README_zh-CN.md b/configs/skeleton/stgcn/README_zh-CN.md index c7e57077cd..ecb7a9bd84 100644 --- a/configs/skeleton/stgcn/README_zh-CN.md +++ b/configs/skeleton/stgcn/README_zh-CN.md @@ -17,19 +17,19 @@ ### NTU60_XSub -| 配置文件 | 骨骼点 | GPU 数量 | 主干网络 | Top-1 准确率 | ckpt | log | json | -| :----------------------------------------------------------- | :------------: | :---: | :----------: | :---: | :----------------------------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [stgcn_80e_ntu60_xsub_keypoint](/configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py) | 2d | 2 | STGCN | 86.91 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint-e7bb9653.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint.json) | -| [stgcn_80e_ntu60_xsub_keypoint_3d](/configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d.py) | 3d | 1 | STGCN | 84.61 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d-13e7ccf0.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d.json) | +| 配置文件 | 骨骼点 | GPU 数量 | 主干网络 | Top-1 准确率 | ckpt | log | json | +| :---------------------------------------------------------------------------------------------- | :-: | :----: | :---: | :-------: | :-------------------------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------------: | +| [stgcn_80e_ntu60_xsub_keypoint](/configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py) | 2d | 2 | STGCN | 86.91 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint-e7bb9653.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint.log) | [json](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint/stgcn_80e_ntu60_xsub_keypoint.json) | +| [stgcn_80e_ntu60_xsub_keypoint_3d](/configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d.py) | 3d | 1 | STGCN | 84.61 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d-13e7ccf0.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d.log) | [json](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint_3d/stgcn_80e_ntu60_xsub_keypoint_3d.json) | ### BABEL -| 
配置文件 | GPU 数量 | 主干网络 | Top-1 准确率 | 类平均 Top-1 准确率 | Top-1 准确率<br>(官方,使用 AGCN) | 类平均 Top-1 准确率<br>
    (官方,使用 AGCN) | ckpt | log | -| ------------------------------------------------------------ | :------: | :------: | :----------: | :-----------------: | :----------------------------------: | :----------------------------------------: | :----------------------------------------------------------: | :----------------------------------------------------------: | -| [stgcn_80e_babel60](/configs/skeleton/stgcn/stgcn_80e_babel60.py) | 8 | ST-GCN | **42.39** | **28.28** | 41.14 | 24.46 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel60-3d206418.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel60.log) | -| [stgcn_80e_babel60_wfl](/configs/skeleton/stgcn/stgcn_80e_babel60_wfl.py) | 8 | ST-GCN | **40.31** | 29.79 | 33.41 | **30.42** | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60_wfl/stgcn_80e_babel60_wfl-1a9102d7.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel60_wfl.log) | -| [stgcn_80e_babel120](/configs/skeleton/stgcn/stgcn_80e_babel120.py) | 8 | ST-GCN | **38.95** | **20.58** | 38.41 | 17.56 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel120/stgcn_80e_babel120-e41eb6d7.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel120.log) | -| [stgcn_80e_babel120_wfl](/configs/skeleton/stgcn/stgcn_80e_babel120_wfl.py) | 8 | ST-GCN | **33.00** | 24.33 | 27.91 | **26.17*** | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel120_wfl/stgcn_80e_babel120_wfl-3f2c100d.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel120_wfl.log) | +| 配置文件 | GPU 数量 | 主干网络 | Top-1 准确率 | 类平均 Top-1 准确率 | Top-1 准确率
<br>(官方,使用 AGCN) | 类平均 Top-1 准确率<br>
    (官方,使用 AGCN) | ckpt | log | +| --------------------------------------------------------------------------- | :----: | :----: | :-------: | :-----------: | :------------------------: | :---------------------------: | :-----------------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------: | +| [stgcn_80e_babel60](/configs/skeleton/stgcn/stgcn_80e_babel60.py) | 8 | ST-GCN | **42.39** | **28.28** | 41.14 | 24.46 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel60-3d206418.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel60.log) | +| [stgcn_80e_babel60_wfl](/configs/skeleton/stgcn/stgcn_80e_babel60_wfl.py) | 8 | ST-GCN | **40.31** | 29.79 | 33.41 | **30.42** | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60_wfl/stgcn_80e_babel60_wfl-1a9102d7.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel60_wfl.log) | +| [stgcn_80e_babel120](/configs/skeleton/stgcn/stgcn_80e_babel120.py) | 8 | ST-GCN | **38.95** | **20.58** | 38.41 | 17.56 | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel120/stgcn_80e_babel120-e41eb6d7.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel120.log) | +| [stgcn_80e_babel120_wfl](/configs/skeleton/stgcn/stgcn_80e_babel120_wfl.py) | 8 | ST-GCN | **33.00** | 24.33 | 27.91 | **26.17**\* | [ckpt](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel120_wfl/stgcn_80e_babel120_wfl-3f2c100d.pth) | [log](https://download.openmmlab.com/mmaction/skeleton/stgcn/stgcn_80e_babel60/stgcn_80e_babel120_wfl.log) | \* 注:此数字引自原 [论文](https://arxiv.org/pdf/2106.09696.pdf), 实际公开的 [模型权重](https://github.com/abhinanda-punnakkal/BABEL/tree/main/action_recognition) 精度略低一些。 @@ -49,7 +49,7 @@ python tools/train.py configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py \ --validate --seed 0 --deterministic ``` -更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#训练配置) 中的 **训练配置** 部分。 +更多训练细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E8%AE%AD%E7%BB%83%E9%85%8D%E7%BD%AE) 中的 **训练配置** 部分。 ## 如何测试 @@ -67,4 +67,4 @@ python tools/test.py configs/skeleton/stgcn/stgcn_80e_ntu60_xsub_keypoint.py \ --out result.pkl ``` -更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#测试某个数据集) 中的 **测试某个数据集** 部分。 +更多测试细节,可参考 [基础教程](/docs_zh_CN/getting_started.md#%E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86) 中的 **测试某个数据集** 部分。 diff --git a/demo/README.md b/demo/README.md index 9f0dadc810..93f85fad85 100644 --- a/demo/README.md +++ b/demo/README.md @@ -31,7 +31,7 @@ When running demos using our provided scripts, you may specify `--cfg-options` t - Update values of list/tuples. If the value to be updated is a list or a tuple. For example, the config file normally sets `workflow=[('train', 1)]`. If you want to - change this key, you may specify `--cfg-options workflow="[(train,1),(val,1)]"`. Note that the quotation mark \" is necessary to + change this key, you may specify `--cfg-options workflow="[(train,1),(val,1)]"`. Note that the quotation mark " is necessary to support list/tuple data types, and that **NO** white space is allowed inside the quotation marks in the specified value. 
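To ground the override semantics above, here is a minimal sketch of what the demo scripts effectively do with `--cfg-options` (assuming only that `mmcv` is installed; the config path and the `workflow` key are illustrative placeholders, not a recommendation):

```python
# Minimal sketch: `--cfg-options` key/value pairs are merged into the
# loaded config. `Config.fromfile` / `Config.merge_from_dict` are mmcv
# utilities; the config path below is just an illustrative choice.
from mmcv import Config

cfg = Config.fromfile(
    'configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py')

# Equivalent to passing: --cfg-options workflow="[(train,1),(val,1)]"
cfg.merge_from_dict({'workflow': [('train', 1), ('val', 1)]})
print(cfg.workflow)  # [('train', 1), ('val', 1)]
```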
## Video demo @@ -62,84 +62,84 @@ or use checkpoint url from `configs/` to directly load corresponding checkpoint, 1. Recognize a video file as input by using a TSN model on cuda by default. - ```shell - # The demo.mp4 and label_map_k400.txt are both from Kinetics-400 - python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - demo/demo.mp4 tools/data/kinetics/label_map_k400.txt - ``` + ```shell + # The demo.mp4 and label_map_k400.txt are both from Kinetics-400 + python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ + checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ + demo/demo.mp4 tools/data/kinetics/label_map_k400.txt + ``` 2. Recognize a video file as input by using a TSN model on cuda by default, loading checkpoint from url. - ```shell - # The demo.mp4 and label_map_k400.txt are both from Kinetics-400 - python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ - https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - demo/demo.mp4 tools/data/kinetics/label_map_k400.txt - ``` + ```shell + # The demo.mp4 and label_map_k400.txt are both from Kinetics-400 + python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ + https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ + demo/demo.mp4 tools/data/kinetics/label_map_k400.txt + ``` 3. Recognize a list of rawframes as input by using a TSN model on cpu. - ```shell - python demo/demo.py configs/recognition/tsn/tsn_r50_inference_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - PATH_TO_FRAMES/ LABEL_FILE --use-frames --device cpu - ``` + ```shell + python demo/demo.py configs/recognition/tsn/tsn_r50_inference_1x1x3_100e_kinetics400_rgb.py \ + checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ + PATH_TO_FRAMES/ LABEL_FILE --use-frames --device cpu + ``` 4. Recognize a video file as input by using a TSN model and then generate an mp4 file. - ```shell - # The demo.mp4 and label_map_k400.txt are both from Kinetics-400 - python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - demo/demo.mp4 tools/data/kinetics/label_map_k400.txt --out-filename demo/demo_out.mp4 - ``` + ```shell + # The demo.mp4 and label_map_k400.txt are both from Kinetics-400 + python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ + checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ + demo/demo.mp4 tools/data/kinetics/label_map_k400.txt --out-filename demo/demo_out.mp4 + ``` 5. Recognize a list of rawframes as input by using a TSN model and then generate a gif file. 
- ```shell - python demo/demo.py configs/recognition/tsn/tsn_r50_inference_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - PATH_TO_FRAMES/ LABEL_FILE --use-frames --out-filename demo/demo_out.gif - ``` + ```shell + python demo/demo.py configs/recognition/tsn/tsn_r50_inference_1x1x3_100e_kinetics400_rgb.py \ + checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ + PATH_TO_FRAMES/ LABEL_FILE --use-frames --out-filename demo/demo_out.gif + ``` 6. Recognize a video file as input by using a TSN model, then generate an mp4 file with a given resolution and resize algorithm. - ```shell - # The demo.mp4 and label_map_k400.txt are both from Kinetics-400 - python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - demo/demo.mp4 tools/data/kinetics/label_map_k400.txt --target-resolution 340 256 --resize-algorithm bilinear \ - --out-filename demo/demo_out.mp4 - ``` - - ```shell - # The demo.mp4 and label_map_k400.txt are both from Kinetics-400 - # If either dimension is set to -1, the frames are resized by keeping the existing aspect ratio - # For --target-resolution 170 -1, original resolution (340, 256) -> target resolution (170, 128) - python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - demo/demo.mp4 tools/data/kinetics/label_map_k400.txt --target-resolution 170 -1 --resize-algorithm bilinear \ - --out-filename demo/demo_out.mp4 - ``` + ```shell + # The demo.mp4 and label_map_k400.txt are both from Kinetics-400 + python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ + checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ + demo/demo.mp4 tools/data/kinetics/label_map_k400.txt --target-resolution 340 256 --resize-algorithm bilinear \ + --out-filename demo/demo_out.mp4 + ``` + + ```shell + # The demo.mp4 and label_map_k400.txt are both from Kinetics-400 + # If either dimension is set to -1, the frames are resized by keeping the existing aspect ratio + # For --target-resolution 170 -1, original resolution (340, 256) -> target resolution (170, 128) + python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ + checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ + demo/demo.mp4 tools/data/kinetics/label_map_k400.txt --target-resolution 170 -1 --resize-algorithm bilinear \ + --out-filename demo/demo_out.mp4 + ``` 7. Recognize a video file as input by using a TSN model, then generate an mp4 file with a label in a red color and fontscale 1. 
- ```shell - # The demo.mp4 and label_map_k400.txt are both from Kinetics-400 - python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - demo/demo.mp4 tools/data/kinetics/label_map_k400.txt --font-scale 1 --font-color red \ - --out-filename demo/demo_out.mp4 - ``` + ```shell + # The demo.mp4 and label_map_k400.txt are both from Kinetics-400 + python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ + checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ + demo/demo.mp4 tools/data/kinetics/label_map_k400.txt --font-scale 1 --font-color red \ + --out-filename demo/demo_out.mp4 + ``` 8. Recognize a list of rawframes as input by using a TSN model and then generate an mp4 file with 24 fps. - ```shell - python demo/demo.py configs/recognition/tsn/tsn_r50_inference_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - PATH_TO_FRAMES/ LABEL_FILE --use-frames --fps 24 --out-filename demo/demo_out.gif - ``` + ```shell + python demo/demo.py configs/recognition/tsn/tsn_r50_inference_1x1x3_100e_kinetics400_rgb.py \ + checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ + PATH_TO_FRAMES/ LABEL_FILE --use-frames --fps 24 --out-filename demo/demo_out.gif + ``` ## SpatioTemporal Action Detection Video Demo @@ -221,20 +221,20 @@ or use checkpoint url from `configs/` to directly load corresponding checkpoint, 1. Get GradCAM results of a I3D model, using a video file as input and then generate an gif file with 10 fps. - ```shell - python demo/demo_gradcam.py configs/recognition/i3d/i3d_r50_video_inference_32x2x1_100e_kinetics400_rgb.py \ - checkpoints/i3d_r50_video_32x2x1_100e_kinetics400_rgb_20200826-e31c6f52.pth demo/demo.mp4 \ - --target-layer-name backbone/layer4/1/relu --fps 10 \ - --out-filename demo/demo_gradcam.gif - ``` + ```shell + python demo/demo_gradcam.py configs/recognition/i3d/i3d_r50_video_inference_32x2x1_100e_kinetics400_rgb.py \ + checkpoints/i3d_r50_video_32x2x1_100e_kinetics400_rgb_20200826-e31c6f52.pth demo/demo.mp4 \ + --target-layer-name backbone/layer4/1/relu --fps 10 \ + --out-filename demo/demo_gradcam.gif + ``` 2. Get GradCAM results of a TSM model, using a video file as input and then generate an gif file, loading checkpoint from url. - ```shell - python demo/demo_gradcam.py configs/recognition/tsm/tsm_r50_video_inference_1x1x8_100e_kinetics400_rgb.py \ - https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_1x1x8_100e_kinetics400_rgb_20200702-a77f4328.pth \ - demo/demo.mp4 --target-layer-name backbone/layer4/1/relu --out-filename demo/demo_gradcam_tsm.gif - ``` + ```shell + python demo/demo_gradcam.py configs/recognition/tsm/tsm_r50_video_inference_1x1x8_100e_kinetics400_rgb.py \ + https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_1x1x8_100e_kinetics400_rgb_20200702-a77f4328.pth \ + demo/demo.mp4 --target-layer-name backbone/layer4/1/relu --out-filename demo/demo_gradcam_tsm.gif + ``` ## Webcam demo @@ -265,31 +265,31 @@ Assume that you are located at `$MMACTION2` and have already downloaded the chec or use checkpoint url from `configs/` to directly load corresponding checkpoint, which will be automatically saved in `$HOME/.cache/torch/checkpoints`. 1. 
Recognize the action from web camera as input by using a TSN model on cpu, averaging the score per 5 times - and outputting result labels with score higher than 0.2. + and outputting result labels with score higher than 0.2. - ```shell - python demo/webcam_demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth tools/data/kinetics/label_map_k400.txt --average-size 5 \ - --threshold 0.2 --device cpu - ``` + ```shell + python demo/webcam_demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ + checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth tools/data/kinetics/label_map_k400.txt --average-size 5 \ + --threshold 0.2 --device cpu + ``` 2. Recognize the action from web camera as input by using a TSN model on cpu, averaging the score per 5 times - and outputting result labels with score higher than 0.2, loading checkpoint from url. + and outputting result labels with score higher than 0.2, loading checkpoint from url. - ```shell - python demo/webcam_demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ - https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - tools/data/kinetics/label_map_k400.txt --average-size 5 --threshold 0.2 --device cpu - ``` + ```shell + python demo/webcam_demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ + https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ + tools/data/kinetics/label_map_k400.txt --average-size 5 --threshold 0.2 --device cpu + ``` 3. Recognize the action from web camera as input by using a I3D model on gpu by default, averaging the score per 5 times - and outputting result labels with score higher than 0.2. + and outputting result labels with score higher than 0.2. - ```shell - python demo/webcam_demo.py configs/recognition/i3d/i3d_r50_video_inference_32x2x1_100e_kinetics400_rgb.py \ - checkpoints/i3d_r50_32x2x1_100e_kinetics400_rgb_20200614-c25ef9a4.pth tools/data/kinetics/label_map_k400.txt \ - --average-size 5 --threshold 0.2 - ``` + ```shell + python demo/webcam_demo.py configs/recognition/i3d/i3d_r50_video_inference_32x2x1_100e_kinetics400_rgb.py \ + checkpoints/i3d_r50_32x2x1_100e_kinetics400_rgb_20200614-c25ef9a4.pth tools/data/kinetics/label_map_k400.txt \ + --average-size 5 --threshold 0.2 + ``` :::{note} Considering the efficiency difference for users' hardware, Some modifications might be done to suit the case. @@ -315,7 +315,7 @@ Optional arguments: - `INPUT_STEP`: Input step for sampling frames, which can help to get more spare input. If not specified , it will be set to 1. - `DEVICE_TYPE`: Type of device to run the demo. Allowed values are cuda device like `cuda:0` or `cpu`. If not specified, it will be set to `cuda:0`. - `THRESHOLD`: Threshold of prediction score for action recognition. Only label with score higher than the threshold will be shown. If not specified, it will be set to 0.01. -- `STRIDE`: By default, the demo generates a prediction for each single frame, which might cost lots of time. 
To speed up, you can set the argument `STRIDE` and then the demo will generate a prediction every `STRIDE x sample_length` frames (`sample_length` indicates the size of temporal window from which you sample frames, which equals to `clip_len x frame_interval`). For example, if the sample_length is 64 frames and you set `STRIDE` to 0.5, predictions will be generated every 32 frames. If set as 0, predictions will be generated for each frame. The desired value of `STRIDE` is (0, 1], while it also works for `STRIDE > 1` (the generated predictions will be too sparse). Default: 0. +- `STRIDE`: By default, the demo generates a prediction for each single frame, which might cost lots of time. To speed up, you can set the argument `STRIDE` and then the demo will generate a prediction every `STRIDE x sample_length` frames (`sample_length` indicates the size of temporal window from which you sample frames, which equals to `clip_len x frame_interval`). For example, if the sample_length is 64 frames and you set `STRIDE` to 0.5, predictions will be generated every 32 frames. If set as 0, predictions will be generated for each frame. The desired value of `STRIDE` is (0, 1\], while it also works for `STRIDE > 1` (the generated predictions will be too sparse). Default: 0. - `LABEL_COLOR`: Font Color of the labels in (B, G, R). Default is white, that is (256, 256, 256). - `MSG_COLOR`: Font Color of the messages in (B, G, R). Default is gray, that is (128, 128, 128). @@ -327,45 +327,45 @@ or use checkpoint url from `configs/` to directly load corresponding checkpoint, 1. Predict different labels in a long video by using a TSN model on cpu, with 3 frames for input steps (that is, random sample one from each 3 frames) and outputting result labels with score higher than 0.2. - ```shell - python demo/long_video_demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth PATH_TO_LONG_VIDEO tools/data/kinetics/label_map_k400.txt PATH_TO_SAVED_VIDEO \ - --input-step 3 --device cpu --threshold 0.2 - ``` + ```shell + python demo/long_video_demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ + checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth PATH_TO_LONG_VIDEO tools/data/kinetics/label_map_k400.txt PATH_TO_SAVED_VIDEO \ + --input-step 3 --device cpu --threshold 0.2 + ``` 2. Predict different labels in a long video by using a TSN model on cpu, with 3 frames for input steps (that is, random sample one from each 3 frames) and outputting result labels with score higher than 0.2, loading checkpoint from url. - ```shell - python demo/long_video_demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ - https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - PATH_TO_LONG_VIDEO tools/data/kinetics/label_map_k400.txt PATH_TO_SAVED_VIDEO --input-step 3 --device cpu --threshold 0.2 - ``` + ```shell + python demo/long_video_demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ + https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ + PATH_TO_LONG_VIDEO tools/data/kinetics/label_map_k400.txt PATH_TO_SAVED_VIDEO --input-step 3 --device cpu --threshold 0.2 + ``` 3. 
Predict different labels in a long video from web by using a TSN model on cpu, with 3 frames for input steps (that is, random sample one from each 3 frames) and outputting result labels with score higher than 0.2, loading checkpoint from url. - ```shell - python demo/long_video_demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ - https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - https://www.learningcontainer.com/wp-content/uploads/2020/05/sample-mp4-file.mp4 \ - tools/data/kinetics/label_map_k400.txt PATH_TO_SAVED_VIDEO --input-step 3 --device cpu --threshold 0.2 - ``` + ```shell + python demo/long_video_demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ + https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ + https://www.learningcontainer.com/wp-content/uploads/2020/05/sample-mp4-file.mp4 \ + tools/data/kinetics/label_map_k400.txt PATH_TO_SAVED_VIDEO --input-step 3 --device cpu --threshold 0.2 + ``` 4. Predict different labels in a long video by using a I3D model on gpu, with input_step=1, threshold=0.01 as default and print the labels in cyan. - ```shell - python demo/long_video_demo.py configs/recognition/i3d/i3d_r50_video_inference_32x2x1_100e_kinetics400_rgb.py \ - checkpoints/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth PATH_TO_LONG_VIDEO tools/data/kinetics/label_map_k400.txt PATH_TO_SAVED_VIDEO \ - --label-color 255 255 0 - ``` + ```shell + python demo/long_video_demo.py configs/recognition/i3d/i3d_r50_video_inference_32x2x1_100e_kinetics400_rgb.py \ + checkpoints/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth PATH_TO_LONG_VIDEO tools/data/kinetics/label_map_k400.txt PATH_TO_SAVED_VIDEO \ + --label-color 255 255 0 + ``` 5. Predict different labels in a long video by using a I3D model on gpu and save the results as a `json` file - ```shell - python demo/long_video_demo.py configs/recognition/i3d/i3d_r50_video_inference_32x2x1_100e_kinetics400_rgb.py \ - checkpoints/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth PATH_TO_LONG_VIDEO tools/data/kinetics/label_map_k400.txt ./results.json - ``` + ```shell + python demo/long_video_demo.py configs/recognition/i3d/i3d_r50_video_inference_32x2x1_100e_kinetics400_rgb.py \ + checkpoints/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth PATH_TO_LONG_VIDEO tools/data/kinetics/label_map_k400.txt ./results.json + ``` ## SpatioTemporal Action Detection Webcam Demo @@ -666,9 +666,9 @@ or use checkpoint url from `configs/` to directly load the corresponding checkpo 1. Recognize an audio file as input by using a tsn model on cuda by default. 
- ```shell - python demo/demo_audio.py \ - configs/recognition_audio/resnet/tsn_r18_64x1x1_100e_kinetics400_audio_feature.py \ - https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/tsn_r18_64x1x1_100e_kinetics400_audio_feature_20201012-bf34df6c.pth \ - audio_feature.npy label_map_k400.txt - ``` + ```shell + python demo/demo_audio.py \ + configs/recognition_audio/resnet/tsn_r18_64x1x1_100e_kinetics400_audio_feature.py \ + https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/tsn_r18_64x1x1_100e_kinetics400_audio_feature_20201012-bf34df6c.pth \ + audio_feature.npy label_map_k400.txt + ``` diff --git a/docs/benchmark.md b/docs/benchmark.md index 68472f95b3..562064e500 100644 --- a/docs/benchmark.md +++ b/docs/benchmark.md @@ -44,28 +44,28 @@ We provide the training log based on which we calculate the average iter time, w ### Recognizers -| Model |input| io backend | batch size x gpus | MMAction2 (s/iter) | GPU mem(GB) | MMAction (s/iter)| GPU mem(GB) | Temporal-Shift-Module (s/iter) | GPU mem(GB) | PySlowFast (s/iter)| GPU mem(GB) | -| :--- | :---------------:|:---------------:| :---------------:| :---------------: | :--------------------: | :----------------------------: | :-----------------: |:-----------------: |:-----------------: |:-----------------: |:-----------------: | -| [TSN](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py)| 256p rawframes |Memcached| 32x8|**[0.32](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_rawframes_memcahed_32x8.zip)** | 8.1 |[0.38](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction/tsn_256p_rawframes_memcached_32x8.zip)|8.1| [0.42](https://download.openmmlab.com/mmaction/benchmark/recognition/temporal_shift_module/tsn_256p_rawframes_memcached_32x8.zip)|10.5 | x |x | -| [TSN](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py)| 256p videos |Disk| 32x8|**[1.42](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_videos_disk_32x8.zip)** | 8.1 | x |x |x| x | TODO |TODO| -| [TSN](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py)| 256p dense-encoded video |Disk| 32x8|**[0.61](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_fast_videos_disk_32x8.zip)**|8.1 | x |x| x |x| TODO |TODO| -|[I3D heavy](/configs/recognition/i3d/i3d_r50_video_heavy_8x8x1_100e_kinetics400_rgb.py)|256p videos|Disk |8x8| **[0.34](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/i3d_heavy_256p_videos_disk_8x8.zip)** |4.6|x |x| x |x| [0.44](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_i3d_r50_8x8_video.log) |4.6| -|[I3D heavy](/configs/recognition/i3d/i3d_r50_video_heavy_8x8x1_100e_kinetics400_rgb.py)|256p dense-encoded video|Disk |8x8| **[0.35](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/i3d_heavy_256p_fast_videos_disk_8x8.zip)**| 4.6 | x | x | x | x | [0.36](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_i3d_r50_8x8_fast_video.log) |4.6| -| [I3D](/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py)|256p rawframes|Memcached|8x8| **[0.43](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/i3d_256p_rawframes_memcahed_8x8.zip)**|5.0 | [0.56](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction/i3d_256p_rawframes_memcached_8x8.zip)|5.0| x |x| x |x| -| 
[TSM](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |256p rawframes|Memcached| 8x8|**[0.31](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsm_256p_rawframes_memcahed_8x8.zip)** |6.9| x |x| [0.41](https://download.openmmlab.com/mmaction/benchmark/recognition/temporal_shift_module/tsm_256p_rawframes_memcached_8x8.zip) |9.1| x|x | -| [Slowonly](/configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py)|256p videos|Disk|8x8 | **[0.32](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/slowonly_256p_videos_disk_8x8.zip)** |3.1| TODO|TODO | x|x | [0.34](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_slowonly_r50_4x16_video.log)|3.4 | -| [Slowonly](/configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py)|256p dense-encoded video|Disk|8x8 | **[0.25](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/slowonly_256p_fast_videos_disk_8x8.zip)** |3.1| TODO |TODO| x |x| [0.28](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_slowonly_r50_4x16_fast_video.log) |3.4| -| [Slowfast](/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py)|256p videos|Disk|8x8 | **[0.69](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/slowfast_256p_videos_disk_8x8.zip)**|6.1 | x |x| x |x| [1.04](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_slowfast_r50_4x16_video.log)|7.0 | -| [Slowfast](/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py)|256p dense-encoded video|Disk|8x8 | **[0.68](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/slowfast_256p_fast_videos_disk_8x8.zip)** |6.1| x|x | x |x| [0.96](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_slowfast_r50_4x16_fast_video.log)|7.0 | -| [R(2+1)D](/configs/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb.py)|256p videos |Disk| 8x8|**[0.45](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/r2plus1d_256p_videos_disk_8x8.zip)**|5.1 | x | x | x | x | x | x | -| [R(2+1)D](/configs/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb.py)|256p dense-encoded video |Disk| 8x8|**[0.44](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/r2plus1d_256p_fast_videos_disk_8x8.zip)** |5.1| x|x | x |x| x |x| +| Model | input | io backend | batch size x gpus | MMAction2 (s/iter) | GPU mem(GB) | MMAction (s/iter) | GPU mem(GB) | Temporal-Shift-Module (s/iter) | GPU mem(GB) | PySlowFast (s/iter) | GPU mem(GB) | +| :------------------------------------------------------------------------------------------ | :----------------------: | :--------: | :---------------: | :-------------------------------------------------------------------------------------------------------------------------: | :---------: | :------------------------------------------------------------------------------------------------------------------: | :---------: | :-------------------------------------------------------------------------------------------------------------------------------: | :---------: | :--------------------------------------------------------------------------------------------------------------------: | :---------: | +| [TSN](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py) | 256p rawframes | Memcached | 32x8 | 
**[0.32](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_rawframes_memcahed_32x8.zip)** | 8.1 | [0.38](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction/tsn_256p_rawframes_memcached_32x8.zip) | 8.1 | [0.42](https://download.openmmlab.com/mmaction/benchmark/recognition/temporal_shift_module/tsn_256p_rawframes_memcached_32x8.zip) | 10.5 | x | x | +| [TSN](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py) | 256p videos | Disk | 32x8 | **[1.42](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_videos_disk_32x8.zip)** | 8.1 | x | x | x | x | TODO | TODO | +| [TSN](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py) | 256p dense-encoded video | Disk | 32x8 | **[0.61](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_fast_videos_disk_32x8.zip)** | 8.1 | x | x | x | x | TODO | TODO | +| [I3D heavy](/configs/recognition/i3d/i3d_r50_video_heavy_8x8x1_100e_kinetics400_rgb.py) | 256p videos | Disk | 8x8 | **[0.34](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/i3d_heavy_256p_videos_disk_8x8.zip)** | 4.6 | x | x | x | x | [0.44](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_i3d_r50_8x8_video.log) | 4.6 | +| [I3D heavy](/configs/recognition/i3d/i3d_r50_video_heavy_8x8x1_100e_kinetics400_rgb.py) | 256p dense-encoded video | Disk | 8x8 | **[0.35](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/i3d_heavy_256p_fast_videos_disk_8x8.zip)** | 4.6 | x | x | x | x | [0.36](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_i3d_r50_8x8_fast_video.log) | 4.6 | +| [I3D](/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py) | 256p rawframes | Memcached | 8x8 | **[0.43](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/i3d_256p_rawframes_memcahed_8x8.zip)** | 5.0 | [0.56](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction/i3d_256p_rawframes_memcached_8x8.zip) | 5.0 | x | x | x | x | +| [TSM](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) | 256p rawframes | Memcached | 8x8 | **[0.31](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsm_256p_rawframes_memcahed_8x8.zip)** | 6.9 | x | x | [0.41](https://download.openmmlab.com/mmaction/benchmark/recognition/temporal_shift_module/tsm_256p_rawframes_memcached_8x8.zip) | 9.1 | x | x | +| [Slowonly](/configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py) | 256p videos | Disk | 8x8 | **[0.32](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/slowonly_256p_videos_disk_8x8.zip)** | 3.1 | TODO | TODO | x | x | [0.34](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_slowonly_r50_4x16_video.log) | 3.4 | +| [Slowonly](/configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py) | 256p dense-encoded video | Disk | 8x8 | **[0.25](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/slowonly_256p_fast_videos_disk_8x8.zip)** | 3.1 | TODO | TODO | x | x | [0.28](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_slowonly_r50_4x16_fast_video.log) | 3.4 | +| [Slowfast](/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py) | 256p videos | Disk | 8x8 | **[0.69](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/slowfast_256p_videos_disk_8x8.zip)** | 6.1 | x | x | x | x 
| [1.04](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_slowfast_r50_4x16_video.log) | 7.0 | +| [Slowfast](/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py) | 256p dense-encoded video | Disk | 8x8 | **[0.68](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/slowfast_256p_fast_videos_disk_8x8.zip)** | 6.1 | x | x | x | x | [0.96](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_slowfast_r50_4x16_fast_video.log) | 7.0 | +| [R(2+1)D](/configs/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb.py) | 256p videos | Disk | 8x8 | **[0.45](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/r2plus1d_256p_videos_disk_8x8.zip)** | 5.1 | x | x | x | x | x | x | +| [R(2+1)D](/configs/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb.py) | 256p dense-encoded video | Disk | 8x8 | **[0.44](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/r2plus1d_256p_fast_videos_disk_8x8.zip)** | 5.1 | x | x | x | x | x | x | ### Localizers -| Model | MMAction2 (s/iter) | BSN(boundary sensitive network) (s/iter) |BMN(boundary matching network) (s/iter)| -| :--- | :---------------: | :-------------------------------------: | :-------------------------------------: | -| BSN ([TEM + PEM + PGM](/configs/localization/bsn)) | **0.074(TEM)+0.040(PEM)** | 0.101(TEM)+0.040(PEM) | x | -| BMN ([bmn_400x100_2x8_9e_activitynet_feature](/configs/localization/bmn/bmn_400x100_2x8_9e_activitynet_feature.py)) | **3.27** | x | 3.30 | +| Model | MMAction2 (s/iter) | BSN(boundary sensitive network) (s/iter) | BMN(boundary matching network) (s/iter) | +| :------------------------------------------------------------------------------------------------------------------ | :-----------------------: | :--------------------------------------: | :-------------------------------------: | +| BSN ([TEM + PEM + PGM](/configs/localization/bsn)) | **0.074(TEM)+0.040(PEM)** | 0.101(TEM)+0.040(PEM) | x | +| BMN ([bmn_400x100_2x8_9e_activitynet_feature](/configs/localization/bmn/bmn_400x100_2x8_9e_activitynet_feature.py)) | **3.27** | x | 3.30 | ## Details of Comparison diff --git a/docs/changelog.md b/docs/changelog.md index 0c51d891e1..94c3632fcb 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -87,7 +87,7 @@ **Bug and Typo Fixes** - Update train.py([#1375](https://github.com/open-mmlab/mmaction2/pull/1375)) -- Fix printout bug([#1382]((https://github.com/open-mmlab/mmaction2/pull/1382))) +- Fix printout bug([#1382](<(https://github.com/open-mmlab/mmaction2/pull/1382)>)) - Update multi processing setting([#1395](https://github.com/open-mmlab/mmaction2/pull/1395)) - Setup multi processing both in train and test([#1405](https://github.com/open-mmlab/mmaction2/pull/1405)) - Fix bug in nondistributed multi-gpu training([#1406](https://github.com/open-mmlab/mmaction2/pull/1406)) @@ -139,7 +139,7 @@ **Bug and Typo Fixes** - Update the inference part in notebooks([#1256](https://github.com/open-mmlab/mmaction2/pull/1256)) -- Update the map_location([#1262]((https://github.com/open-mmlab/mmaction2/pull/1262))) +- Update the map_location([#1262](<(https://github.com/open-mmlab/mmaction2/pull/1262)>)) - Fix bug that start_index is not used in RawFrameDecode([#1278](https://github.com/open-mmlab/mmaction2/pull/1278)) - Fix bug in init_random_seed([#1282](https://github.com/open-mmlab/mmaction2/pull/1282)) - Fix bug in 
setup.py([#1303](https://github.com/open-mmlab/mmaction2/pull/1303))
@@ -464,7 +464,7 @@
- Add TSN with Densenet161 backbone as an example for using TorchVision backbones ([#720](https://github.com/open-mmlab/mmaction2/pull/720))
- Add slowonly_nl_embedded_gaussian_r50_4x16x1_150e_kinetics400_rgb ([#690](https://github.com/open-mmlab/mmaction2/pull/690))
- Add slowonly_nl_embedded_gaussian_r50_8x8x1_150e_kinetics400_rgb ([#704](https://github.com/open-mmlab/mmaction2/pull/704))
-- Add slowonly_nl_kinetics_pretrained_r50_4x16x1(8x8x1)_20e_ava_rgb ([#730](https://github.com/open-mmlab/mmaction2/pull/730))
+- Add slowonly_nl_kinetics_pretrained_r50_4x16x1(8x8x1)\_20e_ava_rgb ([#730](https://github.com/open-mmlab/mmaction2/pull/730))

### 0.12.0 (28/02/2021)

diff --git a/docs/faq.md b/docs/faq.md
index 0a462b7b8a..7ec9727aae 100644
--- a/docs/faq.md
+++ b/docs/faq.md
@@ -15,117 +15,118 @@ If the contents here do not cover your issue, please create an issue using the [

## Installation

-- **"No module named 'mmcv.ops'"; "No module named 'mmcv._ext'"**
+- **"No module named 'mmcv.ops'"; "No module named 'mmcv.\_ext'"**

- 1. Uninstall existing mmcv in the environment using `pip uninstall mmcv`
- 2. Install mmcv-full following the [installation instruction](https://mmcv.readthedocs.io/en/latest/#installation)
+ 1. Uninstall existing mmcv in the environment using `pip uninstall mmcv`
+ 2. Install mmcv-full following the [installation instruction](https://mmcv.readthedocs.io/en/latest/#installation)

- **"OSError: MoviePy Error: creation of None failed because of the following error"**

- Refer to [install.md](https://github.com/open-mmlab/mmaction2/blob/master/docs/install.md#requirements)
- 1. For Windows users, [ImageMagick](https://www.imagemagick.org/script/index.php) will not be automatically detected by MoviePy, there is a need to modify `moviepy/config_defaults.py` file by providing the path to the ImageMagick binary called `magick`, like `IMAGEMAGICK_BINARY = "C:\\Program Files\\ImageMagick_VERSION\\magick.exe"`
- 2. For Linux users, there is a need to modify the `/etc/ImageMagick-6/policy.xml` file by commenting out `<policy domain="path" rights="none" pattern="@*" />` to `<!-- <policy domain="path" rights="none" pattern="@*" /> -->`, if ImageMagick is not detected by moviepy.
+ Refer to [install.md](https://github.com/open-mmlab/mmaction2/blob/master/docs/install.md#requirements)
+
+ 1. For Windows users, [ImageMagick](https://www.imagemagick.org/script/index.php) will not be automatically detected by MoviePy, there is a need to modify `moviepy/config_defaults.py` file by providing the path to the ImageMagick binary called `magick`, like `IMAGEMAGICK_BINARY = "C:\\Program Files\\ImageMagick_VERSION\\magick.exe"`
+ 2. For Linux users, there is a need to modify the `/etc/ImageMagick-6/policy.xml` file by commenting out `<policy domain="path" rights="none" pattern="@*" />` to `<!-- <policy domain="path" rights="none" pattern="@*" /> -->`, if ImageMagick is not detected by moviepy.

- **"Why I got the error message 'Please install XXCODEBASE to use XXX' even if I have already installed XXCODEBASE?"**

- You got that error message because our project failed to import a function or a class from XXCODEBASE. You can try to run the corresponding line to see what happens. One possible reason is, for some codebases in OpenMMLAB, you need to install mmcv-full before you install them.
+ You got that error message because our project failed to import a function or a class from XXCODEBASE. You can try to run the corresponding line to see what happens. One possible reason is, for some codebases in OpenMMLAB, you need to install mmcv-full before you install them.
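A quick way to tell a lite `mmcv` install apart from a working `mmcv-full` is to import one of the compiled ops directly. The snippet below is a minimal sketch (not part of the patched docs) that reproduces the `mmcv._ext` symptom described in the first question; `RoIAlign` is used only as an example of an op that ships with the compiled extension:

```python
# Minimal sanity check: the compiled extension ships only with mmcv-full,
# so importing a compiled op raises "No module named 'mmcv._ext'" when
# only the lite `mmcv` package is installed.
try:
    from mmcv.ops import RoIAlign  # noqa: F401 -- any compiled op will do
    print('mmcv-full detected: compiled ops are available')
except ImportError as err:
    print(f'Compiled ops missing ({err}); '
          'run `pip uninstall mmcv` and install mmcv-full instead.')
```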
## Data

- **FileNotFound like `No such file or directory: xxx/xxx/img_00300.jpg`**

- In our repo, we set `start_index=1` as default value for rawframe dataset, and `start_index=0` as default value for video dataset.
- If users encounter FileNotFound error for the first or last frame of the data, there is a need to check whether the files begin with offset 0 or 1,
- that is `xxx_00000.jpg` or `xxx_00001.jpg`, and then change the `start_index` value of data pipeline in configs.
+ In our repo, we set `start_index=1` as default value for rawframe dataset, and `start_index=0` as default value for video dataset.
+ If users encounter FileNotFound error for the first or last frame of the data, there is a need to check whether the files begin with offset 0 or 1,
+ that is `xxx_00000.jpg` or `xxx_00001.jpg`, and then change the `start_index` value of data pipeline in configs.

- **How should we preprocess the videos in the dataset? Resizing them to a fixed size (all videos with the same height-width ratio) like `340x256` (1) or resizing them so that the short edges of all videos are of the same length (256px or 320px) (2)?**

- We have tried both preprocessing approaches and found (2) is a better solution in general, so we use (2) with short edge length 256px as the default preprocessing setting. We benchmarked these preprocessing approaches and you may find the results in [TSN Data Benchmark](https://github.com/open-mmlab/mmaction2/tree/master/configs/recognition/tsn) and [SlowOnly Data Benchmark](https://github.com/open-mmlab/mmaction2/tree/master/configs/recognition/slowonly).
+ We have tried both preprocessing approaches and found (2) is a better solution in general, so we use (2) with short edge length 256px as the default preprocessing setting. We benchmarked these preprocessing approaches and you may find the results in [TSN Data Benchmark](https://github.com/open-mmlab/mmaction2/tree/master/configs/recognition/tsn) and [SlowOnly Data Benchmark](https://github.com/open-mmlab/mmaction2/tree/master/configs/recognition/slowonly).

- **Mismatched data pipeline items lead to errors like `KeyError: 'total_frames'`**

- We have pipelines for processing both videos and frames.
+ We have pipelines for processing both videos and frames.

- **For videos**, we should decode them on the fly in the pipeline, so pairs like `DecordInit & DecordDecode`, `OpenCVInit & OpenCVDecode`, `PyAVInit & PyAVDecode` should be used for this case like [this example](https://github.com/open-mmlab/mmaction2/blob/023777cfd26bb175f85d78c455f6869673e0aa09/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py#L47-L49).
+ **For videos**, we should decode them on the fly in the pipeline, so pairs like `DecordInit & DecordDecode`, `OpenCVInit & OpenCVDecode`, `PyAVInit & PyAVDecode` should be used for this case like [this example](https://github.com/open-mmlab/mmaction2/blob/023777cfd26bb175f85d78c455f6869673e0aa09/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py#L47-L49).

- **For frames**, the images have been decoded offline, so pipeline item `RawFrameDecode` should be used for this case like [this example](https://github.com/open-mmlab/mmaction2/blob/023777cfd26bb175f85d78c455f6869673e0aa09/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py#L49).
+ **For frames**, the images have been decoded offline, so pipeline item `RawFrameDecode` should be used for this case like [this example](https://github.com/open-mmlab/mmaction2/blob/023777cfd26bb175f85d78c455f6869673e0aa09/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py#L49).

- `KeyError: 'total_frames'` is caused by incorrectly using the `RawFrameDecode` step for videos, since when the input is a video, it cannot get `total_frames` beforehand.
+ `KeyError: 'total_frames'` is caused by incorrectly using the `RawFrameDecode` step for videos, since when the input is a video, it cannot get `total_frames` beforehand.

## Training

- **How to just use trained recognizer models for backbone pre-training?**

- Refer to [Use Pre-Trained Model](https://github.com/open-mmlab/mmaction2/blob/master/docs/tutorials/2_finetune.md#use-pre-trained-model),
- in order to use the pre-trained model for the whole network, the new config adds the link of the pre-trained model in `load_from`.
+ Refer to [Use Pre-Trained Model](https://github.com/open-mmlab/mmaction2/blob/master/docs/tutorials/2_finetune.md#use-pre-trained-model),
+ in order to use the pre-trained model for the whole network, the new config adds the link of the pre-trained model in `load_from`.

- And to use the backbone for pre-training, you can change the `pretrained` value in the backbone dict of config files to the checkpoint path / url.
- When training, the unexpected keys will be ignored.
+ And to use the backbone for pre-training, you can change the `pretrained` value in the backbone dict of config files to the checkpoint path / url.
+ When training, the unexpected keys will be ignored.

- **How to visualize the training accuracy/loss curves in real-time?**

- Use `TensorboardLoggerHook` in `log_config` like
+ Use `TensorboardLoggerHook` in `log_config` like

- ```python
- log_config=dict(interval=20, hooks=[dict(type='TensorboardLoggerHook')])
- ```
+ ```python
+ log_config=dict(interval=20, hooks=[dict(type='TensorboardLoggerHook')])
+ ```

- You can refer to [tutorials/1_config.md](tutorials/1_config.md), [tutorials/7_customize_runtime.md](tutorials/7_customize_runtime.md#log-config), and [this](https://github.com/open-mmlab/mmaction2/blob/master/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py#L118).
+ You can refer to [tutorials/1_config.md](tutorials/1_config.md), [tutorials/7_customize_runtime.md](tutorials/7_customize_runtime.md#log-config), and [this](https://github.com/open-mmlab/mmaction2/blob/master/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py#L118).

- **In batchnorm.py: Expected more than 1 value per channel when training**

- To use batchnorm, the batch_size should be larger than 1. If `drop_last` is set as False when building dataloaders, sometimes the last batch of an epoch will have `batch_size==1` (what a coincidence ...) and training will throw this error. You can set `drop_last` as True to avoid this error:
+ To use batchnorm, the batch_size should be larger than 1. If `drop_last` is set as False when building dataloaders, sometimes the last batch of an epoch will have `batch_size==1` (what a coincidence ...) and training will throw this error. 
You can set `drop_last` as True to avoid this error:

- ```python
- train_dataloader=dict(drop_last=True)
- ```
+ ```python
+ train_dataloader=dict(drop_last=True)
+ ```

- **How to fix stages of backbone when finetuning a model?**

- You can refer to [`def _freeze_stages()`](https://github.com/open-mmlab/mmaction2/blob/0149a0e8c1e0380955db61680c0006626fd008e9/mmaction/models/backbones/x3d.py#L458) and [`frozen_stages`](https://github.com/open-mmlab/mmaction2/blob/0149a0e8c1e0380955db61680c0006626fd008e9/mmaction/models/backbones/x3d.py#L183-L184),
- remembering to set `find_unused_parameters = True` in config files for distributed training or testing.
+ You can refer to [`def _freeze_stages()`](https://github.com/open-mmlab/mmaction2/blob/0149a0e8c1e0380955db61680c0006626fd008e9/mmaction/models/backbones/x3d.py#L458) and [`frozen_stages`](https://github.com/open-mmlab/mmaction2/blob/0149a0e8c1e0380955db61680c0006626fd008e9/mmaction/models/backbones/x3d.py#L183-L184),
+ remembering to set `find_unused_parameters = True` in config files for distributed training or testing.

- Actually, users can set `frozen_stages` to freeze stages in backbones except the C3D model, since all backbones inheriting from `ResNet` and `ResNet3D` support the inner function `_freeze_stages()`.
+ Actually, users can set `frozen_stages` to freeze stages in backbones except the C3D model, since all backbones inheriting from `ResNet` and `ResNet3D` support the inner function `_freeze_stages()`.

- **How to set memcached setting in config files?**

- In MMAction2, you can pass memcached kwargs to `class DecordInit` for the video dataset or `RawFrameDecode` for the rawframes dataset.
- For more details, you can refer to [`class FileClient`](https://github.com/open-mmlab/mmcv/blob/master/mmcv/fileio/file_client.py) in MMCV.
+ In MMAction2, you can pass memcached kwargs to `class DecordInit` for the video dataset or `RawFrameDecode` for the rawframes dataset.
+ For more details, you can refer to [`class FileClient`](https://github.com/open-mmlab/mmcv/blob/master/mmcv/fileio/file_client.py) in MMCV.

- Here is an example of using memcached for a rawframes dataset:
+ Here is an example of using memcached for a rawframes dataset:

- ```python
- mc_cfg = dict(server_list_cfg='server_list_cfg', client_cfg='client_cfg', sys_path='sys_path')
-
- train_pipeline = [
- ...
- dict(type='RawFrameDecode', io_backend='memcached', **mc_cfg),
- ...
- ]
- ```
+ ```python
+ mc_cfg = dict(server_list_cfg='server_list_cfg', client_cfg='client_cfg', sys_path='sys_path')
+
+ train_pipeline = [
+ ...
+ dict(type='RawFrameDecode', io_backend='memcached', **mc_cfg),
+ ...
+ ]
+ ```

- **How to set `load_from` value in config files to finetune models?**

- In MMAction2, we set `load_from=None` as default in `configs/_base_/default_runtime.py` and owing to [inheritance design](/docs/tutorials/1_config.md),
- users can directly change it by setting `load_from` in their configs.
+ In MMAction2, we set `load_from=None` as default in `configs/_base_/default_runtime.py` and owing to [inheritance design](/docs/tutorials/1_config.md),
+ users can directly change it by setting `load_from` in their configs.

## Testing

-- **How to make the predicted scores normalized by softmax within [0, 1]?**
+- **How to make the predicted scores normalized by softmax within \[0, 1\]?**

- Change this in the config: set `model['test_cfg'] = dict(average_clips='prob')`.
+ Change this in the config: set `model['test_cfg'] = dict(average_clips='prob')`.
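For reference, the relevant config fragment would look like the sketch below (a minimal sketch of the setting named in the answer above; the backbone and head fields are elided and would come from your base config):

```python
# Minimal sketch of the fragment discussed above: with average_clips='prob',
# per-clip scores pass through softmax before being averaged, so the final
# predictions lie in [0, 1].
model = dict(
    type='Recognizer2D',
    # backbone=..., cls_head=...  (inherited from the base config)
    test_cfg=dict(average_clips='prob'))
```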
- **What if the model is too large and the GPU memory cannot fit even one testing sample?**

- By default, the 3D models are tested with 10 clips x 3 crops, which are 30 views in total. For extremely large models, the GPU memory cannot fit even one testing sample (because there are 30 views). To handle this, you can set `max_testing_views=n` in `model['test_cfg']` of the config file. If so, n views will be used as a batch during forwarding to save GPU memory.
+ By default, the 3D models are tested with 10 clips x 3 crops, which are 30 views in total. For extremely large models, the GPU memory cannot fit even one testing sample (because there are 30 views). To handle this, you can set `max_testing_views=n` in `model['test_cfg']` of the config file. If so, n views will be used as a batch during forwarding to save GPU memory.

- **How to show test results?**

- During testing, we can use the command `--out xxx.json/pkl/yaml` to output result files for checking. The testing output has exactly the same order as the test dataset.
- Besides, we provide an analysis tool for evaluating a model using the output result files in [`tools/analysis/eval_metric.py`](/tools/analysis/eval_metric.py)
+ During testing, we can use the command `--out xxx.json/pkl/yaml` to output result files for checking. The testing output has exactly the same order as the test dataset.
+ Besides, we provide an analysis tool for evaluating a model using the output result files in [`tools/analysis/eval_metric.py`](/tools/analysis/eval_metric.py)

## Deploying

- **Why is the onnx model converted by mmaction2 throwing an error when converting to other frameworks such as TensorRT?**

- For now, we can only make sure that models in mmaction2 are onnx-compatible. However, some operations in onnx may be unsupported by your target framework for deployment, e.g. TensorRT in [this issue](https://github.com/open-mmlab/mmaction2/issues/414). When such a situation occurs, we suggest you raise an issue and ask the community to help as long as `pytorch2onnx.py` works well and is verified numerically.
+ For now, we can only make sure that models in mmaction2 are onnx-compatible. However, some operations in onnx may be unsupported by your target framework for deployment, e.g. TensorRT in [this issue](https://github.com/open-mmlab/mmaction2/issues/414). When such a situation occurs, we suggest you raise an issue and ask the community to help as long as `pytorch2onnx.py` works well and is verified numerically.

diff --git a/docs/getting_started.md b/docs/getting_started.md
index 6c8c2a803c..9b492360fe 100644
--- a/docs/getting_started.md
+++ b/docs/getting_started.md
@@ -99,35 +99,35 @@ Assume that you have already downloaded the checkpoints to the directory `checkp

1. Test TSN on Kinetics-400 (without saving the test results) and evaluate the top-k accuracy and mean class accuracy.

- ```shell
- python tools/test.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py \
- checkpoints/SOME_CHECKPOINT.pth \
- --eval top_k_accuracy mean_class_accuracy
- ```
+ ```shell
+ python tools/test.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py \
+ checkpoints/SOME_CHECKPOINT.pth \
+ --eval top_k_accuracy mean_class_accuracy
+ ```

2. Test TSN on Something-Something V1 with 8 GPUs, and evaluate the top-k accuracy.
- ```shell - ./tools/dist_test.sh configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb.py \ - checkpoints/SOME_CHECKPOINT.pth \ - 8 --out results.pkl --eval top_k_accuracy - ``` + ```shell + ./tools/dist_test.sh configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb.py \ + checkpoints/SOME_CHECKPOINT.pth \ + 8 --out results.pkl --eval top_k_accuracy + ``` 3. Test TSN on Kinetics-400 in slurm environment and evaluate the top-k accuracy - ```shell - python tools/test.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/SOME_CHECKPOINT.pth \ - --launcher slurm --eval top_k_accuracy - ``` + ```shell + python tools/test.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py \ + checkpoints/SOME_CHECKPOINT.pth \ + --launcher slurm --eval top_k_accuracy + ``` 4. Test TSN on Something-Something V1 with onnx model and evaluate the top-k accuracy - ```shell - python tools/test.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/SOME_CHECKPOINT.onnx \ - --eval top_k_accuracy --onnx - ``` + ```shell + python tools/test.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py \ + checkpoints/SOME_CHECKPOINT.onnx \ + --eval top_k_accuracy --onnx + ``` ### High-level APIs for testing a video and rawframes @@ -266,54 +266,54 @@ in [TSM: Temporal Shift Module for Efficient Video Understanding](https://arxiv. 1. create a new file in `mmaction/models/backbones/resnet_tsm.py`. - ```python - from ..builder import BACKBONES - from .resnet import ResNet + ```python + from ..builder import BACKBONES + from .resnet import ResNet - @BACKBONES.register_module() - class ResNetTSM(ResNet): + @BACKBONES.register_module() + class ResNetTSM(ResNet): - def __init__(self, - depth, - num_segments=8, - is_shift=True, - shift_div=8, - shift_place='blockres', - temporal_pool=False, - **kwargs): - pass + def __init__(self, + depth, + num_segments=8, + is_shift=True, + shift_div=8, + shift_place='blockres', + temporal_pool=False, + **kwargs): + pass - def forward(self, x): - # implementation is ignored - pass - ``` + def forward(self, x): + # implementation is ignored + pass + ``` 2. Import the module in `mmaction/models/backbones/__init__.py` - ```python - from .resnet_tsm import ResNetTSM - ``` + ```python + from .resnet_tsm import ResNetTSM + ``` 3. modify the config file from - ```python - backbone=dict( - type='ResNet', - pretrained='torchvision://resnet50', - depth=50, - norm_eval=False) - ``` + ```python + backbone=dict( + type='ResNet', + pretrained='torchvision://resnet50', + depth=50, + norm_eval=False) + ``` to - ```python - backbone=dict( - type='ResNetTSM', - pretrained='torchvision://resnet50', - depth=50, - norm_eval=False, - shift_div=8) - ``` + ```python + backbone=dict( + type='ResNetTSM', + pretrained='torchvision://resnet50', + depth=50, + norm_eval=False, + shift_div=8) + ``` ### Write a new model diff --git a/docs/install.md b/docs/install.md index c5db74571f..d1bde35709 100644 --- a/docs/install.md +++ b/docs/install.md @@ -32,9 +32,9 @@ We provide some tips for MMAction2 installation in this file. - [denseflow](https://github.com/open-mmlab/denseflow) (optional): See [here](https://github.com/innerlee/setup) for simple install scripts. - [moviepy](https://zulko.github.io/moviepy/) (optional): `pip install moviepy`. See [here](https://zulko.github.io/moviepy/install.html) for official installation. **Note**(according to [this issue](https://github.com/Zulko/moviepy/issues/693)) that: 1. 
For Windows users, [ImageMagick](https://www.imagemagick.org/script/index.php) will not be automatically detected by MoviePy,
- there is a need to modify `moviepy/config_defaults.py` file by providing the path to the ImageMagick binary called `magick`, like `IMAGEMAGICK_BINARY = "C:\\Program Files\\ImageMagick_VERSION\\magick.exe"`
+ there is a need to modify `moviepy/config_defaults.py` file by providing the path to the ImageMagick binary called `magick`, like `IMAGEMAGICK_BINARY = "C:\\Program Files\\ImageMagick_VERSION\\magick.exe"`
2. For Linux users, there is a need to modify the `/etc/ImageMagick-6/policy.xml` file by commenting out
- `<policy domain="path" rights="none" pattern="@*" />` to `<!-- <policy domain="path" rights="none" pattern="@*" /> -->`, if [ImageMagick](https://www.imagemagick.org/script/index.php) is not detected by `moviepy`.
+ `<policy domain="path" rights="none" pattern="@*" />` to `<!-- <policy domain="path" rights="none" pattern="@*" /> -->`, if [ImageMagick](https://www.imagemagick.org/script/index.php) is not detected by `moviepy`.
- [Pillow-SIMD](https://docs.fast.ai/performance.html#pillow-simd) (optional): Install it by the following scripts.

 ```shell
diff --git a/docs/projects.md b/docs/projects.md
index 031f116fb0..01a6864339 100644
--- a/docs/projects.md
+++ b/docs/projects.md
@@ -15,9 +15,9 @@ Some of the papers are published in top-tier conferences (CVPR, ICCV, and ECCV),

To make this list also a reference for the community to develop and compare new video understanding algorithms, we list them following the time order of top-tier conferences.
Methods already supported and maintained by MMAction2 are not listed.

-- Evidential Deep Learning for Open Set Action Recognition, ICCV 2021 Oral. [[paper]](https://arxiv.org/abs/2107.10161)[[github]](https://github.com/Cogito2012/DEAR)
-- Rethinking Self-supervised Correspondence Learning: A Video Frame-level Similarity Perspective, ICCV 2021 Oral. [[paper]](https://arxiv.org/abs/2103.17263)[[github]](https://github.com/xvjiarui/VFS)
-- MGSampler: An Explainable Sampling Strategy for Video Action Recognition, ICCV 2021. [[paper]](https://arxiv.org/abs/2104.09952)[[github]](https://github.com/MCG-NJU/MGSampler)
-- MultiSports: A Multi-Person Video Dataset of Spatio-Temporally Localized Sports Actions, ICCV 2021. [[paper]](https://arxiv.org/abs/2105.07404)
-- Video Swin Transformer. [[paper]](https://arxiv.org/abs/2106.13230)[[github]](https://github.com/SwinTransformer/Video-Swin-Transformer)
-- Long Short-Term Transformer for Online Action Detection. [[paper]](https://arxiv.org/abs/2107.03377)
+- Evidential Deep Learning for Open Set Action Recognition, ICCV 2021 Oral. [\[paper\]](https://arxiv.org/abs/2107.10161)[\[github\]](https://github.com/Cogito2012/DEAR)
+- Rethinking Self-supervised Correspondence Learning: A Video Frame-level Similarity Perspective, ICCV 2021 Oral. [\[paper\]](https://arxiv.org/abs/2103.17263)[\[github\]](https://github.com/xvjiarui/VFS)
+- MGSampler: An Explainable Sampling Strategy for Video Action Recognition, ICCV 2021. [\[paper\]](https://arxiv.org/abs/2104.09952)[\[github\]](https://github.com/MCG-NJU/MGSampler)
+- MultiSports: A Multi-Person Video Dataset of Spatio-Temporally Localized Sports Actions, ICCV 2021. [\[paper\]](https://arxiv.org/abs/2105.07404)
+- Video Swin Transformer. [\[paper\]](https://arxiv.org/abs/2106.13230)[\[github\]](https://github.com/SwinTransformer/Video-Swin-Transformer)
+- Long Short-Term Transformer for Online Action Detection. 
[\[paper\]](https://arxiv.org/abs/2107.03377) diff --git a/docs/supported_datasets.md b/docs/supported_datasets.md index ca449c1602..8a4403df0d 100644 --- a/docs/supported_datasets.md +++ b/docs/supported_datasets.md @@ -1,9 +1,10 @@ # Supported Datasets - Action Recognition + - [UCF101](/tools/data/ucf101/README.md) \[ [Homepage](https://www.crcv.ucf.edu/research/data-sets/ucf101/) \]. - [HMDB51](/tools/data/hmdb51/README.md) \[ [Homepage](https://serre-lab.clps.brown.edu/resource/hmdb-a-large-human-motion-database/) \]. - - [Kinetics-[400/600/700]](/tools/data/kinetics/README.md) \[ [Homepage](https://deepmind.com/research/open-source/kinetics) \] + - [Kinetics-\[400/600/700\]](/tools/data/kinetics/README.md) \[ [Homepage](https://deepmind.com/research/open-source/kinetics) \] - [Something-Something V1](/tools/data/sthv1/README.md) \[ [Homepage](https://20bn.com/datasets/something-something/v1) \] - [Something-Something V2](/tools/data/sthv2/README.md) \[ [Homepage](https://20bn.com/datasets/something-something) \] - [Moments in Time](/tools/data/mit/README.md) \[ [Homepage](http://moments.csail.mit.edu/) \] @@ -16,15 +17,18 @@ - [OmniSource](/tools/data/omnisource/README.md) \[ [Homepage](https://kennymckormick.github.io/omnisource/) \] - Temporal Action Detection + - [ActivityNet](/tools/data/activitynet/README.md) \[ [Homepage](http://activity-net.org/) \] - [THUMOS14](/tools/data/thumos14/README.md) \[ [Homepage](https://www.crcv.ucf.edu/THUMOS14/download.html) \] - Spatial Temporal Action Detection + - [AVA](/tools/data/ava/README.md) \[ [Homepage](https://research.google.com/ava/index.html) \] - [UCF101-24](/tools/data/ucf101_24/README.md) \[ [Homepage](http://www.thumos.info/download.html) \] - [JHMDB](/tools/data/jhmdb/README.md) \[ [Homepage](http://jhmdb.is.tue.mpg.de/) \] - Skeleton-based Action Recognition + - [PoseC3D Skeleton Dataset](/tools/data/skeleton/README.md) \[ [Homepage](https://kennymckormick.github.io/posec3d/) \] The supported datasets are listed above. diff --git a/docs/tutorials/1_config.md b/docs/tutorials/1_config.md index bad5d56eee..617c71330a 100644 --- a/docs/tutorials/1_config.md +++ b/docs/tutorials/1_config.md @@ -35,7 +35,7 @@ When submitting jobs using "tools/train.py" or "tools/test.py", you may specify - Update values of list/tuples. If the value to be updated is a list or a tuple. For example, the config file normally sets `workflow=[('train', 1)]`. If you want to - change this key, you may specify `--cfg-options workflow="[(train,1),(val,1)]"`. Note that the quotation mark \" is necessary to + change this key, you may specify `--cfg-options workflow="[(train,1),(val,1)]"`. Note that the quotation mark " is necessary to support list/tuple data types, and that **NO** white space is allowed inside the quotation marks in the specified value. ## Config File Structure @@ -80,147 +80,147 @@ which is convenient to conduct various experiments. - An Example of BMN - To help the users have a basic idea of a complete config structure and the modules in an action localization system, - we make brief comments on the config of BMN as the following. - For more detailed usage and alternative for per parameter in each module, please refer to the [API documentation](https://mmaction2.readthedocs.io/en/latest/api.html). 
- - ```python - # model settings - model = dict( # Config of the model - type='BMN', # Type of the localizer - temporal_dim=100, # Total frames selected for each video - boundary_ratio=0.5, # Ratio for determining video boundaries - num_samples=32, # Number of samples for each proposal - num_samples_per_bin=3, # Number of bin samples for each sample - feat_dim=400, # Dimension of feature - soft_nms_alpha=0.4, # Soft NMS alpha - soft_nms_low_threshold=0.5, # Soft NMS low threshold - soft_nms_high_threshold=0.9, # Soft NMS high threshold - post_process_top_k=100) # Top k proposals in post process - # model training and testing settings - train_cfg = None # Config of training hyperparameters for BMN - test_cfg = dict(average_clips='score') # Config for testing hyperparameters for BMN - - # dataset settings - dataset_type = 'ActivityNetDataset' # Type of dataset for training, validation and testing - data_root = 'data/activitynet_feature_cuhk/csv_mean_100/' # Root path to data for training - data_root_val = 'data/activitynet_feature_cuhk/csv_mean_100/' # Root path to data for validation and testing - ann_file_train = 'data/ActivityNet/anet_anno_train.json' # Path to the annotation file for training - ann_file_val = 'data/ActivityNet/anet_anno_val.json' # Path to the annotation file for validation - ann_file_test = 'data/ActivityNet/anet_anno_test.json' # Path to the annotation file for testing - - train_pipeline = [ # List of training pipeline steps - dict(type='LoadLocalizationFeature'), # Load localization feature pipeline - dict(type='GenerateLocalizationLabels'), # Generate localization labels pipeline - dict( # Config of Collect - type='Collect', # Collect pipeline that decides which keys in the data should be passed to the localizer - keys=['raw_feature', 'gt_bbox'], # Keys of input - meta_name='video_meta', # Meta name - meta_keys=['video_name']), # Meta keys of input - dict( # Config of ToTensor - type='ToTensor', # Convert other types to tensor type pipeline - keys=['raw_feature']), # Keys to be converted from image to tensor - dict( # Config of ToDataContainer - type='ToDataContainer', # Pipeline to convert the data to DataContainer - fields=[dict(key='gt_bbox', stack=False, cpu_only=True)]) # Required fields to be converted with keys and attributes - ] - val_pipeline = [ # List of validation pipeline steps - dict(type='LoadLocalizationFeature'), # Load localization feature pipeline - dict(type='GenerateLocalizationLabels'), # Generate localization labels pipeline - dict( # Config of Collect - type='Collect', # Collect pipeline that decides which keys in the data should be passed to the localizer - keys=['raw_feature', 'gt_bbox'], # Keys of input - meta_name='video_meta', # Meta name - meta_keys=[ - 'video_name', 'duration_second', 'duration_frame', 'annotations', - 'feature_frame' - ]), # Meta keys of input - dict( # Config of ToTensor - type='ToTensor', # Convert other types to tensor type pipeline - keys=['raw_feature']), # Keys to be converted from image to tensor - dict( # Config of ToDataContainer - type='ToDataContainer', # Pipeline to convert the data to DataContainer - fields=[dict(key='gt_bbox', stack=False, cpu_only=True)]) # Required fields to be converted with keys and attributes - ] - test_pipeline = [ # List of testing pipeline steps - dict(type='LoadLocalizationFeature'), # Load localization feature pipeline - dict( # Config of Collect - type='Collect', # Collect pipeline that decides which keys in the data should be passed to the localizer - keys=['raw_feature'], # 
Keys of input - meta_name='video_meta', # Meta name - meta_keys=[ - 'video_name', 'duration_second', 'duration_frame', 'annotations', - 'feature_frame' - ]), # Meta keys of input - dict( # Config of ToTensor - type='ToTensor', # Convert other types to tensor type pipeline - keys=['raw_feature']), # Keys to be converted from image to tensor - ] - data = dict( # Config of data - videos_per_gpu=8, # Batch size of each single GPU - workers_per_gpu=8, # Workers to pre-fetch data for each single GPU - train_dataloader=dict( # Additional config of train dataloader - drop_last=True), # Whether to drop out the last batch of data in training - val_dataloader=dict( # Additional config of validation dataloader - videos_per_gpu=1), # Batch size of each single GPU during evaluation - test_dataloader=dict( # Additional config of test dataloader - videos_per_gpu=2), # Batch size of each single GPU during testing - test=dict( # Testing dataset config - type=dataset_type, - ann_file=ann_file_test, - pipeline=test_pipeline, - data_prefix=data_root_val), - val=dict( # Validation dataset config - type=dataset_type, - ann_file=ann_file_val, - pipeline=val_pipeline, - data_prefix=data_root_val), - train=dict( # Training dataset config - type=dataset_type, - ann_file=ann_file_train, - pipeline=train_pipeline, - data_prefix=data_root)) - - # optimizer - optimizer = dict( - # Config used to build optimizer, support (1). All the optimizers in PyTorch - # whose arguments are also the same as those in PyTorch. (2). Custom optimizers - # which are built on `constructor`, referring to "tutorials/5_new_modules.md" - # for implementation. - type='Adam', # Type of optimizer, refer to https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/optimizer/default_constructor.py#L13 for more details - lr=0.001, # Learning rate, see detail usages of the parameters in the documentation of PyTorch - weight_decay=0.0001) # Weight decay of Adam - optimizer_config = dict( # Config used to build the optimizer hook - grad_clip=None) # Most of the methods do not use gradient clip - # learning policy - lr_config = dict( # Learning rate scheduler config used to register LrUpdater hook - policy='step', # Policy of scheduler, also support CosineAnnealing, Cyclic, etc. 
Refer to details of supported LrUpdater from https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/lr_updater.py#L9
- step=7)  # Steps to decay the learning rate
-
- total_epochs = 9  # Total epochs to train the model
- checkpoint_config = dict(  # Config to set the checkpoint hook, Refer to https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/checkpoint.py for implementation
- interval=1)  # Interval to save checkpoint
- evaluation = dict(  # Config of evaluation during training
- interval=1,  # Interval to perform evaluation
- metrics=['AR@AN'])  # Metrics to be performed
- log_config = dict(  # Config to register logger hook
- interval=50,  # Interval to print the log
- hooks=[  # Hooks to be implemented during training
- dict(type='TextLoggerHook'),  # The logger used to record the training process
- # dict(type='TensorboardLoggerHook'),  # The Tensorboard logger is also supported
- ])
-
- # runtime settings
- dist_params = dict(backend='nccl')  # Parameters to setup distributed training, the port can also be set
- log_level = 'INFO'  # The level of logging
- work_dir = './work_dirs/bmn_400x100_2x8_9e_activitynet_feature/'  # Directory to save the model checkpoints and logs for the current experiments
- load_from = None  # load models as a pre-trained model from a given path. This will not resume training
- resume_from = None  # Resume checkpoints from a given path, the training will be resumed from the epoch when the checkpoint's is saved
- workflow = [('train', 1)]  # Workflow for runner. [('train', 1)] means there is only one workflow and the workflow named 'train' is executed once
- output_config = dict(  # Config of localization output
- out=f'{work_dir}/results.json',  # Path to output file
- output_format='json')  # File format of output file
- ```
+ To help the users have a basic idea of a complete config structure and the modules in an action localization system,
+ we make brief comments on the config of BMN, as follows.
+ For more detailed usage and alternatives for each parameter in each module, please refer to the [API documentation](https://mmaction2.readthedocs.io/en/latest/api.html).
+ + ```python + # model settings + model = dict( # Config of the model + type='BMN', # Type of the localizer + temporal_dim=100, # Total frames selected for each video + boundary_ratio=0.5, # Ratio for determining video boundaries + num_samples=32, # Number of samples for each proposal + num_samples_per_bin=3, # Number of bin samples for each sample + feat_dim=400, # Dimension of feature + soft_nms_alpha=0.4, # Soft NMS alpha + soft_nms_low_threshold=0.5, # Soft NMS low threshold + soft_nms_high_threshold=0.9, # Soft NMS high threshold + post_process_top_k=100) # Top k proposals in post process + # model training and testing settings + train_cfg = None # Config of training hyperparameters for BMN + test_cfg = dict(average_clips='score') # Config for testing hyperparameters for BMN + + # dataset settings + dataset_type = 'ActivityNetDataset' # Type of dataset for training, validation and testing + data_root = 'data/activitynet_feature_cuhk/csv_mean_100/' # Root path to data for training + data_root_val = 'data/activitynet_feature_cuhk/csv_mean_100/' # Root path to data for validation and testing + ann_file_train = 'data/ActivityNet/anet_anno_train.json' # Path to the annotation file for training + ann_file_val = 'data/ActivityNet/anet_anno_val.json' # Path to the annotation file for validation + ann_file_test = 'data/ActivityNet/anet_anno_test.json' # Path to the annotation file for testing + + train_pipeline = [ # List of training pipeline steps + dict(type='LoadLocalizationFeature'), # Load localization feature pipeline + dict(type='GenerateLocalizationLabels'), # Generate localization labels pipeline + dict( # Config of Collect + type='Collect', # Collect pipeline that decides which keys in the data should be passed to the localizer + keys=['raw_feature', 'gt_bbox'], # Keys of input + meta_name='video_meta', # Meta name + meta_keys=['video_name']), # Meta keys of input + dict( # Config of ToTensor + type='ToTensor', # Convert other types to tensor type pipeline + keys=['raw_feature']), # Keys to be converted from image to tensor + dict( # Config of ToDataContainer + type='ToDataContainer', # Pipeline to convert the data to DataContainer + fields=[dict(key='gt_bbox', stack=False, cpu_only=True)]) # Required fields to be converted with keys and attributes + ] + val_pipeline = [ # List of validation pipeline steps + dict(type='LoadLocalizationFeature'), # Load localization feature pipeline + dict(type='GenerateLocalizationLabels'), # Generate localization labels pipeline + dict( # Config of Collect + type='Collect', # Collect pipeline that decides which keys in the data should be passed to the localizer + keys=['raw_feature', 'gt_bbox'], # Keys of input + meta_name='video_meta', # Meta name + meta_keys=[ + 'video_name', 'duration_second', 'duration_frame', 'annotations', + 'feature_frame' + ]), # Meta keys of input + dict( # Config of ToTensor + type='ToTensor', # Convert other types to tensor type pipeline + keys=['raw_feature']), # Keys to be converted from image to tensor + dict( # Config of ToDataContainer + type='ToDataContainer', # Pipeline to convert the data to DataContainer + fields=[dict(key='gt_bbox', stack=False, cpu_only=True)]) # Required fields to be converted with keys and attributes + ] + test_pipeline = [ # List of testing pipeline steps + dict(type='LoadLocalizationFeature'), # Load localization feature pipeline + dict( # Config of Collect + type='Collect', # Collect pipeline that decides which keys in the data should be passed to the localizer + keys=['raw_feature'], # 
Keys of input + meta_name='video_meta', # Meta name + meta_keys=[ + 'video_name', 'duration_second', 'duration_frame', 'annotations', + 'feature_frame' + ]), # Meta keys of input + dict( # Config of ToTensor + type='ToTensor', # Convert other types to tensor type pipeline + keys=['raw_feature']), # Keys to be converted from image to tensor + ] + data = dict( # Config of data + videos_per_gpu=8, # Batch size of each single GPU + workers_per_gpu=8, # Workers to pre-fetch data for each single GPU + train_dataloader=dict( # Additional config of train dataloader + drop_last=True), # Whether to drop out the last batch of data in training + val_dataloader=dict( # Additional config of validation dataloader + videos_per_gpu=1), # Batch size of each single GPU during evaluation + test_dataloader=dict( # Additional config of test dataloader + videos_per_gpu=2), # Batch size of each single GPU during testing + test=dict( # Testing dataset config + type=dataset_type, + ann_file=ann_file_test, + pipeline=test_pipeline, + data_prefix=data_root_val), + val=dict( # Validation dataset config + type=dataset_type, + ann_file=ann_file_val, + pipeline=val_pipeline, + data_prefix=data_root_val), + train=dict( # Training dataset config + type=dataset_type, + ann_file=ann_file_train, + pipeline=train_pipeline, + data_prefix=data_root)) + + # optimizer + optimizer = dict( + # Config used to build optimizer, support (1). All the optimizers in PyTorch + # whose arguments are also the same as those in PyTorch. (2). Custom optimizers + # which are built on `constructor`, referring to "tutorials/5_new_modules.md" + # for implementation. + type='Adam', # Type of optimizer, refer to https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/optimizer/default_constructor.py#L13 for more details + lr=0.001, # Learning rate, see detail usages of the parameters in the documentation of PyTorch + weight_decay=0.0001) # Weight decay of Adam + optimizer_config = dict( # Config used to build the optimizer hook + grad_clip=None) # Most of the methods do not use gradient clip + # learning policy + lr_config = dict( # Learning rate scheduler config used to register LrUpdater hook + policy='step', # Policy of scheduler, also support CosineAnnealing, Cyclic, etc. 
Refer to details of supported LrUpdater from https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/lr_updater.py#L9
+ step=7)  # Steps to decay the learning rate
+
+ total_epochs = 9  # Total epochs to train the model
+ checkpoint_config = dict(  # Config to set the checkpoint hook, Refer to https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/checkpoint.py for implementation
+ interval=1)  # Interval to save checkpoint
+ evaluation = dict(  # Config of evaluation during training
+ interval=1,  # Interval to perform evaluation
+ metrics=['AR@AN'])  # Metrics to be performed
+ log_config = dict(  # Config to register logger hook
+ interval=50,  # Interval to print the log
+ hooks=[  # Hooks to be implemented during training
+ dict(type='TextLoggerHook'),  # The logger used to record the training process
+ # dict(type='TensorboardLoggerHook'),  # The Tensorboard logger is also supported
+ ])
+
+ # runtime settings
+ dist_params = dict(backend='nccl')  # Parameters to setup distributed training, the port can also be set
+ log_level = 'INFO'  # The level of logging
+ work_dir = './work_dirs/bmn_400x100_2x8_9e_activitynet_feature/'  # Directory to save the model checkpoints and logs for the current experiments
+ load_from = None  # Load models as a pre-trained model from a given path. This will not resume training
+ resume_from = None  # Resume checkpoints from a given path, the training will be resumed from the epoch when the checkpoint is saved
+ workflow = [('train', 1)]  # Workflow for runner. [('train', 1)] means there is only one workflow and the workflow named 'train' is executed once
+ output_config = dict(  # Config of localization output
+ out=f'{work_dir}/results.json',  # Path to output file
+ output_format='json')  # File format of output file
+ ```

### Config System for Action Recognition

- An Example of TSN

- To help the users have a basic idea of a complete config structure and the modules in an action recognition system,
- we make brief comments on the config of TSN as the following.
- For more detailed usage and alternative for per parameter in each module, please refer to the API documentation.
-
- ```python
- # model settings
- model = dict(  # Config of the model
- type='Recognizer2D',  # Type of the recognizer
- backbone=dict(  # Dict for backbone
- type='ResNet',  # Name of the backbone
- pretrained='torchvision://resnet50',  # The url/site of the pretrained model
- depth=50,  # Depth of ResNet model
- norm_eval=False),  # Whether to set BN layers to eval mode when training
- cls_head=dict(  # Dict for classification head
- type='TSNHead',  # Name of classification head
- num_classes=400,  # Number of classes to be classified.
- in_channels=2048,  # The input channels of classification head.
- spatial_type='avg',  # Type of pooling in spatial dimension
- consensus=dict(type='AvgConsensus', dim=1),  # Config of consensus module
- dropout_ratio=0.4,  # Probability in dropout layer
- init_std=0.01),  # Std value for linear layer initiation
- # model training and testing settings
- train_cfg=None,  # Config of training hyperparameters for TSN
- test_cfg=dict(average_clips=None))  # Config for testing hyperparameters for TSN.
- - # dataset settings - dataset_type = 'RawframeDataset' # Type of dataset for training, validation and testing - data_root = 'data/kinetics400/rawframes_train/' # Root path to data for training - data_root_val = 'data/kinetics400/rawframes_val/' # Root path to data for validation and testing - ann_file_train = 'data/kinetics400/kinetics400_train_list_rawframes.txt' # Path to the annotation file for training - ann_file_val = 'data/kinetics400/kinetics400_val_list_rawframes.txt' # Path to the annotation file for validation - ann_file_test = 'data/kinetics400/kinetics400_val_list_rawframes.txt' # Path to the annotation file for testing - img_norm_cfg = dict( # Config of image normalization used in data pipeline - mean=[123.675, 116.28, 103.53], # Mean values of different channels to normalize - std=[58.395, 57.12, 57.375], # Std values of different channels to normalize - to_bgr=False) # Whether to convert channels from RGB to BGR - - train_pipeline = [ # List of training pipeline steps - dict( # Config of SampleFrames - type='SampleFrames', # Sample frames pipeline, sampling frames from video - clip_len=1, # Frames of each sampled output clip - frame_interval=1, # Temporal interval of adjacent sampled frames - num_clips=3), # Number of clips to be sampled - dict( # Config of RawFrameDecode - type='RawFrameDecode'), # Load and decode Frames pipeline, picking raw frames with given indices - dict( # Config of Resize - type='Resize', # Resize pipeline - scale=(-1, 256)), # The scale to resize images - dict( # Config of MultiScaleCrop - type='MultiScaleCrop', # Multi scale crop pipeline, cropping images with a list of randomly selected scales - input_size=224, # Input size of the network - scales=(1, 0.875, 0.75, 0.66), # Scales of width and height to be selected - random_crop=False, # Whether to randomly sample cropping bbox - max_wh_scale_gap=1), # Maximum gap of w and h scale levels - dict( # Config of Resize - type='Resize', # Resize pipeline - scale=(224, 224), # The scale to resize images - keep_ratio=False), # Whether to resize with changing the aspect ratio - dict( # Config of Flip - type='Flip', # Flip Pipeline - flip_ratio=0.5), # Probability of implementing flip - dict( # Config of Normalize - type='Normalize', # Normalize pipeline - **img_norm_cfg), # Config of image normalization - dict( # Config of FormatShape - type='FormatShape', # Format shape pipeline, Format final image shape to the given input_format - input_format='NCHW'), # Final image shape format - dict( # Config of Collect - type='Collect', # Collect pipeline that decides which keys in the data should be passed to the recognizer - keys=['imgs', 'label'], # Keys of input - meta_keys=[]), # Meta keys of input - dict( # Config of ToTensor - type='ToTensor', # Convert other types to tensor type pipeline - keys=['imgs', 'label']) # Keys to be converted from image to tensor - ] - val_pipeline = [ # List of validation pipeline steps - dict( # Config of SampleFrames - type='SampleFrames', # Sample frames pipeline, sampling frames from video - clip_len=1, # Frames of each sampled output clip - frame_interval=1, # Temporal interval of adjacent sampled frames - num_clips=3, # Number of clips to be sampled - test_mode=True), # Whether to set test mode in sampling - dict( # Config of RawFrameDecode - type='RawFrameDecode'), # Load and decode Frames pipeline, picking raw frames with given indices - dict( # Config of Resize - type='Resize', # Resize pipeline - scale=(-1, 256)), # The scale to resize images - dict( # Config of 
CenterCrop - type='CenterCrop', # Center crop pipeline, cropping the center area from images - crop_size=224), # The size to crop images - dict( # Config of Flip - type='Flip', # Flip pipeline - flip_ratio=0), # Probability of implementing flip - dict( # Config of Normalize - type='Normalize', # Normalize pipeline - **img_norm_cfg), # Config of image normalization - dict( # Config of FormatShape - type='FormatShape', # Format shape pipeline, Format final image shape to the given input_format - input_format='NCHW'), # Final image shape format - dict( # Config of Collect - type='Collect', # Collect pipeline that decides which keys in the data should be passed to the recognizer - keys=['imgs', 'label'], # Keys of input - meta_keys=[]), # Meta keys of input - dict( # Config of ToTensor - type='ToTensor', # Convert other types to tensor type pipeline - keys=['imgs']) # Keys to be converted from image to tensor - ] - test_pipeline = [ # List of testing pipeline steps - dict( # Config of SampleFrames - type='SampleFrames', # Sample frames pipeline, sampling frames from video - clip_len=1, # Frames of each sampled output clip - frame_interval=1, # Temporal interval of adjacent sampled frames - num_clips=25, # Number of clips to be sampled - test_mode=True), # Whether to set test mode in sampling - dict( # Config of RawFrameDecode - type='RawFrameDecode'), # Load and decode Frames pipeline, picking raw frames with given indices - dict( # Config of Resize - type='Resize', # Resize pipeline - scale=(-1, 256)), # The scale to resize images - dict( # Config of TenCrop - type='TenCrop', # Ten crop pipeline, cropping ten area from images - crop_size=224), # The size to crop images - dict( # Config of Flip - type='Flip', # Flip pipeline - flip_ratio=0), # Probability of implementing flip - dict( # Config of Normalize - type='Normalize', # Normalize pipeline - **img_norm_cfg), # Config of image normalization - dict( # Config of FormatShape - type='FormatShape', # Format shape pipeline, Format final image shape to the given input_format - input_format='NCHW'), # Final image shape format - dict( # Config of Collect - type='Collect', # Collect pipeline that decides which keys in the data should be passed to the recognizer - keys=['imgs', 'label'], # Keys of input - meta_keys=[]), # Meta keys of input - dict( # Config of ToTensor - type='ToTensor', # Convert other types to tensor type pipeline - keys=['imgs']) # Keys to be converted from image to tensor - ] - data = dict( # Config of data - videos_per_gpu=32, # Batch size of each single GPU - workers_per_gpu=2, # Workers to pre-fetch data for each single GPU - train_dataloader=dict( # Additional config of train dataloader - drop_last=True), # Whether to drop out the last batch of data in training - val_dataloader=dict( # Additional config of validation dataloader - videos_per_gpu=1), # Batch size of each single GPU during evaluation - test_dataloader=dict( # Additional config of test dataloader - videos_per_gpu=2), # Batch size of each single GPU during testing - train=dict( # Training dataset config - type=dataset_type, - ann_file=ann_file_train, - data_prefix=data_root, - pipeline=train_pipeline), - val=dict( # Validation dataset config - type=dataset_type, - ann_file=ann_file_val, - data_prefix=data_root_val, - pipeline=val_pipeline), - test=dict( # Testing dataset config - type=dataset_type, - ann_file=ann_file_test, - data_prefix=data_root_val, - pipeline=test_pipeline)) - # optimizer - optimizer = dict( - # Config used to build optimizer, support (1). 
All the optimizers in PyTorch
- # whose arguments are also the same as those in PyTorch. (2). Custom optimizers
- # which are built on `constructor`, referring to "tutorials/5_new_modules.md"
- # for implementation.
- type='SGD',  # Type of optimizer, refer to https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/optimizer/default_constructor.py#L13 for more details
- lr=0.01,  # Learning rate, see detail usages of the parameters in the documentation of PyTorch
- momentum=0.9,  # Momentum,
- weight_decay=0.0001)  # Weight decay of SGD
- optimizer_config = dict(  # Config used to build the optimizer hook
- grad_clip=dict(max_norm=40, norm_type=2))  # Use gradient clip
- # learning policy
- lr_config = dict(  # Learning rate scheduler config used to register LrUpdater hook
- policy='step',  # Policy of scheduler, also support CosineAnnealing, Cyclic, etc. Refer to details of supported LrUpdater from https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/lr_updater.py#L9
- step=[40, 80])  # Steps to decay the learning rate
- total_epochs = 100  # Total epochs to train the model
- checkpoint_config = dict(  # Config to set the checkpoint hook, Refer to https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/checkpoint.py for implementation
- interval=5)  # Interval to save checkpoint
- evaluation = dict(  # Config of evaluation during training
- interval=5,  # Interval to perform evaluation
- metrics=['top_k_accuracy', 'mean_class_accuracy'],  # Metrics to be performed
- metric_options=dict(top_k_accuracy=dict(topk=(1, 3))),  # Set top-k accuracy to 1 and 3 during validation
- save_best='top_k_accuracy')  # set `top_k_accuracy` as key indicator to save best checkpoint
- eval_config = dict(
- metric_options=dict(top_k_accuracy=dict(topk=(1, 3))))  # Set top-k accuracy to 1 and 3 during testing. You can also use `--eval top_k_accuracy` to assign evaluation metrics
- log_config = dict(  # Config to register logger hook
- interval=20,  # Interval to print the log
- hooks=[  # Hooks to be implemented during training
- dict(type='TextLoggerHook'),  # The logger used to record the training process
- # dict(type='TensorboardLoggerHook'),  # The Tensorboard logger is also supported
- ])
-
- # runtime settings
- dist_params = dict(backend='nccl')  # Parameters to setup distributed training, the port can also be set
- log_level = 'INFO'  # The level of logging
- work_dir = './work_dirs/tsn_r50_1x1x3_100e_kinetics400_rgb/'  # Directory to save the model checkpoints and logs for the current experiments
- load_from = None  # load models as a pre-trained model from a given path. This will not resume training
- resume_from = None  # Resume checkpoints from a given path, the training will be resumed from the epoch when the checkpoint's is saved
- workflow = [('train', 1)]  # Workflow for runner. [('train', 1)] means there is only one workflow and the workflow named 'train' is executed once
-
- ```
+ To help the users have a basic idea of a complete config structure and the modules in an action recognition system,
+ we make brief comments on the config of TSN, as follows.
+ For more detailed usage and alternatives for each parameter in each module, please refer to the API documentation.
+ + ```python + # model settings + model = dict( # Config of the model + type='Recognizer2D', # Type of the recognizer + backbone=dict( # Dict for backbone + type='ResNet', # Name of the backbone + pretrained='torchvision://resnet50', # The url/site of the pretrained model + depth=50, # Depth of ResNet model + norm_eval=False), # Whether to set BN layers to eval mode when training + cls_head=dict( # Dict for classification head + type='TSNHead', # Name of classification head + num_classes=400, # Number of classes to be classified. + in_channels=2048, # The input channels of classification head. + spatial_type='avg', # Type of pooling in spatial dimension + consensus=dict(type='AvgConsensus', dim=1), # Config of consensus module + dropout_ratio=0.4, # Probability in dropout layer + init_std=0.01), # Std value for linear layer initiation + # model training and testing settings + train_cfg=None, # Config of training hyperparameters for TSN + test_cfg=dict(average_clips=None)) # Config for testing hyperparameters for TSN. + + # dataset settings + dataset_type = 'RawframeDataset' # Type of dataset for training, validation and testing + data_root = 'data/kinetics400/rawframes_train/' # Root path to data for training + data_root_val = 'data/kinetics400/rawframes_val/' # Root path to data for validation and testing + ann_file_train = 'data/kinetics400/kinetics400_train_list_rawframes.txt' # Path to the annotation file for training + ann_file_val = 'data/kinetics400/kinetics400_val_list_rawframes.txt' # Path to the annotation file for validation + ann_file_test = 'data/kinetics400/kinetics400_val_list_rawframes.txt' # Path to the annotation file for testing + img_norm_cfg = dict( # Config of image normalization used in data pipeline + mean=[123.675, 116.28, 103.53], # Mean values of different channels to normalize + std=[58.395, 57.12, 57.375], # Std values of different channels to normalize + to_bgr=False) # Whether to convert channels from RGB to BGR + + train_pipeline = [ # List of training pipeline steps + dict( # Config of SampleFrames + type='SampleFrames', # Sample frames pipeline, sampling frames from video + clip_len=1, # Frames of each sampled output clip + frame_interval=1, # Temporal interval of adjacent sampled frames + num_clips=3), # Number of clips to be sampled + dict( # Config of RawFrameDecode + type='RawFrameDecode'), # Load and decode Frames pipeline, picking raw frames with given indices + dict( # Config of Resize + type='Resize', # Resize pipeline + scale=(-1, 256)), # The scale to resize images + dict( # Config of MultiScaleCrop + type='MultiScaleCrop', # Multi scale crop pipeline, cropping images with a list of randomly selected scales + input_size=224, # Input size of the network + scales=(1, 0.875, 0.75, 0.66), # Scales of width and height to be selected + random_crop=False, # Whether to randomly sample cropping bbox + max_wh_scale_gap=1), # Maximum gap of w and h scale levels + dict( # Config of Resize + type='Resize', # Resize pipeline + scale=(224, 224), # The scale to resize images + keep_ratio=False), # Whether to resize with changing the aspect ratio + dict( # Config of Flip + type='Flip', # Flip Pipeline + flip_ratio=0.5), # Probability of implementing flip + dict( # Config of Normalize + type='Normalize', # Normalize pipeline + **img_norm_cfg), # Config of image normalization + dict( # Config of FormatShape + type='FormatShape', # Format shape pipeline, Format final image shape to the given input_format + input_format='NCHW'), # Final image shape format + dict( # 
Config of Collect + type='Collect', # Collect pipeline that decides which keys in the data should be passed to the recognizer + keys=['imgs', 'label'], # Keys of input + meta_keys=[]), # Meta keys of input + dict( # Config of ToTensor + type='ToTensor', # Convert other types to tensor type pipeline + keys=['imgs', 'label']) # Keys to be converted from image to tensor + ] + val_pipeline = [ # List of validation pipeline steps + dict( # Config of SampleFrames + type='SampleFrames', # Sample frames pipeline, sampling frames from video + clip_len=1, # Frames of each sampled output clip + frame_interval=1, # Temporal interval of adjacent sampled frames + num_clips=3, # Number of clips to be sampled + test_mode=True), # Whether to set test mode in sampling + dict( # Config of RawFrameDecode + type='RawFrameDecode'), # Load and decode Frames pipeline, picking raw frames with given indices + dict( # Config of Resize + type='Resize', # Resize pipeline + scale=(-1, 256)), # The scale to resize images + dict( # Config of CenterCrop + type='CenterCrop', # Center crop pipeline, cropping the center area from images + crop_size=224), # The size to crop images + dict( # Config of Flip + type='Flip', # Flip pipeline + flip_ratio=0), # Probability of implementing flip + dict( # Config of Normalize + type='Normalize', # Normalize pipeline + **img_norm_cfg), # Config of image normalization + dict( # Config of FormatShape + type='FormatShape', # Format shape pipeline, Format final image shape to the given input_format + input_format='NCHW'), # Final image shape format + dict( # Config of Collect + type='Collect', # Collect pipeline that decides which keys in the data should be passed to the recognizer + keys=['imgs', 'label'], # Keys of input + meta_keys=[]), # Meta keys of input + dict( # Config of ToTensor + type='ToTensor', # Convert other types to tensor type pipeline + keys=['imgs']) # Keys to be converted from image to tensor + ] + test_pipeline = [ # List of testing pipeline steps + dict( # Config of SampleFrames + type='SampleFrames', # Sample frames pipeline, sampling frames from video + clip_len=1, # Frames of each sampled output clip + frame_interval=1, # Temporal interval of adjacent sampled frames + num_clips=25, # Number of clips to be sampled + test_mode=True), # Whether to set test mode in sampling + dict( # Config of RawFrameDecode + type='RawFrameDecode'), # Load and decode Frames pipeline, picking raw frames with given indices + dict( # Config of Resize + type='Resize', # Resize pipeline + scale=(-1, 256)), # The scale to resize images + dict( # Config of TenCrop + type='TenCrop', # Ten crop pipeline, cropping ten area from images + crop_size=224), # The size to crop images + dict( # Config of Flip + type='Flip', # Flip pipeline + flip_ratio=0), # Probability of implementing flip + dict( # Config of Normalize + type='Normalize', # Normalize pipeline + **img_norm_cfg), # Config of image normalization + dict( # Config of FormatShape + type='FormatShape', # Format shape pipeline, Format final image shape to the given input_format + input_format='NCHW'), # Final image shape format + dict( # Config of Collect + type='Collect', # Collect pipeline that decides which keys in the data should be passed to the recognizer + keys=['imgs', 'label'], # Keys of input + meta_keys=[]), # Meta keys of input + dict( # Config of ToTensor + type='ToTensor', # Convert other types to tensor type pipeline + keys=['imgs']) # Keys to be converted from image to tensor + ] + data = dict( # Config of data + 
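    # Note: the effective batch size is videos_per_gpu * (number of GPUs);
    # when either changes, the lr in the optimizer below is commonly rescaled
    # linearly to match (the usual linear scaling rule).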
videos_per_gpu=32, # Batch size of each single GPU + workers_per_gpu=2, # Workers to pre-fetch data for each single GPU + train_dataloader=dict( # Additional config of train dataloader + drop_last=True), # Whether to drop out the last batch of data in training + val_dataloader=dict( # Additional config of validation dataloader + videos_per_gpu=1), # Batch size of each single GPU during evaluation + test_dataloader=dict( # Additional config of test dataloader + videos_per_gpu=2), # Batch size of each single GPU during testing + train=dict( # Training dataset config + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( # Validation dataset config + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( # Testing dataset config + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + pipeline=test_pipeline)) + # optimizer + optimizer = dict( + # Config used to build optimizer, support (1). All the optimizers in PyTorch + # whose arguments are also the same as those in PyTorch. (2). Custom optimizers + # which are built on `constructor`, referring to "tutorials/5_new_modules.md" + # for implementation. + type='SGD', # Type of optimizer, refer to https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/optimizer/default_constructor.py#L13 for more details + lr=0.01, # Learning rate, see detail usages of the parameters in the documentation of PyTorch + momentum=0.9, # Momentum, + weight_decay=0.0001) # Weight decay of SGD + optimizer_config = dict( # Config used to build the optimizer hook + grad_clip=dict(max_norm=40, norm_type=2)) # Use gradient clip + # learning policy + lr_config = dict( # Learning rate scheduler config used to register LrUpdater hook + policy='step', # Policy of scheduler, also support CosineAnnealing, Cyclic, etc. Refer to details of supported LrUpdater from https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/lr_updater.py#L9 + step=[40, 80]) # Steps to decay the learning rate + total_epochs = 100 # Total epochs to train the model + checkpoint_config = dict( # Config to set the checkpoint hook, Refer to https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/checkpoint.py for implementation + interval=5) # Interval to save checkpoint + evaluation = dict( # Config of evaluation during training + interval=5, # Interval to perform evaluation + metrics=['top_k_accuracy', 'mean_class_accuracy'], # Metrics to be performed + metric_options=dict(top_k_accuracy=dict(topk=(1, 3))), # Set top-k accuracy to 1 and 3 during validation + save_best='top_k_accuracy') # set `top_k_accuracy` as key indicator to save best checkpoint + eval_config = dict( + metric_options=dict(top_k_accuracy=dict(topk=(1, 3)))) # Set top-k accuracy to 1 and 3 during testing. 
You can also use `--eval top_k_accuracy` to assign evaluation metrics + log_config = dict( # Config to register logger hook + interval=20, # Interval to print the log + hooks=[ # Hooks to be implemented during training + dict(type='TextLoggerHook'), # The logger used to record the training process + # dict(type='TensorboardLoggerHook'), # The Tensorboard logger is also supported + ]) + + # runtime settings + dist_params = dict(backend='nccl') # Parameters to setup distributed training, the port can also be set + log_level = 'INFO' # The level of logging + work_dir = './work_dirs/tsn_r50_1x1x3_100e_kinetics400_rgb/' # Directory to save the model checkpoints and logs for the current experiments + load_from = None # load models as a pre-trained model from a given path. This will not resume training + resume_from = None # Resume checkpoints from a given path, the training will be resumed from the epoch when the checkpoint's is saved + workflow = [('train', 1)] # Workflow for runner. [('train', 1)] means there is only one workflow and the workflow named 'train' is executed once + + ``` ### Config System for Spatio-Temporal Action Detection @@ -441,225 +441,225 @@ We incorporate modular design into our config system, which is convenient to con - An Example of FastRCNN - To help the users have a basic idea of a complete config structure and the modules in a spatio-temporal action detection system, - we make brief comments on the config of FastRCNN as the following. - For more detailed usage and alternative for per parameter in each module, please refer to the API documentation. - - ```python - # model setting - model = dict( # Config of the model - type='FastRCNN', # Type of the detector - backbone=dict( # Dict for backbone - type='ResNet3dSlowOnly', # Name of the backbone - depth=50, # Depth of ResNet model - pretrained=None, # The url/site of the pretrained model - pretrained2d=False, # If the pretrained model is 2D - lateral=False, # If the backbone is with lateral connections - num_stages=4, # Stages of ResNet model - conv1_kernel=(1, 7, 7), # Conv1 kernel size - conv1_stride_t=1, # Conv1 temporal stride - pool1_stride_t=1, # Pool1 temporal stride - spatial_strides=(1, 2, 2, 1)), # The spatial stride for each ResNet stage - roi_head=dict( # Dict for roi_head - type='AVARoIHead', # Name of the roi_head - bbox_roi_extractor=dict( # Dict for bbox_roi_extractor - type='SingleRoIExtractor3D', # Name of the bbox_roi_extractor - roi_layer_type='RoIAlign', # Type of the RoI op - output_size=8, # Output feature size of the RoI op - with_temporal_pool=True), # If temporal dim is pooled - bbox_head=dict( # Dict for bbox_head - type='BBoxHeadAVA', # Name of the bbox_head - in_channels=2048, # Number of channels of the input feature - num_classes=81, # Number of action classes + 1 - multilabel=True, # If the dataset is multilabel - dropout_ratio=0.5)), # The dropout ratio used - # model training and testing settings - train_cfg=dict( # Training config of FastRCNN - rcnn=dict( # Dict for rcnn training config - assigner=dict( # Dict for assigner - type='MaxIoUAssignerAVA', # Name of the assigner - pos_iou_thr=0.9, # IoU threshold for positive examples, > pos_iou_thr -> positive - neg_iou_thr=0.9, # IoU threshold for negative examples, < neg_iou_thr -> negative - min_pos_iou=0.9), # Minimum acceptable IoU for positive examples - sampler=dict( # Dict for sample - type='RandomSampler', # Name of the sampler - num=32, # Batch Size of the sampler - pos_fraction=1, # Positive bbox fraction of the sampler - 
neg_pos_ub=-1, # Upper bound of the ratio of num negative to num positive - add_gt_as_proposals=True), # Add gt bboxes as proposals - pos_weight=1.0, # Loss weight of positive examples - debug=False)), # Debug mode - test_cfg=dict( # Testing config of FastRCNN - rcnn=dict( # Dict for rcnn testing config - action_thr=0.002))) # The threshold of an action - - # dataset settings - dataset_type = 'AVADataset' # Type of dataset for training, validation and testing - data_root = 'data/ava/rawframes' # Root path to data - anno_root = 'data/ava/annotations' # Root path to annotations - - ann_file_train = f'{anno_root}/ava_train_v2.1.csv' # Path to the annotation file for training - ann_file_val = f'{anno_root}/ava_val_v2.1.csv' # Path to the annotation file for validation - - exclude_file_train = f'{anno_root}/ava_train_excluded_timestamps_v2.1.csv' # Path to the exclude annotation file for training - exclude_file_val = f'{anno_root}/ava_val_excluded_timestamps_v2.1.csv' # Path to the exclude annotation file for validation - - label_file = f'{anno_root}/ava_action_list_v2.1_for_activitynet_2018.pbtxt' # Path to the label file - - proposal_file_train = f'{anno_root}/ava_dense_proposals_train.FAIR.recall_93.9.pkl' # Path to the human detection proposals for training examples - proposal_file_val = f'{anno_root}/ava_dense_proposals_val.FAIR.recall_93.9.pkl' # Path to the human detection proposals for validation examples - - img_norm_cfg = dict( # Config of image normalization used in data pipeline - mean=[123.675, 116.28, 103.53], # Mean values of different channels to normalize - std=[58.395, 57.12, 57.375], # Std values of different channels to normalize - to_bgr=False) # Whether to convert channels from RGB to BGR - - train_pipeline = [ # List of training pipeline steps - dict( # Config of SampleFrames - type='AVASampleFrames', # Sample frames pipeline, sampling frames from video - clip_len=4, # Frames of each sampled output clip - frame_interval=16), # Temporal interval of adjacent sampled frames - dict( # Config of RawFrameDecode - type='RawFrameDecode'), # Load and decode Frames pipeline, picking raw frames with given indices - dict( # Config of RandomRescale - type='RandomRescale', # Randomly rescale the shortedge by a given range - scale_range=(256, 320)), # The shortedge size range of RandomRescale - dict( # Config of RandomCrop - type='RandomCrop', # Randomly crop a patch with the given size - size=256), # The size of the cropped patch - dict( # Config of Flip - type='Flip', # Flip Pipeline - flip_ratio=0.5), # Probability of implementing flip - dict( # Config of Normalize - type='Normalize', # Normalize pipeline - **img_norm_cfg), # Config of image normalization - dict( # Config of FormatShape - type='FormatShape', # Format shape pipeline, Format final image shape to the given input_format - input_format='NCTHW', # Final image shape format - collapse=True), # Collapse the dim N if N == 1 - dict( # Config of Rename - type='Rename', # Rename keys - mapping=dict(imgs='img')), # The old name to new name mapping - dict( # Config of ToTensor - type='ToTensor', # Convert other types to tensor type pipeline - keys=['img', 'proposals', 'gt_bboxes', 'gt_labels']), # Keys to be converted from image to tensor - dict( # Config of ToDataContainer - type='ToDataContainer', # Convert other types to DataContainer type pipeline - fields=[ # Fields to convert to DataContainer - dict( # Dict of fields - key=['proposals', 'gt_bboxes', 'gt_labels'], # Keys to Convert to DataContainer - stack=False)]), # Whether 
to stack these tensor - dict( # Config of Collect - type='Collect', # Collect pipeline that decides which keys in the data should be passed to the detector - keys=['img', 'proposals', 'gt_bboxes', 'gt_labels'], # Keys of input - meta_keys=['scores', 'entity_ids']), # Meta keys of input - ] - - val_pipeline = [ # List of validation pipeline steps - dict( # Config of SampleFrames - type='AVASampleFrames', # Sample frames pipeline, sampling frames from video - clip_len=4, # Frames of each sampled output clip - frame_interval=16) # Temporal interval of adjacent sampled frames - dict( # Config of RawFrameDecode - type='RawFrameDecode'), # Load and decode Frames pipeline, picking raw frames with given indices - dict( # Config of Resize - type='Resize', # Resize pipeline - scale=(-1, 256)), # The scale to resize images - dict( # Config of Normalize - type='Normalize', # Normalize pipeline - **img_norm_cfg), # Config of image normalization - dict( # Config of FormatShape - type='FormatShape', # Format shape pipeline, Format final image shape to the given input_format - input_format='NCTHW', # Final image shape format - collapse=True), # Collapse the dim N if N == 1 - dict( # Config of Rename - type='Rename', # Rename keys - mapping=dict(imgs='img')), # The old name to new name mapping - dict( # Config of ToTensor - type='ToTensor', # Convert other types to tensor type pipeline - keys=['img', 'proposals']), # Keys to be converted from image to tensor - dict( # Config of ToDataContainer - type='ToDataContainer', # Convert other types to DataContainer type pipeline - fields=[ # Fields to convert to DataContainer - dict( # Dict of fields - key=['proposals'], # Keys to Convert to DataContainer - stack=False)]), # Whether to stack these tensor - dict( # Config of Collect - type='Collect', # Collect pipeline that decides which keys in the data should be passed to the detector - keys=['img', 'proposals'], # Keys of input - meta_keys=['scores', 'entity_ids'], # Meta keys of input - nested=True) # Whether to wrap the data in a nested list - ] - - data = dict( # Config of data - videos_per_gpu=16, # Batch size of each single GPU - workers_per_gpu=2, # Workers to pre-fetch data for each single GPU - val_dataloader=dict( # Additional config of validation dataloader - videos_per_gpu=1), # Batch size of each single GPU during evaluation - train=dict( # Training dataset config - type=dataset_type, - ann_file=ann_file_train, - exclude_file=exclude_file_train, - pipeline=train_pipeline, - label_file=label_file, - proposal_file=proposal_file_train, - person_det_score_thr=0.9, - data_prefix=data_root), - val=dict( # Validation dataset config - type=dataset_type, - ann_file=ann_file_val, - exclude_file=exclude_file_val, - pipeline=val_pipeline, - label_file=label_file, - proposal_file=proposal_file_val, - person_det_score_thr=0.9, - data_prefix=data_root)) - data['test'] = data['val'] # Set test_dataset as val_dataset - - # optimizer - optimizer = dict( - # Config used to build optimizer, support (1). All the optimizers in PyTorch - # whose arguments are also the same as those in PyTorch. (2). Custom optimizers - # which are built on `constructor`, referring to "tutorials/5_new_modules.md" - # for implementation. 
- type='SGD', # Type of optimizer, refer to https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/optimizer/default_constructor.py#L13 for more details - lr=0.2, # Learning rate, see detail usages of the parameters in the documentation of PyTorch (for 8gpu) - momentum=0.9, # Momentum, - weight_decay=0.00001) # Weight decay of SGD - - optimizer_config = dict( # Config used to build the optimizer hook - grad_clip=dict(max_norm=40, norm_type=2)) # Use gradient clip - - lr_config = dict( # Learning rate scheduler config used to register LrUpdater hook - policy='step', # Policy of scheduler, also support CosineAnnealing, Cyclic, etc. Refer to details of supported LrUpdater from https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/lr_updater.py#L9 - step=[40, 80], # Steps to decay the learning rate - warmup='linear', # Warmup strategy - warmup_by_epoch=True, # Warmup_iters indicates iter num or epoch num - warmup_iters=5, # Number of iters or epochs for warmup - warmup_ratio=0.1) # The initial learning rate is warmup_ratio * lr - - total_epochs = 20 # Total epochs to train the model - checkpoint_config = dict( # Config to set the checkpoint hook, Refer to https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/checkpoint.py for implementation - interval=1) # Interval to save checkpoint - workflow = [('train', 1)] # Workflow for runner. [('train', 1)] means there is only one workflow and the workflow named 'train' is executed once - evaluation = dict( # Config of evaluation during training - interval=1, save_best='mAP@0.5IOU') # Interval to perform evaluation and the key for saving best checkpoint - log_config = dict( # Config to register logger hook - interval=20, # Interval to print the log - hooks=[ # Hooks to be implemented during training - dict(type='TextLoggerHook'), # The logger used to record the training process - ]) - - # runtime settings - dist_params = dict(backend='nccl') # Parameters to setup distributed training, the port can also be set - log_level = 'INFO' # The level of logging - work_dir = ('./work_dirs/ava/' # Directory to save the model checkpoints and logs for the current experiments - 'slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb') - load_from = ('https://download.openmmlab.com/mmaction/recognition/slowonly/' # load models as a pre-trained model from a given path. This will not resume training - 'slowonly_r50_4x16x1_256e_kinetics400_rgb/' - 'slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth') - resume_from = None # Resume checkpoints from a given path, the training will be resumed from the epoch when the checkpoint's is saved - ``` + To help the users have a basic idea of a complete config structure and the modules in a spatio-temporal action detection system, + we make brief comments on the config of FastRCNN as the following. + For more detailed usage and alternative for per parameter in each module, please refer to the API documentation. 
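  Since these configs are ordinary Python-syntax files, they can also be loaded and overridden programmatically rather than edited by hand. Below is a minimal sketch of that pattern, assuming `mmcv` is installed and using an illustrative config path:

  ```python
  from mmcv import Config

  # Load a config file into a nested, attribute-accessible object
  # (the path here is illustrative).
  cfg = Config.fromfile(
      'configs/detection/ava/slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb.py')

  # Override nested fields with dotted keys, e.g. shrink the per-GPU batch
  # size and scale the learning rate down for a single-GPU debug run.
  cfg.merge_from_dict({
      'data.videos_per_gpu': 4,
      'optimizer.lr': 0.2 / 16,
  })

  print(cfg.model.type)  # e.g. 'FastRCNN' for the config annotated below
  ```

  This is the same mechanism behind the `--cfg-options` (in some versions `--options`) flag of the training and testing scripts.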
+ + ```python + # model setting + model = dict( # Config of the model + type='FastRCNN', # Type of the detector + backbone=dict( # Dict for backbone + type='ResNet3dSlowOnly', # Name of the backbone + depth=50, # Depth of ResNet model + pretrained=None, # The url/site of the pretrained model + pretrained2d=False, # If the pretrained model is 2D + lateral=False, # If the backbone is with lateral connections + num_stages=4, # Stages of ResNet model + conv1_kernel=(1, 7, 7), # Conv1 kernel size + conv1_stride_t=1, # Conv1 temporal stride + pool1_stride_t=1, # Pool1 temporal stride + spatial_strides=(1, 2, 2, 1)), # The spatial stride for each ResNet stage + roi_head=dict( # Dict for roi_head + type='AVARoIHead', # Name of the roi_head + bbox_roi_extractor=dict( # Dict for bbox_roi_extractor + type='SingleRoIExtractor3D', # Name of the bbox_roi_extractor + roi_layer_type='RoIAlign', # Type of the RoI op + output_size=8, # Output feature size of the RoI op + with_temporal_pool=True), # If temporal dim is pooled + bbox_head=dict( # Dict for bbox_head + type='BBoxHeadAVA', # Name of the bbox_head + in_channels=2048, # Number of channels of the input feature + num_classes=81, # Number of action classes + 1 + multilabel=True, # If the dataset is multilabel + dropout_ratio=0.5)), # The dropout ratio used + # model training and testing settings + train_cfg=dict( # Training config of FastRCNN + rcnn=dict( # Dict for rcnn training config + assigner=dict( # Dict for assigner + type='MaxIoUAssignerAVA', # Name of the assigner + pos_iou_thr=0.9, # IoU threshold for positive examples, > pos_iou_thr -> positive + neg_iou_thr=0.9, # IoU threshold for negative examples, < neg_iou_thr -> negative + min_pos_iou=0.9), # Minimum acceptable IoU for positive examples + sampler=dict( # Dict for sample + type='RandomSampler', # Name of the sampler + num=32, # Batch Size of the sampler + pos_fraction=1, # Positive bbox fraction of the sampler + neg_pos_ub=-1, # Upper bound of the ratio of num negative to num positive + add_gt_as_proposals=True), # Add gt bboxes as proposals + pos_weight=1.0, # Loss weight of positive examples + debug=False)), # Debug mode + test_cfg=dict( # Testing config of FastRCNN + rcnn=dict( # Dict for rcnn testing config + action_thr=0.002))) # The threshold of an action + + # dataset settings + dataset_type = 'AVADataset' # Type of dataset for training, validation and testing + data_root = 'data/ava/rawframes' # Root path to data + anno_root = 'data/ava/annotations' # Root path to annotations + + ann_file_train = f'{anno_root}/ava_train_v2.1.csv' # Path to the annotation file for training + ann_file_val = f'{anno_root}/ava_val_v2.1.csv' # Path to the annotation file for validation + + exclude_file_train = f'{anno_root}/ava_train_excluded_timestamps_v2.1.csv' # Path to the exclude annotation file for training + exclude_file_val = f'{anno_root}/ava_val_excluded_timestamps_v2.1.csv' # Path to the exclude annotation file for validation + + label_file = f'{anno_root}/ava_action_list_v2.1_for_activitynet_2018.pbtxt' # Path to the label file + + proposal_file_train = f'{anno_root}/ava_dense_proposals_train.FAIR.recall_93.9.pkl' # Path to the human detection proposals for training examples + proposal_file_val = f'{anno_root}/ava_dense_proposals_val.FAIR.recall_93.9.pkl' # Path to the human detection proposals for validation examples + + img_norm_cfg = dict( # Config of image normalization used in data pipeline + mean=[123.675, 116.28, 103.53], # Mean values of different channels to normalize + 
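    # (The mean/std above and below are the standard ImageNet RGB statistics
    # on the 0-255 scale, which most pretrained backbones expect.)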
std=[58.395, 57.12, 57.375], # Std values of different channels to normalize + to_bgr=False) # Whether to convert channels from RGB to BGR + + train_pipeline = [ # List of training pipeline steps + dict( # Config of SampleFrames + type='AVASampleFrames', # Sample frames pipeline, sampling frames from video + clip_len=4, # Frames of each sampled output clip + frame_interval=16), # Temporal interval of adjacent sampled frames + dict( # Config of RawFrameDecode + type='RawFrameDecode'), # Load and decode Frames pipeline, picking raw frames with given indices + dict( # Config of RandomRescale + type='RandomRescale', # Randomly rescale the shortedge by a given range + scale_range=(256, 320)), # The shortedge size range of RandomRescale + dict( # Config of RandomCrop + type='RandomCrop', # Randomly crop a patch with the given size + size=256), # The size of the cropped patch + dict( # Config of Flip + type='Flip', # Flip Pipeline + flip_ratio=0.5), # Probability of implementing flip + dict( # Config of Normalize + type='Normalize', # Normalize pipeline + **img_norm_cfg), # Config of image normalization + dict( # Config of FormatShape + type='FormatShape', # Format shape pipeline, Format final image shape to the given input_format + input_format='NCTHW', # Final image shape format + collapse=True), # Collapse the dim N if N == 1 + dict( # Config of Rename + type='Rename', # Rename keys + mapping=dict(imgs='img')), # The old name to new name mapping + dict( # Config of ToTensor + type='ToTensor', # Convert other types to tensor type pipeline + keys=['img', 'proposals', 'gt_bboxes', 'gt_labels']), # Keys to be converted from image to tensor + dict( # Config of ToDataContainer + type='ToDataContainer', # Convert other types to DataContainer type pipeline + fields=[ # Fields to convert to DataContainer + dict( # Dict of fields + key=['proposals', 'gt_bboxes', 'gt_labels'], # Keys to Convert to DataContainer + stack=False)]), # Whether to stack these tensor + dict( # Config of Collect + type='Collect', # Collect pipeline that decides which keys in the data should be passed to the detector + keys=['img', 'proposals', 'gt_bboxes', 'gt_labels'], # Keys of input + meta_keys=['scores', 'entity_ids']), # Meta keys of input + ] + + val_pipeline = [ # List of validation pipeline steps + dict( # Config of SampleFrames + type='AVASampleFrames', # Sample frames pipeline, sampling frames from video + clip_len=4, # Frames of each sampled output clip + frame_interval=16), # Temporal interval of adjacent sampled frames + dict( # Config of RawFrameDecode + type='RawFrameDecode'), # Load and decode Frames pipeline, picking raw frames with given indices + dict( # Config of Resize + type='Resize', # Resize pipeline + scale=(-1, 256)), # The scale to resize images + dict( # Config of Normalize + type='Normalize', # Normalize pipeline + **img_norm_cfg), # Config of image normalization + dict( # Config of FormatShape + type='FormatShape', # Format shape pipeline, Format final image shape to the given input_format + input_format='NCTHW', # Final image shape format + collapse=True), # Collapse the dim N if N == 1 + dict( # Config of Rename + type='Rename', # Rename keys + mapping=dict(imgs='img')), # The old name to new name mapping + dict( # Config of ToTensor + type='ToTensor', # Convert other types to tensor type pipeline + keys=['img', 'proposals']), # Keys to be converted from image to tensor + dict( # Config of ToDataContainer + type='ToDataContainer', # Convert other types to DataContainer type pipeline + fields=[ #
Fields to convert to DataContainer + dict( # Dict of fields + key=['proposals'], # Keys to Convert to DataContainer + stack=False)]), # Whether to stack these tensor + dict( # Config of Collect + type='Collect', # Collect pipeline that decides which keys in the data should be passed to the detector + keys=['img', 'proposals'], # Keys of input + meta_keys=['scores', 'entity_ids'], # Meta keys of input + nested=True) # Whether to wrap the data in a nested list + ] + + data = dict( # Config of data + videos_per_gpu=16, # Batch size of each single GPU + workers_per_gpu=2, # Workers to pre-fetch data for each single GPU + val_dataloader=dict( # Additional config of validation dataloader + videos_per_gpu=1), # Batch size of each single GPU during evaluation + train=dict( # Training dataset config + type=dataset_type, + ann_file=ann_file_train, + exclude_file=exclude_file_train, + pipeline=train_pipeline, + label_file=label_file, + proposal_file=proposal_file_train, + person_det_score_thr=0.9, + data_prefix=data_root), + val=dict( # Validation dataset config + type=dataset_type, + ann_file=ann_file_val, + exclude_file=exclude_file_val, + pipeline=val_pipeline, + label_file=label_file, + proposal_file=proposal_file_val, + person_det_score_thr=0.9, + data_prefix=data_root)) + data['test'] = data['val'] # Set test_dataset as val_dataset + + # optimizer + optimizer = dict( + # Config used to build optimizer, support (1). All the optimizers in PyTorch + # whose arguments are also the same as those in PyTorch. (2). Custom optimizers + # which are built on `constructor`, referring to "tutorials/5_new_modules.md" + # for implementation. + type='SGD', # Type of optimizer, refer to https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/optimizer/default_constructor.py#L13 for more details + lr=0.2, # Learning rate, see detail usages of the parameters in the documentation of PyTorch (for 8gpu) + momentum=0.9, # Momentum, + weight_decay=0.00001) # Weight decay of SGD + + optimizer_config = dict( # Config used to build the optimizer hook + grad_clip=dict(max_norm=40, norm_type=2)) # Use gradient clip + + lr_config = dict( # Learning rate scheduler config used to register LrUpdater hook + policy='step', # Policy of scheduler, also support CosineAnnealing, Cyclic, etc. Refer to details of supported LrUpdater from https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/lr_updater.py#L9 + step=[40, 80], # Steps to decay the learning rate + warmup='linear', # Warmup strategy + warmup_by_epoch=True, # Warmup_iters indicates iter num or epoch num + warmup_iters=5, # Number of iters or epochs for warmup + warmup_ratio=0.1) # The initial learning rate is warmup_ratio * lr + + total_epochs = 20 # Total epochs to train the model + checkpoint_config = dict( # Config to set the checkpoint hook, Refer to https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/checkpoint.py for implementation + interval=1) # Interval to save checkpoint + workflow = [('train', 1)] # Workflow for runner. 
[('train', 1)] means there is only one workflow and the workflow named 'train' is executed once + evaluation = dict( # Config of evaluation during training + interval=1, save_best='mAP@0.5IOU') # Interval to perform evaluation and the key for saving best checkpoint + log_config = dict( # Config to register logger hook + interval=20, # Interval to print the log + hooks=[ # Hooks to be implemented during training + dict(type='TextLoggerHook'), # The logger used to record the training process + ]) + + # runtime settings + dist_params = dict(backend='nccl') # Parameters to setup distributed training, the port can also be set + log_level = 'INFO' # The level of logging + work_dir = ('./work_dirs/ava/' # Directory to save the model checkpoints and logs for the current experiments + 'slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb') + load_from = ('https://download.openmmlab.com/mmaction/recognition/slowonly/' # load models as a pre-trained model from a given path. This will not resume training + 'slowonly_r50_4x16x1_256e_kinetics400_rgb/' + 'slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth') + resume_from = None # Resume checkpoints from a given path, the training will be resumed from the epoch when the checkpoint's is saved + ``` ## FAQ diff --git a/docs/tutorials/2_finetune.md b/docs/tutorials/2_finetune.md index f29263601e..ea2c83046e 100644 --- a/docs/tutorials/2_finetune.md +++ b/docs/tutorials/2_finetune.md @@ -18,7 +18,7 @@ to finetune them on other datasets, so that better performance can be achieved. There are two steps to finetune a model on a new dataset. 1. Add support for the new dataset. See [Tutorial 3: Adding New Dataset](3_new_dataset.md). -1. Modify the configs. This will be discussed in this tutorial. +2. Modify the configs. This will be discussed in this tutorial. For example, if the users want to finetune models pre-trained on Kinetics-400 Dataset to another dataset, say UCF101, then four parts in the config (see [here](1_config.md)) needs attention. diff --git a/docs/tutorials/4_data_pipeline.md b/docs/tutorials/4_data_pipeline.md index 2ecdedb5ed..97c5deb10c 100644 --- a/docs/tutorials/4_data_pipeline.md +++ b/docs/tutorials/4_data_pipeline.md @@ -125,31 +125,31 @@ For each operation, we list the related dict fields that are added/updated/remov `SampleFrames` -- add: frame_inds, clip_len, frame_interval, num_clips, *total_frames +- add: frame_inds, clip_len, frame_interval, num_clips, \*total_frames `DenseSampleFrames` -- add: frame_inds, clip_len, frame_interval, num_clips, *total_frames +- add: frame_inds, clip_len, frame_interval, num_clips, \*total_frames `PyAVDecode` - add: imgs, original_shape -- update: *frame_inds +- update: \*frame_inds `DecordDecode` - add: imgs, original_shape -- update: *frame_inds +- update: \*frame_inds `OpenCVDecode` - add: imgs, original_shape -- update: *frame_inds +- update: \*frame_inds `RawFrameDecode` - add: imgs, original_shape -- update: *frame_inds +- update: \*frame_inds ### Pre-processing @@ -228,35 +228,35 @@ It is **noteworthy** that the first key, commonly `imgs`, will be used as the ma 1. Write a new pipeline in any file, e.g., `my_pipeline.py`. It takes a dict as input and return a dict. 
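   To see this dict-in/dict-out contract in action before writing your own step, here is a minimal sketch; it assumes an mmaction2 installation, and the exact keys `SampleFrames` expects in `results` (such as `start_index`) can vary across versions:

   ```python
   from mmaction.datasets.pipelines import SampleFrames

   # A hand-built results dict standing in for what a dataset would provide;
   # 'start_index' and 'frame_dir' are assumed placeholder keys.
   results = dict(total_frames=120, start_index=0, frame_dir='dummy_dir')

   sampler = SampleFrames(clip_len=1, frame_interval=1, num_clips=3)
   results = sampler(results)

   # As listed above, SampleFrames added frame_inds, clip_len,
   # frame_interval and num_clips to the dict.
   print(results['frame_inds'])  # three sampled frame indices
   ```

   The skeleton below shows how little a custom step needs beyond that contract.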
- ```python - from mmaction.datasets import PIPELINES + ```python + from mmaction.datasets import PIPELINES - @PIPELINES.register_module() - class MyTransform: + @PIPELINES.register_module() + class MyTransform: - def __call__(self, results): - results['key'] = value - return results - ``` + def __call__(self, results): + results['key'] = value + return results + ``` 2. Import the new class. - ```python - from .my_pipeline import MyTransform - ``` + ```python + from .my_pipeline import MyTransform + ``` 3. Use it in config files. - ```python - img_norm_cfg = dict( - mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) - train_pipeline = [ - dict(type='DenseSampleFrames', clip_len=8, frame_interval=8, num_clips=1), - dict(type='RawFrameDecode', io_backend='disk'), - dict(type='MyTransform'), # use a custom pipeline - dict(type='Normalize', **img_norm_cfg), - dict(type='FormatShape', input_format='NCTHW'), - dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), - dict(type='ToTensor', keys=['imgs', 'label']) - ] - ``` + ```python + img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + train_pipeline = [ + dict(type='DenseSampleFrames', clip_len=8, frame_interval=8, num_clips=1), + dict(type='RawFrameDecode', io_backend='disk'), + dict(type='MyTransform'), # use a custom pipeline + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) + ] + ``` diff --git a/docs/tutorials/5_new_modules.md b/docs/tutorials/5_new_modules.md index 366d979049..c683c7f96c 100644 --- a/docs/tutorials/5_new_modules.md +++ b/docs/tutorials/5_new_modules.md @@ -115,41 +115,41 @@ Here we show how to develop new components with an example of TSN. 1. Create a new file `mmaction/models/backbones/resnet.py`. - ```python - import torch.nn as nn + ```python + import torch.nn as nn - from ..builder import BACKBONES + from ..builder import BACKBONES - @BACKBONES.register_module() - class ResNet(nn.Module): + @BACKBONES.register_module() + class ResNet(nn.Module): - def __init__(self, arg1, arg2): - pass + def __init__(self, arg1, arg2): + pass - def forward(self, x): # should return a tuple - pass + def forward(self, x): # should return a tuple + pass - def init_weights(self, pretrained=None): - pass - ``` + def init_weights(self, pretrained=None): + pass + ``` 2. Import the module in `mmaction/models/backbones/__init__.py`. - ```python - from .resnet import ResNet - ``` + ```python + from .resnet import ResNet + ``` 3. Use it in your config file. - ```python - model = dict( - ... - backbone=dict( - type='ResNet', - arg1=xxx, - arg2=xxx), - ) - ``` + ```python + model = dict( + ... + backbone=dict( + type='ResNet', + arg1=xxx, + arg2=xxx), + ) + ``` ### Add new heads @@ -157,45 +157,45 @@ Here we show how to develop a new head with the example of TSNHead as the follow 1. Create a new file `mmaction/models/heads/tsn_head.py`. - You can write a new classification head inheriting from [BaseHead](/mmaction/models/heads/base.py), - and overwrite `init_weights(self)` and `forward(self, x)` method. + You can write a new classification head inheriting from [BaseHead](/mmaction/models/heads/base.py), + and overwrite `init_weights(self)` and `forward(self, x)` method. 
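   For orientation, any registered head is constructible straight from its config dict through the registry. A minimal sketch, assuming an mmaction2 installation and using the bundled `TSNHead` (the `build_head` helper and the expected feature shape are as in recent releases and may differ in older ones):

   ```python
   import torch

   from mmaction.models import build_head

   # Instantiate a head from the same kind of dict used in config files.
   head = build_head(
       dict(
           type='TSNHead',
           num_classes=400,
           in_channels=2048,
           consensus=dict(type='AvgConsensus', dim=1)))
   head.init_weights()

   # TSNHead consumes per-segment backbone features shaped
   # (batch * num_segs, channels, H, W) and fuses segments via the consensus.
   feats = torch.randn(2 * 3, 2048, 7, 7)
   scores = head(feats, num_segs=3)
   print(scores.shape)  # torch.Size([2, 400])
   ```

   The skeleton below is what such a head looks like from the inside.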
- ```python - from ..builder import HEADS - from .base import BaseHead + ```python + from ..builder import HEADS + from .base import BaseHead - @HEADS.register_module() - class TSNHead(BaseHead): + @HEADS.register_module() + class TSNHead(BaseHead): - def __init__(self, arg1, arg2): - pass + def __init__(self, arg1, arg2): + pass - def forward(self, x): - pass + def forward(self, x): + pass - def init_weights(self): - pass - ``` + def init_weights(self): + pass + ``` 2. Import the module in `mmaction/models/heads/__init__.py` - ```python - from .tsn_head import TSNHead - ``` + ```python + from .tsn_head import TSNHead + ``` 3. Use it in your config file - ```python - model = dict( - ... - cls_head=dict( - type='TSNHead', - num_classes=400, - in_channels=2048, - arg1=xxx, - arg2=xxx), - ``` + ```python + model = dict( + ... + cls_head=dict( + type='TSNHead', + num_classes=400, + in_channels=2048, + arg1=xxx, + arg2=xxx), + ``` ### Add new loss diff --git a/docs/tutorials/7_customize_runtime.md b/docs/tutorials/7_customize_runtime.md index c2201cab65..e0f2834db8 100644 --- a/docs/tutorials/7_customize_runtime.md +++ b/docs/tutorials/7_customize_runtime.md @@ -76,8 +76,8 @@ To find the above module defined above, this module should be imported into the - Modify `mmaction/core/optimizer/__init__.py` to import it. - The newly defined module should be imported in `mmaction/core/optimizer/__init__.py` so that the registry will - find the new module and add it: + The newly defined module should be imported in `mmaction/core/optimizer/__init__.py` so that the registry will + find the new module and add it: ```python from .my_optimizer import MyOptimizer @@ -136,32 +136,32 @@ Tricks not implemented by the optimizer should be implemented through optimizer We list some common settings that could stabilize the training or accelerate the training. Feel free to create PR, issue for more settings. - __Use gradient clip to stabilize training__: - Some models need gradient clip to clip the gradients to stabilize the training process. An example is as below: + Some models need gradient clip to clip the gradients to stabilize the training process. An example is as below: - ```python - optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) - ``` + ```python + optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) + ``` - __Use momentum schedule to accelerate model convergence__: - We support momentum scheduler to modify model's momentum according to learning rate, which could make the model converge in a faster way. - Momentum scheduler is usually used with LR scheduler, for example, the following config is used in 3D detection to accelerate convergence. - For more details, please refer to the implementation of [CyclicLrUpdater](https://github.com/open-mmlab/mmcv/blob/f48241a65aebfe07db122e9db320c31b685dc674/mmcv/runner/hooks/lr_updater.py#L327) - and [CyclicMomentumUpdater](https://github.com/open-mmlab/mmcv/blob/f48241a65aebfe07db122e9db320c31b685dc674/mmcv/runner/hooks/momentum_updater.py#L130). - - ```python - lr_config = dict( - policy='cyclic', - target_ratio=(10, 1e-4), - cyclic_times=1, - step_ratio_up=0.4, - ) - momentum_config = dict( - policy='cyclic', - target_ratio=(0.85 / 0.95, 1), - cyclic_times=1, - step_ratio_up=0.4, - ) - ``` + We support momentum scheduler to modify model's momentum according to learning rate, which could make the model converge in a faster way. 
+ Momentum scheduler is usually used with LR scheduler, for example, the following config is used in 3D detection to accelerate convergence. + For more details, please refer to the implementation of [CyclicLrUpdater](https://github.com/open-mmlab/mmcv/blob/f48241a65aebfe07db122e9db320c31b685dc674/mmcv/runner/hooks/lr_updater.py#L327) + and [CyclicMomentumUpdater](https://github.com/open-mmlab/mmcv/blob/f48241a65aebfe07db122e9db320c31b685dc674/mmcv/runner/hooks/momentum_updater.py#L130). + + ```python + lr_config = dict( + policy='cyclic', + target_ratio=(10, 1e-4), + cyclic_times=1, + step_ratio_up=0.4, + ) + momentum_config = dict( + policy='cyclic', + target_ratio=(0.85 / 0.95, 1), + cyclic_times=1, + step_ratio_up=0.4, + ) + ``` ## Customize Training Schedules @@ -170,20 +170,20 @@ We support many other learning rate schedule [here](https://github.com/open-mmla - Poly schedule: - ```python - lr_config = dict(policy='poly', power=0.9, min_lr=1e-4, by_epoch=False) - ``` + ```python + lr_config = dict(policy='poly', power=0.9, min_lr=1e-4, by_epoch=False) + ``` - ConsineAnnealing schedule: - ```python - lr_config = dict( - policy='CosineAnnealing', - warmup='linear', - warmup_iters=1000, - warmup_ratio=1.0 / 10, - min_lr_ratio=1e-5) - ``` + ```python + lr_config = dict( + policy='CosineAnnealing', + warmup='linear', + warmup_iters=1000, + warmup_ratio=1.0 / 10, + min_lr_ratio=1e-5) + ``` ## Customize Workflow @@ -210,7 +210,7 @@ so that 1 epoch for training and 1 epoch for validation will be run iteratively. 1. The parameters of model will not be updated during val epoch. 2. Keyword `total_epochs` in the config only controls the number of training epochs and will not affect the validation workflow. 3. Workflows `[('train', 1), ('val', 1)]` and `[('train', 1)]` will not change the behavior of `EvalHook` because `EvalHook` is called by `after_train_epoch` and validation workflow only affect hooks that are called through `after_val_epoch`. - Therefore, the only difference between `[('train', 1), ('val', 1)]` and ``[('train', 1)]`` is that the runner will calculate losses on validation set after each training epoch. + Therefore, the only difference between `[('train', 1), ('val', 1)]` and `[('train', 1)]` is that the runner will calculate losses on validation set after each training epoch. ::: @@ -259,8 +259,8 @@ Then we need to make `MyHook` imported. Assuming the file is in `mmaction/core/u - Modify `mmaction/core/utils/__init__.py` to import it. - The newly defined module should be imported in `mmaction/core/utils/__init__.py` so that the registry will - find the new module and add it: + The newly defined module should be imported in `mmaction/core/utils/__init__.py` so that the registry will + find the new module and add it: ```python from .my_hook import MyHook diff --git a/docs/useful_tools.md b/docs/useful_tools.md index 2e76a9921f..086061020b 100644 --- a/docs/useful_tools.md +++ b/docs/useful_tools.md @@ -36,43 +36,43 @@ Examples: - Plot the classification loss of some run. - ```shell - python tools/analysis/analyze_logs.py plot_curve log.json --keys loss_cls --legend loss_cls - ``` + ```shell + python tools/analysis/analyze_logs.py plot_curve log.json --keys loss_cls --legend loss_cls + ``` - Plot the top-1 acc and top-5 acc of some run, and save the figure to a pdf. 
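  Under the hood, these subcommands simply parse the training log, which is written as one JSON dict per line. The same idea in plain Python (the log path is illustrative, and `matplotlib` is assumed to be available):

  ```python
  import json

  import matplotlib.pyplot as plt

  losses = []
  with open('work_dirs/some_exp/20200422_153324.log.json') as f:
      for line in f:
          log = json.loads(line)
          # Skip the leading metadata line and any validation entries.
          if log.get('mode') == 'train' and 'loss_cls' in log:
              losses.append(log['loss_cls'])

  plt.plot(losses)  # the x-axis is simply the logging step
  plt.xlabel('logging step')
  plt.ylabel('loss_cls')
  plt.savefig('loss_cls.pdf')
  ```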
- ```shell - python tools/analysis/analyze_logs.py plot_curve log.json --keys top1_acc top5_acc --out results.pdf - ``` + ```shell + python tools/analysis/analyze_logs.py plot_curve log.json --keys top1_acc top5_acc --out results.pdf + ``` - Compare the top-1 acc of two runs in the same figure. - ```shell - python tools/analysis/analyze_logs.py plot_curve log1.json log2.json --keys top1_acc --legend run1 run2 - ``` + ```shell + python tools/analysis/analyze_logs.py plot_curve log1.json log2.json --keys top1_acc --legend run1 run2 + ``` - You can also compute the average training speed. + You can also compute the average training speed. - ```shell - python tools/analysis/analyze_logs.py cal_train_time ${JSON_LOGS} [--include-outliers] - ``` + ```shell + python tools/analysis/analyze_logs.py cal_train_time ${JSON_LOGS} [--include-outliers] + ``` - Compute the average training speed for a config file. - ```shell - python tools/analysis/analyze_logs.py cal_train_time work_dirs/some_exp/20200422_153324.log.json - ``` + ```shell + python tools/analysis/analyze_logs.py cal_train_time work_dirs/some_exp/20200422_153324.log.json + ``` - The output is expected to be like the following. + The output is expected to be like the following. - ```text - -----Analyze train time of work_dirs/some_exp/20200422_153324.log.json----- - slowest epoch 60, average time is 0.9736 - fastest epoch 18, average time is 0.9001 - time std over epochs is 0.0177 - average iter time: 0.9330 s/iter - ``` + ```text + -----Analyze train time of work_dirs/some_exp/20200422_153324.log.json----- + slowest epoch 60, average time is 0.9736 + fastest epoch 18, average time is 0.9001 + time std over epochs is 0.0177 + average iter time: 0.9330 s/iter + ``` ## Model Complexity @@ -111,15 +111,15 @@ Please note that a softmax layer could be added for recognizers by `--softmax` o - For recognizers, please run: - ```shell - python tools/deployment/pytorch2onnx.py $CONFIG_PATH $CHECKPOINT_PATH --shape $SHAPE --verify - ``` + ```shell + python tools/deployment/pytorch2onnx.py $CONFIG_PATH $CHECKPOINT_PATH --shape $SHAPE --verify + ``` - For localizers, please run: - ```shell - python tools/deployment/pytorch2onnx.py $CONFIG_PATH $CHECKPOINT_PATH --is-localizer --shape $SHAPE --verify - ``` + ```shell + python tools/deployment/pytorch2onnx.py $CONFIG_PATH $CHECKPOINT_PATH --is-localizer --shape $SHAPE --verify + ``` ### Prepare a model for publishing diff --git a/docs_zh_CN/benchmark.md b/docs_zh_CN/benchmark.md index 7bd65cb169..e329e75cd5 100644 --- a/docs_zh_CN/benchmark.md +++ b/docs_zh_CN/benchmark.md @@ -41,28 +41,28 @@ ### 行为识别器 -| 模型 |输入| IO 后端 | 批大小 x GPU 数量 | MMAction2 (s/iter) | GPU 显存占用 (GB) | MMAction (s/iter)| GPU 显存占用 (GB) | Temporal-Shift-Module (s/iter) | GPU 显存占用 (GB) | PySlowFast (s/iter)| GPU 显存占用 (GB) | -| :--- | :---------------:|:---------------:| :---------------:| :---------------: | :--------------------: | :----------------------------: | :-----------------: |:-----------------: |:-----------------: |:-----------------: |:-----------------: | -| [TSN](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py)| 256p rawframes |Memcached| 32x8|**[0.32](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_rawframes_memcahed_32x8.zip)** | 8.1 |[0.38](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction/tsn_256p_rawframes_memcached_32x8.zip)|8.1| 
[0.42](https://download.openmmlab.com/mmaction/benchmark/recognition/temporal_shift_module/tsn_256p_rawframes_memcached_32x8.zip)|10.5 | x |x | -| [TSN](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py)| 256p videos |Disk| 32x8|**[1.42](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_videos_disk_32x8.zip)** | 8.1 | x |x |x| x | TODO |TODO| -| [TSN](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py)| 256p dense-encoded video |Disk| 32x8|**[0.61](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_fast_videos_disk_32x8.zip)**|8.1 | x |x| x |x| TODO |TODO| -|[I3D heavy](/configs/recognition/i3d/i3d_r50_video_heavy_8x8x1_100e_kinetics400_rgb.py)|256p videos|Disk |8x8| **[0.34](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/i3d_heavy_256p_videos_disk_8x8.zip)** |4.6|x |x| x |x| [0.44](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_i3d_r50_8x8_video.log) |4.6| -|[I3D heavy](/configs/recognition/i3d/i3d_r50_video_heavy_8x8x1_100e_kinetics400_rgb.py)|256p dense-encoded video|Disk |8x8| **[0.35](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/i3d_heavy_256p_fast_videos_disk_8x8.zip)**| 4.6 | x | x | x | x | [0.36](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_i3d_r50_8x8_fast_video.log) |4.6| -| [I3D](/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py)|256p rawframes|Memcached|8x8| **[0.43](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/i3d_256p_rawframes_memcahed_8x8.zip)**|5.0 | [0.56](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction/i3d_256p_rawframes_memcached_8x8.zip)|5.0| x |x| x |x| -| [TSM](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) |256p rawframes|Memcached| 8x8|**[0.31](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsm_256p_rawframes_memcahed_8x8.zip)** |6.9| x |x| [0.41](https://download.openmmlab.com/mmaction/benchmark/recognition/temporal_shift_module/tsm_256p_rawframes_memcached_8x8.zip) |9.1| x|x | -| [Slowonly](/configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py)|256p videos|Disk|8x8 | **[0.32](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/slowonly_256p_videos_disk_8x8.zip)** |3.1| TODO|TODO | x|x | [0.34](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_slowonly_r50_4x16_video.log)|3.4 | -| [Slowonly](/configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py)|256p dense-encoded video|Disk|8x8 | **[0.25](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/slowonly_256p_fast_videos_disk_8x8.zip)** |3.1| TODO |TODO| x |x| [0.28](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_slowonly_r50_4x16_fast_video.log) |3.4| -| [Slowfast](/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py)|256p videos|Disk|8x8 | **[0.69](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/slowfast_256p_videos_disk_8x8.zip)**|6.1 | x |x| x |x| [1.04](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_slowfast_r50_4x16_video.log)|7.0 | -| [Slowfast](/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py)|256p dense-encoded video|Disk|8x8 | **[0.68](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/slowfast_256p_fast_videos_disk_8x8.zip)** 
|6.1| x|x | x |x| [0.96](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_slowfast_r50_4x16_fast_video.log)|7.0 | -| [R(2+1)D](/configs/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb.py)|256p videos |Disk| 8x8|**[0.45](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/r2plus1d_256p_videos_disk_8x8.zip)**|5.1 | x | x | x | x | x | x | -| [R(2+1)D](/configs/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb.py)|256p dense-encoded video |Disk| 8x8|**[0.44](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/r2plus1d_256p_fast_videos_disk_8x8.zip)** |5.1| x|x | x |x| x |x| +| 模型 | 输入 | IO 后端 | 批大小 x GPU 数量 | MMAction2 (s/iter) | GPU 显存占用 (GB) | MMAction (s/iter) | GPU 显存占用 (GB) | Temporal-Shift-Module (s/iter) | GPU 显存占用 (GB) | PySlowFast (s/iter) | GPU 显存占用 (GB) | +| :------------------------------------------------------------------------------------------ | :----------------------: | :-------: | :----------: | :-------------------------------------------------------------------------------------------------------------------------: | :-----------: | :------------------------------------------------------------------------------------------------------------------: | :-----------: | :-------------------------------------------------------------------------------------------------------------------------------: | :-----------: | :--------------------------------------------------------------------------------------------------------------------: | :-----------: | +| [TSN](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py) | 256p rawframes | Memcached | 32x8 | **[0.32](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_rawframes_memcahed_32x8.zip)** | 8.1 | [0.38](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction/tsn_256p_rawframes_memcached_32x8.zip) | 8.1 | [0.42](https://download.openmmlab.com/mmaction/benchmark/recognition/temporal_shift_module/tsn_256p_rawframes_memcached_32x8.zip) | 10.5 | x | x | +| [TSN](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py) | 256p videos | Disk | 32x8 | **[1.42](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_videos_disk_32x8.zip)** | 8.1 | x | x | x | x | TODO | TODO | +| [TSN](/configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py) | 256p dense-encoded video | Disk | 32x8 | **[0.61](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsn_256p_fast_videos_disk_32x8.zip)** | 8.1 | x | x | x | x | TODO | TODO | +| [I3D heavy](/configs/recognition/i3d/i3d_r50_video_heavy_8x8x1_100e_kinetics400_rgb.py) | 256p videos | Disk | 8x8 | **[0.34](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/i3d_heavy_256p_videos_disk_8x8.zip)** | 4.6 | x | x | x | x | [0.44](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_i3d_r50_8x8_video.log) | 4.6 | +| [I3D heavy](/configs/recognition/i3d/i3d_r50_video_heavy_8x8x1_100e_kinetics400_rgb.py) | 256p dense-encoded video | Disk | 8x8 | **[0.35](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/i3d_heavy_256p_fast_videos_disk_8x8.zip)** | 4.6 | x | x | x | x | [0.36](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_i3d_r50_8x8_fast_video.log) | 4.6 | +| [I3D](/configs/recognition/i3d/i3d_r50_32x2x1_100e_kinetics400_rgb.py) | 256p rawframes | Memcached | 8x8 | 
**[0.43](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/i3d_256p_rawframes_memcahed_8x8.zip)** | 5.0 | [0.56](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction/i3d_256p_rawframes_memcached_8x8.zip) | 5.0 | x | x | x | x | +| [TSM](/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py) | 256p rawframes | Memcached | 8x8 | **[0.31](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/tsm_256p_rawframes_memcahed_8x8.zip)** | 6.9 | x | x | [0.41](https://download.openmmlab.com/mmaction/benchmark/recognition/temporal_shift_module/tsm_256p_rawframes_memcached_8x8.zip) | 9.1 | x | x | +| [Slowonly](/configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py) | 256p videos | Disk | 8x8 | **[0.32](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/slowonly_256p_videos_disk_8x8.zip)** | 3.1 | TODO | TODO | x | x | [0.34](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_slowonly_r50_4x16_video.log) | 3.4 | +| [Slowonly](/configs/recognition/slowonly/slowonly_r50_video_4x16x1_256e_kinetics400_rgb.py) | 256p dense-encoded video | Disk | 8x8 | **[0.25](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/slowonly_256p_fast_videos_disk_8x8.zip)** | 3.1 | TODO | TODO | x | x | [0.28](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_slowonly_r50_4x16_fast_video.log) | 3.4 | +| [Slowfast](/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py) | 256p videos | Disk | 8x8 | **[0.69](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/slowfast_256p_videos_disk_8x8.zip)** | 6.1 | x | x | x | x | [1.04](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_slowfast_r50_4x16_video.log) | 7.0 | +| [Slowfast](/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py) | 256p dense-encoded video | Disk | 8x8 | **[0.68](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/slowfast_256p_fast_videos_disk_8x8.zip)** | 6.1 | x | x | x | x | [0.96](https://download.openmmlab.com/mmaction/benchmark/recognition/pyslowfast/pysf_slowfast_r50_4x16_fast_video.log) | 7.0 | +| [R(2+1)D](/configs/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb.py) | 256p videos | Disk | 8x8 | **[0.45](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/r2plus1d_256p_videos_disk_8x8.zip)** | 5.1 | x | x | x | x | x | x | +| [R(2+1)D](/configs/recognition/r2plus1d/r2plus1d_r34_video_8x8x1_180e_kinetics400_rgb.py) | 256p dense-encoded video | Disk | 8x8 | **[0.44](https://download.openmmlab.com/mmaction/benchmark/recognition/mmaction2/r2plus1d_256p_fast_videos_disk_8x8.zip)** | 5.1 | x | x | x | x | x | x | ### 时序动作检测器 -| Model | MMAction2 (s/iter) | BSN(boundary sensitive network) (s/iter) |BMN(boundary matching network) (s/iter)| -| :--- | :---------------: | :-------------------------------------: | :-------------------------------------: | -| BSN ([TEM + PEM + PGM](/configs/localization/bsn)) | **0.074(TEM)+0.040(PEM)** | 0.101(TEM)+0.040(PEM) | x | -| BMN ([bmn_400x100_2x8_9e_activitynet_feature](/configs/localization/bmn/bmn_400x100_2x8_9e_activitynet_feature.py)) | **3.27** | x | 3.30 | +| Model | MMAction2 (s/iter) | BSN(boundary sensitive network) (s/iter) | BMN(boundary matching network) (s/iter) | +| 
:------------------------------------------------------------------------------------------------------------------ | :-----------------------: | :--------------------------------------: | :-------------------------------------: | +| BSN ([TEM + PEM + PGM](/configs/localization/bsn)) | **0.074(TEM)+0.040(PEM)** | 0.101(TEM)+0.040(PEM) | x | +| BMN ([bmn_400x100_2x8_9e_activitynet_feature](/configs/localization/bmn/bmn_400x100_2x8_9e_activitynet_feature.py)) | **3.27** | x | 3.30 | ## 比较细节 diff --git a/docs_zh_CN/data_preparation.md b/docs_zh_CN/data_preparation.md index 021fe4bc1b..bd43422bba 100644 --- a/docs_zh_CN/data_preparation.md +++ b/docs_zh_CN/data_preparation.md @@ -4,13 +4,13 @@ -- [视频格式数据的一些注意事项](#视频格式数据的一些注意事项) -- [获取数据](#获取数据) - - [准备视频](#准备视频) - - [提取帧](#提取帧) - - [denseflow 的替代项](#denseflow-的替代项) - - [生成文件列表](#生成文件列表) - - [准备音频](#准备音频) +- [视频格式数据的一些注意事项](#%E8%A7%86%E9%A2%91%E6%A0%BC%E5%BC%8F%E6%95%B0%E6%8D%AE%E7%9A%84%E4%B8%80%E4%BA%9B%E6%B3%A8%E6%84%8F%E4%BA%8B%E9%A1%B9) +- [获取数据](#%E8%8E%B7%E5%8F%96%E6%95%B0%E6%8D%AE) + - [准备视频](#%E5%87%86%E5%A4%87%E8%A7%86%E9%A2%91) + - [提取帧](#%E6%8F%90%E5%8F%96%E5%B8%A7) + - [denseflow 的替代项](#denseflow-%E7%9A%84%E6%9B%BF%E4%BB%A3%E9%A1%B9) + - [生成文件列表](#%E7%94%9F%E6%88%90%E6%96%87%E4%BB%B6%E5%88%97%E8%A1%A8) + - [准备音频](#%E5%87%86%E5%A4%87%E9%9F%B3%E9%A2%91) diff --git a/docs_zh_CN/demo.md b/docs_zh_CN/demo.md index 95969ae385..c5d12538bf 100644 --- a/docs_zh_CN/demo.md +++ b/docs_zh_CN/demo.md @@ -2,21 +2,21 @@ ## 目录 -- [Demo 示例](#demo-示例) - - [目录](#目录) - - [预测视频的动作标签](#预测视频的动作标签) - - [预测视频的时空检测结果](#预测视频的时空检测结果) - - [可视化输入视频的 GradCAM](#可视化输入视频的-gradcam) - - [使用网络摄像头的实时动作识别](#使用网络摄像头的实时动作识别) - - [滑动窗口预测长视频中不同动作类别](#滑动窗口预测长视频中不同动作类别) - - [基于网络摄像头的实时时空动作检测](#基于网络摄像头的实时时空动作检测) - - [基于人体姿态预测动作标签](#基于人体姿态预测动作标签) - - [视频结构化预测](#视频结构化预测) - - [基于音频的动作识别](#基于音频的动作识别) +- [Demo 示例](#demo-%E7%A4%BA%E4%BE%8B) + - [目录](#%E7%9B%AE%E5%BD%95) + - [预测视频的动作标签](#%E9%A2%84%E6%B5%8B%E8%A7%86%E9%A2%91%E7%9A%84%E5%8A%A8%E4%BD%9C%E6%A0%87%E7%AD%BE) + - [预测视频的时空检测结果](#%E9%A2%84%E6%B5%8B%E8%A7%86%E9%A2%91%E7%9A%84%E6%97%B6%E7%A9%BA%E6%A3%80%E6%B5%8B%E7%BB%93%E6%9E%9C) + - [可视化输入视频的 GradCAM](#%E5%8F%AF%E8%A7%86%E5%8C%96%E8%BE%93%E5%85%A5%E8%A7%86%E9%A2%91%E7%9A%84-gradcam) + - [使用网络摄像头的实时动作识别](#%E4%BD%BF%E7%94%A8%E7%BD%91%E7%BB%9C%E6%91%84%E5%83%8F%E5%A4%B4%E7%9A%84%E5%AE%9E%E6%97%B6%E5%8A%A8%E4%BD%9C%E8%AF%86%E5%88%AB) + - [滑动窗口预测长视频中不同动作类别](#%E6%BB%91%E5%8A%A8%E7%AA%97%E5%8F%A3%E9%A2%84%E6%B5%8B%E9%95%BF%E8%A7%86%E9%A2%91%E4%B8%AD%E4%B8%8D%E5%90%8C%E5%8A%A8%E4%BD%9C%E7%B1%BB%E5%88%AB) + - [基于网络摄像头的实时时空动作检测](#%E5%9F%BA%E4%BA%8E%E7%BD%91%E7%BB%9C%E6%91%84%E5%83%8F%E5%A4%B4%E7%9A%84%E5%AE%9E%E6%97%B6%E6%97%B6%E7%A9%BA%E5%8A%A8%E4%BD%9C%E6%A3%80%E6%B5%8B) + - [基于人体姿态预测动作标签](#%E5%9F%BA%E4%BA%8E%E4%BA%BA%E4%BD%93%E5%A7%BF%E6%80%81%E9%A2%84%E6%B5%8B%E5%8A%A8%E4%BD%9C%E6%A0%87%E7%AD%BE) + - [视频结构化预测](#%E8%A7%86%E9%A2%91%E7%BB%93%E6%9E%84%E5%8C%96%E9%A2%84%E6%B5%8B) + - [基于音频的动作识别](#%E5%9F%BA%E4%BA%8E%E9%9F%B3%E9%A2%91%E7%9A%84%E5%8A%A8%E4%BD%9C%E8%AF%86%E5%88%AB) ## 预测视频的动作标签 -MMAction2 提供如下脚本以预测视频的动作标签。为得到 [0, 1] 间的动作分值,请确保在配置文件中设定 `model['test_cfg'] = dict(average_clips='prob')`。 +MMAction2 提供如下脚本以预测视频的动作标签。为得到 \[0, 1\] 间的动作分值,请确保在配置文件中设定 `model['test_cfg'] = dict(average_clips='prob')`。 ```shell python demo/demo.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${VIDEO_FILE} {LABEL_FILE} [--use-frames] \ @@ -41,84 +41,84 @@ python demo/demo.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${VIDEO_FILE} {LABEL_FILE} 1. 
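除命令行脚本外,也可以在 Python 中直接调用高级 API 完成同样的预测(与后文 getting_started 中"使用高级 API 对视频和帧文件夹进行测试"一节相对应)。下面给出一个最小草图:这里假设所装版本的 `mmaction.apis` 提供 `init_recognizer` 与 `inference_recognizer` 接口,且后者接受标签文件路径并返回(标签, 得分)列表;具体函数签名请以实际版本为准,此处仅为示意而非权威实现:

```python
# 最小示意:用高级 API 预测单个视频的动作标签
# (接口签名随版本可能变化,此处仅为草图而非权威实现)
from mmaction.apis import init_recognizer, inference_recognizer

config_file = 'configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py'
checkpoint_file = 'checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth'

# 构建识别器并加载权重
model = init_recognizer(config_file, checkpoint_file, device='cuda:0')

# demo.mp4 及 label_map_k400.txt 均来自 Kinetics-400 数据集
results = inference_recognizer(model, 'demo/demo.mp4',
                               'tools/data/kinetics/label_map_k400.txt')

for label, score in results:
    print(f'{label}: {score:.4f}')
```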
在 cuda 设备上,使用 TSN 模型进行视频识别: - ```shell - # demo.mp4 及 label_map_k400.txt 均来自 Kinetics-400 数据集 - python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - demo/demo.mp4 tools/data/kinetics/label_map_k400.txt - ``` + ```shell + # demo.mp4 及 label_map_k400.txt 均来自 Kinetics-400 数据集 + python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ + checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ + demo/demo.mp4 tools/data/kinetics/label_map_k400.txt + ``` 2. 在 cuda 设备上,使用 TSN 模型进行视频识别,并利用 URL 加载模型权重文件: - ```shell - # demo.mp4 及 label_map_k400.txt 均来自 Kinetics-400 数据集 - python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ - https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - demo/demo.mp4 tools/data/kinetics/label_map_k400.txt - ``` + ```shell + # demo.mp4 及 label_map_k400.txt 均来自 Kinetics-400 数据集 + python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ + https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ + demo/demo.mp4 tools/data/kinetics/label_map_k400.txt + ``` 3. 在 CPU 上,使用 TSN 模型进行视频识别,输入为视频抽好的帧: - ```shell - python demo/demo.py configs/recognition/tsn/tsn_r50_inference_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - PATH_TO_FRAMES/ LABEL_FILE --use-frames --device cpu - ``` + ```shell + python demo/demo.py configs/recognition/tsn/tsn_r50_inference_1x1x3_100e_kinetics400_rgb.py \ + checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ + PATH_TO_FRAMES/ LABEL_FILE --use-frames --device cpu + ``` 4. 使用 TSN 模型进行视频识别,输出 MP4 格式的识别结果: - ```shell - # demo.mp4 及 label_map_k400.txt 均来自 Kinetics-400 数据集 - python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - demo/demo.mp4 tools/data/kinetics/label_map_k400.txt --out-filename demo/demo_out.mp4 - ``` + ```shell + # demo.mp4 及 label_map_k400.txt 均来自 Kinetics-400 数据集 + python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ + checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ + demo/demo.mp4 tools/data/kinetics/label_map_k400.txt --out-filename demo/demo_out.mp4 + ``` 5. 使用 TSN 模型进行视频识别,输入为视频抽好的帧,将识别结果存为 GIF 格式: - ```shell - python demo/demo.py configs/recognition/tsn/tsn_r50_inference_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - PATH_TO_FRAMES/ LABEL_FILE --use-frames --out-filename demo/demo_out.gif - ``` + ```shell + python demo/demo.py configs/recognition/tsn/tsn_r50_inference_1x1x3_100e_kinetics400_rgb.py \ + checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ + PATH_TO_FRAMES/ LABEL_FILE --use-frames --out-filename demo/demo_out.gif + ``` 6. 
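下一个示例会用到 `--target-resolution` 的保长宽比缩放;其算术可以用如下草图直观验证(函数与变量名仅为说明而设,并非 demo 脚本的真实实现):

```python
# 保长宽比缩放的算术示意:任一目标维度为 -1 时按另一维度等比缩放
def rescale(width, height, target_w, target_h):
    if target_w == -1:
        target_w = int(width * target_h / height)
    if target_h == -1:
        target_h = int(height * target_w / width)
    return target_w, target_h

# 与文档示例一致:(340, 256) 在 --target-resolution 170 -1 下缩放为 (170, 128)
print(rescale(340, 256, 170, -1))
```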
使用 TSN 模型进行视频识别,输出 MP4 格式的识别结果,并指定输出视频分辨率及缩放视频时使用的插值方法: - ```shell - # demo.mp4 及 label_map_k400.txt 均来自 Kinetics-400 数据集 - python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - demo/demo.mp4 tools/data/kinetics/label_map_k400.txt --target-resolution 340 256 --resize-algorithm bilinear \ - --out-filename demo/demo_out.mp4 - ``` - - ```shell - # demo.mp4 及 label_map_k400.txt 均来自 Kinetics-400 数据集 - # 若 TARGET_RESOLUTION 的任一维度被设置为 -1,视频帧缩放时将保持长宽比 - # 如设定 --target-resolution 为 170 -1,原先长宽为 (340, 256) 的视频帧将被缩放至 (170, 128) - python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - demo/demo.mp4 tools/data/kinetics/label_map_k400.txt --target-resolution 170 -1 --resize-algorithm bilinear \ - --out-filename demo/demo_out.mp4 - ``` + ```shell + # demo.mp4 及 label_map_k400.txt 均来自 Kinetics-400 数据集 + python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ + checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ + demo/demo.mp4 tools/data/kinetics/label_map_k400.txt --target-resolution 340 256 --resize-algorithm bilinear \ + --out-filename demo/demo_out.mp4 + ``` + + ```shell + # demo.mp4 及 label_map_k400.txt 均来自 Kinetics-400 数据集 + # 若 TARGET_RESOLUTION 的任一维度被设置为 -1,视频帧缩放时将保持长宽比 + # 如设定 --target-resolution 为 170 -1,原先长宽为 (340, 256) 的视频帧将被缩放至 (170, 128) + python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ + checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ + demo/demo.mp4 tools/data/kinetics/label_map_k400.txt --target-resolution 170 -1 --resize-algorithm bilinear \ + --out-filename demo/demo_out.mp4 + ``` 7. 使用 TSN 模型进行视频识别,输出 MP4 格式的识别结果,指定输出视频中使用红色文字,字体大小为 10 像素: - ```shell - # demo.mp4 及 label_map_k400.txt 均来自 Kinetics-400 数据集 - python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - demo/demo.mp4 tools/data/kinetics/label_map_k400.txt --font-size 10 --font-color red \ - --out-filename demo/demo_out.mp4 - ``` + ```shell + # demo.mp4 及 label_map_k400.txt 均来自 Kinetics-400 数据集 + python demo/demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ + checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ + demo/demo.mp4 tools/data/kinetics/label_map_k400.txt --font-size 10 --font-color red \ + --out-filename demo/demo_out.mp4 + ``` 8. 使用 TSN 模型进行视频识别,输入为视频抽好的帧,将识别结果存为 MP4 格式,帧率设置为 24fps: - ```shell - python demo/demo.py configs/recognition/tsn/tsn_r50_inference_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - PATH_TO_FRAMES/ LABEL_FILE --use-frames --fps 24 --out-filename demo/demo_out.gif - ``` + ```shell + python demo/demo.py configs/recognition/tsn/tsn_r50_inference_1x1x3_100e_kinetics400_rgb.py \ + checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ + PATH_TO_FRAMES/ LABEL_FILE --use-frames --fps 24 --out-filename demo/demo_out.gif + ``` ## 预测视频的时空检测结果 @@ -201,24 +201,24 @@ python demo/demo_gradcam.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${VIDEO_FILE} [--u 1. 
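若想在自己的脚本中复用 GradCAM,可参考 `demo/demo_gradcam.py` 的内部实现。下面是一个假设性草图:假定该脚本使用的 `GradCAM` 工具类可以从 `mmaction.utils` 导入,并以"模型 + 目标层名"方式构造;实际的导入路径与调用方式请以所装版本的源码为准:

```python
# 假设性示意:构造 GradCAM 可视化器(接口以 demo_gradcam.py 为参照,可能随版本变化)
from mmaction.apis import init_recognizer
from mmaction.utils import GradCAM  # 假设:该工具类由 mmaction.utils 导出

model = init_recognizer(
    'configs/recognition/i3d/i3d_r50_video_inference_32x2x1_100e_kinetics400_rgb.py',
    'checkpoints/i3d_r50_video_32x2x1_100e_kinetics400_rgb_20200826-e31c6f52.pth',
    device='cuda:0')

# 目标层写法与命令行参数 --target-layer-name 相同
gradcam = GradCAM(model, 'backbone/layer4/1/relu')
# 随后可将经测试流水线处理后的数据批传入 gradcam(...) 以得到定位图
```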
对于 I3D 模型进行 GradCAM 的可视化,使用视频作为输入,并输出一帧率为 10 的 GIF 文件: - ```shell - python demo/demo_gradcam.py configs/recognition/i3d/i3d_r50_video_inference_32x2x1_100e_kinetics400_rgb.py \ - checkpoints/i3d_r50_video_32x2x1_100e_kinetics400_rgb_20200826-e31c6f52.pth demo/demo.mp4 \ - --target-layer-name backbone/layer4/1/relu --fps 10 \ - --out-filename demo/demo_gradcam.gif - ``` + ```shell + python demo/demo_gradcam.py configs/recognition/i3d/i3d_r50_video_inference_32x2x1_100e_kinetics400_rgb.py \ + checkpoints/i3d_r50_video_32x2x1_100e_kinetics400_rgb_20200826-e31c6f52.pth demo/demo.mp4 \ + --target-layer-name backbone/layer4/1/relu --fps 10 \ + --out-filename demo/demo_gradcam.gif + ``` 2. 对于 I3D 模型进行 GradCAM 的可视化,使用视频作为输入,并输出一 GIF 文件,此示例利用 URL 加载模型权重文件: - ```shell - python demo/demo_gradcam.py configs/recognition/tsm/tsm_r50_video_inference_1x1x8_100e_kinetics400_rgb.py \ - https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_1x1x8_100e_kinetics400_rgb_20200702-a77f4328.pth \ - demo/demo.mp4 --target-layer-name backbone/layer4/1/relu --out-filename demo/demo_gradcam_tsm.gif - ``` + ```shell + python demo/demo_gradcam.py configs/recognition/tsm/tsm_r50_video_inference_1x1x8_100e_kinetics400_rgb.py \ + https://download.openmmlab.com/mmaction/recognition/tsm/tsm_r50_video_1x1x8_100e_kinetics400_rgb/tsm_r50_video_1x1x8_100e_kinetics400_rgb_20200702-a77f4328.pth \ + demo/demo.mp4 --target-layer-name backbone/layer4/1/relu --out-filename demo/demo_gradcam_tsm.gif + ``` ## 使用网络摄像头的实时动作识别 -MMAction2 提供如下脚本来进行使用网络摄像头的实时动作识别。为得到 [0, 1] 间的动作分值,请确保在配置文件中设定 `model['test_cfg'] = dict(average_clips='prob')` 。 +MMAction2 提供如下脚本来进行使用网络摄像头的实时动作识别。为得到 \[0, 1\] 间的动作分值,请确保在配置文件中设定 `model['test_cfg'] = dict(average_clips='prob')` 。 ```shell python demo/webcam_demo.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${LABEL_FILE} \ @@ -273,7 +273,7 @@ python demo/webcam_demo.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${LABEL_FILE} \ ## 滑动窗口预测长视频中不同动作类别 -MMAction2 提供如下脚本来预测长视频中的不同动作类别。为得到 [0, 1] 间的动作分值,请确保在配置文件中设定 `model['test_cfg'] = dict(average_clips='prob')` 。 +MMAction2 提供如下脚本来预测长视频中的不同动作类别。为得到 \[0, 1\] 间的动作分值,请确保在配置文件中设定 `model['test_cfg'] = dict(average_clips='prob')` 。 ```shell python demo/long_video_demo.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${VIDEO_FILE} ${LABEL_FILE} \ @@ -286,7 +286,7 @@ python demo/long_video_demo.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${VIDEO_FILE} $ - `INPUT_STEP`: 在视频中的每 N 帧中选取一帧作为输入,默认为 1。 - `DEVICE_TYPE`: 指定脚本运行设备,支持 cuda 设备(如 `cuda:0`)或 cpu(`cpu`)。默认为 `cuda:0`。 - `THRESHOLD`: 动作识别的分数阈值,只有分数大于阈值的动作类型会被显示,默认为 0.01。 -- `STRIDE`: 默认情况下,脚本为每帧给出单独预测,较为耗时。可以设定 `STRIDE` 参数进行加速,此时脚本将会为每 `STRIDE x sample_length` 帧做一次预测(`sample_length` 指模型采帧时的时间窗大小,等于 `clip_len x frame_interval`)。例如,若 sample_length 为 64 帧且 `STRIDE` 设定为 0.5,模型将每 32 帧做一次预测。若 `STRIDE` 设为 0,模型将为每帧做一次预测。`STRIDE` 的理想取值为 (0, 1] 间,若大于 1,脚本亦可正常执行。`STRIDE` 默认值为 0。 +- `STRIDE`: 默认情况下,脚本为每帧给出单独预测,较为耗时。可以设定 `STRIDE` 参数进行加速,此时脚本将会为每 `STRIDE x sample_length` 帧做一次预测(`sample_length` 指模型采帧时的时间窗大小,等于 `clip_len x frame_interval`)。例如,若 sample_length 为 64 帧且 `STRIDE` 设定为 0.5,模型将每 32 帧做一次预测。若 `STRIDE` 设为 0,模型将为每帧做一次预测。`STRIDE` 的理想取值为 (0, 1\] 间,若大于 1,脚本亦可正常执行。`STRIDE` 默认值为 0。 示例: @@ -294,35 +294,35 @@ python demo/long_video_demo.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${VIDEO_FILE} $ 1. 
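上述 `STRIDE` 的取帧间隔可以用几行纯算术代码直观验证(变量名仅为说明而设,并非脚本中的真实实现):

```python
# STRIDE 取帧间隔的算术示意
clip_len, frame_interval = 32, 2
sample_length = clip_len * frame_interval  # 模型采帧的时间窗大小:64 帧

for stride in (0.5, 0.25, 0):
    # 每隔 stride * sample_length 帧预测一次;stride 为 0 时逐帧预测
    step = max(int(stride * sample_length), 1)
    print(f'STRIDE={stride}: 每 {step} 帧预测一次')
```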
利用 TSN 模型在 CPU 上预测长视频中的不同动作类别,设置 `INPUT_STEP` 为 3(即每 3 帧随机选取 1 帧作为输入),输出分值大于 0.2 的动作类别: - ```shell - python demo/long_video_demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth PATH_TO_LONG_VIDEO tools/data/kinetics/label_map_k400.txt PATH_TO_SAVED_VIDEO \ - --input-step 3 --device cpu --threshold 0.2 - ``` +```shell + python demo/long_video_demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ + checkpoints/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth PATH_TO_LONG_VIDEO tools/data/kinetics/label_map_k400.txt PATH_TO_SAVED_VIDEO \ + --input-step 3 --device cpu --threshold 0.2 +``` 2. 利用 TSN 模型在 CPU 上预测长视频中的不同动作类别,设置 `INPUT_STEP` 为 3,输出分值大于 0.2 的动作类别,此示例利用 URL 加载模型权重文件: - ```shell - python demo/long_video_demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ - https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - PATH_TO_LONG_VIDEO tools/data/kinetics/label_map_k400.txt PATH_TO_SAVED_VIDEO --input-step 3 --device cpu --threshold 0.2 - ``` +```shell + python demo/long_video_demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ + https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ + PATH_TO_LONG_VIDEO tools/data/kinetics/label_map_k400.txt PATH_TO_SAVED_VIDEO --input-step 3 --device cpu --threshold 0.2 +``` 3. 利用 TSN 模型在 CPU 上预测网络长视频(利用 URL 读取)中的不同动作类别,设置 `INPUT_STEP` 为 3,输出分值大于 0.2 的动作类别,此示例利用 URL 加载模型权重文件: - ```shell - python demo/long_video_demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ - https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ - https://www.learningcontainer.com/wp-content/uploads/2020/05/sample-mp4-file.mp4 \ - tools/data/kinetics/label_map_k400.txt PATH_TO_SAVED_VIDEO --input-step 3 --device cpu --threshold 0.2 - ``` +```shell + python demo/long_video_demo.py configs/recognition/tsn/tsn_r50_video_inference_1x1x3_100e_kinetics400_rgb.py \ + https://download.openmmlab.com/mmaction/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb/tsn_r50_1x1x3_100e_kinetics400_rgb_20200614-e508be42.pth \ + https://www.learningcontainer.com/wp-content/uploads/2020/05/sample-mp4-file.mp4 \ + tools/data/kinetics/label_map_k400.txt PATH_TO_SAVED_VIDEO --input-step 3 --device cpu --threshold 0.2 +``` 4. 利用 I3D 模型在 GPU 上预测长视频中的不同动作类别,设置 `INPUT_STEP` 为 3,动作识别的分数阈值为 0.01: - ```shell - python demo/long_video_demo.py configs/recognition/i3d/i3d_r50_video_inference_32x2x1_100e_kinetics400_rgb.py \ - checkpoints/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth PATH_TO_LONG_VIDEO tools/data/kinetics/label_map_k400.txt PATH_TO_SAVED_VIDEO \ - ``` + ```shell + python demo/long_video_demo.py configs/recognition/i3d/i3d_r50_video_inference_32x2x1_100e_kinetics400_rgb.py \ + checkpoints/i3d_r50_256p_32x2x1_100e_kinetics400_rgb_20200801-7d9f44de.pth PATH_TO_LONG_VIDEO tools/data/kinetics/label_map_k400.txt PATH_TO_SAVED_VIDEO \ + ``` ## 基于网络摄像头的实时时空动作检测 @@ -622,9 +622,9 @@ python demo/demo_audio.py ${CONFIG_FILE} ${CHECKPOINT_FILE} ${AUDIO_FILE} {LABEL 1. 
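运行音频 demo 前,可以先用 numpy 检查输入特征文件能否正常读取、形状是否符合预期(示意代码,`audio_feature.npy` 即上面命令中的特征文件):

```python
# 示意:检查音频特征 .npy 文件的形状与数据类型
import numpy as np

feat = np.load('audio_feature.npy')
print(feat.shape, feat.dtype)
```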
在 GPU 上,使用 TSN 模型进行基于音频特征的动作识别。 - ```shell - python demo/demo_audio.py \ - configs/recognition_audio/resnet/tsn_r18_64x1x1_100e_kinetics400_audio_feature.py \ - https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/tsn_r18_64x1x1_100e_kinetics400_audio_feature_20201012-bf34df6c.pth \ - audio_feature.npy label_map_k400.txt - ``` + ```shell + python demo/demo_audio.py \ + configs/recognition_audio/resnet/tsn_r18_64x1x1_100e_kinetics400_audio_feature.py \ + https://download.openmmlab.com/mmaction/recognition/audio_recognition/tsn_r18_64x1x1_100e_kinetics400_audio_feature/tsn_r18_64x1x1_100e_kinetics400_audio_feature_20201012-bf34df6c.pth \ + audio_feature.npy label_map_k400.txt + ``` diff --git a/docs_zh_CN/faq.md b/docs_zh_CN/faq.md index 2f328792f0..4c46302ecd 100644 --- a/docs_zh_CN/faq.md +++ b/docs_zh_CN/faq.md @@ -6,106 +6,107 @@ ## 安装 -- **"No module named 'mmcv.ops'"; "No module named 'mmcv._ext'"** +- **"No module named 'mmcv.ops'"; "No module named 'mmcv.\_ext'"** - 1. 使用 `pip uninstall mmcv` 卸载环境中已安装的 `mmcv`。 - 2. 遵循 [MMCV 安装文档](https://mmcv.readthedocs.io/en/latest/#installation) 来安装 `mmcv-full`。 + 1. 使用 `pip uninstall mmcv` 卸载环境中已安装的 `mmcv`。 + 2. 遵循 [MMCV 安装文档](https://mmcv.readthedocs.io/en/latest/#installation) 来安装 `mmcv-full`。 - **"OSError: MoviePy Error: creation of None failed because of the following error"** - 参照 [MMAction2 安装文档](https://github.com/open-mmlab/mmaction2/blob/master/docs_zh_CN/install.md#安装依赖包) - 1. 对于 Windows 用户,[ImageMagick](https://www.imagemagick.org/script/index.php) 不再被 MoviePy 自动检测, - 需要获取名为 `magick` 的 ImageMagick 二进制包的路径,来修改 `moviepy/config_defaults.py` 文件中的 `IMAGEMAGICK_BINARY`,如 `IMAGEMAGICK_BINARY = "C:\\Program Files\\ImageMagick_VERSION\\magick.exe"` - 2. 对于 Linux 用户,如果 ImageMagick 没有被 moviepy 检测,需要注释掉 `/etc/ImageMagick-6/policy.xml` 文件中的 ``,即改为 ``。 + 参照 [MMAction2 安装文档](https://github.com/open-mmlab/mmaction2/blob/master/docs_zh_CN/install.md#%E5%AE%89%E8%A3%85%E4%BE%9D%E8%B5%96%E5%8C%85) + + 1. 对于 Windows 用户,[ImageMagick](https://www.imagemagick.org/script/index.php) 不再被 MoviePy 自动检测, + 需要获取名为 `magick` 的 ImageMagick 二进制包的路径,来修改 `moviepy/config_defaults.py` 文件中的 `IMAGEMAGICK_BINARY`,如 `IMAGEMAGICK_BINARY = "C:\\Program Files\\ImageMagick_VERSION\\magick.exe"` + 2. 
对于 Linux 用户,如果 ImageMagick 没有被 moviepy 检测,需要注释掉 `/etc/ImageMagick-6/policy.xml` 文件中的 ``,即改为 ``。 - **"Please install XXCODEBASE to use XXX"** - 如得到报错消息 "Please install XXCODEBASE to use XXX",代表 MMAction2 无法从 XXCODEBASE 中 import XXX。用户可以执行对应 import 语句定位原因。 - 一个可能的原因是,对于部分 OpenMMLAB 中的代码库,需先安装 mmcv-full 后再进行安装。 + 如得到报错消息 "Please install XXCODEBASE to use XXX",代表 MMAction2 无法从 XXCODEBASE 中 import XXX。用户可以执行对应 import 语句定位原因。 + 一个可能的原因是,对于部分 OpenMMLAB 中的代码库,需先安装 mmcv-full 后再进行安装。 ## 数据 - **FileNotFound 如 `No such file or directory: xxx/xxx/img_00300.jpg`** - 在 MMAction2 中,对于帧数据集,`start_index` 的默认值为 1,而对于视频数据集, `start_index` 的默认值为 0。 - 如果 FileNotFound 错误发生于视频的第一帧或最后一帧,则需根据视频首帧(即 `xxx_00000.jpg` 或 `xxx_00001.jpg`)的偏移量,修改配置文件中数据处理流水线的 `start_index` 值。 + 在 MMAction2 中,对于帧数据集,`start_index` 的默认值为 1,而对于视频数据集, `start_index` 的默认值为 0。 + 如果 FileNotFound 错误发生于视频的第一帧或最后一帧,则需根据视频首帧(即 `xxx_00000.jpg` 或 `xxx_00001.jpg`)的偏移量,修改配置文件中数据处理流水线的 `start_index` 值。 - **如何处理数据集中传入视频的尺寸?是把所有视频调整为固定尺寸,如 “340x256”,还是把所有视频的短边调整成相同的长度(256像素或320像素)?** - 从基准测试来看,总体来说,后者(把所有视频的短边调整成相同的长度)效果更好,所以“调整尺寸为短边256像素”被设置为默认的数据处理方式。用户可以在 [TSN 数据基准测试](https://github.com/open-mmlab/mmaction2/tree/master/configs/recognition/tsn) 和 [SlowOnly 数据基准测试](https://github.com/open-mmlab/mmaction2/tree/master/configs/recognition/tsn) 中查看相关的基准测试结果。 + 从基准测试来看,总体来说,后者(把所有视频的短边调整成相同的长度)效果更好,所以“调整尺寸为短边256像素”被设置为默认的数据处理方式。用户可以在 [TSN 数据基准测试](https://github.com/open-mmlab/mmaction2/tree/master/configs/recognition/tsn) 和 [SlowOnly 数据基准测试](https://github.com/open-mmlab/mmaction2/tree/master/configs/recognition/tsn) 中查看相关的基准测试结果。 - **输入数据格式(视频或帧)与数据流水线不匹配,导致异常,如 `KeyError: 'total_frames'`** - 对于视频和帧,我们都有相应的流水线来处理。 + 对于视频和帧,我们都有相应的流水线来处理。 - **对于视频**,应该在处理时首先对其进行解码。可选的解码方式,有 `DecordInit & DecordDecode`, `OpenCVInit & OpenCVDecode`, `PyAVInit & PyAVDecode` 等等。可以参照 [这个例子](https://github.com/open-mmlab/mmaction2/blob/023777cfd26bb175f85d78c455f6869673e0aa09/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py#L47-L49)。 + **对于视频**,应该在处理时首先对其进行解码。可选的解码方式,有 `DecordInit & DecordDecode`, `OpenCVInit & OpenCVDecode`, `PyAVInit & PyAVDecode` 等等。可以参照 [这个例子](https://github.com/open-mmlab/mmaction2/blob/023777cfd26bb175f85d78c455f6869673e0aa09/configs/recognition/slowfast/slowfast_r50_video_4x16x1_256e_kinetics400_rgb.py#L47-L49)。 - **对于帧**,已经事先在本地对其解码,所以使用 `RawFrameDecode` 对帧处理即可。可以参照 [这个例子](https://github.com/open-mmlab/mmaction2/blob/023777cfd26bb175f85d78c455f6869673e0aa09/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py#L49)。 + **对于帧**,已经事先在本地对其解码,所以使用 `RawFrameDecode` 对帧处理即可。可以参照 [这个例子](https://github.com/open-mmlab/mmaction2/blob/023777cfd26bb175f85d78c455f6869673e0aa09/configs/recognition/slowfast/slowfast_r50_8x8x1_256e_kinetics400_rgb.py#L49)。 - `KeyError: 'total_frames'` 是因为错误地使用了 `RawFrameDecode` 来处理视频。当输入是视频的时候,程序是无法事先得到 `total_frame` 的。 + `KeyError: 'total_frames'` 是因为错误地使用了 `RawFrameDecode` 来处理视频。当输入是视频的时候,程序是无法事先得到 `total_frame` 的。 ## 训练 - **如何使用训练过的识别器作为主干网络的预训练模型?** - 参照 [使用预训练模型](https://github.com/open-mmlab/mmaction2/blob/master/docs_zh_CN/tutorials/2_finetune.md#使用预训练模型), - 如果想对整个网络使用预训练模型,可以在配置文件中,将 `load_from` 设置为预训练模型的链接。 + 参照 [使用预训练模型](https://github.com/open-mmlab/mmaction2/blob/master/docs_zh_CN/tutorials/2_finetune.md#%E4%BD%BF%E7%94%A8%E9%A2%84%E8%AE%AD%E7%BB%83%E6%A8%A1%E5%9E%8B), + 如果想对整个网络使用预训练模型,可以在配置文件中,将 `load_from` 设置为预训练模型的链接。 - 如果只想对主干网络使用预训练模型,可以在配置文件中,将主干网络 `backbone` 中的 `pretrained` 设置为预训练模型的地址或链接。 - 在训练时,预训练模型中无法与主干网络对应的参数会被忽略。 + 如果只想对主干网络使用预训练模型,可以在配置文件中,将主干网络 `backbone` 中的 
`pretrained` 设置为预训练模型的地址或链接。 + 在训练时,预训练模型中无法与主干网络对应的参数会被忽略。 - **如何实时绘制训练集和验证集的准确率/损失函数曲线图?** - 使用 `log_config` 中的 `TensorboardLoggerHook`,如: + 使用 `log_config` 中的 `TensorboardLoggerHook`,如: - ```python - log_config=dict( - interval=20, - hooks=[ - dict(type='TensorboardLoggerHook') - ] - ) - ``` + ```python + log_config=dict( + interval=20, + hooks=[ + dict(type='TensorboardLoggerHook') + ] + ) + ``` - 可以参照 [教程1:如何编写配置文件](tutorials/1_config.md),[教程7:如何自定义模型运行参数](tutorials/7_customize_runtime.md#log-config),和 [这个例子](https://github.com/open-mmlab/mmaction2/blob/master/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py#L118) 了解更多相关内容。 + 可以参照 [教程1:如何编写配置文件](tutorials/1_config.md),[教程7:如何自定义模型运行参数](tutorials/7_customize_runtime.md#log-config),和 [这个例子](https://github.com/open-mmlab/mmaction2/blob/master/configs/recognition/tsm/tsm_r50_1x1x8_50e_kinetics400_rgb.py#L118) 了解更多相关内容。 - **在 batchnorm.py 中抛出错误: Expected more than 1 value per channel when training** - BatchNorm 层要求批大小(batch size)大于 1。构建数据集时, 若 `drop_last` 被设为 `False`,有时每个轮次的最后一个批次的批大小可能为 1,进而在训练时抛出错误,可以设置 `drop_last=True` 来避免该错误,如: + BatchNorm 层要求批大小(batch size)大于 1。构建数据集时, 若 `drop_last` 被设为 `False`,有时每个轮次的最后一个批次的批大小可能为 1,进而在训练时抛出错误,可以设置 `drop_last=True` 来避免该错误,如: - ```python - train_dataloader=dict(drop_last=True) - ``` + ```python + train_dataloader=dict(drop_last=True) + ``` - **微调模型参数时,如何冻结主干网络中的部分参数?** - 可以参照 [`def _freeze_stages()`](https://github.com/open-mmlab/mmaction2/blob/0149a0e8c1e0380955db61680c0006626fd008e9/mmaction/models/backbones/x3d.py#L458) 和 [`frozen_stages`](https://github.com/open-mmlab/mmaction2/blob/0149a0e8c1e0380955db61680c0006626fd008e9/mmaction/models/backbones/x3d.py#L183-L184)。在分布式训练和测试时,还须设置 `find_unused_parameters = True`。 + 可以参照 [`def _freeze_stages()`](https://github.com/open-mmlab/mmaction2/blob/0149a0e8c1e0380955db61680c0006626fd008e9/mmaction/models/backbones/x3d.py#L458) 和 [`frozen_stages`](https://github.com/open-mmlab/mmaction2/blob/0149a0e8c1e0380955db61680c0006626fd008e9/mmaction/models/backbones/x3d.py#L183-L184)。在分布式训练和测试时,还须设置 `find_unused_parameters = True`。 - 实际上,除了少数模型,如 C3D 等,用户都能通过设置 `frozen_stages` 来冻结模型参数,因为大多数主干网络继承自 `ResNet` 和 `ResNet3D`,而这两个模型都支持 `_freeze_stages()` 方法。 + 实际上,除了少数模型,如 C3D 等,用户都能通过设置 `frozen_stages` 来冻结模型参数,因为大多数主干网络继承自 `ResNet` 和 `ResNet3D`,而这两个模型都支持 `_freeze_stages()` 方法。 - **如何在配置文件中设置 `load_from` 参数以进行模型微调?** - MMAction2 在 `configs/_base_/default_runtime.py` 文件中将 `load_from=None` 设为默认。由于配置文件的可继承性,用户可直接在下游配置文件中设置 `load_from` 的值来进行更改。 + MMAction2 在 `configs/_base_/default_runtime.py` 文件中将 `load_from=None` 设为默认。由于配置文件的可继承性,用户可直接在下游配置文件中设置 `load_from` 的值来进行更改。 ## 测试 -- **如何将预测分值用 softmax 归一化到 [0, 1] 区间内?** +- **如何将预测分值用 softmax 归一化到 \[0, 1\] 区间内?** - 可以通过设置 `model['test_cfg'] = dict(average_clips='prob')` 来实现。 + 可以通过设置 `model['test_cfg'] = dict(average_clips='prob')` 来实现。 - **如果模型太大,连一个测试样例都没法放进显存,怎么办?** - 默认情况下,3D 模型是以 `10 clips x 3 crops` 的设置进行测试的,也即采样 10 个帧片段,每帧裁剪出 3 个图像块,总计有 30 个视图。 - 对于特别大的模型,GPU 显存可能连一个视频都放不下。对于这种情况,您可以在配置文件的 `model['test_cfg']` 中设置 `max_testing_views=n`。 - 如此设置,在模型推理过程中,一个批只会使用 n 个视图,以节省显存。 + 默认情况下,3D 模型是以 `10 clips x 3 crops` 的设置进行测试的,也即采样 10 个帧片段,每帧裁剪出 3 个图像块,总计有 30 个视图。 + 对于特别大的模型,GPU 显存可能连一个视频都放不下。对于这种情况,您可以在配置文件的 `model['test_cfg']` 中设置 `max_testing_views=n`。 + 如此设置,在模型推理过程中,一个批只会使用 n 个视图,以节省显存。 - **如何保存测试结果?** - 测试时,用户可在运行指令中设置可选项 `--out xxx.json/pkl/yaml` 来输出结果文件,以供后续检查。输出的测试结果顺序和测试集顺序保持一致。 - 除此之外,MMAction2 也在 [`tools/analysis/eval_metric.py`](/tools/analysis/eval_metric.py) 中提供了分析工具,用于结果文件的模型评估。 + 测试时,用户可在运行指令中设置可选项 `--out 
xxx.json/pkl/yaml` 来输出结果文件,以供后续检查。输出的测试结果顺序和测试集顺序保持一致。 + 除此之外,MMAction2 也在 [`tools/analysis/eval_metric.py`](/tools/analysis/eval_metric.py) 中提供了分析工具,用于结果文件的模型评估。 ## 部署 - **为什么由 MMAction2 转换的 ONNX 模型在转换到其他框架(如 TensorRT)时会抛出错误?** - 目前只能确保 MMAction2 中的模型与 ONNX 兼容。但是,ONNX 中的某些算子可能不受其他框架支持,例如 [这个问题](https://github.com/open-mmlab/mmaction2/issues/414) 中的 TensorRT。当这种情况发生时,如果 `pytorch2onnx.py` 没有出现问题,转换过去的 ONNX 模型也通过了数值检验,可以提 issue 让社区提供帮助。 + 目前只能确保 MMAction2 中的模型与 ONNX 兼容。但是,ONNX 中的某些算子可能不受其他框架支持,例如 [这个问题](https://github.com/open-mmlab/mmaction2/issues/414) 中的 TensorRT。当这种情况发生时,如果 `pytorch2onnx.py` 没有出现问题,转换过去的 ONNX 模型也通过了数值检验,可以提 issue 让社区提供帮助。 diff --git a/docs_zh_CN/feature_extraction.md b/docs_zh_CN/feature_extraction.md index 8eea87df24..62b76066bc 100644 --- a/docs_zh_CN/feature_extraction.md +++ b/docs_zh_CN/feature_extraction.md @@ -4,7 +4,7 @@ MMAction2 为特征提取提供了便捷使用的脚本。 ## 片段级特征提取 -片段级特征提取是从长度一般为几秒到几十秒不等的剪辑片段中提取深度特征。从每个片段中提取的特征是一个 n 维向量。当进行多视图特征提取时,例如 n 个片段 × m 种裁剪,提取的特征将会是 n*m 个视图的平均值。 +片段级特征提取是从长度一般为几秒到几十秒不等的剪辑片段中提取深度特征。从每个片段中提取的特征是一个 n 维向量。当进行多视图特征提取时,例如 n 个片段 × m 种裁剪,提取的特征将会是 n\*m 个视图的平均值。 在应用片段级特征提取之前,用户需要准备一个视频列表包含所有想要进行特征提取的视频。例如,由 UCF101 中视频组成的视频列表如下: diff --git a/docs_zh_CN/getting_started.md b/docs_zh_CN/getting_started.md index 6c53c74ac2..b1672f1300 100644 --- a/docs_zh_CN/getting_started.md +++ b/docs_zh_CN/getting_started.md @@ -4,22 +4,22 @@ -- [基础教程](#基础教程) - - [数据集](#数据集) - - [使用预训练模型进行推理](#使用预训练模型进行推理) - - [测试某个数据集](#测试某个数据集) - - [使用高级 API 对视频和帧文件夹进行测试](#使用高级-api-对视频和帧文件夹进行测试) - - [如何建立模型](#如何建立模型) - - [使用基本组件建立模型](#使用基本组件建立模型) - - [构建新模型](#构建新模型) - - [如何训练模型](#如何训练模型) - - [推理流水线](#推理流水线) - - [训练配置](#训练配置) - - [使用单个 GPU 进行训练](#使用单个-gpu-进行训练) - - [使用多个 GPU 进行训练](#使用多个-gpu-进行训练) - - [使用多台机器进行训练](#使用多台机器进行训练) - - [使用单台机器启动多个任务](#使用单台机器启动多个任务) - - [详细教程](#详细教程) +- [基础教程](#%E5%9F%BA%E7%A1%80%E6%95%99%E7%A8%8B) + - [数据集](#%E6%95%B0%E6%8D%AE%E9%9B%86) + - [使用预训练模型进行推理](#%E4%BD%BF%E7%94%A8%E9%A2%84%E8%AE%AD%E7%BB%83%E6%A8%A1%E5%9E%8B%E8%BF%9B%E8%A1%8C%E6%8E%A8%E7%90%86) + - [测试某个数据集](#%E6%B5%8B%E8%AF%95%E6%9F%90%E4%B8%AA%E6%95%B0%E6%8D%AE%E9%9B%86) + - [使用高级 API 对视频和帧文件夹进行测试](#%E4%BD%BF%E7%94%A8%E9%AB%98%E7%BA%A7-api-%E5%AF%B9%E8%A7%86%E9%A2%91%E5%92%8C%E5%B8%A7%E6%96%87%E4%BB%B6%E5%A4%B9%E8%BF%9B%E8%A1%8C%E6%B5%8B%E8%AF%95) + - [如何建立模型](#%E5%A6%82%E4%BD%95%E5%BB%BA%E7%AB%8B%E6%A8%A1%E5%9E%8B) + - [使用基本组件建立模型](#%E4%BD%BF%E7%94%A8%E5%9F%BA%E6%9C%AC%E7%BB%84%E4%BB%B6%E5%BB%BA%E7%AB%8B%E6%A8%A1%E5%9E%8B) + - [构建新模型](#%E6%9E%84%E5%BB%BA%E6%96%B0%E6%A8%A1%E5%9E%8B) + - [如何训练模型](#%E5%A6%82%E4%BD%95%E8%AE%AD%E7%BB%83%E6%A8%A1%E5%9E%8B) + - [推理流水线](#%E6%8E%A8%E7%90%86%E6%B5%81%E6%B0%B4%E7%BA%BF) + - [训练配置](#%E8%AE%AD%E7%BB%83%E9%85%8D%E7%BD%AE) + - [使用单个 GPU 进行训练](#%E4%BD%BF%E7%94%A8%E5%8D%95%E4%B8%AA-gpu-%E8%BF%9B%E8%A1%8C%E8%AE%AD%E7%BB%83) + - [使用多个 GPU 进行训练](#%E4%BD%BF%E7%94%A8%E5%A4%9A%E4%B8%AA-gpu-%E8%BF%9B%E8%A1%8C%E8%AE%AD%E7%BB%83) + - [使用多台机器进行训练](#%E4%BD%BF%E7%94%A8%E5%A4%9A%E5%8F%B0%E6%9C%BA%E5%99%A8%E8%BF%9B%E8%A1%8C%E8%AE%AD%E7%BB%83) + - [使用单台机器启动多个任务](#%E4%BD%BF%E7%94%A8%E5%8D%95%E5%8F%B0%E6%9C%BA%E5%99%A8%E5%90%AF%E5%8A%A8%E5%A4%9A%E4%B8%AA%E4%BB%BB%E5%8A%A1) + - [详细教程](#%E8%AF%A6%E7%BB%86%E6%95%99%E7%A8%8B) @@ -98,35 +98,35 @@ python tools/test.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--out ${RESULT_FILE}] [- 1. 
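与上面的命令行测试相配合,若希望输出分值是 [0, 1] 区间内的概率(对应前文 FAQ 中的 `average_clips='prob'`),或在显存不足时限制一批前向的视图数,可在配置文件中这样设置。以下为最小片段示意,其中 `Recognizer3D` 与 `max_testing_views=4` 的取值仅为举例:

```python
# 最小配置片段示意:average_clips='prob' 将各片段得分经 softmax 后取平均;
# max_testing_views 限制一批前向的视图数以节省显存(取值仅为举例)
model = dict(
    type='Recognizer3D',
    test_cfg=dict(average_clips='prob', max_testing_views=4))
```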
在 Kinetics-400 数据集下测试 TSN (不存储测试结果为文件),并验证 `top-k accuracy` 和 `mean class accuracy` 指标 - ```shell - python tools/test.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/SOME_CHECKPOINT.pth \ - --eval top_k_accuracy mean_class_accuracy - ``` + ```shell + python tools/test.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py \ + checkpoints/SOME_CHECKPOINT.pth \ + --eval top_k_accuracy mean_class_accuracy + ``` 2. 使用 8 块 GPU 在 Something-Something V1 下测试 TSN,并验证 `top-k accuracy` 指标 - ```shell - ./tools/dist_test.sh configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb.py \ - checkpoints/SOME_CHECKPOINT.pth \ - 8 --out results.pkl --eval top_k_accuracy - ``` + ```shell + ./tools/dist_test.sh configs/recognition/tsn/tsn_r50_1x1x8_50e_sthv1_rgb.py \ + checkpoints/SOME_CHECKPOINT.pth \ + 8 --out results.pkl --eval top_k_accuracy + ``` 3. 在 slurm 分布式环境中测试 TSN 在 Kinetics-400 数据集下的 `top-k accuracy` 指标 - ```shell - python tools/test.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/SOME_CHECKPOINT.pth \ - --launcher slurm --eval top_k_accuracy - ``` + ```shell + python tools/test.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py \ + checkpoints/SOME_CHECKPOINT.pth \ + --launcher slurm --eval top_k_accuracy + ``` 4. 在 Something-Something V1 下测试 onnx 格式的 TSN 模型,并验证 `top-k accuracy` 指标 - ```shell - python tools/test.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py \ - checkpoints/SOME_CHECKPOINT.onnx \ - --eval top_k_accuracy --onnx - ``` + ```shell + python tools/test.py configs/recognition/tsn/tsn_r50_1x1x3_100e_kinetics400_rgb.py \ + checkpoints/SOME_CHECKPOINT.onnx \ + --eval top_k_accuracy --onnx + ``` ### 使用高级 API 对视频和帧文件夹进行测试 @@ -257,54 +257,54 @@ MMAction2 将模型组件分为 4 种基础模型: 1. 创建 `mmaction/models/backbones/resnet_tsm.py` 文件 - ```python - from ..builder import BACKBONES - from .resnet import ResNet + ```python + from ..builder import BACKBONES + from .resnet import ResNet - @BACKBONES.register_module() - class ResNetTSM(ResNet): + @BACKBONES.register_module() + class ResNetTSM(ResNet): - def __init__(self, - depth, - num_segments=8, - is_shift=True, - shift_div=8, - shift_place='blockres', - temporal_pool=False, - **kwargs): - pass + def __init__(self, + depth, + num_segments=8, + is_shift=True, + shift_div=8, + shift_place='blockres', + temporal_pool=False, + **kwargs): + pass - def forward(self, x): - # implementation is ignored - pass - ``` + def forward(self, x): + # implementation is ignored + pass + ``` 2. 从 `mmaction/models/backbones/__init__.py` 中导入模型 - ```python - from .resnet_tsm import ResNetTSM - ``` + ```python + from .resnet_tsm import ResNetTSM + ``` 3. 
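完成上述注册与导入后,新的 backbone 即可像内置模块一样通过配置字典构建。下面的草图假设 `mmaction.models` 导出了 `build_backbone` 构建函数,具体以所装版本为准:

```python
# 假设性示意:通过注册器以配置字典构建自定义 backbone
from mmaction.models import build_backbone

backbone_cfg = dict(
    type='ResNetTSM',  # 上文注册的自定义 backbone
    depth=50,
    shift_div=8)
backbone = build_backbone(backbone_cfg)
```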
修改模型文件 - ```python - backbone=dict( - type='ResNet', - pretrained='torchvision://resnet50', - depth=50, - norm_eval=False) - ``` + ```python + backbone=dict( + type='ResNet', + pretrained='torchvision://resnet50', + depth=50, + norm_eval=False) + ``` 修改为 - ```python - backbone=dict( - type='ResNetTSM', - pretrained='torchvision://resnet50', - depth=50, - norm_eval=False, - shift_div=8) - ``` + ```python + backbone=dict( + type='ResNetTSM', + pretrained='torchvision://resnet50', + depth=50, + norm_eval=False, + shift_div=8) + ``` ### 构建新模型 diff --git a/docs_zh_CN/install.md b/docs_zh_CN/install.md index 69d88db831..14ee70e169 100644 --- a/docs_zh_CN/install.md +++ b/docs_zh_CN/install.md @@ -4,15 +4,15 @@ -- [安装](#安装) - - [安装依赖包](#安装依赖包) - - [准备环境](#准备环境) - - [MMAction2 的安装步骤](#mmaction2-的安装步骤) - - [CPU 环境下的安装步骤](#cpu-环境下的安装步骤) - - [利用 Docker 镜像安装 MMAction2](#利用-docker-镜像安装-mmaction2) - - [源码安装 MMAction2](#源码安装-mmaction2) - - [在多个 MMAction2 版本下进行开发](#在多个-mmaction2-版本下进行开发) - - [安装验证](#安装验证) +- [安装](#%E5%AE%89%E8%A3%85) + - [安装依赖包](#%E5%AE%89%E8%A3%85%E4%BE%9D%E8%B5%96%E5%8C%85) + - [准备环境](#%E5%87%86%E5%A4%87%E7%8E%AF%E5%A2%83) + - [MMAction2 的安装步骤](#mmaction2-%E7%9A%84%E5%AE%89%E8%A3%85%E6%AD%A5%E9%AA%A4) + - [CPU 环境下的安装步骤](#cpu-%E7%8E%AF%E5%A2%83%E4%B8%8B%E7%9A%84%E5%AE%89%E8%A3%85%E6%AD%A5%E9%AA%A4) + - [利用 Docker 镜像安装 MMAction2](#%E5%88%A9%E7%94%A8-docker-%E9%95%9C%E5%83%8F%E5%AE%89%E8%A3%85-mmaction2) + - [源码安装 MMAction2](#%E6%BA%90%E7%A0%81%E5%AE%89%E8%A3%85-mmaction2) + - [在多个 MMAction2 版本下进行开发](#%E5%9C%A8%E5%A4%9A%E4%B8%AA-mmaction2-%E7%89%88%E6%9C%AC%E4%B8%8B%E8%BF%9B%E8%A1%8C%E5%BC%80%E5%8F%91) + - [安装验证](#%E5%AE%89%E8%A3%85%E9%AA%8C%E8%AF%81) diff --git a/docs_zh_CN/supported_datasets.md b/docs_zh_CN/supported_datasets.md index b0c9b554f6..7cafa129dc 100644 --- a/docs_zh_CN/supported_datasets.md +++ b/docs_zh_CN/supported_datasets.md @@ -1,9 +1,10 @@ # 支持的数据集 - 支持的动作识别数据集: + - [UCF101](/tools/data/ucf101/README_zh-CN.md) \[ [主页](https://www.crcv.ucf.edu/research/data-sets/ucf101/) \]. - [HMDB51](/tools/data/hmdb51/README_zh-CN.md) \[ [主页](https://serre-lab.clps.brown.edu/resource/hmdb-a-large-human-motion-database/) \]. 
- - [Kinetics-[400/600/700]](/tools/data/kinetics/README_zh-CN.md) \[ [主页](https://deepmind.com/research/open-source/kinetics) \] + - [Kinetics-\[400/600/700\]](/tools/data/kinetics/README_zh-CN.md) \[ [主页](https://deepmind.com/research/open-source/kinetics) \] - [Something-Something V1](/tools/data/sthv1/README_zh-CN.md) \[ [主页](https://20bn.com/datasets/something-something/v1) \] - [Something-Something V2](/tools/data/sthv2/README_zh-CN.md) \[ [主页](https://20bn.com/datasets/something-something) \] - [Moments in Time](/tools/data/mit/README_zh-CN.md) \[ [主页](http://moments.csail.mit.edu/) \] @@ -14,15 +15,18 @@ - [ActivityNet](/tools/data/activitynet/README_zh-CN.md) \[ [主页](http://activity-net.org/) \] - 支持的时序动作检测数据集: + - [ActivityNet](/tools/data/activitynet/README_zh-CN.md) \[ [主页](http://activity-net.org/) \] - [THUMOS14](/tools/data/thumos14/README_zh-CN.md) \[ [主页](https://www.crcv.ucf.edu/THUMOS14/download.html) \] - 支持的时空动作检测数据集: + - [AVA](/tools/data/ava/README_zh-CN.md) \[ [主页](https://research.google.com/ava/index.html) \] - [UCF101-24](/tools/data/ucf101_24/README_zh-CN.md) \[ [主页](http://www.thumos.info/download.html) \] - [JHMDB](/tools/data/jhmdb/README_zh-CN.md) \[ [主页](http://jhmdb.is.tue.mpg.de/) \] - 基于人体骨架的动作识别数据集: + - [PoseC3D Skeleton Dataset](/tools/data/skeleton/README.md) \[ [主页](https://kennymckormick.github.io/posec3d/) \] MMAction2 目前支持的数据集如上所列。 diff --git a/docs_zh_CN/tutorials/1_config.md b/docs_zh_CN/tutorials/1_config.md index 3d7d44294f..7c2f04abf5 100644 --- a/docs_zh_CN/tutorials/1_config.md +++ b/docs_zh_CN/tutorials/1_config.md @@ -6,14 +6,14 @@ MMAction2 提供的所有配置文件都放置在 `$MMAction2/configs` 文件夹 -- [通过命令行参数修改配置信息](#通过命令行参数修改配置信息) -- [配置文件结构](#配置文件结构) -- [配置文件命名规则](#配置文件命名规则) - - [时序动作检测的配置文件系统](#时序动作检测的配置文件系统) - - [动作识别的配置文件系统](#动作识别的配置文件系统) - - [时空动作检测的配置文件系统](#时空动作检测的配置文件系统) -- [常见问题](#常见问题) - - [配置文件中的中间变量](#配置文件中的中间变量) +- [通过命令行参数修改配置信息](#%E9%80%9A%E8%BF%87%E5%91%BD%E4%BB%A4%E8%A1%8C%E5%8F%82%E6%95%B0%E4%BF%AE%E6%94%B9%E9%85%8D%E7%BD%AE%E4%BF%A1%E6%81%AF) +- [配置文件结构](#%E9%85%8D%E7%BD%AE%E6%96%87%E4%BB%B6%E7%BB%93%E6%9E%84) +- [配置文件命名规则](#%E9%85%8D%E7%BD%AE%E6%96%87%E4%BB%B6%E5%91%BD%E5%90%8D%E8%A7%84%E5%88%99) + - [时序动作检测的配置文件系统](#%E6%97%B6%E5%BA%8F%E5%8A%A8%E4%BD%9C%E6%A3%80%E6%B5%8B%E7%9A%84%E9%85%8D%E7%BD%AE%E6%96%87%E4%BB%B6%E7%B3%BB%E7%BB%9F) + - [动作识别的配置文件系统](#%E5%8A%A8%E4%BD%9C%E8%AF%86%E5%88%AB%E7%9A%84%E9%85%8D%E7%BD%AE%E6%96%87%E4%BB%B6%E7%B3%BB%E7%BB%9F) + - [时空动作检测的配置文件系统](#%E6%97%B6%E7%A9%BA%E5%8A%A8%E4%BD%9C%E6%A3%80%E6%B5%8B%E7%9A%84%E9%85%8D%E7%BD%AE%E6%96%87%E4%BB%B6%E7%B3%BB%E7%BB%9F) +- [常见问题](#%E5%B8%B8%E8%A7%81%E9%97%AE%E9%A2%98) + - [配置文件中的中间变量](#%E9%85%8D%E7%BD%AE%E6%96%87%E4%BB%B6%E4%B8%AD%E7%9A%84%E4%B8%AD%E9%97%B4%E5%8F%98%E9%87%8F) @@ -35,7 +35,7 @@ MMAction2 提供的所有配置文件都放置在 `$MMAction2/configs` 文件夹 - 更新列表/元组的值。 当配置文件中需要更新的是一个列表或者元组,例如,配置文件通常会设置 `workflow=[('train', 1)]`,用户如果想更改, - 需要指定 `--cfg-options workflow="[(train,1),(val,1)]"`。注意这里的引号 \" 对于列表/元组数据类型的修改是必要的, + 需要指定 `--cfg-options workflow="[(train,1),(val,1)]"`。注意这里的引号 " 对于列表/元组数据类型的修改是必要的, 并且 **不允许** 引号内所指定的值的书写存在空格。 ## 配置文件结构 @@ -81,146 +81,146 @@ MMAction2 将模块化设计整合到配置文件系统中,以便于执行各 - 以 BMN 为例 - 为了帮助用户理解 MMAction2 的配置文件结构,以及时序动作检测系统中的一些模块,这里以 BMN 为例,给出其配置文件的注释。 - 对于每个模块的详细用法以及对应参数的选择,请参照 [API 文档](https://mmaction2.readthedocs.io/en/latest/api.html)。 - - ```python - # 模型设置 - model = dict( # 模型的配置 - type='BMN', # 时序动作检测器的类型 - temporal_dim=100, # 每个视频中所选择的帧数量 - boundary_ratio=0.5, # 视频边界的决策几率 - num_samples=32, # 每个候选的采样数 - num_samples_per_bin=3, # 每个样本的直方图采样数 - 
feat_dim=400, # 特征维度 - soft_nms_alpha=0.4, # soft-NMS 的 alpha 值 - soft_nms_low_threshold=0.5, # soft-NMS 的下界 - soft_nms_high_threshold=0.9, # soft-NMS 的上界 - post_process_top_k=100) # 后处理得到的最好的 K 个 proposal - # 模型训练和测试的设置 - train_cfg = None # 训练 BMN 的超参配置 - test_cfg = dict(average_clips='score') # 测试 BMN 的超参配置 - - # 数据集设置 - dataset_type = 'ActivityNetDataset' # 训练,验证,测试的数据集类型 - data_root = 'data/activitynet_feature_cuhk/csv_mean_100/' # 训练集的根目录 - data_root_val = 'data/activitynet_feature_cuhk/csv_mean_100/' # 验证集和测试集的根目录 - ann_file_train = 'data/ActivityNet/anet_anno_train.json' # 训练集的标注文件 - ann_file_val = 'data/ActivityNet/anet_anno_val.json' # 验证集的标注文件 - ann_file_test = 'data/ActivityNet/anet_anno_test.json' # 测试集的标注文件 - - train_pipeline = [ # 训练数据前处理流水线步骤组成的列表 - dict(type='LoadLocalizationFeature'), # 加载时序动作检测特征 - dict(type='GenerateLocalizationLabels'), # 生成时序动作检测标签 - dict( # Collect 类的配置 - type='Collect', # Collect 类决定哪些键会被传递到时序检测器中 - keys=['raw_feature', 'gt_bbox'], # 输入的键 - meta_name='video_meta', # 元名称 - meta_keys=['video_name']), # 输入的元键 - dict( # ToTensor 类的配置 - type='ToTensor', # ToTensor 类将其他类型转化为 Tensor 类型 - keys=['raw_feature']), # 将被从其他类型转化为 Tensor 类型的特征 - dict( # ToDataContainer 类的配置 - type='ToDataContainer', # 将一些信息转入到 ToDataContainer 中 - fields=[dict(key='gt_bbox', stack=False, cpu_only=True)]) # 携带额外键和属性的信息域 - ] - val_pipeline = [ # 验证数据前处理流水线步骤组成的列表 - dict(type='LoadLocalizationFeature'), # 加载时序动作检测特征 - dict(type='GenerateLocalizationLabels'), # 生成时序动作检测标签 - dict( # Collect 类的配置 - type='Collect', # Collect 类决定哪些键会被传递到时序检测器中 - keys=['raw_feature', 'gt_bbox'], # 输入的键 - meta_name='video_meta', # 元名称 - meta_keys=[ - 'video_name', 'duration_second', 'duration_frame', 'annotations', - 'feature_frame' - ]), # 输入的元键 - dict( # ToTensor 类的配置 - type='ToTensor', # ToTensor 类将其他类型转化为 Tensor 类型 - keys=['raw_feature']), # 将被从其他类型转化为 Tensor 类型的特征 - dict( # ToDataContainer 类的配置 - type='ToDataContainer', # 将一些信息转入到 ToDataContainer 中 - fields=[dict(key='gt_bbox', stack=False, cpu_only=True)]) # 携带额外键和属性的信息域 - ] - test_pipeline = [ # 测试数据前处理流水线步骤组成的列表 - dict(type='LoadLocalizationFeature'), # 加载时序动作检测特征 - dict( # Collect 类的配置 - type='Collect', # Collect 类决定哪些键会被传递到时序检测器中 - keys=['raw_feature'], # 输入的键 - meta_name='video_meta', # 元名称 - meta_keys=[ - 'video_name', 'duration_second', 'duration_frame', 'annotations', - 'feature_frame' - ]), # 输入的元键 - dict( # ToTensor 类的配置 - type='ToTensor', # ToTensor 类将其他类型转化为 Tensor 类型 - keys=['raw_feature']), # 将被从其他类型转化为 Tensor 类型的特征 - ] - data = dict( # 数据的配置 - videos_per_gpu=8, # 单个 GPU 的批大小 - workers_per_gpu=8, # 单个 GPU 的 dataloader 的进程 - train_dataloader=dict( # 训练过程 dataloader 的额外设置 - drop_last=True), # 在训练过程中是否丢弃最后一个批次 - val_dataloader=dict( # 验证过程 dataloader 的额外设置 - videos_per_gpu=1), # 单个 GPU 的批大小 - test_dataloader=dict( # 测试过程 dataloader 的额外设置 - videos_per_gpu=2), # 单个 GPU 的批大小 - test=dict( # 测试数据集的设置 - type=dataset_type, - ann_file=ann_file_test, - pipeline=test_pipeline, - data_prefix=data_root_val), - val=dict( # 验证数据集的设置 - type=dataset_type, - ann_file=ann_file_val, - pipeline=val_pipeline, - data_prefix=data_root_val), - train=dict( # 训练数据集的设置 - type=dataset_type, - ann_file=ann_file_train, - pipeline=train_pipeline, - data_prefix=data_root)) - - # 优化器设置 - optimizer = dict( - # 构建优化器的设置,支持: - # (1) 所有 PyTorch 原生的优化器,这些优化器的参数和 PyTorch 对应的一致; - # (2) 自定义的优化器,这些优化器在 `constructor` 的基础上构建。 - # 更多细节可参考 "tutorials/5_new_modules.md" 部分 - type='Adam', # 优化器类型, 参考 
https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/optimizer/default_constructor.py#L13 for more details - lr=0.001, # 学习率, 参数的细节使用可参考 PyTorch 的对应文档 - weight_decay=0.0001) # Adam 优化器的权重衰减 - optimizer_config = dict( # 用于构建优化器钩子的设置 - grad_clip=None) # 大部分的方法不使用梯度裁剪 - # 学习策略设置 - lr_config = dict( # 用于注册学习率调整钩子的设置 - policy='step', # 调整器策略, 支持 CosineAnnealing,Cyclic等方法。更多细节可参考 https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/lr_updater.py#L9 - step=7) # 学习率衰减步长 - - total_epochs = 9 # 训练模型的总周期数 - checkpoint_config = dict( # 模型权重文件钩子设置,更多细节可参考 https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/checkpoint.py - interval=1) # 模型权重文件保存间隔 - evaluation = dict( # 训练期间做验证的设置 - interval=1, # 执行验证的间隔 - metrics=['AR@AN']) # 验证方法 - log_config = dict( # 注册日志钩子的设置 - interval=50, # 打印日志间隔 - hooks=[ # 训练期间执行的钩子 - dict(type='TextLoggerHook'), # 记录训练过程信息的日志 - # dict(type='TensorboardLoggerHook'), # 同时支持 Tensorboard 日志 - ]) - - # 运行设置 - dist_params = dict(backend='nccl') # 建立分布式训练的设置(端口号,多 GPU 通信框架等) - log_level = 'INFO' # 日志等级 - work_dir = './work_dirs/bmn_400x100_2x8_9e_activitynet_feature/' # 记录当前实验日志和模型权重文件的文件夹 - load_from = None # 从给定路径加载模型作为预训练模型. 这个选项不会用于断点恢复训练 - resume_from = None # 加载给定路径的模型权重文件作为断点续连的模型, 训练将从该时间点保存的周期点继续进行 - workflow = [('train', 1)] # runner 的执行流. [('train', 1)] 代表只有一个执行流,并且这个名为 train 的执行流只执行一次 - output_config = dict( # 时序检测器输出设置 - out=f'{work_dir}/results.json', # 输出文件路径 - output_format='json') # 输出文件格式 - ``` + 为了帮助用户理解 MMAction2 的配置文件结构,以及时序动作检测系统中的一些模块,这里以 BMN 为例,给出其配置文件的注释。 + 对于每个模块的详细用法以及对应参数的选择,请参照 [API 文档](https://mmaction2.readthedocs.io/en/latest/api.html)。 + + ```python + # 模型设置 + model = dict( # 模型的配置 + type='BMN', # 时序动作检测器的类型 + temporal_dim=100, # 每个视频中所选择的帧数量 + boundary_ratio=0.5, # 视频边界的决策几率 + num_samples=32, # 每个候选的采样数 + num_samples_per_bin=3, # 每个样本的直方图采样数 + feat_dim=400, # 特征维度 + soft_nms_alpha=0.4, # soft-NMS 的 alpha 值 + soft_nms_low_threshold=0.5, # soft-NMS 的下界 + soft_nms_high_threshold=0.9, # soft-NMS 的上界 + post_process_top_k=100) # 后处理得到的最好的 K 个 proposal + # 模型训练和测试的设置 + train_cfg = None # 训练 BMN 的超参配置 + test_cfg = dict(average_clips='score') # 测试 BMN 的超参配置 + + # 数据集设置 + dataset_type = 'ActivityNetDataset' # 训练,验证,测试的数据集类型 + data_root = 'data/activitynet_feature_cuhk/csv_mean_100/' # 训练集的根目录 + data_root_val = 'data/activitynet_feature_cuhk/csv_mean_100/' # 验证集和测试集的根目录 + ann_file_train = 'data/ActivityNet/anet_anno_train.json' # 训练集的标注文件 + ann_file_val = 'data/ActivityNet/anet_anno_val.json' # 验证集的标注文件 + ann_file_test = 'data/ActivityNet/anet_anno_test.json' # 测试集的标注文件 + + train_pipeline = [ # 训练数据前处理流水线步骤组成的列表 + dict(type='LoadLocalizationFeature'), # 加载时序动作检测特征 + dict(type='GenerateLocalizationLabels'), # 生成时序动作检测标签 + dict( # Collect 类的配置 + type='Collect', # Collect 类决定哪些键会被传递到时序检测器中 + keys=['raw_feature', 'gt_bbox'], # 输入的键 + meta_name='video_meta', # 元名称 + meta_keys=['video_name']), # 输入的元键 + dict( # ToTensor 类的配置 + type='ToTensor', # ToTensor 类将其他类型转化为 Tensor 类型 + keys=['raw_feature']), # 将被从其他类型转化为 Tensor 类型的特征 + dict( # ToDataContainer 类的配置 + type='ToDataContainer', # 将一些信息转入到 ToDataContainer 中 + fields=[dict(key='gt_bbox', stack=False, cpu_only=True)]) # 携带额外键和属性的信息域 + ] + val_pipeline = [ # 验证数据前处理流水线步骤组成的列表 + dict(type='LoadLocalizationFeature'), # 加载时序动作检测特征 + dict(type='GenerateLocalizationLabels'), # 生成时序动作检测标签 + dict( # Collect 类的配置 + type='Collect', # Collect 类决定哪些键会被传递到时序检测器中 + keys=['raw_feature', 'gt_bbox'], # 输入的键 + meta_name='video_meta', # 元名称 + meta_keys=[ + 'video_name', 'duration_second', 
'duration_frame', 'annotations', + 'feature_frame' + ]), # 输入的元键 + dict( # ToTensor 类的配置 + type='ToTensor', # ToTensor 类将其他类型转化为 Tensor 类型 + keys=['raw_feature']), # 将被从其他类型转化为 Tensor 类型的特征 + dict( # ToDataContainer 类的配置 + type='ToDataContainer', # 将一些信息转入到 ToDataContainer 中 + fields=[dict(key='gt_bbox', stack=False, cpu_only=True)]) # 携带额外键和属性的信息域 + ] + test_pipeline = [ # 测试数据前处理流水线步骤组成的列表 + dict(type='LoadLocalizationFeature'), # 加载时序动作检测特征 + dict( # Collect 类的配置 + type='Collect', # Collect 类决定哪些键会被传递到时序检测器中 + keys=['raw_feature'], # 输入的键 + meta_name='video_meta', # 元名称 + meta_keys=[ + 'video_name', 'duration_second', 'duration_frame', 'annotations', + 'feature_frame' + ]), # 输入的元键 + dict( # ToTensor 类的配置 + type='ToTensor', # ToTensor 类将其他类型转化为 Tensor 类型 + keys=['raw_feature']), # 将被从其他类型转化为 Tensor 类型的特征 + ] + data = dict( # 数据的配置 + videos_per_gpu=8, # 单个 GPU 的批大小 + workers_per_gpu=8, # 单个 GPU 的 dataloader 的进程 + train_dataloader=dict( # 训练过程 dataloader 的额外设置 + drop_last=True), # 在训练过程中是否丢弃最后一个批次 + val_dataloader=dict( # 验证过程 dataloader 的额外设置 + videos_per_gpu=1), # 单个 GPU 的批大小 + test_dataloader=dict( # 测试过程 dataloader 的额外设置 + videos_per_gpu=2), # 单个 GPU 的批大小 + test=dict( # 测试数据集的设置 + type=dataset_type, + ann_file=ann_file_test, + pipeline=test_pipeline, + data_prefix=data_root_val), + val=dict( # 验证数据集的设置 + type=dataset_type, + ann_file=ann_file_val, + pipeline=val_pipeline, + data_prefix=data_root_val), + train=dict( # 训练数据集的设置 + type=dataset_type, + ann_file=ann_file_train, + pipeline=train_pipeline, + data_prefix=data_root)) + + # 优化器设置 + optimizer = dict( + # 构建优化器的设置,支持: + # (1) 所有 PyTorch 原生的优化器,这些优化器的参数和 PyTorch 对应的一致; + # (2) 自定义的优化器,这些优化器在 `constructor` 的基础上构建。 + # 更多细节可参考 "tutorials/5_new_modules.md" 部分 + type='Adam', # 优化器类型, 参考 https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/optimizer/default_constructor.py#L13 for more details + lr=0.001, # 学习率, 参数的细节使用可参考 PyTorch 的对应文档 + weight_decay=0.0001) # Adam 优化器的权重衰减 + optimizer_config = dict( # 用于构建优化器钩子的设置 + grad_clip=None) # 大部分的方法不使用梯度裁剪 + # 学习策略设置 + lr_config = dict( # 用于注册学习率调整钩子的设置 + policy='step', # 调整器策略, 支持 CosineAnnealing,Cyclic等方法。更多细节可参考 https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/lr_updater.py#L9 + step=7) # 学习率衰减步长 + + total_epochs = 9 # 训练模型的总周期数 + checkpoint_config = dict( # 模型权重文件钩子设置,更多细节可参考 https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/checkpoint.py + interval=1) # 模型权重文件保存间隔 + evaluation = dict( # 训练期间做验证的设置 + interval=1, # 执行验证的间隔 + metrics=['AR@AN']) # 验证方法 + log_config = dict( # 注册日志钩子的设置 + interval=50, # 打印日志间隔 + hooks=[ # 训练期间执行的钩子 + dict(type='TextLoggerHook'), # 记录训练过程信息的日志 + # dict(type='TensorboardLoggerHook'), # 同时支持 Tensorboard 日志 + ]) + + # 运行设置 + dist_params = dict(backend='nccl') # 建立分布式训练的设置(端口号,多 GPU 通信框架等) + log_level = 'INFO' # 日志等级 + work_dir = './work_dirs/bmn_400x100_2x8_9e_activitynet_feature/' # 记录当前实验日志和模型权重文件的文件夹 + load_from = None # 从给定路径加载模型作为预训练模型. 这个选项不会用于断点恢复训练 + resume_from = None # 加载给定路径的模型权重文件作为断点续连的模型, 训练将从该时间点保存的周期点继续进行 + workflow = [('train', 1)] # runner 的执行流. 
[('train', 1)] 代表只有一个执行流,并且这个名为 train 的执行流只执行一次 + output_config = dict( # 时序检测器输出设置 + out=f'{work_dir}/results.json', # 输出文件路径 + output_format='json') # 输出文件格式 + ``` ### 动作识别的配置文件系统 @@ -228,207 +228,207 @@ MMAction2 将模块化设计整合到配置文件系统中,以便执行各类 - 以 TSN 为例 - 为了帮助用户理解 MMAction2 的配置文件结构,以及动作识别系统中的一些模块,这里以 TSN 为例,给出其配置文件的注释。 - 对于每个模块的详细用法以及对应参数的选择,请参照 [API 文档](https://mmaction2.readthedocs.io/en/latest/api.html)。 - - ```python - # 模型设置 - model = dict( # 模型的配置 - type='Recognizer2D', # 动作识别器的类型 - backbone=dict( # Backbone 字典设置 - type='ResNet', # Backbone 名 - pretrained='torchvision://resnet50', # 预训练模型的 url 或文件位置 - depth=50, # ResNet 模型深度 - norm_eval=False), # 训练时是否设置 BN 层为验证模式 - cls_head=dict( # 分类器字典设置 - type='TSNHead', # 分类器名 - num_classes=400, # 分类类别数量 - in_channels=2048, # 分类器里输入通道数 - spatial_type='avg', # 空间维度的池化种类 - consensus=dict(type='AvgConsensus', dim=1), # consensus 模块设置 - dropout_ratio=0.4, # dropout 层概率 - init_std=0.01), # 线性层初始化 std 值 - # 模型训练和测试的设置 - train_cfg=None, # 训练 TSN 的超参配置 - test_cfg=dict(average_clips=None)) # 测试 TSN 的超参配置 - - # 数据集设置 - dataset_type = 'RawframeDataset' # 训练,验证,测试的数据集类型 - data_root = 'data/kinetics400/rawframes_train/' # 训练集的根目录 - data_root_val = 'data/kinetics400/rawframes_val/' # 验证集,测试集的根目录 - ann_file_train = 'data/kinetics400/kinetics400_train_list_rawframes.txt' # 训练集的标注文件 - ann_file_val = 'data/kinetics400/kinetics400_val_list_rawframes.txt' # 验证集的标注文件 - ann_file_test = 'data/kinetics400/kinetics400_val_list_rawframes.txt' # 测试集的标注文件 - img_norm_cfg = dict( # 图像正则化参数设置 - mean=[123.675, 116.28, 103.53], # 图像正则化平均值 - std=[58.395, 57.12, 57.375], # 图像正则化方差 - to_bgr=False) # 是否将通道数从 RGB 转为 BGR - - train_pipeline = [ # 训练数据前处理流水线步骤组成的列表 - dict( # SampleFrames 类的配置 - type='SampleFrames', # 选定采样哪些视频帧 - clip_len=1, # 每个输出视频片段的帧 - frame_interval=1, # 所采相邻帧的时序间隔 - num_clips=3), # 所采帧片段的数量 - dict( # RawFrameDecode 类的配置 - type='RawFrameDecode'), # 给定帧序列,加载对应帧,解码对应帧 - dict( # Resize 类的配置 - type='Resize', # 调整图片尺寸 - scale=(-1, 256)), # 调整比例 - dict( # MultiScaleCrop 类的配置 - type='MultiScaleCrop', # 多尺寸裁剪,随机从一系列给定尺寸中选择一个比例尺寸进行裁剪 - input_size=224, # 网络输入 - scales=(1, 0.875, 0.75, 0.66), # 长宽比例选择范围 - random_crop=False, # 是否进行随机裁剪 - max_wh_scale_gap=1), # 长宽最大比例间隔 - dict( # Resize 类的配置 - type='Resize', # 调整图片尺寸 - scale=(224, 224), # 调整比例 - keep_ratio=False), # 是否保持长宽比 - dict( # Flip 类的配置 - type='Flip', # 图片翻转 - flip_ratio=0.5), # 执行翻转几率 - dict( # Normalize 类的配置 - type='Normalize', # 图片正则化 - **img_norm_cfg), # 图片正则化参数 - dict( # FormatShape 类的配置 - type='FormatShape', # 将图片格式转变为给定的输入格式 - input_format='NCHW'), # 最终的图片组成格式 - dict( # Collect 类的配置 - type='Collect', # Collect 类决定哪些键会被传递到行为识别器中 - keys=['imgs', 'label'], # 输入的键 - meta_keys=[]), # 输入的元键 - dict( # ToTensor 类的配置 - type='ToTensor', # ToTensor 类将其他类型转化为 Tensor 类型 - keys=['imgs', 'label']) # 将被从其他类型转化为 Tensor 类型的特征 - ] - val_pipeline = [ # 验证数据前处理流水线步骤组成的列表 - dict( # SampleFrames 类的配置 - type='SampleFrames', # 选定采样哪些视频帧 - clip_len=1, # 每个输出视频片段的帧 - frame_interval=1, # 所采相邻帧的时序间隔 - num_clips=3, # 所采帧片段的数量 - test_mode=True), # 是否设置为测试模式采帧 - dict( # RawFrameDecode 类的配置 - type='RawFrameDecode'), # 给定帧序列,加载对应帧,解码对应帧 - dict( # Resize 类的配置 - type='Resize', # 调整图片尺寸 - scale=(-1, 256)), # 调整比例 - dict( # CenterCrop 类的配置 - type='CenterCrop', # 中心裁剪 - crop_size=224), # 裁剪部分的尺寸 - dict( # Flip 类的配置 - type='Flip', # 图片翻转 - flip_ratio=0), # 翻转几率 - dict( # Normalize 类的配置 - type='Normalize', # 图片正则化 - **img_norm_cfg), # 图片正则化参数 - dict( # FormatShape 类的配置 - type='FormatShape', # 将图片格式转变为给定的输入格式 - input_format='NCHW'), # 最终的图片组成格式 - dict( # 
Collect 类的配置 - type='Collect', # Collect 类决定哪些键会被传递到行为识别器中 - keys=['imgs', 'label'], # 输入的键 - meta_keys=[]), # 输入的元键 - dict( # ToTensor 类的配置 - type='ToTensor', # ToTensor 类将其他类型转化为 Tensor 类型 - keys=['imgs']) # 将被从其他类型转化为 Tensor 类型的特征 - ] - test_pipeline = [ # 测试数据前处理流水线步骤组成的列表 - dict( # SampleFrames 类的配置 - type='SampleFrames', # 选定采样哪些视频帧 - clip_len=1, # 每个输出视频片段的帧 - frame_interval=1, # 所采相邻帧的时序间隔 - num_clips=25, # 所采帧片段的数量 - test_mode=True), # 是否设置为测试模式采帧 - dict( # RawFrameDecode 类的配置 - type='RawFrameDecode'), # 给定帧序列,加载对应帧,解码对应帧 - dict( # Resize 类的配置 - type='Resize', # 调整图片尺寸 - scale=(-1, 256)), # 调整比例 - dict( # TenCrop 类的配置 - type='TenCrop', # 裁剪 10 个区域 - crop_size=224), # 裁剪部分的尺寸 - dict( # Flip 类的配置 - type='Flip', # 图片翻转 - flip_ratio=0), # 执行翻转几率 - dict( # Normalize 类的配置 - type='Normalize', # 图片正则化 - **img_norm_cfg), # 图片正则化参数 - dict( # FormatShape 类的配置 - type='FormatShape', # 将图片格式转变为给定的输入格式 - input_format='NCHW'), # 最终的图片组成格式 - dict( # Collect 类的配置 - type='Collect', # Collect 类决定哪些键会被传递到行为识别器中 - keys=['imgs', 'label'], # 输入的键 - meta_keys=[]), # 输入的元键 - dict( # ToTensor 类的配置 - type='ToTensor', # ToTensor 类将其他类型转化为 Tensor 类型 - keys=['imgs']) # 将被从其他类型转化为 Tensor 类型的特征 - ] - data = dict( # 数据的配置 - videos_per_gpu=32, # 单个 GPU 的批大小 - workers_per_gpu=2, # 单个 GPU 的 dataloader 的进程 - train_dataloader=dict( # 训练过程 dataloader 的额外设置 - drop_last=True), # 在训练过程中是否丢弃最后一个批次 - val_dataloader=dict( # 验证过程 dataloader 的额外设置 - videos_per_gpu=1), # 单个 GPU 的批大小 - test_dataloader=dict( # 测试过程 dataloader 的额外设置 - videos_per_gpu=2), # 单个 GPU 的批大小 - train=dict( # 训练数据集的设置 - type=dataset_type, - ann_file=ann_file_train, - data_prefix=data_root, - pipeline=train_pipeline), - val=dict( # 验证数据集的设置 - type=dataset_type, - ann_file=ann_file_val, - data_prefix=data_root_val, - pipeline=val_pipeline), - test=dict( # 测试数据集的设置 - type=dataset_type, - ann_file=ann_file_test, - data_prefix=data_root_val, - pipeline=test_pipeline)) - # 优化器设置 - optimizer = dict( - # 构建优化器的设置,支持: - # (1) 所有 PyTorch 原生的优化器,这些优化器的参数和 PyTorch 对应的一致; - # (2) 自定义的优化器,这些优化器在 `constructor` 的基础上构建。 - # 更多细节可参考 "tutorials/5_new_modules.md" 部分 - type='SGD', # 优化器类型, 参考 https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/optimizer/default_constructor.py#L13 - lr=0.01, # 学习率, 参数的细节使用可参考 PyTorch 的对应文档 - momentum=0.9, # 动量大小 - weight_decay=0.0001) # SGD 优化器权重衰减 - optimizer_config = dict( # 用于构建优化器钩子的设置 - grad_clip=dict(max_norm=40, norm_type=2)) # 使用梯度裁剪 - # 学习策略设置 - lr_config = dict( # 用于注册学习率调整钩子的设置 - policy='step', # 调整器策略, 支持 CosineAnnealing,Cyclic等方法。更多细节可参考 https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/lr_updater.py#L9 - step=[40, 80]) # 学习率衰减步长 - total_epochs = 100 # 训练模型的总周期数 - checkpoint_config = dict( # 模型权重钩子设置,更多细节可参考 https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/checkpoint.py - interval=5) # 模型权重文件保存间隔 - evaluation = dict( # 训练期间做验证的设置 - interval=5, # 执行验证的间隔 - metrics=['top_k_accuracy', 'mean_class_accuracy'], # 验证方法 - save_best='top_k_accuracy') # 设置 `top_k_accuracy` 作为指示器,用于存储最好的模型权重文件 - log_config = dict( # 注册日志钩子的设置 - interval=20, # 打印日志间隔 - hooks=[ # 训练期间执行的钩子 - dict(type='TextLoggerHook'), # 记录训练过程信息的日志 - # dict(type='TensorboardLoggerHook'), # 同时支持 Tensorboard 日志 - ]) - - # 运行设置 - dist_params = dict(backend='nccl') # 建立分布式训练的设置,其中端口号也可以设置 - log_level = 'INFO' # 日志等级 - work_dir = './work_dirs/tsn_r50_1x1x3_100e_kinetics400_rgb/' # 记录当前实验日志和模型权重文件的文件夹 - load_from = None # 从给定路径加载模型作为预训练模型. 
这个选项不会用于断点恢复训练 - resume_from = None # 加载给定路径的模型权重文件作为断点续连的模型, 训练将从该时间点保存的周期点继续进行 - workflow = [('train', 1)] # runner 的执行流. [('train', 1)] 代表只有一个执行流,并且这个名为 train 的执行流只执行一次 - - ``` + 为了帮助用户理解 MMAction2 的配置文件结构,以及动作识别系统中的一些模块,这里以 TSN 为例,给出其配置文件的注释。 + 对于每个模块的详细用法以及对应参数的选择,请参照 [API 文档](https://mmaction2.readthedocs.io/en/latest/api.html)。 + + ```python + # 模型设置 + model = dict( # 模型的配置 + type='Recognizer2D', # 动作识别器的类型 + backbone=dict( # Backbone 字典设置 + type='ResNet', # Backbone 名 + pretrained='torchvision://resnet50', # 预训练模型的 url 或文件位置 + depth=50, # ResNet 模型深度 + norm_eval=False), # 训练时是否设置 BN 层为验证模式 + cls_head=dict( # 分类器字典设置 + type='TSNHead', # 分类器名 + num_classes=400, # 分类类别数量 + in_channels=2048, # 分类器里输入通道数 + spatial_type='avg', # 空间维度的池化种类 + consensus=dict(type='AvgConsensus', dim=1), # consensus 模块设置 + dropout_ratio=0.4, # dropout 层概率 + init_std=0.01), # 线性层初始化 std 值 + # 模型训练和测试的设置 + train_cfg=None, # 训练 TSN 的超参配置 + test_cfg=dict(average_clips=None)) # 测试 TSN 的超参配置 + + # 数据集设置 + dataset_type = 'RawframeDataset' # 训练,验证,测试的数据集类型 + data_root = 'data/kinetics400/rawframes_train/' # 训练集的根目录 + data_root_val = 'data/kinetics400/rawframes_val/' # 验证集,测试集的根目录 + ann_file_train = 'data/kinetics400/kinetics400_train_list_rawframes.txt' # 训练集的标注文件 + ann_file_val = 'data/kinetics400/kinetics400_val_list_rawframes.txt' # 验证集的标注文件 + ann_file_test = 'data/kinetics400/kinetics400_val_list_rawframes.txt' # 测试集的标注文件 + img_norm_cfg = dict( # 图像正则化参数设置 + mean=[123.675, 116.28, 103.53], # 图像正则化平均值 + std=[58.395, 57.12, 57.375], # 图像正则化方差 + to_bgr=False) # 是否将通道数从 RGB 转为 BGR + + train_pipeline = [ # 训练数据前处理流水线步骤组成的列表 + dict( # SampleFrames 类的配置 + type='SampleFrames', # 选定采样哪些视频帧 + clip_len=1, # 每个输出视频片段的帧 + frame_interval=1, # 所采相邻帧的时序间隔 + num_clips=3), # 所采帧片段的数量 + dict( # RawFrameDecode 类的配置 + type='RawFrameDecode'), # 给定帧序列,加载对应帧,解码对应帧 + dict( # Resize 类的配置 + type='Resize', # 调整图片尺寸 + scale=(-1, 256)), # 调整比例 + dict( # MultiScaleCrop 类的配置 + type='MultiScaleCrop', # 多尺寸裁剪,随机从一系列给定尺寸中选择一个比例尺寸进行裁剪 + input_size=224, # 网络输入 + scales=(1, 0.875, 0.75, 0.66), # 长宽比例选择范围 + random_crop=False, # 是否进行随机裁剪 + max_wh_scale_gap=1), # 长宽最大比例间隔 + dict( # Resize 类的配置 + type='Resize', # 调整图片尺寸 + scale=(224, 224), # 调整比例 + keep_ratio=False), # 是否保持长宽比 + dict( # Flip 类的配置 + type='Flip', # 图片翻转 + flip_ratio=0.5), # 执行翻转几率 + dict( # Normalize 类的配置 + type='Normalize', # 图片正则化 + **img_norm_cfg), # 图片正则化参数 + dict( # FormatShape 类的配置 + type='FormatShape', # 将图片格式转变为给定的输入格式 + input_format='NCHW'), # 最终的图片组成格式 + dict( # Collect 类的配置 + type='Collect', # Collect 类决定哪些键会被传递到行为识别器中 + keys=['imgs', 'label'], # 输入的键 + meta_keys=[]), # 输入的元键 + dict( # ToTensor 类的配置 + type='ToTensor', # ToTensor 类将其他类型转化为 Tensor 类型 + keys=['imgs', 'label']) # 将被从其他类型转化为 Tensor 类型的特征 + ] + val_pipeline = [ # 验证数据前处理流水线步骤组成的列表 + dict( # SampleFrames 类的配置 + type='SampleFrames', # 选定采样哪些视频帧 + clip_len=1, # 每个输出视频片段的帧 + frame_interval=1, # 所采相邻帧的时序间隔 + num_clips=3, # 所采帧片段的数量 + test_mode=True), # 是否设置为测试模式采帧 + dict( # RawFrameDecode 类的配置 + type='RawFrameDecode'), # 给定帧序列,加载对应帧,解码对应帧 + dict( # Resize 类的配置 + type='Resize', # 调整图片尺寸 + scale=(-1, 256)), # 调整比例 + dict( # CenterCrop 类的配置 + type='CenterCrop', # 中心裁剪 + crop_size=224), # 裁剪部分的尺寸 + dict( # Flip 类的配置 + type='Flip', # 图片翻转 + flip_ratio=0), # 翻转几率 + dict( # Normalize 类的配置 + type='Normalize', # 图片正则化 + **img_norm_cfg), # 图片正则化参数 + dict( # FormatShape 类的配置 + type='FormatShape', # 将图片格式转变为给定的输入格式 + input_format='NCHW'), # 最终的图片组成格式 + dict( # Collect 类的配置 + type='Collect', # Collect 类决定哪些键会被传递到行为识别器中 + keys=['imgs', 
'label'], # 输入的键 + meta_keys=[]), # 输入的元键 + dict( # ToTensor 类的配置 + type='ToTensor', # ToTensor 类将其他类型转化为 Tensor 类型 + keys=['imgs']) # 将被从其他类型转化为 Tensor 类型的特征 + ] + test_pipeline = [ # 测试数据前处理流水线步骤组成的列表 + dict( # SampleFrames 类的配置 + type='SampleFrames', # 选定采样哪些视频帧 + clip_len=1, # 每个输出视频片段的帧 + frame_interval=1, # 所采相邻帧的时序间隔 + num_clips=25, # 所采帧片段的数量 + test_mode=True), # 是否设置为测试模式采帧 + dict( # RawFrameDecode 类的配置 + type='RawFrameDecode'), # 给定帧序列,加载对应帧,解码对应帧 + dict( # Resize 类的配置 + type='Resize', # 调整图片尺寸 + scale=(-1, 256)), # 调整比例 + dict( # TenCrop 类的配置 + type='TenCrop', # 裁剪 10 个区域 + crop_size=224), # 裁剪部分的尺寸 + dict( # Flip 类的配置 + type='Flip', # 图片翻转 + flip_ratio=0), # 执行翻转几率 + dict( # Normalize 类的配置 + type='Normalize', # 图片正则化 + **img_norm_cfg), # 图片正则化参数 + dict( # FormatShape 类的配置 + type='FormatShape', # 将图片格式转变为给定的输入格式 + input_format='NCHW'), # 最终的图片组成格式 + dict( # Collect 类的配置 + type='Collect', # Collect 类决定哪些键会被传递到行为识别器中 + keys=['imgs', 'label'], # 输入的键 + meta_keys=[]), # 输入的元键 + dict( # ToTensor 类的配置 + type='ToTensor', # ToTensor 类将其他类型转化为 Tensor 类型 + keys=['imgs']) # 将被从其他类型转化为 Tensor 类型的特征 + ] + data = dict( # 数据的配置 + videos_per_gpu=32, # 单个 GPU 的批大小 + workers_per_gpu=2, # 单个 GPU 的 dataloader 的进程 + train_dataloader=dict( # 训练过程 dataloader 的额外设置 + drop_last=True), # 在训练过程中是否丢弃最后一个批次 + val_dataloader=dict( # 验证过程 dataloader 的额外设置 + videos_per_gpu=1), # 单个 GPU 的批大小 + test_dataloader=dict( # 测试过程 dataloader 的额外设置 + videos_per_gpu=2), # 单个 GPU 的批大小 + train=dict( # 训练数据集的设置 + type=dataset_type, + ann_file=ann_file_train, + data_prefix=data_root, + pipeline=train_pipeline), + val=dict( # 验证数据集的设置 + type=dataset_type, + ann_file=ann_file_val, + data_prefix=data_root_val, + pipeline=val_pipeline), + test=dict( # 测试数据集的设置 + type=dataset_type, + ann_file=ann_file_test, + data_prefix=data_root_val, + pipeline=test_pipeline)) + # 优化器设置 + optimizer = dict( + # 构建优化器的设置,支持: + # (1) 所有 PyTorch 原生的优化器,这些优化器的参数和 PyTorch 对应的一致; + # (2) 自定义的优化器,这些优化器在 `constructor` 的基础上构建。 + # 更多细节可参考 "tutorials/5_new_modules.md" 部分 + type='SGD', # 优化器类型, 参考 https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/optimizer/default_constructor.py#L13 + lr=0.01, # 学习率, 参数的细节使用可参考 PyTorch 的对应文档 + momentum=0.9, # 动量大小 + weight_decay=0.0001) # SGD 优化器权重衰减 + optimizer_config = dict( # 用于构建优化器钩子的设置 + grad_clip=dict(max_norm=40, norm_type=2)) # 使用梯度裁剪 + # 学习策略设置 + lr_config = dict( # 用于注册学习率调整钩子的设置 + policy='step', # 调整器策略, 支持 CosineAnnealing,Cyclic等方法。更多细节可参考 https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/lr_updater.py#L9 + step=[40, 80]) # 学习率衰减步长 + total_epochs = 100 # 训练模型的总周期数 + checkpoint_config = dict( # 模型权重钩子设置,更多细节可参考 https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/checkpoint.py + interval=5) # 模型权重文件保存间隔 + evaluation = dict( # 训练期间做验证的设置 + interval=5, # 执行验证的间隔 + metrics=['top_k_accuracy', 'mean_class_accuracy'], # 验证方法 + save_best='top_k_accuracy') # 设置 `top_k_accuracy` 作为指示器,用于存储最好的模型权重文件 + log_config = dict( # 注册日志钩子的设置 + interval=20, # 打印日志间隔 + hooks=[ # 训练期间执行的钩子 + dict(type='TextLoggerHook'), # 记录训练过程信息的日志 + # dict(type='TensorboardLoggerHook'), # 同时支持 Tensorboard 日志 + ]) + + # 运行设置 + dist_params = dict(backend='nccl') # 建立分布式训练的设置,其中端口号也可以设置 + log_level = 'INFO' # 日志等级 + work_dir = './work_dirs/tsn_r50_1x1x3_100e_kinetics400_rgb/' # 记录当前实验日志和模型权重文件的文件夹 + load_from = None # 从给定路径加载模型作为预训练模型. 这个选项不会用于断点恢复训练 + resume_from = None # 加载给定路径的模型权重文件作为断点续连的模型, 训练将从该时间点保存的周期点继续进行 + workflow = [('train', 1)] # runner 的执行流. 
[('train', 1)] 代表只有一个执行流,并且这个名为 train 的执行流只执行一次 + + ``` ### 时空动作检测的配置文件系统 @@ -436,224 +436,224 @@ MMAction2 将模块化设计整合到配置文件系统中,以便于执行各 - 以 FastRCNN 为例 - 为了帮助用户理解 MMAction2 的完整配置文件结构,以及时空检测系统中的一些模块,这里以 FastRCNN 为例,给出其配置文件的注释。 - 对于每个模块的详细用法以及对应参数的选择,请参照 [API 文档](https://mmaction2.readthedocs.io/en/latest/api.html)。 - - ```python - # 模型设置 - model = dict( # 模型的配置 - type='FastRCNN', # 时空检测器类型 - backbone=dict( # Backbone 字典设置 - type='ResNet3dSlowOnly', # Backbone 名 - depth=50, # ResNet 模型深度 - pretrained=None, # 预训练模型的 url 或文件位置 - pretrained2d=False, # 预训练模型是否为 2D 模型 - lateral=False, # backbone 是否有侧连接 - num_stages=4, # ResNet 模型阶数 - conv1_kernel=(1, 7, 7), # Conv1 卷积核尺寸 - conv1_stride_t=1, # Conv1 时序步长 - pool1_stride_t=1, # Pool1 时序步长 - spatial_strides=(1, 2, 2, 1)), # 每个 ResNet 阶的空间步长 - roi_head=dict( # roi_head 字典设置 - type='AVARoIHead', # roi_head 名 - bbox_roi_extractor=dict( # bbox_roi_extractor 字典设置 - type='SingleRoIExtractor3D', # bbox_roi_extractor 名 - roi_layer_type='RoIAlign', # RoI op 类型 - output_size=8, # RoI op 输出特征尺寸 - with_temporal_pool=True), # 时序维度是否要经过池化 - bbox_head=dict( # bbox_head 字典设置 - type='BBoxHeadAVA', # bbox_head 名 - in_channels=2048, # 输入特征通道数 - num_classes=81, # 动作类别数 + 1(背景) - multilabel=True, # 数据集是否多标签 - dropout_ratio=0.5)), # dropout 比率 - # 模型训练和测试的设置 - train_cfg=dict( # 训练 FastRCNN 的超参配置 - rcnn=dict( # rcnn 训练字典设置 - assigner=dict( # assigner 字典设置 - type='MaxIoUAssignerAVA', # assigner 名 - pos_iou_thr=0.9, # 正样本 IoU 阈值, > pos_iou_thr -> positive - neg_iou_thr=0.9, # 负样本 IoU 阈值, < neg_iou_thr -> negative - min_pos_iou=0.9), # 正样本最小可接受 IoU - sampler=dict( # sample 字典设置 - type='RandomSampler', # sampler 名 - num=32, # sampler 批大小 - pos_fraction=1, # sampler 正样本边界框比率 - neg_pos_ub=-1, # 负样本数转正样本数的比率上界 - add_gt_as_proposals=True), # 是否添加 ground truth 为候选 - pos_weight=1.0, # 正样本 loss 权重 - debug=False)), # 是否为 debug 模式 - test_cfg=dict( # 测试 FastRCNN 的超参设置 - rcnn=dict( # rcnn 测试字典设置 - action_thr=0.002))) # 某行为的阈值 - - # 数据集设置 - dataset_type = 'AVADataset' # 训练,验证,测试的数据集类型 - data_root = 'data/ava/rawframes' # 训练集的根目录 - anno_root = 'data/ava/annotations' # 标注文件目录 - - ann_file_train = f'{anno_root}/ava_train_v2.1.csv' # 训练集的标注文件 - ann_file_val = f'{anno_root}/ava_val_v2.1.csv' # 验证集的标注文件 - - exclude_file_train = f'{anno_root}/ava_train_excluded_timestamps_v2.1.csv' # 训练除外数据集文件路径 - exclude_file_val = f'{anno_root}/ava_val_excluded_timestamps_v2.1.csv' # 验证除外数据集文件路径 - - label_file = f'{anno_root}/ava_action_list_v2.1_for_activitynet_2018.pbtxt' # 标签文件路径 - - proposal_file_train = f'{anno_root}/ava_dense_proposals_train.FAIR.recall_93.9.pkl' # 训练样本检测候选框的文件路径 - proposal_file_val = f'{anno_root}/ava_dense_proposals_val.FAIR.recall_93.9.pkl' # 验证样本检测候选框的文件路径 - - img_norm_cfg = dict( # 图像正则化参数设置 - mean=[123.675, 116.28, 103.53], # 图像正则化平均值 - std=[58.395, 57.12, 57.375], # 图像正则化方差 - to_bgr=False) # 是否将通道数从 RGB 转为 BGR - - train_pipeline = [ # 训练数据前处理流水线步骤组成的列表 - dict( # SampleFrames 类的配置 - type='AVASampleFrames', # 选定采样哪些视频帧 - clip_len=4, # 每个输出视频片段的帧 - frame_interval=16), # 所采相邻帧的时序间隔 - dict( # RawFrameDecode 类的配置 - type='RawFrameDecode'), # 给定帧序列,加载对应帧,解码对应帧 - dict( # RandomRescale 类的配置 - type='RandomRescale', # 给定一个范围,进行随机短边缩放 - scale_range=(256, 320)), # RandomRescale 的短边缩放范围 - dict( # RandomCrop 类的配置 - type='RandomCrop', # 给定一个尺寸进行随机裁剪 - size=256), # 裁剪尺寸 - dict( # Flip 类的配置 - type='Flip', # 图片翻转 - flip_ratio=0.5), # 执行翻转几率 - dict( # Normalize 类的配置 - type='Normalize', # 图片正则化 - **img_norm_cfg), # 图片正则化参数 - dict( # FormatShape 类的配置 - type='FormatShape', # 将图片格式转变为给定的输入格式 - 
input_format='NCTHW', # 最终的图片组成格式 - collapse=True), # 去掉 N 梯度当 N == 1 - dict( # Rename 类的配置 - type='Rename', # 重命名 key 名 - mapping=dict(imgs='img')), # 改名映射字典 - dict( # ToTensor 类的配置 - type='ToTensor', # ToTensor 类将其他类型转化为 Tensor 类型 - keys=['img', 'proposals', 'gt_bboxes', 'gt_labels']), # 将被从其他类型转化为 Tensor 类型的特征 - dict( # ToDataContainer 类的配置 - type='ToDataContainer', # 将一些信息转入到 ToDataContainer 中 - fields=[ # 转化为 Datacontainer 的域 - dict( # 域字典 - key=['proposals', 'gt_bboxes', 'gt_labels'], # 将转化为 DataContainer 的键 - stack=False)]), # 是否要堆列这些 tensor - dict( # Collect 类的配置 - type='Collect', # Collect 类决定哪些键会被传递到时空检测器中 - keys=['img', 'proposals', 'gt_bboxes', 'gt_labels'], # 输入的键 - meta_keys=['scores', 'entity_ids']), # 输入的元键 - ] - - val_pipeline = [ # 验证数据前处理流水线步骤组成的列表 - dict( # SampleFrames 类的配置 - type='AVASampleFrames', # 选定采样哪些视频帧 - clip_len=4, # 每个输出视频片段的帧 - frame_interval=16), # 所采相邻帧的时序间隔 - dict( # RawFrameDecode 类的配置 - type='RawFrameDecode'), # 给定帧序列,加载对应帧,解码对应帧 - dict( # Resize 类的配置 - type='Resize', # 调整图片尺寸 - scale=(-1, 256)), # 调整比例 - dict( # Normalize 类的配置 - type='Normalize', # 图片正则化 - **img_norm_cfg), # 图片正则化参数 - dict( # FormatShape 类的配置 - type='FormatShape', # 将图片格式转变为给定的输入格式 - input_format='NCTHW', # 最终的图片组成格式 - collapse=True), # 去掉 N 梯度当 N == 1 - dict( # Rename 类的配置 - type='Rename', # 重命名 key 名 - mapping=dict(imgs='img')), # 改名映射字典 - dict( # ToTensor 类的配置 - type='ToTensor', # ToTensor 类将其他类型转化为 Tensor 类型 - keys=['img', 'proposals']), # 将被从其他类型转化为 Tensor 类型的特征 - dict( # ToDataContainer 类的配置 - type='ToDataContainer', # 将一些信息转入到 ToDataContainer 中 - fields=[ # 转化为 Datacontainer 的域 - dict( # 域字典 - key=['proposals'], # 将转化为 DataContainer 的键 - stack=False)]), # 是否要堆列这些 tensor - dict( # Collect 类的配置 - type='Collect', # Collect 类决定哪些键会被传递到时空检测器中 - keys=['img', 'proposals'], # 输入的键 - meta_keys=['scores', 'entity_ids'], # 输入的元键 - nested=True) # 是否将数据包装为嵌套列表 - ] - - data = dict( # 数据的配置 - videos_per_gpu=16, # 单个 GPU 的批大小 - workers_per_gpu=2, # 单个 GPU 的 dataloader 的进程 - val_dataloader=dict( # 验证过程 dataloader 的额外设置 - videos_per_gpu=1), # 单个 GPU 的批大小 - train=dict( # 训练数据集的设置 - type=dataset_type, - ann_file=ann_file_train, - exclude_file=exclude_file_train, - pipeline=train_pipeline, - label_file=label_file, - proposal_file=proposal_file_train, - person_det_score_thr=0.9, - data_prefix=data_root), - val=dict( # 验证数据集的设置 - type=dataset_type, - ann_file=ann_file_val, - exclude_file=exclude_file_val, - pipeline=val_pipeline, - label_file=label_file, - proposal_file=proposal_file_val, - person_det_score_thr=0.9, - data_prefix=data_root)) - data['test'] = data['val'] # 将验证数据集设置复制到测试数据集设置 - - # 优化器设置 - optimizer = dict( - # 构建优化器的设置,支持: - # (1) 所有 PyTorch 原生的优化器,这些优化器的参数和 PyTorch 对应的一致; - # (2) 自定义的优化器,这些优化器在 `constructor` 的基础上构建。 - # 更多细节可参考 "tutorials/5_new_modules.md" 部分 - type='SGD', # 优化器类型, 参考 https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/optimizer/default_constructor.py#L13 - lr=0.2, # 学习率, 参数的细节使用可参考 PyTorch 的对应文档 - momentum=0.9, # 动量大小 - weight_decay=0.00001) # SGD 优化器权重衰减 - - optimizer_config = dict( # 用于构建优化器钩子的设置 - grad_clip=dict(max_norm=40, norm_type=2)) # 使用梯度裁剪 - - lr_config = dict( # 用于注册学习率调整钩子的设置 - policy='step', # 调整器策略, 支持 CosineAnnealing,Cyclic等方法。更多细节可参考 https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/lr_updater.py#L9 - step=[40, 80], # 学习率衰减步长 - warmup='linear', # Warmup 策略 - warmup_by_epoch=True, # Warmup 单位为 epoch 还是 iteration - warmup_iters=5, # warmup 数 - warmup_ratio=0.1) # 初始学习率为 warmup_ratio * lr - - total_epochs = 20 # 训练模型的总周期数 - 
checkpoint_config = dict( # 模型权重文件钩子设置,更多细节可参考 https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/checkpoint.py - interval=1) # 模型权重文件保存间隔 - workflow = [('train', 1)] # runner 的执行流. [('train', 1)] 代表只有一个执行流,并且这个名为 train 的执行流只执行一次 - evaluation = dict( # 训练期间做验证的设置 - interval=1, save_best='mAP@0.5IOU') # 执行验证的间隔,以及设置 `mAP@0.5IOU` 作为指示器,用于存储最好的模型权重文件 - log_config = dict( # 注册日志钩子的设置 - interval=20, # 打印日志间隔 - hooks=[ # 训练期间执行的钩子 - dict(type='TextLoggerHook'), # 记录训练过程信息的日志 - ]) - - # 运行设置 - dist_params = dict(backend='nccl') # 建立分布式训练的设置,其中端口号也可以设置 - log_level = 'INFO' # 日志等级 - work_dir = ('./work_dirs/ava/' # 记录当前实验日志和模型权重文件的文件夹 - 'slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb') - load_from = ('https://download.openmmlab.com/mmaction/recognition/slowonly/' # 从给定路径加载模型作为预训练模型. 这个选项不会用于断点恢复训练 - 'slowonly_r50_4x16x1_256e_kinetics400_rgb/' - 'slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth') - resume_from = None # 加载给定路径的模型权重文件作为断点续连的模型, 训练将从该时间点保存的周期点继续进行 - ``` + 为了帮助用户理解 MMAction2 的完整配置文件结构,以及时空检测系统中的一些模块,这里以 FastRCNN 为例,给出其配置文件的注释。 + 对于每个模块的详细用法以及对应参数的选择,请参照 [API 文档](https://mmaction2.readthedocs.io/en/latest/api.html)。 + + ```python + # 模型设置 + model = dict( # 模型的配置 + type='FastRCNN', # 时空检测器类型 + backbone=dict( # Backbone 字典设置 + type='ResNet3dSlowOnly', # Backbone 名 + depth=50, # ResNet 模型深度 + pretrained=None, # 预训练模型的 url 或文件位置 + pretrained2d=False, # 预训练模型是否为 2D 模型 + lateral=False, # backbone 是否有侧连接 + num_stages=4, # ResNet 模型阶数 + conv1_kernel=(1, 7, 7), # Conv1 卷积核尺寸 + conv1_stride_t=1, # Conv1 时序步长 + pool1_stride_t=1, # Pool1 时序步长 + spatial_strides=(1, 2, 2, 1)), # 每个 ResNet 阶的空间步长 + roi_head=dict( # roi_head 字典设置 + type='AVARoIHead', # roi_head 名 + bbox_roi_extractor=dict( # bbox_roi_extractor 字典设置 + type='SingleRoIExtractor3D', # bbox_roi_extractor 名 + roi_layer_type='RoIAlign', # RoI op 类型 + output_size=8, # RoI op 输出特征尺寸 + with_temporal_pool=True), # 时序维度是否要经过池化 + bbox_head=dict( # bbox_head 字典设置 + type='BBoxHeadAVA', # bbox_head 名 + in_channels=2048, # 输入特征通道数 + num_classes=81, # 动作类别数 + 1(背景) + multilabel=True, # 数据集是否多标签 + dropout_ratio=0.5)), # dropout 比率 + # 模型训练和测试的设置 + train_cfg=dict( # 训练 FastRCNN 的超参配置 + rcnn=dict( # rcnn 训练字典设置 + assigner=dict( # assigner 字典设置 + type='MaxIoUAssignerAVA', # assigner 名 + pos_iou_thr=0.9, # 正样本 IoU 阈值, > pos_iou_thr -> positive + neg_iou_thr=0.9, # 负样本 IoU 阈值, < neg_iou_thr -> negative + min_pos_iou=0.9), # 正样本最小可接受 IoU + sampler=dict( # sample 字典设置 + type='RandomSampler', # sampler 名 + num=32, # sampler 批大小 + pos_fraction=1, # sampler 正样本边界框比率 + neg_pos_ub=-1, # 负样本数转正样本数的比率上界 + add_gt_as_proposals=True), # 是否添加 ground truth 为候选 + pos_weight=1.0, # 正样本 loss 权重 + debug=False)), # 是否为 debug 模式 + test_cfg=dict( # 测试 FastRCNN 的超参设置 + rcnn=dict( # rcnn 测试字典设置 + action_thr=0.002))) # 某行为的阈值 + + # 数据集设置 + dataset_type = 'AVADataset' # 训练,验证,测试的数据集类型 + data_root = 'data/ava/rawframes' # 训练集的根目录 + anno_root = 'data/ava/annotations' # 标注文件目录 + + ann_file_train = f'{anno_root}/ava_train_v2.1.csv' # 训练集的标注文件 + ann_file_val = f'{anno_root}/ava_val_v2.1.csv' # 验证集的标注文件 + + exclude_file_train = f'{anno_root}/ava_train_excluded_timestamps_v2.1.csv' # 训练除外数据集文件路径 + exclude_file_val = f'{anno_root}/ava_val_excluded_timestamps_v2.1.csv' # 验证除外数据集文件路径 + + label_file = f'{anno_root}/ava_action_list_v2.1_for_activitynet_2018.pbtxt' # 标签文件路径 + + proposal_file_train = f'{anno_root}/ava_dense_proposals_train.FAIR.recall_93.9.pkl' # 训练样本检测候选框的文件路径 + proposal_file_val = f'{anno_root}/ava_dense_proposals_val.FAIR.recall_93.9.pkl' # 
验证样本检测候选框的文件路径 + + img_norm_cfg = dict( # 图像正则化参数设置 + mean=[123.675, 116.28, 103.53], # 图像正则化平均值 + std=[58.395, 57.12, 57.375], # 图像正则化方差 + to_bgr=False) # 是否将通道数从 RGB 转为 BGR + + train_pipeline = [ # 训练数据前处理流水线步骤组成的列表 + dict( # SampleFrames 类的配置 + type='AVASampleFrames', # 选定采样哪些视频帧 + clip_len=4, # 每个输出视频片段的帧 + frame_interval=16), # 所采相邻帧的时序间隔 + dict( # RawFrameDecode 类的配置 + type='RawFrameDecode'), # 给定帧序列,加载对应帧,解码对应帧 + dict( # RandomRescale 类的配置 + type='RandomRescale', # 给定一个范围,进行随机短边缩放 + scale_range=(256, 320)), # RandomRescale 的短边缩放范围 + dict( # RandomCrop 类的配置 + type='RandomCrop', # 给定一个尺寸进行随机裁剪 + size=256), # 裁剪尺寸 + dict( # Flip 类的配置 + type='Flip', # 图片翻转 + flip_ratio=0.5), # 执行翻转几率 + dict( # Normalize 类的配置 + type='Normalize', # 图片正则化 + **img_norm_cfg), # 图片正则化参数 + dict( # FormatShape 类的配置 + type='FormatShape', # 将图片格式转变为给定的输入格式 + input_format='NCTHW', # 最终的图片组成格式 + collapse=True), # 去掉 N 梯度当 N == 1 + dict( # Rename 类的配置 + type='Rename', # 重命名 key 名 + mapping=dict(imgs='img')), # 改名映射字典 + dict( # ToTensor 类的配置 + type='ToTensor', # ToTensor 类将其他类型转化为 Tensor 类型 + keys=['img', 'proposals', 'gt_bboxes', 'gt_labels']), # 将被从其他类型转化为 Tensor 类型的特征 + dict( # ToDataContainer 类的配置 + type='ToDataContainer', # 将一些信息转入到 ToDataContainer 中 + fields=[ # 转化为 Datacontainer 的域 + dict( # 域字典 + key=['proposals', 'gt_bboxes', 'gt_labels'], # 将转化为 DataContainer 的键 + stack=False)]), # 是否要堆列这些 tensor + dict( # Collect 类的配置 + type='Collect', # Collect 类决定哪些键会被传递到时空检测器中 + keys=['img', 'proposals', 'gt_bboxes', 'gt_labels'], # 输入的键 + meta_keys=['scores', 'entity_ids']), # 输入的元键 + ] + + val_pipeline = [ # 验证数据前处理流水线步骤组成的列表 + dict( # SampleFrames 类的配置 + type='AVASampleFrames', # 选定采样哪些视频帧 + clip_len=4, # 每个输出视频片段的帧 + frame_interval=16), # 所采相邻帧的时序间隔 + dict( # RawFrameDecode 类的配置 + type='RawFrameDecode'), # 给定帧序列,加载对应帧,解码对应帧 + dict( # Resize 类的配置 + type='Resize', # 调整图片尺寸 + scale=(-1, 256)), # 调整比例 + dict( # Normalize 类的配置 + type='Normalize', # 图片正则化 + **img_norm_cfg), # 图片正则化参数 + dict( # FormatShape 类的配置 + type='FormatShape', # 将图片格式转变为给定的输入格式 + input_format='NCTHW', # 最终的图片组成格式 + collapse=True), # 去掉 N 梯度当 N == 1 + dict( # Rename 类的配置 + type='Rename', # 重命名 key 名 + mapping=dict(imgs='img')), # 改名映射字典 + dict( # ToTensor 类的配置 + type='ToTensor', # ToTensor 类将其他类型转化为 Tensor 类型 + keys=['img', 'proposals']), # 将被从其他类型转化为 Tensor 类型的特征 + dict( # ToDataContainer 类的配置 + type='ToDataContainer', # 将一些信息转入到 ToDataContainer 中 + fields=[ # 转化为 Datacontainer 的域 + dict( # 域字典 + key=['proposals'], # 将转化为 DataContainer 的键 + stack=False)]), # 是否要堆列这些 tensor + dict( # Collect 类的配置 + type='Collect', # Collect 类决定哪些键会被传递到时空检测器中 + keys=['img', 'proposals'], # 输入的键 + meta_keys=['scores', 'entity_ids'], # 输入的元键 + nested=True) # 是否将数据包装为嵌套列表 + ] + + data = dict( # 数据的配置 + videos_per_gpu=16, # 单个 GPU 的批大小 + workers_per_gpu=2, # 单个 GPU 的 dataloader 的进程 + val_dataloader=dict( # 验证过程 dataloader 的额外设置 + videos_per_gpu=1), # 单个 GPU 的批大小 + train=dict( # 训练数据集的设置 + type=dataset_type, + ann_file=ann_file_train, + exclude_file=exclude_file_train, + pipeline=train_pipeline, + label_file=label_file, + proposal_file=proposal_file_train, + person_det_score_thr=0.9, + data_prefix=data_root), + val=dict( # 验证数据集的设置 + type=dataset_type, + ann_file=ann_file_val, + exclude_file=exclude_file_val, + pipeline=val_pipeline, + label_file=label_file, + proposal_file=proposal_file_val, + person_det_score_thr=0.9, + data_prefix=data_root)) + data['test'] = data['val'] # 将验证数据集设置复制到测试数据集设置 + + # 优化器设置 + optimizer = dict( + # 构建优化器的设置,支持: + # (1) 所有 PyTorch 
原生的优化器,这些优化器的参数和 PyTorch 对应的一致; + # (2) 自定义的优化器,这些优化器在 `constructor` 的基础上构建。 + # 更多细节可参考 "tutorials/5_new_modules.md" 部分 + type='SGD', # 优化器类型, 参考 https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/optimizer/default_constructor.py#L13 + lr=0.2, # 学习率, 参数的细节使用可参考 PyTorch 的对应文档 + momentum=0.9, # 动量大小 + weight_decay=0.00001) # SGD 优化器权重衰减 + + optimizer_config = dict( # 用于构建优化器钩子的设置 + grad_clip=dict(max_norm=40, norm_type=2)) # 使用梯度裁剪 + + lr_config = dict( # 用于注册学习率调整钩子的设置 + policy='step', # 调整器策略, 支持 CosineAnnealing,Cyclic等方法。更多细节可参考 https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/lr_updater.py#L9 + step=[40, 80], # 学习率衰减步长 + warmup='linear', # Warmup 策略 + warmup_by_epoch=True, # Warmup 单位为 epoch 还是 iteration + warmup_iters=5, # warmup 数 + warmup_ratio=0.1) # 初始学习率为 warmup_ratio * lr + + total_epochs = 20 # 训练模型的总周期数 + checkpoint_config = dict( # 模型权重文件钩子设置,更多细节可参考 https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/checkpoint.py + interval=1) # 模型权重文件保存间隔 + workflow = [('train', 1)] # runner 的执行流. [('train', 1)] 代表只有一个执行流,并且这个名为 train 的执行流只执行一次 + evaluation = dict( # 训练期间做验证的设置 + interval=1, save_best='mAP@0.5IOU') # 执行验证的间隔,以及设置 `mAP@0.5IOU` 作为指示器,用于存储最好的模型权重文件 + log_config = dict( # 注册日志钩子的设置 + interval=20, # 打印日志间隔 + hooks=[ # 训练期间执行的钩子 + dict(type='TextLoggerHook'), # 记录训练过程信息的日志 + ]) + + # 运行设置 + dist_params = dict(backend='nccl') # 建立分布式训练的设置,其中端口号也可以设置 + log_level = 'INFO' # 日志等级 + work_dir = ('./work_dirs/ava/' # 记录当前实验日志和模型权重文件的文件夹 + 'slowonly_kinetics_pretrained_r50_4x16x1_20e_ava_rgb') + load_from = ('https://download.openmmlab.com/mmaction/recognition/slowonly/' # 从给定路径加载模型作为预训练模型. 这个选项不会用于断点恢复训练 + 'slowonly_r50_4x16x1_256e_kinetics400_rgb/' + 'slowonly_r50_4x16x1_256e_kinetics400_rgb_20200704-a69556c6.pth') + resume_from = None # 加载给定路径的模型权重文件作为断点续连的模型, 训练将从该时间点保存的周期点继续进行 + ``` ## 常见问题 diff --git a/docs_zh_CN/tutorials/2_finetune.md b/docs_zh_CN/tutorials/2_finetune.md index 0c4e7c09d2..dc3f19db25 100644 --- a/docs_zh_CN/tutorials/2_finetune.md +++ b/docs_zh_CN/tutorials/2_finetune.md @@ -4,11 +4,11 @@ -- [概要](#概要) -- [修改 Head](#修改-Head) -- [修改数据集](#修改数据集) -- [修改训练策略](#修改训练策略) -- [使用预训练模型](#使用预训练模型) +- [概要](#%E6%A6%82%E8%A6%81) +- [修改 Head](#%E4%BF%AE%E6%94%B9-Head) +- [修改数据集](#%E4%BF%AE%E6%94%B9%E6%95%B0%E6%8D%AE%E9%9B%86) +- [修改训练策略](#%E4%BF%AE%E6%94%B9%E8%AE%AD%E7%BB%83%E7%AD%96%E7%95%A5) +- [使用预训练模型](#%E4%BD%BF%E7%94%A8%E9%A2%84%E8%AE%AD%E7%BB%83%E6%A8%A1%E5%9E%8B) diff --git a/docs_zh_CN/tutorials/3_new_dataset.md b/docs_zh_CN/tutorials/3_new_dataset.md index 172d73b00d..9b2368c15c 100644 --- a/docs_zh_CN/tutorials/3_new_dataset.md +++ b/docs_zh_CN/tutorials/3_new_dataset.md @@ -4,11 +4,11 @@ -- [通过重组数据来自定义数据集](#通过重组数据来自定义数据集) - - [将数据集重新组织为现有格式](#将数据集重新组织为现有格式) - - [自定义数据集的示例](#自定义数据集的示例) -- [通过组合已有数据集来自定义数据集](#通过组合已有数据集来自定义数据集) - - [重复数据集](#重复数据集) +- [通过重组数据来自定义数据集](#%E9%80%9A%E8%BF%87%E9%87%8D%E7%BB%84%E6%95%B0%E6%8D%AE%E6%9D%A5%E8%87%AA%E5%AE%9A%E4%B9%89%E6%95%B0%E6%8D%AE%E9%9B%86) + - [将数据集重新组织为现有格式](#%E5%B0%86%E6%95%B0%E6%8D%AE%E9%9B%86%E9%87%8D%E6%96%B0%E7%BB%84%E7%BB%87%E4%B8%BA%E7%8E%B0%E6%9C%89%E6%A0%BC%E5%BC%8F) + - [自定义数据集的示例](#%E8%87%AA%E5%AE%9A%E4%B9%89%E6%95%B0%E6%8D%AE%E9%9B%86%E7%9A%84%E7%A4%BA%E4%BE%8B) +- [通过组合已有数据集来自定义数据集](#%E9%80%9A%E8%BF%87%E7%BB%84%E5%90%88%E5%B7%B2%E6%9C%89%E6%95%B0%E6%8D%AE%E9%9B%86%E6%9D%A5%E8%87%AA%E5%AE%9A%E4%B9%89%E6%95%B0%E6%8D%AE%E9%9B%86) + - [重复数据集](#%E9%87%8D%E5%A4%8D%E6%95%B0%E6%8D%AE%E9%9B%86) diff --git a/docs_zh_CN/tutorials/4_data_pipeline.md 
b/docs_zh_CN/tutorials/4_data_pipeline.md index a54bbcbd3d..8f52ff5a25 100644 --- a/docs_zh_CN/tutorials/4_data_pipeline.md +++ b/docs_zh_CN/tutorials/4_data_pipeline.md @@ -4,12 +4,12 @@ -- [教程 4:如何设计数据处理流程](#教程-4如何设计数据处理流程) - - [数据前处理流水线设计](#数据前处理流水线设计) - - [数据加载](#数据加载) - - [数据预处理](#数据预处理) - - [数据格式化](#数据格式化) - - [扩展和使用自定义流水线](#扩展和使用自定义流水线) +- [教程 4:如何设计数据处理流程](#%E6%95%99%E7%A8%8B-4%E5%A6%82%E4%BD%95%E8%AE%BE%E8%AE%A1%E6%95%B0%E6%8D%AE%E5%A4%84%E7%90%86%E6%B5%81%E7%A8%8B) + - [数据前处理流水线设计](#%E6%95%B0%E6%8D%AE%E5%89%8D%E5%A4%84%E7%90%86%E6%B5%81%E6%B0%B4%E7%BA%BF%E8%AE%BE%E8%AE%A1) + - [数据加载](#%E6%95%B0%E6%8D%AE%E5%8A%A0%E8%BD%BD) + - [数据预处理](#%E6%95%B0%E6%8D%AE%E9%A2%84%E5%A4%84%E7%90%86) + - [数据格式化](#%E6%95%B0%E6%8D%AE%E6%A0%BC%E5%BC%8F%E5%8C%96) + - [扩展和使用自定义流水线](#%E6%89%A9%E5%B1%95%E5%92%8C%E4%BD%BF%E7%94%A8%E8%87%AA%E5%AE%9A%E4%B9%89%E6%B5%81%E6%B0%B4%E7%BA%BF) @@ -120,31 +120,31 @@ train_pipeline = [ `SampleFrames` -- 新增: frame_inds, clip_len, frame_interval, num_clips, *total_frames +- 新增: frame_inds, clip_len, frame_interval, num_clips, \*total_frames `DenseSampleFrames` -- 新增: frame_inds, clip_len, frame_interval, num_clips, *total_frames +- 新增: frame_inds, clip_len, frame_interval, num_clips, \*total_frames `PyAVDecode` - 新增: imgs, original_shape -- 更新: *frame_inds +- 更新: \*frame_inds `DecordDecode` - 新增: imgs, original_shape -- 更新: *frame_inds +- 更新: \*frame_inds `OpenCVDecode` - 新增: imgs, original_shape -- 更新: *frame_inds +- 更新: \*frame_inds `RawFrameDecode` - 新增: imgs, original_shape -- 更新: *frame_inds +- 更新: \*frame_inds ### 数据预处理 @@ -223,35 +223,35 @@ train_pipeline = [ 1. 在任何文件写入一个新的处理流水线,如 `my_pipeline.py`。它以一个字典作为输入并返回一个字典 - ```python - from mmaction.datasets import PIPELINES + ```python + from mmaction.datasets import PIPELINES - @PIPELINES.register_module() - class MyTransform: + @PIPELINES.register_module() + class MyTransform: - def __call__(self, results): - results['key'] = value - return results - ``` + def __call__(self, results): + results['key'] = value + return results + ``` 2. 导入新类 - ```python - from .my_pipeline import MyTransform - ``` + ```python + from .my_pipeline import MyTransform + ``` 3. 
在配置文件使用它 - ```python - img_norm_cfg = dict( - mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) - train_pipeline = [ - dict(type='DenseSampleFrames', clip_len=8, frame_interval=8, num_clips=1), - dict(type='RawFrameDecode', io_backend='disk'), - dict(type='MyTransform'), # 使用自定义流水线操作 - dict(type='Normalize', **img_norm_cfg), - dict(type='FormatShape', input_format='NCTHW'), - dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), - dict(type='ToTensor', keys=['imgs', 'label']) - ] - ``` + ```python + img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + train_pipeline = [ + dict(type='DenseSampleFrames', clip_len=8, frame_interval=8, num_clips=1), + dict(type='RawFrameDecode', io_backend='disk'), + dict(type='MyTransform'), # 使用自定义流水线操作 + dict(type='Normalize', **img_norm_cfg), + dict(type='FormatShape', input_format='NCTHW'), + dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), + dict(type='ToTensor', keys=['imgs', 'label']) + ] + ``` diff --git a/docs_zh_CN/tutorials/5_new_modules.md b/docs_zh_CN/tutorials/5_new_modules.md index 870d1886b1..7c381af8b1 100644 --- a/docs_zh_CN/tutorials/5_new_modules.md +++ b/docs_zh_CN/tutorials/5_new_modules.md @@ -4,13 +4,13 @@ -- [自定义优化器](#自定义优化器) -- [自定义优化器构造器](#自定义优化器构造器) -- [开发新组件](#开发新组件) - - [添加新的 backbones](#添加新-backbones) - - [添加新的 heads](#添加新-heads) - - [添加新的 loss function](#添加新-loss-function) -- [添加新的学习率调节器(更新器)](#添加新的学习率调节器(更新器)) +- [自定义优化器](#%E8%87%AA%E5%AE%9A%E4%B9%89%E4%BC%98%E5%8C%96%E5%99%A8) +- [自定义优化器构造器](#%E8%87%AA%E5%AE%9A%E4%B9%89%E4%BC%98%E5%8C%96%E5%99%A8%E6%9E%84%E9%80%A0%E5%99%A8) +- [开发新组件](#%E5%BC%80%E5%8F%91%E6%96%B0%E7%BB%84%E4%BB%B6) + - [添加新的 backbones](#%E6%B7%BB%E5%8A%A0%E6%96%B0-backbones) + - [添加新的 heads](#%E6%B7%BB%E5%8A%A0%E6%96%B0-heads) + - [添加新的 loss function](#%E6%B7%BB%E5%8A%A0%E6%96%B0-loss-function) +- [添加新的学习率调节器(更新器)](#%E6%B7%BB%E5%8A%A0%E6%96%B0%E7%9A%84%E5%AD%A6%E4%B9%A0%E7%8E%87%E8%B0%83%E8%8A%82%E5%99%A8%EF%BC%88%E6%9B%B4%E6%96%B0%E5%99%A8%EF%BC%89) @@ -102,41 +102,41 @@ MMAction2 将模型组件分为 4 种基础模型: 1. 创建新文件 `mmaction/models/backbones/resnet.py` - ```python - import torch.nn as nn + ```python + import torch.nn as nn - from ..builder import BACKBONES + from ..builder import BACKBONES - @BACKBONES.register_module() - class ResNet(nn.Module): + @BACKBONES.register_module() + class ResNet(nn.Module): - def __init__(self, arg1, arg2): - pass + def __init__(self, arg1, arg2): + pass - def forward(self, x): # 应该返回一个元组 - pass + def forward(self, x): # 应该返回一个元组 + pass - def init_weights(self, pretrained=None): - pass - ``` + def init_weights(self, pretrained=None): + pass + ``` 2. 在 `mmaction/models/backbones/__init__.py` 中导入模型 - ```python - from .resnet import ResNet - ``` + ```python + from .resnet import ResNet + ``` 3. 在配置文件中使用它 - ```python - model = dict( - ... - backbone=dict( - type='ResNet', - arg1=xxx, - arg2=xxx), - ) - ``` + ```python + model = dict( + ... + backbone=dict( + type='ResNet', + arg1=xxx, + arg2=xxx), + ) + ``` ### 添加新的 heads @@ -144,45 +144,45 @@ MMAction2 将模型组件分为 4 种基础模型: 1. 
创建新文件 `mmaction/models/heads/tsn_head.py` - 可以通过继承 [BaseHead](/mmaction/models/heads/base.py) 编写一个新的分类头, - 并重写 `init_weights(self)` 和 `forward(self, x)` 方法 + 可以通过继承 [BaseHead](/mmaction/models/heads/base.py) 编写一个新的分类头, + 并重写 `init_weights(self)` 和 `forward(self, x)` 方法 - ```python - from ..builder import HEADS - from .base import BaseHead + ```python + from ..builder import HEADS + from .base import BaseHead - @HEADS.register_module() - class TSNHead(BaseHead): + @HEADS.register_module() + class TSNHead(BaseHead): - def __init__(self, arg1, arg2): - pass + def __init__(self, arg1, arg2): + pass - def forward(self, x): - pass + def forward(self, x): + pass - def init_weights(self): - pass - ``` + def init_weights(self): + pass + ``` 2. 在 `mmaction/models/heads/__init__.py` 中导入模型 - ```python - from .tsn_head import TSNHead - ``` + ```python + from .tsn_head import TSNHead + ``` 3. 在配置文件中使用它 - ```python - model = dict( - ... - cls_head=dict( - type='TSNHead', - num_classes=400, - in_channels=2048, - arg1=xxx, - arg2=xxx), - ``` + ```python + model = dict( + ... + cls_head=dict( + type='TSNHead', + num_classes=400, + in_channels=2048, + arg1=xxx, + arg2=xxx), + ``` ### 添加新的 loss function diff --git a/docs_zh_CN/tutorials/6_export_model.md b/docs_zh_CN/tutorials/6_export_model.md index 8dca014cfc..01b861d058 100644 --- a/docs_zh_CN/tutorials/6_export_model.md +++ b/docs_zh_CN/tutorials/6_export_model.md @@ -4,11 +4,11 @@ -- [支持的模型](#支持的模型) -- [如何使用](#如何使用) - - [准备工作](#准备工作) - - [行为识别器](#行为识别器) - - [时序动作检测器](#时序动作检测器) +- [支持的模型](#%E6%94%AF%E6%8C%81%E7%9A%84%E6%A8%A1%E5%9E%8B) +- [如何使用](#%E5%A6%82%E4%BD%95%E4%BD%BF%E7%94%A8) + - [准备工作](#%E5%87%86%E5%A4%87%E5%B7%A5%E4%BD%9C) + - [行为识别器](#%E8%A1%8C%E4%B8%BA%E8%AF%86%E5%88%AB%E5%99%A8) + - [时序动作检测器](#%E6%97%B6%E5%BA%8F%E5%8A%A8%E4%BD%9C%E6%A3%80%E6%B5%8B%E5%99%A8) diff --git a/docs_zh_CN/tutorials/7_customize_runtime.md b/docs_zh_CN/tutorials/7_customize_runtime.md index 027f0837f0..76507a5088 100644 --- a/docs_zh_CN/tutorials/7_customize_runtime.md +++ b/docs_zh_CN/tutorials/7_customize_runtime.md @@ -4,26 +4,26 @@ -- [定制优化方法](#定制优化方法) - - [使用 PyTorch 内置的优化器](#使用-PyTorch-内置的优化器) - - [定制用户自定义的优化器](#定制用户自定义的优化器) - - [1. 定义一个新的优化器](#1-定义一个新的优化器) - - [2. 注册优化器](#2-注册优化器) - - [3. 在配置文件中指定优化器](#3-在配置文件中指定优化器) - - [定制优化器构造器](#定制优化器构造器) - - [额外设定](#额外设定) -- [定制学习率调整策略](#定制学习率调整策略) -- [定制工作流](#定制工作流) -- [定制钩子](#定制钩子) - - [定制用户自定义钩子](#定制用户自定义钩子) - - [1. 创建一个新钩子](#1-创建一个新钩子) - - [2. 注册新钩子](#2-注册新钩子) - - [3. 修改配置](#3-修改配置) - - [使用 MMCV 内置钩子](#使用-MMCV-内置钩子) - - [修改默认运行的钩子](#修改默认运行的钩子) - - [模型权重文件配置](#模型权重文件配置) - - [日志配置](#日志配置) - - [验证配置](#验证配置) +- [定制优化方法](#%E5%AE%9A%E5%88%B6%E4%BC%98%E5%8C%96%E6%96%B9%E6%B3%95) + - [使用 PyTorch 内置的优化器](#%E4%BD%BF%E7%94%A8-PyTorch-%E5%86%85%E7%BD%AE%E7%9A%84%E4%BC%98%E5%8C%96%E5%99%A8) + - [定制用户自定义的优化器](#%E5%AE%9A%E5%88%B6%E7%94%A8%E6%88%B7%E8%87%AA%E5%AE%9A%E4%B9%89%E7%9A%84%E4%BC%98%E5%8C%96%E5%99%A8) + - [1. 定义一个新的优化器](#1-%E5%AE%9A%E4%B9%89%E4%B8%80%E4%B8%AA%E6%96%B0%E7%9A%84%E4%BC%98%E5%8C%96%E5%99%A8) + - [2. 注册优化器](#2-%E6%B3%A8%E5%86%8C%E4%BC%98%E5%8C%96%E5%99%A8) + - [3. 
在配置文件中指定优化器](#3-%E5%9C%A8%E9%85%8D%E7%BD%AE%E6%96%87%E4%BB%B6%E4%B8%AD%E6%8C%87%E5%AE%9A%E4%BC%98%E5%8C%96%E5%99%A8) + - [定制优化器构造器](#%E5%AE%9A%E5%88%B6%E4%BC%98%E5%8C%96%E5%99%A8%E6%9E%84%E9%80%A0%E5%99%A8) + - [额外设定](#%E9%A2%9D%E5%A4%96%E8%AE%BE%E5%AE%9A) +- [定制学习率调整策略](#%E5%AE%9A%E5%88%B6%E5%AD%A6%E4%B9%A0%E7%8E%87%E8%B0%83%E6%95%B4%E7%AD%96%E7%95%A5) +- [定制工作流](#%E5%AE%9A%E5%88%B6%E5%B7%A5%E4%BD%9C%E6%B5%81) +- [定制钩子](#%E5%AE%9A%E5%88%B6%E9%92%A9%E5%AD%90) + - [定制用户自定义钩子](#%E5%AE%9A%E5%88%B6%E7%94%A8%E6%88%B7%E8%87%AA%E5%AE%9A%E4%B9%89%E9%92%A9%E5%AD%90) + - [1. 创建一个新钩子](#1-%E5%88%9B%E5%BB%BA%E4%B8%80%E4%B8%AA%E6%96%B0%E9%92%A9%E5%AD%90) + - [2. 注册新钩子](#2-%E6%B3%A8%E5%86%8C%E6%96%B0%E9%92%A9%E5%AD%90) + - [3. 修改配置](#3-%E4%BF%AE%E6%94%B9%E9%85%8D%E7%BD%AE) + - [使用 MMCV 内置钩子](#%E4%BD%BF%E7%94%A8-MMCV-%E5%86%85%E7%BD%AE%E9%92%A9%E5%AD%90) + - [修改默认运行的钩子](#%E4%BF%AE%E6%94%B9%E9%BB%98%E8%AE%A4%E8%BF%90%E8%A1%8C%E7%9A%84%E9%92%A9%E5%AD%90) + - [模型权重文件配置](#%E6%A8%A1%E5%9E%8B%E6%9D%83%E9%87%8D%E6%96%87%E4%BB%B6%E9%85%8D%E7%BD%AE) + - [日志配置](#%E6%97%A5%E5%BF%97%E9%85%8D%E7%BD%AE) + - [验证配置](#%E9%AA%8C%E8%AF%81%E9%85%8D%E7%BD%AE) @@ -75,7 +75,7 @@ class MyOptimizer(Optimizer): - 修改 `mmaction/core/optimizer/__init__.py` 来进行调用 - 新定义的模块应导入到 `mmaction/core/optimizer/__init__.py` 中,以便注册器能找到新模块并将其添加: + 新定义的模块应导入到 `mmaction/core/optimizer/__init__.py` 中,以便注册器能找到新模块并将其添加: ```python from .my_optimizer import MyOptimizer @@ -134,32 +134,32 @@ class MyOptimizerConstructor: 下面列出了一些可以稳定训练或加快训练速度的常用设置。用户亦可通过为 MMAction2 创建 PR,发布更多设置。 - __使用梯度裁剪来稳定训练__ - 一些模型需要使用梯度裁剪来剪辑渐变以稳定训练过程。 一个例子如下: + 一些模型需要使用梯度裁剪来剪辑渐变以稳定训练过程。 一个例子如下: - ```python - optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) - ``` + ```python + optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) + ``` - __使用动量调整来加速模型收敛__ - MMAction2 支持动量调整器根据学习率修改模型的动量,从而使模型收敛更快。 - 动量调整程序通常与学习率调整器一起使用,例如,以下配置用于3D检测以加速收敛。 - 更多细节可参考 [CyclicLrUpdater](https://github.com/open-mmlab/mmcv/blob/f48241a65aebfe07db122e9db320c31b685dc674/mmcv/runner/hooks/lr_updater.py#L327) - 和 [CyclicMomentumUpdater](https://github.com/open-mmlab/mmcv/blob/f48241a65aebfe07db122e9db320c31b685dc674/mmcv/runner/hooks/momentum_updater.py#L130)。 - - ```python - lr_config = dict( - policy='cyclic', - target_ratio=(10, 1e-4), - cyclic_times=1, - step_ratio_up=0.4, - ) - momentum_config = dict( - policy='cyclic', - target_ratio=(0.85 / 0.95, 1), - cyclic_times=1, - step_ratio_up=0.4, - ) - ``` + MMAction2 支持动量调整器根据学习率修改模型的动量,从而使模型收敛更快。 + 动量调整程序通常与学习率调整器一起使用,例如,以下配置用于3D检测以加速收敛。 + 更多细节可参考 [CyclicLrUpdater](https://github.com/open-mmlab/mmcv/blob/f48241a65aebfe07db122e9db320c31b685dc674/mmcv/runner/hooks/lr_updater.py#L327) + 和 [CyclicMomentumUpdater](https://github.com/open-mmlab/mmcv/blob/f48241a65aebfe07db122e9db320c31b685dc674/mmcv/runner/hooks/momentum_updater.py#L130)。 + + ```python + lr_config = dict( + policy='cyclic', + target_ratio=(10, 1e-4), + cyclic_times=1, + step_ratio_up=0.4, + ) + momentum_config = dict( + policy='cyclic', + target_ratio=(0.85 / 0.95, 1), + cyclic_times=1, + step_ratio_up=0.4, + ) + ``` ## 定制学习率调整策略 @@ -168,20 +168,20 @@ class MyOptimizerConstructor: - Poly: - ```python - lr_config = dict(policy='poly', power=0.9, min_lr=1e-4, by_epoch=False) - ``` + ```python + lr_config = dict(policy='poly', power=0.9, min_lr=1e-4, by_epoch=False) + ``` - ConsineAnnealing: - ```python - lr_config = dict( - policy='CosineAnnealing', - warmup='linear', - warmup_iters=1000, - warmup_ratio=1.0 / 10, - min_lr_ratio=1e-5) - ``` + ```python + lr_config = dict( + 
policy='CosineAnnealing', + warmup='linear', + warmup_iters=1000, + warmup_ratio=1.0 / 10, + min_lr_ratio=1e-5) + ``` ## 定制工作流 @@ -209,7 +209,7 @@ workflow = [('train', 1)] 2. 配置文件内的关键词 `total_epochs` 控制训练时期数,并且不会影响验证工作流程。 3. 工作流 `[('train', 1), ('val', 1)]` 和 `[('train', 1)]` 不会改变 `EvalHook` 的行为。 因为 `EvalHook` 由 `after_train_epoch` 调用,而验证工作流只会影响 `after_val_epoch` 调用的钩子。 - 因此,`[('train', 1), ('val', 1)]` 和 ``[('train', 1)]`` 的区别在于,runner 在完成每一轮训练后,会计算验证集上的损失。 + 因此,`[('train', 1), ('val', 1)]` 和 `[('train', 1)]` 的区别在于,runner 在完成每一轮训练后,会计算验证集上的损失。 ## 定制钩子 @@ -256,7 +256,7 @@ class MyHook(Hook): - 修改 `mmaction/core/utils/__init__.py` 进行导入 - 新定义的模块应导入到 `mmaction/core/utils/__init__py` 中,以便注册表能找到并添加新模块: + 新定义的模块应导入到 `mmaction/core/utils/__init__py` 中,以便注册表能找到并添加新模块: ```python from .my_hook import MyHook diff --git a/docs_zh_CN/useful_tools.md b/docs_zh_CN/useful_tools.md index 710e513460..a0969a2bff 100644 --- a/docs_zh_CN/useful_tools.md +++ b/docs_zh_CN/useful_tools.md @@ -4,14 +4,14 @@ -- [日志分析](#日志分析) -- [模型复杂度分析](#模型复杂度分析) -- [模型转换](#模型转换) - - [导出 MMAction2 模型为 ONNX 格式(实验特性)](#导出-MMAction2-模型为-ONNX-格式(实验特性)) - - [发布模型](#发布模型) -- [其他脚本](#其他脚本) - - [指标评价](#指标评价) - - [打印完整配置](#打印完整配置) +- [日志分析](#%E6%97%A5%E5%BF%97%E5%88%86%E6%9E%90) +- [模型复杂度分析](#%E6%A8%A1%E5%9E%8B%E5%A4%8D%E6%9D%82%E5%BA%A6%E5%88%86%E6%9E%90) +- [模型转换](#%E6%A8%A1%E5%9E%8B%E8%BD%AC%E6%8D%A2) + - [导出 MMAction2 模型为 ONNX 格式(实验特性)](#%E5%AF%BC%E5%87%BA-MMAction2-%E6%A8%A1%E5%9E%8B%E4%B8%BA-ONNX-%E6%A0%BC%E5%BC%8F%EF%BC%88%E5%AE%9E%E9%AA%8C%E7%89%B9%E6%80%A7%EF%BC%89) + - [发布模型](#%E5%8F%91%E5%B8%83%E6%A8%A1%E5%9E%8B) +- [其他脚本](#%E5%85%B6%E4%BB%96%E8%84%9A%E6%9C%AC) + - [指标评价](#%E6%8C%87%E6%A0%87%E8%AF%84%E4%BB%B7) + - [打印完整配置](#%E6%89%93%E5%8D%B0%E5%AE%8C%E6%95%B4%E9%85%8D%E7%BD%AE) @@ -29,43 +29,43 @@ python tools/analysis/analyze_logs.py plot_curve ${JSON_LOGS} [--keys ${KEYS}] [ - 绘制某日志文件对应的分类损失曲线图。 - ```shell - python tools/analysis/analyze_logs.py plot_curve log.json --keys loss_cls --legend loss_cls - ``` + ```shell + python tools/analysis/analyze_logs.py plot_curve log.json --keys loss_cls --legend loss_cls + ``` - 绘制某日志文件对应的 top-1 和 top-5 准确率曲线图,并将曲线图导出为 PDF 文件。 - ```shell - python tools/analysis/analyze_logs.py plot_curve log.json --keys top1_acc top5_acc --out results.pdf - ``` + ```shell + python tools/analysis/analyze_logs.py plot_curve log.json --keys top1_acc top5_acc --out results.pdf + ``` - 在同一图像内绘制两份日志文件对应的 top-1 准确率曲线图。 - ```shell - python tools/analysis/analyze_logs.py plot_curve log1.json log2.json --keys top1_acc --legend run1 run2 - ``` + ```shell + python tools/analysis/analyze_logs.py plot_curve log1.json log2.json --keys top1_acc --legend run1 run2 + ``` - 用户还可以通过本工具计算平均训练速度。 + 用户还可以通过本工具计算平均训练速度。 - ```shell - python tools/analysis/analyze_logs.py cal_train_time ${JSON_LOGS} [--include-outliers] - ``` + ```shell + python tools/analysis/analyze_logs.py cal_train_time ${JSON_LOGS} [--include-outliers] + ``` - 计算某日志文件对应的平均训练速度。 - ```shell - python tools/analysis/analyze_logs.py cal_train_time work_dirs/some_exp/20200422_153324.log.json - ``` + ```shell + python tools/analysis/analyze_logs.py cal_train_time work_dirs/some_exp/20200422_153324.log.json + ``` - 预计输出结果如下所示: + 预计输出结果如下所示: - ```text - -----Analyze train time of work_dirs/some_exp/20200422_153324.log.json----- - slowest epoch 60, average time is 0.9736 - fastest epoch 18, average time is 0.9001 - time std over epochs is 0.0177 - average iter time: 0.9330 s/iter - ``` + ```text + -----Analyze train time of 
work_dirs/some_exp/20200422_153324.log.json----- + slowest epoch 60, average time is 0.9736 + fastest epoch 18, average time is 0.9001 + time std over epochs is 0.0177 + average iter time: 0.9330 s/iter + ``` ## 模型复杂度分析 @@ -102,15 +102,15 @@ Params: 28.04 M - 对于行为识别模型,请运行: - ```shell - python tools/deployment/pytorch2onnx.py $CONFIG_PATH $CHECKPOINT_PATH --shape $SHAPE --verify - ``` + ```shell + python tools/deployment/pytorch2onnx.py $CONFIG_PATH $CHECKPOINT_PATH --shape $SHAPE --verify + ``` - 对于时序动作检测模型,请运行: - ```shell - python tools/deployment/pytorch2onnx.py $CONFIG_PATH $CHECKPOINT_PATH --is-localizer --shape $SHAPE --verify - ``` + ```shell + python tools/deployment/pytorch2onnx.py $CONFIG_PATH $CHECKPOINT_PATH --is-localizer --shape $SHAPE --verify + ``` ### 发布模型 diff --git a/mmaction/__init__.py b/mmaction/__init__.py index 72a2a34d16..4f0774e219 100644 --- a/mmaction/__init__.py +++ b/mmaction/__init__.py @@ -5,7 +5,7 @@ from .version import __version__ mmcv_minimum_version = '1.3.6' -mmcv_maximum_version = '1.5.0' +mmcv_maximum_version = '1.6.0' mmcv_version = digit_version(mmcv.__version__) assert (digit_version(mmcv_minimum_version) <= mmcv_version diff --git a/tools/data/ava/AVA_annotation_explained.md b/tools/data/ava/AVA_annotation_explained.md index 9d9a3ec39b..3d0002d1b3 100644 --- a/tools/data/ava/AVA_annotation_explained.md +++ b/tools/data/ava/AVA_annotation_explained.md @@ -19,11 +19,11 @@ mmaction2 ## The proposals generated by human detectors -In the annotation folder, `ava_dense_proposals_[train/val/test].FAIR.recall_93.9.pkl` are human proposals generated by a human detector. They are used in training, validation and testing respectively. Take `ava_dense_proposals_train.FAIR.recall_93.9.pkl` as an example. It is a dictionary of size 203626. The key consists of the `videoID` and the `timestamp`. For example, the key `-5KQ66BBWC4,0902` means the values are the detection results for the frame at the $$902_{nd}$$ second in the video `-5KQ66BBWC4`. The values in the dictionary are numpy arrays with shape $$N \times 5$$ , $$N$$ is the number of detected human bounding boxes in the corresponding frame. The format of bounding box is $$[x_1, y_1, x_2, y_2, score], 0 \le x_1, y_1, x_2, w_2, score \le 1$$. $$(x_1, y_1)$$ indicates the top-left corner of the bounding box, $$(x_2, y_2)$$ indicates the bottom-right corner of the bounding box; $$(0, 0)$$ indicates the top-left corner of the image, while $$(1, 1)$$ indicates the bottom-right corner of the image. +In the annotation folder, `ava_dense_proposals_[train/val/test].FAIR.recall_93.9.pkl` are human proposals generated by a human detector. They are used in training, validation and testing respectively. Take `ava_dense_proposals_train.FAIR.recall_93.9.pkl` as an example. It is a dictionary of size 203626. The key consists of the `videoID` and the `timestamp`. For example, the key `-5KQ66BBWC4,0902` means the values are the detection results for the frame at the $$902\_{nd}$$ second in the video `-5KQ66BBWC4`. The values in the dictionary are numpy arrays with shape $$N \\times 5$$ , $$N$$ is the number of detected human bounding boxes in the corresponding frame. The format of bounding box is $$\[x_1, y_1, x_2, y_2, score\], 0 \\le x_1, y_1, x_2, w_2, score \\le 1$$. $$(x_1, y_1)$$ indicates the top-left corner of the bounding box, $$(x_2, y_2)$$ indicates the bottom-right corner of the bounding box; $$(0, 0)$$ indicates the top-left corner of the image, while $$(1, 1)$$ indicates the bottom-right corner of the image. 
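For readers who want to verify this structure, here is a minimal Python sketch. The path follows the `data/ava/annotations` layout used elsewhere in this document, the example key is the one quoted above, and the `encoding='latin1'` argument is an assumption that is commonly needed for pickles written under Python 2 — drop it if the file loads without it.

```python
import pickle

proposal_file = ('data/ava/annotations/'
                 'ava_dense_proposals_train.FAIR.recall_93.9.pkl')

with open(proposal_file, 'rb') as f:
    # encoding='latin1' may be required if the pickle was produced by Python 2.
    proposals = pickle.load(f, encoding='latin1')

print(len(proposals))  # expected: 203626 entries for the training split

key = '-5KQ66BBWC4,0902'  # format: '<videoID>,<timestamp>'
bboxes = proposals[key]   # numpy array of shape (N, 5)

for x1, y1, x2, y2, score in bboxes:
    # All five values are normalized to [0, 1]; multiply (x1, x2) by the
    # frame width and (y1, y2) by the frame height to get pixel coordinates.
    print(f'box=({x1:.3f}, {y1:.3f}, {x2:.3f}, {y2:.3f}) score={score:.3f}')
```

Each retrieved array describes the detected humans for one keyframe and can be paired with the extracted raw frames for that second of video.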
## The ground-truth labels for spatio-temporal action detection -In the annotation folder, `ava_[train/val]_v[2.1/2.2].csv` are ground-truth labels for spatio-temporal action detection, which are used during training & validation. Take `ava_train_v2.1.csv` as an example, it is a csv file with 837318 lines, each line is the annotation for a human instance in one frame. For example, the first line in `ava_train_v2.1.csv` is `'-5KQ66BBWC4,0902,0.077,0.151,0.283,0.811,80,1'`: the first two items `-5KQ66BBWC4` and `0902` indicate that it corresponds to the $$902_{nd}$$ second in the video `-5KQ66BBWC4`. The next four items ($$[0.077(x_1), 0.151(y_1), 0.283(x_2), 0.811(y_2)]$$) indicates the location of the bounding box, the bbox format is the same as human proposals. The next item `80` is the action label. The last item `1` is the ID of this bounding box. +In the annotation folder, `ava_[train/val]_v[2.1/2.2].csv` are ground-truth labels for spatio-temporal action detection, which are used during training & validation. Take `ava_train_v2.1.csv` as an example, it is a csv file with 837318 lines, each line is the annotation for a human instance in one frame. For example, the first line in `ava_train_v2.1.csv` is `'-5KQ66BBWC4,0902,0.077,0.151,0.283,0.811,80,1'`: the first two items `-5KQ66BBWC4` and `0902` indicate that it corresponds to the $$902\_{nd}$$ second in the video `-5KQ66BBWC4`. The next four items ($$\[0.077(x_1), 0.151(y_1), 0.283(x_2), 0.811(y_2)\]$$) indicates the location of the bounding box, the bbox format is the same as human proposals. The next item `80` is the action label. The last item `1` is the ID of this bounding box. ## Excluded timestamps diff --git a/tools/data/hvu/README.md b/tools/data/hvu/README.md index 755e71dbb3..6bcc73f862 100644 --- a/tools/data/hvu/README.md +++ b/tools/data/hvu/README.md @@ -80,8 +80,8 @@ contains tags of a specific category, can be handled with `VideoDataset` or `Raw recognition models can be trained with `BCELossWithLogits`. The following command generates file list for the tag category ${category}, note that the tag category you -specified should be in the 6 tag categories available in HVU: ['action', 'attribute', 'concept', 'event', -'object', 'scene']. +specified should be in the 6 tag categories available in HVU: \['action', 'attribute', 'concept', 'event', +'object', 'scene'\]. ```shell python generate_sub_file_list.py path/to/filelist.json ${category} diff --git a/tools/data/jhmdb/README.md b/tools/data/jhmdb/README.md index 2a39061858..6e2042c138 100644 --- a/tools/data/jhmdb/README.md +++ b/tools/data/jhmdb/README.md @@ -91,8 +91,8 @@ The `JHMDB-GT.pkl` exists as a cache, it contains 6 items as follows: 1. `labels` (list): List of the 21 labels. 2. `gttubes` (dict): Dictionary that contains the ground truth tubes for each video. - A **gttube** is dictionary that associates with each index of label and a list of tubes. - A **tube** is a numpy array with `nframes` rows and 5 columns, each col is in format like ` `. + A **gttube** is dictionary that associates with each index of label and a list of tubes. + A **tube** is a numpy array with `nframes` rows and 5 columns, each col is in format like ` `. 3. `nframes` (dict): Dictionary that contains the number of frames for each video, like `'walk/Panic_in_the_Streets_walk_u_cm_np1_ba_med_5': 16`. 4. `train_videos` (list): A list with `nsplits=1` elements, each one containing the list of training videos. 5. 
`test_videos` (list): A list with `nsplits=1` elements, each one containing the list of testing videos. diff --git a/tools/data/jhmdb/README_zh-CN.md b/tools/data/jhmdb/README_zh-CN.md index 5806a9d338..3e9fb638ae 100644 --- a/tools/data/jhmdb/README_zh-CN.md +++ b/tools/data/jhmdb/README_zh-CN.md @@ -90,8 +90,8 @@ mmaction2 1. `labels` (list):21 个行为类别名称组成的列表 2. `gttubes` (dict):每个视频对应的基准 tubes 组成的字典 - **gttube** 是由标签索引和 tube 列表组成的字典 - **tube** 是一个 `nframes` 行和 5 列的 numpy array,每一列的形式如 ` ` + **gttube** 是由标签索引和 tube 列表组成的字典 + **tube** 是一个 `nframes` 行和 5 列的 numpy array,每一列的形式如 ` ` 3. `nframes` (dict):用以表示每个视频对应的帧数,如 `'walk/Panic_in_the_Streets_walk_u_cm_np1_ba_med_5': 16` 4. `train_videos` (list):包含 `nsplits=1` 的元素,每一项都包含了训练视频的列表 5. `test_videos` (list):包含 `nsplits=1` 的元素,每一项都包含了测试视频的列表 diff --git a/tools/data/kinetics/README.md b/tools/data/kinetics/README.md index 725190ee41..4fc7b6bb1e 100644 --- a/tools/data/kinetics/README.md +++ b/tools/data/kinetics/README.md @@ -1,4 +1,4 @@ -# Preparing Kinetics-[400/600/700] +# Preparing Kinetics-\[400/600/700\] ## Introduction @@ -21,9 +21,9 @@ Before we start, please make sure that the directory is located at `$MMACTION2/t :::{note} Because of the expirations of some YouTube links, the sizes of kinetics dataset copies may be different. Here are the sizes of our kinetics dataset copies that used to train all checkpoints. -| Dataset | training videos | validation videos | -| :---------------:|:---------------:|:---------------:| -| kinetics400 | 240436 | 19796 | +| Dataset | training videos | validation videos | +| :---------: | :-------------: | :---------------: | +| kinetics400 | 240436 | 19796 | ::: diff --git a/tools/data/kinetics/README_zh-CN.md b/tools/data/kinetics/README_zh-CN.md index ef49ba8e8a..feb1045549 100644 --- a/tools/data/kinetics/README_zh-CN.md +++ b/tools/data/kinetics/README_zh-CN.md @@ -1,4 +1,4 @@ -# 准备 Kinetics-[400/600/700] +# 准备 Kinetics-\[400/600/700\] ## 简介 @@ -20,8 +20,8 @@ **注**:由于部分 YouTube 链接失效,爬取的 Kinetics 数据集大小可能与原版不同。以下是我们所使用 Kinetics 数据集的大小: -| 数据集 | 训练视频 | 验证集视频 | -| :-----:|:---------:|:-------:| +| 数据集 | 训练视频 | 验证集视频 | +| :---------: | :----: | :---: | | kinetics400 | 240436 | 19796 | ## 1. 
准备标注文件 diff --git a/tools/data/omnisource/README_zh-CN.md b/tools/data/omnisource/README_zh-CN.md index 90aea5f4d1..4de86a5990 100644 --- a/tools/data/omnisource/README_zh-CN.md +++ b/tools/data/omnisource/README_zh-CN.md @@ -18,14 +18,14 @@ OmniSource 数据集中所有类别均来自 Kinetics-400。MMAction2 所提供 MMAction2 提供所有数据源中属于 Mini-Kinetics 200 类动作的数据,这些数据源包含:Kinetics 数据集,Kinetics 原始数据集(未经裁剪的长视频),来自 Google 和 Instagram 的网络图片,来自 Instagram 的网络视频。为获取这一数据集,用户需先填写 [数据申请表](https://docs.google.com/forms/d/e/1FAIpQLSd8_GlmHzG8FcDbW-OEu__G7qLgOSYZpH-i5vYVJcu7wcb_TQ/viewform?usp=sf_link)。在接收到申请后,下载链接将被发送至用户邮箱。由于发布的数据集均为爬取所得的原始数据,数据集较大,下载需要一定时间。下表中提供了 OmniSource 数据集各个分量的统计信息。 -| 数据集名称 | 样本个数 | 所占空间 | 过滤使用的 Teacher 模型 | 过滤后的样本个数 | 与 k200_val 中样本相似(疑似重复)的样本个数 | -| :-------------: | :------: | :------: | :---------------------: | :--------------: | :------------------------------------------: | -| k200_train | 76030 | 45.6G | N/A | N/A | N/A | -| k200_val | 4838 | 2.9G | N/A | N/A | N/A | -| googleimage_200 | 3050880 | 265.5G | TSN-R50-8seg | 1188695 | 967 | -| insimage_200 | 3654650 | 224.4G | TSN-R50-8seg | 879726 | 116 | -| insvideo_200 | 732855 | 1487.6G | SlowOnly-8x8-R50 | 330680 | 956 | -| k200_raw_train | 76027 | 963.5G | SlowOnly-8x8-R50 | N/A | N/A | +| 数据集名称 | 样本个数 | 所占空间 | 过滤使用的 Teacher 模型 | 过滤后的样本个数 | 与 k200_val 中样本相似(疑似重复)的样本个数 | +| :-------------: | :-----: | :-----: | :--------------: | :------: | :-------------------------: | +| k200_train | 76030 | 45.6G | N/A | N/A | N/A | +| k200_val | 4838 | 2.9G | N/A | N/A | N/A | +| googleimage_200 | 3050880 | 265.5G | TSN-R50-8seg | 1188695 | 967 | +| insimage_200 | 3654650 | 224.4G | TSN-R50-8seg | 879726 | 116 | +| insvideo_200 | 732855 | 1487.6G | SlowOnly-8x8-R50 | 330680 | 956 | +| k200_raw_train | 76027 | 963.5G | SlowOnly-8x8-R50 | N/A | N/A | MMAction2 所发布的 OmniSource 数据集目录结构如下所示: diff --git a/tools/data/ucf101_24/README.md b/tools/data/ucf101_24/README.md index f7c3579eea..8d637965b8 100644 --- a/tools/data/ucf101_24/README.md +++ b/tools/data/ucf101_24/README.md @@ -81,8 +81,8 @@ The `UCF101v2-GT.pkl` exists as a cache, it contains 6 items as follows: 1. `labels` (list): List of the 24 labels. 2. `gttubes` (dict): Dictionary that contains the ground truth tubes for each video. - A **gttube** is dictionary that associates with each index of label and a list of tubes. - A **tube** is a numpy array with `nframes` rows and 5 columns, each col is in format like ` `. + A **gttube** is dictionary that associates with each index of label and a list of tubes. + A **tube** is a numpy array with `nframes` rows and 5 columns, each col is in format like ` `. 3. `nframes` (dict): Dictionary that contains the number of frames for each video, like `'HorseRiding/v_HorseRiding_g05_c02': 151`. 4. `train_videos` (list): A list with `nsplits=1` elements, each one containing the list of training videos. 5. `test_videos` (list): A list with `nsplits=1` elements, each one containing the list of testing videos. diff --git a/tools/data/ucf101_24/README_zh-CN.md b/tools/data/ucf101_24/README_zh-CN.md index e06d05d40b..1e91b2518b 100644 --- a/tools/data/ucf101_24/README_zh-CN.md +++ b/tools/data/ucf101_24/README_zh-CN.md @@ -76,8 +76,8 @@ mmaction2 1. `labels` (list):24 个行为类别名称组成的列表 2. `gttubes` (dict):每个视频对应的基准 tubes 组成的字典 - **gttube** 是由标签索引和 tube 列表组成的字典 - **tube** 是一个 `nframes` 行和 5 列的 numpy array,每一列的形式如 ` ` + **gttube** 是由标签索引和 tube 列表组成的字典 + **tube** 是一个 `nframes` 行和 5 列的 numpy array,每一列的形式如 ` ` 3. 
`nframes` (dict):用以表示每个视频对应的帧数,如 `'HorseRiding/v_HorseRiding_g05_c02': 151` 4. `train_videos` (list):包含 `nsplits=1` 的元素,每一项都包含了训练视频的列表 5. `test_videos` (list):包含 `nsplits=1` 的元素,每一项都包含了测试视频的列表 From 6d6685632f28344e98cf34a14d1226cd6c008391 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Wed, 18 May 2022 20:42:51 +0800 Subject: [PATCH 408/414] [Doc] update myst config (#1637) --- docs/conf.py | 1 + docs_zh_CN/conf.py | 1 + 2 files changed, 2 insertions(+) diff --git a/docs/conf.py b/docs/conf.py index 3248b1f326..049b1065a6 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -124,6 +124,7 @@ def get_version(): html_css_files = ['css/readthedocs.css'] myst_enable_extensions = ['colon_fence'] +myst_heading_anchors = 3 def builder_inited_handler(app): diff --git a/docs_zh_CN/conf.py b/docs_zh_CN/conf.py index 7949166dc9..9ee1b8262b 100644 --- a/docs_zh_CN/conf.py +++ b/docs_zh_CN/conf.py @@ -117,6 +117,7 @@ def get_version(): html_css_files = ['css/readthedocs.css'] myst_enable_extensions = ['colon_fence'] +myst_heading_anchors = 3 language = 'zh_CN' master_doc = 'index' From 3911d185784e9f6c69c50d87d39b44c32b91c8ff Mon Sep 17 00:00:00 2001 From: minsoo jeong <39256153+ms-jeong@users.noreply.github.com> Date: Sat, 28 May 2022 21:54:02 +0900 Subject: [PATCH 409/414] [Doc] Fixed typo error at comment. (#1647) VideoDatset -> VideoDataset --- mmaction/datasets/pipelines/loading.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mmaction/datasets/pipelines/loading.py b/mmaction/datasets/pipelines/loading.py index 4843fcbe50..8814753658 100644 --- a/mmaction/datasets/pipelines/loading.py +++ b/mmaction/datasets/pipelines/loading.py @@ -102,7 +102,7 @@ class SampleFrames: test_mode (bool): Store True when building test or validation dataset. Default: False. start_index (None): This argument is deprecated and moved to dataset - class (``BaseDataset``, ``VideoDatset``, ``RawframeDataset``, etc), + class (``BaseDataset``, ``VideoDataset``, ``RawframeDataset``, etc), see this: https://github.com/open-mmlab/mmaction2/pull/89. keep_tail_frames (bool): Whether to keep tail frames when sampling. Default: False. @@ -282,7 +282,7 @@ class UntrimmedSampleFrames: frame_interval (int): Temporal interval of adjacent sampled frames. Default: 16. start_index (None): This argument is deprecated and moved to dataset - class (``BaseDataset``, ``VideoDatset``, ``RawframeDataset``, etc), + class (``BaseDataset``, ``VideoDataset``, ``RawframeDataset``, etc), see this: https://github.com/open-mmlab/mmaction2/pull/89. 
""" From df4bd7fcc3dd87d8e9c641a037281c308e15233b Mon Sep 17 00:00:00 2001 From: Kaiyang <22972972+KaiyangZhou@users.noreply.github.com> Date: Sat, 28 May 2022 20:54:37 +0800 Subject: [PATCH 410/414] [Fix] Fix a typo in "extract_audio.py": args.num_worker -> args.num_workers (#1644) --- tools/data/extract_audio.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/data/extract_audio.py b/tools/data/extract_audio.py index 46d2367593..ed828f990c 100644 --- a/tools/data/extract_audio.py +++ b/tools/data/extract_audio.py @@ -57,5 +57,5 @@ def parse_args(): print('Total number of videos extracted finished: ', len(done_fullpath_list)) - pool = Pool(args.num_worker) + pool = Pool(args.num_workers) pool.map(extract_audio_wav, fullpath_list) From fda023269e0fd9ed7ac36d48c67d2b86d422d602 Mon Sep 17 00:00:00 2001 From: Manoj Mohan Date: Sat, 28 May 2022 18:27:50 +0530 Subject: [PATCH 411/414] [Fix] Fix hrnet config to handle mmpose/pull/1311 (#1641) * Fix hrnet config to handle mmpose/pull/1311 * fix label map issue in rgb based action recognition --- demo/demo_video_structuralize.py | 3 ++- demo/hrnet_w32_coco_256x192.py | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/demo/demo_video_structuralize.py b/demo/demo_video_structuralize.py index a0cfc93ad7..2de2d8b55e 100644 --- a/demo/demo_video_structuralize.py +++ b/demo/demo_video_structuralize.py @@ -485,7 +485,8 @@ def rgb_based_action_recognition(args): action_results = inference_recognizer( rgb_model, args.video, label_path=args.label_map) rgb_action_result = action_results[0][0] - return rgb_action_result + label_map = [x.strip() for x in open(args.label_map).readlines()] + return label_map[rgb_action_result] def skeleton_based_stdet(args, label_map, human_detections, pose_results, diff --git a/demo/hrnet_w32_coco_256x192.py b/demo/hrnet_w32_coco_256x192.py index 79086b9baa..3806739d45 100644 --- a/demo/hrnet_w32_coco_256x192.py +++ b/demo/hrnet_w32_coco_256x192.py @@ -129,6 +129,7 @@ val_pipeline = [ dict(type='LoadImageFromFile'), + dict(type='TopDownGetBboxCenterScale', padding=1.25), dict(type='TopDownAffine'), dict(type='ToTensor'), dict( From bc129a8d694926ef64beae9c4e4d417e27699d15 Mon Sep 17 00:00:00 2001 From: kennymckormick Date: Thu, 2 Jun 2022 08:50:58 +0800 Subject: [PATCH 412/414] update READMEs --- README.md | 42 +++++++++++++++++++++--------------------- README_zh-CN.md | 38 +++++++++++++++++++------------------- 2 files changed, 40 insertions(+), 40 deletions(-) diff --git a/README.md b/README.md index 01d4459282..95617f0594 100644 --- a/README.md +++ b/README.md @@ -17,6 +17,14 @@ +[![Documentation](https://readthedocs.org/projects/mmaction2/badge/?version=latest)](https://mmaction2.readthedocs.io/en/latest/) +[![actions](https://github.com/open-mmlab/mmaction2/workflows/build/badge.svg)](https://github.com/open-mmlab/mmaction2/actions) +[![codecov](https://codecov.io/gh/open-mmlab/mmaction2/branch/master/graph/badge.svg)](https://codecov.io/gh/open-mmlab/mmaction2) +[![PyPI](https://img.shields.io/pypi/v/mmaction2)](https://pypi.org/project/mmaction2/) +[![LICENSE](https://img.shields.io/github/license/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/blob/master/LICENSE) +[![Average time to resolve an issue](https://isitmaintained.com/badge/resolution/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/issues) +[![Percentage of issues still 
open](https://isitmaintained.com/badge/open/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/issues) + [📘Documentation](https://mmaction2.readthedocs.io/en/latest/) | [🛠️Installation](https://mmaction2.readthedocs.io/en/latest/install.html) | [👀Model Zoo](https://mmaction2.readthedocs.io/en/latest/modelzoo.html) | @@ -26,22 +34,14 @@ -## Introduction - English | [简体中文](/README_zh-CN.md) -[![Documentation](https://readthedocs.org/projects/mmaction2/badge/?version=latest)](https://mmaction2.readthedocs.io/en/latest/) -[![actions](https://github.com/open-mmlab/mmaction2/workflows/build/badge.svg)](https://github.com/open-mmlab/mmaction2/actions) -[![codecov](https://codecov.io/gh/open-mmlab/mmaction2/branch/master/graph/badge.svg)](https://codecov.io/gh/open-mmlab/mmaction2) -[![PyPI](https://img.shields.io/pypi/v/mmaction2)](https://pypi.org/project/mmaction2/) -[![LICENSE](https://img.shields.io/github/license/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/blob/master/LICENSE) -[![Average time to resolve an issue](https://isitmaintained.com/badge/resolution/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/issues) -[![Percentage of issues still open](https://isitmaintained.com/badge/open/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/issues) +## Introduction MMAction2 is an open-source toolbox for video understanding based on PyTorch. It is a part of the [OpenMMLab](http://openmmlab.org/) project. -The master branch works with **PyTorch 1.3+**. +The master branch works with **PyTorch 1.5+**.
    @@ -70,7 +70,7 @@ The master branch works with **PyTorch 1.3+**. - **Well tested and documented**: We provide detailed documentation and API reference, as well as unit tests. -## Updates +## What's New - (2022-03-04) We support **Multigrid** on Kinetics400, achieve 76.07% Top-1 accuracy and accelerate training speed. - (2021-11-24) We support **2s-AGCN** on NTU60 XSub, achieve 86.06% Top-1 accuracy on joint stream and 86.89% Top-1 accuracy on bone stream respectively. @@ -271,9 +271,15 @@ Currently, there are many research works and projects built on MMAction2 by user etc., check [projects.md](docs/projects.md) to see all related projects. -## License +## Contributing -This project is released under the [Apache 2.0 license](LICENSE). +We appreciate all contributions to improve MMAction2. Please refer to [CONTRIBUTING.md](https://github.com/open-mmlab/mmcv/blob/master/CONTRIBUTING.md) in MMCV for more details about the contributing guideline. + +## Acknowledgement + +MMAction2 is an open-source project that is contributed by researchers and engineers from various colleges and companies. +We appreciate all the contributors who implement their methods or add new features and users who give valuable feedback. +We wish that the toolbox and benchmark could serve the growing research community by providing a flexible toolkit to reimplement existing methods and develop their new models. ## Citation @@ -288,15 +294,9 @@ If you find this project useful in your research, please consider cite: } ``` -## Contributing - -We appreciate all contributions to improve MMAction2. Please refer to [CONTRIBUTING.md](https://github.com/open-mmlab/mmcv/blob/master/CONTRIBUTING.md) in MMCV for more details about the contributing guideline. - -## Acknowledgement +## License -MMAction2 is an open-source project that is contributed by researchers and engineers from various colleges and companies. -We appreciate all the contributors who implement their methods or add new features and users who give valuable feedback. -We wish that the toolbox and benchmark could serve the growing research community by providing a flexible toolkit to reimplement existing methods and develop their new models. +This project is released under the [Apache 2.0 license](LICENSE). ## Projects in OpenMMLab diff --git a/README_zh-CN.md b/README_zh-CN.md index f6b5fb4514..245ccfdf98 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -17,6 +17,14 @@
    +[![Documentation](https://readthedocs.org/projects/mmaction2/badge/?version=latest)](https://mmaction2.readthedocs.io/zh_CN/latest/) +[![actions](https://github.com/open-mmlab/mmaction2/workflows/build/badge.svg)](https://github.com/open-mmlab/mmaction2/actions) +[![codecov](https://codecov.io/gh/open-mmlab/mmaction2/branch/master/graph/badge.svg)](https://codecov.io/gh/open-mmlab/mmaction2) +[![PyPI](https://img.shields.io/pypi/v/mmaction2)](https://pypi.org/project/mmaction2/) +[![LICENSE](https://img.shields.io/github/license/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/blob/master/LICENSE) +[![Average time to resolve an issue](https://isitmaintained.com/badge/resolution/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/issues) +[![Percentage of issues still open](https://isitmaintained.com/badge/open/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/issues) + [📘文档](https://mmaction2.readthedocs.io/en/latest/) | [🛠️安装指南](https://mmaction2.readthedocs.io/en/latest/install.html) | [👀模型库](https://mmaction2.readthedocs.io/en/latest/modelzoo.html) | @@ -26,21 +34,13 @@
    -## 简介 - [English](/README.md) | 简体中文 -[![Documentation](https://readthedocs.org/projects/mmaction2/badge/?version=latest)](https://mmaction2.readthedocs.io/zh_CN/latest/) -[![actions](https://github.com/open-mmlab/mmaction2/workflows/build/badge.svg)](https://github.com/open-mmlab/mmaction2/actions) -[![codecov](https://codecov.io/gh/open-mmlab/mmaction2/branch/master/graph/badge.svg)](https://codecov.io/gh/open-mmlab/mmaction2) -[![PyPI](https://img.shields.io/pypi/v/mmaction2)](https://pypi.org/project/mmaction2/) -[![LICENSE](https://img.shields.io/github/license/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/blob/master/LICENSE) -[![Average time to resolve an issue](https://isitmaintained.com/badge/resolution/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/issues) -[![Percentage of issues still open](https://isitmaintained.com/badge/open/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/issues) +## 简介 MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLab](http://openmmlab.org/) 项目的成员之一 -主分支代码目前支持 **PyTorch 1.3 以上**的版本 +主分支代码目前支持 **PyTorch 1.5 以上**的版本
    @@ -264,9 +264,14 @@ MMAction2 将跟进学界的最新进展,并支持更多算法和框架。如 更多详情可见 [相关工作](docs/projects.md) -## 许可 +## 参与贡献 -该项目开源自 [Apache 2.0 license](/LICENSE) +我们非常欢迎用户对于 MMAction2 做出的任何贡献,可以参考 [贡献指南](/.github/CONTRIBUTING.md) 文件了解更多细节 + +## 致谢 + +MMAction2 是一款由不同学校和公司共同贡献的开源项目。我们感谢所有为项目提供算法复现和新功能支持的贡献者,以及提供宝贵反馈的用户。 +我们希望该工具箱和基准测试可以为社区提供灵活的代码工具,供用户复现现有算法并开发自己的新模型,从而不断为开源社区提供贡献。 ## 引用 @@ -281,14 +286,9 @@ MMAction2 将跟进学界的最新进展,并支持更多算法和框架。如 } ``` -## 参与贡献 - -我们非常欢迎用户对于 MMAction2 做出的任何贡献,可以参考 [贡献指南](/.github/CONTRIBUTING.md) 文件了解更多细节 - -## 致谢 +## 许可 -MMAction2 是一款由不同学校和公司共同贡献的开源项目。我们感谢所有为项目提供算法复现和新功能支持的贡献者,以及提供宝贵反馈的用户。 -我们希望该工具箱和基准测试可以为社区提供灵活的代码工具,供用户复现现有算法并开发自己的新模型,从而不断为开源社区提供贡献。 +该项目开源自 [Apache 2.0 license](/LICENSE) ## OpenMMLab 的其他项目 From a0782cbc8f2421e25180d87e020511d2448ed02e Mon Sep 17 00:00:00 2001 From: kennymckormick Date: Thu, 2 Jun 2022 08:55:28 +0800 Subject: [PATCH 413/414] Revert "update READMEs" This reverts commit bc129a8d694926ef64beae9c4e4d417e27699d15. --- README.md | 42 +++++++++++++++++++++--------------------- README_zh-CN.md | 38 +++++++++++++++++++------------------- 2 files changed, 40 insertions(+), 40 deletions(-) diff --git a/README.md b/README.md index 95617f0594..01d4459282 100644 --- a/README.md +++ b/README.md @@ -17,14 +17,6 @@
    -[![Documentation](https://readthedocs.org/projects/mmaction2/badge/?version=latest)](https://mmaction2.readthedocs.io/en/latest/) -[![actions](https://github.com/open-mmlab/mmaction2/workflows/build/badge.svg)](https://github.com/open-mmlab/mmaction2/actions) -[![codecov](https://codecov.io/gh/open-mmlab/mmaction2/branch/master/graph/badge.svg)](https://codecov.io/gh/open-mmlab/mmaction2) -[![PyPI](https://img.shields.io/pypi/v/mmaction2)](https://pypi.org/project/mmaction2/) -[![LICENSE](https://img.shields.io/github/license/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/blob/master/LICENSE) -[![Average time to resolve an issue](https://isitmaintained.com/badge/resolution/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/issues) -[![Percentage of issues still open](https://isitmaintained.com/badge/open/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/issues) - [📘Documentation](https://mmaction2.readthedocs.io/en/latest/) | [🛠️Installation](https://mmaction2.readthedocs.io/en/latest/install.html) | [👀Model Zoo](https://mmaction2.readthedocs.io/en/latest/modelzoo.html) | @@ -34,14 +26,22 @@
    +## Introduction + English | [简体中文](/README_zh-CN.md) -## Introduction +[![Documentation](https://readthedocs.org/projects/mmaction2/badge/?version=latest)](https://mmaction2.readthedocs.io/en/latest/) +[![actions](https://github.com/open-mmlab/mmaction2/workflows/build/badge.svg)](https://github.com/open-mmlab/mmaction2/actions) +[![codecov](https://codecov.io/gh/open-mmlab/mmaction2/branch/master/graph/badge.svg)](https://codecov.io/gh/open-mmlab/mmaction2) +[![PyPI](https://img.shields.io/pypi/v/mmaction2)](https://pypi.org/project/mmaction2/) +[![LICENSE](https://img.shields.io/github/license/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/blob/master/LICENSE) +[![Average time to resolve an issue](https://isitmaintained.com/badge/resolution/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/issues) +[![Percentage of issues still open](https://isitmaintained.com/badge/open/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/issues) MMAction2 is an open-source toolbox for video understanding based on PyTorch. It is a part of the [OpenMMLab](http://openmmlab.org/) project. -The master branch works with **PyTorch 1.5+**. +The master branch works with **PyTorch 1.3+**.
    @@ -70,7 +70,7 @@ The master branch works with **PyTorch 1.5+**. - **Well tested and documented**: We provide detailed documentation and API reference, as well as unit tests. -## What's New +## Updates - (2022-03-04) We support **Multigrid** on Kinetics400, achieve 76.07% Top-1 accuracy and accelerate training speed. - (2021-11-24) We support **2s-AGCN** on NTU60 XSub, achieve 86.06% Top-1 accuracy on joint stream and 86.89% Top-1 accuracy on bone stream respectively. @@ -271,15 +271,9 @@ Currently, there are many research works and projects built on MMAction2 by user etc., check [projects.md](docs/projects.md) to see all related projects. -## Contributing - -We appreciate all contributions to improve MMAction2. Please refer to [CONTRIBUTING.md](https://github.com/open-mmlab/mmcv/blob/master/CONTRIBUTING.md) in MMCV for more details about the contributing guideline. - -## Acknowledgement +## License -MMAction2 is an open-source project that is contributed by researchers and engineers from various colleges and companies. -We appreciate all the contributors who implement their methods or add new features and users who give valuable feedback. -We wish that the toolbox and benchmark could serve the growing research community by providing a flexible toolkit to reimplement existing methods and develop their new models. +This project is released under the [Apache 2.0 license](LICENSE). ## Citation @@ -294,9 +288,15 @@ If you find this project useful in your research, please consider cite: } ``` -## License +## Contributing -This project is released under the [Apache 2.0 license](LICENSE). +We appreciate all contributions to improve MMAction2. Please refer to [CONTRIBUTING.md](https://github.com/open-mmlab/mmcv/blob/master/CONTRIBUTING.md) in MMCV for more details about the contributing guideline. + +## Acknowledgement + +MMAction2 is an open-source project that is contributed by researchers and engineers from various colleges and companies. +We appreciate all the contributors who implement their methods or add new features and users who give valuable feedback. +We wish that the toolbox and benchmark could serve the growing research community by providing a flexible toolkit to reimplement existing methods and develop their new models. ## Projects in OpenMMLab diff --git a/README_zh-CN.md b/README_zh-CN.md index 245ccfdf98..f6b5fb4514 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -17,14 +17,6 @@
    -[![Documentation](https://readthedocs.org/projects/mmaction2/badge/?version=latest)](https://mmaction2.readthedocs.io/zh_CN/latest/) -[![actions](https://github.com/open-mmlab/mmaction2/workflows/build/badge.svg)](https://github.com/open-mmlab/mmaction2/actions) -[![codecov](https://codecov.io/gh/open-mmlab/mmaction2/branch/master/graph/badge.svg)](https://codecov.io/gh/open-mmlab/mmaction2) -[![PyPI](https://img.shields.io/pypi/v/mmaction2)](https://pypi.org/project/mmaction2/) -[![LICENSE](https://img.shields.io/github/license/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/blob/master/LICENSE) -[![Average time to resolve an issue](https://isitmaintained.com/badge/resolution/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/issues) -[![Percentage of issues still open](https://isitmaintained.com/badge/open/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/issues) - [📘文档](https://mmaction2.readthedocs.io/en/latest/) | [🛠️安装指南](https://mmaction2.readthedocs.io/en/latest/install.html) | [👀模型库](https://mmaction2.readthedocs.io/en/latest/modelzoo.html) | @@ -34,13 +26,21 @@
    +## 简介 + [English](/README.md) | 简体中文 -## 简介 +[![Documentation](https://readthedocs.org/projects/mmaction2/badge/?version=latest)](https://mmaction2.readthedocs.io/zh_CN/latest/) +[![actions](https://github.com/open-mmlab/mmaction2/workflows/build/badge.svg)](https://github.com/open-mmlab/mmaction2/actions) +[![codecov](https://codecov.io/gh/open-mmlab/mmaction2/branch/master/graph/badge.svg)](https://codecov.io/gh/open-mmlab/mmaction2) +[![PyPI](https://img.shields.io/pypi/v/mmaction2)](https://pypi.org/project/mmaction2/) +[![LICENSE](https://img.shields.io/github/license/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/blob/master/LICENSE) +[![Average time to resolve an issue](https://isitmaintained.com/badge/resolution/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/issues) +[![Percentage of issues still open](https://isitmaintained.com/badge/open/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/issues) MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLab](http://openmmlab.org/) 项目的成员之一 -主分支代码目前支持 **PyTorch 1.5 以上**的版本 +主分支代码目前支持 **PyTorch 1.3 以上**的版本
    @@ -264,14 +264,9 @@ MMAction2 将跟进学界的最新进展,并支持更多算法和框架。如 更多详情可见 [相关工作](docs/projects.md) -## 参与贡献 - -我们非常欢迎用户对于 MMAction2 做出的任何贡献,可以参考 [贡献指南](/.github/CONTRIBUTING.md) 文件了解更多细节 - -## 致谢 +## 许可 -MMAction2 是一款由不同学校和公司共同贡献的开源项目。我们感谢所有为项目提供算法复现和新功能支持的贡献者,以及提供宝贵反馈的用户。 -我们希望该工具箱和基准测试可以为社区提供灵活的代码工具,供用户复现现有算法并开发自己的新模型,从而不断为开源社区提供贡献。 +该项目开源自 [Apache 2.0 license](/LICENSE) ## 引用 @@ -286,9 +281,14 @@ MMAction2 是一款由不同学校和公司共同贡献的开源项目。我们 } ``` -## 许可 +## 参与贡献 -该项目开源自 [Apache 2.0 license](/LICENSE) +我们非常欢迎用户对于 MMAction2 做出的任何贡献,可以参考 [贡献指南](/.github/CONTRIBUTING.md) 文件了解更多细节 + +## 致谢 + +MMAction2 是一款由不同学校和公司共同贡献的开源项目。我们感谢所有为项目提供算法复现和新功能支持的贡献者,以及提供宝贵反馈的用户。 +我们希望该工具箱和基准测试可以为社区提供灵活的代码工具,供用户复现现有算法并开发自己的新模型,从而不断为开源社区提供贡献。 ## OpenMMLab 的其他项目 From 73866ef27c29d8868205335e2a3de5169ef40742 Mon Sep 17 00:00:00 2001 From: Haodong Duan Date: Thu, 2 Jun 2022 08:57:42 +0800 Subject: [PATCH 414/414] [Doc] update READMEs (#1654) This reverts commit a0782cbc8f2421e25180d87e020511d2448ed02e. --- README.md | 42 +++++++++++++++++++++--------------------- README_zh-CN.md | 38 +++++++++++++++++++------------------- 2 files changed, 40 insertions(+), 40 deletions(-) diff --git a/README.md b/README.md index 01d4459282..95617f0594 100644 --- a/README.md +++ b/README.md @@ -17,6 +17,14 @@
    +[![Documentation](https://readthedocs.org/projects/mmaction2/badge/?version=latest)](https://mmaction2.readthedocs.io/en/latest/) +[![actions](https://github.com/open-mmlab/mmaction2/workflows/build/badge.svg)](https://github.com/open-mmlab/mmaction2/actions) +[![codecov](https://codecov.io/gh/open-mmlab/mmaction2/branch/master/graph/badge.svg)](https://codecov.io/gh/open-mmlab/mmaction2) +[![PyPI](https://img.shields.io/pypi/v/mmaction2)](https://pypi.org/project/mmaction2/) +[![LICENSE](https://img.shields.io/github/license/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/blob/master/LICENSE) +[![Average time to resolve an issue](https://isitmaintained.com/badge/resolution/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/issues) +[![Percentage of issues still open](https://isitmaintained.com/badge/open/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/issues) + [📘Documentation](https://mmaction2.readthedocs.io/en/latest/) | [🛠️Installation](https://mmaction2.readthedocs.io/en/latest/install.html) | [👀Model Zoo](https://mmaction2.readthedocs.io/en/latest/modelzoo.html) | @@ -26,22 +34,14 @@
    -## Introduction - English | [简体中文](/README_zh-CN.md) -[![Documentation](https://readthedocs.org/projects/mmaction2/badge/?version=latest)](https://mmaction2.readthedocs.io/en/latest/) -[![actions](https://github.com/open-mmlab/mmaction2/workflows/build/badge.svg)](https://github.com/open-mmlab/mmaction2/actions) -[![codecov](https://codecov.io/gh/open-mmlab/mmaction2/branch/master/graph/badge.svg)](https://codecov.io/gh/open-mmlab/mmaction2) -[![PyPI](https://img.shields.io/pypi/v/mmaction2)](https://pypi.org/project/mmaction2/) -[![LICENSE](https://img.shields.io/github/license/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/blob/master/LICENSE) -[![Average time to resolve an issue](https://isitmaintained.com/badge/resolution/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/issues) -[![Percentage of issues still open](https://isitmaintained.com/badge/open/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/issues) +## Introduction MMAction2 is an open-source toolbox for video understanding based on PyTorch. It is a part of the [OpenMMLab](http://openmmlab.org/) project. -The master branch works with **PyTorch 1.3+**. +The master branch works with **PyTorch 1.5+**.
    @@ -70,7 +70,7 @@ The master branch works with **PyTorch 1.3+**. - **Well tested and documented**: We provide detailed documentation and API reference, as well as unit tests. -## Updates +## What's New - (2022-03-04) We support **Multigrid** on Kinetics400, achieve 76.07% Top-1 accuracy and accelerate training speed. - (2021-11-24) We support **2s-AGCN** on NTU60 XSub, achieve 86.06% Top-1 accuracy on joint stream and 86.89% Top-1 accuracy on bone stream respectively. @@ -271,9 +271,15 @@ Currently, there are many research works and projects built on MMAction2 by user etc., check [projects.md](docs/projects.md) to see all related projects. -## License +## Contributing -This project is released under the [Apache 2.0 license](LICENSE). +We appreciate all contributions to improve MMAction2. Please refer to [CONTRIBUTING.md](https://github.com/open-mmlab/mmcv/blob/master/CONTRIBUTING.md) in MMCV for more details about the contributing guideline. + +## Acknowledgement + +MMAction2 is an open-source project that is contributed by researchers and engineers from various colleges and companies. +We appreciate all the contributors who implement their methods or add new features and users who give valuable feedback. +We wish that the toolbox and benchmark could serve the growing research community by providing a flexible toolkit to reimplement existing methods and develop their new models. ## Citation @@ -288,15 +294,9 @@ If you find this project useful in your research, please consider cite: } ``` -## Contributing - -We appreciate all contributions to improve MMAction2. Please refer to [CONTRIBUTING.md](https://github.com/open-mmlab/mmcv/blob/master/CONTRIBUTING.md) in MMCV for more details about the contributing guideline. - -## Acknowledgement +## License -MMAction2 is an open-source project that is contributed by researchers and engineers from various colleges and companies. -We appreciate all the contributors who implement their methods or add new features and users who give valuable feedback. -We wish that the toolbox and benchmark could serve the growing research community by providing a flexible toolkit to reimplement existing methods and develop their new models. +This project is released under the [Apache 2.0 license](LICENSE). ## Projects in OpenMMLab diff --git a/README_zh-CN.md b/README_zh-CN.md index f6b5fb4514..245ccfdf98 100644 --- a/README_zh-CN.md +++ b/README_zh-CN.md @@ -17,6 +17,14 @@
    +[![Documentation](https://readthedocs.org/projects/mmaction2/badge/?version=latest)](https://mmaction2.readthedocs.io/zh_CN/latest/) +[![actions](https://github.com/open-mmlab/mmaction2/workflows/build/badge.svg)](https://github.com/open-mmlab/mmaction2/actions) +[![codecov](https://codecov.io/gh/open-mmlab/mmaction2/branch/master/graph/badge.svg)](https://codecov.io/gh/open-mmlab/mmaction2) +[![PyPI](https://img.shields.io/pypi/v/mmaction2)](https://pypi.org/project/mmaction2/) +[![LICENSE](https://img.shields.io/github/license/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/blob/master/LICENSE) +[![Average time to resolve an issue](https://isitmaintained.com/badge/resolution/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/issues) +[![Percentage of issues still open](https://isitmaintained.com/badge/open/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/issues) + [📘文档](https://mmaction2.readthedocs.io/en/latest/) | [🛠️安装指南](https://mmaction2.readthedocs.io/en/latest/install.html) | [👀模型库](https://mmaction2.readthedocs.io/en/latest/modelzoo.html) | @@ -26,21 +34,13 @@
    -## 简介 - [English](/README.md) | 简体中文 -[![Documentation](https://readthedocs.org/projects/mmaction2/badge/?version=latest)](https://mmaction2.readthedocs.io/zh_CN/latest/) -[![actions](https://github.com/open-mmlab/mmaction2/workflows/build/badge.svg)](https://github.com/open-mmlab/mmaction2/actions) -[![codecov](https://codecov.io/gh/open-mmlab/mmaction2/branch/master/graph/badge.svg)](https://codecov.io/gh/open-mmlab/mmaction2) -[![PyPI](https://img.shields.io/pypi/v/mmaction2)](https://pypi.org/project/mmaction2/) -[![LICENSE](https://img.shields.io/github/license/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/blob/master/LICENSE) -[![Average time to resolve an issue](https://isitmaintained.com/badge/resolution/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/issues) -[![Percentage of issues still open](https://isitmaintained.com/badge/open/open-mmlab/mmaction2.svg)](https://github.com/open-mmlab/mmaction2/issues) +## 简介 MMAction2 是一款基于 PyTorch 的视频理解开源工具箱,是 [OpenMMLab](http://openmmlab.org/) 项目的成员之一 -主分支代码目前支持 **PyTorch 1.3 以上**的版本 +主分支代码目前支持 **PyTorch 1.5 以上**的版本
    @@ -264,9 +264,14 @@ MMAction2 将跟进学界的最新进展,并支持更多算法和框架。如 更多详情可见 [相关工作](docs/projects.md) -## 许可 +## 参与贡献 -该项目开源自 [Apache 2.0 license](/LICENSE) +我们非常欢迎用户对于 MMAction2 做出的任何贡献,可以参考 [贡献指南](/.github/CONTRIBUTING.md) 文件了解更多细节 + +## 致谢 + +MMAction2 是一款由不同学校和公司共同贡献的开源项目。我们感谢所有为项目提供算法复现和新功能支持的贡献者,以及提供宝贵反馈的用户。 +我们希望该工具箱和基准测试可以为社区提供灵活的代码工具,供用户复现现有算法并开发自己的新模型,从而不断为开源社区提供贡献。 ## 引用 @@ -281,14 +286,9 @@ MMAction2 将跟进学界的最新进展,并支持更多算法和框架。如 } ``` -## 参与贡献 - -我们非常欢迎用户对于 MMAction2 做出的任何贡献,可以参考 [贡献指南](/.github/CONTRIBUTING.md) 文件了解更多细节 - -## 致谢 +## 许可 -MMAction2 是一款由不同学校和公司共同贡献的开源项目。我们感谢所有为项目提供算法复现和新功能支持的贡献者,以及提供宝贵反馈的用户。 -我们希望该工具箱和基准测试可以为社区提供灵活的代码工具,供用户复现现有算法并开发自己的新模型,从而不断为开源社区提供贡献。 +该项目开源自 [Apache 2.0 license](/LICENSE) ## OpenMMLab 的其他项目
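
The typo corrected in PATCH 410/414 is a common argparse pitfall: `add_argument('--num-workers', ...)` stores the value as `args.num_workers` (dashes become underscores in attribute names), so the misspelled `args.num_worker` passes silently at parse time and only fails with an `AttributeError` once the `Pool(...)` line executes. Below is a minimal standalone sketch of the corrected pattern — the flag name, default inputs, and `extract_one` helper are illustrative stand-ins, not the real `tools/data/extract_audio.py`:

```python
import argparse
from multiprocessing import Pool


def extract_one(path):
    # Hypothetical stand-in for the real per-video extraction call.
    print(f'extracting audio from {path}')


def parse_args():
    parser = argparse.ArgumentParser(description='toy audio extractor')
    # Stored as args.num_workers: argparse turns '-' into '_' when building
    # attribute names, which is exactly the spelling PATCH 410 restores.
    parser.add_argument('--num-workers', type=int, default=2)
    parser.add_argument('videos', nargs='*', default=['a.mp4', 'b.mp4'])
    return parser.parse_args()


if __name__ == '__main__':
    args = parse_args()
    pool = Pool(args.num_workers)  # args.num_worker would raise AttributeError
    pool.map(extract_one, args.videos)
    pool.close()
    pool.join()
```

Running `python sketch.py --num-workers 4 a.mp4 b.mp4` exercises the corrected attribute; switching the `Pool` line back to `args.num_worker` reproduces the original crash.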
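
The demo fix in PATCH 411/414 follows a similar shape in miniature: `inference_recognizer` yields an integer class index (`action_results[0][0]`), and the fix resolves that index through the label-map file before returning, so callers receive a class name rather than a bare number. A sketch of the lookup, assuming a hypothetical `label_map.txt` with one class name per line:

```python
def index_to_label(label_map_path, class_index):
    # Read one class name per line, then resolve the predicted index to
    # its human-readable label -- the pattern PATCH 411 inlines into
    # rgb_based_action_recognition().
    with open(label_map_path) as f:
        label_map = [line.strip() for line in f]
    return label_map[class_index]


# e.g. print(index_to_label('label_map.txt', 0)) prints the first class name
```

The merged fix is these same two lines inlined: build `label_map` from `args.label_map` and index it with the predicted class before returning.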